diff --git a/pyperformance/data-files/benchmarks/MANIFEST b/pyperformance/data-files/benchmarks/MANIFEST index d472c2c1..5e2e28bc 100644 --- a/pyperformance/data-files/benchmarks/MANIFEST +++ b/pyperformance/data-files/benchmarks/MANIFEST @@ -69,6 +69,7 @@ sympy telco tomli_loads tornado_http +typeshed_stats unpack_sequence unpickle unpickle_list diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/LICENSE b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/LICENSE new file mode 100644 index 00000000..13264487 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/LICENSE @@ -0,0 +1,237 @@ +The "typeshed" project is licensed under the terms of the Apache license, as +reproduced below. + += = = = = + +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + += = = = = + +Parts of typeshed are licensed under different licenses (like the MIT +license), reproduced below. + += = = = = + +The MIT License + +Copyright (c) 2015 Jukka Lehtosalo and contributors + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+ += = = = = diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/pyrightconfig.json b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/pyrightconfig.json new file mode 100644 index 00000000..c4460210 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/pyrightconfig.json @@ -0,0 +1,45 @@ +{ + "$schema": "https://raw.githubusercontent.com/microsoft/pyright/main/packages/vscode-pyright/schemas/pyrightconfig.schema.json", + "typeshedPath": ".", + "include": [ + "stdlib", + "stubs", + ], + "exclude": [ + // test cases use a custom config file + "stubs/**/@tests/test_cases" + ], + "typeCheckingMode": "strict", + // Allowed in base settings for incomplete stubs, checked in stricter settings + "reportIncompleteStub": "none", + "reportMissingParameterType": "none", + "reportUnknownMemberType": "none", + "reportUnknownParameterType": "none", + "reportUnknownVariableType": "none", + // Extra strict settings + "reportCallInDefaultInitializer": "error", + "reportImplicitStringConcatenation": "error", + "reportUnnecessaryTypeIgnoreComment": "error", + // Leave "type: ignore" comments to mypy + "enableTypeIgnoreComments": false, + // No effect in stubs + "reportMissingSuperCall": "none", + "reportUninitializedInstanceVariable": "none", + // stdlib stubs trigger reportShadowedImports + "reportShadowedImports": "none", + // Stubs are allowed to use private variables + "reportPrivateUsage": "none", + // Stubs don't need the actual modules to be installed + "reportMissingModuleSource": "none", + // Incompatible overrides and property type mismatches are out of typeshed's control + // as they are inherited from the implementation. + "reportIncompatibleMethodOverride": "none", + "reportIncompatibleVariableOverride": "none", + "reportPropertyTypeMismatch": "none", + // Overlapping overloads are often necessary in a stub, meaning pyright's check + // (which is stricter than mypy's; see mypy issue #10143 and #10157) + // would cause many false positives and catch few bugs. + "reportOverlappingOverload": "none", + // The name of the self/cls parameter is out of typeshed's control. 
+ "reportSelfClsParameterName": "none", +} diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/pyrightconfig.stricter.json b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/pyrightconfig.stricter.json new file mode 100644 index 00000000..9125ba43 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/pyrightconfig.stricter.json @@ -0,0 +1,107 @@ +{ + "$schema": "https://raw.githubusercontent.com/microsoft/pyright/main/packages/vscode-pyright/schemas/pyrightconfig.schema.json", + "typeshedPath": ".", + "include": [ + "stdlib", + "stubs", + ], + "exclude": [ + // test cases use a custom pyrightconfig file + "stubs/**/@tests/test_cases", + "stdlib/distutils/command", + "stdlib/distutils/dist.pyi", + "stdlib/lib2to3/refactor.pyi", + "stdlib/_tkinter.pyi", + "stdlib/tkinter/__init__.pyi", + "stdlib/tkinter/filedialog.pyi", + "stdlib/tkinter/messagebox.pyi", + "stdlib/tkinter/tix.pyi", + "stdlib/tkinter/ttk.pyi", + "stdlib/xml/dom/NodeFilter.pyi", + "stdlib/xml/dom/expatbuilder.pyi", + "stdlib/xml/dom/minidom.pyi", + "stdlib/xml/dom/pulldom.pyi", + "stdlib/xml/sax", + "stubs/aws-xray-sdk", + "stubs/babel", + "stubs/bleach", + "stubs/boto", + "stubs/beautifulsoup4", + "stubs/braintree", + "stubs/caldav", + "stubs/cffi", + "stubs/commonmark", + "stubs/dateparser", + "stubs/docutils", + "stubs/Flask-Migrate", + "stubs/Flask-SQLAlchemy", + "stubs/fpdf2", + "stubs/html5lib", + "stubs/httplib2", + "stubs/humanfriendly", + "stubs/invoke", + "stubs/jmespath", + "stubs/jsonschema", + "stubs/ldap3", + "stubs/Markdown", + "stubs/mysqlclient", + "stubs/oauthlib", + "stubs/openpyxl", + "stubs/Pillow", + "stubs/prettytable", + "stubs/protobuf", + "stubs/google-cloud-ndb", + "stubs/influxdb-client", + "stubs/passlib", + "stubs/peewee", + "stubs/pika", + "stubs/psutil", + "stubs/psycopg2", + "stubs/pyasn1", + "stubs/pyflakes", + "stubs/Pygments", + "stubs/PyMySQL", + "stubs/python-dateutil", + "stubs/python-jose", + "stubs/pywin32", + "stubs/PyYAML", + "stubs/redis", + "stubs/requests", + "stubs/setuptools", + "stubs/SQLAlchemy", + "stubs/stripe", + "stubs/tqdm", + "stubs/ttkthemes", + "stubs/urllib3", + "stubs/vobject", + ], + "typeCheckingMode": "strict", + // TODO: Complete incomplete stubs + "reportIncompleteStub": "none", + // Extra strict settings + "reportCallInDefaultInitializer": "error", + "reportImplicitStringConcatenation": "error", + "reportUnnecessaryTypeIgnoreComment": "error", + // Leave "type: ignore" comments to mypy + "enableTypeIgnoreComments": false, + // No effect in stubs + "reportMissingSuperCall": "none", + "reportUninitializedInstanceVariable": "none", + // stdlib stubs trigger reportShadowedImports + "reportShadowedImports": "none", + // Stubs are allowed to use private variables + "reportPrivateUsage": "none", + // Stubs don't need the actual modules to be installed + "reportMissingModuleSource": "none", + // Incompatible overrides and property type mismatches are out of typeshed's control + // as they are inherited from the implementation. + "reportIncompatibleMethodOverride": "none", + "reportIncompatibleVariableOverride": "none", + "reportPropertyTypeMismatch": "none", + // Overlapping overloads are often necessary in a stub, meaning pyright's check + // (which is stricter than mypy's; see mypy issue #10143 and #10157) + // would cause many false positives and catch few bugs. + "reportOverlappingOverload": "none", + // The name of the self/cls parameter is out of typeshed's control. 
+ "reportSelfClsParameterName": "none", +} diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/VERSIONS b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/VERSIONS new file mode 100644 index 00000000..bd1abd20 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/VERSIONS @@ -0,0 +1,299 @@ +# The structure of this file is as follows: +# - Blank lines and comments starting with `#` are ignored. +# - Lines contain the name of a module, followed by a colon, +# a space, and a version range (for example: `symbol: 2.7-3.9`). +# +# Version ranges may be of the form "X.Y-A.B" or "X.Y-". The +# first form means that a module was introduced in version X.Y and last +# available in version A.B. The second form means that the module was +# introduced in version X.Y and is still available in the latest +# version of Python. +# +# If a submodule is not listed separately, it has the same lifetime as +# its parent module. +# +# Python versions before 2.7 are ignored, so any module that was already +# present in 2.7 will have "2.7" as its minimum version. Version ranges +# for unsupported versions of Python 3 are generally accurate but we do +# not guarantee their correctness. + +__future__: 2.7- +__main__: 2.7- +_ast: 2.7- +_bisect: 2.7- +_bootlocale: 3.4-3.9 +_codecs: 2.7- +_collections_abc: 3.3- +_compat_pickle: 3.1- +_compression: 3.5- +_csv: 2.7- +_ctypes: 2.7- +_curses: 2.7- +_decimal: 3.3- +_dummy_thread: 3.0-3.8 +_dummy_threading: 2.7-3.8 +_heapq: 2.7- +_imp: 3.0- +_json: 2.7- +_markupbase: 2.7- +_msi: 2.7- +_operator: 3.4- +_osx_support: 2.7- +_posixsubprocess: 3.2- +_py_abc: 3.7- +_pydecimal: 3.5- +_random: 2.7- +_sitebuiltins: 3.4- +_socket: 3.0- # present in 2.7 at runtime, but not in typeshed +_stat: 3.4- +_thread: 2.7- +_threading_local: 2.7- +_tkinter: 2.7- +_tracemalloc: 3.4- +_typeshed: 2.7- # not present at runtime, only for type checking +_warnings: 2.7- +_weakref: 2.7- +_weakrefset: 2.7- +_winapi: 3.3- +abc: 2.7- +aifc: 2.7- +antigravity: 2.7- +argparse: 2.7- +array: 2.7- +ast: 2.7- +asynchat: 2.7- +asyncio: 3.4- +asyncio.mixins: 3.10- +asyncio.exceptions: 3.8- +asyncio.format_helpers: 3.7- +asyncio.runners: 3.7- +asyncio.staggered: 3.8- +asyncio.taskgroups: 3.11- +asyncio.threads: 3.9- +asyncio.timeouts: 3.11- +asyncio.trsock: 3.8- +asyncore: 2.7- +atexit: 2.7- +audioop: 2.7- +base64: 2.7- +bdb: 2.7- +binascii: 2.7- +binhex: 2.7-3.10 +bisect: 2.7- +builtins: 3.0- +bz2: 2.7- +cProfile: 2.7- +calendar: 2.7- +cgi: 2.7- +cgitb: 2.7- +chunk: 2.7- +cmath: 2.7- +cmd: 2.7- +code: 2.7- +codecs: 2.7- +codeop: 2.7- +collections: 2.7- +collections.abc: 3.3- +colorsys: 2.7- +compileall: 2.7- +concurrent: 3.2- +configparser: 3.0- +contextlib: 2.7- +contextvars: 3.7- +copy: 2.7- +copyreg: 2.7- +crypt: 2.7- +csv: 2.7- +ctypes: 2.7- +curses: 2.7- +dataclasses: 3.7- +datetime: 2.7- +dbm: 2.7- +decimal: 2.7- +difflib: 2.7- +dis: 2.7- +distutils: 2.7- +distutils.command.bdist_msi: 2.7-3.10 +distutils.command.bdist_wininst: 2.7-3.9 +doctest: 2.7- +dummy_threading: 2.7-3.8 +email: 2.7- +encodings: 2.7- +ensurepip: 2.7- +enum: 3.4- +errno: 2.7- +faulthandler: 3.3- +fcntl: 2.7- +filecmp: 2.7- +fileinput: 2.7- +fnmatch: 2.7- +formatter: 2.7-3.9 +fractions: 2.7- +ftplib: 2.7- +functools: 2.7- +gc: 2.7- +genericpath: 2.7- +getopt: 2.7- +getpass: 2.7- +gettext: 2.7- +glob: 2.7- +graphlib: 3.9- +grp: 2.7- +gzip: 2.7- +hashlib: 2.7- +heapq: 2.7- +hmac: 2.7- +html: 3.0- +http: 3.0- +imaplib: 2.7- +imghdr: 2.7- +imp: 
2.7- +importlib: 2.7- +importlib.metadata: 3.8- +importlib.metadata._meta: 3.10- +importlib.resources: 3.7- +inspect: 2.7- +io: 2.7- +ipaddress: 3.3- +itertools: 2.7- +json: 2.7- +keyword: 2.7- +lib2to3: 2.7- +linecache: 2.7- +locale: 2.7- +logging: 2.7- +lzma: 3.3- +macpath: 2.7-3.7 +mailbox: 2.7- +mailcap: 2.7- +marshal: 2.7- +math: 2.7- +mimetypes: 2.7- +mmap: 2.7- +modulefinder: 2.7- +msilib: 2.7- +msvcrt: 2.7- +multiprocessing: 2.7- +multiprocessing.resource_tracker: 3.8- +multiprocessing.shared_memory: 3.8- +netrc: 2.7- +nis: 2.7- +nntplib: 2.7- +ntpath: 2.7- +nturl2path: 2.7- +numbers: 2.7- +opcode: 2.7- +operator: 2.7- +optparse: 2.7- +os: 2.7- +ossaudiodev: 2.7- +parser: 2.7-3.9 +pathlib: 3.4- +pdb: 2.7- +pickle: 2.7- +pickletools: 2.7- +pipes: 2.7- +pkgutil: 2.7- +platform: 2.7- +plistlib: 2.7- +poplib: 2.7- +posix: 2.7- +posixpath: 2.7- +pprint: 2.7- +profile: 2.7- +pstats: 2.7- +pty: 2.7- +pwd: 2.7- +py_compile: 2.7- +pyclbr: 2.7- +pydoc: 2.7- +pydoc_data: 2.7- +pyexpat: 2.7- +queue: 3.0- +quopri: 2.7- +random: 2.7- +re: 2.7- +readline: 2.7- +reprlib: 3.0- +resource: 2.7- +rlcompleter: 2.7- +runpy: 2.7- +sched: 2.7- +secrets: 3.6- +select: 2.7- +selectors: 3.4- +shelve: 2.7- +shlex: 2.7- +shutil: 2.7- +signal: 2.7- +site: 2.7- +smtpd: 2.7- +smtplib: 2.7- +sndhdr: 2.7- +socket: 2.7- +socketserver: 3.0- +spwd: 2.7- +sqlite3: 2.7- +sre_compile: 2.7- +sre_constants: 2.7- +sre_parse: 2.7- +ssl: 2.7- +stat: 2.7- +statistics: 3.4- +string: 2.7- +stringprep: 2.7- +struct: 2.7- +subprocess: 2.7- +sunau: 2.7- +symbol: 2.7-3.9 +symtable: 2.7- +sys: 2.7- +sysconfig: 2.7- +syslog: 2.7- +tabnanny: 2.7- +tarfile: 2.7- +telnetlib: 2.7- +tempfile: 2.7- +termios: 2.7- +textwrap: 2.7- +this: 2.7- +threading: 2.7- +time: 2.7- +timeit: 2.7- +tkinter: 3.0- +token: 2.7- +tokenize: 2.7- +tomllib: 3.11- +trace: 2.7- +traceback: 2.7- +tracemalloc: 3.4- +tty: 2.7- +turtle: 2.7- +types: 2.7- +typing: 3.5- +typing_extensions: 2.7- +unicodedata: 2.7- +unittest: 2.7- +unittest._log: 3.9- +unittest.async_case: 3.8- +urllib: 2.7- +uu: 2.7- +uuid: 2.7- +venv: 3.3- +warnings: 2.7- +wave: 2.7- +weakref: 2.7- +webbrowser: 2.7- +winreg: 3.0- +winsound: 2.7- +wsgiref: 2.7- +wsgiref.types: 3.11- +xdrlib: 2.7- +xml: 2.7- +xmlrpc: 3.0- +xxlimited: 3.2- +zipapp: 3.5- +zipfile: 2.7- +zipimport: 2.7- +zlib: 2.7- +zoneinfo: 3.9- diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/__future__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/__future__.pyi new file mode 100644 index 00000000..a90cf1ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/__future__.pyi @@ -0,0 +1,36 @@ +from typing_extensions import TypeAlias + +_VersionInfo: TypeAlias = tuple[int, int, int, str, int] + +class _Feature: + def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ... + def getOptionalRelease(self) -> _VersionInfo: ... + def getMandatoryRelease(self) -> _VersionInfo | None: ... 
+ compiler_flag: int + +absolute_import: _Feature +division: _Feature +generators: _Feature +nested_scopes: _Feature +print_function: _Feature +unicode_literals: _Feature +with_statement: _Feature +barry_as_FLUFL: _Feature +generator_stop: _Feature +annotations: _Feature + +all_feature_names: list[str] # undocumented + +__all__ = [ + "all_feature_names", + "absolute_import", + "division", + "generators", + "nested_scopes", + "print_function", + "unicode_literals", + "with_statement", + "barry_as_FLUFL", + "generator_stop", + "annotations", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/__main__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/__main__.pyi new file mode 100644 index 00000000..e27843e5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/__main__.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_ast.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_ast.pyi new file mode 100644 index 00000000..7bc47266 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_ast.pyi @@ -0,0 +1,573 @@ +import sys +from typing import Any, ClassVar +from typing_extensions import Literal, TypeAlias + +PyCF_ONLY_AST: Literal[1024] +if sys.version_info >= (3, 8): + PyCF_TYPE_COMMENTS: Literal[4096] + PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192] + +_Identifier: TypeAlias = str + +class AST: + if sys.version_info >= (3, 10): + __match_args__ = () + _attributes: ClassVar[tuple[str, ...]] + _fields: ClassVar[tuple[str, ...]] + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + # TODO: Not all nodes have all of the following attributes + lineno: int + col_offset: int + if sys.version_info >= (3, 8): + end_lineno: int | None + end_col_offset: int | None + type_comment: str | None + +class mod(AST): ... + +if sys.version_info >= (3, 8): + class type_ignore(AST): ... + + class TypeIgnore(type_ignore): + if sys.version_info >= (3, 10): + __match_args__ = ("lineno", "tag") + tag: str + + class FunctionType(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("argtypes", "returns") + argtypes: list[expr] + returns: expr + +class Module(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("body", "type_ignores") + body: list[stmt] + if sys.version_info >= (3, 8): + type_ignores: list[TypeIgnore] + +class Interactive(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("body",) + body: list[stmt] + +class Expression(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("body",) + body: expr + +class stmt(AST): ... 
+ +class FunctionDef(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") + name: _Identifier + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + +class AsyncFunctionDef(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") + name: _Identifier + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + +class ClassDef(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("name", "bases", "keywords", "body", "decorator_list") + name: _Identifier + bases: list[expr] + keywords: list[keyword] + body: list[stmt] + decorator_list: list[expr] + +class Return(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr | None + +class Delete(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("targets",) + targets: list[expr] + +class Assign(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("targets", "value", "type_comment") + targets: list[expr] + value: expr + +class AugAssign(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "op", "value") + target: Name | Attribute | Subscript + op: operator + value: expr + +class AnnAssign(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "annotation", "value", "simple") + target: Name | Attribute | Subscript + annotation: expr + value: expr | None + simple: int + +class For(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "body", "orelse", "type_comment") + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + +class AsyncFor(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "body", "orelse", "type_comment") + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + +class While(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: list[stmt] + orelse: list[stmt] + +class If(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: list[stmt] + orelse: list[stmt] + +class With(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("items", "body", "type_comment") + items: list[withitem] + body: list[stmt] + +class AsyncWith(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("items", "body", "type_comment") + items: list[withitem] + body: list[stmt] + +class Raise(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("exc", "cause") + exc: expr | None + cause: expr | None + +class Try(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("body", "handlers", "orelse", "finalbody") + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + finalbody: list[stmt] + +if sys.version_info >= (3, 11): + class TryStar(stmt): + __match_args__ = ("body", "handlers", "orelse", "finalbody") + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + finalbody: list[stmt] + +class Assert(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "msg") + test: expr + msg: expr | None + +class Import(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[alias] + +class ImportFrom(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("module", "names", "level") + module: str | None + names: list[alias] + level: int + +class Global(stmt): + if sys.version_info >= (3, 10): + 
__match_args__ = ("names",) + names: list[_Identifier] + +class Nonlocal(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[_Identifier] + +class Expr(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + +class Pass(stmt): ... +class Break(stmt): ... +class Continue(stmt): ... +class expr(AST): ... + +class BoolOp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("op", "values") + op: boolop + values: list[expr] + +class BinOp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("left", "op", "right") + left: expr + op: operator + right: expr + +class UnaryOp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("op", "operand") + op: unaryop + operand: expr + +class Lambda(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("args", "body") + args: arguments + body: expr + +class IfExp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: expr + orelse: expr + +class Dict(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("keys", "values") + keys: list[expr | None] + values: list[expr] + +class Set(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elts",) + elts: list[expr] + +class ListComp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + +class SetComp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + +class DictComp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("key", "value", "generators") + key: expr + value: expr + generators: list[comprehension] + +class GeneratorExp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + +class Await(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + +class Yield(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr | None + +class YieldFrom(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + +class Compare(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("left", "ops", "comparators") + left: expr + ops: list[cmpop] + comparators: list[expr] + +class Call(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("func", "args", "keywords") + func: expr + args: list[expr] + keywords: list[keyword] + +class FormattedValue(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "conversion", "format_spec") + value: expr + conversion: int + format_spec: expr | None + +class JoinedStr(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("values",) + values: list[expr] + +if sys.version_info < (3, 8): + class Num(expr): # Deprecated in 3.8; use Constant + n: int | float | complex + + class Str(expr): # Deprecated in 3.8; use Constant + s: str + + class Bytes(expr): # Deprecated in 3.8; use Constant + s: bytes + + class NameConstant(expr): # Deprecated in 3.8; use Constant + value: Any + + class Ellipsis(expr): ... 
# Deprecated in 3.8; use Constant + +class Constant(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "kind") + value: Any # None, str, bytes, bool, int, float, complex, Ellipsis + kind: str | None + # Aliases for value, for backwards compatibility + s: Any + n: int | float | complex + +if sys.version_info >= (3, 8): + class NamedExpr(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "value") + target: Name + value: expr + +class Attribute(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "attr", "ctx") + value: expr + attr: _Identifier + ctx: expr_context + +if sys.version_info >= (3, 9): + _Slice: TypeAlias = expr +else: + class slice(AST): ... + _Slice: TypeAlias = slice + +class Slice(_Slice): + if sys.version_info >= (3, 10): + __match_args__ = ("lower", "upper", "step") + lower: expr | None + upper: expr | None + step: expr | None + +if sys.version_info < (3, 9): + class ExtSlice(slice): + dims: list[slice] + + class Index(slice): + value: expr + +class Subscript(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "slice", "ctx") + value: expr + slice: _Slice + ctx: expr_context + +class Starred(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "ctx") + value: expr + ctx: expr_context + +class Name(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("id", "ctx") + id: _Identifier + ctx: expr_context + +class List(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elts", "ctx") + elts: list[expr] + ctx: expr_context + +class Tuple(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elts", "ctx") + elts: list[expr] + ctx: expr_context + if sys.version_info >= (3, 9): + dims: list[expr] + +class expr_context(AST): ... + +if sys.version_info < (3, 9): + class AugLoad(expr_context): ... + class AugStore(expr_context): ... + class Param(expr_context): ... + + class Suite(mod): + body: list[stmt] + +class Del(expr_context): ... +class Load(expr_context): ... +class Store(expr_context): ... +class boolop(AST): ... +class And(boolop): ... +class Or(boolop): ... +class operator(AST): ... +class Add(operator): ... +class BitAnd(operator): ... +class BitOr(operator): ... +class BitXor(operator): ... +class Div(operator): ... +class FloorDiv(operator): ... +class LShift(operator): ... +class Mod(operator): ... +class Mult(operator): ... +class MatMult(operator): ... +class Pow(operator): ... +class RShift(operator): ... +class Sub(operator): ... +class unaryop(AST): ... +class Invert(unaryop): ... +class Not(unaryop): ... +class UAdd(unaryop): ... +class USub(unaryop): ... +class cmpop(AST): ... +class Eq(cmpop): ... +class Gt(cmpop): ... +class GtE(cmpop): ... +class In(cmpop): ... +class Is(cmpop): ... +class IsNot(cmpop): ... +class Lt(cmpop): ... +class LtE(cmpop): ... +class NotEq(cmpop): ... +class NotIn(cmpop): ... + +class comprehension(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "ifs", "is_async") + target: expr + iter: expr + ifs: list[expr] + is_async: int + +class excepthandler(AST): ... 
+ +class ExceptHandler(excepthandler): + if sys.version_info >= (3, 10): + __match_args__ = ("type", "name", "body") + type: expr | None + name: _Identifier | None + body: list[stmt] + +class arguments(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults") + if sys.version_info >= (3, 8): + posonlyargs: list[arg] + args: list[arg] + vararg: arg | None + kwonlyargs: list[arg] + kw_defaults: list[expr | None] + kwarg: arg | None + defaults: list[expr] + +class arg(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("arg", "annotation", "type_comment") + arg: _Identifier + annotation: expr | None + +class keyword(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("arg", "value") + arg: _Identifier | None + value: expr + +class alias(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("name", "asname") + name: _Identifier + asname: _Identifier | None + +class withitem(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("context_expr", "optional_vars") + context_expr: expr + optional_vars: expr | None + +if sys.version_info >= (3, 10): + class Match(stmt): + __match_args__ = ("subject", "cases") + subject: expr + cases: list[match_case] + + class pattern(AST): ... + # Without the alias, Pyright complains variables named pattern are recursively defined + _Pattern: TypeAlias = pattern + + class match_case(AST): + __match_args__ = ("pattern", "guard", "body") + pattern: _Pattern + guard: expr | None + body: list[stmt] + + class MatchValue(pattern): + __match_args__ = ("value",) + value: expr + + class MatchSingleton(pattern): + __match_args__ = ("value",) + value: Literal[True, False, None] + + class MatchSequence(pattern): + __match_args__ = ("patterns",) + patterns: list[pattern] + + class MatchStar(pattern): + __match_args__ = ("name",) + name: _Identifier | None + + class MatchMapping(pattern): + __match_args__ = ("keys", "patterns", "rest") + keys: list[expr] + patterns: list[pattern] + rest: _Identifier | None + + class MatchClass(pattern): + __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns") + cls: expr + patterns: list[pattern] + kwd_attrs: list[_Identifier] + kwd_patterns: list[pattern] + + class MatchAs(pattern): + __match_args__ = ("pattern", "name") + pattern: _Pattern | None + name: _Identifier | None + + class MatchOr(pattern): + __match_args__ = ("patterns",) + patterns: list[pattern] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_bisect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_bisect.pyi new file mode 100644 index 00000000..4c79eec1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_bisect.pyi @@ -0,0 +1,74 @@ +import sys +from _typeshed import SupportsRichComparisonT +from collections.abc import Callable, MutableSequence, Sequence +from typing import TypeVar, overload + +_T = TypeVar("_T") + +if sys.version_info >= (3, 10): + @overload + def bisect_left( + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None, *, key: None = None + ) -> int: ... + @overload + def bisect_left( + a: Sequence[_T], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: Callable[[_T], SupportsRichComparisonT], + ) -> int: ... 
+ @overload + def bisect_right( + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None, *, key: None = None + ) -> int: ... + @overload + def bisect_right( + a: Sequence[_T], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: Callable[[_T], SupportsRichComparisonT], + ) -> int: ... + @overload + def insort_left( + a: MutableSequence[SupportsRichComparisonT], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: None = None, + ) -> None: ... + @overload + def insort_left( + a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] + ) -> None: ... + @overload + def insort_right( + a: MutableSequence[SupportsRichComparisonT], + x: SupportsRichComparisonT, + lo: int = 0, + hi: int | None = None, + *, + key: None = None, + ) -> None: ... + @overload + def insort_right( + a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] + ) -> None: ... + +else: + def bisect_left( + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None + ) -> int: ... + def bisect_right( + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None + ) -> int: ... + def insort_left( + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None + ) -> None: ... + def insort_right( + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_bootlocale.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_bootlocale.pyi new file mode 100644 index 00000000..233d4934 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_bootlocale.pyi @@ -0,0 +1 @@ +def getpreferredencoding(do_setlocale: bool = True) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_codecs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_codecs.pyi new file mode 100644 index 00000000..51f17f01 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_codecs.pyi @@ -0,0 +1,138 @@ +import codecs +import sys +from _typeshed import ReadableBuffer +from collections.abc import Callable +from typing import overload +from typing_extensions import Literal, TypeAlias + +# This type is not exposed; it is defined in unicodeobject.c +class _EncodingMap: + def size(self) -> int: ... + +_CharMap: TypeAlias = dict[int, int] | _EncodingMap +_Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]] +_SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None] + +def register(__search_function: _SearchFunction) -> None: ... + +if sys.version_info >= (3, 10): + def unregister(__search_function: _SearchFunction) -> None: ... + +def register_error(__errors: str, __handler: _Handler) -> None: ... +def lookup_error(__name: str) -> _Handler: ... 
+ +# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300 +# https://docs.python.org/3/library/codecs.html#binary-transforms +_BytesToBytesEncoding: TypeAlias = Literal[ + "base64", + "base_64", + "base64_codec", + "bz2", + "bz2_codec", + "hex", + "hex_codec", + "quopri", + "quotedprintable", + "quoted_printable", + "quopri_codec", + "uu", + "uu_codec", + "zip", + "zlib", + "zlib_codec", +] +# https://docs.python.org/3/library/codecs.html#text-transforms +_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] + +@overload +def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... +@overload +def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[misc] +@overload +def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... +@overload +def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... # type: ignore[misc] +@overload +def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... + +# these are documented as text encodings but in practice they also accept str as input +@overload +def decode( + obj: str, + encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"], + errors: str = "strict", +) -> str: ... + +# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str +@overload +def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ... +@overload +def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ... +def lookup(__encoding: str) -> codecs.CodecInfo: ... +def charmap_build(__map: str) -> _CharMap: ... +def ascii_decode(__data: ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... +def ascii_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def charmap_decode(__data: ReadableBuffer, __errors: str | None = None, __mapping: _CharMap | None = None) -> tuple[str, int]: ... +def charmap_encode(__str: str, __errors: str | None = None, __mapping: _CharMap | None = None) -> tuple[bytes, int]: ... +def escape_decode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... +def escape_encode(__data: bytes, __errors: str | None = None) -> tuple[bytes, int]: ... +def latin_1_decode(__data: ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... +def latin_1_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... + +if sys.version_info >= (3, 9): + def raw_unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = None, __final: bool = True + ) -> tuple[str, int]: ... + +else: + def raw_unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... + +def raw_unicode_escape_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def readbuffer_encode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[bytes, int]: ... + +if sys.version_info >= (3, 9): + def unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = None, __final: bool = True + ) -> tuple[str, int]: ... + +else: + def unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... + +def unicode_escape_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... 
+ +if sys.version_info < (3, 8): + def unicode_internal_decode(__obj: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... + def unicode_internal_encode(__obj: str | ReadableBuffer, __errors: str | None = None) -> tuple[bytes, int]: ... + +def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_16_be_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_16_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_16_encode(__str: str, __errors: str | None = None, __byteorder: int = 0) -> tuple[bytes, int]: ... +def utf_16_ex_decode( + __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: bool = False +) -> tuple[str, int, int]: ... +def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_16_le_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_32_be_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_32_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_32_encode(__str: str, __errors: str | None = None, __byteorder: int = 0) -> tuple[bytes, int]: ... +def utf_32_ex_decode( + __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: bool = False +) -> tuple[str, int, int]: ... +def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_32_le_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_7_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_7_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_8_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_8_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... + +if sys.platform == "win32": + def mbcs_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... + def mbcs_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... + def code_page_decode( + __codepage: int, __data: ReadableBuffer, __errors: str | None = None, __final: bool = False + ) -> tuple[str, int]: ... + def code_page_encode(__code_page: int, __str: str, __errors: str | None = None) -> tuple[bytes, int]: ... + def oem_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... + def oem_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_collections_abc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_collections_abc.pyi new file mode 100644 index 00000000..352da6cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_collections_abc.pyi @@ -0,0 +1,81 @@ +import sys +from types import MappingProxyType +from typing import ( # noqa: Y022,Y038 + AbstractSet as Set, + AsyncGenerator as AsyncGenerator, + AsyncIterable as AsyncIterable, + AsyncIterator as AsyncIterator, + Awaitable as Awaitable, + ByteString as ByteString, + Callable as Callable, + Collection as Collection, + Container as Container, + Coroutine as Coroutine, + Generator as Generator, + Generic, + Hashable as Hashable, + ItemsView as ItemsView, + Iterable as Iterable, + Iterator as Iterator, + KeysView as KeysView, + Mapping as Mapping, + MappingView as MappingView, + MutableMapping as MutableMapping, + MutableSequence as MutableSequence, + MutableSet as MutableSet, + Reversible as Reversible, + Sequence as Sequence, + Sized as Sized, + TypeVar, + ValuesView as ValuesView, +) +from typing_extensions import final + +__all__ = [ + "Awaitable", + "Coroutine", + "AsyncIterable", + "AsyncIterator", + "AsyncGenerator", + "Hashable", + "Iterable", + "Iterator", + "Generator", + "Reversible", + "Sized", + "Container", + "Callable", + "Collection", + "Set", + "MutableSet", + "Mapping", + "MutableMapping", + "MappingView", + "KeysView", + "ItemsView", + "ValuesView", + "Sequence", + "MutableSequence", + "ByteString", +] + +_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. +_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. + +@final +class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + +@final +class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + +@final +class dict_items(ItemsView[_KT_co, _VT_co], Generic[_KT_co, _VT_co]): # undocumented + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_compat_pickle.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_compat_pickle.pyi new file mode 100644 index 00000000..50fb2244 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_compat_pickle.pyi @@ -0,0 +1,8 @@ +IMPORT_MAPPING: dict[str, str] +NAME_MAPPING: dict[tuple[str, str], tuple[str, str]] +PYTHON2_EXCEPTIONS: tuple[str, ...] +MULTIPROCESSING_EXCEPTIONS: tuple[str, ...] +REVERSE_IMPORT_MAPPING: dict[str, str] +REVERSE_NAME_MAPPING: dict[tuple[str, str], tuple[str, str]] +PYTHON3_OSERROR_EXCEPTIONS: tuple[str, ...] +PYTHON3_IMPORTERROR_EXCEPTIONS: tuple[str, ...] 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_compression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_compression.pyi new file mode 100644 index 00000000..817f2515 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_compression.pyi @@ -0,0 +1,25 @@ +from _typeshed import WriteableBuffer +from collections.abc import Callable +from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase +from typing import Any, Protocol + +BUFFER_SIZE = DEFAULT_BUFFER_SIZE + +class _Reader(Protocol): + def read(self, __n: int) -> bytes: ... + def seekable(self) -> bool: ... + def seek(self, __n: int) -> Any: ... + +class BaseStream(BufferedIOBase): ... + +class DecompressReader(RawIOBase): + def __init__( + self, + fp: _Reader, + decomp_factory: Callable[..., object], + trailing_error: type[Exception] | tuple[type[Exception], ...] = ..., + **decomp_args: Any, + ) -> None: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def read(self, size: int = -1) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_csv.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_csv.pyi new file mode 100644 index 00000000..c9b9f47e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_csv.pyi @@ -0,0 +1,86 @@ +from _typeshed import SupportsWrite +from collections.abc import Iterable, Iterator +from typing import Any +from typing_extensions import Final, Literal, TypeAlias + +__version__: Final[str] + +QUOTE_ALL: Literal[1] +QUOTE_MINIMAL: Literal[0] +QUOTE_NONE: Literal[3] +QUOTE_NONNUMERIC: Literal[2] + +# Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC` +# However, using literals in situations like these can cause false-positives (see #7258) +_QuotingType: TypeAlias = int + +class Error(Exception): ... + +class Dialect: + delimiter: str + quotechar: str | None + escapechar: str | None + doublequote: bool + skipinitialspace: bool + lineterminator: str + quoting: _QuotingType + strict: bool + def __init__(self) -> None: ... + +_DialectLike: TypeAlias = str | Dialect | type[Dialect] + +class _reader(Iterator[list[str]]): + @property + def dialect(self) -> Dialect: ... + line_num: int + def __next__(self) -> list[str]: ... + +class _writer: + @property + def dialect(self) -> Dialect: ... + def writerow(self, row: Iterable[Any]) -> Any: ... + def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ... + +def writer( + csvfile: SupportsWrite[str], + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, +) -> _writer: ... +def reader( + csvfile: Iterable[str], + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, +) -> _reader: ... 
+def register_dialect( + name: str, + dialect: type[Dialect] = ..., + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, +) -> None: ... +def unregister_dialect(name: str) -> None: ... +def get_dialect(name: str) -> Dialect: ... +def list_dialects() -> list[str]: ... +def field_size_limit(new_limit: int = ...) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_ctypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_ctypes.pyi new file mode 100644 index 00000000..0ad2fcb5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_ctypes.pyi @@ -0,0 +1,29 @@ +import sys +from ctypes import _CArgObject, _PointerLike +from typing_extensions import TypeAlias + +FUNCFLAG_CDECL: int +FUNCFLAG_PYTHONAPI: int +FUNCFLAG_USE_ERRNO: int +FUNCFLAG_USE_LASTERROR: int +RTLD_GLOBAL: int +RTLD_LOCAL: int + +if sys.version_info >= (3, 11): + CTYPES_MAX_ARGCOUNT: int + +if sys.platform == "win32": + # Description, Source, HelpFile, HelpContext, scode + _COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None] + + class COMError(Exception): + hresult: int + text: str | None + details: _COMError_Details + + def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ... + + def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: ... + + FUNCFLAG_HRESULT: int + FUNCFLAG_STDCALL: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_curses.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_curses.pyi new file mode 100644 index 00000000..61881fc0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_curses.pyi @@ -0,0 +1,555 @@ +import sys +from _typeshed import ReadOnlyBuffer, SupportsRead +from typing import IO, Any, NamedTuple, overload +from typing_extensions import TypeAlias, final + +if sys.platform != "win32": + # Handled by PyCurses_ConvertToChtype in _cursesmodule.c. 
+ _ChType: TypeAlias = str | bytes | int + + # ACS codes are only initialized after initscr is called + ACS_BBSS: int + ACS_BLOCK: int + ACS_BOARD: int + ACS_BSBS: int + ACS_BSSB: int + ACS_BSSS: int + ACS_BTEE: int + ACS_BULLET: int + ACS_CKBOARD: int + ACS_DARROW: int + ACS_DEGREE: int + ACS_DIAMOND: int + ACS_GEQUAL: int + ACS_HLINE: int + ACS_LANTERN: int + ACS_LARROW: int + ACS_LEQUAL: int + ACS_LLCORNER: int + ACS_LRCORNER: int + ACS_LTEE: int + ACS_NEQUAL: int + ACS_PI: int + ACS_PLMINUS: int + ACS_PLUS: int + ACS_RARROW: int + ACS_RTEE: int + ACS_S1: int + ACS_S3: int + ACS_S7: int + ACS_S9: int + ACS_SBBS: int + ACS_SBSB: int + ACS_SBSS: int + ACS_SSBB: int + ACS_SSBS: int + ACS_SSSB: int + ACS_SSSS: int + ACS_STERLING: int + ACS_TTEE: int + ACS_UARROW: int + ACS_ULCORNER: int + ACS_URCORNER: int + ACS_VLINE: int + ALL_MOUSE_EVENTS: int + A_ALTCHARSET: int + A_ATTRIBUTES: int + A_BLINK: int + A_BOLD: int + A_CHARTEXT: int + A_COLOR: int + A_DIM: int + A_HORIZONTAL: int + A_INVIS: int + A_ITALIC: int + A_LEFT: int + A_LOW: int + A_NORMAL: int + A_PROTECT: int + A_REVERSE: int + A_RIGHT: int + A_STANDOUT: int + A_TOP: int + A_UNDERLINE: int + A_VERTICAL: int + BUTTON1_CLICKED: int + BUTTON1_DOUBLE_CLICKED: int + BUTTON1_PRESSED: int + BUTTON1_RELEASED: int + BUTTON1_TRIPLE_CLICKED: int + BUTTON2_CLICKED: int + BUTTON2_DOUBLE_CLICKED: int + BUTTON2_PRESSED: int + BUTTON2_RELEASED: int + BUTTON2_TRIPLE_CLICKED: int + BUTTON3_CLICKED: int + BUTTON3_DOUBLE_CLICKED: int + BUTTON3_PRESSED: int + BUTTON3_RELEASED: int + BUTTON3_TRIPLE_CLICKED: int + BUTTON4_CLICKED: int + BUTTON4_DOUBLE_CLICKED: int + BUTTON4_PRESSED: int + BUTTON4_RELEASED: int + BUTTON4_TRIPLE_CLICKED: int + # Darwin ncurses doesn't provide BUTTON5_* constants + if sys.version_info >= (3, 10) and sys.platform != "darwin": + BUTTON5_PRESSED: int + BUTTON5_RELEASED: int + BUTTON5_CLICKED: int + BUTTON5_DOUBLE_CLICKED: int + BUTTON5_TRIPLE_CLICKED: int + BUTTON_ALT: int + BUTTON_CTRL: int + BUTTON_SHIFT: int + COLOR_BLACK: int + COLOR_BLUE: int + COLOR_CYAN: int + COLOR_GREEN: int + COLOR_MAGENTA: int + COLOR_RED: int + COLOR_WHITE: int + COLOR_YELLOW: int + ERR: int + KEY_A1: int + KEY_A3: int + KEY_B2: int + KEY_BACKSPACE: int + KEY_BEG: int + KEY_BREAK: int + KEY_BTAB: int + KEY_C1: int + KEY_C3: int + KEY_CANCEL: int + KEY_CATAB: int + KEY_CLEAR: int + KEY_CLOSE: int + KEY_COMMAND: int + KEY_COPY: int + KEY_CREATE: int + KEY_CTAB: int + KEY_DC: int + KEY_DL: int + KEY_DOWN: int + KEY_EIC: int + KEY_END: int + KEY_ENTER: int + KEY_EOL: int + KEY_EOS: int + KEY_EXIT: int + KEY_F0: int + KEY_F1: int + KEY_F10: int + KEY_F11: int + KEY_F12: int + KEY_F13: int + KEY_F14: int + KEY_F15: int + KEY_F16: int + KEY_F17: int + KEY_F18: int + KEY_F19: int + KEY_F2: int + KEY_F20: int + KEY_F21: int + KEY_F22: int + KEY_F23: int + KEY_F24: int + KEY_F25: int + KEY_F26: int + KEY_F27: int + KEY_F28: int + KEY_F29: int + KEY_F3: int + KEY_F30: int + KEY_F31: int + KEY_F32: int + KEY_F33: int + KEY_F34: int + KEY_F35: int + KEY_F36: int + KEY_F37: int + KEY_F38: int + KEY_F39: int + KEY_F4: int + KEY_F40: int + KEY_F41: int + KEY_F42: int + KEY_F43: int + KEY_F44: int + KEY_F45: int + KEY_F46: int + KEY_F47: int + KEY_F48: int + KEY_F49: int + KEY_F5: int + KEY_F50: int + KEY_F51: int + KEY_F52: int + KEY_F53: int + KEY_F54: int + KEY_F55: int + KEY_F56: int + KEY_F57: int + KEY_F58: int + KEY_F59: int + KEY_F6: int + KEY_F60: int + KEY_F61: int + KEY_F62: int + KEY_F63: int + KEY_F7: int + KEY_F8: int + KEY_F9: int + KEY_FIND: int + 
KEY_HELP: int + KEY_HOME: int + KEY_IC: int + KEY_IL: int + KEY_LEFT: int + KEY_LL: int + KEY_MARK: int + KEY_MAX: int + KEY_MESSAGE: int + KEY_MIN: int + KEY_MOUSE: int + KEY_MOVE: int + KEY_NEXT: int + KEY_NPAGE: int + KEY_OPEN: int + KEY_OPTIONS: int + KEY_PPAGE: int + KEY_PREVIOUS: int + KEY_PRINT: int + KEY_REDO: int + KEY_REFERENCE: int + KEY_REFRESH: int + KEY_REPLACE: int + KEY_RESET: int + KEY_RESIZE: int + KEY_RESTART: int + KEY_RESUME: int + KEY_RIGHT: int + KEY_SAVE: int + KEY_SBEG: int + KEY_SCANCEL: int + KEY_SCOMMAND: int + KEY_SCOPY: int + KEY_SCREATE: int + KEY_SDC: int + KEY_SDL: int + KEY_SELECT: int + KEY_SEND: int + KEY_SEOL: int + KEY_SEXIT: int + KEY_SF: int + KEY_SFIND: int + KEY_SHELP: int + KEY_SHOME: int + KEY_SIC: int + KEY_SLEFT: int + KEY_SMESSAGE: int + KEY_SMOVE: int + KEY_SNEXT: int + KEY_SOPTIONS: int + KEY_SPREVIOUS: int + KEY_SPRINT: int + KEY_SR: int + KEY_SREDO: int + KEY_SREPLACE: int + KEY_SRESET: int + KEY_SRIGHT: int + KEY_SRSUME: int + KEY_SSAVE: int + KEY_SSUSPEND: int + KEY_STAB: int + KEY_SUNDO: int + KEY_SUSPEND: int + KEY_UNDO: int + KEY_UP: int + OK: int + REPORT_MOUSE_POSITION: int + _C_API: Any + version: bytes + def baudrate() -> int: ... + def beep() -> None: ... + def can_change_color() -> bool: ... + def cbreak(__flag: bool = True) -> None: ... + def color_content(__color_number: int) -> tuple[int, int, int]: ... + # Changed in Python 3.8.8 and 3.9.2 + if sys.version_info >= (3, 8): + def color_pair(pair_number: int) -> int: ... + else: + def color_pair(__color_number: int) -> int: ... + + def curs_set(__visibility: int) -> int: ... + def def_prog_mode() -> None: ... + def def_shell_mode() -> None: ... + def delay_output(__ms: int) -> None: ... + def doupdate() -> None: ... + def echo(__flag: bool = True) -> None: ... + def endwin() -> None: ... + def erasechar() -> bytes: ... + def filter() -> None: ... + def flash() -> None: ... + def flushinp() -> None: ... + if sys.version_info >= (3, 9): + def get_escdelay() -> int: ... + def get_tabsize() -> int: ... + + def getmouse() -> tuple[int, int, int, int, int]: ... + def getsyx() -> tuple[int, int]: ... + def getwin(__file: SupportsRead[bytes]) -> _CursesWindow: ... + def halfdelay(__tenths: int) -> None: ... + def has_colors() -> bool: ... + if sys.version_info >= (3, 10): + def has_extended_color_support() -> bool: ... + + def has_ic() -> bool: ... + def has_il() -> bool: ... + def has_key(__key: int) -> bool: ... + def init_color(__color_number: int, __r: int, __g: int, __b: int) -> None: ... + def init_pair(__pair_number: int, __fg: int, __bg: int) -> None: ... + def initscr() -> _CursesWindow: ... + def intrflush(__flag: bool) -> None: ... + def is_term_resized(__nlines: int, __ncols: int) -> bool: ... + def isendwin() -> bool: ... + def keyname(__key: int) -> bytes: ... + def killchar() -> bytes: ... + def longname() -> bytes: ... + def meta(__yes: bool) -> None: ... + def mouseinterval(__interval: int) -> None: ... + def mousemask(__newmask: int) -> tuple[int, int]: ... + def napms(__ms: int) -> int: ... + def newpad(__nlines: int, __ncols: int) -> _CursesWindow: ... + def newwin(__nlines: int, __ncols: int, __begin_y: int = ..., __begin_x: int = ...) -> _CursesWindow: ... + def nl(__flag: bool = True) -> None: ... + def nocbreak() -> None: ... + def noecho() -> None: ... + def nonl() -> None: ... + def noqiflush() -> None: ... + def noraw() -> None: ... + def pair_content(__pair_number: int) -> tuple[int, int]: ... + def pair_number(__attr: int) -> int: ... 
+ def putp(__string: ReadOnlyBuffer) -> None: ... + def qiflush(__flag: bool = True) -> None: ... + def raw(__flag: bool = True) -> None: ... + def reset_prog_mode() -> None: ... + def reset_shell_mode() -> None: ... + def resetty() -> None: ... + def resize_term(__nlines: int, __ncols: int) -> None: ... + def resizeterm(__nlines: int, __ncols: int) -> None: ... + def savetty() -> None: ... + if sys.version_info >= (3, 9): + def set_escdelay(__ms: int) -> None: ... + def set_tabsize(__size: int) -> None: ... + + def setsyx(__y: int, __x: int) -> None: ... + def setupterm(term: str | None = None, fd: int = -1) -> None: ... + def start_color() -> None: ... + def termattrs() -> int: ... + def termname() -> bytes: ... + def tigetflag(__capname: str) -> int: ... + def tigetnum(__capname: str) -> int: ... + def tigetstr(__capname: str) -> bytes | None: ... + def tparm( + __str: ReadOnlyBuffer, + __i1: int = 0, + __i2: int = 0, + __i3: int = 0, + __i4: int = 0, + __i5: int = 0, + __i6: int = 0, + __i7: int = 0, + __i8: int = 0, + __i9: int = 0, + ) -> bytes: ... + def typeahead(__fd: int) -> None: ... + def unctrl(__ch: _ChType) -> bytes: ... + def unget_wch(__ch: int | str) -> None: ... + def ungetch(__ch: _ChType) -> None: ... + def ungetmouse(__id: int, __x: int, __y: int, __z: int, __bstate: int) -> None: ... + def update_lines_cols() -> None: ... + def use_default_colors() -> None: ... + def use_env(__flag: bool) -> None: ... + + class error(Exception): ... + + @final + class _CursesWindow: + encoding: str + @overload + def addch(self, ch: _ChType, attr: int = ...) -> None: ... + @overload + def addch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... + @overload + def addnstr(self, str: str, n: int, attr: int = ...) -> None: ... + @overload + def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... + @overload + def addstr(self, str: str, attr: int = ...) -> None: ... + @overload + def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... + def attroff(self, __attr: int) -> None: ... + def attron(self, __attr: int) -> None: ... + def attrset(self, __attr: int) -> None: ... + def bkgd(self, __ch: _ChType, __attr: int = ...) -> None: ... + def bkgdset(self, __ch: _ChType, __attr: int = ...) -> None: ... + def border( + self, + ls: _ChType = ..., + rs: _ChType = ..., + ts: _ChType = ..., + bs: _ChType = ..., + tl: _ChType = ..., + tr: _ChType = ..., + bl: _ChType = ..., + br: _ChType = ..., + ) -> None: ... + @overload + def box(self) -> None: ... + @overload + def box(self, vertch: _ChType = ..., horch: _ChType = ...) -> None: ... + @overload + def chgat(self, attr: int) -> None: ... + @overload + def chgat(self, num: int, attr: int) -> None: ... + @overload + def chgat(self, y: int, x: int, attr: int) -> None: ... + @overload + def chgat(self, y: int, x: int, num: int, attr: int) -> None: ... + def clear(self) -> None: ... + def clearok(self, yes: int) -> None: ... + def clrtobot(self) -> None: ... + def clrtoeol(self) -> None: ... + def cursyncup(self) -> None: ... + @overload + def delch(self) -> None: ... + @overload + def delch(self, y: int, x: int) -> None: ... + def deleteln(self) -> None: ... + @overload + def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... + @overload + def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... + def echochar(self, __ch: _ChType, __attr: int = ...) -> None: ... + def enclose(self, __y: int, __x: int) -> bool: ... + def erase(self) -> None: ... 
+ def getbegyx(self) -> tuple[int, int]: ... + def getbkgd(self) -> tuple[int, int]: ... + @overload + def getch(self) -> int: ... + @overload + def getch(self, y: int, x: int) -> int: ... + @overload + def get_wch(self) -> int | str: ... + @overload + def get_wch(self, y: int, x: int) -> int | str: ... + @overload + def getkey(self) -> str: ... + @overload + def getkey(self, y: int, x: int) -> str: ... + def getmaxyx(self) -> tuple[int, int]: ... + def getparyx(self) -> tuple[int, int]: ... + @overload + def getstr(self) -> bytes: ... + @overload + def getstr(self, n: int) -> bytes: ... + @overload + def getstr(self, y: int, x: int) -> bytes: ... + @overload + def getstr(self, y: int, x: int, n: int) -> bytes: ... + def getyx(self) -> tuple[int, int]: ... + @overload + def hline(self, ch: _ChType, n: int) -> None: ... + @overload + def hline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... + def idcok(self, flag: bool) -> None: ... + def idlok(self, yes: bool) -> None: ... + def immedok(self, flag: bool) -> None: ... + @overload + def inch(self) -> int: ... + @overload + def inch(self, y: int, x: int) -> int: ... + @overload + def insch(self, ch: _ChType, attr: int = ...) -> None: ... + @overload + def insch(self, y: int, x: int, ch: _ChType, attr: int = ...) -> None: ... + def insdelln(self, nlines: int) -> None: ... + def insertln(self) -> None: ... + @overload + def insnstr(self, str: str, n: int, attr: int = ...) -> None: ... + @overload + def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... + @overload + def insstr(self, str: str, attr: int = ...) -> None: ... + @overload + def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... + @overload + def instr(self, n: int = ...) -> bytes: ... + @overload + def instr(self, y: int, x: int, n: int = ...) -> bytes: ... + def is_linetouched(self, __line: int) -> bool: ... + def is_wintouched(self) -> bool: ... + def keypad(self, yes: bool) -> None: ... + def leaveok(self, yes: bool) -> None: ... + def move(self, new_y: int, new_x: int) -> None: ... + def mvderwin(self, y: int, x: int) -> None: ... + def mvwin(self, new_y: int, new_x: int) -> None: ... + def nodelay(self, yes: bool) -> None: ... + def notimeout(self, yes: bool) -> None: ... + @overload + def noutrefresh(self) -> None: ... + @overload + def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... + @overload + def overlay(self, destwin: _CursesWindow) -> None: ... + @overload + def overlay( + self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int + ) -> None: ... + @overload + def overwrite(self, destwin: _CursesWindow) -> None: ... + @overload + def overwrite( + self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int + ) -> None: ... + def putwin(self, __file: IO[Any]) -> None: ... + def redrawln(self, __beg: int, __num: int) -> None: ... + def redrawwin(self) -> None: ... + @overload + def refresh(self) -> None: ... + @overload + def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... + def resize(self, nlines: int, ncols: int) -> None: ... + def scroll(self, lines: int = ...) -> None: ... + def scrollok(self, flag: bool) -> None: ... + def setscrreg(self, __top: int, __bottom: int) -> None: ... + def standend(self) -> None: ... + def standout(self) -> None: ... 
+ @overload + def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ... + @overload + def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... + @overload + def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... + @overload + def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... + def syncdown(self) -> None: ... + def syncok(self, flag: bool) -> None: ... + def syncup(self) -> None: ... + def timeout(self, delay: int) -> None: ... + def touchline(self, start: int, count: int, changed: bool = ...) -> None: ... + def touchwin(self) -> None: ... + def untouchwin(self) -> None: ... + @overload + def vline(self, ch: _ChType, n: int) -> None: ... + @overload + def vline(self, y: int, x: int, ch: _ChType, n: int) -> None: ... + if sys.version_info >= (3, 8): + class _ncurses_version(NamedTuple): + major: int + minor: int + patch: int + ncurses_version: _ncurses_version + window = _CursesWindow # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_decimal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_decimal.pyi new file mode 100644 index 00000000..60c60945 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_decimal.pyi @@ -0,0 +1,280 @@ +import numbers +import sys +from collections.abc import Container, Sequence +from types import TracebackType +from typing import Any, ClassVar, NamedTuple, overload +from typing_extensions import Final, Literal, Self, TypeAlias + +_Decimal: TypeAlias = Decimal | int +_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int] +_ComparableNum: TypeAlias = Decimal | float | numbers.Rational + +__version__: Final[str] +__libmpdec_version__: Final[str] + +class DecimalTuple(NamedTuple): + sign: int + digits: tuple[int, ...] + exponent: int | Literal["n", "N", "F"] + +ROUND_DOWN: str +ROUND_HALF_UP: str +ROUND_HALF_EVEN: str +ROUND_CEILING: str +ROUND_FLOOR: str +ROUND_UP: str +ROUND_HALF_DOWN: str +ROUND_05UP: str +HAVE_CONTEXTVAR: bool +HAVE_THREADS: bool +MAX_EMAX: int +MAX_PREC: int +MIN_EMIN: int +MIN_ETINY: int + +class DecimalException(ArithmeticError): ... +class Clamped(DecimalException): ... +class InvalidOperation(DecimalException): ... +class ConversionSyntax(InvalidOperation): ... +class DivisionByZero(DecimalException, ZeroDivisionError): ... +class DivisionImpossible(InvalidOperation): ... +class DivisionUndefined(InvalidOperation, ZeroDivisionError): ... +class Inexact(DecimalException): ... +class InvalidContext(InvalidOperation): ... +class Rounded(DecimalException): ... +class Subnormal(DecimalException): ... +class Overflow(Inexact, Rounded): ... +class Underflow(Inexact, Rounded, Subnormal): ... +class FloatOperation(DecimalException, TypeError): ... + +def setcontext(__context: Context) -> None: ... +def getcontext() -> Context: ... + +if sys.version_info >= (3, 11): + def localcontext( + ctx: Context | None = None, + *, + prec: int | None = ..., + rounding: str | None = ..., + Emin: int | None = ..., + Emax: int | None = ..., + capitals: int | None = ..., + clamp: int | None = ..., + traps: dict[_TrapType, bool] | None = ..., + flags: dict[_TrapType, bool] | None = ..., + ) -> _ContextManager: ... + +else: + def localcontext(ctx: Context | None = None) -> _ContextManager: ... + +class Decimal: + def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... 
+ @classmethod + def from_float(cls, __f: float) -> Self: ... + def __bool__(self) -> bool: ... + def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def as_tuple(self) -> DecimalTuple: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + def to_eng_string(self, context: Context | None = None) -> str: ... + def __abs__(self) -> Decimal: ... + def __add__(self, __value: _Decimal) -> Decimal: ... + def __divmod__(self, __value: _Decimal) -> tuple[Decimal, Decimal]: ... + def __eq__(self, __value: object) -> bool: ... + def __floordiv__(self, __value: _Decimal) -> Decimal: ... + def __ge__(self, __value: _ComparableNum) -> bool: ... + def __gt__(self, __value: _ComparableNum) -> bool: ... + def __le__(self, __value: _ComparableNum) -> bool: ... + def __lt__(self, __value: _ComparableNum) -> bool: ... + def __mod__(self, __value: _Decimal) -> Decimal: ... + def __mul__(self, __value: _Decimal) -> Decimal: ... + def __neg__(self) -> Decimal: ... + def __pos__(self) -> Decimal: ... + def __pow__(self, __value: _Decimal, __mod: _Decimal | None = None) -> Decimal: ... + def __radd__(self, __value: _Decimal) -> Decimal: ... + def __rdivmod__(self, __value: _Decimal) -> tuple[Decimal, Decimal]: ... + def __rfloordiv__(self, __value: _Decimal) -> Decimal: ... + def __rmod__(self, __value: _Decimal) -> Decimal: ... + def __rmul__(self, __value: _Decimal) -> Decimal: ... + def __rsub__(self, __value: _Decimal) -> Decimal: ... + def __rtruediv__(self, __value: _Decimal) -> Decimal: ... + def __sub__(self, __value: _Decimal) -> Decimal: ... + def __truediv__(self, __value: _Decimal) -> Decimal: ... + def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __float__(self) -> float: ... + def __int__(self) -> int: ... + def __trunc__(self) -> int: ... + @property + def real(self) -> Decimal: ... + @property + def imag(self) -> Decimal: ... + def conjugate(self) -> Decimal: ... + def __complex__(self) -> complex: ... + @overload + def __round__(self) -> int: ... + @overload + def __round__(self, __ndigits: int) -> Decimal: ... + def __floor__(self) -> int: ... + def __ceil__(self) -> int: ... + def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ... + def __rpow__(self, __value: _Decimal, __mod: Context | None = None) -> Decimal: ... + def normalize(self, context: Context | None = None) -> Decimal: ... + def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ... + def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def sqrt(self, context: Context | None = None) -> Decimal: ... + def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def adjusted(self) -> int: ... + def canonical(self) -> Decimal: ... + def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... 
+ def copy_abs(self) -> Decimal: ... + def copy_negate(self) -> Decimal: ... + def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def exp(self, context: Context | None = None) -> Decimal: ... + def is_canonical(self) -> bool: ... + def is_finite(self) -> bool: ... + def is_infinite(self) -> bool: ... + def is_nan(self) -> bool: ... + def is_normal(self, context: Context | None = None) -> bool: ... + def is_qnan(self) -> bool: ... + def is_signed(self) -> bool: ... + def is_snan(self) -> bool: ... + def is_subnormal(self, context: Context | None = None) -> bool: ... + def is_zero(self) -> bool: ... + def ln(self, context: Context | None = None) -> Decimal: ... + def log10(self, context: Context | None = None) -> Decimal: ... + def logb(self, context: Context | None = None) -> Decimal: ... + def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_invert(self, context: Context | None = None) -> Decimal: ... + def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def next_minus(self, context: Context | None = None) -> Decimal: ... + def next_plus(self, context: Context | None = None) -> Decimal: ... + def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def number_class(self, context: Context | None = None) -> str: ... + def radix(self) -> Decimal: ... + def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, __memo: Any) -> Self: ... + def __format__(self, __specifier: str, __context: Context | None = ...) -> str: ... + +class _ContextManager: + new_context: Context + saved_context: Context + def __init__(self, new_context: Context) -> None: ... + def __enter__(self) -> Context: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + +_TrapType: TypeAlias = type[DecimalException] + +class Context: + # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime, + # even settable attributes like `prec` and `rounding`, + # but that's inexpressable in the stub. + # Type checkers either ignore it or misinterpret it + # if you add a `def __delattr__(self, __name: str) -> NoReturn` method to the stub + prec: int + rounding: str + Emin: int + Emax: int + capitals: int + clamp: int + traps: dict[_TrapType, bool] + flags: dict[_TrapType, bool] + def __init__( + self, + prec: int | None = ..., + rounding: str | None = ..., + Emin: int | None = ..., + Emax: int | None = ..., + capitals: int | None = ..., + clamp: int | None = ..., + flags: None | dict[_TrapType, bool] | Container[_TrapType] = ..., + traps: None | dict[_TrapType, bool] | Container[_TrapType] = ..., + _ignored_flags: list[_TrapType] | None = ..., + ) -> None: ... + def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... + def clear_flags(self) -> None: ... + def clear_traps(self) -> None: ... 
+ def copy(self) -> Context: ... + def __copy__(self) -> Context: ... + # see https://github.com/python/cpython/issues/94107 + __hash__: ClassVar[None] # type: ignore[assignment] + def Etiny(self) -> int: ... + def Etop(self) -> int: ... + def create_decimal(self, __num: _DecimalNew = "0") -> Decimal: ... + def create_decimal_from_float(self, __f: float) -> Decimal: ... + def abs(self, __x: _Decimal) -> Decimal: ... + def add(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def canonical(self, __x: Decimal) -> Decimal: ... + def compare(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def compare_signal(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def compare_total(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def compare_total_mag(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def copy_abs(self, __x: _Decimal) -> Decimal: ... + def copy_decimal(self, __x: _Decimal) -> Decimal: ... + def copy_negate(self, __x: _Decimal) -> Decimal: ... + def copy_sign(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def divide(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def divide_int(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def divmod(self, __x: _Decimal, __y: _Decimal) -> tuple[Decimal, Decimal]: ... + def exp(self, __x: _Decimal) -> Decimal: ... + def fma(self, __x: _Decimal, __y: _Decimal, __z: _Decimal) -> Decimal: ... + def is_canonical(self, __x: _Decimal) -> bool: ... + def is_finite(self, __x: _Decimal) -> bool: ... + def is_infinite(self, __x: _Decimal) -> bool: ... + def is_nan(self, __x: _Decimal) -> bool: ... + def is_normal(self, __x: _Decimal) -> bool: ... + def is_qnan(self, __x: _Decimal) -> bool: ... + def is_signed(self, __x: _Decimal) -> bool: ... + def is_snan(self, __x: _Decimal) -> bool: ... + def is_subnormal(self, __x: _Decimal) -> bool: ... + def is_zero(self, __x: _Decimal) -> bool: ... + def ln(self, __x: _Decimal) -> Decimal: ... + def log10(self, __x: _Decimal) -> Decimal: ... + def logb(self, __x: _Decimal) -> Decimal: ... + def logical_and(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def logical_invert(self, __x: _Decimal) -> Decimal: ... + def logical_or(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def logical_xor(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def max(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def max_mag(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def min(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def min_mag(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def minus(self, __x: _Decimal) -> Decimal: ... + def multiply(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def next_minus(self, __x: _Decimal) -> Decimal: ... + def next_plus(self, __x: _Decimal) -> Decimal: ... + def next_toward(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def normalize(self, __x: _Decimal) -> Decimal: ... + def number_class(self, __x: _Decimal) -> str: ... + def plus(self, __x: _Decimal) -> Decimal: ... + def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ... + def quantize(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def radix(self) -> Decimal: ... + def remainder(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def remainder_near(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def rotate(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def same_quantum(self, __x: _Decimal, __y: _Decimal) -> bool: ... + def scaleb(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... 
+ def shift(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def sqrt(self, __x: _Decimal) -> Decimal: ... + def subtract(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def to_eng_string(self, __x: _Decimal) -> str: ... + def to_sci_string(self, __x: _Decimal) -> str: ... + def to_integral_exact(self, __x: _Decimal) -> Decimal: ... + def to_integral_value(self, __x: _Decimal) -> Decimal: ... + def to_integral(self, __x: _Decimal) -> Decimal: ... + +DefaultContext: Context +BasicContext: Context +ExtendedContext: Context diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_dummy_thread.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_dummy_thread.pyi new file mode 100644 index 00000000..e371dd0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_dummy_thread.pyi @@ -0,0 +1,27 @@ +from collections.abc import Callable +from types import TracebackType +from typing import Any, NoReturn + +__all__ = ["error", "start_new_thread", "exit", "get_ident", "allocate_lock", "interrupt_main", "LockType", "RLock"] + +TIMEOUT_MAX: int +error = RuntimeError + +def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any] = ...) -> None: ... +def exit() -> NoReturn: ... +def get_ident() -> int: ... +def allocate_lock() -> LockType: ... +def stack_size(size: int | None = None) -> int: ... + +class LockType: + locked_status: bool + def acquire(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ... + def __enter__(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ... + def __exit__(self, typ: type[BaseException] | None, val: BaseException | None, tb: TracebackType | None) -> None: ... + def release(self) -> bool: ... + def locked(self) -> bool: ... + +class RLock(LockType): + def release(self) -> None: ... # type: ignore[override] + +def interrupt_main() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_dummy_threading.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_dummy_threading.pyi new file mode 100644 index 00000000..9a49dfa9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_dummy_threading.pyi @@ -0,0 +1,169 @@ +import sys +from _typeshed import ProfileFunction, TraceFunction +from collections.abc import Callable, Iterable, Mapping +from types import TracebackType +from typing import Any, TypeVar + +_T = TypeVar("_T") + +__all__ = [ + "get_ident", + "active_count", + "Condition", + "current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "setprofile", + "settrace", + "local", + "stack_size", +] + +if sys.version_info >= (3, 8): + __all__ += ["ExceptHookArgs", "excepthook"] + +def active_count() -> int: ... +def current_thread() -> Thread: ... +def currentThread() -> Thread: ... +def get_ident() -> int: ... +def enumerate() -> list[Thread]: ... +def main_thread() -> Thread: ... +def settrace(func: TraceFunction) -> None: ... +def setprofile(func: ProfileFunction | None) -> None: ... +def stack_size(size: int | None = None) -> int: ... + +TIMEOUT_MAX: float + +class ThreadError(Exception): ... + +class local: + def __getattribute__(self, name: str) -> Any: ... + def __setattr__(self, name: str, value: Any) -> None: ... 
+ def __delattr__(self, name: str) -> None: ... + +class Thread: + name: str + daemon: bool + @property + def ident(self) -> int | None: ... + def __init__( + self, + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = ..., + kwargs: Mapping[str, Any] | None = None, + *, + daemon: bool | None = None, + ) -> None: ... + def start(self) -> None: ... + def run(self) -> None: ... + def join(self, timeout: float | None = None) -> None: ... + def getName(self) -> str: ... + def setName(self, name: str) -> None: ... + if sys.version_info >= (3, 8): + @property + def native_id(self) -> int | None: ... # only available on some platforms + + def is_alive(self) -> bool: ... + if sys.version_info < (3, 9): + def isAlive(self) -> bool: ... + + def isDaemon(self) -> bool: ... + def setDaemon(self, daemonic: bool) -> None: ... + +class _DummyThread(Thread): ... + +class Lock: + def __enter__(self) -> bool: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... + def release(self) -> None: ... + def locked(self) -> bool: ... + +class _RLock: + def __enter__(self) -> bool: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... + +RLock = _RLock + +class Condition: + def __init__(self, lock: Lock | _RLock | None = None) -> None: ... + def __enter__(self) -> bool: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... + def release(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... + def notifyAll(self) -> None: ... + +class Semaphore: + def __init__(self, value: int = 1) -> None: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + if sys.version_info >= (3, 9): + def release(self, n: int = ...) -> None: ... + else: + def release(self) -> None: ... + +class BoundedSemaphore(Semaphore): ... + +class Event: + def is_set(self) -> bool: ... + def set(self) -> None: ... + def clear(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + +if sys.version_info >= (3, 8): + from _thread import _excepthook, _ExceptHookArgs + + excepthook = _excepthook + ExceptHookArgs = _ExceptHookArgs + +class Timer(Thread): + def __init__( + self, + interval: float, + function: Callable[..., object], + args: Iterable[Any] | None = None, + kwargs: Mapping[str, Any] | None = None, + ) -> None: ... + def cancel(self) -> None: ... + +class Barrier: + @property + def parties(self) -> int: ... + @property + def n_waiting(self) -> int: ... + @property + def broken(self) -> bool: ... 
+ def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ... + def wait(self, timeout: float | None = None) -> int: ... + def reset(self) -> None: ... + def abort(self) -> None: ... + +class BrokenBarrierError(RuntimeError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_heapq.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_heapq.pyi new file mode 100644 index 00000000..8d6c3e88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_heapq.pyi @@ -0,0 +1,12 @@ +from typing import Any, TypeVar +from typing_extensions import Final + +_T = TypeVar("_T") + +__about__: Final[str] + +def heapify(__heap: list[Any]) -> None: ... +def heappop(__heap: list[_T]) -> _T: ... +def heappush(__heap: list[_T], __item: _T) -> None: ... +def heappushpop(__heap: list[_T], __item: _T) -> _T: ... +def heapreplace(__heap: list[_T], __item: _T) -> _T: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_imp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_imp.pyi new file mode 100644 index 00000000..adab2e80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_imp.pyi @@ -0,0 +1,28 @@ +import sys +import types +from _typeshed import ReadableBuffer +from importlib.machinery import ModuleSpec +from typing import Any + +check_hash_based_pycs: str + +def source_hash(key: int, source: ReadableBuffer) -> bytes: ... +def create_builtin(__spec: ModuleSpec) -> types.ModuleType: ... +def create_dynamic(__spec: ModuleSpec, __file: Any = None) -> types.ModuleType: ... +def acquire_lock() -> None: ... +def exec_builtin(__mod: types.ModuleType) -> int: ... +def exec_dynamic(__mod: types.ModuleType) -> int: ... +def extension_suffixes() -> list[str]: ... +def init_frozen(__name: str) -> types.ModuleType: ... +def is_builtin(__name: str) -> int: ... +def is_frozen(__name: str) -> bool: ... +def is_frozen_package(__name: str) -> bool: ... +def lock_held() -> bool: ... +def release_lock() -> None: ... + +if sys.version_info >= (3, 11): + def find_frozen(__name: str, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: ... + def get_frozen_object(__name: str, __data: ReadableBuffer | None = None) -> types.CodeType: ... + +else: + def get_frozen_object(__name: str) -> types.CodeType: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_json.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_json.pyi new file mode 100644 index 00000000..130f7ab9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_json.pyi @@ -0,0 +1,50 @@ +from collections.abc import Callable +from typing import Any +from typing_extensions import final + +@final +class make_encoder: + @property + def sort_keys(self) -> bool: ... + @property + def skipkeys(self) -> bool: ... + @property + def key_separator(self) -> str: ... + @property + def indent(self) -> int | None: ... + @property + def markers(self) -> dict[int, Any] | None: ... + @property + def default(self) -> Callable[[Any], Any]: ... + @property + def encoder(self) -> Callable[[str], str]: ... + @property + def item_separator(self) -> str: ... 
+ def __init__( + self, + markers: dict[int, Any] | None, + default: Callable[[Any], Any], + encoder: Callable[[str], str], + indent: int | None, + key_separator: str, + item_separator: str, + sort_keys: bool, + skipkeys: bool, + allow_nan: bool, + ) -> None: ... + def __call__(self, obj: object, _current_indent_level: int) -> Any: ... + +@final +class make_scanner: + object_hook: Any + object_pairs_hook: Any + parse_int: Any + parse_constant: Any + parse_float: Any + strict: bool + # TODO: 'context' needs the attrs above (ducktype), but not __call__. + def __init__(self, context: make_scanner) -> None: ... + def __call__(self, string: str, index: int) -> tuple[Any, int]: ... + +def encode_basestring_ascii(s: str) -> str: ... +def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_markupbase.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_markupbase.pyi new file mode 100644 index 00000000..62bad25e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_markupbase.pyi @@ -0,0 +1,16 @@ +import sys +from typing import Any + +class ParserBase: + def reset(self) -> None: ... + def getpos(self) -> tuple[int, int]: ... + def unknown_decl(self, data: str) -> None: ... + def parse_comment(self, i: int, report: int = 1) -> int: ... # undocumented + def parse_declaration(self, i: int) -> int: ... # undocumented + def parse_marked_section(self, i: int, report: int = 1) -> int: ... # undocumented + def updatepos(self, i: int, j: int) -> int: ... # undocumented + if sys.version_info < (3, 10): + # Removed from ParserBase: https://bugs.python.org/issue31844 + def error(self, message: str) -> Any: ... # undocumented + lineno: int # undocumented + offset: int # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_msi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_msi.pyi new file mode 100644 index 00000000..2fdbdfd0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_msi.pyi @@ -0,0 +1,90 @@ +import sys + +if sys.platform == "win32": + # Actual typename View, not exposed by the implementation + class _View: + def Execute(self, params: _Record | None = ...) -> None: ... + def GetColumnInfo(self, kind: int) -> _Record: ... + def Fetch(self) -> _Record: ... + def Modify(self, mode: int, record: _Record) -> None: ... + def Close(self) -> None: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + + # Actual typename SummaryInformation, not exposed by the implementation + class _SummaryInformation: + def GetProperty(self, field: int) -> int | bytes | None: ... + def GetPropertyCount(self) -> int: ... + def SetProperty(self, field: int, value: int | str) -> None: ... + def Persist(self) -> None: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + + # Actual typename Database, not exposed by the implementation + class _Database: + def OpenView(self, sql: str) -> _View: ... + def Commit(self) -> None: ... + def GetSummaryInformation(self, updateCount: int) -> _SummaryInformation: ... + def Close(self) -> None: ... 
+ # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + + # Actual typename Record, not exposed by the implementation + class _Record: + def GetFieldCount(self) -> int: ... + def GetInteger(self, field: int) -> int: ... + def GetString(self, field: int) -> str: ... + def SetString(self, field: int, str: str) -> None: ... + def SetStream(self, field: int, stream: str) -> None: ... + def SetInteger(self, field: int, int: int) -> None: ... + def ClearData(self) -> None: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + def UuidCreate() -> str: ... + def FCICreate(__cabname: str, __files: list[str]) -> None: ... + def OpenDatabase(__path: str, __persist: int) -> _Database: ... + def CreateRecord(__count: int) -> _Record: ... + + MSICOLINFO_NAMES: int + MSICOLINFO_TYPES: int + MSIDBOPEN_CREATE: int + MSIDBOPEN_CREATEDIRECT: int + MSIDBOPEN_DIRECT: int + MSIDBOPEN_PATCHFILE: int + MSIDBOPEN_READONLY: int + MSIDBOPEN_TRANSACT: int + MSIMODIFY_ASSIGN: int + MSIMODIFY_DELETE: int + MSIMODIFY_INSERT: int + MSIMODIFY_INSERT_TEMPORARY: int + MSIMODIFY_MERGE: int + MSIMODIFY_REFRESH: int + MSIMODIFY_REPLACE: int + MSIMODIFY_SEEK: int + MSIMODIFY_UPDATE: int + MSIMODIFY_VALIDATE: int + MSIMODIFY_VALIDATE_DELETE: int + MSIMODIFY_VALIDATE_FIELD: int + MSIMODIFY_VALIDATE_NEW: int + + PID_APPNAME: int + PID_AUTHOR: int + PID_CHARCOUNT: int + PID_CODEPAGE: int + PID_COMMENTS: int + PID_CREATE_DTM: int + PID_KEYWORDS: int + PID_LASTAUTHOR: int + PID_LASTPRINTED: int + PID_LASTSAVE_DTM: int + PID_PAGECOUNT: int + PID_REVNUMBER: int + PID_SECURITY: int + PID_SUBJECT: int + PID_TEMPLATE: int + PID_TITLE: int + PID_WORDCOUNT: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_operator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_operator.pyi new file mode 100644 index 00000000..e7d1a98c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_operator.pyi @@ -0,0 +1,156 @@ +import sys +from _typeshed import SupportsGetItem +from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence +from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, TypeVar, overload +from typing_extensions import ParamSpec, SupportsIndex, TypeAlias, final + +_R = TypeVar("_R") +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_K = TypeVar("_K") +_V = TypeVar("_V") +_P = ParamSpec("_P") + +# The following protocols return "Any" instead of bool, since the comparison +# operators can be overloaded to return an arbitrary object. For example, +# the numpy.array comparison dunders return another numpy.array. + +class _SupportsDunderLT(Protocol): + def __lt__(self, __other: Any) -> Any: ... + +class _SupportsDunderGT(Protocol): + def __gt__(self, __other: Any) -> Any: ... + +class _SupportsDunderLE(Protocol): + def __le__(self, __other: Any) -> Any: ... + +class _SupportsDunderGE(Protocol): + def __ge__(self, __other: Any) -> Any: ... + +_SupportsComparison: TypeAlias = _SupportsDunderLE | _SupportsDunderGE | _SupportsDunderGT | _SupportsDunderLT + +class _SupportsInversion(Protocol[_T_co]): + def __invert__(self) -> _T_co: ... + +class _SupportsNeg(Protocol[_T_co]): + def __neg__(self) -> _T_co: ... + +class _SupportsPos(Protocol[_T_co]): + def __pos__(self) -> _T_co: ... 
+ +# All four comparison functions must have the same signature, or we get false-positive errors +def lt(__a: _SupportsComparison, __b: _SupportsComparison) -> Any: ... +def le(__a: _SupportsComparison, __b: _SupportsComparison) -> Any: ... +def eq(__a: object, __b: object) -> Any: ... +def ne(__a: object, __b: object) -> Any: ... +def ge(__a: _SupportsComparison, __b: _SupportsComparison) -> Any: ... +def gt(__a: _SupportsComparison, __b: _SupportsComparison) -> Any: ... +def not_(__a: object) -> bool: ... +def truth(__a: object) -> bool: ... +def is_(__a: object, __b: object) -> bool: ... +def is_not(__a: object, __b: object) -> bool: ... +def abs(__a: SupportsAbs[_T]) -> _T: ... +def add(__a: Any, __b: Any) -> Any: ... +def and_(__a: Any, __b: Any) -> Any: ... +def floordiv(__a: Any, __b: Any) -> Any: ... +def index(__a: SupportsIndex) -> int: ... +def inv(__a: _SupportsInversion[_T_co]) -> _T_co: ... +def invert(__a: _SupportsInversion[_T_co]) -> _T_co: ... +def lshift(__a: Any, __b: Any) -> Any: ... +def mod(__a: Any, __b: Any) -> Any: ... +def mul(__a: Any, __b: Any) -> Any: ... +def matmul(__a: Any, __b: Any) -> Any: ... +def neg(__a: _SupportsNeg[_T_co]) -> _T_co: ... +def or_(__a: Any, __b: Any) -> Any: ... +def pos(__a: _SupportsPos[_T_co]) -> _T_co: ... +def pow(__a: Any, __b: Any) -> Any: ... +def rshift(__a: Any, __b: Any) -> Any: ... +def sub(__a: Any, __b: Any) -> Any: ... +def truediv(__a: Any, __b: Any) -> Any: ... +def xor(__a: Any, __b: Any) -> Any: ... +def concat(__a: Sequence[_T], __b: Sequence[_T]) -> Sequence[_T]: ... +def contains(__a: Container[object], __b: object) -> bool: ... +def countOf(__a: Iterable[object], __b: object) -> int: ... +@overload +def delitem(__a: MutableSequence[Any], __b: SupportsIndex) -> None: ... +@overload +def delitem(__a: MutableSequence[Any], __b: slice) -> None: ... +@overload +def delitem(__a: MutableMapping[_K, Any], __b: _K) -> None: ... +@overload +def getitem(__a: Sequence[_T], __b: slice) -> Sequence[_T]: ... +@overload +def getitem(__a: SupportsGetItem[_K, _V], __b: _K) -> _V: ... +def indexOf(__a: Iterable[_T], __b: _T) -> int: ... +@overload +def setitem(__a: MutableSequence[_T], __b: SupportsIndex, __c: _T) -> None: ... +@overload +def setitem(__a: MutableSequence[_T], __b: slice, __c: Sequence[_T]) -> None: ... +@overload +def setitem(__a: MutableMapping[_K, _V], __b: _K, __c: _V) -> None: ... +def length_hint(__obj: object, __default: int = 0) -> int: ... +@final +class attrgetter(Generic[_T_co]): + @overload + def __new__(cls, attr: str) -> attrgetter[Any]: ... + @overload + def __new__(cls, attr: str, __attr2: str) -> attrgetter[tuple[Any, Any]]: ... + @overload + def __new__(cls, attr: str, __attr2: str, __attr3: str) -> attrgetter[tuple[Any, Any, Any]]: ... + @overload + def __new__(cls, attr: str, __attr2: str, __attr3: str, __attr4: str) -> attrgetter[tuple[Any, Any, Any, Any]]: ... + @overload + def __new__(cls, attr: str, *attrs: str) -> attrgetter[tuple[Any, ...]]: ... + def __call__(self, obj: Any) -> _T_co: ... + +@final +class itemgetter(Generic[_T_co]): + # mypy lacks support for PEP 646 https://github.com/python/mypy/issues/12280 + # So we have to define all of these overloads to simulate unpacking the arguments + @overload + def __new__(cls, item: _T_co) -> itemgetter[_T_co]: ... + @overload + def __new__(cls, item: _T_co, __item2: _T_co) -> itemgetter[tuple[_T_co, _T_co]]: ... + @overload + def __new__(cls, item: _T_co, __item2: _T_co, __item3: _T_co) -> itemgetter[tuple[_T_co, _T_co, _T_co]]: ... 
+ @overload + def __new__( + cls, item: _T_co, __item2: _T_co, __item3: _T_co, __item4: _T_co + ) -> itemgetter[tuple[_T_co, _T_co, _T_co, _T_co]]: ... + @overload + def __new__( + cls, item: _T_co, __item2: _T_co, __item3: _T_co, __item4: _T_co, *items: _T_co + ) -> itemgetter[tuple[_T_co, ...]]: ... + # __key: _KT_contra in SupportsGetItem seems to be causing variance issues, ie: + # TypeVar "_KT_contra@SupportsGetItem" is contravariant + # "tuple[int, int]" is incompatible with protocol "SupportsIndex" + # preventing [_T_co, ...] instead of [Any, ...] + # + # A suspected mypy issue prevents using [..., _T] instead of [..., Any] here. + # https://github.com/python/mypy/issues/14032 + def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ... + +@final +class methodcaller: + def __init__(self, __name: str, *args: Any, **kwargs: Any) -> None: ... + def __call__(self, obj: Any) -> Any: ... + +def iadd(__a: Any, __b: Any) -> Any: ... +def iand(__a: Any, __b: Any) -> Any: ... +def iconcat(__a: Any, __b: Any) -> Any: ... +def ifloordiv(__a: Any, __b: Any) -> Any: ... +def ilshift(__a: Any, __b: Any) -> Any: ... +def imod(__a: Any, __b: Any) -> Any: ... +def imul(__a: Any, __b: Any) -> Any: ... +def imatmul(__a: Any, __b: Any) -> Any: ... +def ior(__a: Any, __b: Any) -> Any: ... +def ipow(__a: Any, __b: Any) -> Any: ... +def irshift(__a: Any, __b: Any) -> Any: ... +def isub(__a: Any, __b: Any) -> Any: ... +def itruediv(__a: Any, __b: Any) -> Any: ... +def ixor(__a: Any, __b: Any) -> Any: ... + +if sys.version_info >= (3, 11): + def call(__obj: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + +def _compare_digest(__a: AnyStr, __b: AnyStr) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_osx_support.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_osx_support.pyi new file mode 100644 index 00000000..3eb6f4dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_osx_support.pyi @@ -0,0 +1,41 @@ +import sys +from collections.abc import Iterable, Sequence +from typing import TypeVar + +_T = TypeVar("_T") +_K = TypeVar("_K") +_V = TypeVar("_V") + +__all__ = ["compiler_fixup", "customize_config_vars", "customize_compiler", "get_platform_osx"] + +_UNIVERSAL_CONFIG_VARS: tuple[str, ...] # undocumented +_COMPILER_CONFIG_VARS: tuple[str, ...] # undocumented +_INITPRE: str # undocumented + +def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented + +if sys.version_info >= (3, 8): + def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented + +else: + def _read_output(commandstring: str) -> str | None: ... # undocumented + +def _find_build_tool(toolname: str) -> str: ... # undocumented + +_SYSTEM_VERSION: str | None # undocumented + +def _get_system_version() -> str: ... # undocumented +def _remove_original_values(_config_vars: dict[str, str]) -> None: ... # undocumented +def _save_modified_value(_config_vars: dict[str, str], cv: str, newvalue: str) -> None: ... # undocumented +def _supports_universal_builds() -> bool: ... # undocumented +def _find_appropriate_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _remove_universal_flags(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _remove_unsupported_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... 
# undocumented +def _override_all_archs(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def _check_for_unavailable_sdk(_config_vars: dict[str, str]) -> dict[str, str]: ... # undocumented +def compiler_fixup(compiler_so: Iterable[str], cc_args: Sequence[str]) -> list[str]: ... +def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: ... +def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... +def get_platform_osx( + _config_vars: dict[str, str], osname: _T, release: _K, machine: _V +) -> tuple[str | _T, str | _K, str | _V]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_posixsubprocess.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_posixsubprocess.pyi new file mode 100644 index 00000000..ca95336b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_posixsubprocess.pyi @@ -0,0 +1,32 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Callable, Sequence +from typing_extensions import SupportsIndex + +if sys.platform != "win32": + def cloexec_pipe() -> tuple[int, int]: ... + def fork_exec( + __process_args: Sequence[StrOrBytesPath] | None, + __executable_list: Sequence[bytes], + __close_fds: bool, + __fds_to_keep: tuple[int, ...], + __cwd_obj: str, + __env_list: Sequence[bytes] | None, + __p2cread: int, + __p2cwrite: int, + __c2pred: int, + __c2pwrite: int, + __errread: int, + __errwrite: int, + __errpipe_read: int, + __errpipe_write: int, + __restore_signals: int, + __call_setsid: int, + __pgid_to_set: int, + __gid_object: SupportsIndex | None, + __groups_list: list[int] | None, + __uid_object: SupportsIndex | None, + __child_umask: int, + __preexec_fn: Callable[[], None], + __allow_vfork: bool, + ) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_py_abc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_py_abc.pyi new file mode 100644 index 00000000..cc45c6ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_py_abc.pyi @@ -0,0 +1,14 @@ +import _typeshed +from typing import Any, NewType, TypeVar + +_T = TypeVar("_T") + +_CacheToken = NewType("_CacheToken", int) + +def get_cache_token() -> _CacheToken: ... + +class ABCMeta(type): + def __new__( + __mcls: type[_typeshed.Self], __name: str, __bases: tuple[type[Any], ...], __namespace: dict[str, Any] + ) -> _typeshed.Self: ... + def register(cls, subclass: type[_T]) -> type[_T]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_pydecimal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_pydecimal.pyi new file mode 100644 index 00000000..faff626a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_pydecimal.pyi @@ -0,0 +1,43 @@ +# This is a slight lie, the implementations aren't exactly identical +# However, in all likelihood, the differences are inconsequential +from _decimal import * + +__all__ = [ + "Decimal", + "Context", + "DecimalTuple", + "DefaultContext", + "BasicContext", + "ExtendedContext", + "DecimalException", + "Clamped", + "InvalidOperation", + "DivisionByZero", + "Inexact", + "Rounded", + "Subnormal", + "Overflow", + "Underflow", + "FloatOperation", + "DivisionImpossible", + "InvalidContext", + "ConversionSyntax", + "DivisionUndefined", + "ROUND_DOWN", + "ROUND_HALF_UP", + "ROUND_HALF_EVEN", + "ROUND_CEILING", + "ROUND_FLOOR", + "ROUND_UP", + "ROUND_HALF_DOWN", + "ROUND_05UP", + "setcontext", + "getcontext", + "localcontext", + "MAX_PREC", + "MAX_EMAX", + "MIN_EMIN", + "MIN_ETINY", + "HAVE_THREADS", + "HAVE_CONTEXTVAR", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_random.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_random.pyi new file mode 100644 index 00000000..7c5803ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_random.pyi @@ -0,0 +1,12 @@ +from typing_extensions import TypeAlias + +# Actually Tuple[(int,) * 625] +_State: TypeAlias = tuple[int, ...] + +class Random: + def __init__(self, seed: object = ...) -> None: ... + def seed(self, __n: object = None) -> None: ... + def getstate(self) -> _State: ... + def setstate(self, __state: _State) -> None: ... + def random(self) -> float: ... + def getrandbits(self, __k: int) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_sitebuiltins.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_sitebuiltins.pyi new file mode 100644 index 00000000..3bda2d88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_sitebuiltins.pyi @@ -0,0 +1,17 @@ +from collections.abc import Iterable +from typing import ClassVar, NoReturn +from typing_extensions import Literal + +class Quitter: + name: str + eof: str + def __init__(self, name: str, eof: str) -> None: ... + def __call__(self, code: int | None = None) -> NoReturn: ... + +class _Printer: + MAXLINES: ClassVar[Literal[23]] + def __init__(self, name: str, data: str, files: Iterable[str] = ..., dirs: Iterable[str] = ...) -> None: ... + def __call__(self) -> None: ... + +class _Helper: + def __call__(self, request: object) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_socket.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_socket.pyi new file mode 100644 index 00000000..f7b0e690 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_socket.pyi @@ -0,0 +1,694 @@ +import sys +from _typeshed import ReadableBuffer, WriteableBuffer +from collections.abc import Iterable +from typing import Any, SupportsInt, overload +from typing_extensions import TypeAlias + +if sys.version_info >= (3, 8): + from typing import SupportsIndex + + _FD: TypeAlias = SupportsIndex +else: + _FD: TypeAlias = SupportsInt + +_CMSG: TypeAlias = tuple[int, int, bytes] +_CMSGArg: TypeAlias = tuple[int, int, ReadableBuffer] + +# Addresses can be either tuples of varying lengths (AF_INET, AF_INET6, +# AF_NETLINK, AF_TIPC) or strings/buffers (AF_UNIX). +# See getsockaddrarg() in socketmodule.c. +_Address: TypeAlias = tuple[Any, ...] | str | ReadableBuffer +_RetAddress: TypeAlias = Any + +# ----- Constants ----- +# Some socket families are listed in the "Socket families" section of the docs, +# but not the "Constants" section. These are listed at the end of the list of +# constants. +# +# Besides those and the first few constants listed, the constants are listed in +# documentation order. + +has_ipv6: bool + +AF_INET: int +AF_INET6: int + +SOCK_STREAM: int +SOCK_DGRAM: int +SOCK_RAW: int +SOCK_RDM: int +SOCK_SEQPACKET: int + +if sys.platform == "linux": + SOCK_CLOEXEC: int + SOCK_NONBLOCK: int + +# Address families not mentioned in the docs +AF_APPLETALK: int +AF_DECnet: int +AF_IPX: int +AF_SNA: int +AF_UNSPEC: int + +if sys.platform != "win32": + AF_ROUTE: int + AF_SYSTEM: int + AF_UNIX: int + +if sys.platform != "darwin": + AF_IRDA: int + +if sys.platform != "darwin" and sys.platform != "win32": + AF_AAL5: int + AF_ASH: int + AF_ATMPVC: int + AF_ATMSVC: int + AF_AX25: int + AF_BRIDGE: int + AF_ECONET: int + AF_KEY: int + AF_LLC: int + AF_NETBEUI: int + AF_NETROM: int + AF_PPPOX: int + AF_ROSE: int + AF_SECURITY: int + AF_WANPIPE: int + AF_X25: int + +# The "many constants" referenced by the docs +SOMAXCONN: int +AI_ADDRCONFIG: int +AI_ALL: int +AI_CANONNAME: int +AI_NUMERICHOST: int +AI_NUMERICSERV: int +AI_PASSIVE: int +AI_V4MAPPED: int +EAI_AGAIN: int +EAI_BADFLAGS: int +EAI_FAIL: int +EAI_FAMILY: int +EAI_MEMORY: int +EAI_NODATA: int +EAI_NONAME: int +EAI_SERVICE: int +EAI_SOCKTYPE: int +INADDR_ALLHOSTS_GROUP: int +INADDR_ANY: int +INADDR_BROADCAST: int +INADDR_LOOPBACK: int +INADDR_MAX_LOCAL_GROUP: int +INADDR_NONE: int +INADDR_UNSPEC_GROUP: int +IPPORT_RESERVED: int +IPPORT_USERRESERVED: int + +if sys.platform != "win32" or sys.version_info >= (3, 8): + IPPROTO_AH: int + IPPROTO_DSTOPTS: int + IPPROTO_EGP: int + IPPROTO_ESP: int + IPPROTO_FRAGMENT: int + IPPROTO_GGP: int + IPPROTO_HOPOPTS: int + IPPROTO_ICMPV6: int + IPPROTO_IDP: int + IPPROTO_IGMP: int + IPPROTO_IPV4: int + IPPROTO_IPV6: int + IPPROTO_MAX: int + IPPROTO_ND: int + IPPROTO_NONE: int + IPPROTO_PIM: int + IPPROTO_PUP: int + IPPROTO_ROUTING: int + IPPROTO_SCTP: int + + if sys.platform != "darwin": + IPPROTO_CBT: int + IPPROTO_ICLFXBM: int + IPPROTO_IGP: int + IPPROTO_L2TP: int + IPPROTO_PGM: int + IPPROTO_RDP: int + IPPROTO_ST: int + +IPPROTO_ICMP: int +IPPROTO_IP: int +IPPROTO_RAW: int +IPPROTO_TCP: int +IPPROTO_UDP: int +IPV6_CHECKSUM: int +IPV6_JOIN_GROUP: int +IPV6_LEAVE_GROUP: int +IPV6_MULTICAST_HOPS: int +IPV6_MULTICAST_IF: int +IPV6_MULTICAST_LOOP: int 
+IPV6_RECVTCLASS: int +IPV6_TCLASS: int +IPV6_UNICAST_HOPS: int +IPV6_V6ONLY: int + +if sys.platform != "darwin" or sys.version_info >= (3, 9): + IPV6_DONTFRAG: int + IPV6_HOPLIMIT: int + IPV6_HOPOPTS: int + IPV6_PKTINFO: int + IPV6_RECVRTHDR: int + IPV6_RTHDR: int + +IP_ADD_MEMBERSHIP: int +IP_DROP_MEMBERSHIP: int +IP_HDRINCL: int +IP_MULTICAST_IF: int +IP_MULTICAST_LOOP: int +IP_MULTICAST_TTL: int +IP_OPTIONS: int +IP_RECVDSTADDR: int +if sys.version_info >= (3, 10): + IP_RECVTOS: int +elif sys.platform != "win32" and sys.platform != "darwin": + IP_RECVTOS: int +IP_TOS: int +IP_TTL: int +MSG_CTRUNC: int +MSG_DONTROUTE: int + +if sys.platform != "darwin": + MSG_ERRQUEUE: int + +MSG_OOB: int +MSG_PEEK: int +MSG_TRUNC: int +MSG_WAITALL: int +NI_DGRAM: int +NI_MAXHOST: int +NI_MAXSERV: int +NI_NAMEREQD: int +NI_NOFQDN: int +NI_NUMERICHOST: int +NI_NUMERICSERV: int +SHUT_RD: int +SHUT_RDWR: int +SHUT_WR: int +SOL_IP: int +SOL_SOCKET: int +SOL_TCP: int +SOL_UDP: int +SO_ACCEPTCONN: int +SO_BROADCAST: int +SO_DEBUG: int +SO_DONTROUTE: int +SO_ERROR: int +SO_KEEPALIVE: int +SO_LINGER: int +SO_OOBINLINE: int +SO_RCVBUF: int +SO_RCVLOWAT: int +SO_RCVTIMEO: int +SO_REUSEADDR: int +SO_SNDBUF: int +SO_SNDLOWAT: int +SO_SNDTIMEO: int +SO_TYPE: int +SO_USELOOPBACK: int +if sys.platform == "linux" and sys.version_info >= (3, 11): + SO_INCOMING_CPU: int +TCP_FASTOPEN: int +TCP_KEEPCNT: int +TCP_KEEPINTVL: int + +if sys.platform != "darwin": + TCP_KEEPIDLE: int + +TCP_MAXSEG: int +TCP_NODELAY: int +if sys.platform != "win32": + TCP_NOTSENT_LOWAT: int +if sys.version_info >= (3, 10) and sys.platform == "darwin": + TCP_KEEPALIVE: int +if sys.version_info >= (3, 11) and sys.platform == "darwin": + TCP_CONNECTION_INFO: int + +if sys.platform != "darwin": + MSG_BCAST: int + MSG_MCAST: int + SO_EXCLUSIVEADDRUSE: int + +if sys.platform != "win32": + AI_DEFAULT: int + AI_MASK: int + AI_V4MAPPED_CFG: int + EAI_ADDRFAMILY: int + EAI_BADHINTS: int + EAI_MAX: int + EAI_OVERFLOW: int + EAI_PROTOCOL: int + EAI_SYSTEM: int + IPPROTO_EON: int + IPPROTO_GRE: int + IPPROTO_HELLO: int + IPPROTO_IPCOMP: int + IPPROTO_IPIP: int + IPPROTO_RSVP: int + IPPROTO_TP: int + IPPROTO_XTP: int + IPV6_RTHDR_TYPE_0: int + IP_DEFAULT_MULTICAST_LOOP: int + IP_DEFAULT_MULTICAST_TTL: int + IP_MAX_MEMBERSHIPS: int + IP_RECVOPTS: int + IP_RECVRETOPTS: int + IP_RETOPTS: int + LOCAL_PEERCRED: int + MSG_DONTWAIT: int + MSG_EOF: int + MSG_EOR: int + MSG_NOSIGNAL: int # Sometimes this exists on darwin, sometimes not + SCM_CREDS: int + SCM_RIGHTS: int + SO_REUSEPORT: int + +if sys.platform != "win32": + if sys.platform != "darwin" or sys.version_info >= (3, 9): + IPV6_DSTOPTS: int + IPV6_NEXTHOP: int + IPV6_PATHMTU: int + IPV6_RECVDSTOPTS: int + IPV6_RECVHOPLIMIT: int + IPV6_RECVHOPOPTS: int + IPV6_RECVPATHMTU: int + IPV6_RECVPKTINFO: int + IPV6_RTHDRDSTOPTS: int + IPV6_USE_MIN_MTU: int + +if sys.platform != "win32" and sys.platform != "darwin": + IPPROTO_BIP: int + IPPROTO_MOBILE: int + IPPROTO_VRRP: int + IPX_TYPE: int + IP_TRANSPARENT: int + MSG_BTAG: int + MSG_CMSG_CLOEXEC: int + MSG_CONFIRM: int + MSG_ETAG: int + MSG_FASTOPEN: int + MSG_MORE: int + MSG_NOTIFICATION: int + SCM_CREDENTIALS: int + SOL_ATALK: int + SOL_AX25: int + SOL_HCI: int + SOL_IPX: int + SOL_NETROM: int + SOL_ROSE: int + SO_BINDTODEVICE: int + SO_MARK: int + SO_PASSCRED: int + SO_PEERCRED: int + SO_PRIORITY: int + SO_SETFIB: int + TCP_CORK: int + TCP_DEFER_ACCEPT: int + TCP_INFO: int + TCP_LINGER2: int + TCP_QUICKACK: int + TCP_SYNCNT: int + TCP_WINDOW_CLAMP: int + +# 
Specifically-documented constants + +if sys.platform == "linux": + AF_CAN: int + PF_CAN: int + SOL_CAN_BASE: int + SOL_CAN_RAW: int + CAN_EFF_FLAG: int + CAN_EFF_MASK: int + CAN_ERR_FLAG: int + CAN_ERR_MASK: int + CAN_RAW: int + CAN_RAW_ERR_FILTER: int + CAN_RAW_FILTER: int + CAN_RAW_LOOPBACK: int + CAN_RAW_RECV_OWN_MSGS: int + CAN_RTR_FLAG: int + CAN_SFF_MASK: int + + CAN_BCM: int + CAN_BCM_TX_SETUP: int + CAN_BCM_TX_DELETE: int + CAN_BCM_TX_READ: int + CAN_BCM_TX_SEND: int + CAN_BCM_RX_SETUP: int + CAN_BCM_RX_DELETE: int + CAN_BCM_RX_READ: int + CAN_BCM_TX_STATUS: int + CAN_BCM_TX_EXPIRED: int + CAN_BCM_RX_STATUS: int + CAN_BCM_RX_TIMEOUT: int + CAN_BCM_RX_CHANGED: int + + CAN_RAW_FD_FRAMES: int + +if sys.platform == "linux" and sys.version_info >= (3, 8): + CAN_BCM_SETTIMER: int + CAN_BCM_STARTTIMER: int + CAN_BCM_TX_COUNTEVT: int + CAN_BCM_TX_ANNOUNCE: int + CAN_BCM_TX_CP_CAN_ID: int + CAN_BCM_RX_FILTER_ID: int + CAN_BCM_RX_CHECK_DLC: int + CAN_BCM_RX_NO_AUTOTIMER: int + CAN_BCM_RX_ANNOUNCE_RESUME: int + CAN_BCM_TX_RESET_MULTI_IDX: int + CAN_BCM_RX_RTR_FRAME: int + CAN_BCM_CAN_FD_FRAME: int + +if sys.platform == "linux": + CAN_ISOTP: int + +if sys.platform == "linux" and sys.version_info >= (3, 9): + CAN_J1939: int + CAN_RAW_JOIN_FILTERS: int + + J1939_MAX_UNICAST_ADDR: int + J1939_IDLE_ADDR: int + J1939_NO_ADDR: int + J1939_NO_NAME: int + J1939_PGN_REQUEST: int + J1939_PGN_ADDRESS_CLAIMED: int + J1939_PGN_ADDRESS_COMMANDED: int + J1939_PGN_PDU1_MAX: int + J1939_PGN_MAX: int + J1939_NO_PGN: int + + SO_J1939_FILTER: int + SO_J1939_PROMISC: int + SO_J1939_SEND_PRIO: int + SO_J1939_ERRQUEUE: int + + SCM_J1939_DEST_ADDR: int + SCM_J1939_DEST_NAME: int + SCM_J1939_PRIO: int + SCM_J1939_ERRQUEUE: int + + J1939_NLA_PAD: int + J1939_NLA_BYTES_ACKED: int + + J1939_EE_INFO_NONE: int + J1939_EE_INFO_TX_ABORT: int + + J1939_FILTER_MAX: int + +if sys.platform == "linux" and sys.version_info >= (3, 10): + IPPROTO_MPTCP: int + +if sys.platform == "linux": + AF_PACKET: int + PF_PACKET: int + PACKET_BROADCAST: int + PACKET_FASTROUTE: int + PACKET_HOST: int + PACKET_LOOPBACK: int + PACKET_MULTICAST: int + PACKET_OTHERHOST: int + PACKET_OUTGOING: int + +if sys.platform == "linux": + AF_RDS: int + PF_RDS: int + SOL_RDS: int + RDS_CANCEL_SENT_TO: int + RDS_CMSG_RDMA_ARGS: int + RDS_CMSG_RDMA_DEST: int + RDS_CMSG_RDMA_MAP: int + RDS_CMSG_RDMA_STATUS: int + RDS_CMSG_RDMA_UPDATE: int + RDS_CONG_MONITOR: int + RDS_FREE_MR: int + RDS_GET_MR: int + RDS_GET_MR_FOR_DEST: int + RDS_RDMA_DONTWAIT: int + RDS_RDMA_FENCE: int + RDS_RDMA_INVALIDATE: int + RDS_RDMA_NOTIFY_ME: int + RDS_RDMA_READWRITE: int + RDS_RDMA_SILENT: int + RDS_RDMA_USE_ONCE: int + RDS_RECVERR: int + +if sys.platform == "win32": + SIO_RCVALL: int + SIO_KEEPALIVE_VALS: int + SIO_LOOPBACK_FAST_PATH: int + RCVALL_MAX: int + RCVALL_OFF: int + RCVALL_ON: int + RCVALL_SOCKETLEVELONLY: int + +if sys.platform == "linux": + AF_TIPC: int + SOL_TIPC: int + TIPC_ADDR_ID: int + TIPC_ADDR_NAME: int + TIPC_ADDR_NAMESEQ: int + TIPC_CFG_SRV: int + TIPC_CLUSTER_SCOPE: int + TIPC_CONN_TIMEOUT: int + TIPC_CRITICAL_IMPORTANCE: int + TIPC_DEST_DROPPABLE: int + TIPC_HIGH_IMPORTANCE: int + TIPC_IMPORTANCE: int + TIPC_LOW_IMPORTANCE: int + TIPC_MEDIUM_IMPORTANCE: int + TIPC_NODE_SCOPE: int + TIPC_PUBLISHED: int + TIPC_SRC_DROPPABLE: int + TIPC_SUBSCR_TIMEOUT: int + TIPC_SUB_CANCEL: int + TIPC_SUB_PORTS: int + TIPC_SUB_SERVICE: int + TIPC_TOP_SRV: int + TIPC_WAIT_FOREVER: int + TIPC_WITHDRAWN: int + TIPC_ZONE_SCOPE: int + +if sys.platform == "linux": + AF_ALG: int + 
SOL_ALG: int + ALG_OP_DECRYPT: int + ALG_OP_ENCRYPT: int + ALG_OP_SIGN: int + ALG_OP_VERIFY: int + ALG_SET_AEAD_ASSOCLEN: int + ALG_SET_AEAD_AUTHSIZE: int + ALG_SET_IV: int + ALG_SET_KEY: int + ALG_SET_OP: int + ALG_SET_PUBKEY: int + +if sys.platform == "linux": + AF_VSOCK: int + IOCTL_VM_SOCKETS_GET_LOCAL_CID: int + VMADDR_CID_ANY: int + VMADDR_CID_HOST: int + VMADDR_PORT_ANY: int + SO_VM_SOCKETS_BUFFER_MAX_SIZE: int + SO_VM_SOCKETS_BUFFER_SIZE: int + SO_VM_SOCKETS_BUFFER_MIN_SIZE: int + VM_SOCKETS_INVALID_VERSION: int + +if sys.platform != "win32" or sys.version_info >= (3, 9): + AF_LINK: int + +# BDADDR_* and HCI_* listed with other bluetooth constants below + +if sys.platform != "win32" and sys.platform != "darwin": + SO_DOMAIN: int + SO_PASSSEC: int + SO_PEERSEC: int + SO_PROTOCOL: int + TCP_CONGESTION: int + TCP_USER_TIMEOUT: int + +if sys.platform == "linux" and sys.version_info >= (3, 8): + AF_QIPCRTR: int + +# Semi-documented constants +# (Listed under "Socket families" in the docs, but not "Constants") + +if sys.platform == "linux": + # Netlink is defined by Linux + AF_NETLINK: int + NETLINK_ARPD: int + NETLINK_CRYPTO: int + NETLINK_DNRTMSG: int + NETLINK_FIREWALL: int + NETLINK_IP6_FW: int + NETLINK_NFLOG: int + NETLINK_ROUTE6: int + NETLINK_ROUTE: int + NETLINK_SKIP: int + NETLINK_TAPBASE: int + NETLINK_TCPDIAG: int + NETLINK_USERSOCK: int + NETLINK_W1: int + NETLINK_XFRM: int + +if sys.platform != "darwin": + if sys.platform != "win32" or sys.version_info >= (3, 9): + AF_BLUETOOTH: int + BDADDR_ANY: str + BDADDR_LOCAL: str + BTPROTO_RFCOMM: int + +if sys.platform != "win32" and sys.platform != "darwin": + # Linux and some BSD support is explicit in the docs + # Windows and macOS do not support in practice + BTPROTO_HCI: int + BTPROTO_L2CAP: int + BTPROTO_SCO: int # not in FreeBSD + HCI_FILTER: int # not in NetBSD or DragonFlyBSD + # not in FreeBSD, NetBSD, or DragonFlyBSD + HCI_TIME_STAMP: int + HCI_DATA_DIR: int + +if sys.platform == "darwin": + # PF_SYSTEM is defined by macOS + PF_SYSTEM: int + SYSPROTO_CONTROL: int + +# ----- Exceptions ----- + +error = OSError + +class herror(error): ... +class gaierror(error): ... + +if sys.version_info >= (3, 10): + timeout = TimeoutError +else: + class timeout(error): ... + +# ----- Classes ----- + +class socket: + @property + def family(self) -> int: ... + @property + def type(self) -> int: ... + @property + def proto(self) -> int: ... + @property + def timeout(self) -> float | None: ... + if sys.platform == "win32": + def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | bytes | None = ...) -> None: ... + else: + def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | None = ...) -> None: ... + + def bind(self, __address: _Address) -> None: ... + def close(self) -> None: ... + def connect(self, __address: _Address) -> None: ... + def connect_ex(self, __address: _Address) -> int: ... + def detach(self) -> int: ... + def fileno(self) -> int: ... + def getpeername(self) -> _RetAddress: ... + def getsockname(self) -> _RetAddress: ... + @overload + def getsockopt(self, __level: int, __optname: int) -> int: ... + @overload + def getsockopt(self, __level: int, __optname: int, __buflen: int) -> bytes: ... + def getblocking(self) -> bool: ... + def gettimeout(self) -> float | None: ... + if sys.platform == "win32": + def ioctl(self, __control: int, __option: int | tuple[int, int, int] | bool) -> None: ... + + def listen(self, __backlog: int = ...) -> None: ... 
+ def recv(self, __bufsize: int, __flags: int = ...) -> bytes: ... + def recvfrom(self, __bufsize: int, __flags: int = ...) -> tuple[bytes, _RetAddress]: ... + if sys.platform != "win32": + def recvmsg(self, __bufsize: int, __ancbufsize: int = ..., __flags: int = ...) -> tuple[bytes, list[_CMSG], int, Any]: ... + def recvmsg_into( + self, __buffers: Iterable[WriteableBuffer], __ancbufsize: int = ..., __flags: int = ... + ) -> tuple[int, list[_CMSG], int, Any]: ... + + def recvfrom_into(self, buffer: WriteableBuffer, nbytes: int = ..., flags: int = ...) -> tuple[int, _RetAddress]: ... + def recv_into(self, buffer: WriteableBuffer, nbytes: int = ..., flags: int = ...) -> int: ... + def send(self, __data: ReadableBuffer, __flags: int = ...) -> int: ... + def sendall(self, __data: ReadableBuffer, __flags: int = ...) -> None: ... + @overload + def sendto(self, __data: ReadableBuffer, __address: _Address) -> int: ... + @overload + def sendto(self, __data: ReadableBuffer, __flags: int, __address: _Address) -> int: ... + if sys.platform != "win32": + def sendmsg( + self, + __buffers: Iterable[ReadableBuffer], + __ancdata: Iterable[_CMSGArg] = ..., + __flags: int = ..., + __address: _Address | None = ..., + ) -> int: ... + if sys.platform == "linux": + def sendmsg_afalg( + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... + ) -> int: ... + + def setblocking(self, __flag: bool) -> None: ... + def settimeout(self, __value: float | None) -> None: ... + @overload + def setsockopt(self, __level: int, __optname: int, __value: int | ReadableBuffer) -> None: ... + @overload + def setsockopt(self, __level: int, __optname: int, __value: None, __optlen: int) -> None: ... + if sys.platform == "win32": + def share(self, __process_id: int) -> bytes: ... + + def shutdown(self, __how: int) -> None: ... + +SocketType = socket + +# ----- Functions ----- + +def close(__fd: _FD) -> None: ... +def dup(__fd: _FD) -> int: ... + +# the 5th tuple item is an address +def getaddrinfo( + host: bytes | str | None, + port: bytes | str | int | None, + family: int = ..., + type: int = ..., + proto: int = ..., + flags: int = ..., +) -> list[tuple[int, int, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... +def gethostbyname(__hostname: str) -> str: ... +def gethostbyname_ex(__hostname: str) -> tuple[str, list[str], list[str]]: ... +def gethostname() -> str: ... +def gethostbyaddr(__ip_address: str) -> tuple[str, list[str], list[str]]: ... +def getnameinfo(__sockaddr: tuple[str, int] | tuple[str, int, int, int], __flags: int) -> tuple[str, str]: ... +def getprotobyname(__protocolname: str) -> int: ... +def getservbyname(__servicename: str, __protocolname: str = ...) -> int: ... +def getservbyport(__port: int, __protocolname: str = ...) -> str: ... +def ntohl(__x: int) -> int: ... # param & ret val are 32-bit ints +def ntohs(__x: int) -> int: ... # param & ret val are 16-bit ints +def htonl(__x: int) -> int: ... # param & ret val are 32-bit ints +def htons(__x: int) -> int: ... # param & ret val are 16-bit ints +def inet_aton(__ip_string: str) -> bytes: ... # ret val 4 bytes in length +def inet_ntoa(__packed_ip: ReadableBuffer) -> str: ... +def inet_pton(__address_family: int, __ip_string: str) -> bytes: ... +def inet_ntop(__address_family: int, __packed_ip: ReadableBuffer) -> str: ... +def getdefaulttimeout() -> float | None: ... +def setdefaulttimeout(__timeout: float | None) -> None: ... + +if sys.platform != "win32": + def sethostname(__name: str) -> None: ... 
+ def CMSG_LEN(__length: int) -> int: ... + def CMSG_SPACE(__length: int) -> int: ... + def socketpair(__family: int = ..., __type: int = ..., __proto: int = ...) -> tuple[socket, socket]: ... + +# Windows added these in 3.8, but didn't have them before +if sys.platform != "win32" or sys.version_info >= (3, 8): + def if_nameindex() -> list[tuple[int, str]]: ... + def if_nametoindex(__name: str) -> int: ... + def if_indextoname(__index: int) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_stat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_stat.pyi new file mode 100644 index 00000000..83d832e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_stat.pyi @@ -0,0 +1,103 @@ +import sys +from typing_extensions import Literal + +SF_APPEND: Literal[0x00040000] +SF_ARCHIVED: Literal[0x00010000] +SF_IMMUTABLE: Literal[0x00020000] +SF_NOUNLINK: Literal[0x00100000] +SF_SNAPSHOT: Literal[0x00200000] + +ST_MODE: Literal[0] +ST_INO: Literal[1] +ST_DEV: Literal[2] +ST_NLINK: Literal[3] +ST_UID: Literal[4] +ST_GID: Literal[5] +ST_SIZE: Literal[6] +ST_ATIME: Literal[7] +ST_MTIME: Literal[8] +ST_CTIME: Literal[9] + +S_IFIFO: Literal[0o010000] +S_IFLNK: Literal[0o120000] +S_IFREG: Literal[0o100000] +S_IFSOCK: Literal[0o140000] +S_IFBLK: Literal[0o060000] +S_IFCHR: Literal[0o020000] +S_IFDIR: Literal[0o040000] + +# These are 0 on systems that don't support the specific kind of file. +# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux. +S_IFDOOR: int +S_IFPORT: int +S_IFWHT: int + +S_ISUID: Literal[0o4000] +S_ISGID: Literal[0o2000] +S_ISVTX: Literal[0o1000] + +S_IRWXU: Literal[0o0700] +S_IRUSR: Literal[0o0400] +S_IWUSR: Literal[0o0200] +S_IXUSR: Literal[0o0100] + +S_IRWXG: Literal[0o0070] +S_IRGRP: Literal[0o0040] +S_IWGRP: Literal[0o0020] +S_IXGRP: Literal[0o0010] + +S_IRWXO: Literal[0o0007] +S_IROTH: Literal[0o0004] +S_IWOTH: Literal[0o0002] +S_IXOTH: Literal[0o0001] + +S_ENFMT: Literal[0o2000] +S_IREAD: Literal[0o0400] +S_IWRITE: Literal[0o0200] +S_IEXEC: Literal[0o0100] + +UF_APPEND: Literal[0x00000004] +UF_COMPRESSED: Literal[0x00000020] # OS X 10.6+ only +UF_HIDDEN: Literal[0x00008000] # OX X 10.5+ only +UF_IMMUTABLE: Literal[0x00000002] +UF_NODUMP: Literal[0x00000001] +UF_NOUNLINK: Literal[0x00000010] +UF_OPAQUE: Literal[0x00000008] + +def S_IMODE(mode: int) -> int: ... +def S_IFMT(mode: int) -> int: ... +def S_ISBLK(mode: int) -> bool: ... +def S_ISCHR(mode: int) -> bool: ... +def S_ISDIR(mode: int) -> bool: ... +def S_ISDOOR(mode: int) -> bool: ... +def S_ISFIFO(mode: int) -> bool: ... +def S_ISLNK(mode: int) -> bool: ... +def S_ISPORT(mode: int) -> bool: ... +def S_ISREG(mode: int) -> bool: ... +def S_ISSOCK(mode: int) -> bool: ... +def S_ISWHT(mode: int) -> bool: ... +def filemode(mode: int) -> str: ... 
+ +if sys.platform == "win32" and sys.version_info >= (3, 8): + IO_REPARSE_TAG_SYMLINK: int + IO_REPARSE_TAG_MOUNT_POINT: int + IO_REPARSE_TAG_APPEXECLINK: int + +if sys.platform == "win32": + FILE_ATTRIBUTE_ARCHIVE: Literal[32] + FILE_ATTRIBUTE_COMPRESSED: Literal[2048] + FILE_ATTRIBUTE_DEVICE: Literal[64] + FILE_ATTRIBUTE_DIRECTORY: Literal[16] + FILE_ATTRIBUTE_ENCRYPTED: Literal[16384] + FILE_ATTRIBUTE_HIDDEN: Literal[2] + FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768] + FILE_ATTRIBUTE_NORMAL: Literal[128] + FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192] + FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072] + FILE_ATTRIBUTE_OFFLINE: Literal[4096] + FILE_ATTRIBUTE_READONLY: Literal[1] + FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024] + FILE_ATTRIBUTE_SPARSE_FILE: Literal[512] + FILE_ATTRIBUTE_SYSTEM: Literal[4] + FILE_ATTRIBUTE_TEMPORARY: Literal[256] + FILE_ATTRIBUTE_VIRTUAL: Literal[65536] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_thread.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_thread.pyi new file mode 100644 index 00000000..152362ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_thread.pyi @@ -0,0 +1,45 @@ +import sys +from _typeshed import structseq +from collections.abc import Callable +from threading import Thread +from types import TracebackType +from typing import Any, NoReturn +from typing_extensions import Final, final + +error = RuntimeError + +def _count() -> int: ... +@final +class LockType: + def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... + def release(self) -> None: ... + def locked(self) -> bool: ... + def __enter__(self) -> bool: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + +def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any] = ...) -> int: ... +def interrupt_main() -> None: ... +def exit() -> NoReturn: ... +def allocate_lock() -> LockType: ... +def get_ident() -> int: ... +def stack_size(size: int = ...) -> int: ... + +TIMEOUT_MAX: float + +if sys.version_info >= (3, 8): + def get_native_id() -> int: ... # only available on some platforms + @final + class _ExceptHookArgs(structseq[Any], tuple[type[BaseException], BaseException | None, TracebackType | None, Thread | None]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("exc_type", "exc_value", "exc_traceback", "thread") + @property + def exc_type(self) -> type[BaseException]: ... + @property + def exc_value(self) -> BaseException | None: ... + @property + def exc_traceback(self) -> TracebackType | None: ... + @property + def thread(self) -> Thread | None: ... + _excepthook: Callable[[_ExceptHookArgs], Any] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_threading_local.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_threading_local.pyi new file mode 100644 index 00000000..98683dab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_threading_local.pyi @@ -0,0 +1,17 @@ +from typing import Any +from typing_extensions import TypeAlias +from weakref import ReferenceType + +__all__ = ["local"] +_LocalDict: TypeAlias = dict[Any, Any] + +class _localimpl: + key: str + dicts: dict[int, tuple[ReferenceType[Any], _LocalDict]] + def get_dict(self) -> _LocalDict: ... 
+ def create_dict(self) -> _LocalDict: ... + +class local: + def __getattribute__(self, name: str) -> Any: ... + def __setattr__(self, name: str, value: Any) -> None: ... + def __delattr__(self, name: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_tkinter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_tkinter.pyi new file mode 100644 index 00000000..271fd37d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_tkinter.pyi @@ -0,0 +1,135 @@ +import sys +from typing import Any, ClassVar +from typing_extensions import Literal, final + +# _tkinter is meant to be only used internally by tkinter, but some tkinter +# functions e.g. return _tkinter.Tcl_Obj objects. Tcl_Obj represents a Tcl +# object that hasn't been converted to a string. +# +# There are not many ways to get Tcl_Objs from tkinter, and I'm not sure if the +# only existing ways are supposed to return Tcl_Objs as opposed to returning +# strings. Here's one of these things that return Tcl_Objs: +# +# >>> import tkinter +# >>> text = tkinter.Text() +# >>> text.tag_add('foo', '1.0', 'end') +# >>> text.tag_ranges('foo') +# (, ) +@final +class Tcl_Obj: + @property + def string(self) -> str: ... + @property + def typename(self) -> str: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __eq__(self, __other): ... + def __ge__(self, __other): ... + def __gt__(self, __other): ... + def __le__(self, __other): ... + def __lt__(self, __other): ... + def __ne__(self, __other): ... + +class TclError(Exception): ... + +# This class allows running Tcl code. Tkinter uses it internally a lot, and +# it's often handy to drop a piece of Tcl code into a tkinter program. Example: +# +# >>> import tkinter, _tkinter +# >>> tkapp = tkinter.Tk().tk +# >>> isinstance(tkapp, _tkinter.TkappType) +# True +# >>> tkapp.call('set', 'foo', (1,2,3)) +# (1, 2, 3) +# >>> tkapp.eval('return $foo') +# '1 2 3' +# >>> +# +# call args can be pretty much anything. Also, call(some_tuple) is same as call(*some_tuple). +# +# eval always returns str because _tkinter_tkapp_eval_impl in _tkinter.c calls +# Tkapp_UnicodeResult, and it returns a string when it succeeds. +@final +class TkappType: + # Please keep in sync with tkinter.Tk + def adderrorinfo(self, __msg): ... + def call(self, __command: Any, *args: Any) -> Any: ... + def createcommand(self, __name, __func): ... + if sys.platform != "win32": + def createfilehandler(self, __file, __mask, __func): ... + def deletefilehandler(self, __file): ... + + def createtimerhandler(self, __milliseconds, __func): ... + def deletecommand(self, __name): ... + def dooneevent(self, __flags: int = 0): ... + def eval(self, __script: str) -> str: ... + def evalfile(self, __fileName): ... + def exprboolean(self, __s): ... + def exprdouble(self, __s): ... + def exprlong(self, __s): ... + def exprstring(self, __s): ... + def getboolean(self, __arg): ... + def getdouble(self, __arg): ... + def getint(self, __arg): ... + def getvar(self, *args, **kwargs): ... + def globalgetvar(self, *args, **kwargs): ... + def globalsetvar(self, *args, **kwargs): ... + def globalunsetvar(self, *args, **kwargs): ... + def interpaddr(self): ... + def loadtk(self) -> None: ... + def mainloop(self, __threshold: int = 0): ... + def quit(self): ... + def record(self, __script): ... + def setvar(self, *ags, **kwargs): ... + if sys.version_info < (3, 11): + def split(self, __arg): ... + + def splitlist(self, __arg): ... 
+ def unsetvar(self, *args, **kwargs): ... + def wantobjects(self, *args, **kwargs): ... + def willdispatch(self): ... + +# These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS +ALL_EVENTS: Literal[-3] +FILE_EVENTS: Literal[8] +IDLE_EVENTS: Literal[32] +TIMER_EVENTS: Literal[16] +WINDOW_EVENTS: Literal[4] + +DONT_WAIT: Literal[2] +EXCEPTION: Literal[8] +READABLE: Literal[2] +WRITABLE: Literal[4] + +TCL_VERSION: str +TK_VERSION: str + +@final +class TkttType: + def deletetimerhandler(self): ... + +if sys.version_info >= (3, 8): + def create( + __screenName: str | None = None, + __baseName: str = "", + __className: str = "Tk", + __interactive: bool = False, + __wantobjects: bool = False, + __wantTk: bool = True, + __sync: bool = False, + __use: str | None = None, + ): ... + +else: + def create( + __screenName: str | None = None, + __baseName: str | None = None, + __className: str = "Tk", + __interactive: bool = False, + __wantobjects: bool = False, + __wantTk: bool = True, + __sync: bool = False, + __use: str | None = None, + ): ... + +def getbusywaitinterval(): ... +def setbusywaitinterval(__new_val): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_tracemalloc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_tracemalloc.pyi new file mode 100644 index 00000000..1b79d9dc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_tracemalloc.pyi @@ -0,0 +1,17 @@ +import sys +from collections.abc import Sequence +from tracemalloc import _FrameTuple, _TraceTuple + +def _get_object_traceback(__obj: object) -> Sequence[_FrameTuple] | None: ... +def _get_traces() -> Sequence[_TraceTuple]: ... +def clear_traces() -> None: ... +def get_traceback_limit() -> int: ... +def get_traced_memory() -> tuple[int, int]: ... +def get_tracemalloc_memory() -> int: ... +def is_tracing() -> bool: ... + +if sys.version_info >= (3, 9): + def reset_peak() -> None: ... + +def start(__nframe: int = 1) -> None: ... +def stop() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/README.md b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/README.md new file mode 100644 index 00000000..f4808944 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/README.md @@ -0,0 +1,34 @@ +# Utility types for typeshed + +This package and its submodules contains various common types used by +typeshed. It can also be used by packages outside typeshed, but beware +the API stability guarantees below. + +## Usage + +The `_typeshed` package and its types do not exist at runtime, but can be +used freely in stubs (`.pyi`) files. To import the types from this package in +implementation (`.py`) files, use the following construct: + +```python +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from _typeshed import ... +``` + +Types can then be used in annotations by either quoting them or +using: + +```python +from __future__ import annotations +``` + +## API Stability + +You can use this package and its submodules outside of typeshed, but we +guarantee only limited API stability. Items marked as "stable" will not be +removed or changed in an incompatible way for at least one year. +Before making such a change, the "stable" moniker will be removed +and we will mark the type in question as deprecated. 
No guarantees +are made about unmarked types. diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/__init__.pyi new file mode 100644 index 00000000..d0c6b3ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/__init__.pyi @@ -0,0 +1,314 @@ +# Utility types for typeshed +# +# See the README.md file in this directory for more information. + +import array +import ctypes +import mmap +import pickle +import sys +from collections.abc import Awaitable, Callable, Iterable, Set as AbstractSet +from dataclasses import Field +from os import PathLike +from types import FrameType, TracebackType +from typing import Any, AnyStr, ClassVar, Generic, Protocol, TypeVar +from typing_extensions import Final, Literal, LiteralString, TypeAlias, final + +_KT = TypeVar("_KT") +_KT_co = TypeVar("_KT_co", covariant=True) +_KT_contra = TypeVar("_KT_contra", contravariant=True) +_VT = TypeVar("_VT") +_VT_co = TypeVar("_VT_co", covariant=True) +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) + +# Use for "self" annotations: +# def __enter__(self: Self) -> Self: ... +Self = TypeVar("Self") # noqa: Y001 + +# covariant version of typing.AnyStr, useful for protocols +AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True) # noqa: Y001 + +# For partially known annotations. Usually, fields where type annotations +# haven't been added are left unannotated, but in some situations this +# isn't possible or a type is already partially known. In cases like these, +# use Incomplete instead of Any as a marker. For example, use +# "Incomplete | None" instead of "Any | None". +Incomplete: TypeAlias = Any + +# To describe a function parameter that is unused and will work with anything. +Unused: TypeAlias = object + +# stable +class IdentityFunction(Protocol): + def __call__(self, __x: _T) -> _T: ... + +# stable +class SupportsNext(Protocol[_T_co]): + def __next__(self) -> _T_co: ... + +# stable +class SupportsAnext(Protocol[_T_co]): + def __anext__(self) -> Awaitable[_T_co]: ... + +# Comparison protocols + +class SupportsDunderLT(Protocol[_T_contra]): + def __lt__(self, __other: _T_contra) -> bool: ... + +class SupportsDunderGT(Protocol[_T_contra]): + def __gt__(self, __other: _T_contra) -> bool: ... + +class SupportsDunderLE(Protocol[_T_contra]): + def __le__(self, __other: _T_contra) -> bool: ... + +class SupportsDunderGE(Protocol[_T_contra]): + def __ge__(self, __other: _T_contra) -> bool: ... + +class SupportsAllComparisons( + SupportsDunderLT[Any], SupportsDunderGT[Any], SupportsDunderLE[Any], SupportsDunderGE[Any], Protocol +): ... + +SupportsRichComparison: TypeAlias = SupportsDunderLT[Any] | SupportsDunderGT[Any] +SupportsRichComparisonT = TypeVar("SupportsRichComparisonT", bound=SupportsRichComparison) # noqa: Y001 + +# Dunder protocols + +class SupportsAdd(Protocol[_T_contra, _T_co]): + def __add__(self, __x: _T_contra) -> _T_co: ... + +class SupportsRAdd(Protocol[_T_contra, _T_co]): + def __radd__(self, __x: _T_contra) -> _T_co: ... + +class SupportsSub(Protocol[_T_contra, _T_co]): + def __sub__(self, __x: _T_contra) -> _T_co: ... + +class SupportsRSub(Protocol[_T_contra, _T_co]): + def __rsub__(self, __x: _T_contra) -> _T_co: ... + +class SupportsDivMod(Protocol[_T_contra, _T_co]): + def __divmod__(self, __other: _T_contra) -> _T_co: ... 
+ +class SupportsRDivMod(Protocol[_T_contra, _T_co]): + def __rdivmod__(self, __other: _T_contra) -> _T_co: ... + +# This protocol is generic over the iterator type, while Iterable is +# generic over the type that is iterated over. +class SupportsIter(Protocol[_T_co]): + def __iter__(self) -> _T_co: ... + +# This protocol is generic over the iterator type, while AsyncIterable is +# generic over the type that is iterated over. +class SupportsAiter(Protocol[_T_co]): + def __aiter__(self) -> _T_co: ... + +class SupportsLenAndGetItem(Protocol[_T_co]): + def __len__(self) -> int: ... + def __getitem__(self, __k: int) -> _T_co: ... + +class SupportsTrunc(Protocol): + def __trunc__(self) -> int: ... + +# Mapping-like protocols + +# stable +class SupportsItems(Protocol[_KT_co, _VT_co]): + def items(self) -> AbstractSet[tuple[_KT_co, _VT_co]]: ... + +# stable +class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): + def keys(self) -> Iterable[_KT]: ... + def __getitem__(self, __key: _KT) -> _VT_co: ... + +# stable +class SupportsGetItem(Protocol[_KT_contra, _VT_co]): + def __contains__(self, __x: Any) -> bool: ... + def __getitem__(self, __key: _KT_contra) -> _VT_co: ... + +# stable +class SupportsItemAccess(SupportsGetItem[_KT_contra, _VT], Protocol[_KT_contra, _VT]): + def __setitem__(self, __key: _KT_contra, __value: _VT) -> None: ... + def __delitem__(self, __key: _KT_contra) -> None: ... + +StrPath: TypeAlias = str | PathLike[str] # stable +BytesPath: TypeAlias = bytes | PathLike[bytes] # stable +GenericPath: TypeAlias = AnyStr | PathLike[AnyStr] +StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes] # stable + +OpenTextModeUpdating: TypeAlias = Literal[ + "r+", + "+r", + "rt+", + "r+t", + "+rt", + "tr+", + "t+r", + "+tr", + "w+", + "+w", + "wt+", + "w+t", + "+wt", + "tw+", + "t+w", + "+tw", + "a+", + "+a", + "at+", + "a+t", + "+at", + "ta+", + "t+a", + "+ta", + "x+", + "+x", + "xt+", + "x+t", + "+xt", + "tx+", + "t+x", + "+tx", +] +OpenTextModeWriting: TypeAlias = Literal["w", "wt", "tw", "a", "at", "ta", "x", "xt", "tx"] +OpenTextModeReading: TypeAlias = Literal["r", "rt", "tr", "U", "rU", "Ur", "rtU", "rUt", "Urt", "trU", "tUr", "Utr"] +OpenTextMode: TypeAlias = OpenTextModeUpdating | OpenTextModeWriting | OpenTextModeReading +OpenBinaryModeUpdating: TypeAlias = Literal[ + "rb+", + "r+b", + "+rb", + "br+", + "b+r", + "+br", + "wb+", + "w+b", + "+wb", + "bw+", + "b+w", + "+bw", + "ab+", + "a+b", + "+ab", + "ba+", + "b+a", + "+ba", + "xb+", + "x+b", + "+xb", + "bx+", + "b+x", + "+bx", +] +OpenBinaryModeWriting: TypeAlias = Literal["wb", "bw", "ab", "ba", "xb", "bx"] +OpenBinaryModeReading: TypeAlias = Literal["rb", "br", "rbU", "rUb", "Urb", "brU", "bUr", "Ubr"] +OpenBinaryMode: TypeAlias = OpenBinaryModeUpdating | OpenBinaryModeReading | OpenBinaryModeWriting + +# stable +class HasFileno(Protocol): + def fileno(self) -> int: ... + +FileDescriptor: TypeAlias = int # stable +FileDescriptorLike: TypeAlias = int | HasFileno # stable +FileDescriptorOrPath: TypeAlias = int | StrOrBytesPath + +# stable +class SupportsRead(Protocol[_T_co]): + def read(self, __length: int = ...) -> _T_co: ... + +# stable +class SupportsReadline(Protocol[_T_co]): + def readline(self, __length: int = ...) -> _T_co: ... + +# stable +class SupportsNoArgReadline(Protocol[_T_co]): + def readline(self) -> _T_co: ... + +# stable +class SupportsWrite(Protocol[_T_contra]): + def write(self, __s: _T_contra) -> object: ... 
+ +ReadOnlyBuffer: TypeAlias = bytes # stable +# Anything that implements the read-write buffer interface. +# The buffer interface is defined purely on the C level, so we cannot define a normal Protocol +# for it (until PEP 688 is implemented). Instead we have to list the most common stdlib buffer classes in a Union. +if sys.version_info >= (3, 8): + WriteableBuffer: TypeAlias = ( + bytearray | memoryview | array.array[Any] | mmap.mmap | ctypes._CData | pickle.PickleBuffer + ) # stable +else: + WriteableBuffer: TypeAlias = bytearray | memoryview | array.array[Any] | mmap.mmap | ctypes._CData # stable +# Same as _WriteableBuffer, but also includes read-only buffer types (like bytes). +ReadableBuffer: TypeAlias = ReadOnlyBuffer | WriteableBuffer # stable +_BufferWithLen: TypeAlias = ReadableBuffer # not stable # noqa: Y047 + +# Anything that implements the read-write buffer interface, and can be sliced/indexed. +SliceableBuffer: TypeAlias = bytes | bytearray | memoryview | array.array[Any] | mmap.mmap +IndexableBuffer: TypeAlias = bytes | bytearray | memoryview | array.array[Any] | mmap.mmap +# https://github.com/python/typeshed/pull/9115#issuecomment-1304905864 +# Post PEP 688, they should be rewritten as such: +# from collections.abc import Sequence +# from typing import Sized, overload +# class SliceableBuffer(Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __getitem__(self, __slice: slice) -> Sequence[int]: ... +# class IndexableBuffer(Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __getitem__(self, __i: int) -> int: ... +# class SupportsGetItemBuffer(SliceableBuffer, IndexableBuffer, Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __contains__(self, __x: Any) -> bool: ... +# @overload +# def __getitem__(self, __slice: slice) -> Sequence[int]: ... +# @overload +# def __getitem__(self, __i: int) -> int: ... +# class SizedBuffer(Sized, Protocol): # instead of _BufferWithLen +# def __buffer__(self, __flags: int) -> memoryview: ... + +ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType] +OptExcInfo: TypeAlias = ExcInfo | tuple[None, None, None] + +# stable +if sys.version_info >= (3, 10): + from types import NoneType as NoneType +else: + # Used by type checkers for checks involving None (does not exist at runtime) + @final + class NoneType: + def __bool__(self) -> Literal[False]: ... + +# This is an internal CPython type that is like, but subtly different from, a NamedTuple +# Subclasses of this type are found in multiple modules. +# In typeshed, `structseq` is only ever used as a mixin in combination with a fixed-length `Tuple` +# See discussion at #6546 & #6560 +# `structseq` classes are unsubclassable, so are all decorated with `@final`. +class structseq(Generic[_T_co]): + n_fields: Final[int] + n_unnamed_fields: Final[int] + n_sequence_fields: Final[int] + # The first parameter will generally only take an iterable of a specific length. + # E.g. `os.uname_result` takes any iterable of length exactly 5. + # + # The second parameter will accept a dict of any kind without raising an exception, + # but only has any meaning if you supply it a dict where the keys are strings. + # https://github.com/python/typeshed/pull/6560#discussion_r767149830 + def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ... 
+ +# Superset of typing.AnyStr that also inclues LiteralString +AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) # noqa: Y001 + +# Represents when str or LiteralStr is acceptable. Useful for string processing +# APIs where literalness of return value depends on literalness of inputs +StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) # noqa: Y001 + +# Objects suitable to be passed to sys.setprofile, threading.setprofile, and similar +ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] + +# Objects suitable to be passed to sys.settrace, threading.settrace, and similar +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] + +# experimental +# Might not work as expected for pyright, see +# https://github.com/python/typeshed/pull/9362 +# https://github.com/microsoft/pyright/issues/4339 +class DataclassInstance(Protocol): + __dataclass_fields__: ClassVar[dict[str, Field[Any]]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/dbapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/dbapi.pyi new file mode 100644 index 00000000..022e9599 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/dbapi.pyi @@ -0,0 +1,37 @@ +# PEP 249 Database API 2.0 Types +# https://www.python.org/dev/peps/pep-0249/ + +from collections.abc import Mapping, Sequence +from typing import Any, Protocol +from typing_extensions import TypeAlias + +DBAPITypeCode: TypeAlias = Any | None +# Strictly speaking, this should be a Sequence, but the type system does +# not support fixed-length sequences. +DBAPIColumnDescription: TypeAlias = tuple[str, DBAPITypeCode, int | None, int | None, int | None, int | None, bool | None] + +class DBAPIConnection(Protocol): + def close(self) -> object: ... + def commit(self) -> object: ... + # optional: + # def rollback(self) -> Any: ... + def cursor(self) -> DBAPICursor: ... + +class DBAPICursor(Protocol): + @property + def description(self) -> Sequence[DBAPIColumnDescription] | None: ... + @property + def rowcount(self) -> int: ... + # optional: + # def callproc(self, __procname: str, __parameters: Sequence[Any] = ...) -> Sequence[Any]: ... + def close(self) -> object: ... + def execute(self, __operation: str, __parameters: Sequence[Any] | Mapping[str, Any] = ...) -> object: ... + def executemany(self, __operation: str, __seq_of_parameters: Sequence[Sequence[Any]]) -> object: ... + def fetchone(self) -> Sequence[Any] | None: ... + def fetchmany(self, __size: int = ...) -> Sequence[Sequence[Any]]: ... + def fetchall(self) -> Sequence[Sequence[Any]]: ... + # optional: + # def nextset(self) -> None | Literal[True]: ... + arraysize: int + def setinputsizes(self, __sizes: Sequence[DBAPITypeCode | int | None]) -> object: ... + def setoutputsize(self, __size: int, __column: int = ...) -> object: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/wsgi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/wsgi.pyi new file mode 100644 index 00000000..de731aea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/wsgi.pyi @@ -0,0 +1,44 @@ +# Types to support PEP 3333 (WSGI) +# +# Obsolete since Python 3.11: Use wsgiref.types instead. +# +# See the README.md file in this directory for more information. 
+ +import sys +from _typeshed import OptExcInfo +from collections.abc import Callable, Iterable, Iterator +from typing import Any, Protocol +from typing_extensions import TypeAlias + +class _Readable(Protocol): + def read(self, size: int = ...) -> bytes: ... + # Optional: def close(self) -> object: ... + +if sys.version_info >= (3, 11): + from wsgiref.types import * +else: + # stable + class StartResponse(Protocol): + def __call__( + self, __status: str, __headers: list[tuple[str, str]], __exc_info: OptExcInfo | None = ... + ) -> Callable[[bytes], object]: ... + + WSGIEnvironment: TypeAlias = dict[str, Any] # stable + WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] # stable + + # WSGI input streams per PEP 3333, stable + class InputStream(Protocol): + def read(self, __size: int = ...) -> bytes: ... + def readline(self, __size: int = ...) -> bytes: ... + def readlines(self, __hint: int = ...) -> list[bytes]: ... + def __iter__(self) -> Iterator[bytes]: ... + + # WSGI error streams per PEP 3333, stable + class ErrorStream(Protocol): + def flush(self) -> object: ... + def write(self, __s: str) -> object: ... + def writelines(self, __seq: list[str]) -> object: ... + + # Optional file wrapper in wsgi.file_wrapper + class FileWrapper(Protocol): + def __call__(self, __file: _Readable, __block_size: int = ...) -> Iterable[bytes]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/xml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/xml.pyi new file mode 100644 index 00000000..231c2b86 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_typeshed/xml.pyi @@ -0,0 +1,9 @@ +# See the README.md file in this directory for more information. + +from typing import Any, Protocol + +# As defined https://docs.python.org/3/library/xml.dom.html#domimplementation-objects +class DOMImplementation(Protocol): + def hasFeature(self, feature: str, version: str | None) -> bool: ... + def createDocument(self, namespaceUri: str, qualifiedName: str, doctype: Any | None) -> Any: ... + def createDocumentType(self, qualifiedName: str, publicId: str, systemId: str) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_warnings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_warnings.pyi new file mode 100644 index 00000000..0981dfea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_warnings.pyi @@ -0,0 +1,32 @@ +from typing import Any, overload + +_defaultaction: str +_onceregistry: dict[Any, Any] +filters: list[tuple[str, str | None, type[Warning], str | None, int]] + +@overload +def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: ... +@overload +def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ... +@overload +def warn_explicit( + message: str, + category: type[Warning], + filename: str, + lineno: int, + module: str | None = ..., + registry: dict[str | tuple[str, type[Warning], int], int] | None = ..., + module_globals: dict[str, Any] | None = ..., + source: Any | None = ..., +) -> None: ... 
+@overload +def warn_explicit( + message: Warning, + category: Any, + filename: str, + lineno: int, + module: str | None = ..., + registry: dict[str | tuple[str, type[Warning], int], int] | None = ..., + module_globals: dict[str, Any] | None = ..., + source: Any | None = ..., +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_weakref.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_weakref.pyi new file mode 100644 index 00000000..2a43de3f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_weakref.pyi @@ -0,0 +1,37 @@ +import sys +from collections.abc import Callable +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_C = TypeVar("_C", bound=Callable[..., Any]) +_T = TypeVar("_T") + +@final +class CallableProxyType(Generic[_C]): # "weakcallableproxy" + def __getattr__(self, attr: str) -> Any: ... + __call__: _C + +@final +class ProxyType(Generic[_T]): # "weakproxy" + def __getattr__(self, attr: str) -> Any: ... + +class ReferenceType(Generic[_T]): + __callback__: Callable[[ReferenceType[_T]], Any] + def __new__(cls, o: _T, callback: Callable[[ReferenceType[_T]], Any] | None = ...) -> Self: ... + def __call__(self) -> _T | None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +ref = ReferenceType + +def getweakrefcount(__object: Any) -> int: ... +def getweakrefs(__object: Any) -> list[Any]: ... + +# Return CallableProxyType if object is callable, ProxyType otherwise +@overload +def proxy(__object: _C, __callback: Callable[[_C], Any] | None = None) -> CallableProxyType[_C]: ... +@overload +def proxy(__object: _T, __callback: Callable[[_T], Any] | None = None) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_weakrefset.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_weakrefset.pyi new file mode 100644 index 00000000..d73d7915 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_weakrefset.pyi @@ -0,0 +1,51 @@ +import sys +from collections.abc import Iterable, Iterator, MutableSet +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ["WeakSet"] + +_S = TypeVar("_S") +_T = TypeVar("_T") + +class WeakSet(MutableSet[_T], Generic[_T]): + @overload + def __init__(self, data: None = None) -> None: ... + @overload + def __init__(self, data: Iterable[_T]) -> None: ... + def add(self, item: _T) -> None: ... + def discard(self, item: _T) -> None: ... + def copy(self) -> Self: ... + def remove(self, item: _T) -> None: ... + def update(self, other: Iterable[_T]) -> None: ... + def __contains__(self, item: object) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... + def __ior__(self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] + def difference(self, other: Iterable[_T]) -> Self: ... + def __sub__(self, other: Iterable[Any]) -> Self: ... + def difference_update(self, other: Iterable[Any]) -> None: ... + def __isub__(self, other: Iterable[Any]) -> Self: ... + def intersection(self, other: Iterable[_T]) -> Self: ... + def __and__(self, other: Iterable[Any]) -> Self: ... 
+ def intersection_update(self, other: Iterable[Any]) -> None: ... + def __iand__(self, other: Iterable[Any]) -> Self: ... + def issubset(self, other: Iterable[_T]) -> bool: ... + def __le__(self, other: Iterable[_T]) -> bool: ... + def __lt__(self, other: Iterable[_T]) -> bool: ... + def issuperset(self, other: Iterable[_T]) -> bool: ... + def __ge__(self, other: Iterable[_T]) -> bool: ... + def __gt__(self, other: Iterable[_T]) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def symmetric_difference(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... + def __xor__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... + def symmetric_difference_update(self, other: Iterable[_T]) -> None: ... + def __ixor__(self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] + def union(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... + def __or__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... + def isdisjoint(self, other: Iterable[_T]) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_winapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_winapi.pyi new file mode 100644 index 00000000..e21402b8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/_winapi.pyi @@ -0,0 +1,223 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Sequence +from typing import Any, NoReturn, overload +from typing_extensions import Literal, final + +if sys.platform == "win32": + ABOVE_NORMAL_PRIORITY_CLASS: Literal[0x8000] + BELOW_NORMAL_PRIORITY_CLASS: Literal[0x4000] + + CREATE_BREAKAWAY_FROM_JOB: Literal[0x1000000] + CREATE_DEFAULT_ERROR_MODE: Literal[0x4000000] + CREATE_NO_WINDOW: Literal[0x8000000] + CREATE_NEW_CONSOLE: Literal[0x10] + CREATE_NEW_PROCESS_GROUP: Literal[0x200] + + DETACHED_PROCESS: Literal[8] + DUPLICATE_CLOSE_SOURCE: Literal[1] + DUPLICATE_SAME_ACCESS: Literal[2] + + ERROR_ALREADY_EXISTS: Literal[183] + ERROR_BROKEN_PIPE: Literal[109] + ERROR_IO_PENDING: Literal[997] + ERROR_MORE_DATA: Literal[234] + ERROR_NETNAME_DELETED: Literal[64] + ERROR_NO_DATA: Literal[232] + ERROR_NO_SYSTEM_RESOURCES: Literal[1450] + ERROR_OPERATION_ABORTED: Literal[995] + ERROR_PIPE_BUSY: Literal[231] + ERROR_PIPE_CONNECTED: Literal[535] + ERROR_SEM_TIMEOUT: Literal[121] + + FILE_FLAG_FIRST_PIPE_INSTANCE: Literal[0x80000] + FILE_FLAG_OVERLAPPED: Literal[0x40000000] + + FILE_GENERIC_READ: Literal[1179785] + FILE_GENERIC_WRITE: Literal[1179926] + + if sys.version_info >= (3, 8): + FILE_MAP_ALL_ACCESS: Literal[983071] + FILE_MAP_COPY: Literal[1] + FILE_MAP_EXECUTE: Literal[32] + FILE_MAP_READ: Literal[4] + FILE_MAP_WRITE: Literal[2] + + FILE_TYPE_CHAR: Literal[2] + FILE_TYPE_DISK: Literal[1] + FILE_TYPE_PIPE: Literal[3] + FILE_TYPE_REMOTE: Literal[32768] + FILE_TYPE_UNKNOWN: Literal[0] + + GENERIC_READ: Literal[0x80000000] + GENERIC_WRITE: Literal[0x40000000] + HIGH_PRIORITY_CLASS: Literal[0x80] + INFINITE: Literal[0xFFFFFFFF] + if sys.version_info >= (3, 8): + INVALID_HANDLE_VALUE: Literal[0xFFFFFFFFFFFFFFFF] + IDLE_PRIORITY_CLASS: Literal[0x40] + NORMAL_PRIORITY_CLASS: Literal[0x20] + REALTIME_PRIORITY_CLASS: Literal[0x100] + NMPWAIT_WAIT_FOREVER: Literal[0xFFFFFFFF] + + if sys.version_info >= (3, 8): + MEM_COMMIT: Literal[0x1000] + MEM_FREE: Literal[0x10000] + MEM_IMAGE: Literal[0x1000000] + MEM_MAPPED: Literal[0x40000] + MEM_PRIVATE: 
Literal[0x20000] + MEM_RESERVE: Literal[0x2000] + + NULL: Literal[0] + OPEN_EXISTING: Literal[3] + + PIPE_ACCESS_DUPLEX: Literal[3] + PIPE_ACCESS_INBOUND: Literal[1] + PIPE_READMODE_MESSAGE: Literal[2] + PIPE_TYPE_MESSAGE: Literal[4] + PIPE_UNLIMITED_INSTANCES: Literal[255] + PIPE_WAIT: Literal[0] + + if sys.version_info >= (3, 8): + PAGE_EXECUTE: Literal[0x10] + PAGE_EXECUTE_READ: Literal[0x20] + PAGE_EXECUTE_READWRITE: Literal[0x40] + PAGE_EXECUTE_WRITECOPY: Literal[0x80] + PAGE_GUARD: Literal[0x100] + PAGE_NOACCESS: Literal[0x1] + PAGE_NOCACHE: Literal[0x200] + PAGE_READONLY: Literal[0x2] + PAGE_READWRITE: Literal[0x4] + PAGE_WRITECOMBINE: Literal[0x400] + PAGE_WRITECOPY: Literal[0x8] + + PROCESS_ALL_ACCESS: Literal[0x1FFFFF] + PROCESS_DUP_HANDLE: Literal[0x40] + + if sys.version_info >= (3, 8): + SEC_COMMIT: Literal[0x8000000] + SEC_IMAGE: Literal[0x1000000] + SEC_LARGE_PAGES: Literal[0x80000000] + SEC_NOCACHE: Literal[0x10000000] + SEC_RESERVE: Literal[0x4000000] + SEC_WRITECOMBINE: Literal[0x40000000] + + STARTF_USESHOWWINDOW: Literal[0x1] + STARTF_USESTDHANDLES: Literal[0x100] + + STD_ERROR_HANDLE: Literal[0xFFFFFFF4] + STD_OUTPUT_HANDLE: Literal[0xFFFFFFF5] + STD_INPUT_HANDLE: Literal[0xFFFFFFF6] + + STILL_ACTIVE: Literal[259] + SW_HIDE: Literal[0] + if sys.version_info >= (3, 8): + SYNCHRONIZE: Literal[0x100000] + WAIT_ABANDONED_0: Literal[128] + WAIT_OBJECT_0: Literal[0] + WAIT_TIMEOUT: Literal[258] + + if sys.version_info >= (3, 10): + LOCALE_NAME_INVARIANT: str + LOCALE_NAME_MAX_LENGTH: int + LOCALE_NAME_SYSTEM_DEFAULT: str + LOCALE_NAME_USER_DEFAULT: str | None + + LCMAP_FULLWIDTH: int + LCMAP_HALFWIDTH: int + LCMAP_HIRAGANA: int + LCMAP_KATAKANA: int + LCMAP_LINGUISTIC_CASING: int + LCMAP_LOWERCASE: int + LCMAP_SIMPLIFIED_CHINESE: int + LCMAP_TITLECASE: int + LCMAP_TRADITIONAL_CHINESE: int + LCMAP_UPPERCASE: int + + def CloseHandle(__handle: int) -> None: ... + @overload + def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... + @overload + def ConnectNamedPipe(handle: int, overlapped: Literal[False] = False) -> None: ... + @overload + def ConnectNamedPipe(handle: int, overlapped: bool) -> Overlapped | None: ... + def CreateFile( + __file_name: str, + __desired_access: int, + __share_mode: int, + __security_attributes: int, + __creation_disposition: int, + __flags_and_attributes: int, + __template_file: int, + ) -> int: ... + def CreateJunction(__src_path: str, __dst_path: str) -> None: ... + def CreateNamedPipe( + __name: str, + __open_mode: int, + __pipe_mode: int, + __max_instances: int, + __out_buffer_size: int, + __in_buffer_size: int, + __default_timeout: int, + __security_attributes: int, + ) -> int: ... + def CreatePipe(__pipe_attrs: Any, __size: int) -> tuple[int, int]: ... + def CreateProcess( + __application_name: str | None, + __command_line: str | None, + __proc_attrs: Any, + __thread_attrs: Any, + __inherit_handles: bool, + __creation_flags: int, + __env_mapping: dict[str, str], + __current_directory: str | None, + __startup_info: Any, + ) -> tuple[int, int, int, int]: ... + def DuplicateHandle( + __source_process_handle: int, + __source_handle: int, + __target_process_handle: int, + __desired_access: int, + __inherit_handle: bool, + __options: int = 0, + ) -> int: ... + def ExitProcess(__ExitCode: int) -> NoReturn: ... + def GetACP() -> int: ... + def GetFileType(handle: int) -> int: ... + def GetCurrentProcess() -> int: ... + def GetExitCodeProcess(__process: int) -> int: ... + def GetLastError() -> int: ... 
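# Illustrative sketch, not part of the vendored typeshed data: a few of the
# _winapi functions stubbed above, guarded the same way the stub is, since the
# module only exists on Windows.  These are the same calls subprocess relies on.
import sys

if sys.platform == "win32":
    import _winapi

    current = _winapi.GetCurrentProcess()        # pseudo-handle for this process
    print(hex(_winapi.GetVersion()), _winapi.GetACP(), current)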
+ def GetModuleFileName(__module_handle: int) -> str: ... + def GetStdHandle(__std_handle: int) -> int: ... + def GetVersion() -> int: ... + def OpenProcess(__desired_access: int, __inherit_handle: bool, __process_id: int) -> int: ... + def PeekNamedPipe(__handle: int, __size: int = 0) -> tuple[int, int] | tuple[bytes, int, int]: ... + if sys.version_info >= (3, 10): + def LCMapStringEx(locale: str, flags: int, src: str) -> str: ... + def UnmapViewOfFile(__address: int) -> None: ... + + @overload + def ReadFile(handle: int, size: int, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... + @overload + def ReadFile(handle: int, size: int, overlapped: Literal[False] = False) -> tuple[bytes, int]: ... + @overload + def ReadFile(handle: int, size: int, overlapped: int | bool) -> tuple[Any, int]: ... + def SetNamedPipeHandleState( + __named_pipe: int, __mode: int | None, __max_collection_count: int | None, __collect_data_timeout: int | None + ) -> None: ... + def TerminateProcess(__handle: int, __exit_code: int) -> None: ... + def WaitForMultipleObjects(__handle_seq: Sequence[int], __wait_flag: bool, __milliseconds: int = 0xFFFFFFFF) -> int: ... + def WaitForSingleObject(__handle: int, __milliseconds: int) -> int: ... + def WaitNamedPipe(__name: str, __timeout: int) -> None: ... + @overload + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... + @overload + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[False] = False) -> tuple[int, int]: ... + @overload + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ... + @final + class Overlapped: + event: int + def GetOverlappedResult(self, __wait: bool) -> tuple[int, int]: ... + def cancel(self) -> None: ... + def getbuffer(self) -> bytes | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/abc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/abc.pyi new file mode 100644 index 00000000..068dab47 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/abc.pyi @@ -0,0 +1,47 @@ +import _typeshed +import sys +from _typeshed import SupportsWrite +from collections.abc import Callable +from typing import Any, Generic, TypeVar +from typing_extensions import Literal + +_T = TypeVar("_T") +_R_co = TypeVar("_R_co", covariant=True) +_FuncT = TypeVar("_FuncT", bound=Callable[..., Any]) + +# These definitions have special processing in mypy +class ABCMeta(type): + __abstractmethods__: frozenset[str] + if sys.version_info >= (3, 11): + def __new__( + __mcls: type[_typeshed.Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwargs: Any + ) -> _typeshed.Self: ... + else: + def __new__( + mcls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any + ) -> _typeshed.Self: ... + + def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... + def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... + def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: ... + def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... + +def abstractmethod(funcobj: _FuncT) -> _FuncT: ... + +class abstractclassmethod(classmethod[_R_co], Generic[_R_co]): + __isabstractmethod__: Literal[True] + def __init__(self: abstractclassmethod[_R_co], callable: Callable[..., _R_co]) -> None: ... 
+ +class abstractstaticmethod(staticmethod[_R_co], Generic[_R_co]): + __isabstractmethod__: Literal[True] + def __init__(self, callable: Callable[..., _R_co]) -> None: ... + +class abstractproperty(property): + __isabstractmethod__: Literal[True] + +class ABC(metaclass=ABCMeta): ... + +def get_cache_token() -> object: ... + +if sys.version_info >= (3, 10): + def update_abstractmethods(cls: type[_T]) -> type[_T]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/aifc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/aifc.pyi new file mode 100644 index 00000000..ab0c18ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/aifc.pyi @@ -0,0 +1,91 @@ +import sys +from types import TracebackType +from typing import IO, Any, NamedTuple, overload +from typing_extensions import Literal, Self, TypeAlias + +if sys.version_info >= (3, 9): + __all__ = ["Error", "open"] +else: + __all__ = ["Error", "open", "openfp"] + +class Error(Exception): ... + +class _aifc_params(NamedTuple): + nchannels: int + sampwidth: int + framerate: int + nframes: int + comptype: bytes + compname: bytes + +_File: TypeAlias = str | IO[bytes] +_Marker: TypeAlias = tuple[int, int, bytes] + +class Aifc_read: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def initfp(self, file: IO[bytes]) -> None: ... + def getfp(self) -> IO[bytes]: ... + def rewind(self) -> None: ... + def close(self) -> None: ... + def tell(self) -> int: ... + def getnchannels(self) -> int: ... + def getnframes(self) -> int: ... + def getsampwidth(self) -> int: ... + def getframerate(self) -> int: ... + def getcomptype(self) -> bytes: ... + def getcompname(self) -> bytes: ... + def getparams(self) -> _aifc_params: ... + def getmarkers(self) -> list[_Marker] | None: ... + def getmark(self, id: int) -> _Marker: ... + def setpos(self, pos: int) -> None: ... + def readframes(self, nframes: int) -> bytes: ... + +class Aifc_write: + def __init__(self, f: _File) -> None: ... + def __del__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def initfp(self, file: IO[bytes]) -> None: ... + def aiff(self) -> None: ... + def aifc(self) -> None: ... + def setnchannels(self, nchannels: int) -> None: ... + def getnchannels(self) -> int: ... + def setsampwidth(self, sampwidth: int) -> None: ... + def getsampwidth(self) -> int: ... + def setframerate(self, framerate: int) -> None: ... + def getframerate(self) -> int: ... + def setnframes(self, nframes: int) -> None: ... + def getnframes(self) -> int: ... + def setcomptype(self, comptype: bytes, compname: bytes) -> None: ... + def getcomptype(self) -> bytes: ... + def getcompname(self) -> bytes: ... + def setparams(self, params: tuple[int, int, int, int, bytes, bytes]) -> None: ... + def getparams(self) -> _aifc_params: ... + def setmark(self, id: int, pos: int, name: bytes) -> None: ... + def getmark(self, id: int) -> _Marker: ... + def getmarkers(self) -> list[_Marker] | None: ... + def tell(self) -> int: ... + def writeframesraw(self, data: Any) -> None: ... # Actual type for data is Buffer Protocol + def writeframes(self, data: Any) -> None: ... + def close(self) -> None: ... 
+ +@overload +def open(f: _File, mode: Literal["r", "rb"]) -> Aifc_read: ... +@overload +def open(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... +@overload +def open(f: _File, mode: str | None = None) -> Any: ... + +if sys.version_info < (3, 9): + @overload + def openfp(f: _File, mode: Literal["r", "rb"]) -> Aifc_read: ... + @overload + def openfp(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... + @overload + def openfp(f: _File, mode: str | None = None) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/antigravity.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/antigravity.pyi new file mode 100644 index 00000000..3986e7d1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/antigravity.pyi @@ -0,0 +1,3 @@ +from _typeshed import ReadableBuffer + +def geohash(latitude: float, longitude: float, datedow: ReadableBuffer) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/argparse.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/argparse.pyi new file mode 100644 index 00000000..eb0b707b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/argparse.pyi @@ -0,0 +1,521 @@ +import sys +from collections.abc import Callable, Generator, Iterable, Sequence +from re import Pattern +from typing import IO, Any, Generic, NewType, NoReturn, Protocol, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +__all__ = [ + "ArgumentParser", + "ArgumentError", + "ArgumentTypeError", + "FileType", + "HelpFormatter", + "ArgumentDefaultsHelpFormatter", + "RawDescriptionHelpFormatter", + "RawTextHelpFormatter", + "MetavarTypeHelpFormatter", + "Namespace", + "Action", + "ONE_OR_MORE", + "OPTIONAL", + "PARSER", + "REMAINDER", + "SUPPRESS", + "ZERO_OR_MORE", +] + +if sys.version_info >= (3, 9): + __all__ += ["BooleanOptionalAction"] + +_T = TypeVar("_T") +_ActionT = TypeVar("_ActionT", bound=Action) +_ArgumentParserT = TypeVar("_ArgumentParserT", bound=ArgumentParser) +_N = TypeVar("_N") +# more precisely, Literal["store", "store_const", "store_true", +# "store_false", "append", "append_const", "count", "help", "version", +# "extend"], but using this would make it hard to annotate callers +# that don't use a literal argument +_ActionStr: TypeAlias = str +# more precisely, Literal["?", "*", "+", "...", "A...", +# "==SUPPRESS=="], but using this would make it hard to annotate +# callers that don't use a literal argument +_NArgsStr: TypeAlias = str + +ONE_OR_MORE: Literal["+"] +OPTIONAL: Literal["?"] +PARSER: Literal["A..."] +REMAINDER: Literal["..."] +_SUPPRESS_T = NewType("_SUPPRESS_T", str) +SUPPRESS: _SUPPRESS_T | str # not using Literal because argparse sometimes compares SUPPRESS with is +# the | str is there so that foo = argparse.SUPPRESS; foo = "test" checks out in mypy +ZERO_OR_MORE: Literal["*"] +_UNRECOGNIZED_ARGS_ATTR: str # undocumented + +class ArgumentError(Exception): + argument_name: str | None + message: str + def __init__(self, argument: Action | None, message: str) -> None: ... + +# undocumented +class _AttributeHolder: + def _get_kwargs(self) -> list[tuple[str, Any]]: ... + def _get_args(self) -> list[Any]: ... 
+ +# undocumented +class _ActionsContainer: + description: str | None + prefix_chars: str + argument_default: Any + conflict_handler: str + + _registries: dict[str, dict[Any, Any]] + _actions: list[Action] + _option_string_actions: dict[str, Action] + _action_groups: list[_ArgumentGroup] + _mutually_exclusive_groups: list[_MutuallyExclusiveGroup] + _defaults: dict[str, Any] + _negative_number_matcher: Pattern[str] + _has_negative_number_optionals: list[bool] + def __init__(self, description: str | None, prefix_chars: str, argument_default: Any, conflict_handler: str) -> None: ... + def register(self, registry_name: str, value: Any, object: Any) -> None: ... + def _registry_get(self, registry_name: str, value: Any, default: Any = None) -> Any: ... + def set_defaults(self, **kwargs: Any) -> None: ... + def get_default(self, dest: str) -> Any: ... + def add_argument( + self, + *name_or_flags: str, + action: _ActionStr | type[Action] = ..., + nargs: int | _NArgsStr | _SUPPRESS_T = ..., + const: Any = ..., + default: Any = ..., + type: Callable[[str], _T] | FileType = ..., + choices: Iterable[_T] | None = ..., + required: bool = ..., + help: str | None = ..., + metavar: str | tuple[str, ...] | None = ..., + dest: str | None = ..., + version: str = ..., + **kwargs: Any, + ) -> Action: ... + def add_argument_group(self, *args: Any, **kwargs: Any) -> _ArgumentGroup: ... + def add_mutually_exclusive_group(self, **kwargs: Any) -> _MutuallyExclusiveGroup: ... + def _add_action(self, action: _ActionT) -> _ActionT: ... + def _remove_action(self, action: Action) -> None: ... + def _add_container_actions(self, container: _ActionsContainer) -> None: ... + def _get_positional_kwargs(self, dest: str, **kwargs: Any) -> dict[str, Any]: ... + def _get_optional_kwargs(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... + def _pop_action_class(self, kwargs: Any, default: type[Action] | None = None) -> type[Action]: ... + def _get_handler(self) -> Callable[[Action, Iterable[tuple[str, Action]]], Any]: ... + def _check_conflict(self, action: Action) -> None: ... + def _handle_conflict_error(self, action: Action, conflicting_actions: Iterable[tuple[str, Action]]) -> NoReturn: ... + def _handle_conflict_resolve(self, action: Action, conflicting_actions: Iterable[tuple[str, Action]]) -> None: ... + +class _FormatterClass(Protocol): + def __call__(self, prog: str) -> HelpFormatter: ... + +class ArgumentParser(_AttributeHolder, _ActionsContainer): + prog: str + usage: str | None + epilog: str | None + formatter_class: _FormatterClass + fromfile_prefix_chars: str | None + add_help: bool + allow_abbrev: bool + + # undocumented + _positionals: _ArgumentGroup + _optionals: _ArgumentGroup + _subparsers: _ArgumentGroup | None + + # Note: the constructor arguments are also used in _SubParsersAction.add_parser. + if sys.version_info >= (3, 9): + def __init__( + self, + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, + parents: Sequence[ArgumentParser] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, + ) -> None: ... 
+ else: + def __init__( + self, + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, + parents: Sequence[ArgumentParser] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, + ) -> None: ... + # Ignore errors about overlapping overloads + @overload + def parse_args(self, args: Sequence[str] | None = None, namespace: None = None) -> Namespace: ... # type: ignore[misc] + @overload + def parse_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ... + @overload + def parse_args(self, *, namespace: _N) -> _N: ... + @overload + def add_subparsers( + self: _ArgumentParserT, + *, + title: str = ..., + description: str | None = ..., + prog: str = ..., + action: type[Action] = ..., + option_string: str = ..., + dest: str | None = ..., + required: bool = ..., + help: str | None = ..., + metavar: str | None = ..., + ) -> _SubParsersAction[_ArgumentParserT]: ... + @overload + def add_subparsers( + self, + *, + title: str = ..., + description: str | None = ..., + prog: str = ..., + parser_class: type[_ArgumentParserT], + action: type[Action] = ..., + option_string: str = ..., + dest: str | None = ..., + required: bool = ..., + help: str | None = ..., + metavar: str | None = ..., + ) -> _SubParsersAction[_ArgumentParserT]: ... + def print_usage(self, file: IO[str] | None = None) -> None: ... + def print_help(self, file: IO[str] | None = None) -> None: ... + def format_usage(self) -> str: ... + def format_help(self) -> str: ... + def parse_known_args( + self, args: Sequence[str] | None = None, namespace: Namespace | None = None + ) -> tuple[Namespace, list[str]]: ... + def convert_arg_line_to_args(self, arg_line: str) -> list[str]: ... + def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ... + def error(self, message: str) -> NoReturn: ... + def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ... + def parse_known_intermixed_args( + self, args: Sequence[str] | None = None, namespace: Namespace | None = None + ) -> tuple[Namespace, list[str]]: ... + # undocumented + def _get_optional_actions(self) -> list[Action]: ... + def _get_positional_actions(self) -> list[Action]: ... + def _parse_known_args(self, arg_strings: list[str], namespace: Namespace) -> tuple[Namespace, list[str]]: ... + def _read_args_from_files(self, arg_strings: list[str]) -> list[str]: ... + def _match_argument(self, action: Action, arg_strings_pattern: str) -> int: ... + def _match_arguments_partial(self, actions: Sequence[Action], arg_strings_pattern: str) -> list[int]: ... + def _parse_optional(self, arg_string: str) -> tuple[Action | None, str, str | None] | None: ... + def _get_option_tuples(self, option_string: str) -> list[tuple[Action, str, str | None]]: ... + def _get_nargs_pattern(self, action: Action) -> str: ... + def _get_values(self, action: Action, arg_strings: list[str]) -> Any: ... + def _get_value(self, action: Action, arg_string: str) -> Any: ... + def _check_value(self, action: Action, value: Any) -> None: ... + def _get_formatter(self) -> HelpFormatter: ... + def _print_message(self, message: str, file: IO[str] | None = None) -> None: ... 
+ +class HelpFormatter: + # undocumented + _prog: str + _indent_increment: int + _max_help_position: int + _width: int + _current_indent: int + _level: int + _action_max_length: int + _root_section: Any + _current_section: Any + _whitespace_matcher: Pattern[str] + _long_break_matcher: Pattern[str] + _Section: type[Any] # Nested class + def __init__(self, prog: str, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None) -> None: ... + def _indent(self) -> None: ... + def _dedent(self) -> None: ... + def _add_item(self, func: Callable[..., str], args: Iterable[Any]) -> None: ... + def start_section(self, heading: str | None) -> None: ... + def end_section(self) -> None: ... + def add_text(self, text: str | None) -> None: ... + def add_usage( + self, usage: str | None, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: str | None = None + ) -> None: ... + def add_argument(self, action: Action) -> None: ... + def add_arguments(self, actions: Iterable[Action]) -> None: ... + def format_help(self) -> str: ... + def _join_parts(self, part_strings: Iterable[str]) -> str: ... + def _format_usage( + self, usage: str | None, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: str | None + ) -> str: ... + def _format_actions_usage(self, actions: Iterable[Action], groups: Iterable[_ArgumentGroup]) -> str: ... + def _format_text(self, text: str) -> str: ... + def _format_action(self, action: Action) -> str: ... + def _format_action_invocation(self, action: Action) -> str: ... + def _metavar_formatter(self, action: Action, default_metavar: str) -> Callable[[int], tuple[str, ...]]: ... + def _format_args(self, action: Action, default_metavar: str) -> str: ... + def _expand_help(self, action: Action) -> str: ... + def _iter_indented_subactions(self, action: Action) -> Generator[Action, None, None]: ... + def _split_lines(self, text: str, width: int) -> list[str]: ... + def _fill_text(self, text: str, width: int, indent: str) -> str: ... + def _get_help_string(self, action: Action) -> str | None: ... + def _get_default_metavar_for_optional(self, action: Action) -> str: ... + def _get_default_metavar_for_positional(self, action: Action) -> str: ... + +class RawDescriptionHelpFormatter(HelpFormatter): ... +class RawTextHelpFormatter(RawDescriptionHelpFormatter): ... +class ArgumentDefaultsHelpFormatter(HelpFormatter): ... +class MetavarTypeHelpFormatter(HelpFormatter): ... + +class Action(_AttributeHolder): + option_strings: Sequence[str] + dest: str + nargs: int | str | None + const: Any + default: Any + type: Callable[[str], Any] | FileType | None + choices: Iterable[Any] | None + required: bool + help: str | None + metavar: str | tuple[str, ...] | None + def __init__( + self, + option_strings: Sequence[str], + dest: str, + nargs: int | str | None = None, + const: _T | None = None, + default: _T | str | None = None, + type: Callable[[str], _T] | FileType | None = None, + choices: Iterable[_T] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + def __call__( + self, parser: ArgumentParser, namespace: Namespace, values: str | Sequence[Any] | None, option_string: str | None = None + ) -> None: ... + if sys.version_info >= (3, 9): + def format_usage(self) -> str: ... 
+ +if sys.version_info >= (3, 9): + class BooleanOptionalAction(Action): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: _T | str | None = None, + type: Callable[[str], _T] | FileType | None = None, + choices: Iterable[_T] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + +class Namespace(_AttributeHolder): + def __init__(self, **kwargs: Any) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __contains__(self, key: str) -> bool: ... + def __eq__(self, other: object) -> bool: ... + +class FileType: + # undocumented + _mode: str + _bufsize: int + _encoding: str | None + _errors: str | None + def __init__(self, mode: str = "r", bufsize: int = -1, encoding: str | None = None, errors: str | None = None) -> None: ... + def __call__(self, string: str) -> IO[Any]: ... + +# undocumented +class _ArgumentGroup(_ActionsContainer): + title: str | None + _group_actions: list[Action] + def __init__( + self, container: _ActionsContainer, title: str | None = None, description: str | None = None, **kwargs: Any + ) -> None: ... + +# undocumented +class _MutuallyExclusiveGroup(_ArgumentGroup): + required: bool + _container: _ActionsContainer + def __init__(self, container: _ActionsContainer, required: bool = False) -> None: ... + +# undocumented +class _StoreAction(Action): ... + +# undocumented +class _StoreConstAction(Action): + if sys.version_info >= (3, 11): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + else: + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + +# undocumented +class _StoreTrueAction(_StoreConstAction): + def __init__( + self, option_strings: Sequence[str], dest: str, default: bool = False, required: bool = False, help: str | None = None + ) -> None: ... + +# undocumented +class _StoreFalseAction(_StoreConstAction): + def __init__( + self, option_strings: Sequence[str], dest: str, default: bool = True, required: bool = False, help: str | None = None + ) -> None: ... + +# undocumented +class _AppendAction(Action): ... + +# undocumented +if sys.version_info >= (3, 8): + class _ExtendAction(_AppendAction): ... + +# undocumented +class _AppendConstAction(Action): + if sys.version_info >= (3, 11): + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + else: + def __init__( + self, + option_strings: Sequence[str], + dest: str, + const: Any, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + +# undocumented +class _CountAction(Action): + def __init__( + self, option_strings: Sequence[str], dest: str, default: Any = None, required: bool = False, help: str | None = None + ) -> None: ... 
+ +# undocumented +class _HelpAction(Action): + def __init__( + self, option_strings: Sequence[str], dest: str = "==SUPPRESS==", default: str = "==SUPPRESS==", help: str | None = None + ) -> None: ... + +# undocumented +class _VersionAction(Action): + version: str | None + def __init__( + self, + option_strings: Sequence[str], + version: str | None = None, + dest: str = "==SUPPRESS==", + default: str = "==SUPPRESS==", + help: str = "show program's version number and exit", + ) -> None: ... + +# undocumented +class _SubParsersAction(Action, Generic[_ArgumentParserT]): + _ChoicesPseudoAction: type[Any] # nested class + _prog_prefix: str + _parser_class: type[_ArgumentParserT] + _name_parser_map: dict[str, _ArgumentParserT] + choices: dict[str, _ArgumentParserT] + _choices_actions: list[Action] + def __init__( + self, + option_strings: Sequence[str], + prog: str, + parser_class: type[_ArgumentParserT], + dest: str = "==SUPPRESS==", + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, + ) -> None: ... + + # Note: `add_parser` accepts all kwargs of `ArgumentParser.__init__`. It also + # accepts its own `help` and `aliases` kwargs. + if sys.version_info >= (3, 9): + def add_parser( + self, + name: str, + *, + help: str | None = ..., + aliases: Sequence[str] = ..., + # Kwargs from ArgumentParser constructor + prog: str | None = ..., + usage: str | None = ..., + description: str | None = ..., + epilog: str | None = ..., + parents: Sequence[_ArgumentParserT] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = ..., + fromfile_prefix_chars: str | None = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + add_help: bool = ..., + allow_abbrev: bool = ..., + exit_on_error: bool = ..., + ) -> _ArgumentParserT: ... + else: + def add_parser( + self, + name: str, + *, + help: str | None = ..., + aliases: Sequence[str] = ..., + # Kwargs from ArgumentParser constructor + prog: str | None = ..., + usage: str | None = ..., + description: str | None = ..., + epilog: str | None = ..., + parents: Sequence[_ArgumentParserT] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = ..., + fromfile_prefix_chars: str | None = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + add_help: bool = ..., + allow_abbrev: bool = ..., + ) -> _ArgumentParserT: ... + + def _get_subactions(self) -> list[Action]: ... + +# undocumented +class ArgumentTypeError(Exception): ... + +# undocumented +def _get_action_name(argument: Action | None) -> str | None: ... 
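# Illustrative sketch, not part of the vendored typeshed data: the parse_args
# overloads above return a plain Namespace by default, but hand back your own
# namespace object unchanged when one is supplied, which is what the _N overload
# captures.  `Args` here is a hypothetical example class.
import argparse

class Args(argparse.Namespace):
    verbose: bool
    count: int

parser = argparse.ArgumentParser(prog="demo")
parser.add_argument("--verbose", action="store_true")
parser.add_argument("--count", type=int, default=1)

plain = parser.parse_args(["--verbose"])                       # -> Namespace
typed = parser.parse_args(["--count", "2"], namespace=Args())  # -> Args
assert plain.verbose is True and plain.count == 1
assert typed.count == 2 and typed.verbose is False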
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/array.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/array.pyi new file mode 100644 index 00000000..827bbb97 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/array.pyi @@ -0,0 +1,84 @@ +import sys +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite +from collections.abc import Iterable + +# pytype crashes if array inherits from collections.abc.MutableSequence instead of typing.MutableSequence +from typing import Any, Generic, MutableSequence, TypeVar, overload # noqa: Y022 +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias + +_IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"] +_FloatTypeCode: TypeAlias = Literal["f", "d"] +_UnicodeTypeCode: TypeAlias = Literal["u"] +_TypeCode: TypeAlias = _IntTypeCode | _FloatTypeCode | _UnicodeTypeCode + +_T = TypeVar("_T", int, float, str) + +typecodes: str + +class array(MutableSequence[_T], Generic[_T]): + @property + def typecode(self) -> _TypeCode: ... + @property + def itemsize(self) -> int: ... + @overload + def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | bytearray | Iterable[int] = ...) -> None: ... + @overload + def __init__( + self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | bytearray | Iterable[float] = ... + ) -> None: ... + @overload + def __init__( + self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | bytearray | Iterable[str] = ... + ) -> None: ... + @overload + def __init__(self, __typecode: str, __initializer: Iterable[_T]) -> None: ... + @overload + def __init__(self, __typecode: str, __initializer: bytes | bytearray = ...) -> None: ... + def append(self, __v: _T) -> None: ... + def buffer_info(self) -> tuple[int, int]: ... + def byteswap(self) -> None: ... + def count(self, __v: _T) -> int: ... + def extend(self, __bb: Iterable[_T]) -> None: ... + def frombytes(self, __buffer: ReadableBuffer) -> None: ... + def fromfile(self, __f: SupportsRead[bytes], __n: int) -> None: ... + def fromlist(self, __list: list[_T]) -> None: ... + def fromunicode(self, __ustr: str) -> None: ... + if sys.version_info >= (3, 10): + def index(self, __v: _T, __start: int = 0, __stop: int = sys.maxsize) -> int: ... + else: + def index(self, __v: _T) -> int: ... # type: ignore[override] + + def insert(self, __i: int, __v: _T) -> None: ... + def pop(self, __i: int = -1) -> _T: ... + def remove(self, __v: _T) -> None: ... + def tobytes(self) -> bytes: ... + def tofile(self, __f: SupportsWrite[bytes]) -> None: ... + def tolist(self) -> list[_T]: ... + def tounicode(self) -> str: ... + if sys.version_info < (3, 9): + def fromstring(self, __buffer: str | ReadableBuffer) -> None: ... + def tostring(self) -> bytes: ... + + def __len__(self) -> int: ... + @overload + def __getitem__(self, __i: SupportsIndex) -> _T: ... + @overload + def __getitem__(self, __s: slice) -> array[_T]: ... + @overload # type: ignore[override] + def __setitem__(self, __i: SupportsIndex, __o: _T) -> None: ... + @overload + def __setitem__(self, __s: slice, __o: array[_T]) -> None: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + def __add__(self, __x: array[_T]) -> array[_T]: ... + def __ge__(self, __other: array[_T]) -> bool: ... + def __gt__(self, __other: array[_T]) -> bool: ... + def __iadd__(self, __x: array[_T]) -> Self: ... 
# type: ignore[override] + def __imul__(self, __n: int) -> Self: ... + def __le__(self, __other: array[_T]) -> bool: ... + def __lt__(self, __other: array[_T]) -> bool: ... + def __mul__(self, __n: int) -> array[_T]: ... + def __rmul__(self, __n: int) -> array[_T]: ... + def __copy__(self) -> array[_T]: ... + def __deepcopy__(self, __unused: Any) -> array[_T]: ... + +ArrayType = array diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ast.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ast.pyi new file mode 100644 index 00000000..ea899e15 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ast.pyi @@ -0,0 +1,294 @@ +import os +import sys +from _ast import * +from _typeshed import ReadableBuffer, Unused +from collections.abc import Iterator +from typing import Any, TypeVar, overload +from typing_extensions import Literal + +if sys.version_info >= (3, 8): + class _ABC(type): + if sys.version_info >= (3, 9): + def __init__(cls, *args: Unused) -> None: ... + + class Num(Constant, metaclass=_ABC): + value: int | float | complex + + class Str(Constant, metaclass=_ABC): + value: str + # Aliases for value, for backwards compatibility + s: str + + class Bytes(Constant, metaclass=_ABC): + value: bytes + # Aliases for value, for backwards compatibility + s: bytes + + class NameConstant(Constant, metaclass=_ABC): ... + class Ellipsis(Constant, metaclass=_ABC): ... + +if sys.version_info >= (3, 9): + class slice(AST): ... + class ExtSlice(slice): ... + class Index(slice): ... + class Suite(mod): ... + class AugLoad(expr_context): ... + class AugStore(expr_context): ... + class Param(expr_context): ... + +class NodeVisitor: + def visit(self, node: AST) -> Any: ... + def generic_visit(self, node: AST) -> Any: ... + def visit_Module(self, node: Module) -> Any: ... + def visit_Interactive(self, node: Interactive) -> Any: ... + def visit_Expression(self, node: Expression) -> Any: ... + def visit_FunctionDef(self, node: FunctionDef) -> Any: ... + def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> Any: ... + def visit_ClassDef(self, node: ClassDef) -> Any: ... + def visit_Return(self, node: Return) -> Any: ... + def visit_Delete(self, node: Delete) -> Any: ... + def visit_Assign(self, node: Assign) -> Any: ... + def visit_AugAssign(self, node: AugAssign) -> Any: ... + def visit_AnnAssign(self, node: AnnAssign) -> Any: ... + def visit_For(self, node: For) -> Any: ... + def visit_AsyncFor(self, node: AsyncFor) -> Any: ... + def visit_While(self, node: While) -> Any: ... + def visit_If(self, node: If) -> Any: ... + def visit_With(self, node: With) -> Any: ... + def visit_AsyncWith(self, node: AsyncWith) -> Any: ... + def visit_Raise(self, node: Raise) -> Any: ... + def visit_Try(self, node: Try) -> Any: ... + def visit_Assert(self, node: Assert) -> Any: ... + def visit_Import(self, node: Import) -> Any: ... + def visit_ImportFrom(self, node: ImportFrom) -> Any: ... + def visit_Global(self, node: Global) -> Any: ... + def visit_Nonlocal(self, node: Nonlocal) -> Any: ... + def visit_Expr(self, node: Expr) -> Any: ... + def visit_Pass(self, node: Pass) -> Any: ... + def visit_Break(self, node: Break) -> Any: ... + def visit_Continue(self, node: Continue) -> Any: ... + def visit_Slice(self, node: Slice) -> Any: ... + def visit_BoolOp(self, node: BoolOp) -> Any: ... + def visit_BinOp(self, node: BinOp) -> Any: ... + def visit_UnaryOp(self, node: UnaryOp) -> Any: ... 
+ def visit_Lambda(self, node: Lambda) -> Any: ... + def visit_IfExp(self, node: IfExp) -> Any: ... + def visit_Dict(self, node: Dict) -> Any: ... + def visit_Set(self, node: Set) -> Any: ... + def visit_ListComp(self, node: ListComp) -> Any: ... + def visit_SetComp(self, node: SetComp) -> Any: ... + def visit_DictComp(self, node: DictComp) -> Any: ... + def visit_GeneratorExp(self, node: GeneratorExp) -> Any: ... + def visit_Await(self, node: Await) -> Any: ... + def visit_Yield(self, node: Yield) -> Any: ... + def visit_YieldFrom(self, node: YieldFrom) -> Any: ... + def visit_Compare(self, node: Compare) -> Any: ... + def visit_Call(self, node: Call) -> Any: ... + def visit_FormattedValue(self, node: FormattedValue) -> Any: ... + def visit_JoinedStr(self, node: JoinedStr) -> Any: ... + def visit_Constant(self, node: Constant) -> Any: ... + if sys.version_info >= (3, 8): + def visit_NamedExpr(self, node: NamedExpr) -> Any: ... + def visit_TypeIgnore(self, node: TypeIgnore) -> Any: ... + + def visit_Attribute(self, node: Attribute) -> Any: ... + def visit_Subscript(self, node: Subscript) -> Any: ... + def visit_Starred(self, node: Starred) -> Any: ... + def visit_Name(self, node: Name) -> Any: ... + def visit_List(self, node: List) -> Any: ... + def visit_Tuple(self, node: Tuple) -> Any: ... + def visit_Del(self, node: Del) -> Any: ... + def visit_Load(self, node: Load) -> Any: ... + def visit_Store(self, node: Store) -> Any: ... + def visit_And(self, node: And) -> Any: ... + def visit_Or(self, node: Or) -> Any: ... + def visit_Add(self, node: Add) -> Any: ... + def visit_BitAnd(self, node: BitAnd) -> Any: ... + def visit_BitOr(self, node: BitOr) -> Any: ... + def visit_BitXor(self, node: BitXor) -> Any: ... + def visit_Div(self, node: Div) -> Any: ... + def visit_FloorDiv(self, node: FloorDiv) -> Any: ... + def visit_LShift(self, node: LShift) -> Any: ... + def visit_Mod(self, node: Mod) -> Any: ... + def visit_Mult(self, node: Mult) -> Any: ... + def visit_MatMult(self, node: MatMult) -> Any: ... + def visit_Pow(self, node: Pow) -> Any: ... + def visit_RShift(self, node: RShift) -> Any: ... + def visit_Sub(self, node: Sub) -> Any: ... + def visit_Invert(self, node: Invert) -> Any: ... + def visit_Not(self, node: Not) -> Any: ... + def visit_UAdd(self, node: UAdd) -> Any: ... + def visit_USub(self, node: USub) -> Any: ... + def visit_Eq(self, node: Eq) -> Any: ... + def visit_Gt(self, node: Gt) -> Any: ... + def visit_GtE(self, node: GtE) -> Any: ... + def visit_In(self, node: In) -> Any: ... + def visit_Is(self, node: Is) -> Any: ... + def visit_IsNot(self, node: IsNot) -> Any: ... + def visit_Lt(self, node: Lt) -> Any: ... + def visit_LtE(self, node: LtE) -> Any: ... + def visit_NotEq(self, node: NotEq) -> Any: ... + def visit_NotIn(self, node: NotIn) -> Any: ... + def visit_comprehension(self, node: comprehension) -> Any: ... + def visit_ExceptHandler(self, node: ExceptHandler) -> Any: ... + def visit_arguments(self, node: arguments) -> Any: ... + def visit_arg(self, node: arg) -> Any: ... + def visit_keyword(self, node: keyword) -> Any: ... + def visit_alias(self, node: alias) -> Any: ... + def visit_withitem(self, node: withitem) -> Any: ... + if sys.version_info >= (3, 10): + def visit_Match(self, node: Match) -> Any: ... + def visit_MatchValue(self, node: MatchValue) -> Any: ... + def visit_MatchSequence(self, node: MatchSequence) -> Any: ... + def visit_MatchStar(self, node: MatchStar) -> Any: ... + def visit_MatchMapping(self, node: MatchMapping) -> Any: ... 
+ def visit_MatchClass(self, node: MatchClass) -> Any: ... + def visit_MatchAs(self, node: MatchAs) -> Any: ... + def visit_MatchOr(self, node: MatchOr) -> Any: ... + + if sys.version_info >= (3, 11): + def visit_TryStar(self, node: TryStar) -> Any: ... + + # visit methods for deprecated nodes + def visit_ExtSlice(self, node: ExtSlice) -> Any: ... + def visit_Index(self, node: Index) -> Any: ... + def visit_Suite(self, node: Suite) -> Any: ... + def visit_AugLoad(self, node: AugLoad) -> Any: ... + def visit_AugStore(self, node: AugStore) -> Any: ... + def visit_Param(self, node: Param) -> Any: ... + def visit_Num(self, node: Num) -> Any: ... + def visit_Str(self, node: Str) -> Any: ... + def visit_Bytes(self, node: Bytes) -> Any: ... + def visit_NameConstant(self, node: NameConstant) -> Any: ... + def visit_Ellipsis(self, node: Ellipsis) -> Any: ... + +class NodeTransformer(NodeVisitor): + def generic_visit(self, node: AST) -> AST: ... + # TODO: Override the visit_* methods with better return types. + # The usual return type is AST | None, but Iterable[AST] + # is also allowed in some cases -- this needs to be mapped. + +_T = TypeVar("_T", bound=AST) + +if sys.version_info >= (3, 8): + @overload + def parse( + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = "", + mode: Literal["exec"] = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Module: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], + mode: Literal["eval"], + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Expression: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], + mode: Literal["func_type"], + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> FunctionType: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], + mode: Literal["single"], + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Interactive: ... + @overload + def parse( + source: str | ReadableBuffer, + *, + mode: Literal["eval"], + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Expression: ... + @overload + def parse( + source: str | ReadableBuffer, + *, + mode: Literal["func_type"], + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> FunctionType: ... + @overload + def parse( + source: str | ReadableBuffer, + *, + mode: Literal["single"], + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> Interactive: ... + @overload + def parse( + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = "", + mode: str = "exec", + *, + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, + ) -> AST: ... + +else: + @overload + def parse( + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = "", + mode: Literal["exec"] = "exec", + ) -> Module: ... + @overload + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["eval"] + ) -> Expression: ... + @overload + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["single"] + ) -> Interactive: ... 
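# Illustrative sketch, not part of the vendored typeshed data: the parse()
# overloads above key the return type on `mode` (Module for "exec", Expression
# for "eval", and so on), and NodeVisitor dispatches to visit_* methods by node
# class name.  NameCollector is a hypothetical example visitor.
import ast

class NameCollector(ast.NodeVisitor):
    def __init__(self):
        self.names = []

    def visit_Name(self, node):
        self.names.append(node.id)
        self.generic_visit(node)

module = ast.parse("total = price * quantity")        # mode="exec" -> Module
expr = ast.parse("price * quantity", mode="eval")     # -> Expression
assert isinstance(module, ast.Module) and isinstance(expr, ast.Expression)

collector = NameCollector()
collector.visit(module)
assert collector.names == ["total", "price", "quantity"]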
+ @overload + def parse(source: str | ReadableBuffer, *, mode: Literal["eval"]) -> Expression: ... + @overload + def parse(source: str | ReadableBuffer, *, mode: Literal["single"]) -> Interactive: ... + @overload + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any] = "", mode: str = "exec" + ) -> AST: ... + +if sys.version_info >= (3, 9): + def unparse(ast_obj: AST) -> str: ... + +def copy_location(new_node: _T, old_node: AST) -> _T: ... + +if sys.version_info >= (3, 9): + def dump( + node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None + ) -> str: ... + +else: + def dump(node: AST, annotate_fields: bool = True, include_attributes: bool = False) -> str: ... + +def fix_missing_locations(node: _T) -> _T: ... +def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: ... +def increment_lineno(node: _T, n: int = 1) -> _T: ... +def iter_child_nodes(node: AST) -> Iterator[AST]: ... +def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... +def literal_eval(node_or_string: str | AST) -> Any: ... + +if sys.version_info >= (3, 8): + def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: ... + +def walk(node: AST) -> Iterator[AST]: ... + +if sys.version_info >= (3, 9): + def main() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asynchat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asynchat.pyi new file mode 100644 index 00000000..79a70d1c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asynchat.pyi @@ -0,0 +1,21 @@ +import asyncore +from abc import abstractmethod + +class simple_producer: + def __init__(self, data: bytes, buffer_size: int = 512) -> None: ... + def more(self) -> bytes: ... + +class async_chat(asyncore.dispatcher): + ac_in_buffer_size: int + ac_out_buffer_size: int + @abstractmethod + def collect_incoming_data(self, data: bytes) -> None: ... + @abstractmethod + def found_terminator(self) -> None: ... + def set_terminator(self, term: bytes | int | None) -> None: ... + def get_terminator(self) -> bytes | int | None: ... + def push(self, data: bytes) -> None: ... + def push_with_producer(self, producer: simple_producer) -> None: ... + def close_when_done(self) -> None: ... + def initiate_send(self) -> None: ... + def discard_buffers(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/__init__.pyi new file mode 100644 index 00000000..4afcd37f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/__init__.pyi @@ -0,0 +1,30 @@ +import sys + +# As at runtime, this depends on all submodules defining __all__ accurately. 
+from .base_events import * +from .coroutines import * +from .events import * +from .futures import * +from .locks import * +from .protocols import * +from .queues import * +from .runners import * +from .streams import * +from .subprocess import * +from .tasks import * +from .transports import * + +if sys.version_info >= (3, 8): + from .exceptions import * + +if sys.version_info >= (3, 9): + from .threads import * + +if sys.version_info >= (3, 11): + from .taskgroups import * + from .timeouts import * + +if sys.platform == "win32": + from .windows_events import * +else: + from .unix_events import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_events.pyi new file mode 100644 index 00000000..3b8f2867 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_events.pyi @@ -0,0 +1,435 @@ +import ssl +import sys +from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer +from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle, _TaskFactory +from asyncio.futures import Future +from asyncio.protocols import BaseProtocol +from asyncio.tasks import Task +from asyncio.transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport +from collections.abc import Awaitable, Callable, Coroutine, Generator, Iterable, Sequence +from contextvars import Context +from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket +from typing import IO, Any, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +if sys.version_info >= (3, 9): + __all__ = ("BaseEventLoop", "Server") +else: + __all__ = ("BaseEventLoop",) + +_T = TypeVar("_T") +_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol) +_Context: TypeAlias = dict[str, Any] +_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object] +_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol] +_SSLContext: TypeAlias = bool | None | ssl.SSLContext + +class Server(AbstractServer): + if sys.version_info >= (3, 11): + def __init__( + self, + loop: AbstractEventLoop, + sockets: Iterable[socket], + protocol_factory: _ProtocolFactory, + ssl_context: _SSLContext, + backlog: int, + ssl_handshake_timeout: float | None, + ssl_shutdown_timeout: float | None = None, + ) -> None: ... + else: + def __init__( + self, + loop: AbstractEventLoop, + sockets: Iterable[socket], + protocol_factory: _ProtocolFactory, + ssl_context: _SSLContext, + backlog: int, + ssl_handshake_timeout: float | None, + ) -> None: ... + + def get_loop(self) -> AbstractEventLoop: ... + def is_serving(self) -> bool: ... + async def start_serving(self) -> None: ... + async def serve_forever(self) -> None: ... + if sys.version_info >= (3, 8): + @property + def sockets(self) -> tuple[socket, ...]: ... + else: + @property + def sockets(self) -> list[socket]: ... + + def close(self) -> None: ... + async def wait_closed(self) -> None: ... + +class BaseEventLoop(AbstractEventLoop): + def run_forever(self) -> None: ... + # Can't use a union, see mypy issue # 1873. + @overload + def run_until_complete(self, future: Generator[Any, None, _T]) -> _T: ... + @overload + def run_until_complete(self, future: Awaitable[_T]) -> _T: ... + def stop(self) -> None: ... + def is_running(self) -> bool: ... + def is_closed(self) -> bool: ... + def close(self) -> None: ... 
+ async def shutdown_asyncgens(self) -> None: ... + # Methods scheduling callbacks. All these return Handles. + def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... + def call_later( + self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> TimerHandle: ... + def call_at( + self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> TimerHandle: ... + def time(self) -> float: ... + # Future methods + def create_future(self) -> Future[Any]: ... + # Tasks methods + if sys.version_info >= (3, 11): + def create_task( + self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = None, context: Context | None = None + ) -> Task[_T]: ... + elif sys.version_info >= (3, 8): + def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = None) -> Task[_T]: ... + else: + def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T]) -> Task[_T]: ... + + def set_task_factory(self, factory: _TaskFactory | None) -> None: ... + def get_task_factory(self) -> _TaskFactory | None: ... + # Methods for interacting with threads + def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... + def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> Future[_T]: ... + def set_default_executor(self, executor: Any) -> None: ... + # Network I/O methods returning Futures. + async def getaddrinfo( + self, + host: bytes | str | None, + port: bytes | str | int | None, + *, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ... + if sys.version_info >= (3, 11): + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
+ elif sys.version_info >= (3, 8): + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + if sys.version_info >= (3, 11): + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> Transport: ... + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
+ else: + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport: ... + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + + async def sock_sendfile( + self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = True + ) -> int: ... + async def sendfile( + self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True + ) -> int: ... + if sys.version_info >= (3, 11): + async def create_datagram_endpoint( # type: ignore[override] + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, + *, + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... + else: + async def create_datagram_endpoint( + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, + *, + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_address: bool | None = ..., + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... + # Pipes and subprocesses. + async def connect_read_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[ReadTransport, _ProtocolT]: ... + async def connect_write_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[WriteTransport, _ProtocolT]: ... + async def subprocess_shell( + self, + protocol_factory: Callable[[], _ProtocolT], + cmd: bytes | str, + *, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... 
+ async def subprocess_exec( + self, + protocol_factory: Callable[[], _ProtocolT], + program: Any, + *args: Any, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + def add_reader(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... + def remove_reader(self, fd: FileDescriptorLike) -> bool: ... + def add_writer(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... + def remove_writer(self, fd: FileDescriptorLike) -> bool: ... + # The sock_* methods (and probably some others) are not actually implemented on + # BaseEventLoop, only on subclasses. We list them here for now for convenience. + async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... + async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... + async def sock_connect(self, sock: socket, address: _Address) -> None: ... + async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... + if sys.version_info >= (3, 11): + async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> int: ... + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... + # Signal handling. + def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... + def remove_signal_handler(self, sig: int) -> bool: ... + # Error handlers. + def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ... + def get_exception_handler(self) -> _ExceptionHandler | None: ... + def default_exception_handler(self, context: _Context) -> None: ... + def call_exception_handler(self, context: _Context) -> None: ... + # Debug flag management. + def get_debug(self) -> bool: ... + def set_debug(self, enabled: bool) -> None: ... + if sys.version_info >= (3, 9): + async def shutdown_default_executor(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_futures.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_futures.pyi new file mode 100644 index 00000000..c51174ef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_futures.pyi @@ -0,0 +1,20 @@ +from collections.abc import Callable, Sequence +from contextvars import Context +from typing import Any +from typing_extensions import Literal + +from . import futures + +__all__ = () + +# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py +# but it leads to circular import error in pytype tool. +# That's why the import order is reversed. +from .futures import isfuture as isfuture + +_PENDING: Literal["PENDING"] # undocumented +_CANCELLED: Literal["CANCELLED"] # undocumented +_FINISHED: Literal["FINISHED"] # undocumented + +def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ... # undocumented +def _future_repr_info(future: futures.Future[Any]) -> list[str]: ... 
# undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_subprocess.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_subprocess.pyi new file mode 100644 index 00000000..597c8302 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_subprocess.pyi @@ -0,0 +1,62 @@ +import subprocess +from collections import deque +from collections.abc import Callable, Sequence +from typing import IO, Any +from typing_extensions import TypeAlias + +from . import events, futures, protocols, transports + +_File: TypeAlias = int | IO[Any] | None + +class BaseSubprocessTransport(transports.SubprocessTransport): + _closed: bool # undocumented + _protocol: protocols.SubprocessProtocol # undocumented + _loop: events.AbstractEventLoop # undocumented + _proc: subprocess.Popen[Any] | None # undocumented + _pid: int | None # undocumented + _returncode: int | None # undocumented + _exit_waiters: list[futures.Future[Any]] # undocumented + _pending_calls: deque[tuple[Callable[..., Any], tuple[Any, ...]]] # undocumented + _pipes: dict[int, _File] # undocumented + _finished: bool # undocumented + def __init__( + self, + loop: events.AbstractEventLoop, + protocol: protocols.SubprocessProtocol, + args: str | bytes | Sequence[str | bytes], + shell: bool, + stdin: _File, + stdout: _File, + stderr: _File, + bufsize: int, + waiter: futures.Future[Any] | None = None, + extra: Any | None = None, + **kwargs: Any, + ) -> None: ... + def _start( + self, + args: str | bytes | Sequence[str | bytes], + shell: bool, + stdin: _File, + stdout: _File, + stderr: _File, + bufsize: int, + **kwargs: Any, + ) -> None: ... # undocumented + def get_pid(self) -> int | None: ... # type: ignore[override] + def get_pipe_transport(self, fd: int) -> _File: ... # type: ignore[override] + def _check_proc(self) -> None: ... # undocumented + def send_signal(self, signal: int) -> None: ... # type: ignore[override] + async def _connect_pipes(self, waiter: futures.Future[Any] | None) -> None: ... # undocumented + def _call(self, cb: Callable[..., object], *data: Any) -> None: ... # undocumented + def _pipe_connection_lost(self, fd: int, exc: BaseException | None) -> None: ... # undocumented + def _pipe_data_received(self, fd: int, data: bytes) -> None: ... # undocumented + def _process_exited(self, returncode: int) -> None: ... # undocumented + async def _wait(self) -> int: ... # undocumented + def _try_finish(self) -> None: ... # undocumented + def _call_connection_lost(self, exc: BaseException | None) -> None: ... # undocumented + +class WriteSubprocessPipeProto(protocols.BaseProtocol): # undocumented + def __init__(self, proc: BaseSubprocessTransport, fd: int) -> None: ... + +class ReadSubprocessPipeProto(WriteSubprocessPipeProto, protocols.Protocol): ... # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_tasks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_tasks.pyi new file mode 100644 index 00000000..42e952ff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/base_tasks.pyi @@ -0,0 +1,9 @@ +from _typeshed import StrOrBytesPath +from types import FrameType +from typing import Any + +from . import tasks + +def _task_repr_info(task: tasks.Task[Any]) -> list[str]: ... 
# undocumented +def _task_get_stack(task: tasks.Task[Any], limit: int | None) -> list[FrameType]: ... # undocumented +def _task_print_stack(task: tasks.Task[Any], limit: int | None, file: StrOrBytesPath) -> None: ... # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/constants.pyi new file mode 100644 index 00000000..af209fa9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/constants.pyi @@ -0,0 +1,18 @@ +import enum +import sys +from typing_extensions import Literal + +LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5] +ACCEPT_RETRY_DELAY: Literal[1] +DEBUG_STACK_DEPTH: Literal[10] +SSL_HANDSHAKE_TIMEOUT: float +SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144] +if sys.version_info >= (3, 11): + SSL_SHUTDOWN_TIMEOUT: float + FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256] + FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512] + +class _SendfileMode(enum.Enum): + UNSUPPORTED: int + TRY_NATIVE: int + FALLBACK: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/coroutines.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/coroutines.pyi new file mode 100644 index 00000000..14fb627a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/coroutines.pyi @@ -0,0 +1,28 @@ +import sys +from collections.abc import Awaitable, Callable, Coroutine +from typing import Any, TypeVar, overload +from typing_extensions import ParamSpec, TypeGuard + +if sys.version_info >= (3, 11): + __all__ = ("iscoroutinefunction", "iscoroutine") +else: + __all__ = ("coroutine", "iscoroutinefunction", "iscoroutine") + +_T = TypeVar("_T") +_FunctionT = TypeVar("_FunctionT", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +if sys.version_info < (3, 11): + def coroutine(func: _FunctionT) -> _FunctionT: ... + +@overload +def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... +@overload +def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ... +@overload +def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ... +@overload +def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ... + +# Can actually be a generator-style coroutine on Python 3.7 +def iscoroutine(obj: object) -> TypeGuard[Coroutine[Any, Any, Any]]: ... 
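Aside (not part of the vendored stub or the patch): coroutines.pyi above types iscoroutinefunction() as a set of TypeGuard-returning overloads, so a type checker can narrow a callable to a coroutine function at the call site. A minimal runnable sketch of that behaviour; the fetch/describe helpers are illustrative only:

import asyncio
from collections.abc import Callable
from typing import Any


async def fetch(x: int) -> str:
    return str(x)


def describe(func: Callable[..., Any]) -> str:
    # Under the overloads above, `func` is narrowed to a coroutine-function
    # type inside this branch.
    if asyncio.iscoroutinefunction(func):
        return f"{func.__name__} returns a coroutine"
    return f"{func.__name__} is a plain callable"


print(describe(fetch))   # fetch returns a coroutine
print(describe(print))   # print is a plain callable

coro = fetch(1)
print(asyncio.iscoroutine(coro))  # True: the coroutine object itself
coro.close()  # close it so no "never awaited" warning is emitted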
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/events.pyi new file mode 100644 index 00000000..f97afe87 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/events.pyi @@ -0,0 +1,630 @@ +import ssl +import sys +from _typeshed import FileDescriptorLike, ReadableBuffer, StrPath, Unused, WriteableBuffer +from abc import ABCMeta, abstractmethod +from collections.abc import Awaitable, Callable, Coroutine, Generator, Sequence +from contextvars import Context +from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket +from typing import IO, Any, Protocol, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +from .base_events import Server +from .futures import Future +from .protocols import BaseProtocol +from .tasks import Task +from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport +from .unix_events import AbstractChildWatcher + +if sys.version_info >= (3, 8): + __all__ = ( + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "get_child_watcher", + "set_child_watcher", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) + +else: + __all__ = ( + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "SendfileNotAvailableError", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "get_child_watcher", + "set_child_watcher", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) + +_T = TypeVar("_T") +_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol) +_Context: TypeAlias = dict[str, Any] +_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object] +_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol] +_SSLContext: TypeAlias = bool | None | ssl.SSLContext + +class _TaskFactory(Protocol): + def __call__( + self, __loop: AbstractEventLoop, __factory: Coroutine[Any, Any, _T] | Generator[Any, None, _T] + ) -> Future[_T]: ... + +class Handle: + _cancelled: bool + _args: Sequence[Any] + def __init__( + self, callback: Callable[..., object], args: Sequence[Any], loop: AbstractEventLoop, context: Context | None = None + ) -> None: ... + def cancel(self) -> None: ... + def _run(self) -> None: ... + def cancelled(self) -> bool: ... + +class TimerHandle(Handle): + def __init__( + self, + when: float, + callback: Callable[..., object], + args: Sequence[Any], + loop: AbstractEventLoop, + context: Context | None = None, + ) -> None: ... + def when(self) -> float: ... + def __lt__(self, other: TimerHandle) -> bool: ... + def __le__(self, other: TimerHandle) -> bool: ... + def __gt__(self, other: TimerHandle) -> bool: ... + def __ge__(self, other: TimerHandle) -> bool: ... + def __eq__(self, other: object) -> bool: ... + +class AbstractServer: + @abstractmethod + def close(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *exc: Unused) -> None: ... + @abstractmethod + def get_loop(self) -> AbstractEventLoop: ... + @abstractmethod + def is_serving(self) -> bool: ... + @abstractmethod + async def start_serving(self) -> None: ... 
+ @abstractmethod + async def serve_forever(self) -> None: ... + @abstractmethod + async def wait_closed(self) -> None: ... + +class AbstractEventLoop: + slow_callback_duration: float + @abstractmethod + def run_forever(self) -> None: ... + # Can't use a union, see mypy issue # 1873. + @overload + @abstractmethod + def run_until_complete(self, future: Generator[Any, None, _T]) -> _T: ... + @overload + @abstractmethod + def run_until_complete(self, future: Awaitable[_T]) -> _T: ... + @abstractmethod + def stop(self) -> None: ... + @abstractmethod + def is_running(self) -> bool: ... + @abstractmethod + def is_closed(self) -> bool: ... + @abstractmethod + def close(self) -> None: ... + @abstractmethod + async def shutdown_asyncgens(self) -> None: ... + # Methods scheduling callbacks. All these return Handles. + if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 + @abstractmethod + def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... + @abstractmethod + def call_later( + self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> TimerHandle: ... + @abstractmethod + def call_at( + self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> TimerHandle: ... + else: + @abstractmethod + def call_soon(self, callback: Callable[..., object], *args: Any) -> Handle: ... + @abstractmethod + def call_later(self, delay: float, callback: Callable[..., object], *args: Any) -> TimerHandle: ... + @abstractmethod + def call_at(self, when: float, callback: Callable[..., object], *args: Any) -> TimerHandle: ... + + @abstractmethod + def time(self) -> float: ... + # Future methods + @abstractmethod + def create_future(self) -> Future[Any]: ... + # Tasks methods + if sys.version_info >= (3, 11): + @abstractmethod + def create_task( + self, + coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], + *, + name: str | None = None, + context: Context | None = None, + ) -> Task[_T]: ... + elif sys.version_info >= (3, 8): + @abstractmethod + def create_task( + self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: str | None = None + ) -> Task[_T]: ... + else: + @abstractmethod + def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T]) -> Task[_T]: ... + + @abstractmethod + def set_task_factory(self, factory: _TaskFactory | None) -> None: ... + @abstractmethod + def get_task_factory(self) -> _TaskFactory | None: ... + # Methods for interacting with threads + if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 + @abstractmethod + def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... + else: + @abstractmethod + def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any) -> Handle: ... + + @abstractmethod + def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> Future[_T]: ... + @abstractmethod + def set_default_executor(self, executor: Any) -> None: ... + # Network I/O methods returning Futures. + @abstractmethod + async def getaddrinfo( + self, + host: bytes | str | None, + port: bytes | str | int | None, + *, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... 
+ @abstractmethod + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ... + if sys.version_info >= (3, 11): + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + elif sys.version_info >= (3, 8): + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
+ if sys.version_info >= (3, 11): + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @abstractmethod + async def start_tls( + self, + transport: WriteTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> Transport: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: StrPath | None = None, + *, + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + else: + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @abstractmethod + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: StrPath | None = None, + *, + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + if sys.version_info >= (3, 11): + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
+ elif sys.version_info >= (3, 10): + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + if sys.version_info >= (3, 11): + async def create_unix_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + path: str | None = None, + *, + ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + async def create_unix_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + path: str | None = None, + *, + ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + + @abstractmethod + async def sock_sendfile( + self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = None + ) -> int: ... + @abstractmethod + async def sendfile( + self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True + ) -> int: ... + @abstractmethod + async def create_datagram_endpoint( + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, + *, + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... + # Pipes and subprocesses. + @abstractmethod + async def connect_read_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[ReadTransport, _ProtocolT]: ... + @abstractmethod + async def connect_write_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[WriteTransport, _ProtocolT]: ... + @abstractmethod + async def subprocess_shell( + self, + protocol_factory: Callable[[], _ProtocolT], + cmd: bytes | str, + *, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = ..., + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + @abstractmethod + async def subprocess_exec( + self, + protocol_factory: Callable[[], _ProtocolT], + program: Any, + *args: Any, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + @abstractmethod + def add_reader(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... + @abstractmethod + def remove_reader(self, fd: FileDescriptorLike) -> bool: ... + @abstractmethod + def add_writer(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... + @abstractmethod + def remove_writer(self, fd: FileDescriptorLike) -> bool: ... 
+ # Completion based I/O methods returning Futures prior to 3.7 + @abstractmethod + async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... + @abstractmethod + async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... + @abstractmethod + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... + @abstractmethod + async def sock_connect(self, sock: socket, address: _Address) -> None: ... + @abstractmethod + async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... + if sys.version_info >= (3, 11): + @abstractmethod + async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... + @abstractmethod + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> int: ... + @abstractmethod + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... + # Signal handling. + @abstractmethod + def add_signal_handler(self, sig: int, callback: Callable[..., object], *args: Any) -> None: ... + @abstractmethod + def remove_signal_handler(self, sig: int) -> bool: ... + # Error handlers. + @abstractmethod + def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ... + @abstractmethod + def get_exception_handler(self) -> _ExceptionHandler | None: ... + @abstractmethod + def default_exception_handler(self, context: _Context) -> None: ... + @abstractmethod + def call_exception_handler(self, context: _Context) -> None: ... + # Debug flag management. + @abstractmethod + def get_debug(self) -> bool: ... + @abstractmethod + def set_debug(self, enabled: bool) -> None: ... + if sys.version_info >= (3, 9): + @abstractmethod + async def shutdown_default_executor(self) -> None: ... + +class AbstractEventLoopPolicy: + @abstractmethod + def get_event_loop(self) -> AbstractEventLoop: ... + @abstractmethod + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + @abstractmethod + def new_event_loop(self) -> AbstractEventLoop: ... + # Child processes handling (Unix only). + @abstractmethod + def get_child_watcher(self) -> AbstractChildWatcher: ... + @abstractmethod + def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ... + +class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy, metaclass=ABCMeta): + def get_event_loop(self) -> AbstractEventLoop: ... + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + def new_event_loop(self) -> AbstractEventLoop: ... + +def get_event_loop_policy() -> AbstractEventLoopPolicy: ... +def set_event_loop_policy(policy: AbstractEventLoopPolicy | None) -> None: ... +def get_event_loop() -> AbstractEventLoop: ... +def set_event_loop(loop: AbstractEventLoop | None) -> None: ... +def new_event_loop() -> AbstractEventLoop: ... +def get_child_watcher() -> AbstractChildWatcher: ... +def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... +def _set_running_loop(__loop: AbstractEventLoop | None) -> None: ... +def _get_running_loop() -> AbstractEventLoop: ... +def get_running_loop() -> AbstractEventLoop: ... + +if sys.version_info < (3, 8): + class SendfileNotAvailableError(RuntimeError): ... 
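Aside (not part of the vendored stub or the patch): events.pyi above declares the abstract event-loop surface. A small sketch exercising a few of the declared methods (call_soon, call_later, create_task, run_in_executor); the context= keyword is only accepted on the Python versions the stub gates it to:

import asyncio
import concurrent.futures


def tick(label: str) -> None:
    print("tick:", label)


async def main() -> None:
    loop = asyncio.get_running_loop()

    # Callback scheduling: returns Handle / TimerHandle as in the stub.
    loop.call_soon(tick, "soon")
    timer = loop.call_later(0.05, tick, "later")
    print("delayed callback fires at loop time", timer.when())

    # Task creation (AbstractEventLoop.create_task).
    task = loop.create_task(asyncio.sleep(0.01, result=42))

    # Thread interaction: run a blocking callable in an executor.
    with concurrent.futures.ThreadPoolExecutor() as pool:
        doubled = await loop.run_in_executor(pool, lambda: 21 * 2)

    print(await task, doubled)
    await asyncio.sleep(0.1)  # give the delayed callback time to run


asyncio.run(main())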
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/exceptions.pyi new file mode 100644 index 00000000..075fbb80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/exceptions.pyi @@ -0,0 +1,38 @@ +import sys + +if sys.version_info >= (3, 11): + __all__ = ( + "BrokenBarrierError", + "CancelledError", + "InvalidStateError", + "TimeoutError", + "IncompleteReadError", + "LimitOverrunError", + "SendfileNotAvailableError", + ) +else: + __all__ = ( + "CancelledError", + "InvalidStateError", + "TimeoutError", + "IncompleteReadError", + "LimitOverrunError", + "SendfileNotAvailableError", + ) + +class CancelledError(BaseException): ... +class TimeoutError(Exception): ... +class InvalidStateError(Exception): ... +class SendfileNotAvailableError(RuntimeError): ... + +class IncompleteReadError(EOFError): + expected: int | None + partial: bytes + def __init__(self, partial: bytes, expected: int | None) -> None: ... + +class LimitOverrunError(Exception): + consumed: int + def __init__(self, message: str, consumed: int) -> None: ... + +if sys.version_info >= (3, 11): + class BrokenBarrierError(RuntimeError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/format_helpers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/format_helpers.pyi new file mode 100644 index 00000000..1c78dff3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/format_helpers.pyi @@ -0,0 +1,20 @@ +import functools +import traceback +from collections.abc import Iterable +from types import FrameType, FunctionType +from typing import Any, overload +from typing_extensions import TypeAlias + +class _HasWrapper: + __wrapper__: _HasWrapper | FunctionType + +_FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | functools.partialmethod[Any] + +@overload +def _get_function_source(func: _FuncType) -> tuple[str, int]: ... +@overload +def _get_function_source(func: object) -> tuple[str, int] | None: ... +def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... +def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... +def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/futures.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/futures.pyi new file mode 100644 index 00000000..79209f5e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/futures.pyi @@ -0,0 +1,66 @@ +import sys +from collections.abc import Awaitable, Callable, Generator, Iterable +from concurrent.futures._base import Error, Future as _ConcurrentFuture +from typing import Any, TypeVar +from typing_extensions import Literal, Self, TypeGuard + +from .events import AbstractEventLoop + +if sys.version_info < (3, 8): + from concurrent.futures import CancelledError as CancelledError, TimeoutError as TimeoutError + + class InvalidStateError(Error): ... 
+ +from contextvars import Context + +if sys.version_info >= (3, 9): + from types import GenericAlias + +if sys.version_info >= (3, 8): + __all__ = ("Future", "wrap_future", "isfuture") +else: + __all__ = ("CancelledError", "TimeoutError", "InvalidStateError", "Future", "wrap_future", "isfuture") + +_T = TypeVar("_T") + +# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py +# but it leads to circular import error in pytype tool. +# That's why the import order is reversed. +def isfuture(obj: object) -> TypeGuard[Future[Any]]: ... + +class Future(Awaitable[_T], Iterable[_T]): + _state: str + @property + def _exception(self) -> BaseException: ... + _blocking: bool + @property + def _log_traceback(self) -> bool: ... + @_log_traceback.setter + def _log_traceback(self, val: Literal[False]) -> None: ... + _asyncio_future_blocking: bool # is a part of duck-typing contract for `Future` + def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + def __del__(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + @property + def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ... + def add_done_callback(self, __fn: Callable[[Self], object], *, context: Context | None = None) -> None: ... + if sys.version_info >= (3, 9): + def cancel(self, msg: Any | None = None) -> bool: ... + else: + def cancel(self) -> bool: ... + + def cancelled(self) -> bool: ... + def done(self) -> bool: ... + def result(self) -> _T: ... + def exception(self) -> BaseException | None: ... + def remove_done_callback(self, __fn: Callable[[Self], object]) -> int: ... + def set_result(self, __result: _T) -> None: ... + def set_exception(self, __exception: type | BaseException) -> None: ... + def __iter__(self) -> Generator[Any, None, _T]: ... + def __await__(self) -> Generator[Any, None, _T]: ... + @property + def _loop(self) -> AbstractEventLoop: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/locks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/locks.pyi new file mode 100644 index 00000000..ab4e63ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/locks.pyi @@ -0,0 +1,116 @@ +import enum +import sys +from _typeshed import Unused +from collections import deque +from collections.abc import Callable, Generator +from types import TracebackType +from typing import Any, TypeVar +from typing_extensions import Literal, Self + +from .events import AbstractEventLoop +from .futures import Future + +if sys.version_info >= (3, 11): + from .mixins import _LoopBoundMixin + +if sys.version_info >= (3, 11): + __all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore", "Barrier") +else: + __all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore") + +_T = TypeVar("_T") + +if sys.version_info >= (3, 9): + class _ContextManagerMixin: + async def __aenter__(self) -> None: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: ... + +else: + class _ContextManager: + def __init__(self, lock: Lock | Semaphore) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *args: Unused) -> None: ... 
+ + class _ContextManagerMixin: + # Apparently this exists to *prohibit* use as a context manager. + # def __enter__(self) -> NoReturn: ... see: https://github.com/python/typing/issues/1043 + # def __exit__(self, *args: Any) -> None: ... + def __iter__(self) -> Generator[Any, None, _ContextManager]: ... + def __await__(self) -> Generator[Any, None, _ContextManager]: ... + async def __aenter__(self) -> None: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: ... + +class Lock(_ContextManagerMixin): + if sys.version_info >= (3, 10): + def __init__(self) -> None: ... + else: + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... + + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + +class Event: + if sys.version_info >= (3, 10): + def __init__(self) -> None: ... + else: + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... + + def is_set(self) -> bool: ... + def set(self) -> None: ... + def clear(self) -> None: ... + async def wait(self) -> Literal[True]: ... + +class Condition(_ContextManagerMixin): + if sys.version_info >= (3, 10): + def __init__(self, lock: Lock | None = None) -> None: ... + else: + def __init__(self, lock: Lock | None = None, *, loop: AbstractEventLoop | None = None) -> None: ... + + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + async def wait(self) -> Literal[True]: ... + async def wait_for(self, predicate: Callable[[], _T]) -> _T: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... + +class Semaphore(_ContextManagerMixin): + _value: int + _waiters: deque[Future[Any]] + if sys.version_info >= (3, 10): + def __init__(self, value: int = 1) -> None: ... + else: + def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ... + + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + def _wake_up_next(self) -> None: ... + +class BoundedSemaphore(Semaphore): ... + +if sys.version_info >= (3, 11): + class _BarrierState(enum.Enum): # undocumented + FILLING: str + DRAINING: str + RESETTING: str + BROKEN: str + + class Barrier(_LoopBoundMixin): + def __init__(self, parties: int) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *args: Unused) -> None: ... + async def wait(self) -> int: ... + async def abort(self) -> None: ... + async def reset(self) -> None: ... + @property + def parties(self) -> int: ... + @property + def n_waiting(self) -> int: ... + @property + def broken(self) -> bool: ... 
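Aside (not part of the vendored stub or the patch): locks.pyi above gives Lock, Semaphore and Condition the async context-manager protocol via _ContextManagerMixin. A short runtime sketch of the corresponding usage:

import asyncio


async def worker(n: int, lock: asyncio.Lock, sem: asyncio.Semaphore, done: asyncio.Event) -> None:
    async with sem:        # at most two workers run this block concurrently
        async with lock:   # serialize the critical section
            print("worker", n, "holds the lock")
        if n == 4:
            done.set()


async def main() -> None:
    lock = asyncio.Lock()
    sem = asyncio.Semaphore(2)
    done = asyncio.Event()
    await asyncio.gather(*(worker(i, lock, sem, done) for i in range(5)))
    await done.wait()      # per the stub, wait() returns Literal[True]
    print("all workers finished:", done.is_set())


asyncio.run(main())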
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/log.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/log.pyi new file mode 100644 index 00000000..e1de0b3b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/log.pyi @@ -0,0 +1,3 @@ +import logging + +logger: logging.Logger diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/mixins.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/mixins.pyi new file mode 100644 index 00000000..6ebcf543 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/mixins.pyi @@ -0,0 +1,9 @@ +import sys +import threading +from typing_extensions import Never + +_global_lock: threading.Lock + +class _LoopBoundMixin: + if sys.version_info < (3, 11): + def __init__(self, *, loop: Never = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/proactor_events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/proactor_events.pyi new file mode 100644 index 00000000..33fdf84a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/proactor_events.pyi @@ -0,0 +1,74 @@ +import sys +from collections.abc import Mapping +from socket import socket +from typing import Any, ClassVar, Protocol +from typing_extensions import Literal + +from . import base_events, constants, events, futures, streams, transports + +__all__ = ("BaseProactorEventLoop",) + +if sys.version_info >= (3, 8): + class _WarnCallbackProtocol(Protocol): + def __call__( + self, message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ... + ) -> object: ... + +class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport): + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + ) -> None: ... + if sys.version_info >= (3, 8): + def __del__(self, _warn: _WarnCallbackProtocol = ...) -> None: ... + else: + def __del__(self) -> None: ... + +class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport): + if sys.version_info >= (3, 10): + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + buffer_size: int = 65536, + ) -> None: ... + else: + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + ) -> None: ... + +class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): ... +class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ... +class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ... 
+ +class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): + _sendfile_compatible: ClassVar[constants._SendfileMode] + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + ) -> None: ... + def _set_extra(self, sock: socket) -> None: ... + def can_write_eof(self) -> Literal[True]: ... + +class BaseProactorEventLoop(base_events.BaseEventLoop): + def __init__(self, proactor: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/protocols.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/protocols.pyi new file mode 100644 index 00000000..5173b74e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/protocols.pyi @@ -0,0 +1,34 @@ +from _typeshed import ReadableBuffer +from asyncio import transports +from typing import Any + +__all__ = ("BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol", "BufferedProtocol") + +class BaseProtocol: + def connection_made(self, transport: transports.BaseTransport) -> None: ... + def connection_lost(self, exc: Exception | None) -> None: ... + def pause_writing(self) -> None: ... + def resume_writing(self) -> None: ... + +class Protocol(BaseProtocol): + def data_received(self, data: bytes) -> None: ... + def eof_received(self) -> bool | None: ... + +class BufferedProtocol(BaseProtocol): + def get_buffer(self, sizehint: int) -> ReadableBuffer: ... + def buffer_updated(self, nbytes: int) -> None: ... + def eof_received(self) -> bool | None: ... + +class DatagramProtocol(BaseProtocol): + def connection_made(self, transport: transports.DatagramTransport) -> None: ... # type: ignore[override] + # addr can be a tuple[int, int] for some unusual protocols like socket.AF_NETLINK. + # Use tuple[str | Any, int] to not cause typechecking issues on most usual cases. + # This could be improved by using tuple[AnyOf[str, int], int] if the AnyOf feature is accepted. + # See https://github.com/python/typing/issues/566 + def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: ... + def error_received(self, exc: Exception) -> None: ... + +class SubprocessProtocol(BaseProtocol): + def pipe_data_received(self, fd: int, data: bytes) -> None: ... + def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: ... + def process_exited(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/queues.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/queues.pyi new file mode 100644 index 00000000..f56a0952 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/queues.pyi @@ -0,0 +1,40 @@ +import sys +from asyncio.events import AbstractEventLoop +from typing import Any, Generic, TypeVar + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty") + +class QueueEmpty(Exception): ... +class QueueFull(Exception): ... + +_T = TypeVar("_T") + +class Queue(Generic[_T]): + if sys.version_info >= (3, 10): + def __init__(self, maxsize: int = 0) -> None: ... 
+ else: + def __init__(self, maxsize: int = 0, *, loop: AbstractEventLoop | None = None) -> None: ... + + def _init(self, maxsize: int) -> None: ... + def _get(self) -> _T: ... + def _put(self, item: _T) -> None: ... + def _format(self) -> str: ... + def qsize(self) -> int: ... + @property + def maxsize(self) -> int: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + async def put(self, item: _T) -> None: ... + def put_nowait(self, item: _T) -> None: ... + async def get(self) -> _T: ... + def get_nowait(self) -> _T: ... + async def join(self) -> None: ... + def task_done(self) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, type: Any) -> GenericAlias: ... + +class PriorityQueue(Queue[_T]): ... +class LifoQueue(Queue[_T]): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/runners.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/runners.pyi new file mode 100644 index 00000000..847072b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/runners.pyi @@ -0,0 +1,35 @@ +import sys +from _typeshed import Unused +from collections.abc import Callable, Coroutine +from contextvars import Context +from typing import Any, TypeVar +from typing_extensions import Self, final + +from .events import AbstractEventLoop + +if sys.version_info >= (3, 11): + __all__ = ("Runner", "run") +else: + __all__ = ("run",) +_T = TypeVar("_T") + +if sys.version_info >= (3, 11): + @final + class Runner: + def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... + def close(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ... + +if sys.version_info >= (3, 12): + def run( + main: Coroutine[Any, Any, _T], *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ... + ) -> _T: ... + +elif sys.version_info >= (3, 8): + def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ... + +else: + def run(main: Coroutine[Any, Any, _T], *, debug: bool = False) -> _T: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/selector_events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/selector_events.pyi new file mode 100644 index 00000000..430f2dd4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/selector_events.pyi @@ -0,0 +1,8 @@ +import selectors + +from . import base_events + +__all__ = ("BaseSelectorEventLoop",) + +class BaseSelectorEventLoop(base_events.BaseEventLoop): + def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ... 
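Aside (not part of the vendored stub or the patch): queues.pyi and runners.pyi above cover asyncio.Queue and the run()/Runner entry points. A minimal producer/consumer sketch driven by asyncio.run(); Runner itself exists only on 3.11+, exactly as the stub gates it:

import asyncio


async def producer(q: "asyncio.Queue[int]") -> None:
    for i in range(3):
        await q.put(i)   # suspends while the queue is full (maxsize=2)
    await q.join()       # resumes once every item has been task_done()


async def consumer(q: "asyncio.Queue[int]") -> None:
    while True:
        item = await q.get()
        print("consumed", item)
        q.task_done()


async def main() -> None:
    q: "asyncio.Queue[int]" = asyncio.Queue(maxsize=2)
    consume = asyncio.create_task(consumer(q))
    await producer(q)
    consume.cancel()     # the consumer loops forever; stop it once drained


asyncio.run(main())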
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/sslproto.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/sslproto.pyi new file mode 100644 index 00000000..aadc7d32 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/sslproto.pyi @@ -0,0 +1,158 @@ +import ssl +import sys +from collections import deque +from collections.abc import Callable +from enum import Enum +from typing import Any, ClassVar +from typing_extensions import Literal, TypeAlias + +from . import constants, events, futures, protocols, transports + +def _create_transport_context(server_side: bool, server_hostname: str | None) -> ssl.SSLContext: ... + +if sys.version_info >= (3, 11): + SSLAgainErrors: tuple[type[ssl.SSLWantReadError], type[ssl.SSLSyscallError]] + + class SSLProtocolState(Enum): + UNWRAPPED: str + DO_HANDSHAKE: str + WRAPPED: str + FLUSHING: str + SHUTDOWN: str + + class AppProtocolState(Enum): + STATE_INIT: str + STATE_CON_MADE: str + STATE_EOF: str + STATE_CON_LOST: str + def add_flowcontrol_defaults(high: int | None, low: int | None, kb: int) -> tuple[int, int]: ... + +else: + _UNWRAPPED: Literal["UNWRAPPED"] + _DO_HANDSHAKE: Literal["DO_HANDSHAKE"] + _WRAPPED: Literal["WRAPPED"] + _SHUTDOWN: Literal["SHUTDOWN"] + +if sys.version_info < (3, 11): + class _SSLPipe: + max_size: ClassVar[int] + + _context: ssl.SSLContext + _server_side: bool + _server_hostname: str | None + _state: str + _incoming: ssl.MemoryBIO + _outgoing: ssl.MemoryBIO + _sslobj: ssl.SSLObject | None + _need_ssldata: bool + _handshake_cb: Callable[[BaseException | None], None] | None + _shutdown_cb: Callable[[], None] | None + def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: ... + @property + def context(self) -> ssl.SSLContext: ... + @property + def ssl_object(self) -> ssl.SSLObject | None: ... + @property + def need_ssldata(self) -> bool: ... + @property + def wrapped(self) -> bool: ... + def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: ... + def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ... + def feed_eof(self) -> None: ... + def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: ... + def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ... + +class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): + _sendfile_compatible: ClassVar[constants._SendfileMode] + + _loop: events.AbstractEventLoop + _ssl_protocol: SSLProtocol + _closed: bool + def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ... + def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ... + @property + def _protocol_paused(self) -> bool: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def can_write_eof(self) -> Literal[False]: ... + if sys.version_info >= (3, 11): + def get_write_buffer_limits(self) -> tuple[int, int]: ... + def get_read_buffer_limits(self) -> tuple[int, int]: ... + def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... + def get_read_buffer_size(self) -> int: ... 
+ +if sys.version_info >= (3, 11): + _SSLProtocolBase: TypeAlias = protocols.BufferedProtocol +else: + _SSLProtocolBase: TypeAlias = protocols.Protocol + +class SSLProtocol(_SSLProtocolBase): + _server_side: bool + _server_hostname: str | None + _sslcontext: ssl.SSLContext + _extra: dict[str, Any] + _write_backlog: deque[tuple[bytes, int]] + _write_buffer_size: int + _waiter: futures.Future[Any] + _loop: events.AbstractEventLoop + _app_transport: _SSLProtocolTransport + _transport: transports.BaseTransport | None + _ssl_handshake_timeout: int | None + _app_protocol: protocols.BaseProtocol + _app_protocol_is_buffer: bool + + if sys.version_info >= (3, 11): + max_size: ClassVar[int] + else: + _sslpipe: _SSLPipe | None + _session_established: bool + _call_connection_made: bool + _in_handshake: bool + _in_shutdown: bool + + if sys.version_info >= (3, 11): + def __init__( + self, + loop: events.AbstractEventLoop, + app_protocol: protocols.BaseProtocol, + sslcontext: ssl.SSLContext, + waiter: futures.Future[Any], + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> None: ... + else: + def __init__( + self, + loop: events.AbstractEventLoop, + app_protocol: protocols.BaseProtocol, + sslcontext: ssl.SSLContext, + waiter: futures.Future[Any], + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, + ) -> None: ... + + def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ... + def _wakeup_waiter(self, exc: BaseException | None = None) -> None: ... + def connection_lost(self, exc: BaseException | None) -> None: ... + def eof_received(self) -> None: ... + def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ... + def _start_shutdown(self) -> None: ... + if sys.version_info >= (3, 11): + def _write_appdata(self, list_of_data: list[bytes]) -> None: ... + else: + def _write_appdata(self, data: bytes) -> None: ... + + def _start_handshake(self) -> None: ... + def _check_handshake_timeout(self) -> None: ... + def _on_handshake_complete(self, handshake_exc: BaseException | None) -> None: ... + def _fatal_error(self, exc: BaseException, message: str = "Fatal error on transport") -> None: ... + def _abort(self) -> None: ... + if sys.version_info >= (3, 11): + def get_buffer(self, n: int) -> memoryview: ... + else: + def _finalize(self) -> None: ... + def _process_write_backlog(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/staggered.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/staggered.pyi new file mode 100644 index 00000000..3324777f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/staggered.pyi @@ -0,0 +1,10 @@ +from collections.abc import Awaitable, Callable, Iterable +from typing import Any + +from . import events + +__all__ = ("staggered_race",) + +async def staggered_race( + coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = None +) -> tuple[Any, int | None, list[Exception | None]]: ... 
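Aside (not part of the vendored stub or the patch): staggered.pyi above types asyncio.staggered.staggered_race(), the undocumented helper behind happy_eyeballs_delay in create_connection(). A hedged sketch of calling it directly on Python 3.8+ (the API is internal and may change):

import asyncio
from asyncio.staggered import staggered_race


async def attempt(delay: float, name: str) -> str:
    await asyncio.sleep(delay)
    return name


async def main() -> None:
    # The second attempt is started 0.05s after the first; the first
    # attempt to finish wins and the remaining ones are cancelled.
    winner, index, exceptions = await staggered_race(
        [lambda: attempt(0.3, "slow"), lambda: attempt(0.01, "fast")],
        delay=0.05,
    )
    print(winner, index)  # the "fast" attempt wins, at index 1


asyncio.run(main())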
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/streams.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/streams.pyi new file mode 100644 index 00000000..f30c5730 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/streams.pyi @@ -0,0 +1,170 @@ +import ssl +import sys +from _typeshed import StrPath +from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence +from typing import Any +from typing_extensions import Self, SupportsIndex, TypeAlias + +from . import events, protocols, transports +from .base_events import Server + +if sys.platform == "win32": + if sys.version_info >= (3, 8): + __all__ = ("StreamReader", "StreamWriter", "StreamReaderProtocol", "open_connection", "start_server") + else: + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "IncompleteReadError", + "LimitOverrunError", + ) +else: + if sys.version_info >= (3, 8): + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "open_unix_connection", + "start_unix_server", + ) + else: + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "IncompleteReadError", + "LimitOverrunError", + "open_unix_connection", + "start_unix_server", + ) + +_ClientConnectedCallback: TypeAlias = Callable[[StreamReader, StreamWriter], Awaitable[None] | None] + +if sys.version_info < (3, 8): + class IncompleteReadError(EOFError): + expected: int | None + partial: bytes + def __init__(self, partial: bytes, expected: int | None) -> None: ... + + class LimitOverrunError(Exception): + consumed: int + def __init__(self, message: str, consumed: int) -> None: ... + +if sys.version_info >= (3, 10): + async def open_connection( + host: str | None = None, + port: int | str | None = None, + *, + limit: int = 65536, + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_server( + client_connected_cb: _ClientConnectedCallback, + host: str | Sequence[str] | None = None, + port: int | str | None = None, + *, + limit: int = 65536, + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> Server: ... + +else: + async def open_connection( + host: str | None = None, + port: int | str | None = None, + *, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_server( + client_connected_cb: _ClientConnectedCallback, + host: str | None = None, + port: int | str | None = None, + *, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> Server: ... + +if sys.platform != "win32": + if sys.version_info >= (3, 10): + async def open_unix_connection( + path: StrPath | None = None, *, limit: int = 65536, **kwds: Any + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_unix_server( + client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, *, limit: int = 65536, **kwds: Any + ) -> Server: ... + else: + async def open_unix_connection( + path: StrPath | None = None, *, loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any + ) -> tuple[StreamReader, StreamWriter]: ... 
+ async def start_unix_server( + client_connected_cb: _ClientConnectedCallback, + path: StrPath | None = None, + *, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + **kwds: Any, + ) -> Server: ... + +class FlowControlMixin(protocols.Protocol): + def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ... + +class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): + def __init__( + self, + stream_reader: StreamReader, + client_connected_cb: _ClientConnectedCallback | None = None, + loop: events.AbstractEventLoop | None = None, + ) -> None: ... + +class StreamWriter: + def __init__( + self, + transport: transports.WriteTransport, + protocol: protocols.BaseProtocol, + reader: StreamReader | None, + loop: events.AbstractEventLoop, + ) -> None: ... + @property + def transport(self) -> transports.WriteTransport: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, data: Iterable[bytes | bytearray | memoryview]) -> None: ... + def write_eof(self) -> None: ... + def can_write_eof(self) -> bool: ... + def close(self) -> None: ... + def is_closing(self) -> bool: ... + async def wait_closed(self) -> None: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... + async def drain(self) -> None: ... + if sys.version_info >= (3, 11): + async def start_tls( + self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None + ) -> None: ... + +class StreamReader(AsyncIterator[bytes]): + def __init__(self, limit: int = 65536, loop: events.AbstractEventLoop | None = None) -> None: ... + def exception(self) -> Exception: ... + def set_exception(self, exc: Exception) -> None: ... + def set_transport(self, transport: transports.BaseTransport) -> None: ... + def feed_eof(self) -> None: ... + def at_eof(self) -> bool: ... + def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... + async def readline(self) -> bytes: ... + # Can be any buffer that supports len(); consider changing to a Protocol if PEP 688 is accepted + async def readuntil(self, separator: bytes | bytearray | memoryview = b"\n") -> bytes: ... + async def read(self, n: int = -1) -> bytes: ... + async def readexactly(self, n: int) -> bytes: ... + def __aiter__(self) -> Self: ... + async def __anext__(self) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/subprocess.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/subprocess.pyi new file mode 100644 index 00000000..b8877b36 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/subprocess.pyi @@ -0,0 +1,235 @@ +import subprocess +import sys +from _typeshed import StrOrBytesPath +from asyncio import events, protocols, streams, transports +from collections.abc import Callable, Collection +from typing import IO, Any +from typing_extensions import Literal, TypeAlias + +__all__ = ("create_subprocess_exec", "create_subprocess_shell") + +if sys.version_info >= (3, 8): + _ExecArg: TypeAlias = StrOrBytesPath +else: + _ExecArg: TypeAlias = str | bytes + +PIPE: int +STDOUT: int +DEVNULL: int + +class SubprocessStreamProtocol(streams.FlowControlMixin, protocols.SubprocessProtocol): + stdin: streams.StreamWriter | None + stdout: streams.StreamReader | None + stderr: streams.StreamReader | None + def __init__(self, limit: int, loop: events.AbstractEventLoop) -> None: ... 
+ def pipe_data_received(self, fd: int, data: bytes | str) -> None: ... + +class Process: + stdin: streams.StreamWriter | None + stdout: streams.StreamReader | None + stderr: streams.StreamReader | None + pid: int + def __init__( + self, transport: transports.BaseTransport, protocol: protocols.BaseProtocol, loop: events.AbstractEventLoop + ) -> None: ... + @property + def returncode(self) -> int | None: ... + async def wait(self) -> int: ... + def send_signal(self, signal: int) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + async def communicate(self, input: bytes | bytearray | memoryview | None = None) -> tuple[bytes, bytes]: ... + +if sys.version_info >= (3, 11): + async def create_subprocess_shell( + cmd: str | bytes, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + *, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + process_group: int | None = None, + pipesize: int = -1, + ) -> Process: ... + async def create_subprocess_exec( + program: _ExecArg, + *args: _ExecArg, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + text: bool | None = None, + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + process_group: int | None = None, + pipesize: int = -1, + ) -> Process: ... 
+ +elif sys.version_info >= (3, 10): + async def create_subprocess_shell( + cmd: str | bytes, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + *, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + pipesize: int = -1, + ) -> Process: ... + async def create_subprocess_exec( + program: _ExecArg, + *args: _ExecArg, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + text: bool | None = None, + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + pipesize: int = -1, + ) -> Process: ... + +else: # >= 3.9 + async def create_subprocess_shell( + cmd: str | bytes, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + *, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + ) -> Process: ... 
+ async def create_subprocess_exec( + program: _ExecArg, + *args: _ExecArg, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + text: bool | None = None, + executable: StrOrBytesPath | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + cwd: StrOrBytesPath | None = None, + env: subprocess._ENV | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + group: None | str | int = None, + extra_groups: None | Collection[str | int] = None, + user: None | str | int = None, + umask: int = -1, + ) -> Process: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/taskgroups.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/taskgroups.pyi new file mode 100644 index 00000000..8daa96f1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/taskgroups.pyi @@ -0,0 +1,20 @@ +# This only exists in 3.11+. See VERSIONS. + +from collections.abc import Coroutine, Generator +from contextvars import Context +from types import TracebackType +from typing import Any, TypeVar +from typing_extensions import Self + +from .tasks import Task + +__all__ = ["TaskGroup"] + +_T = TypeVar("_T") + +class TaskGroup: + async def __aenter__(self) -> Self: ... + async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... + def create_task( + self, coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None, context: Context | None = None + ) -> Task[_T]: ... 
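# Illustrative usage sketch only -- not part of the vendored typeshed data.
# It exercises the create_subprocess_exec()/Process and TaskGroup APIs stubbed
# above as used on Python 3.11+; the "echo" command assumes a POSIX system.
import asyncio


async def run_echo() -> bytes:
    proc = await asyncio.create_subprocess_exec(
        "echo", "hello", stdout=asyncio.subprocess.PIPE
    )
    stdout, _ = await proc.communicate()  # waits for the process to exit
    return stdout


async def main() -> None:
    async with asyncio.TaskGroup() as tg:  # implicitly awaits both tasks on exit
        echo_task = tg.create_task(run_echo())
        sleep_task = tg.create_task(asyncio.sleep(0.1, result="done"))
    print(echo_task.result(), sleep_task.result())


asyncio.run(main())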
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/tasks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/tasks.pyi new file mode 100644 index 00000000..0a44255a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/tasks.pyi @@ -0,0 +1,329 @@ +import concurrent.futures +import sys +from collections.abc import Awaitable, Coroutine, Generator, Iterable, Iterator +from types import FrameType +from typing import Any, Generic, TextIO, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +from .events import AbstractEventLoop +from .futures import Future + +if sys.version_info >= (3, 9): + from types import GenericAlias +if sys.version_info >= (3, 11): + from contextvars import Context + +__all__ = ( + "Task", + "create_task", + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "wait", + "wait_for", + "as_completed", + "sleep", + "gather", + "shield", + "ensure_future", + "run_coroutine_threadsafe", + "current_task", + "all_tasks", + "_register_task", + "_unregister_task", + "_enter_task", + "_leave_task", +) + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_T5 = TypeVar("_T5") +_FT = TypeVar("_FT", bound=Future[Any]) +_FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T] +_TaskYieldType: TypeAlias = Future[object] | None + +FIRST_COMPLETED = concurrent.futures.FIRST_COMPLETED +FIRST_EXCEPTION = concurrent.futures.FIRST_EXCEPTION +ALL_COMPLETED = concurrent.futures.ALL_COMPLETED + +if sys.version_info >= (3, 10): + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: ... + +else: + def as_completed( + fs: Iterable[_FutureLike[_T]], *, loop: AbstractEventLoop | None = None, timeout: float | None = None + ) -> Iterator[Future[_T]]: ... + +@overload +def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ... # type: ignore[misc] +@overload +def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ... + +# `gather()` actually returns a list with length equal to the number +# of tasks passed; however, Tuple is used similar to the annotation for +# zip() because typing does not support variadic type variables. See +# typing PR #1550 for discussion. +# +# The many type: ignores here are because the overloads overlap, +# but having overlapping overloads is the only way to get acceptable type inference in all edge cases. +if sys.version_info >= (3, 10): + @overload + def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ... # type: ignore[misc] + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: Literal[False] = False + ) -> Future[tuple[_T1, _T2]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3]]: ... 
+ @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ... # type: ignore[misc] + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: bool + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + *, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + *, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + *, + return_exceptions: bool, + ) -> Future[ + tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] + ]: ... + @overload + def gather(*coros_or_futures: _FutureLike[Any], return_exceptions: bool = False) -> Future[list[Any]]: ... # type: ignore[misc] + +else: + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False + ) -> Future[tuple[_T1]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... 
+ @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: bool + ) -> Future[tuple[_T1 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[ + tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] + ]: ... + @overload + def gather( # type: ignore[misc] + *coros_or_futures: _FutureLike[Any], loop: AbstractEventLoop | None = None, return_exceptions: bool = False + ) -> Future[list[Any]]: ... + +def run_coroutine_threadsafe(coro: _FutureLike[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... + +if sys.version_info >= (3, 10): + def shield(arg: _FutureLike[_T]) -> Future[_T]: ... + @overload + async def sleep(delay: float) -> None: ... + @overload + async def sleep(delay: float, result: _T) -> _T: ... + @overload + async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ... # type: ignore[misc] + @overload + async def wait( + fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ... + +else: + def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... + @overload + async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ... + @overload + async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ... + @overload + async def wait( # type: ignore[misc] + fs: Iterable[_FT], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", + ) -> tuple[set[_FT], set[_FT]]: ... 
+ @overload + async def wait( + fs: Iterable[Awaitable[_T]], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ... + +# mypy and pyright complain that a subclass of an invariant class shouldn't be covariant. +# While this is true in general, here it's sort-of okay to have a covariant subclass, +# since the only reason why `asyncio.Future` is invariant is the `set_result()` method, +# and `asyncio.Task.set_result()` always raises. +class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: ignore[reportGeneralTypeIssues] + if sys.version_info >= (3, 8): + def __init__( + self, + coro: Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co], + *, + loop: AbstractEventLoop = ..., + name: str | None = ..., + ) -> None: ... + else: + def __init__( + self, coro: Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co], *, loop: AbstractEventLoop = ... + ) -> None: ... + if sys.version_info >= (3, 8): + def get_coro(self) -> Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co]: ... + def get_name(self) -> str: ... + def set_name(self, __value: object) -> None: ... + + def get_stack(self, *, limit: int | None = None) -> list[FrameType]: ... + def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: ... + if sys.version_info >= (3, 11): + def cancelling(self) -> int: ... + def uncancel(self) -> int: ... + if sys.version_info < (3, 9): + @classmethod + def current_task(cls, loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... + @classmethod + def all_tasks(cls, loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... + +if sys.version_info >= (3, 11): + def create_task( + coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None, context: Context | None = None + ) -> Task[_T]: ... + +elif sys.version_info >= (3, 8): + def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None) -> Task[_T]: ... + +else: + def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T]) -> Task[_T]: ... + +def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... +def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... +def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... +def _register_task(task: Task[Any]) -> None: ... +def _unregister_task(task: Task[Any]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/threads.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/threads.pyi new file mode 100644 index 00000000..88c4fddc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/threads.pyi @@ -0,0 +1,9 @@ +from collections.abc import Callable +from typing import TypeVar +from typing_extensions import ParamSpec + +__all__ = ("to_thread",) +_P = ParamSpec("_P") +_R = TypeVar("_R") + +async def to_thread(__func: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... 
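# Illustrative usage sketch only -- not part of the vendored typeshed data.
# The per-arity gather() overloads above let type checkers infer a precise
# result tuple, and to_thread() (stubbed just above) moves a blocking call off
# the event loop; time.sleep stands in for real blocking work.
import asyncio
import time


async def get_number() -> int:
    return 1


async def get_text() -> str:
    return "a"


async def main() -> None:
    number, text = await asyncio.gather(get_number(), get_text())  # tuple[int, str]
    await asyncio.to_thread(time.sleep, 0.01)  # runs in a worker thread (3.9+)
    print(number, text)


asyncio.run(main())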
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/timeouts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/timeouts.pyi new file mode 100644 index 00000000..2d31b777 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/timeouts.pyi @@ -0,0 +1,18 @@ +from types import TracebackType +from typing_extensions import Self, final + +__all__ = ("Timeout", "timeout", "timeout_at") + +@final +class Timeout: + def __init__(self, when: float | None) -> None: ... + def when(self) -> float | None: ... + def reschedule(self, when: float | None) -> None: ... + def expired(self) -> bool: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +def timeout(delay: float | None) -> Timeout: ... +def timeout_at(when: float | None) -> Timeout: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/transports.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/transports.pyi new file mode 100644 index 00000000..531f7767 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/transports.pyi @@ -0,0 +1,47 @@ +from asyncio.events import AbstractEventLoop +from asyncio.protocols import BaseProtocol +from collections.abc import Iterable, Mapping +from socket import _Address +from typing import Any + +__all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport") + +class BaseTransport: + def __init__(self, extra: Mapping[str, Any] | None = None) -> None: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... + def is_closing(self) -> bool: ... + def close(self) -> None: ... + def set_protocol(self, protocol: BaseProtocol) -> None: ... + def get_protocol(self) -> BaseProtocol: ... + +class ReadTransport(BaseTransport): + def is_reading(self) -> bool: ... + def pause_reading(self) -> None: ... + def resume_reading(self) -> None: ... + +class WriteTransport(BaseTransport): + def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... + def get_write_buffer_size(self) -> int: ... + def get_write_buffer_limits(self) -> tuple[int, int]: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, list_of_data: Iterable[bytes | bytearray | memoryview]) -> None: ... + def write_eof(self) -> None: ... + def can_write_eof(self) -> bool: ... + def abort(self) -> None: ... + +class Transport(ReadTransport, WriteTransport): ... + +class DatagramTransport(BaseTransport): + def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: ... + def abort(self) -> None: ... + +class SubprocessTransport(BaseTransport): + def get_pid(self) -> int: ... + def get_returncode(self) -> int | None: ... + def get_pipe_transport(self, fd: int) -> BaseTransport | None: ... + def send_signal(self, signal: int) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + +class _FlowControlMixin(Transport): + def __init__(self, extra: Mapping[str, Any] | None = None, loop: AbstractEventLoop | None = None) -> None: ... 
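# Illustrative usage sketch only -- not part of the vendored typeshed data.
# It shows the Timeout context manager stubbed above (Python 3.11+); the 0.05s
# and 1s delays are arbitrary.
import asyncio


async def main() -> None:
    try:
        async with asyncio.timeout(0.05):  # or timeout_at(loop.time() + 0.05)
            await asyncio.sleep(1)
    except TimeoutError:
        print("timed out")


asyncio.run(main())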
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/trsock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/trsock.pyi new file mode 100644 index 00000000..742216a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/trsock.pyi @@ -0,0 +1,98 @@ +import socket +import sys +from _typeshed import ReadableBuffer +from builtins import type as Type # alias to avoid name clashes with property named "type" +from collections.abc import Iterable +from types import TracebackType +from typing import Any, BinaryIO, NoReturn, overload +from typing_extensions import TypeAlias + +# These are based in socket, maybe move them out into _typeshed.pyi or such +_Address: TypeAlias = socket._Address +_RetAddress: TypeAlias = Any +_WriteBuffer: TypeAlias = bytearray | memoryview +_CMSG: TypeAlias = tuple[int, int, bytes] + +class TransportSocket: + def __init__(self, sock: socket.socket) -> None: ... + @property + def family(self) -> int: ... + @property + def type(self) -> int: ... + @property + def proto(self) -> int: ... + def __getstate__(self) -> NoReturn: ... + def fileno(self) -> int: ... + def dup(self) -> socket.socket: ... + def get_inheritable(self) -> bool: ... + def shutdown(self, how: int) -> None: ... + @overload + def getsockopt(self, level: int, optname: int) -> int: ... + @overload + def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... + @overload + def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer) -> None: ... + @overload + def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ... + def getpeername(self) -> _RetAddress: ... + def getsockname(self) -> _RetAddress: ... + def getsockbyname(self) -> NoReturn: ... # This method doesn't exist on socket, yet is passed through? + def settimeout(self, value: float | None) -> None: ... + def gettimeout(self) -> float | None: ... + def setblocking(self, flag: bool) -> None: ... + if sys.version_info < (3, 11): + def _na(self, what: str) -> None: ... + def accept(self) -> tuple[socket.socket, _RetAddress]: ... + def connect(self, address: _Address) -> None: ... + def connect_ex(self, address: _Address) -> int: ... + def bind(self, address: _Address) -> None: ... + if sys.platform == "win32": + def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> None: ... + else: + def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> NoReturn: ... + + def listen(self, __backlog: int = ...) -> None: ... + def makefile(self) -> BinaryIO: ... + def sendfile(self, file: BinaryIO, offset: int = ..., count: int | None = ...) -> int: ... + def close(self) -> None: ... + def detach(self) -> int: ... + if sys.platform == "linux": + def sendmsg_afalg( + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... + ) -> int: ... + else: + def sendmsg_afalg( + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... + ) -> NoReturn: ... + + def sendmsg( + self, + __buffers: Iterable[ReadableBuffer], + __ancdata: Iterable[_CMSG] = ..., + __flags: int = ..., + __address: _Address = ..., + ) -> int: ... + @overload + def sendto(self, data: ReadableBuffer, address: _Address) -> int: ... + @overload + def sendto(self, data: ReadableBuffer, flags: int, address: _Address) -> int: ... + def send(self, data: ReadableBuffer, flags: int = ...) 
-> int: ... + def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ... + def set_inheritable(self, inheritable: bool) -> None: ... + if sys.platform == "win32": + def share(self, process_id: int) -> bytes: ... + else: + def share(self, process_id: int) -> NoReturn: ... + + def recv_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> int: ... + def recvfrom_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> tuple[int, _RetAddress]: ... + def recvmsg_into( + self, __buffers: Iterable[_WriteBuffer], __ancbufsize: int = ..., __flags: int = ... + ) -> tuple[int, list[_CMSG], int, Any]: ... + def recvmsg(self, __bufsize: int, __ancbufsize: int = ..., __flags: int = ...) -> tuple[bytes, list[_CMSG], int, Any]: ... + def recvfrom(self, bufsize: int, flags: int = ...) -> tuple[bytes, _RetAddress]: ... + def recv(self, bufsize: int, flags: int = ...) -> bytes: ... + def __enter__(self) -> socket.socket: ... + def __exit__( + self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/unix_events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/unix_events.pyi new file mode 100644 index 00000000..e28d64b5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/unix_events.pyi @@ -0,0 +1,127 @@ +import sys +import types +from abc import ABCMeta, abstractmethod +from collections.abc import Callable +from typing import Any +from typing_extensions import Literal, Self + +from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy +from .selector_events import BaseSelectorEventLoop + +# This is also technically not available on Win, +# but other parts of typeshed need this definition. +# So, it is special cased. +class AbstractChildWatcher: + @abstractmethod + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + @abstractmethod + def remove_child_handler(self, pid: int) -> bool: ... + @abstractmethod + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + @abstractmethod + def close(self) -> None: ... + @abstractmethod + def __enter__(self) -> Self: ... + @abstractmethod + def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... + if sys.version_info >= (3, 8): + @abstractmethod + def is_active(self) -> bool: ... + +if sys.platform != "win32": + if sys.version_info >= (3, 9): + __all__ = ( + "SelectorEventLoop", + "AbstractChildWatcher", + "SafeChildWatcher", + "FastChildWatcher", + "PidfdChildWatcher", + "MultiLoopChildWatcher", + "ThreadedChildWatcher", + "DefaultEventLoopPolicy", + ) + elif sys.version_info >= (3, 8): + __all__ = ( + "SelectorEventLoop", + "AbstractChildWatcher", + "SafeChildWatcher", + "FastChildWatcher", + "MultiLoopChildWatcher", + "ThreadedChildWatcher", + "DefaultEventLoopPolicy", + ) + else: + __all__ = ("SelectorEventLoop", "AbstractChildWatcher", "SafeChildWatcher", "FastChildWatcher", "DefaultEventLoopPolicy") + + # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub. + # See discussion in #7412 + class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta): + def close(self) -> None: ... + if sys.version_info >= (3, 8): + def is_active(self) -> bool: ... 
+ + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + class SafeChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... + def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + class FastChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... + def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + class _UnixSelectorEventLoop(BaseSelectorEventLoop): ... + + class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy): + def get_child_watcher(self) -> AbstractChildWatcher: ... + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... + SelectorEventLoop = _UnixSelectorEventLoop + + DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy + + if sys.version_info >= (3, 8): + from typing import Protocol + + class _Warn(Protocol): + def __call__( + self, message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ... + ) -> object: ... + + class MultiLoopChildWatcher(AbstractChildWatcher): + def is_active(self) -> bool: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + class ThreadedChildWatcher(AbstractChildWatcher): + def is_active(self) -> Literal[True]: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def __del__(self, _warn: _Warn = ...) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + if sys.version_info >= (3, 9): + class PidfdChildWatcher(AbstractChildWatcher): + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def is_active(self) -> bool: ... + def close(self) -> None: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... 
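# Illustrative usage sketch only -- not part of the vendored typeshed data.
# On non-Windows platforms the default policy hands out one of the child
# watchers stubbed above (ThreadedChildWatcher by default since 3.8); the
# whole watcher machinery is deprecated starting with Python 3.12.
import asyncio
import sys

if sys.platform != "win32":
    policy = asyncio.get_event_loop_policy()
    watcher = policy.get_child_watcher()
    print(type(watcher).__name__)  # typically "ThreadedChildWatcher"
    policy.set_child_watcher(watcher)  # re-install; also accepts None to clear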
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/windows_events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/windows_events.pyi new file mode 100644 index 00000000..2942a25c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/windows_events.pyi @@ -0,0 +1,74 @@ +import socket +import sys +from _typeshed import Incomplete, WriteableBuffer +from collections.abc import Callable +from typing import IO, Any, ClassVar, NoReturn +from typing_extensions import Literal + +from . import events, futures, proactor_events, selector_events, streams, windows_utils + +if sys.platform == "win32": + __all__ = ( + "SelectorEventLoop", + "ProactorEventLoop", + "IocpProactor", + "DefaultEventLoopPolicy", + "WindowsSelectorEventLoopPolicy", + "WindowsProactorEventLoopPolicy", + ) + + NULL: Literal[0] + INFINITE: Literal[0xFFFFFFFF] + ERROR_CONNECTION_REFUSED: Literal[1225] + ERROR_CONNECTION_ABORTED: Literal[1236] + CONNECT_PIPE_INIT_DELAY: float + CONNECT_PIPE_MAX_DELAY: float + + class PipeServer: + def __init__(self, address: str) -> None: ... + def __del__(self) -> None: ... + def closed(self) -> bool: ... + def close(self) -> None: ... + + class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): ... + + class ProactorEventLoop(proactor_events.BaseProactorEventLoop): + def __init__(self, proactor: IocpProactor | None = None) -> None: ... + async def create_pipe_connection( + self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str + ) -> tuple[proactor_events._ProactorDuplexPipeTransport, streams.StreamReaderProtocol]: ... + async def start_serving_pipe( + self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str + ) -> list[PipeServer]: ... + + class IocpProactor: + def __init__(self, concurrency: int = 0xFFFFFFFF) -> None: ... + def __del__(self) -> None: ... + def set_loop(self, loop: events.AbstractEventLoop) -> None: ... + def select(self, timeout: int | None = None) -> list[futures.Future[Any]]: ... + def recv(self, conn: socket.socket, nbytes: int, flags: int = 0) -> futures.Future[bytes]: ... + def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... + def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... + def accept(self, listener: socket.socket) -> futures.Future[Any]: ... + def connect( + self, + conn: socket.socket, + address: tuple[Incomplete, Incomplete] | tuple[Incomplete, Incomplete, Incomplete, Incomplete], + ) -> futures.Future[Any]: ... + def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... + def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... + async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ... + def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: ... + def close(self) -> None: ... + SelectorEventLoop = _WindowsSelectorEventLoop + + class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): + _loop_factory: ClassVar[type[SelectorEventLoop]] + def get_child_watcher(self) -> NoReturn: ... + def set_child_watcher(self, watcher: Any) -> NoReturn: ... + + class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): + _loop_factory: ClassVar[type[ProactorEventLoop]] + def get_child_watcher(self) -> NoReturn: ... 
+ def set_child_watcher(self, watcher: Any) -> NoReturn: ... + DefaultEventLoopPolicy = WindowsSelectorEventLoopPolicy diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/windows_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/windows_utils.pyi new file mode 100644 index 00000000..f3a82e2b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncio/windows_utils.pyi @@ -0,0 +1,57 @@ +import subprocess +import sys +from collections.abc import Callable +from types import TracebackType +from typing import Any, AnyStr, Protocol +from typing_extensions import Literal, Self + +if sys.platform == "win32": + __all__ = ("pipe", "Popen", "PIPE", "PipeHandle") + + class _WarnFunction(Protocol): + def __call__( + self, message: str, category: type[Warning] = ..., stacklevel: int = ..., source: PipeHandle = ... + ) -> object: ... + BUFSIZE: Literal[8192] + PIPE = subprocess.PIPE + STDOUT = subprocess.STDOUT + def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = ..., bufsize: int = 8192) -> tuple[int, int]: ... + + class PipeHandle: + def __init__(self, handle: int) -> None: ... + if sys.version_info >= (3, 8): + def __del__(self, _warn: _WarnFunction = ...) -> None: ... + else: + def __del__(self) -> None: ... + + def __enter__(self) -> Self: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + @property + def handle(self) -> int: ... + def fileno(self) -> int: ... + def close(self, *, CloseHandle: Callable[[int], object] = ...) -> None: ... + + class Popen(subprocess.Popen[AnyStr]): + stdin: PipeHandle | None # type: ignore[assignment] + stdout: PipeHandle | None # type: ignore[assignment] + stderr: PipeHandle | None # type: ignore[assignment] + # For simplicity we omit the full overloaded __new__ signature of + # subprocess.Popen. The arguments are mostly the same, but + # subprocess.Popen takes other positional-or-keyword arguments before + # stdin. + def __new__( + cls, + args: subprocess._CMD, + stdin: subprocess._FILE | None = ..., + stdout: subprocess._FILE | None = ..., + stderr: subprocess._FILE | None = ..., + **kwds: Any, + ) -> Self: ... + def __init__( + self, + args: subprocess._CMD, + stdin: subprocess._FILE | None = None, + stdout: subprocess._FILE | None = None, + stderr: subprocess._FILE | None = None, + **kwds: Any, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncore.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncore.pyi new file mode 100644 index 00000000..47c8e220 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/asyncore.pyi @@ -0,0 +1,89 @@ +import sys +from _typeshed import FileDescriptorLike, ReadableBuffer +from socket import socket +from typing import Any, overload +from typing_extensions import TypeAlias + +# cyclic dependence with asynchat +_MapType: TypeAlias = dict[int, Any] +_Socket: TypeAlias = socket + +socket_map: _MapType # undocumented + +class ExitNow(Exception): ... + +def read(obj: Any) -> None: ... +def write(obj: Any) -> None: ... +def readwrite(obj: Any, flags: int) -> None: ... +def poll(timeout: float = 0.0, map: _MapType | None = None) -> None: ... +def poll2(timeout: float = 0.0, map: _MapType | None = None) -> None: ... 
+ +poll3 = poll2 + +def loop(timeout: float = 30.0, use_poll: bool = False, map: _MapType | None = None, count: int | None = None) -> None: ... + +# Not really subclass of socket.socket; it's only delegation. +# It is not covariant to it. +class dispatcher: + debug: bool + connected: bool + accepting: bool + connecting: bool + closing: bool + ignore_log_types: frozenset[str] + socket: _Socket | None + def __init__(self, sock: _Socket | None = None, map: _MapType | None = None) -> None: ... + def add_channel(self, map: _MapType | None = None) -> None: ... + def del_channel(self, map: _MapType | None = None) -> None: ... + def create_socket(self, family: int = ..., type: int = ...) -> None: ... + def set_socket(self, sock: _Socket, map: _MapType | None = None) -> None: ... + def set_reuse_addr(self) -> None: ... + def readable(self) -> bool: ... + def writable(self) -> bool: ... + def listen(self, num: int) -> None: ... + def bind(self, addr: tuple[Any, ...] | str) -> None: ... + def connect(self, address: tuple[Any, ...] | str) -> None: ... + def accept(self) -> tuple[_Socket, Any] | None: ... + def send(self, data: ReadableBuffer) -> int: ... + def recv(self, buffer_size: int) -> bytes: ... + def close(self) -> None: ... + def log(self, message: Any) -> None: ... + def log_info(self, message: Any, type: str = "info") -> None: ... + def handle_read_event(self) -> None: ... + def handle_connect_event(self) -> None: ... + def handle_write_event(self) -> None: ... + def handle_expt_event(self) -> None: ... + def handle_error(self) -> None: ... + def handle_expt(self) -> None: ... + def handle_read(self) -> None: ... + def handle_write(self) -> None: ... + def handle_connect(self) -> None: ... + def handle_accept(self) -> None: ... + def handle_close(self) -> None: ... + +class dispatcher_with_send(dispatcher): + def initiate_send(self) -> None: ... + # incompatible signature: + # def send(self, data: bytes) -> int | None: ... + +def compact_traceback() -> tuple[tuple[str, str, str], type, type, str]: ... +def close_all(map: _MapType | None = None, ignore_all: bool = False) -> None: ... + +if sys.platform != "win32": + class file_wrapper: + fd: int + def __init__(self, fd: int) -> None: ... + def recv(self, bufsize: int, flags: int = ...) -> bytes: ... + def send(self, data: bytes, flags: int = ...) -> int: ... + @overload + def getsockopt(self, level: int, optname: int, buflen: None = None) -> int: ... + @overload + def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... + def read(self, bufsize: int, flags: int = ...) -> bytes: ... + def write(self, data: bytes, flags: int = ...) -> int: ... + def close(self) -> None: ... + def fileno(self) -> int: ... + + class file_dispatcher(dispatcher): + def __init__(self, fd: FileDescriptorLike, map: _MapType | None = None) -> None: ... + def set_file(self, fd: int) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/atexit.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/atexit.pyi new file mode 100644 index 00000000..ea041d7b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/atexit.pyi @@ -0,0 +1,12 @@ +from collections.abc import Callable +from typing import TypeVar +from typing_extensions import ParamSpec + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +def _clear() -> None: ... +def _ncallbacks() -> int: ... +def _run_exitfuncs() -> None: ... 
+def register(func: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: ... +def unregister(func: Callable[..., object]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/audioop.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/audioop.pyi new file mode 100644 index 00000000..b5934516 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/audioop.pyi @@ -0,0 +1,42 @@ +from typing_extensions import TypeAlias + +_AdpcmState: TypeAlias = tuple[int, int] +_RatecvState: TypeAlias = tuple[int, tuple[tuple[int, int], ...]] + +class error(Exception): ... + +def add(__fragment1: bytes, __fragment2: bytes, __width: int) -> bytes: ... +def adpcm2lin(__fragment: bytes, __width: int, __state: _AdpcmState | None) -> tuple[bytes, _AdpcmState]: ... +def alaw2lin(__fragment: bytes, __width: int) -> bytes: ... +def avg(__fragment: bytes, __width: int) -> int: ... +def avgpp(__fragment: bytes, __width: int) -> int: ... +def bias(__fragment: bytes, __width: int, __bias: int) -> bytes: ... +def byteswap(__fragment: bytes, __width: int) -> bytes: ... +def cross(__fragment: bytes, __width: int) -> int: ... +def findfactor(__fragment: bytes, __reference: bytes) -> float: ... +def findfit(__fragment: bytes, __reference: bytes) -> tuple[int, float]: ... +def findmax(__fragment: bytes, __length: int) -> int: ... +def getsample(__fragment: bytes, __width: int, __index: int) -> int: ... +def lin2adpcm(__fragment: bytes, __width: int, __state: _AdpcmState | None) -> tuple[bytes, _AdpcmState]: ... +def lin2alaw(__fragment: bytes, __width: int) -> bytes: ... +def lin2lin(__fragment: bytes, __width: int, __newwidth: int) -> bytes: ... +def lin2ulaw(__fragment: bytes, __width: int) -> bytes: ... +def max(__fragment: bytes, __width: int) -> int: ... +def maxpp(__fragment: bytes, __width: int) -> int: ... +def minmax(__fragment: bytes, __width: int) -> tuple[int, int]: ... +def mul(__fragment: bytes, __width: int, __factor: float) -> bytes: ... +def ratecv( + __fragment: bytes, + __width: int, + __nchannels: int, + __inrate: int, + __outrate: int, + __state: _RatecvState | None, + __weightA: int = 1, + __weightB: int = 0, +) -> tuple[bytes, _RatecvState]: ... +def reverse(__fragment: bytes, __width: int) -> bytes: ... +def rms(__fragment: bytes, __width: int) -> int: ... +def tomono(__fragment: bytes, __width: int, __lfactor: float, __rfactor: float) -> bytes: ... +def tostereo(__fragment: bytes, __width: int, __lfactor: float, __rfactor: float) -> bytes: ... +def ulaw2lin(__fragment: bytes, __width: int) -> bytes: ... 
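# Illustrative usage sketch only -- not part of the vendored typeshed data.
# register() as stubbed above returns the callable unchanged, so it doubles as
# a decorator; the message strings are arbitrary.
import atexit


@atexit.register
def goodbye() -> None:
    print("interpreter shutting down")


atexit.register(print, "also registered", "with positional args")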
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/base64.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/base64.pyi new file mode 100644 index 00000000..24830cbf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/base64.pyi @@ -0,0 +1,59 @@ +import sys +from _typeshed import ReadableBuffer +from typing import IO + +__all__ = [ + "encode", + "decode", + "encodebytes", + "decodebytes", + "b64encode", + "b64decode", + "b32encode", + "b32decode", + "b16encode", + "b16decode", + "b85encode", + "b85decode", + "a85encode", + "a85decode", + "standard_b64encode", + "standard_b64decode", + "urlsafe_b64encode", + "urlsafe_b64decode", +] + +if sys.version_info >= (3, 10): + __all__ += ["b32hexencode", "b32hexdecode"] + +def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: ... +def b64decode(s: str | ReadableBuffer, altchars: ReadableBuffer | None = None, validate: bool = False) -> bytes: ... +def standard_b64encode(s: ReadableBuffer) -> bytes: ... +def standard_b64decode(s: str | ReadableBuffer) -> bytes: ... +def urlsafe_b64encode(s: ReadableBuffer) -> bytes: ... +def urlsafe_b64decode(s: str | ReadableBuffer) -> bytes: ... +def b32encode(s: ReadableBuffer) -> bytes: ... +def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: bytes | None = None) -> bytes: ... +def b16encode(s: ReadableBuffer) -> bytes: ... +def b16decode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... + +if sys.version_info >= (3, 10): + def b32hexencode(s: ReadableBuffer) -> bytes: ... + def b32hexdecode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... + +def a85encode( + b: ReadableBuffer, *, foldspaces: bool = False, wrapcol: int = 0, pad: bool = False, adobe: bool = False +) -> bytes: ... +def a85decode( + b: str | ReadableBuffer, *, foldspaces: bool = False, adobe: bool = False, ignorechars: bytearray | bytes = b" \t\n\r\x0b" +) -> bytes: ... +def b85encode(b: ReadableBuffer, pad: bool = False) -> bytes: ... +def b85decode(b: str | ReadableBuffer) -> bytes: ... +def decode(input: IO[bytes], output: IO[bytes]) -> None: ... +def encode(input: IO[bytes], output: IO[bytes]) -> None: ... +def encodebytes(s: ReadableBuffer) -> bytes: ... +def decodebytes(s: ReadableBuffer) -> bytes: ... + +if sys.version_info < (3, 9): + def encodestring(s: ReadableBuffer) -> bytes: ... + def decodestring(s: ReadableBuffer) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/bdb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/bdb.pyi new file mode 100644 index 00000000..2a1fdddf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/bdb.pyi @@ -0,0 +1,102 @@ +import sys +from _typeshed import ExcInfo, TraceFunction +from collections.abc import Callable, Iterable, Mapping +from types import CodeType, FrameType, TracebackType +from typing import IO, Any, SupportsInt, TypeVar +from typing_extensions import Literal, ParamSpec + +__all__ = ["BdbQuit", "Bdb", "Breakpoint"] + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +GENERATOR_AND_COROUTINE_FLAGS: Literal[672] + +class BdbQuit(Exception): ... 
+ +class Bdb: + skip: set[str] | None + breaks: dict[str, list[int]] + fncache: dict[str, str] + frame_returning: FrameType | None + botframe: FrameType | None + quitting: bool + stopframe: FrameType | None + returnframe: FrameType | None + stoplineno: int + def __init__(self, skip: Iterable[str] | None = None) -> None: ... + def canonic(self, filename: str) -> str: ... + def reset(self) -> None: ... + def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> TraceFunction: ... + def dispatch_line(self, frame: FrameType) -> TraceFunction: ... + def dispatch_call(self, frame: FrameType, arg: None) -> TraceFunction: ... + def dispatch_return(self, frame: FrameType, arg: Any) -> TraceFunction: ... + def dispatch_exception(self, frame: FrameType, arg: ExcInfo) -> TraceFunction: ... + def is_skipped_module(self, module_name: str) -> bool: ... + def stop_here(self, frame: FrameType) -> bool: ... + def break_here(self, frame: FrameType) -> bool: ... + def do_clear(self, arg: Any) -> bool | None: ... + def break_anywhere(self, frame: FrameType) -> bool: ... + def user_call(self, frame: FrameType, argument_list: None) -> None: ... + def user_line(self, frame: FrameType) -> None: ... + def user_return(self, frame: FrameType, return_value: Any) -> None: ... + def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: ... + def set_until(self, frame: FrameType, lineno: int | None = None) -> None: ... + def set_step(self) -> None: ... + def set_next(self, frame: FrameType) -> None: ... + def set_return(self, frame: FrameType) -> None: ... + def set_trace(self, frame: FrameType | None = None) -> None: ... + def set_continue(self) -> None: ... + def set_quit(self) -> None: ... + def set_break( + self, filename: str, lineno: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None + ) -> None: ... + def clear_break(self, filename: str, lineno: int) -> None: ... + def clear_bpbynumber(self, arg: SupportsInt) -> None: ... + def clear_all_file_breaks(self, filename: str) -> None: ... + def clear_all_breaks(self) -> None: ... + def get_bpbynumber(self, arg: SupportsInt) -> Breakpoint: ... + def get_break(self, filename: str, lineno: int) -> bool: ... + def get_breaks(self, filename: str, lineno: int) -> list[Breakpoint]: ... + def get_file_breaks(self, filename: str) -> list[Breakpoint]: ... + def get_all_breaks(self) -> list[Breakpoint]: ... + def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: ... + def format_stack_entry(self, frame_lineno: int, lprefix: str = ": ") -> str: ... + def run( + self, cmd: str | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None + ) -> None: ... + def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... + def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: ... + def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... + +class Breakpoint: + next: int + bplist: dict[tuple[str, int], list[Breakpoint]] + bpbynumber: list[Breakpoint | None] + + funcname: str | None + func_first_executable_line: int | None + file: str + line: int + temporary: bool + cond: str | None + enabled: bool + ignore: int + hits: int + number: int + def __init__( + self, file: str, line: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None + ) -> None: ... 
+ if sys.version_info >= (3, 11): + @staticmethod + def clearBreakpoints() -> None: ... + + def deleteMe(self) -> None: ... + def enable(self) -> None: ... + def disable(self) -> None: ... + def bpprint(self, out: IO[str] | None = None) -> None: ... + def bpformat(self) -> str: ... + +def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: ... +def effective(file: str, line: int, frame: FrameType) -> tuple[Breakpoint, bool] | tuple[None, None]: ... +def set_trace() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/binascii.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/binascii.pyi new file mode 100644 index 00000000..759b6c39 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/binascii.pyi @@ -0,0 +1,44 @@ +import sys +from _typeshed import ReadableBuffer +from typing_extensions import TypeAlias + +# Many functions in binascii accept buffer objects +# or ASCII-only strings. +_AsciiBuffer: TypeAlias = str | ReadableBuffer + +def a2b_uu(__data: _AsciiBuffer) -> bytes: ... +def b2a_uu(__data: ReadableBuffer, *, backtick: bool = False) -> bytes: ... + +if sys.version_info >= (3, 11): + def a2b_base64(__data: _AsciiBuffer, *, strict_mode: bool = False) -> bytes: ... + +else: + def a2b_base64(__data: _AsciiBuffer) -> bytes: ... + +def b2a_base64(__data: ReadableBuffer, *, newline: bool = True) -> bytes: ... +def a2b_qp(data: _AsciiBuffer, header: bool = False) -> bytes: ... +def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, header: bool = False) -> bytes: ... + +if sys.version_info < (3, 11): + def a2b_hqx(__data: _AsciiBuffer) -> bytes: ... + def rledecode_hqx(__data: ReadableBuffer) -> bytes: ... + def rlecode_hqx(__data: ReadableBuffer) -> bytes: ... + def b2a_hqx(__data: ReadableBuffer) -> bytes: ... + +def crc_hqx(__data: ReadableBuffer, __crc: int) -> int: ... +def crc32(__data: ReadableBuffer, __crc: int = 0) -> int: ... + +if sys.version_info >= (3, 8): + # sep must be str or bytes, not bytearray or any other buffer + def b2a_hex(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = ...) -> bytes: ... + def hexlify(data: ReadableBuffer, sep: str | bytes = ..., bytes_per_sep: int = ...) -> bytes: ... + +else: + def b2a_hex(__data: ReadableBuffer) -> bytes: ... + def hexlify(__data: ReadableBuffer) -> bytes: ... + +def a2b_hex(__hexstr: _AsciiBuffer) -> bytes: ... +def unhexlify(__hexstr: _AsciiBuffer) -> bytes: ... + +class Error(ValueError): ... +class Incomplete(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/binhex.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/binhex.pyi new file mode 100644 index 00000000..e0993c84 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/binhex.pyi @@ -0,0 +1,45 @@ +from _typeshed import _BufferWithLen +from typing import IO, Any +from typing_extensions import Literal, TypeAlias + +__all__ = ["binhex", "hexbin", "Error"] + +class Error(Exception): ... + +REASONABLY_LARGE: Literal[32768] +LINELEN: Literal[64] +RUNCHAR: Literal[b"\x90"] + +class FInfo: + Type: str + Creator: str + Flags: int + +_FileInfoTuple: TypeAlias = tuple[str, FInfo, int, int] +_FileHandleUnion: TypeAlias = str | IO[bytes] + +def getfileinfo(name: str) -> _FileInfoTuple: ... + +class openrsrc: + def __init__(self, *args: Any) -> None: ... + def read(self, *args: Any) -> bytes: ... 
+ def write(self, *args: Any) -> None: ... + def close(self) -> None: ... + +class BinHex: + def __init__(self, name_finfo_dlen_rlen: _FileInfoTuple, ofp: _FileHandleUnion) -> None: ... + def write(self, data: _BufferWithLen) -> None: ... + def close_data(self) -> None: ... + def write_rsrc(self, data: _BufferWithLen) -> None: ... + def close(self) -> None: ... + +def binhex(inp: str, out: str) -> None: ... + +class HexBin: + def __init__(self, ifp: _FileHandleUnion) -> None: ... + def read(self, *n: int) -> bytes: ... + def close_data(self) -> None: ... + def read_rsrc(self, *n: int) -> bytes: ... + def close(self) -> None: ... + +def hexbin(inp: str, out: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/bisect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/bisect.pyi new file mode 100644 index 00000000..60dfc48d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/bisect.pyi @@ -0,0 +1,4 @@ +from _bisect import * + +bisect = bisect_right +insort = insort_right diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/builtins.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/builtins.pyi new file mode 100644 index 00000000..6f26df3b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/builtins.pyi @@ -0,0 +1,2069 @@ +import _ast +import _typeshed +import sys +import types +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import ( + AnyStr_co, + FileDescriptorOrPath, + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, + SupportsAdd, + SupportsAiter, + SupportsAnext, + SupportsDivMod, + SupportsIter, + SupportsKeysAndGetItem, + SupportsLenAndGetItem, + SupportsNext, + SupportsRAdd, + SupportsRDivMod, + SupportsRichComparison, + SupportsRichComparisonT, + SupportsTrunc, + SupportsWrite, +) +from collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet, Reversible, Set as AbstractSet, Sized +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper +from types import CodeType, TracebackType, _Cell + +# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} are imported from collections.abc in builtins.pyi +from typing import ( # noqa: Y022 + IO, + Any, + BinaryIO, + ByteString, + ClassVar, + Generic, + Mapping, + MutableMapping, + MutableSequence, + NoReturn, + Protocol, + Sequence, + SupportsAbs, + SupportsBytes, + SupportsComplex, + SupportsFloat, + SupportsInt, + TypeVar, + overload, + type_check_only, +) +from typing_extensions import Literal, LiteralString, Self, SupportsIndex, TypeAlias, TypeGuard, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) +_R_co = TypeVar("_R_co", covariant=True) +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_S = TypeVar("_S") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_T5 = TypeVar("_T5") +_SupportsNextT = TypeVar("_SupportsNextT", bound=SupportsNext[Any], covariant=True) +_SupportsAnextT = TypeVar("_SupportsAnextT", bound=SupportsAnext[Any], covariant=True) +_AwaitableT = TypeVar("_AwaitableT", bound=Awaitable[Any]) +_AwaitableT_co = TypeVar("_AwaitableT_co", 
bound=Awaitable[Any], covariant=True) + +class object: + __doc__: str | None + __dict__: dict[str, Any] + __module__: str + __annotations__: dict[str, Any] + @property + def __class__(self) -> type[Self]: ... + # Ignore errors about type mismatch between property getter and setter + @__class__.setter + def __class__(self, __type: type[object]) -> None: ... # noqa: F811 + def __init__(self) -> None: ... + def __new__(cls) -> Self: ... + # N.B. `object.__setattr__` and `object.__delattr__` are heavily special-cased by type checkers. + # Overriding them in subclasses has different semantics, even if the override has an identical signature. + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __delattr__(self, __name: str) -> None: ... + def __eq__(self, __o: object) -> bool: ... + def __ne__(self, __o: object) -> bool: ... + def __str__(self) -> str: ... # noqa: Y029 + def __repr__(self) -> str: ... # noqa: Y029 + def __hash__(self) -> int: ... + def __format__(self, __format_spec: str) -> str: ... + def __getattribute__(self, __name: str) -> Any: ... + def __sizeof__(self) -> int: ... + # return type of pickle methods is rather hard to express in the current type system + # see #6661 and https://docs.python.org/3/library/pickle.html#object.__reduce__ + def __reduce__(self) -> str | tuple[Any, ...]: ... + if sys.version_info >= (3, 8): + def __reduce_ex__(self, __protocol: SupportsIndex) -> str | tuple[Any, ...]: ... + else: + def __reduce_ex__(self, __protocol: int) -> str | tuple[Any, ...]: ... + + def __dir__(self) -> Iterable[str]: ... + def __init_subclass__(cls) -> None: ... + +class staticmethod(Generic[_R_co]): + @property + def __func__(self) -> Callable[..., _R_co]: ... + @property + def __isabstractmethod__(self) -> bool: ... + def __init__(self: staticmethod[_R_co], __f: Callable[..., _R_co]) -> None: ... + def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[..., _R_co]: ... + if sys.version_info >= (3, 10): + __name__: str + __qualname__: str + @property + def __wrapped__(self) -> Callable[..., _R_co]: ... + def __call__(self, *args: Any, **kwargs: Any) -> _R_co: ... + +class classmethod(Generic[_R_co]): + @property + def __func__(self) -> Callable[..., _R_co]: ... + @property + def __isabstractmethod__(self) -> bool: ... + def __init__(self: classmethod[_R_co], __f: Callable[..., _R_co]) -> None: ... + def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[..., _R_co]: ... + if sys.version_info >= (3, 10): + __name__: str + __qualname__: str + @property + def __wrapped__(self) -> Callable[..., _R_co]: ... + +class type: + @property + def __base__(self) -> type: ... + __bases__: tuple[type, ...] + @property + def __basicsize__(self) -> int: ... + @property + def __dict__(self) -> types.MappingProxyType[str, Any]: ... # type: ignore[override] + @property + def __dictoffset__(self) -> int: ... + @property + def __flags__(self) -> int: ... + @property + def __itemsize__(self) -> int: ... + __module__: str + @property + def __mro__(self) -> tuple[type, ...]: ... + __name__: str + __qualname__: str + @property + def __text_signature__(self) -> str | None: ... + @property + def __weakrefoffset__(self) -> int: ... + @overload + def __init__(self, __o: object) -> None: ... + @overload + def __init__(self, __name: str, __bases: tuple[type, ...], __dict: dict[str, Any], **kwds: Any) -> None: ... + @overload + def __new__(cls, __o: object) -> type: ... 
+ @overload + def __new__( + cls: type[_typeshed.Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwds: Any + ) -> _typeshed.Self: ... + def __call__(self, *args: Any, **kwds: Any) -> Any: ... + def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]: ... + # Note: the documentation doesn't specify what the return type is, the standard + # implementation seems to be returning a list. + def mro(self) -> list[type]: ... + def __instancecheck__(self, __instance: Any) -> bool: ... + def __subclasscheck__(self, __subclass: type) -> bool: ... + @classmethod + def __prepare__(metacls, __name: str, __bases: tuple[type, ...], **kwds: Any) -> Mapping[str, object]: ... + if sys.version_info >= (3, 10): + def __or__(self, __t: Any) -> types.UnionType: ... + def __ror__(self, __t: Any) -> types.UnionType: ... + +class super: + @overload + def __init__(self, __t: Any, __obj: Any) -> None: ... + @overload + def __init__(self, __t: Any) -> None: ... + @overload + def __init__(self) -> None: ... + +_PositiveInteger: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] +_NegativeInteger: TypeAlias = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20] +_LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed + +class int: + @overload + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> Self: ... + @overload + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> Self: ... + if sys.version_info >= (3, 8): + def as_integer_ratio(self) -> tuple[int, Literal[1]]: ... + + @property + def real(self) -> int: ... + @property + def imag(self) -> Literal[0]: ... + @property + def numerator(self) -> int: ... + @property + def denominator(self) -> Literal[1]: ... + def conjugate(self) -> int: ... + def bit_length(self) -> int: ... + if sys.version_info >= (3, 10): + def bit_count(self) -> int: ... + + if sys.version_info >= (3, 11): + def to_bytes( + self, length: SupportsIndex = 1, byteorder: Literal["little", "big"] = "big", *, signed: bool = False + ) -> bytes: ... + @classmethod + def from_bytes( + cls, + bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, + byteorder: Literal["little", "big"] = "big", + *, + signed: bool = False, + ) -> Self: ... + else: + def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: ... + @classmethod + def from_bytes( + cls, + bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, + byteorder: Literal["little", "big"], + *, + signed: bool = False, + ) -> Self: ... + + def __add__(self, __x: int) -> int: ... + def __sub__(self, __x: int) -> int: ... + def __mul__(self, __x: int) -> int: ... + def __floordiv__(self, __x: int) -> int: ... + def __truediv__(self, __x: int) -> float: ... + def __mod__(self, __x: int) -> int: ... + def __divmod__(self, __x: int) -> tuple[int, int]: ... + def __radd__(self, __x: int) -> int: ... + def __rsub__(self, __x: int) -> int: ... + def __rmul__(self, __x: int) -> int: ... + def __rfloordiv__(self, __x: int) -> int: ... + def __rtruediv__(self, __x: int) -> float: ... + def __rmod__(self, __x: int) -> int: ... + def __rdivmod__(self, __x: int) -> tuple[int, int]: ... + @overload + def __pow__(self, __x: Literal[0]) -> Literal[1]: ... 
+ @overload + def __pow__(self, __x: Literal[0], __modulo: None) -> Literal[1]: ... + @overload + def __pow__(self, __x: _PositiveInteger, __modulo: None = None) -> int: ... + @overload + def __pow__(self, __x: _NegativeInteger, __modulo: None = None) -> float: ... + # positive x -> int; negative x -> float + # return type must be Any as `int | float` causes too many false-positive errors + @overload + def __pow__(self, __x: int, __modulo: None = None) -> Any: ... + @overload + def __pow__(self, __x: int, __modulo: int) -> int: ... + def __rpow__(self, __x: int, __mod: int | None = None) -> Any: ... + def __and__(self, __n: int) -> int: ... + def __or__(self, __n: int) -> int: ... + def __xor__(self, __n: int) -> int: ... + def __lshift__(self, __n: int) -> int: ... + def __rshift__(self, __n: int) -> int: ... + def __rand__(self, __n: int) -> int: ... + def __ror__(self, __n: int) -> int: ... + def __rxor__(self, __n: int) -> int: ... + def __rlshift__(self, __n: int) -> int: ... + def __rrshift__(self, __n: int) -> int: ... + def __neg__(self) -> int: ... + def __pos__(self) -> int: ... + def __invert__(self) -> int: ... + def __trunc__(self) -> int: ... + def __ceil__(self) -> int: ... + def __floor__(self) -> int: ... + def __round__(self, __ndigits: SupportsIndex = ...) -> int: ... + def __getnewargs__(self) -> tuple[int]: ... + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: int) -> bool: ... + def __le__(self, __x: int) -> bool: ... + def __gt__(self, __x: int) -> bool: ... + def __ge__(self, __x: int) -> bool: ... + def __float__(self) -> float: ... + def __int__(self) -> int: ... + def __abs__(self) -> int: ... + def __bool__(self) -> bool: ... + def __index__(self) -> int: ... + +class float: + def __new__(cls, __x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + def hex(self) -> str: ... + def is_integer(self) -> bool: ... + @classmethod + def fromhex(cls, __s: str) -> Self: ... + @property + def real(self) -> float: ... + @property + def imag(self) -> float: ... + def conjugate(self) -> float: ... + def __add__(self, __x: float) -> float: ... + def __sub__(self, __x: float) -> float: ... + def __mul__(self, __x: float) -> float: ... + def __floordiv__(self, __x: float) -> float: ... + def __truediv__(self, __x: float) -> float: ... + def __mod__(self, __x: float) -> float: ... + def __divmod__(self, __x: float) -> tuple[float, float]: ... + @overload + def __pow__(self, __x: int, __mod: None = None) -> float: ... + # positive x -> float; negative x -> complex + # return type must be Any as `float | complex` causes too many false-positive errors + @overload + def __pow__(self, __x: float, __mod: None = None) -> Any: ... + def __radd__(self, __x: float) -> float: ... + def __rsub__(self, __x: float) -> float: ... + def __rmul__(self, __x: float) -> float: ... + def __rfloordiv__(self, __x: float) -> float: ... + def __rtruediv__(self, __x: float) -> float: ... + def __rmod__(self, __x: float) -> float: ... + def __rdivmod__(self, __x: float) -> tuple[float, float]: ... + @overload + def __rpow__(self, __x: _PositiveInteger, __modulo: None = None) -> float: ... + @overload + def __rpow__(self, __x: _NegativeInteger, __mod: None = None) -> complex: ... + # Returning `complex` for the general case gives too many false-positive errors. + @overload + def __rpow__(self, __x: float, __mod: None = None) -> Any: ... 
+ def __getnewargs__(self) -> tuple[float]: ... + def __trunc__(self) -> int: ... + if sys.version_info >= (3, 9): + def __ceil__(self) -> int: ... + def __floor__(self) -> int: ... + + @overload + def __round__(self, __ndigits: None = None) -> int: ... + @overload + def __round__(self, __ndigits: SupportsIndex) -> float: ... + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: float) -> bool: ... + def __le__(self, __x: float) -> bool: ... + def __gt__(self, __x: float) -> bool: ... + def __ge__(self, __x: float) -> bool: ... + def __neg__(self) -> float: ... + def __pos__(self) -> float: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __abs__(self) -> float: ... + def __bool__(self) -> bool: ... + +class complex: + if sys.version_info >= (3, 8): + # Python doesn't currently accept SupportsComplex for the second argument + @overload + def __new__( + cls, + real: complex | SupportsComplex | SupportsFloat | SupportsIndex = ..., + imag: complex | SupportsFloat | SupportsIndex = ..., + ) -> Self: ... + @overload + def __new__(cls, real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... + else: + @overload + def __new__(cls, real: complex | SupportsComplex | SupportsFloat = ..., imag: complex | SupportsFloat = ...) -> Self: ... + @overload + def __new__(cls, real: str | SupportsComplex | SupportsFloat | complex) -> Self: ... + + @property + def real(self) -> float: ... + @property + def imag(self) -> float: ... + def conjugate(self) -> complex: ... + def __add__(self, __x: complex) -> complex: ... + def __sub__(self, __x: complex) -> complex: ... + def __mul__(self, __x: complex) -> complex: ... + def __pow__(self, __x: complex, __mod: None = None) -> complex: ... + def __truediv__(self, __x: complex) -> complex: ... + def __radd__(self, __x: complex) -> complex: ... + def __rsub__(self, __x: complex) -> complex: ... + def __rmul__(self, __x: complex) -> complex: ... + def __rpow__(self, __x: complex, __mod: None = None) -> complex: ... + def __rtruediv__(self, __x: complex) -> complex: ... + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __neg__(self) -> complex: ... + def __pos__(self) -> complex: ... + def __abs__(self) -> float: ... + def __bool__(self) -> bool: ... + if sys.version_info >= (3, 11): + def __complex__(self) -> complex: ... + +class _FormatMapMapping(Protocol): + def __getitem__(self, __key: str) -> Any: ... + +class _TranslateTable(Protocol): + def __getitem__(self, __key: int) -> str | int | None: ... + +class str(Sequence[str]): + @overload + def __new__(cls, object: object = ...) -> Self: ... + @overload + def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... + @overload + def capitalize(self: LiteralString) -> LiteralString: ... + @overload + def capitalize(self) -> str: ... # type: ignore[misc] + @overload + def casefold(self: LiteralString) -> LiteralString: ... + @overload + def casefold(self) -> str: ... # type: ignore[misc] + @overload + def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload + def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... 
+ def endswith( + self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> bool: ... + if sys.version_info >= (3, 8): + @overload + def expandtabs(self: LiteralString, tabsize: SupportsIndex = 8) -> LiteralString: ... + @overload + def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] + else: + @overload + def expandtabs(self: LiteralString, tabsize: int = 8) -> LiteralString: ... + @overload + def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] + + def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + @overload + def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore[misc] + def format_map(self, map: _FormatMapMapping) -> str: ... + def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdecimal(self) -> bool: ... + def isdigit(self) -> bool: ... + def isidentifier(self) -> bool: ... + def islower(self) -> bool: ... + def isnumeric(self) -> bool: ... + def isprintable(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + @overload + def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... + @overload + def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] + @overload + def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... + @overload + def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + @overload + def lower(self: LiteralString) -> LiteralString: ... + @overload + def lower(self) -> str: ... # type: ignore[misc] + @overload + def lstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload + def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload + def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def replace( + self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = -1 + ) -> LiteralString: ... + @overload + def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] + if sys.version_info >= (3, 9): + @overload + def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... + @overload + def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] + @overload + def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... + @overload + def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] + + def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = " ") -> LiteralString: ... 
+ @overload + def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + @overload + def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload + def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def rsplit(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + @overload + def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def rstrip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload + def rstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def split(self: LiteralString, sep: LiteralString | None = None, maxsplit: SupportsIndex = -1) -> list[LiteralString]: ... + @overload + def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + @overload + def splitlines(self: LiteralString, keepends: bool = False) -> list[LiteralString]: ... + @overload + def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] + def startswith( + self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> bool: ... + @overload + def strip(self: LiteralString, __chars: LiteralString | None = None) -> LiteralString: ... + @overload + def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + @overload + def swapcase(self: LiteralString) -> LiteralString: ... + @overload + def swapcase(self) -> str: ... # type: ignore[misc] + @overload + def title(self: LiteralString) -> LiteralString: ... + @overload + def title(self) -> str: ... # type: ignore[misc] + def translate(self, __table: _TranslateTable) -> str: ... + @overload + def upper(self: LiteralString) -> LiteralString: ... + @overload + def upper(self) -> str: ... # type: ignore[misc] + @overload + def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... + @overload + def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] + @staticmethod + @overload + def maketrans(__x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... + @staticmethod + @overload + def maketrans(__x: str, __y: str) -> dict[int, int]: ... + @staticmethod + @overload + def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... + @overload + def __add__(self: LiteralString, __s: LiteralString) -> LiteralString: ... + @overload + def __add__(self, __s: str) -> str: ... # type: ignore[misc] + # Incompatible with Sequence.__contains__ + def __contains__(self, __o: str) -> bool: ... # type: ignore[override] + def __eq__(self, __x: object) -> bool: ... + def __ge__(self, __x: str) -> bool: ... + def __getitem__(self, __i: SupportsIndex | slice) -> str: ... + def __gt__(self, __x: str) -> bool: ... + @overload + def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... + @overload + def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] + def __le__(self, __x: str) -> bool: ... + def __len__(self) -> int: ... + def __lt__(self, __x: str) -> bool: ... + @overload + def __mod__(self: LiteralString, __x: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... + @overload + def __mod__(self, __x: Any) -> str: ... 
# type: ignore[misc] + @overload + def __mul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload + def __mul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] + def __ne__(self, __x: object) -> bool: ... + @overload + def __rmul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload + def __rmul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] + def __getnewargs__(self) -> tuple[str]: ... + +class bytes(ByteString): + @overload + def __new__(cls, __o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer) -> Self: ... + @overload + def __new__(cls, __string: str, encoding: str, errors: str = ...) -> Self: ... + @overload + def __new__(cls) -> Self: ... + def capitalize(self) -> bytes: ... + def center(self, __width: SupportsIndex, __fillchar: bytes = b" ") -> bytes: ... + def count( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... + def endswith( + self, + __suffix: ReadableBuffer | tuple[ReadableBuffer, ...], + __start: SupportsIndex | None = ..., + __end: SupportsIndex | None = ..., + ) -> bool: ... + if sys.version_info >= (3, 8): + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: ... + else: + def expandtabs(self, tabsize: int = ...) -> bytes: ... + + def find( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + if sys.version_info >= (3, 8): + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: ... + else: + def hex(self) -> str: ... + + def index( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... + def islower(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def join(self, __iterable_of_bytes: Iterable[ReadableBuffer]) -> bytes: ... + def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytes: ... + def lower(self) -> bytes: ... + def lstrip(self, __bytes: ReadableBuffer | None = None) -> bytes: ... + def partition(self, __sep: ReadableBuffer) -> tuple[bytes, bytes, bytes]: ... + def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = -1) -> bytes: ... + if sys.version_info >= (3, 9): + def removeprefix(self, __prefix: ReadableBuffer) -> bytes: ... + def removesuffix(self, __suffix: ReadableBuffer) -> bytes: ... + + def rfind( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def rindex( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytes: ... + def rpartition(self, __sep: ReadableBuffer) -> tuple[bytes, bytes, bytes]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... + def rstrip(self, __bytes: ReadableBuffer | None = None) -> bytes: ... + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... 
+ def splitlines(self, keepends: bool = False) -> list[bytes]: ... + def startswith( + self, + __prefix: ReadableBuffer | tuple[ReadableBuffer, ...], + __start: SupportsIndex | None = ..., + __end: SupportsIndex | None = ..., + ) -> bool: ... + def strip(self, __bytes: ReadableBuffer | None = None) -> bytes: ... + def swapcase(self) -> bytes: ... + def title(self) -> bytes: ... + def translate(self, __table: ReadableBuffer | None, delete: bytes = b"") -> bytes: ... + def upper(self) -> bytes: ... + def zfill(self, __width: SupportsIndex) -> bytes: ... + @classmethod + def fromhex(cls, __s: str) -> Self: ... + @staticmethod + def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[int]: ... + @overload + def __getitem__(self, __i: SupportsIndex) -> int: ... + @overload + def __getitem__(self, __s: slice) -> bytes: ... + def __add__(self, __s: ReadableBuffer) -> bytes: ... + def __mul__(self, __n: SupportsIndex) -> bytes: ... + def __rmul__(self, __n: SupportsIndex) -> bytes: ... + def __mod__(self, __value: Any) -> bytes: ... + # Incompatible with Sequence.__contains__ + def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... # type: ignore[override] + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: bytes) -> bool: ... + def __le__(self, __x: bytes) -> bool: ... + def __gt__(self, __x: bytes) -> bool: ... + def __ge__(self, __x: bytes) -> bool: ... + def __getnewargs__(self) -> tuple[bytes]: ... + if sys.version_info >= (3, 11): + def __bytes__(self) -> bytes: ... + +class bytearray(MutableSequence[int], ByteString): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, __ints: Iterable[SupportsIndex] | SupportsIndex | ReadableBuffer) -> None: ... + @overload + def __init__(self, __string: str, encoding: str, errors: str = ...) -> None: ... + def append(self, __item: SupportsIndex) -> None: ... + def capitalize(self) -> bytearray: ... + def center(self, __width: SupportsIndex, __fillchar: bytes = b" ") -> bytearray: ... + def count( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def copy(self) -> bytearray: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... + def endswith( + self, + __suffix: ReadableBuffer | tuple[ReadableBuffer, ...], + __start: SupportsIndex | None = ..., + __end: SupportsIndex | None = ..., + ) -> bool: ... + if sys.version_info >= (3, 8): + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: ... + else: + def expandtabs(self, tabsize: int = ...) -> bytearray: ... + + def extend(self, __iterable_of_ints: Iterable[SupportsIndex]) -> None: ... + def find( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + if sys.version_info >= (3, 8): + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: ... + else: + def hex(self) -> str: ... + + def index( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def insert(self, __index: SupportsIndex, __item: SupportsIndex) -> None: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... + def islower(self) -> bool: ... 
+ def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def join(self, __iterable_of_bytes: Iterable[ReadableBuffer]) -> bytearray: ... + def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytearray: ... + def lower(self) -> bytearray: ... + def lstrip(self, __bytes: ReadableBuffer | None = None) -> bytearray: ... + def partition(self, __sep: ReadableBuffer) -> tuple[bytearray, bytearray, bytearray]: ... + def pop(self, __index: int = -1) -> int: ... + def remove(self, __value: int) -> None: ... + if sys.version_info >= (3, 9): + def removeprefix(self, __prefix: ReadableBuffer) -> bytearray: ... + def removesuffix(self, __suffix: ReadableBuffer) -> bytearray: ... + + def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = -1) -> bytearray: ... + def rfind( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def rindex( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytearray: ... + def rpartition(self, __sep: ReadableBuffer) -> tuple[bytearray, bytearray, bytearray]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def rstrip(self, __bytes: ReadableBuffer | None = None) -> bytearray: ... + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def splitlines(self, keepends: bool = False) -> list[bytearray]: ... + def startswith( + self, + __prefix: ReadableBuffer | tuple[ReadableBuffer, ...], + __start: SupportsIndex | None = ..., + __end: SupportsIndex | None = ..., + ) -> bool: ... + def strip(self, __bytes: ReadableBuffer | None = None) -> bytearray: ... + def swapcase(self) -> bytearray: ... + def title(self) -> bytearray: ... + def translate(self, __table: ReadableBuffer | None, delete: bytes = b"") -> bytearray: ... + def upper(self) -> bytearray: ... + def zfill(self, __width: SupportsIndex) -> bytearray: ... + @classmethod + def fromhex(cls, __string: str) -> Self: ... + @staticmethod + def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[int]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + @overload + def __getitem__(self, __i: SupportsIndex) -> int: ... + @overload + def __getitem__(self, __s: slice) -> bytearray: ... + @overload + def __setitem__(self, __i: SupportsIndex, __x: SupportsIndex) -> None: ... + @overload + def __setitem__(self, __s: slice, __x: Iterable[SupportsIndex] | bytes) -> None: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + def __add__(self, __s: ReadableBuffer) -> bytearray: ... + # The superclass wants us to accept Iterable[int], but that fails at runtime. + def __iadd__(self, __s: ReadableBuffer) -> Self: ... # type: ignore[override] + def __mul__(self, __n: SupportsIndex) -> bytearray: ... + def __rmul__(self, __n: SupportsIndex) -> bytearray: ... + def __imul__(self, __n: SupportsIndex) -> Self: ... + def __mod__(self, __value: Any) -> bytes: ... + # Incompatible with Sequence.__contains__ + def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... # type: ignore[override] + def __eq__(self, __x: object) -> bool: ... 
+ def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: ReadableBuffer) -> bool: ... + def __le__(self, __x: ReadableBuffer) -> bool: ... + def __gt__(self, __x: ReadableBuffer) -> bool: ... + def __ge__(self, __x: ReadableBuffer) -> bool: ... + def __alloc__(self) -> int: ... + +@final +class memoryview(Sequence[int]): + @property + def format(self) -> str: ... + @property + def itemsize(self) -> int: ... + @property + def shape(self) -> tuple[int, ...] | None: ... + @property + def strides(self) -> tuple[int, ...] | None: ... + @property + def suboffsets(self) -> tuple[int, ...] | None: ... + @property + def readonly(self) -> bool: ... + @property + def ndim(self) -> int: ... + @property + def obj(self) -> ReadableBuffer: ... + @property + def c_contiguous(self) -> bool: ... + @property + def f_contiguous(self) -> bool: ... + @property + def contiguous(self) -> bool: ... + @property + def nbytes(self) -> int: ... + def __init__(self, obj: ReadableBuffer) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None + ) -> None: ... + def cast(self, format: str, shape: list[int] | tuple[int, ...] = ...) -> memoryview: ... + @overload + def __getitem__(self, __i: SupportsIndex) -> int: ... + @overload + def __getitem__(self, __s: slice) -> memoryview: ... + def __contains__(self, __x: object) -> bool: ... + def __iter__(self) -> Iterator[int]: ... + def __len__(self) -> int: ... + @overload + def __setitem__(self, __s: slice, __o: ReadableBuffer) -> None: ... + @overload + def __setitem__(self, __i: SupportsIndex, __o: SupportsIndex) -> None: ... + if sys.version_info >= (3, 10): + def tobytes(self, order: Literal["C", "F", "A"] | None = "C") -> bytes: ... + elif sys.version_info >= (3, 8): + def tobytes(self, order: Literal["C", "F", "A"] | None = None) -> bytes: ... + else: + def tobytes(self) -> bytes: ... + + def tolist(self) -> list[int]: ... + if sys.version_info >= (3, 8): + def toreadonly(self) -> memoryview: ... + + def release(self) -> None: ... + if sys.version_info >= (3, 8): + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: ... + else: + def hex(self) -> str: ... + +@final +class bool(int): + def __new__(cls, __o: object = ...) -> Self: ... + # The following overloads could be represented more elegantly with a TypeVar("_B", bool, int), + # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880). + @overload + def __and__(self, __x: bool) -> bool: ... + @overload + def __and__(self, __x: int) -> int: ... + @overload + def __or__(self, __x: bool) -> bool: ... + @overload + def __or__(self, __x: int) -> int: ... + @overload + def __xor__(self, __x: bool) -> bool: ... + @overload + def __xor__(self, __x: int) -> int: ... + @overload + def __rand__(self, __x: bool) -> bool: ... + @overload + def __rand__(self, __x: int) -> int: ... + @overload + def __ror__(self, __x: bool) -> bool: ... + @overload + def __ror__(self, __x: int) -> int: ... + @overload + def __rxor__(self, __x: bool) -> bool: ... + @overload + def __rxor__(self, __x: int) -> int: ... + def __getnewargs__(self) -> tuple[int]: ... + +@final +class slice: + @property + def start(self) -> Any: ... + @property + def step(self) -> Any: ... + @property + def stop(self) -> Any: ... + @overload + def __init__(self, __stop: Any) -> None: ... + @overload + def __init__(self, __start: Any, __stop: Any, __step: Any = ...) 
-> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ... + +class tuple(Sequence[_T_co], Generic[_T_co]): + def __new__(cls, __iterable: Iterable[_T_co] = ...) -> Self: ... + def __len__(self) -> int: ... + def __contains__(self, __x: object) -> bool: ... + @overload + def __getitem__(self, __x: SupportsIndex) -> _T_co: ... + @overload + def __getitem__(self, __x: slice) -> tuple[_T_co, ...]: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __lt__(self, __x: tuple[_T_co, ...]) -> bool: ... + def __le__(self, __x: tuple[_T_co, ...]) -> bool: ... + def __gt__(self, __x: tuple[_T_co, ...]) -> bool: ... + def __ge__(self, __x: tuple[_T_co, ...]) -> bool: ... + @overload + def __add__(self, __x: tuple[_T_co, ...]) -> tuple[_T_co, ...]: ... + @overload + def __add__(self, __x: tuple[_T, ...]) -> tuple[_T_co | _T, ...]: ... + def __mul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... + def __rmul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... + def count(self, __value: Any) -> int: ... + def index(self, __value: Any, __start: SupportsIndex = 0, __stop: SupportsIndex = sys.maxsize) -> int: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +# Doesn't exist at runtime, but deleting this breaks mypy. See #2999 +@final +@type_check_only +class function: + # Make sure this class definition stays roughly in line with `types.FunctionType` + @property + def __closure__(self) -> tuple[_Cell, ...] | None: ... + __code__: CodeType + __defaults__: tuple[Any, ...] | None + __dict__: dict[str, Any] + @property + def __globals__(self) -> dict[str, Any]: ... + __name__: str + __qualname__: str + __annotations__: dict[str, Any] + __kwdefaults__: dict[str, Any] + if sys.version_info >= (3, 10): + @property + def __builtins__(self) -> dict[str, Any]: ... + + __module__: str + # mypy uses `builtins.function.__get__` to represent methods, properties, and getset_descriptors so we type the return as Any. + def __get__(self, __instance: object, __owner: type | None = None) -> Any: ... + +class list(MutableSequence[_T], Generic[_T]): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, __iterable: Iterable[_T]) -> None: ... + def copy(self) -> list[_T]: ... + def append(self, __object: _T) -> None: ... + def extend(self, __iterable: Iterable[_T]) -> None: ... + def pop(self, __index: SupportsIndex = -1) -> _T: ... + # Signature of `list.index` should be kept in line with `collections.UserList.index()` + # and multiprocessing.managers.ListProxy.index() + def index(self, __value: _T, __start: SupportsIndex = 0, __stop: SupportsIndex = sys.maxsize) -> int: ... + def count(self, __value: _T) -> int: ... + def insert(self, __index: SupportsIndex, __object: _T) -> None: ... + def remove(self, __value: _T) -> None: ... + # Signature of `list.sort` should be kept inline with `collections.UserList.sort()` + # and multiprocessing.managers.ListProxy.sort() + # + # Use list[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] + # to work around invariance + @overload + def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... 
+ __hash__: ClassVar[None] # type: ignore[assignment] + @overload + def __getitem__(self, __i: SupportsIndex) -> _T: ... + @overload + def __getitem__(self, __s: slice) -> list[_T]: ... + @overload + def __setitem__(self, __i: SupportsIndex, __o: _T) -> None: ... + @overload + def __setitem__(self, __s: slice, __o: Iterable[_T]) -> None: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + # Overloading looks unnecessary, but is needed to work around complex mypy problems + @overload + def __add__(self, __x: list[_T]) -> list[_T]: ... + @overload + def __add__(self, __x: list[_S]) -> list[_S | _T]: ... + def __iadd__(self, __x: Iterable[_T]) -> Self: ... # type: ignore[misc] + def __mul__(self, __n: SupportsIndex) -> list[_T]: ... + def __rmul__(self, __n: SupportsIndex) -> list[_T]: ... + def __imul__(self, __n: SupportsIndex) -> Self: ... + def __contains__(self, __o: object) -> bool: ... + def __reversed__(self) -> Iterator[_T]: ... + def __gt__(self, __x: list[_T]) -> bool: ... + def __ge__(self, __x: list[_T]) -> bool: ... + def __lt__(self, __x: list[_T]) -> bool: ... + def __le__(self, __x: list[_T]) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics + # Also multiprocessing.managers.SyncManager.dict() + @overload + def __init__(self) -> None: ... + @overload + def __init__(self: dict[str, _VT], **kwargs: _VT) -> None: ... + @overload + def __init__(self, __map: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... + @overload + def __init__(self: dict[str, _VT], __map: SupportsKeysAndGetItem[str, _VT], **kwargs: _VT) -> None: ... + @overload + def __init__(self, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ... + @overload + def __init__(self: dict[str, _VT], __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None: ... + # Next overload is for dict(string.split(sep) for string in iterable) + # Cannot be Iterable[Sequence[_T]] or otherwise dict(["foo", "bar", "baz"]) is not an error + @overload + def __init__(self: dict[str, str], __iterable: Iterable[list[str]]) -> None: ... + def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... + def copy(self) -> dict[_KT, _VT]: ... + def keys(self) -> dict_keys[_KT, _VT]: ... + def values(self) -> dict_values[_KT, _VT]: ... + def items(self) -> dict_items[_KT, _VT]: ... + # Signature of `dict.fromkeys` should be kept identical to `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections` + # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, __iterable: Iterable[_T], __value: None = None) -> dict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]: ... + # Positional-only in dict, but not in MutableMapping + @overload + def get(self, __key: _KT) -> _VT | None: ... + @overload + def get(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + @overload + def pop(self, __key: _KT) -> _VT: ... + @overload + def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + def __len__(self) -> int: ... + def __getitem__(self, __key: _KT) -> _VT: ... + def __setitem__(self, __key: _KT, __value: _VT) -> None: ... 
+ def __delitem__(self, __key: _KT) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + if sys.version_info >= (3, 8): + def __reversed__(self) -> Iterator[_KT]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + def __or__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... + def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... + # dict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ... + +class set(MutableSet[_T], Generic[_T]): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, __iterable: Iterable[_T]) -> None: ... + def add(self, __element: _T) -> None: ... + def copy(self) -> set[_T]: ... + def difference(self, *s: Iterable[Any]) -> set[_T]: ... + def difference_update(self, *s: Iterable[Any]) -> None: ... + def discard(self, __element: _T) -> None: ... + def intersection(self, *s: Iterable[Any]) -> set[_T]: ... + def intersection_update(self, *s: Iterable[Any]) -> None: ... + def isdisjoint(self, __s: Iterable[Any]) -> bool: ... + def issubset(self, __s: Iterable[Any]) -> bool: ... + def issuperset(self, __s: Iterable[Any]) -> bool: ... + def remove(self, __element: _T) -> None: ... + def symmetric_difference(self, __s: Iterable[_T]) -> set[_T]: ... + def symmetric_difference_update(self, __s: Iterable[_T]) -> None: ... + def union(self, *s: Iterable[_S]) -> set[_T | _S]: ... + def update(self, *s: Iterable[_T]) -> None: ... + def __len__(self) -> int: ... + def __contains__(self, __o: object) -> bool: ... + def __iter__(self) -> Iterator[_T]: ... + def __and__(self, __s: AbstractSet[object]) -> set[_T]: ... + def __iand__(self, __s: AbstractSet[object]) -> Self: ... + def __or__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... + def __ior__(self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] + def __sub__(self, __s: AbstractSet[_T | None]) -> set[_T]: ... + def __isub__(self, __s: AbstractSet[object]) -> Self: ... + def __xor__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... + def __ixor__(self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] + def __le__(self, __s: AbstractSet[object]) -> bool: ... + def __lt__(self, __s: AbstractSet[object]) -> bool: ... + def __ge__(self, __s: AbstractSet[object]) -> bool: ... + def __gt__(self, __s: AbstractSet[object]) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +class frozenset(AbstractSet[_T_co], Generic[_T_co]): + @overload + def __new__(cls) -> Self: ... + @overload + def __new__(cls, __iterable: Iterable[_T_co]) -> Self: ... + def copy(self) -> frozenset[_T_co]: ... + def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def isdisjoint(self, __s: Iterable[_T_co]) -> bool: ... + def issubset(self, __s: Iterable[object]) -> bool: ... + def issuperset(self, __s: Iterable[object]) -> bool: ... + def symmetric_difference(self, __s: Iterable[_T_co]) -> frozenset[_T_co]: ... + def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: ... + def __len__(self) -> int: ... + def __contains__(self, __o: object) -> bool: ... 
+ def __iter__(self) -> Iterator[_T_co]: ... + def __and__(self, __s: AbstractSet[_T_co]) -> frozenset[_T_co]: ... + def __or__(self, __s: AbstractSet[_S]) -> frozenset[_T_co | _S]: ... + def __sub__(self, __s: AbstractSet[_T_co]) -> frozenset[_T_co]: ... + def __xor__(self, __s: AbstractSet[_S]) -> frozenset[_T_co | _S]: ... + def __le__(self, __s: AbstractSet[object]) -> bool: ... + def __lt__(self, __s: AbstractSet[object]) -> bool: ... + def __ge__(self, __s: AbstractSet[object]) -> bool: ... + def __gt__(self, __s: AbstractSet[object]) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +class enumerate(Iterator[tuple[int, _T]], Generic[_T]): + def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[int, _T]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +@final +class range(Sequence[int]): + @property + def start(self) -> int: ... + @property + def stop(self) -> int: ... + @property + def step(self) -> int: ... + @overload + def __init__(self, __stop: SupportsIndex) -> None: ... + @overload + def __init__(self, __start: SupportsIndex, __stop: SupportsIndex, __step: SupportsIndex = ...) -> None: ... + def count(self, __value: int) -> int: ... + def index(self, __value: int) -> int: ... # type: ignore[override] + def __len__(self) -> int: ... + def __contains__(self, __o: object) -> bool: ... + def __iter__(self) -> Iterator[int]: ... + @overload + def __getitem__(self, __i: SupportsIndex) -> int: ... + @overload + def __getitem__(self, __s: slice) -> range: ... + def __reversed__(self) -> Iterator[int]: ... + +class property: + fget: Callable[[Any], Any] | None + fset: Callable[[Any, Any], None] | None + fdel: Callable[[Any], None] | None + __isabstractmethod__: bool + def __init__( + self, + fget: Callable[[Any], Any] | None = ..., + fset: Callable[[Any, Any], None] | None = ..., + fdel: Callable[[Any], None] | None = ..., + doc: str | None = ..., + ) -> None: ... + def getter(self, __fget: Callable[[Any], Any]) -> property: ... + def setter(self, __fset: Callable[[Any, Any], None]) -> property: ... + def deleter(self, __fdel: Callable[[Any], None]) -> property: ... + def __get__(self, __instance: Any, __owner: type | None = None) -> Any: ... + def __set__(self, __instance: Any, __value: Any) -> None: ... + def __delete__(self, __instance: Any) -> None: ... + +@final +class _NotImplementedType(Any): # type: ignore[misc] + # A little weird, but typing the __call__ as NotImplemented makes the error message + # for NotImplemented() much better + __call__: NotImplemented # type: ignore[valid-type] # pyright: ignore[reportGeneralTypeIssues] + +NotImplemented: _NotImplementedType + +def abs(__x: SupportsAbs[_T]) -> _T: ... +def all(__iterable: Iterable[object]) -> bool: ... +def any(__iterable: Iterable[object]) -> bool: ... +def ascii(__obj: object) -> str: ... +def bin(__number: int | SupportsIndex) -> str: ... +def breakpoint(*args: Any, **kws: Any) -> None: ... +def callable(__obj: object) -> TypeGuard[Callable[..., object]]: ... +def chr(__i: int) -> str: ... + +# We define this here instead of using os.PathLike to avoid import cycle issues. +# See https://github.com/python/typeshed/pull/991#issuecomment-288160993 +class _PathLike(Protocol[AnyStr_co]): + def __fspath__(self) -> AnyStr_co: ... 
+ +if sys.version_info >= (3, 10): + def aiter(__async_iterable: SupportsAiter[_SupportsAnextT]) -> _SupportsAnextT: ... + + class _SupportsSynchronousAnext(Protocol[_AwaitableT_co]): + def __anext__(self) -> _AwaitableT_co: ... + + @overload + # `anext` is not, in fact, an async function. When default is not provided + # `anext` is just a passthrough for `obj.__anext__` + # See discussion in #7491 and pure-Python implementation of `anext` at https://github.com/python/cpython/blob/ea786a882b9ed4261eafabad6011bc7ef3b5bf94/Lib/test/test_asyncgen.py#L52-L80 + def anext(__i: _SupportsSynchronousAnext[_AwaitableT]) -> _AwaitableT: ... + @overload + async def anext(__i: SupportsAnext[_T], default: _VT) -> _T | _VT: ... + +# compile() returns a CodeType, unless the flags argument includes PyCF_ONLY_AST (=1024), +# in which case it returns ast.AST. We have overloads for flag 0 (the default) and for +# explicitly passing PyCF_ONLY_AST. We fall back to Any for other values of flags. +if sys.version_info >= (3, 8): + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[0], + dont_inherit: bool = False, + optimize: int = -1, + *, + _feature_version: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + *, + dont_inherit: bool = False, + optimize: int = -1, + _feature_version: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[1024], + dont_inherit: bool = False, + optimize: int = -1, + *, + _feature_version: int = -1, + ) -> _ast.AST: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: int, + dont_inherit: bool = False, + optimize: int = -1, + *, + _feature_version: int = -1, + ) -> Any: ... + +else: + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[0], + dont_inherit: bool = False, + optimize: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + *, + dont_inherit: bool = False, + optimize: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[1024], + dont_inherit: bool = False, + optimize: int = -1, + ) -> _ast.AST: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: int, + dont_inherit: bool = False, + optimize: int = -1, + ) -> Any: ... + +def copyright() -> None: ... +def credits() -> None: ... +def delattr(__obj: object, __name: str) -> None: ... +def dir(__o: object = ...) -> list[str]: ... +@overload +def divmod(__x: SupportsDivMod[_T_contra, _T_co], __y: _T_contra) -> _T_co: ... 
+@overload +def divmod(__x: _T_contra, __y: SupportsRDivMod[_T_contra, _T_co]) -> _T_co: ... + +# The `globals` argument to `eval` has to be `dict[str, Any]` rather than `dict[str, object]` due to invariance. +# (The `globals` argument has to be a "real dict", rather than any old mapping, unlike the `locals` argument.) +def eval( + __source: str | ReadableBuffer | CodeType, + __globals: dict[str, Any] | None = None, + __locals: Mapping[str, object] | None = None, +) -> Any: ... + +# Comment above regarding `eval` applies to `exec` as well +if sys.version_info >= (3, 11): + def exec( + __source: str | ReadableBuffer | CodeType, + __globals: dict[str, Any] | None = None, + __locals: Mapping[str, object] | None = None, + *, + closure: tuple[_Cell, ...] | None = None, + ) -> None: ... + +else: + def exec( + __source: str | ReadableBuffer | CodeType, + __globals: dict[str, Any] | None = None, + __locals: Mapping[str, object] | None = None, + ) -> None: ... + +def exit(code: sys._ExitCode = None) -> NoReturn: ... + +class filter(Iterator[_T], Generic[_T]): + @overload + def __init__(self, __function: None, __iterable: Iterable[_T | None]) -> None: ... + @overload + def __init__(self, __function: Callable[[_S], TypeGuard[_T]], __iterable: Iterable[_S]) -> None: ... + @overload + def __init__(self, __function: Callable[[_T], Any], __iterable: Iterable[_T]) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +def format(__value: object, __format_spec: str = "") -> str: ... +@overload +def getattr(__o: object, __name: str) -> Any: ... + +# While technically covered by the last overload, spelling out the types for None, bool +# and basic containers help mypy out in some tricky situations involving type context +# (aka bidirectional inference) +@overload +def getattr(__o: object, __name: str, __default: None) -> Any | None: ... +@overload +def getattr(__o: object, __name: str, __default: bool) -> Any | bool: ... +@overload +def getattr(__o: object, name: str, __default: list[Any]) -> Any | list[Any]: ... +@overload +def getattr(__o: object, name: str, __default: dict[Any, Any]) -> Any | dict[Any, Any]: ... +@overload +def getattr(__o: object, __name: str, __default: _T) -> Any | _T: ... +def globals() -> dict[str, Any]: ... +def hasattr(__obj: object, __name: str) -> bool: ... +def hash(__obj: object) -> int: ... +def help(request: object = ...) -> None: ... +def hex(__number: int | SupportsIndex) -> str: ... +def id(__obj: object) -> int: ... +def input(__prompt: object = "") -> str: ... + +class _GetItemIterable(Protocol[_T_co]): + def __getitem__(self, __i: int) -> _T_co: ... + +@overload +def iter(__iterable: SupportsIter[_SupportsNextT]) -> _SupportsNextT: ... +@overload +def iter(__iterable: _GetItemIterable[_T]) -> Iterator[_T]: ... +@overload +def iter(__function: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: ... +@overload +def iter(__function: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ... + +# Keep this alias in sync with unittest.case._ClassInfo +if sys.version_info >= (3, 10): + _ClassInfo: TypeAlias = type | types.UnionType | tuple[_ClassInfo, ...] +else: + _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] + +def isinstance(__obj: object, __class_or_tuple: _ClassInfo) -> bool: ... +def issubclass(__cls: type, __class_or_tuple: _ClassInfo) -> bool: ... +def len(__obj: Sized) -> int: ... +def license() -> None: ... +def locals() -> dict[str, Any]: ... 
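+# Illustrative sketch, not part of the upstream typeshed stub, of the `eval`
+# comment above: `__globals` must be a real dict (due to invariance), while
+# `__locals` may be any mapping.
+#
+#     env: dict[str, Any] = {"x": 1}
+#     eval("x + 1", env)                          # OK: globals is a real dict
+#     eval("x + 1", env, {"x": 2})                # OK: locals can be any mapping
+#     eval("x + 1", types.MappingProxyType(env))  # rejected by the type checker
+#                                                 # (and raises TypeError at runtime)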
+ +class map(Iterator[_S], Generic[_S]): + @overload + def __init__(self, __func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> None: ... + @overload + def __init__(self, __func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> None: ... + @overload + def __init__( + self, __func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3] + ) -> None: ... + @overload + def __init__( + self, + __func: Callable[[_T1, _T2, _T3, _T4], _S], + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + ) -> None: ... + @overload + def __init__( + self, + __func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + ) -> None: ... + @overload + def __init__( + self, + __func: Callable[..., _S], + __iter1: Iterable[Any], + __iter2: Iterable[Any], + __iter3: Iterable[Any], + __iter4: Iterable[Any], + __iter5: Iterable[Any], + __iter6: Iterable[Any], + *iterables: Iterable[Any], + ) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _S: ... + +@overload +def max( + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = None +) -> SupportsRichComparisonT: ... +@overload +def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None) -> SupportsRichComparisonT: ... +@overload +def max(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None, default: _T) -> SupportsRichComparisonT | _T: ... +@overload +def max(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... +@overload +def min( + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = None +) -> SupportsRichComparisonT: ... +@overload +def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None) -> SupportsRichComparisonT: ... +@overload +def min(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None, default: _T) -> SupportsRichComparisonT | _T: ... +@overload +def min(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... +@overload +def next(__i: SupportsNext[_T]) -> _T: ... +@overload +def next(__i: SupportsNext[_T], __default: _VT) -> _T | _VT: ... +def oct(__number: int | SupportsIndex) -> str: ... + +_Opener: TypeAlias = Callable[[str, int], int] + +# Text mode: always returns a TextIOWrapper +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> TextIOWrapper: ... 
+ +# Unbuffered binary mode: returns a FileIO +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> FileIO: ... + +# Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> BufferedRandom: ... +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> BufferedWriter: ... +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> BufferedReader: ... + +# Buffering cannot be determined: fall back to BinaryIO +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryMode, + buffering: int = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> BinaryIO: ... + +# Fallback if mode is not specified +@overload +def open( + file: FileDescriptorOrPath, + mode: str, + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> IO[Any]: ... +def ord(__c: str | bytes | bytearray) -> int: ... + +class _SupportsWriteAndFlush(SupportsWrite[_T_contra], Protocol[_T_contra]): + def flush(self) -> None: ... + +@overload +def print( + *values: object, + sep: str | None = " ", + end: str | None = "\n", + file: SupportsWrite[str] | None = None, + flush: Literal[False] = False, +) -> None: ... +@overload +def print( + *values: object, sep: str | None = " ", end: str | None = "\n", file: _SupportsWriteAndFlush[str] | None = None, flush: bool +) -> None: ... + +_E = TypeVar("_E", contravariant=True) +_M = TypeVar("_M", contravariant=True) + +class _SupportsPow2(Protocol[_E, _T_co]): + def __pow__(self, __other: _E) -> _T_co: ... + +class _SupportsPow3NoneOnly(Protocol[_E, _T_co]): + def __pow__(self, __other: _E, __modulo: None = None) -> _T_co: ... + +class _SupportsPow3(Protocol[_E, _M, _T_co]): + def __pow__(self, __other: _E, __modulo: _M) -> _T_co: ... + +_SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed + _SupportsPow2[Any, Any] | _SupportsPow3NoneOnly[Any, Any] | _SupportsPow3[Any, Any, Any] +) + +if sys.version_info >= (3, 8): + # TODO: `pow(int, int, Literal[0])` fails at runtime, + # but adding a `NoReturn` overload isn't a good solution for expressing that (see #8566). + @overload + def pow(base: int, exp: int, mod: int) -> int: ... + @overload + def pow(base: int, exp: Literal[0], mod: None = None) -> Literal[1]: ... # type: ignore[misc] + @overload + def pow(base: int, exp: _PositiveInteger, mod: None = None) -> int: ... # type: ignore[misc] + @overload + def pow(base: int, exp: _NegativeInteger, mod: None = None) -> float: ... 
# type: ignore[misc] + # int base & positive-int exp -> int; int base & negative-int exp -> float + # return type must be Any as `int | float` causes too many false-positive errors + @overload + def pow(base: int, exp: int, mod: None = None) -> Any: ... + @overload + def pow(base: _PositiveInteger, exp: float, mod: None = None) -> float: ... + @overload + def pow(base: _NegativeInteger, exp: float, mod: None = None) -> complex: ... + @overload + def pow(base: float, exp: int, mod: None = None) -> float: ... + # float base & float exp could return float or complex + # return type must be Any (same as complex base, complex exp), + # as `float | complex` causes too many false-positive errors + @overload + def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> Any: ... + @overload + def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex: ... + @overload + def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... + @overload + def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... + @overload + def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ... + @overload + def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = None) -> Any: ... + @overload + def pow(base: _SupportsSomeKindOfPow, exp: complex, mod: None = None) -> complex: ... + +else: + @overload + def pow(__base: int, __exp: int, __mod: int) -> int: ... + @overload + def pow(__base: int, __exp: Literal[0], __mod: None = None) -> Literal[1]: ... # type: ignore[misc] + @overload + def pow(__base: int, __exp: _PositiveInteger, __mod: None = None) -> int: ... # type: ignore[misc] + @overload + def pow(__base: int, __exp: _NegativeInteger, __mod: None = None) -> float: ... # type: ignore[misc] + @overload + def pow(__base: int, __exp: int, __mod: None = None) -> Any: ... + @overload + def pow(__base: _PositiveInteger, __exp: float, __mod: None = None) -> float: ... + @overload + def pow(__base: _NegativeInteger, __exp: float, __mod: None = None) -> complex: ... + @overload + def pow(__base: float, __exp: int, __mod: None = None) -> float: ... + @overload + def pow(__base: float, __exp: complex | _SupportsSomeKindOfPow, __mod: None = None) -> Any: ... + @overload + def pow(__base: complex, __exp: complex | _SupportsSomeKindOfPow, __mod: None = None) -> complex: ... + @overload + def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E, __mod: None = None) -> _T_co: ... + @overload + def pow(__base: _SupportsPow3NoneOnly[_E, _T_co], __exp: _E, __mod: None = None) -> _T_co: ... + @overload + def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M) -> _T_co: ... + @overload + def pow(__base: _SupportsSomeKindOfPow, __exp: float, __mod: None = None) -> Any: ... + @overload + def pow(__base: _SupportsSomeKindOfPow, __exp: complex, __mod: None = None) -> complex: ... + +def quit(code: sys._ExitCode = None) -> NoReturn: ... + +class reversed(Iterator[_T], Generic[_T]): + @overload + def __init__(self, __sequence: Reversible[_T]) -> None: ... + @overload + def __init__(self, __sequence: SupportsLenAndGetItem[_T]) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + def __length_hint__(self) -> int: ... + +def repr(__obj: object) -> str: ... 
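+# Illustrative sketch, not part of the upstream typeshed stub, of how the `pow`
+# overloads above behave under a type checker (`reveal_type` is checker-only):
+#
+#     reveal_type(pow(2, 3))      # int   (positive literal exponent)
+#     reveal_type(pow(2, -1))     # float (negative literal exponent)
+#     reveal_type(pow(2, 3, 5))   # int   (three-argument integer form)
+#     reveal_type(pow(2.0, 0.5))  # Any   (float ** float may be float or complex)
+#
+# The literal-exponent cases rely on the `_PositiveInteger` / `_NegativeInteger`
+# aliases referenced in the overloads above.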
+ +# See https://github.com/python/typeshed/pull/9141 +# and https://github.com/python/typeshed/pull/9151 +# on why we don't use `SupportsRound` from `typing.pyi` + +class _SupportsRound1(Protocol[_T_co]): + def __round__(self) -> _T_co: ... + +class _SupportsRound2(Protocol[_T_co]): + def __round__(self, __ndigits: int) -> _T_co: ... + +@overload +def round(number: _SupportsRound1[_T], ndigits: None = None) -> _T: ... +@overload +def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... + +# See https://github.com/python/typeshed/pull/6292#discussion_r748875189 +# for why arg 3 of `setattr` should be annotated with `Any` and not `object` +def setattr(__obj: object, __name: str, __value: Any) -> None: ... +@overload +def sorted( + __iterable: Iterable[SupportsRichComparisonT], *, key: None = None, reverse: bool = False +) -> list[SupportsRichComparisonT]: ... +@overload +def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> list[_T]: ... + +_AddableT1 = TypeVar("_AddableT1", bound=SupportsAdd[Any, Any]) +_AddableT2 = TypeVar("_AddableT2", bound=SupportsAdd[Any, Any]) + +class _SupportsSumWithNoDefaultGiven(SupportsAdd[Any, Any], SupportsRAdd[int, Any], Protocol): ... + +_SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWithNoDefaultGiven) + +# In general, the return type of `x + x` is *not* guaranteed to be the same type as x. +# However, we can't express that in the stub for `sum()` +# without creating many false-positive errors (see #7578). +# Instead, we special-case the most common examples of this: bool and literal integers. +if sys.version_info >= (3, 8): + @overload + def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = 0) -> int: ... # type: ignore[misc] + +else: + @overload + def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = 0) -> int: ... # type: ignore[misc] + +@overload +def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... + +if sys.version_info >= (3, 8): + @overload + def sum(__iterable: Iterable[_AddableT1], start: _AddableT2) -> _AddableT1 | _AddableT2: ... + +else: + @overload + def sum(__iterable: Iterable[_AddableT1], __start: _AddableT2) -> _AddableT1 | _AddableT2: ... + +# The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object` +# (A "SupportsDunderDict" protocol doesn't work) +# Use a type: ignore to make complaints about overlapping overloads go away +@overload +def vars(__object: type) -> types.MappingProxyType[str, Any]: ... # type: ignore[misc] +@overload +def vars(__object: Any = ...) -> dict[str, Any]: ... + +class zip(Iterator[_T_co], Generic[_T_co]): + if sys.version_info >= (3, 10): + @overload + def __new__(cls, __iter1: Iterable[_T1], *, strict: bool = ...) -> zip[tuple[_T1]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], *, strict: bool = ...) -> zip[tuple[_T1, _T2]]: ... + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], *, strict: bool = ... + ) -> zip[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + *, + strict: bool = ..., + ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ... 
+ @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + *, + strict: bool = ..., + ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[Any], + __iter2: Iterable[Any], + __iter3: Iterable[Any], + __iter4: Iterable[Any], + __iter5: Iterable[Any], + __iter6: Iterable[Any], + *iterables: Iterable[Any], + strict: bool = ..., + ) -> zip[tuple[Any, ...]]: ... + else: + @overload + def __new__(cls, __iter1: Iterable[_T1]) -> zip[tuple[_T1]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> zip[tuple[_T1, _T2]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> zip[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4] + ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[Any], + __iter2: Iterable[Any], + __iter3: Iterable[Any], + __iter4: Iterable[Any], + __iter5: Iterable[Any], + __iter6: Iterable[Any], + *iterables: Iterable[Any], + ) -> zip[tuple[Any, ...]]: ... + + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +# Signature of `builtins.__import__` should be kept identical to `importlib.__import__` +# Return type of `__import__` should be kept the same as return type of `importlib.import_module` +def __import__( + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] = ..., + level: int = 0, +) -> types.ModuleType: ... +def __build_class__(__func: Callable[[], _Cell | Any], __name: str, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... + +# Actually the type of Ellipsis is , but since it's +# not exposed anywhere under that name, we make it private here. +@final +@type_check_only +class ellipsis: ... + +Ellipsis: ellipsis + +class BaseException: + args: tuple[Any, ...] + __cause__: BaseException | None + __context__: BaseException | None + __suppress_context__: bool + __traceback__: TracebackType | None + def __init__(self, *args: object) -> None: ... + def __setstate__(self, __state: dict[str, Any] | None) -> None: ... + def with_traceback(self, __tb: TracebackType | None) -> Self: ... + if sys.version_info >= (3, 11): + # only present after add_note() is called + __notes__: list[str] + def add_note(self, __note: str) -> None: ... + +class GeneratorExit(BaseException): ... +class KeyboardInterrupt(BaseException): ... + +class SystemExit(BaseException): + code: sys._ExitCode + +class Exception(BaseException): ... + +class StopIteration(Exception): + value: Any + +class OSError(Exception): + errno: int + strerror: str + # filename, filename2 are actually str | bytes | None + filename: Any + filename2: Any + if sys.platform == "win32": + winerror: int + +EnvironmentError = OSError +IOError = OSError +if sys.platform == "win32": + WindowsError = OSError + +class ArithmeticError(Exception): ... +class AssertionError(Exception): ... + +class AttributeError(Exception): + if sys.version_info >= (3, 10): + def __init__(self, *args: object, name: str | None = ..., obj: object = ...) -> None: ... 
+ name: str + obj: object + +class BufferError(Exception): ... +class EOFError(Exception): ... + +class ImportError(Exception): + def __init__(self, *args: object, name: str | None = ..., path: str | None = ...) -> None: ... + name: str | None + path: str | None + msg: str # undocumented + +class LookupError(Exception): ... +class MemoryError(Exception): ... + +class NameError(Exception): + if sys.version_info >= (3, 10): + name: str + +class ReferenceError(Exception): ... +class RuntimeError(Exception): ... + +class StopAsyncIteration(Exception): + value: Any + +class SyntaxError(Exception): + msg: str + lineno: int | None + offset: int | None + text: str | None + filename: str | None + if sys.version_info >= (3, 10): + end_lineno: int | None + end_offset: int | None + +class SystemError(Exception): ... +class TypeError(Exception): ... +class ValueError(Exception): ... +class FloatingPointError(ArithmeticError): ... +class OverflowError(ArithmeticError): ... +class ZeroDivisionError(ArithmeticError): ... +class ModuleNotFoundError(ImportError): ... +class IndexError(LookupError): ... +class KeyError(LookupError): ... +class UnboundLocalError(NameError): ... + +class BlockingIOError(OSError): + characters_written: int + +class ChildProcessError(OSError): ... +class ConnectionError(OSError): ... +class BrokenPipeError(ConnectionError): ... +class ConnectionAbortedError(ConnectionError): ... +class ConnectionRefusedError(ConnectionError): ... +class ConnectionResetError(ConnectionError): ... +class FileExistsError(OSError): ... +class FileNotFoundError(OSError): ... +class InterruptedError(OSError): ... +class IsADirectoryError(OSError): ... +class NotADirectoryError(OSError): ... +class PermissionError(OSError): ... +class ProcessLookupError(OSError): ... +class TimeoutError(OSError): ... +class NotImplementedError(RuntimeError): ... +class RecursionError(RuntimeError): ... +class IndentationError(SyntaxError): ... +class TabError(IndentationError): ... +class UnicodeError(ValueError): ... + +class UnicodeDecodeError(UnicodeError): + encoding: str + object: bytes + start: int + end: int + reason: str + def __init__(self, __encoding: str, __object: ReadableBuffer, __start: int, __end: int, __reason: str) -> None: ... + +class UnicodeEncodeError(UnicodeError): + encoding: str + object: str + start: int + end: int + reason: str + def __init__(self, __encoding: str, __object: str, __start: int, __end: int, __reason: str) -> None: ... + +class UnicodeTranslateError(UnicodeError): + encoding: None + object: str + start: int + end: int + reason: str + def __init__(self, __object: str, __start: int, __end: int, __reason: str) -> None: ... + +class Warning(Exception): ... +class UserWarning(Warning): ... +class DeprecationWarning(Warning): ... +class SyntaxWarning(Warning): ... +class RuntimeWarning(Warning): ... +class FutureWarning(Warning): ... +class PendingDeprecationWarning(Warning): ... +class ImportWarning(Warning): ... +class UnicodeWarning(Warning): ... +class BytesWarning(Warning): ... +class ResourceWarning(Warning): ... + +if sys.version_info >= (3, 10): + class EncodingWarning(Warning): ... + +if sys.version_info >= (3, 11): + _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True) + _BaseExceptionT = TypeVar("_BaseExceptionT", bound=BaseException) + _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True) + _ExceptionT = TypeVar("_ExceptionT", bound=Exception) + + # See `check_exception_group.py` for use-cases and comments. 
+ class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): + def __new__(cls, __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> Self: ... + def __init__(self, __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> None: ... + @property + def message(self) -> str: ... + @property + def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: ... + @overload + def subgroup( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> ExceptionGroup[_ExceptionT] | None: ... + @overload + def subgroup( + self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] + ) -> BaseExceptionGroup[_BaseExceptionT] | None: ... + @overload + def subgroup( + self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + ) -> BaseExceptionGroup[_BaseExceptionT_co] | None: ... + @overload + def split( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> tuple[ExceptionGroup[_ExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + @overload + def split( + self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] + ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + @overload + def split( + self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + ) -> tuple[BaseExceptionGroup[_BaseExceptionT_co] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + # In reality it is `NonEmptySequence`: + @overload + def derive(self, __excs: Sequence[_ExceptionT]) -> ExceptionGroup[_ExceptionT]: ... + @overload + def derive(self, __excs: Sequence[_BaseExceptionT]) -> BaseExceptionGroup[_BaseExceptionT]: ... + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + + class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): + def __new__(cls, __message: str, __exceptions: Sequence[_ExceptionT_co]) -> Self: ... + def __init__(self, __message: str, __exceptions: Sequence[_ExceptionT_co]) -> None: ... + @property + def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: ... + # We accept a narrower type, but that's OK. + @overload # type: ignore[override] + def subgroup( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> ExceptionGroup[_ExceptionT] | None: ... + @overload + def subgroup(self, __condition: Callable[[_ExceptionT_co | Self], bool]) -> ExceptionGroup[_ExceptionT_co] | None: ... + @overload # type: ignore[override] + def split( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> tuple[ExceptionGroup[_ExceptionT] | None, ExceptionGroup[_ExceptionT_co] | None]: ... + @overload + def split( + self, __condition: Callable[[_ExceptionT_co | Self], bool] + ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... 
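+# Illustrative sketch, not part of the upstream typeshed stub, of the
+# `ExceptionGroup.split` overloads above (Python 3.11+ only, matching the
+# surrounding version guard):
+#
+#     eg = ExceptionGroup("errs", [ValueError("a"), TypeError("b")])
+#     match, rest = eg.split(ValueError)
+#     # `match` is typed ExceptionGroup[ValueError] | None;
+#     # `rest` is typed ExceptionGroup[ValueError | TypeError] | None.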
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/bz2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/bz2.pyi new file mode 100644 index 00000000..9ad80ee6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/bz2.pyi @@ -0,0 +1,146 @@ +import _compression +import sys +from _compression import BaseStream +from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer +from collections.abc import Iterable +from typing import IO, Any, Protocol, TextIO, overload +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, final + +__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "open", "compress", "decompress"] + +# The following attributes and methods are optional: +# def fileno(self) -> int: ... +# def close(self) -> object: ... +class _ReadableFileobj(_compression._Reader, Protocol): ... + +class _WritableFileobj(Protocol): + def write(self, __b: bytes) -> object: ... + # The following attributes and methods are optional: + # def fileno(self) -> int: ... + # def close(self) -> object: ... + +def compress(data: ReadableBuffer, compresslevel: int = 9) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: ... + +_ReadBinaryMode: TypeAlias = Literal["", "r", "rb"] +_WriteBinaryMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] +_ReadTextMode: TypeAlias = Literal["rt"] +_WriteTextMode: TypeAlias = Literal["wt", "xt", "at"] + +@overload +def open( + filename: _ReadableFileobj, + mode: _ReadBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> BZ2File: ... +@overload +def open( + filename: _ReadableFileobj, + mode: _ReadTextMode, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIO: ... +@overload +def open( + filename: _WritableFileobj, + mode: _WriteBinaryMode, + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> BZ2File: ... +@overload +def open( + filename: _WritableFileobj, + mode: _WriteTextMode, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIO: ... +@overload +def open( + filename: StrOrBytesPath, + mode: _ReadBinaryMode | _WriteBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> BZ2File: ... +@overload +def open( + filename: StrOrBytesPath, + mode: _ReadTextMode | _WriteTextMode, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIO: ... +@overload +def open( + filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, + mode: str, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> BZ2File | TextIO: ... + +class BZ2File(BaseStream, IO[bytes]): + def __enter__(self) -> Self: ... + if sys.version_info >= (3, 9): + @overload + def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: ... + @overload + def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", *, compresslevel: int = 9) -> None: ... + @overload + def __init__( + self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = "r", *, compresslevel: int = 9 + ) -> None: ... 
+ else: + @overload + def __init__( + self, filename: _WritableFileobj, mode: _WriteBinaryMode, buffering: Any | None = None, compresslevel: int = 9 + ) -> None: ... + @overload + def __init__( + self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", buffering: Any | None = None, compresslevel: int = 9 + ) -> None: ... + @overload + def __init__( + self, + filename: StrOrBytesPath, + mode: _ReadBinaryMode | _WriteBinaryMode = "r", + buffering: Any | None = None, + compresslevel: int = 9, + ) -> None: ... + + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: SupportsIndex = -1) -> bytes: ... # type: ignore[override] + def readinto(self, b: WriteableBuffer) -> int: ... + def readlines(self, size: SupportsIndex = -1) -> list[bytes]: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + def write(self, data: ReadableBuffer) -> int: ... + def writelines(self, seq: Iterable[ReadableBuffer]) -> None: ... + +@final +class BZ2Compressor: + def __init__(self, compresslevel: int = ...) -> None: ... + def compress(self, __data: ReadableBuffer) -> bytes: ... + def flush(self) -> bytes: ... + +@final +class BZ2Decompressor: + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... + @property + def eof(self) -> bool: ... + @property + def needs_input(self) -> bool: ... + @property + def unused_data(self) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cProfile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cProfile.pyi new file mode 100644 index 00000000..8945b214 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cProfile.pyi @@ -0,0 +1,37 @@ +import sys +from _typeshed import StrOrBytesPath, Unused +from collections.abc import Callable +from types import CodeType +from typing import Any, TypeVar +from typing_extensions import ParamSpec, Self, TypeAlias + +__all__ = ["run", "runctx", "Profile"] + +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... +def runctx( + statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = None, sort: str | int = -1 +) -> None: ... + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_Label: TypeAlias = tuple[str, int, str] + +class Profile: + stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented + def __init__( + self, timer: Callable[[], float] = ..., timeunit: float = ..., subcalls: bool = ..., builtins: bool = ... + ) -> None: ... + def enable(self) -> None: ... + def disable(self) -> None: ... + def print_stats(self, sort: str | int = -1) -> None: ... + def dump_stats(self, file: StrOrBytesPath) -> None: ... + def create_stats(self) -> None: ... + def snapshot_stats(self) -> None: ... + def run(self, cmd: str) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... + def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... + if sys.version_info >= (3, 8): + def __enter__(self) -> Self: ... + def __exit__(self, *exc_info: Unused) -> None: ... + +def label(code: str | CodeType) -> _Label: ... 
# undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/calendar.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/calendar.pyi new file mode 100644 index 00000000..255a12d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/calendar.pyi @@ -0,0 +1,145 @@ +import datetime +import sys +from _typeshed import Unused +from collections.abc import Iterable, Sequence +from time import struct_time +from typing import ClassVar +from typing_extensions import Literal, TypeAlias + +__all__ = [ + "IllegalMonthError", + "IllegalWeekdayError", + "setfirstweekday", + "firstweekday", + "isleap", + "leapdays", + "weekday", + "monthrange", + "monthcalendar", + "prmonth", + "month", + "prcal", + "calendar", + "timegm", + "month_name", + "month_abbr", + "day_name", + "day_abbr", + "Calendar", + "TextCalendar", + "HTMLCalendar", + "LocaleTextCalendar", + "LocaleHTMLCalendar", + "weekheader", +] + +if sys.version_info >= (3, 10): + __all__ += ["FRIDAY", "MONDAY", "SATURDAY", "SUNDAY", "THURSDAY", "TUESDAY", "WEDNESDAY"] + +_LocaleType: TypeAlias = tuple[str | None, str | None] + +class IllegalMonthError(ValueError): + def __init__(self, month: int) -> None: ... + +class IllegalWeekdayError(ValueError): + def __init__(self, weekday: int) -> None: ... + +def isleap(year: int) -> bool: ... +def leapdays(y1: int, y2: int) -> int: ... +def weekday(year: int, month: int, day: int) -> int: ... +def monthrange(year: int, month: int) -> tuple[int, int]: ... + +class Calendar: + firstweekday: int + def __init__(self, firstweekday: int = 0) -> None: ... + def getfirstweekday(self) -> int: ... + def setfirstweekday(self, firstweekday: int) -> None: ... + def iterweekdays(self) -> Iterable[int]: ... + def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: ... + def itermonthdays2(self, year: int, month: int) -> Iterable[tuple[int, int]]: ... + def itermonthdays(self, year: int, month: int) -> Iterable[int]: ... + def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: ... + def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: ... + def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: ... + def yeardatescalendar(self, year: int, width: int = 3) -> list[list[int]]: ... + def yeardays2calendar(self, year: int, width: int = 3) -> list[list[tuple[int, int]]]: ... + def yeardayscalendar(self, year: int, width: int = 3) -> list[list[int]]: ... + def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: ... + def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: ... + +class TextCalendar(Calendar): + def prweek(self, theweek: int, width: int) -> None: ... + def formatday(self, day: int, weekday: int, width: int) -> str: ... + def formatweek(self, theweek: int, width: int) -> str: ... + def formatweekday(self, day: int, width: int) -> str: ... + def formatweekheader(self, width: int) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = True) -> str: ... + def prmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... + def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... + def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... + def pryear(self, theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... 
+ +def firstweekday() -> int: ... +def monthcalendar(year: int, month: int) -> list[list[int]]: ... +def prweek(theweek: int, width: int) -> None: ... +def week(theweek: int, width: int) -> str: ... +def weekheader(width: int) -> str: ... +def prmonth(theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... +def month(theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... +def calendar(theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... +def prcal(theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... + +class HTMLCalendar(Calendar): + cssclasses: ClassVar[list[str]] + cssclass_noday: ClassVar[str] + cssclasses_weekday_head: ClassVar[list[str]] + cssclass_month_head: ClassVar[str] + cssclass_month: ClassVar[str] + cssclass_year: ClassVar[str] + cssclass_year_head: ClassVar[str] + def formatday(self, day: int, weekday: int) -> str: ... + def formatweek(self, theweek: int) -> str: ... + def formatweekday(self, day: int) -> str: ... + def formatweekheader(self) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + def formatmonth(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + def formatyear(self, theyear: int, width: int = 3) -> str: ... + def formatyearpage( + self, theyear: int, width: int = 3, css: str | None = "calendar.css", encoding: str | None = None + ) -> str: ... + +class different_locale: + def __init__(self, locale: _LocaleType) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *args: Unused) -> None: ... + +class LocaleTextCalendar(TextCalendar): + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... + +class LocaleHTMLCalendar(HTMLCalendar): + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... + def formatweekday(self, day: int) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + +c: TextCalendar + +def setfirstweekday(firstweekday: int) -> None: ... +def format(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... +def formatstring(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... +def timegm(tuple: tuple[int, ...] | struct_time) -> int: ... 
+ +# Data attributes +day_name: Sequence[str] +day_abbr: Sequence[str] +month_name: Sequence[str] +month_abbr: Sequence[str] + +MONDAY: Literal[0] +TUESDAY: Literal[1] +WEDNESDAY: Literal[2] +THURSDAY: Literal[3] +FRIDAY: Literal[4] +SATURDAY: Literal[5] +SUNDAY: Literal[6] + +EPOCH: Literal[1970] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cgi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cgi.pyi new file mode 100644 index 00000000..a2acfa92 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cgi.pyi @@ -0,0 +1,129 @@ +import sys +from _typeshed import SupportsGetItem, SupportsItemAccess, Unused +from builtins import list as _list, type as _type +from collections.abc import Iterable, Iterator, Mapping +from email.message import Message +from types import TracebackType +from typing import IO, Any, Protocol +from typing_extensions import Self + +__all__ = [ + "MiniFieldStorage", + "FieldStorage", + "parse", + "parse_multipart", + "parse_header", + "test", + "print_exception", + "print_environ", + "print_form", + "print_directory", + "print_arguments", + "print_environ_usage", +] + +if sys.version_info < (3, 8): + __all__ += ["parse_qs", "parse_qsl", "escape"] + +def parse( + fp: IO[Any] | None = None, + environ: SupportsItemAccess[str, str] = ..., + keep_blank_values: bool = ..., + strict_parsing: bool = ..., + separator: str = "&", +) -> dict[str, list[str]]: ... + +if sys.version_info < (3, 8): + def parse_qs(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> dict[str, list[str]]: ... + def parse_qsl(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> list[tuple[str, str]]: ... + +def parse_multipart( + fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = "utf-8", errors: str = "replace", separator: str = "&" +) -> dict[str, list[Any]]: ... + +class _Environ(Protocol): + def __getitem__(self, __k: str) -> str: ... + def keys(self) -> Iterable[str]: ... + +def parse_header(line: str) -> tuple[str, dict[str, str]]: ... +def test(environ: _Environ = ...) -> None: ... +def print_environ(environ: _Environ = ...) -> None: ... +def print_form(form: dict[str, Any]) -> None: ... +def print_directory() -> None: ... +def print_environ_usage() -> None: ... + +if sys.version_info < (3, 8): + def escape(s: str, quote: bool | None = None) -> str: ... + +class MiniFieldStorage: + # The first five "Any" attributes here are always None, but mypy doesn't support that + filename: Any + list: Any + type: Any + file: IO[bytes] | None + type_options: dict[Any, Any] + disposition: Any + disposition_options: dict[Any, Any] + headers: dict[Any, Any] + name: Any + value: Any + def __init__(self, name: Any, value: Any) -> None: ... 
+ +class FieldStorage: + FieldStorageClass: _type | None + keep_blank_values: int + strict_parsing: int + qs_on_post: str | None + headers: Mapping[str, str] | Message + fp: IO[bytes] + encoding: str + errors: str + outerboundary: bytes + bytes_read: int + limit: int | None + disposition: str + disposition_options: dict[str, str] + filename: str | None + file: IO[bytes] | None + type: str + type_options: dict[str, str] + innerboundary: bytes + length: int + done: int + list: _list[Any] | None + value: None | bytes | _list[Any] + def __init__( + self, + fp: IO[Any] | None = None, + headers: Mapping[str, str] | Message | None = None, + outerboundary: bytes = b"", + environ: SupportsGetItem[str, str] = ..., + keep_blank_values: int = 0, + strict_parsing: int = 0, + limit: int | None = None, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __getitem__(self, key: str) -> Any: ... + def getvalue(self, key: str, default: Any = None) -> Any: ... + def getfirst(self, key: str, default: Any = None) -> Any: ... + def getlist(self, key: str) -> _list[Any]: ... + def keys(self) -> _list[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + # In Python 3 it returns bytes or str IO depending on an internal flag + def make_file(self) -> IO[Any]: ... + +def print_exception( + type: type[BaseException] | None = None, + value: BaseException | None = None, + tb: TracebackType | None = None, + limit: int | None = None, +) -> None: ... +def print_arguments() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cgitb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cgitb.pyi new file mode 100644 index 00000000..4c315bf6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cgitb.pyi @@ -0,0 +1,33 @@ +from _typeshed import OptExcInfo, StrOrBytesPath +from collections.abc import Callable +from types import FrameType, TracebackType +from typing import IO, Any +from typing_extensions import Final + +__UNDEF__: Final[object] # undocumented sentinel + +def reset() -> str: ... # undocumented +def small(text: str) -> str: ... # undocumented +def strong(text: str) -> str: ... # undocumented +def grey(text: str) -> str: ... # undocumented +def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: ... # undocumented +def scanvars( + reader: Callable[[], bytes], frame: FrameType, locals: dict[str, Any] +) -> list[tuple[str, str | None, Any]]: ... # undocumented +def html(einfo: OptExcInfo, context: int = 5) -> str: ... +def text(einfo: OptExcInfo, context: int = 5) -> str: ... + +class Hook: # undocumented + def __init__( + self, + display: int = 1, + logdir: StrOrBytesPath | None = None, + context: int = 5, + file: IO[str] | None = None, + format: str = "html", + ) -> None: ... + def __call__(self, etype: type[BaseException] | None, evalue: BaseException | None, etb: TracebackType | None) -> None: ... + def handle(self, info: OptExcInfo | None = None) -> None: ... + +def handler(info: OptExcInfo | None = None) -> None: ... +def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/chunk.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/chunk.pyi new file mode 100644 index 00000000..9788d35f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/chunk.pyi @@ -0,0 +1,20 @@ +from typing import IO + +class Chunk: + closed: bool + align: bool + file: IO[bytes] + chunkname: bytes + chunksize: int + size_read: int + offset: int + seekable: bool + def __init__(self, file: IO[bytes], align: bool = True, bigendian: bool = True, inclheader: bool = False) -> None: ... + def getname(self) -> bytes: ... + def getsize(self) -> int: ... + def close(self) -> None: ... + def isatty(self) -> bool: ... + def seek(self, pos: int, whence: int = 0) -> None: ... + def tell(self) -> int: ... + def read(self, size: int = -1) -> bytes: ... + def skip(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cmath.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cmath.pyi new file mode 100644 index 00000000..0a85600e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cmath.pyi @@ -0,0 +1,43 @@ +import sys +from typing import SupportsComplex, SupportsFloat +from typing_extensions import TypeAlias + +if sys.version_info >= (3, 8): + from typing import SupportsIndex + +e: float +pi: float +inf: float +infj: complex +nan: float +nanj: complex +tau: float + +if sys.version_info >= (3, 8): + _C: TypeAlias = SupportsFloat | SupportsComplex | SupportsIndex | complex +else: + _C: TypeAlias = SupportsFloat | SupportsComplex | complex + +def acos(__z: _C) -> complex: ... +def acosh(__z: _C) -> complex: ... +def asin(__z: _C) -> complex: ... +def asinh(__z: _C) -> complex: ... +def atan(__z: _C) -> complex: ... +def atanh(__z: _C) -> complex: ... +def cos(__z: _C) -> complex: ... +def cosh(__z: _C) -> complex: ... +def exp(__z: _C) -> complex: ... +def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: ... +def isinf(__z: _C) -> bool: ... +def isnan(__z: _C) -> bool: ... +def log(__x: _C, __y_obj: _C = ...) -> complex: ... +def log10(__z: _C) -> complex: ... +def phase(__z: _C) -> float: ... +def polar(__z: _C) -> tuple[float, float]: ... +def rect(__r: float, __phi: float) -> complex: ... +def sin(__z: _C) -> complex: ... +def sinh(__z: _C) -> complex: ... +def sqrt(__z: _C) -> complex: ... +def tan(__z: _C) -> complex: ... +def tanh(__z: _C) -> complex: ... +def isfinite(__z: _C) -> bool: ... 
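+# Illustrative sketch, not part of the upstream typeshed stub: per the `_C`
+# alias above, these functions accept real, integral, or complex arguments but
+# are annotated as returning `complex` (except the predicates and
+# `phase`/`polar`), e.g.
+#
+#     import cmath
+#     reveal_type(cmath.sqrt(-1))   # complex, even for an int argument
+#     reveal_type(cmath.phase(1j))  # float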
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cmd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cmd.pyi new file mode 100644 index 00000000..b658a873 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/cmd.pyi @@ -0,0 +1,46 @@ +from collections.abc import Callable +from typing import IO, Any +from typing_extensions import Literal + +__all__ = ["Cmd"] + +PROMPT: Literal["(Cmd) "] +IDENTCHARS: str # Too big to be `Literal` + +class Cmd: + prompt: str + identchars: str + ruler: str + lastcmd: str + intro: Any | None + doc_leader: str + doc_header: str + misc_header: str + undoc_header: str + nohelp: str + use_rawinput: bool + stdin: IO[str] + stdout: IO[str] + cmdqueue: list[str] + completekey: str + def __init__(self, completekey: str = "tab", stdin: IO[str] | None = None, stdout: IO[str] | None = None) -> None: ... + old_completer: Callable[[str, int], str | None] | None + def cmdloop(self, intro: Any | None = None) -> None: ... + def precmd(self, line: str) -> str: ... + def postcmd(self, stop: bool, line: str) -> bool: ... + def preloop(self) -> None: ... + def postloop(self) -> None: ... + def parseline(self, line: str) -> tuple[str | None, str | None, str]: ... + def onecmd(self, line: str) -> bool: ... + def emptyline(self) -> bool: ... + def default(self, line: str) -> None: ... + def completedefault(self, *ignored: Any) -> list[str]: ... + def completenames(self, text: str, *ignored: Any) -> list[str]: ... + completion_matches: list[str] | None + def complete(self, text: str, state: int) -> list[str] | None: ... + def get_names(self) -> list[str]: ... + # Only the first element of args matters. + def complete_help(self, *args: Any) -> list[str]: ... + def do_help(self, arg: str) -> bool | None: ... + def print_topics(self, header: str, cmds: list[str] | None, cmdlen: Any, maxcol: int) -> None: ... + def columnize(self, list: list[str] | None, displaywidth: int = 80) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/code.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/code.pyi new file mode 100644 index 00000000..4715bd86 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/code.pyi @@ -0,0 +1,33 @@ +from codeop import CommandCompiler +from collections.abc import Callable, Mapping +from types import CodeType +from typing import Any + +__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_command"] + +class InteractiveInterpreter: + locals: Mapping[str, Any] # undocumented + compile: CommandCompiler # undocumented + def __init__(self, locals: Mapping[str, Any] | None = None) -> None: ... + def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: ... + def runcode(self, code: CodeType) -> None: ... + def showsyntaxerror(self, filename: str | None = None) -> None: ... + def showtraceback(self) -> None: ... + def write(self, data: str) -> None: ... + +class InteractiveConsole(InteractiveInterpreter): + buffer: list[str] # undocumented + filename: str # undocumented + def __init__(self, locals: Mapping[str, Any] | None = None, filename: str = "") -> None: ... + def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: ... + def push(self, line: str) -> bool: ... + def resetbuffer(self) -> None: ... + def raw_input(self, prompt: str = "") -> str: ... 
+ +def interact( + banner: str | None = None, + readfunc: Callable[[str], str] | None = None, + local: Mapping[str, Any] | None = None, + exitmsg: str | None = None, +) -> None: ... +def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/codecs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/codecs.pyi new file mode 100644 index 00000000..5a22853b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/codecs.pyi @@ -0,0 +1,292 @@ +import sys +import types +from _codecs import * +from _typeshed import ReadableBuffer +from abc import abstractmethod +from collections.abc import Callable, Generator, Iterable +from typing import Any, BinaryIO, Protocol, TextIO +from typing_extensions import Literal, Self + +__all__ = [ + "register", + "lookup", + "open", + "EncodedFile", + "BOM", + "BOM_BE", + "BOM_LE", + "BOM32_BE", + "BOM32_LE", + "BOM64_BE", + "BOM64_LE", + "BOM_UTF8", + "BOM_UTF16", + "BOM_UTF16_LE", + "BOM_UTF16_BE", + "BOM_UTF32", + "BOM_UTF32_LE", + "BOM_UTF32_BE", + "CodecInfo", + "Codec", + "IncrementalEncoder", + "IncrementalDecoder", + "StreamReader", + "StreamWriter", + "StreamReaderWriter", + "StreamRecoder", + "getencoder", + "getdecoder", + "getincrementalencoder", + "getincrementaldecoder", + "getreader", + "getwriter", + "encode", + "decode", + "iterencode", + "iterdecode", + "strict_errors", + "ignore_errors", + "replace_errors", + "xmlcharrefreplace_errors", + "backslashreplace_errors", + "namereplace_errors", + "register_error", + "lookup_error", +] + +BOM32_BE: Literal[b"\xfe\xff"] +BOM32_LE: Literal[b"\xff\xfe"] +BOM64_BE: Literal[b"\x00\x00\xfe\xff"] +BOM64_LE: Literal[b"\xff\xfe\x00\x00"] + +class _WritableStream(Protocol): + def write(self, __data: bytes) -> object: ... + def seek(self, __offset: int, __whence: int) -> object: ... + def close(self) -> object: ... + +class _ReadableStream(Protocol): + def read(self, __size: int = ...) -> bytes: ... + def seek(self, __offset: int, __whence: int) -> object: ... + def close(self) -> object: ... + +class _Stream(_WritableStream, _ReadableStream, Protocol): ... + +# TODO: this only satisfies the most common interface, where +# bytes is the raw form and str is the cooked form. +# In the long run, both should become template parameters maybe? +# There *are* bytes->bytes and str->str encodings in the standard library. +# They were much more common in Python 2 than in Python 3. + +class _Encoder(Protocol): + def __call__(self, input: str, errors: str = ...) -> tuple[bytes, int]: ... # signature of Codec().encode + +class _Decoder(Protocol): + def __call__(self, input: bytes, errors: str = ...) -> tuple[str, int]: ... # signature of Codec().decode + +class _StreamReader(Protocol): + def __call__(self, stream: _ReadableStream, errors: str = ...) -> StreamReader: ... + +class _StreamWriter(Protocol): + def __call__(self, stream: _WritableStream, errors: str = ...) -> StreamWriter: ... + +class _IncrementalEncoder(Protocol): + def __call__(self, errors: str = ...) -> IncrementalEncoder: ... + +class _IncrementalDecoder(Protocol): + def __call__(self, errors: str = ...) -> IncrementalDecoder: ... + +class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): + @property + def encode(self) -> _Encoder: ... + @property + def decode(self) -> _Decoder: ... + @property + def streamreader(self) -> _StreamReader: ... 
+ @property + def streamwriter(self) -> _StreamWriter: ... + @property + def incrementalencoder(self) -> _IncrementalEncoder: ... + @property + def incrementaldecoder(self) -> _IncrementalDecoder: ... + name: str + def __new__( + cls, + encode: _Encoder, + decode: _Decoder, + streamreader: _StreamReader | None = None, + streamwriter: _StreamWriter | None = None, + incrementalencoder: _IncrementalEncoder | None = None, + incrementaldecoder: _IncrementalDecoder | None = None, + name: str | None = None, + *, + _is_text_encoding: bool | None = None, + ) -> Self: ... + +def getencoder(encoding: str) -> _Encoder: ... +def getdecoder(encoding: str) -> _Decoder: ... +def getincrementalencoder(encoding: str) -> _IncrementalEncoder: ... +def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... +def getreader(encoding: str) -> _StreamReader: ... +def getwriter(encoding: str) -> _StreamWriter: ... + +if sys.version_info >= (3, 8): + def open( + filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = -1 + ) -> StreamReaderWriter: ... + +else: + def open( + filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = 1 + ) -> StreamReaderWriter: ... + +def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = None, errors: str = "strict") -> StreamRecoder: ... +def iterencode(iterator: Iterable[str], encoding: str, errors: str = "strict") -> Generator[bytes, None, None]: ... +def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") -> Generator[str, None, None]: ... + +BOM: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` +BOM_BE: Literal[b"\xfe\xff"] +BOM_LE: Literal[b"\xff\xfe"] +BOM_UTF8: Literal[b"\xef\xbb\xbf"] +BOM_UTF16: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` +BOM_UTF16_BE: Literal[b"\xfe\xff"] +BOM_UTF16_LE: Literal[b"\xff\xfe"] +BOM_UTF32: Literal[b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff"] # depends on `sys.byteorder` +BOM_UTF32_BE: Literal[b"\x00\x00\xfe\xff"] +BOM_UTF32_LE: Literal[b"\xff\xfe\x00\x00"] + +def strict_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def replace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def ignore_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def xmlcharrefreplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def backslashreplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def namereplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... + +class Codec: + # These are sort of @abstractmethod but sort of not. + # The StreamReader and StreamWriter subclasses only implement one. + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... + +class IncrementalEncoder: + errors: str + def __init__(self, errors: str = "strict") -> None: ... + @abstractmethod + def encode(self, input: str, final: bool = False) -> bytes: ... + def reset(self) -> None: ... + # documentation says int but str is needed for the subclass. + def getstate(self) -> int | str: ... + def setstate(self, state: int | str) -> None: ... + +class IncrementalDecoder: + errors: str + def __init__(self, errors: str = "strict") -> None: ... + @abstractmethod + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + def reset(self) -> None: ... + def getstate(self) -> tuple[bytes, int]: ... 
+ def setstate(self, state: tuple[bytes, int]) -> None: ... + +# These are not documented but used in encodings/*.py implementations. +class BufferedIncrementalEncoder(IncrementalEncoder): + buffer: str + def __init__(self, errors: str = "strict") -> None: ... + @abstractmethod + def _buffer_encode(self, input: str, errors: str, final: bool) -> tuple[bytes, int]: ... + def encode(self, input: str, final: bool = False) -> bytes: ... + +class BufferedIncrementalDecoder(IncrementalDecoder): + buffer: bytes + def __init__(self, errors: str = "strict") -> None: ... + @abstractmethod + def _buffer_decode(self, input: ReadableBuffer, errors: str, final: bool) -> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... + +# TODO: it is not possible to specify the requirement that all other +# attributes and methods are passed-through from the stream. +class StreamWriter(Codec): + stream: _WritableStream + errors: str + def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: ... + def write(self, object: str) -> None: ... + def writelines(self, list: Iterable[str]) -> None: ... + def reset(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... + +class StreamReader(Codec): + stream: _ReadableStream + errors: str + def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: ... + def read(self, size: int = -1, chars: int = -1, firstline: bool = False) -> str: ... + def readline(self, size: int | None = None, keepends: bool = True) -> str: ... + def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: ... + def reset(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> str: ... + def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... + +# Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing +# and delegates attributes to the underlying binary stream with __getattr__. +class StreamReaderWriter(TextIO): + stream: _Stream + def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict") -> None: ... + def read(self, size: int = -1) -> str: ... + def readline(self, size: int | None = None) -> str: ... + def readlines(self, sizehint: int | None = None) -> list[str]: ... + def __next__(self) -> str: ... + def __iter__(self) -> Self: ... + def write(self, data: str) -> None: ... # type: ignore[override] + def writelines(self, list: Iterable[str]) -> None: ... + def reset(self) -> None: ... + def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] + def __enter__(self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __getattr__(self, name: str) -> Any: ... + # These methods don't actually exist directly, but they are needed to satisfy the TextIO + # interface. At runtime, they are delegated through __getattr__. + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... 
+ def truncate(self, size: int | None = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def writable(self) -> bool: ... + +class StreamRecoder(BinaryIO): + def __init__( + self, + stream: _Stream, + encode: _Encoder, + decode: _Decoder, + Reader: _StreamReader, + Writer: _StreamWriter, + errors: str = "strict", + ) -> None: ... + def read(self, size: int = -1) -> bytes: ... + def readline(self, size: int | None = None) -> bytes: ... + def readlines(self, sizehint: int | None = None) -> list[bytes]: ... + def __next__(self) -> bytes: ... + def __iter__(self) -> Self: ... + def write(self, data: bytes) -> None: ... # type: ignore[override] + def writelines(self, list: Iterable[bytes]) -> None: ... + def reset(self) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __enter__(self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] + # These methods don't actually exist directly, but they are needed to satisfy the BinaryIO + # interface. At runtime, they are delegated through __getattr__. + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... + def truncate(self, size: int | None = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def writable(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/codeop.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/codeop.pyi new file mode 100644 index 00000000..6a51b778 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/codeop.pyi @@ -0,0 +1,13 @@ +from types import CodeType + +__all__ = ["compile_command", "Compile", "CommandCompiler"] + +def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... + +class Compile: + flags: int + def __call__(self, source: str, filename: str, symbol: str) -> CodeType: ... + +class CommandCompiler: + compiler: Compile + def __call__(self, source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... 
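For reference, a minimal sketch (not part of the vendored diff) of the behaviour the codeop annotations above describe: the compiler returns a code object for complete input, None for incomplete input, and raises SyntaxError for invalid input. The source strings are illustrative only.

import codeop

compiler = codeop.CommandCompiler()

complete = compiler("x = 1 + 2")    # complete statement -> CodeType
pending = compiler("if x > 0:")     # incomplete block   -> None
assert complete is not None and pending is None

exec(complete, {})                  # the returned code object is executable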
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/collections/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/collections/__init__.pyi new file mode 100644 index 00000000..893a289d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/collections/__init__.pyi @@ -0,0 +1,432 @@ +import sys +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from typing import Any, Generic, NoReturn, TypeVar, overload +from typing_extensions import Self, SupportsIndex, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +if sys.version_info >= (3, 10): + from collections.abc import ( + Callable, + ItemsView, + Iterable, + Iterator, + KeysView, + Mapping, + MutableMapping, + MutableSequence, + Reversible, + Sequence, + ValuesView, + ) +else: + from _collections_abc import * + +__all__ = ["ChainMap", "Counter", "OrderedDict", "UserDict", "UserList", "UserString", "defaultdict", "deque", "namedtuple"] + +_S = TypeVar("_S") +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_KT_co = TypeVar("_KT_co", covariant=True) +_VT_co = TypeVar("_VT_co", covariant=True) + +# namedtuple is special-cased in the type checker; the initializer is ignored. +def namedtuple( + typename: str, + field_names: str | Iterable[str], + *, + rename: bool = False, + module: str | None = None, + defaults: Iterable[Any] | None = None, +) -> type[tuple[Any, ...]]: ... + +class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + data: dict[_KT, _VT] + # __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics + @overload + def __init__(self, __dict: None = None) -> None: ... + @overload + def __init__(self: UserDict[str, _VT], __dict: None = None, **kwargs: _VT) -> None: ... + @overload + def __init__(self, __dict: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... + @overload + def __init__(self: UserDict[str, _VT], __dict: SupportsKeysAndGetItem[str, _VT], **kwargs: _VT) -> None: ... + @overload + def __init__(self, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ... + @overload + def __init__(self: UserDict[str, _VT], __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None: ... + @overload + def __init__(self: UserDict[str, str], __iterable: Iterable[list[str]]) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __setitem__(self, key: _KT, item: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def __contains__(self, key: object) -> bool: ... + def copy(self) -> Self: ... + def __copy__(self) -> Self: ... + + # `UserDict.fromkeys` has the same semantics as `dict.fromkeys`, so should be kept in line with `dict.fromkeys`. + # TODO: Much like `dict.fromkeys`, the true signature of `UserDict.fromkeys` is inexpressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> UserDict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ... + if sys.version_info >= (3, 9): + def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... 
+ def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] + # UserDict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + +class UserList(MutableSequence[_T]): + data: list[_T] + @overload + def __init__(self, initlist: None = None) -> None: ... + @overload + def __init__(self, initlist: Iterable[_T]) -> None: ... + def __lt__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __le__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __gt__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __ge__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __contains__(self, item: object) -> bool: ... + def __len__(self) -> int: ... + @overload + def __getitem__(self, i: SupportsIndex) -> _T: ... + @overload + def __getitem__(self, i: slice) -> Self: ... + @overload + def __setitem__(self, i: SupportsIndex, item: _T) -> None: ... + @overload + def __setitem__(self, i: slice, item: Iterable[_T]) -> None: ... + def __delitem__(self, i: SupportsIndex | slice) -> None: ... + def __add__(self, other: Iterable[_T]) -> Self: ... + def __radd__(self, other: Iterable[_T]) -> Self: ... + def __iadd__(self, other: Iterable[_T]) -> Self: ... + def __mul__(self, n: int) -> Self: ... + def __rmul__(self, n: int) -> Self: ... + def __imul__(self, n: int) -> Self: ... + def append(self, item: _T) -> None: ... + def insert(self, i: int, item: _T) -> None: ... + def pop(self, i: int = -1) -> _T: ... + def remove(self, item: _T) -> None: ... + def copy(self) -> Self: ... + def __copy__(self) -> Self: ... + def count(self, item: _T) -> int: ... + # All arguments are passed to `list.index` at runtime, so the signature should be kept in line with `list.index`. + def index(self, item: _T, __start: SupportsIndex = 0, __stop: SupportsIndex = sys.maxsize) -> int: ... + # All arguments are passed to `list.sort` at runtime, so the signature should be kept in line with `list.sort`. + @overload + def sort(self: UserList[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... + def extend(self, other: Iterable[_T]) -> None: ... + +class UserString(Sequence[UserString]): + data: str + def __init__(self, seq: object) -> None: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __complex__(self) -> complex: ... + def __getnewargs__(self) -> tuple[str]: ... + def __lt__(self, string: str | UserString) -> bool: ... + def __le__(self, string: str | UserString) -> bool: ... + def __gt__(self, string: str | UserString) -> bool: ... + def __ge__(self, string: str | UserString) -> bool: ... + def __eq__(self, string: object) -> bool: ... + def __contains__(self, char: object) -> bool: ... + def __len__(self) -> int: ... + def __getitem__(self, index: SupportsIndex | slice) -> Self: ... + def __iter__(self) -> Iterator[Self]: ... + def __reversed__(self) -> Iterator[Self]: ... + def __add__(self, other: object) -> Self: ... + def __radd__(self, other: object) -> Self: ... + def __mul__(self, n: int) -> Self: ... + def __rmul__(self, n: int) -> Self: ... + def __mod__(self, args: Any) -> Self: ... 
+ if sys.version_info >= (3, 8): + def __rmod__(self, template: object) -> Self: ... + else: + def __rmod__(self, format: Any) -> Self: ... + + def capitalize(self) -> Self: ... + def casefold(self) -> Self: ... + def center(self, width: int, *args: Any) -> Self: ... + def count(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + if sys.version_info >= (3, 8): + def encode(self: UserString, encoding: str | None = "utf-8", errors: str | None = "strict") -> bytes: ... + else: + def encode(self, encoding: str | None = None, errors: str | None = None) -> Self: ... + + def endswith(self, suffix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... + def expandtabs(self, tabsize: int = 8) -> Self: ... + def find(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def format(self, *args: Any, **kwds: Any) -> str: ... + def format_map(self, mapping: Mapping[str, Any]) -> str: ... + def index(self, sub: str, start: int = 0, end: int = sys.maxsize) -> int: ... + def isalpha(self) -> bool: ... + def isalnum(self) -> bool: ... + def isdecimal(self) -> bool: ... + def isdigit(self) -> bool: ... + def isidentifier(self) -> bool: ... + def islower(self) -> bool: ... + def isnumeric(self) -> bool: ... + def isprintable(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def isascii(self) -> bool: ... + def join(self, seq: Iterable[str]) -> str: ... + def ljust(self, width: int, *args: Any) -> Self: ... + def lower(self) -> Self: ... + def lstrip(self, chars: str | None = None) -> Self: ... + maketrans = str.maketrans + def partition(self, sep: str) -> tuple[str, str, str]: ... + if sys.version_info >= (3, 9): + def removeprefix(self, __prefix: str | UserString) -> Self: ... + def removesuffix(self, __suffix: str | UserString) -> Self: ... + + def replace(self, old: str | UserString, new: str | UserString, maxsplit: int = -1) -> Self: ... + def rfind(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def rindex(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def rjust(self, width: int, *args: Any) -> Self: ... + def rpartition(self, sep: str) -> tuple[str, str, str]: ... + def rstrip(self, chars: str | None = None) -> Self: ... + def split(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... + def rsplit(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... + def splitlines(self, keepends: bool = False) -> list[str]: ... + def startswith(self, prefix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... + def strip(self, chars: str | None = None) -> Self: ... + def swapcase(self) -> Self: ... + def title(self) -> Self: ... + def translate(self, *args: Any) -> Self: ... + def upper(self) -> Self: ... + def zfill(self, width: int) -> Self: ... + +class deque(MutableSequence[_T], Generic[_T]): + @property + def maxlen(self) -> int | None: ... + @overload + def __init__(self, *, maxlen: int | None = None) -> None: ... + @overload + def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ... + def append(self, __x: _T) -> None: ... + def appendleft(self, __x: _T) -> None: ... + def copy(self) -> Self: ... + def count(self, __x: _T) -> int: ... + def extend(self, __iterable: Iterable[_T]) -> None: ... + def extendleft(self, __iterable: Iterable[_T]) -> None: ... 
+ def insert(self, __i: int, __x: _T) -> None: ... + def index(self, __x: _T, __start: int = 0, __stop: int = ...) -> int: ... + def pop(self) -> _T: ... # type: ignore[override] + def popleft(self) -> _T: ... + def remove(self, __value: _T) -> None: ... + def rotate(self, __n: int = 1) -> None: ... + def __copy__(self) -> Self: ... + def __len__(self) -> int: ... + # These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores + def __getitem__(self, __index: SupportsIndex) -> _T: ... # type: ignore[override] + def __setitem__(self, __i: SupportsIndex, __x: _T) -> None: ... # type: ignore[override] + def __delitem__(self, __i: SupportsIndex) -> None: ... # type: ignore[override] + def __contains__(self, __o: object) -> bool: ... + def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... + def __iadd__(self, __iterable: Iterable[_T]) -> Self: ... + def __add__(self, __other: Self) -> Self: ... + def __mul__(self, __other: int) -> Self: ... + def __imul__(self, __other: int) -> Self: ... + def __lt__(self, __other: deque[_T]) -> bool: ... + def __le__(self, __other: deque[_T]) -> bool: ... + def __gt__(self, __other: deque[_T]) -> bool: ... + def __ge__(self, __other: deque[_T]) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +class Counter(dict[_T, int], Generic[_T]): + @overload + def __init__(self, __iterable: None = None) -> None: ... + @overload + def __init__(self: Counter[str], __iterable: None = None, **kwargs: int) -> None: ... + @overload + def __init__(self, __mapping: SupportsKeysAndGetItem[_T, int]) -> None: ... + @overload + def __init__(self, __iterable: Iterable[_T]) -> None: ... + def copy(self) -> Self: ... + def elements(self) -> Iterator[_T]: ... + def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: ... + @classmethod + def fromkeys(cls, iterable: Any, v: int | None = None) -> NoReturn: ... # type: ignore[override] + @overload + def subtract(self, __iterable: None = None) -> None: ... + @overload + def subtract(self, __mapping: Mapping[_T, int]) -> None: ... + @overload + def subtract(self, __iterable: Iterable[_T]) -> None: ... + # Unlike dict.update(), use Mapping instead of SupportsKeysAndGetItem for the first overload + # (source code does an `isinstance(other, Mapping)` check) + # + # The second overload is also deliberately different to dict.update() + # (if it were `Iterable[_T] | Iterable[tuple[_T, int]]`, + # the tuples would be added as keys, breaking type safety) + @overload # type: ignore[override] + def update(self, __m: Mapping[_T, int], **kwargs: int) -> None: ... + @overload + def update(self, __iterable: Iterable[_T], **kwargs: int) -> None: ... + @overload + def update(self, __iterable: None = None, **kwargs: int) -> None: ... + def __missing__(self, key: _T) -> int: ... + def __delitem__(self, elem: object) -> None: ... + if sys.version_info >= (3, 10): + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + + def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: ... + def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... + def __and__(self, other: Counter[_T]) -> Counter[_T]: ... + def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: ... # type: ignore[override] + def __pos__(self) -> Counter[_T]: ... + def __neg__(self) -> Counter[_T]: ... + # several type: ignores because __iadd__ is supposedly incompatible with __add__, etc. 
+ def __iadd__(self, other: Counter[_T]) -> Self: ... # type: ignore[misc] + def __isub__(self, other: Counter[_T]) -> Self: ... + def __iand__(self, other: Counter[_T]) -> Self: ... + def __ior__(self, other: Counter[_T]) -> Self: ... # type: ignore[override,misc] + if sys.version_info >= (3, 10): + def total(self) -> int: ... + def __le__(self, other: Counter[Any]) -> bool: ... + def __lt__(self, other: Counter[Any]) -> bool: ... + def __ge__(self, other: Counter[Any]) -> bool: ... + def __gt__(self, other: Counter[Any]) -> bool: ... + +# The pure-Python implementations of the "views" classes +# These are exposed at runtime in `collections/__init__.py` +class _OrderedDictKeysView(KeysView[_KT_co], Reversible[_KT_co]): + def __reversed__(self) -> Iterator[_KT_co]: ... + +class _OrderedDictItemsView(ItemsView[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + +class _OrderedDictValuesView(ValuesView[_VT_co], Reversible[_VT_co]): + def __reversed__(self) -> Iterator[_VT_co]: ... + +# The C implementations of the "views" classes +# (At runtime, these are called `odict_keys`, `odict_items` and `odict_values`, +# but they are not exposed anywhere) +# pyright doesn't have a specific error code for subclassing error! +@final +class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore + def __reversed__(self) -> Iterator[_KT_co]: ... + +@final +class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + +@final +class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore + def __reversed__(self) -> Iterator[_VT_co]: ... + +class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): + def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ... + def move_to_end(self, key: _KT, last: bool = True) -> None: ... + def copy(self) -> Self: ... + def __reversed__(self) -> Iterator[_KT]: ... + def keys(self) -> _odict_keys[_KT, _VT]: ... + def items(self) -> _odict_items[_KT, _VT]: ... + def values(self) -> _odict_values[_KT, _VT]: ... + # The signature of OrderedDict.fromkeys should be kept in line with `dict.fromkeys`, modulo positional-only differences. + # Like dict.fromkeys, its true signature is not expressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... + # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + @overload + def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... + +class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): + default_factory: Callable[[], _VT] | None + @overload + def __init__(self) -> None: ... + @overload + def __init__(self: defaultdict[str, _VT], **kwargs: _VT) -> None: ... + @overload + def __init__(self, __default_factory: Callable[[], _VT] | None) -> None: ... 
+ @overload + def __init__(self: defaultdict[str, _VT], __default_factory: Callable[[], _VT] | None, **kwargs: _VT) -> None: ... + @overload + def __init__(self, __default_factory: Callable[[], _VT] | None, __map: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... + @overload + def __init__( + self: defaultdict[str, _VT], + __default_factory: Callable[[], _VT] | None, + __map: SupportsKeysAndGetItem[str, _VT], + **kwargs: _VT, + ) -> None: ... + @overload + def __init__(self, __default_factory: Callable[[], _VT] | None, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ... + @overload + def __init__( + self: defaultdict[str, _VT], + __default_factory: Callable[[], _VT] | None, + __iterable: Iterable[tuple[str, _VT]], + **kwargs: _VT, + ) -> None: ... + def __missing__(self, __key: _KT) -> _VT: ... + def __copy__(self) -> Self: ... + def copy(self) -> Self: ... + +class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + maps: list[MutableMapping[_KT, _VT]] + def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... + def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ... + @property + def parents(self) -> Self: ... + def __setitem__(self, key: _KT, value: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __iter__(self) -> Iterator[_KT]: ... + def __len__(self) -> int: ... + def __contains__(self, key: object) -> bool: ... + def __missing__(self, key: _KT) -> _VT: ... # undocumented + def __bool__(self) -> bool: ... + # Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + @overload + def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T) -> _VT | _T: ... + def copy(self) -> Self: ... + __copy__ = copy + # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], __value: None = None) -> ChainMap[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> ChainMap[_T, _S]: ... + if sys.version_info >= (3, 9): + def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + # ChainMap.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... 
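For reference, a minimal sketch (not part of the vendored diff) exercising the classes annotated above, consistent with the signatures in the stub; the data values are illustrative only.

from collections import ChainMap, Counter, defaultdict, deque

counts = Counter("abracadabra")
print(counts.most_common(2))        # [('a', 5), ('b', 2)]

window = deque(maxlen=3)
for i in range(5):
    window.append(i)                # older items fall off the left end
print(list(window))                 # [2, 3, 4]

groups = defaultdict(list)
groups["evens"].append(2)           # missing key -> default_factory() == []

settings = ChainMap({"timeout": 5}, {"timeout": 30, "retries": 3})
print(settings["timeout"], settings["retries"])   # 5 3 -- the first map wins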
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/collections/abc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/collections/abc.pyi new file mode 100644 index 00000000..3df2a1d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/collections/abc.pyi @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ as __all__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/colorsys.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/colorsys.pyi new file mode 100644 index 00000000..443ee828 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/colorsys.pyi @@ -0,0 +1,13 @@ +__all__ = ["rgb_to_yiq", "yiq_to_rgb", "rgb_to_hls", "hls_to_rgb", "rgb_to_hsv", "hsv_to_rgb"] + +def rgb_to_yiq(r: float, g: float, b: float) -> tuple[float, float, float]: ... +def yiq_to_rgb(y: float, i: float, q: float) -> tuple[float, float, float]: ... +def rgb_to_hls(r: float, g: float, b: float) -> tuple[float, float, float]: ... +def hls_to_rgb(h: float, l: float, s: float) -> tuple[float, float, float]: ... +def rgb_to_hsv(r: float, g: float, b: float) -> tuple[float, float, float]: ... +def hsv_to_rgb(h: float, s: float, v: float) -> tuple[float, float, float]: ... + +# TODO undocumented +ONE_SIXTH: float +ONE_THIRD: float +TWO_THIRD: float diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/compileall.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/compileall.pyi new file mode 100644 index 00000000..7520c2f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/compileall.pyi @@ -0,0 +1,111 @@ +import sys +from _typeshed import StrPath +from py_compile import PycInvalidationMode +from typing import Any, Protocol + +__all__ = ["compile_dir", "compile_file", "compile_path"] + +class _SupportsSearch(Protocol): + def search(self, string: str) -> Any: ... + +if sys.version_info >= (3, 10): + def compile_dir( + dir: StrPath, + maxlevels: int | None = None, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: StrPath | None = None, + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> int: ... + def compile_file( + fullname: StrPath, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: StrPath | None = None, + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> int: ... 
+ +elif sys.version_info >= (3, 9): + def compile_dir( + dir: StrPath, + maxlevels: int | None = None, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: str | None = None, # https://bugs.python.org/issue40447 + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> int: ... + def compile_file( + fullname: StrPath, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: str | None = None, # https://bugs.python.org/issue40447 + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> int: ... + +else: + def compile_dir( + dir: StrPath, + maxlevels: int = 10, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, + ) -> int: ... + def compile_file( + fullname: StrPath, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + ) -> int: ... + +def compile_path( + skip_curdir: bool = ..., + maxlevels: int = 0, + force: bool = False, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, +) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/__init__.pyi new file mode 100644 index 00000000..ff2e72bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/__init__.pyi @@ -0,0 +1,36 @@ +import sys + +from ._base import ( + ALL_COMPLETED as ALL_COMPLETED, + FIRST_COMPLETED as FIRST_COMPLETED, + FIRST_EXCEPTION as FIRST_EXCEPTION, + BrokenExecutor as BrokenExecutor, + CancelledError as CancelledError, + Executor as Executor, + Future as Future, + TimeoutError as TimeoutError, + as_completed as as_completed, + wait as wait, +) +from .process import ProcessPoolExecutor as ProcessPoolExecutor +from .thread import ThreadPoolExecutor as ThreadPoolExecutor + +if sys.version_info >= (3, 8): + from ._base import InvalidStateError as InvalidStateError + +__all__ = ( + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "CancelledError", + "TimeoutError", + "BrokenExecutor", + "Future", + "Executor", + "wait", + "as_completed", + "ProcessPoolExecutor", + "ThreadPoolExecutor", +) + +def __dir__() -> tuple[str, ...]: ... 
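For reference, a minimal sketch (not part of the vendored diff) of the package-level API re-exported above: submit() returns Future objects, which as_completed() and wait() consume. The worker function is illustrative only.

from concurrent.futures import ALL_COMPLETED, ThreadPoolExecutor, as_completed, wait

def square(n: int) -> int:
    return n * n

with ThreadPoolExecutor(max_workers=2) as pool:
    futures = [pool.submit(square, n) for n in range(4)]
    for fut in as_completed(futures):      # yields each Future as it finishes
        print(fut.result())
    done, not_done = wait(futures, return_when=ALL_COMPLETED)
    assert not not_done                    # everything has completed by now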
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/_base.pyi new file mode 100644 index 00000000..eb5ca4e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/_base.pyi @@ -0,0 +1,102 @@ +import sys +import threading +from _typeshed import Unused +from collections.abc import Callable, Iterable, Iterator +from logging import Logger +from types import TracebackType +from typing import Any, Generic, NamedTuple, TypeVar +from typing_extensions import Literal, ParamSpec, Self + +if sys.version_info >= (3, 9): + from types import GenericAlias + +FIRST_COMPLETED: Literal["FIRST_COMPLETED"] +FIRST_EXCEPTION: Literal["FIRST_EXCEPTION"] +ALL_COMPLETED: Literal["ALL_COMPLETED"] +PENDING: Literal["PENDING"] +RUNNING: Literal["RUNNING"] +CANCELLED: Literal["CANCELLED"] +CANCELLED_AND_NOTIFIED: Literal["CANCELLED_AND_NOTIFIED"] +FINISHED: Literal["FINISHED"] +_FUTURE_STATES: list[str] +_STATE_TO_DESCRIPTION_MAP: dict[str, str] +LOGGER: Logger + +class Error(Exception): ... +class CancelledError(Error): ... +class TimeoutError(Error): ... + +if sys.version_info >= (3, 8): + class InvalidStateError(Error): ... + +class BrokenExecutor(RuntimeError): ... + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +class Future(Generic[_T]): + def cancel(self) -> bool: ... + def cancelled(self) -> bool: ... + def running(self) -> bool: ... + def done(self) -> bool: ... + def add_done_callback(self, fn: Callable[[Future[_T]], object]) -> None: ... + def result(self, timeout: float | None = None) -> _T: ... + def set_running_or_notify_cancel(self) -> bool: ... + def set_result(self, result: _T) -> None: ... + def exception(self, timeout: float | None = None) -> BaseException | None: ... + def set_exception(self, exception: BaseException | None) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class Executor: + if sys.version_info >= (3, 9): + def submit(self, __fn: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... + else: + def submit(self, fn: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... + + def map( + self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 + ) -> Iterator[_T]: ... + if sys.version_info >= (3, 9): + def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ... + else: + def shutdown(self, wait: bool = True) -> None: ... + + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + +def as_completed(fs: Iterable[Future[_T]], timeout: float | None = None) -> Iterator[Future[_T]]: ... + +class DoneAndNotDoneFutures(NamedTuple, Generic[_T]): + done: set[Future[_T]] + not_done: set[Future[_T]] + +def wait( + fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" +) -> DoneAndNotDoneFutures[_T]: ... + +class _Waiter: + event: threading.Event + finished_futures: list[Future[Any]] + def add_result(self, future: Future[Any]) -> None: ... + def add_exception(self, future: Future[Any]) -> None: ... + def add_cancelled(self, future: Future[Any]) -> None: ... 
+ +class _AsCompletedWaiter(_Waiter): + lock: threading.Lock + +class _FirstCompletedWaiter(_Waiter): ... + +class _AllCompletedWaiter(_Waiter): + num_pending_calls: int + stop_on_exception: bool + lock: threading.Lock + def __init__(self, num_pending_calls: int, stop_on_exception: bool) -> None: ... + +class _AcquireFutures: + futures: Iterable[Future[Any]] + def __init__(self, futures: Iterable[Future[Any]]) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *args: Unused) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/process.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/process.pyi new file mode 100644 index 00000000..85af2e7f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/process.pyi @@ -0,0 +1,192 @@ +import sys +from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, MutableSequence +from multiprocessing.connection import Connection +from multiprocessing.context import BaseContext, Process +from multiprocessing.queues import Queue, SimpleQueue +from threading import Lock, Semaphore, Thread +from types import TracebackType +from typing import Any, Generic, TypeVar +from weakref import ref + +from ._base import BrokenExecutor, Executor, Future + +_T = TypeVar("_T") + +_threads_wakeups: MutableMapping[Any, Any] +_global_shutdown: bool + +class _ThreadWakeup: + _closed: bool + _reader: Connection + _writer: Connection + def close(self) -> None: ... + def wakeup(self) -> None: ... + def clear(self) -> None: ... + +def _python_exit() -> None: ... + +EXTRA_QUEUED_CALLS: int + +_MAX_WINDOWS_WORKERS: int + +class _RemoteTraceback(Exception): + tb: str + def __init__(self, tb: TracebackType) -> None: ... + +class _ExceptionWithTraceback: + exc: BaseException + tb: TracebackType + def __init__(self, exc: BaseException, tb: TracebackType) -> None: ... + def __reduce__(self) -> str | tuple[Any, ...]: ... + +def _rebuild_exc(exc: Exception, tb: str) -> Exception: ... + +class _WorkItem(Generic[_T]): + future: Future[_T] + fn: Callable[..., _T] + args: Iterable[Any] + kwargs: Mapping[str, Any] + def __init__(self, future: Future[_T], fn: Callable[..., _T], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... + +class _ResultItem: + work_id: int + exception: Exception + result: Any + if sys.version_info >= (3, 11): + exit_pid: int | None + def __init__( + self, work_id: int, exception: Exception | None = None, result: Any | None = None, exit_pid: int | None = None + ) -> None: ... + else: + def __init__(self, work_id: int, exception: Exception | None = None, result: Any | None = None) -> None: ... + +class _CallItem: + work_id: int + fn: Callable[..., Any] + args: Iterable[Any] + kwargs: Mapping[str, Any] + def __init__(self, work_id: int, fn: Callable[..., Any], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... + +class _SafeQueue(Queue[Future[Any]]): + pending_work_items: dict[int, _WorkItem[Any]] + shutdown_lock: Lock + thread_wakeup: _ThreadWakeup + if sys.version_info >= (3, 9): + def __init__( + self, + max_size: int | None = 0, + *, + ctx: BaseContext, + pending_work_items: dict[int, _WorkItem[Any]], + shutdown_lock: Lock, + thread_wakeup: _ThreadWakeup, + ) -> None: ... + else: + def __init__( + self, max_size: int | None = 0, *, ctx: BaseContext, pending_work_items: dict[int, _WorkItem[Any]] + ) -> None: ... 
+ + def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... + +def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: ... +def _process_chunk(fn: Callable[..., _T], chunk: Iterable[tuple[Any, ...]]) -> list[_T]: ... + +if sys.version_info >= (3, 11): + def _sendback_result( + result_queue: SimpleQueue[_WorkItem[Any]], + work_id: int, + result: Any | None = None, + exception: Exception | None = None, + exit_pid: int | None = None, + ) -> None: ... + +else: + def _sendback_result( + result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = None, exception: Exception | None = None + ) -> None: ... + +if sys.version_info >= (3, 11): + def _process_worker( + call_queue: Queue[_CallItem], + result_queue: SimpleQueue[_ResultItem], + initializer: Callable[..., object] | None, + initargs: tuple[Any, ...], + max_tasks: int | None = None, + ) -> None: ... + +else: + def _process_worker( + call_queue: Queue[_CallItem], + result_queue: SimpleQueue[_ResultItem], + initializer: Callable[..., object] | None, + initargs: tuple[Any, ...], + ) -> None: ... + +if sys.version_info >= (3, 9): + class _ExecutorManagerThread(Thread): + thread_wakeup: _ThreadWakeup + shutdown_lock: Lock + executor_reference: ref[Any] + processes: MutableMapping[int, Process] + call_queue: Queue[_CallItem] + result_queue: SimpleQueue[_ResultItem] + work_ids_queue: Queue[int] + pending_work_items: dict[int, _WorkItem[Any]] + def __init__(self, executor: ProcessPoolExecutor) -> None: ... + def run(self) -> None: ... + def add_call_item_to_queue(self) -> None: ... + def wait_result_broken_or_wakeup(self) -> tuple[Any, bool, str]: ... + def process_result_item(self, result_item: int | _ResultItem) -> None: ... + def is_shutting_down(self) -> bool: ... + def terminate_broken(self, cause: str) -> None: ... + def flag_executor_shutting_down(self) -> None: ... + def shutdown_workers(self) -> None: ... + def join_executor_internals(self) -> None: ... + def get_n_children_alive(self) -> int: ... + +_system_limits_checked: bool +_system_limited: bool | None + +def _check_system_limits() -> None: ... +def _chain_from_iterable_of_lists(iterable: Iterable[MutableSequence[Any]]) -> Any: ... + +class BrokenProcessPool(BrokenExecutor): ... + +class ProcessPoolExecutor(Executor): + _mp_context: BaseContext | None = ... + _initializer: Callable[..., None] | None = ... + _initargs: tuple[Any, ...] = ... + _executor_manager_thread: _ThreadWakeup + _processes: MutableMapping[int, Process] + _shutdown_thread: bool + _shutdown_lock: Lock + _idle_worker_semaphore: Semaphore + _broken: bool + _queue_count: int + _pending_work_items: dict[int, _WorkItem[Any]] + _cancel_pending_futures: bool + _executor_manager_thread_wakeup: _ThreadWakeup + _result_queue: SimpleQueue[Any] + _work_ids: Queue[Any] + if sys.version_info >= (3, 11): + def __init__( + self, + max_workers: int | None = None, + mp_context: BaseContext | None = None, + initializer: Callable[..., object] | None = None, + initargs: tuple[Any, ...] = ..., + *, + max_tasks_per_child: int | None = None, + ) -> None: ... + else: + def __init__( + self, + max_workers: int | None = None, + mp_context: BaseContext | None = None, + initializer: Callable[..., object] | None = None, + initargs: tuple[Any, ...] = ..., + ) -> None: ... + if sys.version_info >= (3, 9): + def _start_executor_manager_thread(self) -> None: ... + + def _adjust_process_count(self) -> None: ... 
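For reference, a minimal sketch (not part of the vendored diff) of the public ProcessPoolExecutor surface annotated above; max_tasks_per_child is only accepted on Python 3.11+, mirroring the sys.version_info branch in the stub, and the worker function is illustrative only.

import sys
from concurrent.futures import ProcessPoolExecutor

def cube(n: int) -> int:     # must live at module level so workers can pickle it
    return n ** 3

if __name__ == "__main__":
    extra = {"max_tasks_per_child": 8} if sys.version_info >= (3, 11) else {}
    with ProcessPoolExecutor(max_workers=2, **extra) as pool:
        print(list(pool.map(cube, range(5), chunksize=2)))   # [0, 1, 8, 27, 64]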
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/thread.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/thread.pyi new file mode 100644 index 00000000..e43dd3df --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/concurrent/futures/thread.pyi @@ -0,0 +1,59 @@ +import queue +import sys +from collections.abc import Callable, Iterable, Mapping, Set as AbstractSet +from threading import Lock, Semaphore, Thread +from typing import Any, Generic, TypeVar +from weakref import ref + +from ._base import BrokenExecutor, Executor, Future + +_threads_queues: Mapping[Any, Any] +_shutdown: bool +_global_shutdown_lock: Lock + +def _python_exit() -> None: ... + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_S = TypeVar("_S") + +class _WorkItem(Generic[_S]): + future: Future[_S] + fn: Callable[..., _S] + args: Iterable[Any] + kwargs: Mapping[str, Any] + def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ... + def run(self) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def _worker( + executor_reference: ref[Any], + work_queue: queue.SimpleQueue[Any], + initializer: Callable[..., object], + initargs: tuple[Any, ...], +) -> None: ... + +class BrokenThreadPool(BrokenExecutor): ... + +class ThreadPoolExecutor(Executor): + _max_workers: int + _idle_semaphore: Semaphore + _threads: AbstractSet[Thread] + _broken: bool + _shutdown: bool + _shutdown_lock: Lock + _thread_name_prefix: str | None = ... + _initializer: Callable[..., None] | None = ... + _initargs: tuple[Any, ...] = ... + _work_queue: queue.SimpleQueue[_WorkItem[Any]] + def __init__( + self, + max_workers: int | None = None, + thread_name_prefix: str = "", + initializer: Callable[..., object] | None = None, + initargs: tuple[Any, ...] = ..., + ) -> None: ... + def _adjust_thread_count(self) -> None: ... + def _initializer_failed(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/configparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/configparser.pyi new file mode 100644 index 00000000..2c5b6838 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/configparser.pyi @@ -0,0 +1,284 @@ +import sys +from _typeshed import StrOrBytesPath, SupportsWrite +from collections.abc import Callable, ItemsView, Iterable, Iterator, Mapping, MutableMapping, Sequence +from re import Pattern +from typing import Any, ClassVar, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +__all__ = [ + "NoSectionError", + "DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "ConfigParser", + "SafeConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "LegacyInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", +] + +_Section: TypeAlias = Mapping[str, str] +_Parser: TypeAlias = MutableMapping[str, _Section] +_ConverterCallback: TypeAlias = Callable[[str], Any] +_ConvertersMap: TypeAlias = dict[str, _ConverterCallback] +_T = TypeVar("_T") + +DEFAULTSECT: Literal["DEFAULT"] +MAX_INTERPOLATION_DEPTH: Literal[10] + +class Interpolation: + def before_get(self, parser: _Parser, section: str, option: str, value: str, defaults: _Section) -> str: ... + def before_set(self, parser: _Parser, section: str, option: str, value: str) -> str: ... + def before_read(self, parser: _Parser, section: str, option: str, value: str) -> str: ... + def before_write(self, parser: _Parser, section: str, option: str, value: str) -> str: ... + +class BasicInterpolation(Interpolation): ... +class ExtendedInterpolation(Interpolation): ... + +class LegacyInterpolation(Interpolation): + def before_get(self, parser: _Parser, section: str, option: str, value: str, vars: _Section) -> str: ... + +class RawConfigParser(_Parser): + _SECT_TMPL: ClassVar[str] # undocumented + _OPT_TMPL: ClassVar[str] # undocumented + _OPT_NV_TMPL: ClassVar[str] # undocumented + + SECTCRE: Pattern[str] + OPTCRE: ClassVar[Pattern[str]] + OPTCRE_NV: ClassVar[Pattern[str]] # undocumented + NONSPACECRE: ClassVar[Pattern[str]] # undocumented + + BOOLEAN_STATES: ClassVar[Mapping[str, bool]] # undocumented + default_section: str + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None = None, + dict_type: type[Mapping[str, str]] = ..., + *, + allow_no_value: Literal[True], + delimiters: Sequence[str] = ..., + comment_prefixes: Sequence[str] = ..., + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + @overload + def __init__( + self, + defaults: Mapping[str, str | None] | None, + dict_type: type[Mapping[str, str]], + allow_no_value: Literal[True], + *, + delimiters: Sequence[str] = ..., + comment_prefixes: Sequence[str] = ..., + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... 
+ @overload + def __init__( + self, + defaults: _Section | None = None, + dict_type: type[Mapping[str, str]] = ..., + allow_no_value: bool = False, + *, + delimiters: Sequence[str] = ..., + comment_prefixes: Sequence[str] = ..., + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: str) -> SectionProxy: ... + def __setitem__(self, key: str, value: _Section) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __contains__(self, key: object) -> bool: ... + def defaults(self) -> _Section: ... + def sections(self) -> list[str]: ... + def add_section(self, section: str) -> None: ... + def has_section(self, section: str) -> bool: ... + def options(self, section: str) -> list[str]: ... + def has_option(self, section: str, option: str) -> bool: ... + def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: ... + def read_file(self, f: Iterable[str], source: str | None = None) -> None: ... + def read_string(self, string: str, source: str = "") -> None: ... + def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: ... + def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ... + # These get* methods are partially applied (with the same names) in + # SectionProxy; the stubs should be kept updated together + @overload + def getint(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> int: ... + @overload + def getint( + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... + ) -> int | _T: ... + @overload + def getfloat(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> float: ... + @overload + def getfloat( + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... + ) -> float | _T: ... + @overload + def getboolean(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> bool: ... + @overload + def getboolean( + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... + ) -> bool | _T: ... + def _get_conv( + self, + section: str, + option: str, + conv: Callable[[str], _T], + *, + raw: bool = False, + vars: _Section | None = None, + fallback: _T = ..., + ) -> _T: ... + # This is incompatible with MutableMapping so we ignore the type + @overload # type: ignore[override] + def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | Any: ... + @overload + def get( + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T + ) -> str | _T | Any: ... + @overload + def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: ... + @overload + def items(self, section: str, raw: bool = False, vars: _Section | None = None) -> list[tuple[str, str]]: ... + def set(self, section: str, option: str, value: str | None = None) -> None: ... + def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: ... + def remove_option(self, section: str, option: str) -> bool: ... 
+ def remove_section(self, section: str) -> bool: ... + def optionxform(self, optionstr: str) -> str: ... + +class ConfigParser(RawConfigParser): + # This is incompatible with MutableMapping so we ignore the type + @overload # type: ignore[override] + def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: ... + @overload + def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T) -> str | _T: ... + +if sys.version_info < (3, 12): + class SafeConfigParser(ConfigParser): ... # deprecated alias + +class SectionProxy(MutableMapping[str, str]): + def __init__(self, parser: RawConfigParser, name: str) -> None: ... + def __getitem__(self, key: str) -> str: ... + def __setitem__(self, key: str, value: str) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __contains__(self, key: object) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[str]: ... + @property + def parser(self) -> RawConfigParser: ... + @property + def name(self) -> str: ... + def get( # type: ignore[override] + self, + option: str, + fallback: str | None = None, + *, + raw: bool = False, + vars: _Section | None = None, + _impl: Any | None = None, + **kwargs: Any, + ) -> str | Any: ... # can be None in RawConfigParser's sections + # These are partially-applied version of the methods with the same names in + # RawConfigParser; the stubs should be kept updated together + @overload + def getint(self, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> int: ... + @overload + def getint(self, option: str, fallback: _T = ..., *, raw: bool = ..., vars: _Section | None = ...) -> int | _T: ... + @overload + def getfloat(self, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> float: ... + @overload + def getfloat(self, option: str, fallback: _T = ..., *, raw: bool = ..., vars: _Section | None = ...) -> float | _T: ... + @overload + def getboolean(self, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> bool: ... + @overload + def getboolean(self, option: str, fallback: _T = ..., *, raw: bool = ..., vars: _Section | None = ...) -> bool | _T: ... + # SectionProxy can have arbitrary attributes when custom converters are used + def __getattr__(self, key: str) -> Callable[..., Any]: ... + +class ConverterMapping(MutableMapping[str, _ConverterCallback | None]): + GETTERCRE: ClassVar[Pattern[Any]] + def __init__(self, parser: RawConfigParser) -> None: ... + def __getitem__(self, key: str) -> _ConverterCallback: ... + def __setitem__(self, key: str, value: _ConverterCallback | None) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + +class Error(Exception): + message: str + def __init__(self, msg: str = "") -> None: ... + +class NoSectionError(Error): + section: str + def __init__(self, section: str) -> None: ... + +class DuplicateSectionError(Error): + section: str + source: str | None + lineno: int | None + def __init__(self, section: str, source: str | None = None, lineno: int | None = None) -> None: ... + +class DuplicateOptionError(Error): + section: str + option: str + source: str | None + lineno: int | None + def __init__(self, section: str, option: str, source: str | None = None, lineno: int | None = None) -> None: ... + +class NoOptionError(Error): + section: str + option: str + def __init__(self, option: str, section: str) -> None: ... 
+ +class InterpolationError(Error): + section: str + option: str + def __init__(self, option: str, section: str, msg: str) -> None: ... + +class InterpolationDepthError(InterpolationError): + def __init__(self, option: str, section: str, rawval: object) -> None: ... + +class InterpolationMissingOptionError(InterpolationError): + reference: str + def __init__(self, option: str, section: str, rawval: object, reference: str) -> None: ... + +class InterpolationSyntaxError(InterpolationError): ... + +class ParsingError(Error): + source: str + errors: list[tuple[int, str]] + def __init__(self, source: str | None = None, filename: str | None = None) -> None: ... + def append(self, lineno: int, line: str) -> None: ... + +class MissingSectionHeaderError(ParsingError): + lineno: int + line: str + def __init__(self, filename: str, lineno: int, line: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/contextlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/contextlib.pyi new file mode 100644 index 00000000..feb43aab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/contextlib.pyi @@ -0,0 +1,202 @@ +import abc +import sys +from _typeshed import FileDescriptorOrPath, Unused +from abc import abstractmethod +from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator +from types import TracebackType +from typing import IO, Any, Generic, Protocol, TypeVar, overload, runtime_checkable +from typing_extensions import ParamSpec, Self, TypeAlias + +__all__ = [ + "contextmanager", + "closing", + "AbstractContextManager", + "ContextDecorator", + "ExitStack", + "redirect_stdout", + "redirect_stderr", + "suppress", + "AbstractAsyncContextManager", + "AsyncExitStack", + "asynccontextmanager", + "nullcontext", +] + +if sys.version_info >= (3, 10): + __all__ += ["aclosing"] + +if sys.version_info >= (3, 11): + __all__ += ["chdir"] + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T_io = TypeVar("_T_io", bound=IO[str] | None) +_F = TypeVar("_F", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +_ExitFunc: TypeAlias = Callable[[type[BaseException] | None, BaseException | None, TracebackType | None], bool | None] +_CM_EF = TypeVar("_CM_EF", bound=AbstractContextManager[Any] | _ExitFunc) + +@runtime_checkable +class AbstractContextManager(Protocol[_T_co]): + def __enter__(self) -> _T_co: ... + @abstractmethod + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool | None: ... + +@runtime_checkable +class AbstractAsyncContextManager(Protocol[_T_co]): + async def __aenter__(self) -> _T_co: ... + @abstractmethod + async def __aexit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool | None: ... + +class ContextDecorator: + def __call__(self, func: _F) -> _F: ... + +class _GeneratorContextManager(AbstractContextManager[_T_co], ContextDecorator, Generic[_T_co]): + # __init__ and all instance attributes are actually inherited from _GeneratorContextManagerBase + # _GeneratorContextManagerBase is more trouble than it's worth to include in the stub; see #6676 + def __init__(self, func: Callable[..., Iterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... 
+ gen: Generator[_T_co, Any, Any] + func: Callable[..., Generator[_T_co, Any, Any]] + args: tuple[Any, ...] + kwds: dict[str, Any] + def __exit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: ... + +if sys.version_info >= (3, 10): + _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]]) + + class AsyncContextDecorator: + def __call__(self, func: _AF) -> _AF: ... + + class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co], AsyncContextDecorator, Generic[_T_co]): + # __init__ and these attributes are actually defined in the base class _GeneratorContextManagerBase, + # which is more trouble than it's worth to include in the stub (see #6676) + def __init__(self, func: Callable[..., AsyncIterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: AsyncGenerator[_T_co, Any] + func: Callable[..., AsyncGenerator[_T_co, Any]] + args: tuple[Any, ...] + kwds: dict[str, Any] + async def __aexit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +else: + class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co], Generic[_T_co]): + def __init__(self, func: Callable[..., AsyncIterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: AsyncGenerator[_T_co, Any] + func: Callable[..., AsyncGenerator[_T_co, Any]] + args: tuple[Any, ...] + kwds: dict[str, Any] + async def __aexit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +def asynccontextmanager(func: Callable[_P, AsyncIterator[_T_co]]) -> Callable[_P, _AsyncGeneratorContextManager[_T_co]]: ... + +class _SupportsClose(Protocol): + def close(self) -> object: ... + +_SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) + +class closing(AbstractContextManager[_SupportsCloseT]): + def __init__(self, thing: _SupportsCloseT) -> None: ... + def __exit__(self, *exc_info: Unused) -> None: ... + +if sys.version_info >= (3, 10): + class _SupportsAclose(Protocol): + def aclose(self) -> Awaitable[object]: ... + _SupportsAcloseT = TypeVar("_SupportsAcloseT", bound=_SupportsAclose) + + class aclosing(AbstractAsyncContextManager[_SupportsAcloseT]): + def __init__(self, thing: _SupportsAcloseT) -> None: ... + async def __aexit__(self, *exc_info: Unused) -> None: ... + +class suppress(AbstractContextManager[None]): + def __init__(self, *exceptions: type[BaseException]) -> None: ... + def __exit__( + self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None + ) -> bool: ... + +class _RedirectStream(AbstractContextManager[_T_io]): + def __init__(self, new_target: _T_io) -> None: ... + def __exit__( + self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None + ) -> None: ... + +class redirect_stdout(_RedirectStream[_T_io]): ... +class redirect_stderr(_RedirectStream[_T_io]): ... + +# In reality this is a subclass of `AbstractContextManager`; +# see #7961 for why we don't do that in the stub +class ExitStack(metaclass=abc.ABCMeta): + def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... + def push(self, exit: _CM_EF) -> _CM_EF: ... + def callback(self, __callback: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... 
+ def pop_all(self) -> Self: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool: ... + +_ExitCoroFunc: TypeAlias = Callable[ + [type[BaseException] | None, BaseException | None, TracebackType | None], Awaitable[bool | None] +] +_ACM_EF = TypeVar("_ACM_EF", bound=AbstractAsyncContextManager[Any] | _ExitCoroFunc) + +# In reality this is a subclass of `AbstractAsyncContextManager`; +# see #7961 for why we don't do that in the stub +class AsyncExitStack(metaclass=abc.ABCMeta): + def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... + async def enter_async_context(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... + def push(self, exit: _CM_EF) -> _CM_EF: ... + def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: ... + def callback(self, __callback: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... + def push_async_callback( + self, __callback: Callable[_P, Awaitable[_T]], *args: _P.args, **kwds: _P.kwargs + ) -> Callable[_P, Awaitable[_T]]: ... + def pop_all(self) -> Self: ... + async def aclose(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool: ... + +if sys.version_info >= (3, 10): + class nullcontext(AbstractContextManager[_T], AbstractAsyncContextManager[_T]): + enter_result: _T + @overload + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... + @overload + def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... + def __enter__(self) -> _T: ... + def __exit__(self, *exctype: Unused) -> None: ... + async def __aenter__(self) -> _T: ... + async def __aexit__(self, *exctype: Unused) -> None: ... + +else: + class nullcontext(AbstractContextManager[_T]): + enter_result: _T + @overload + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... + @overload + def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... + def __enter__(self) -> _T: ... + def __exit__(self, *exctype: Unused) -> None: ... + +if sys.version_info >= (3, 11): + _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) + + class chdir(AbstractContextManager[None], Generic[_T_fd_or_any_path]): + path: _T_fd_or_any_path + def __init__(self, path: _T_fd_or_any_path) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *excinfo: Unused) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/contextvars.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/contextvars.pyi new file mode 100644 index 00000000..266d96bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/contextvars.pyi @@ -0,0 +1,57 @@ +import sys +from collections.abc import Callable, Iterator, Mapping +from typing import Any, ClassVar, Generic, TypeVar, overload +from typing_extensions import ParamSpec, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ("Context", "ContextVar", "Token", "copy_context") + +_T = TypeVar("_T") +_D = TypeVar("_D") +_P = ParamSpec("_P") + +@final +class ContextVar(Generic[_T]): + @overload + def __init__(self, name: str) -> None: ... + @overload + def __init__(self, name: str, *, default: _T) -> None: ... 
+ @property + def name(self) -> str: ... + @overload + def get(self) -> _T: ... + @overload + def get(self, default: _D | _T) -> _D | _T: ... + def set(self, __value: _T) -> Token[_T]: ... + def reset(self, __token: Token[_T]) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +@final +class Token(Generic[_T]): + @property + def var(self) -> ContextVar[_T]: ... + @property + def old_value(self) -> Any: ... # returns either _T or MISSING, but that's hard to express + MISSING: ClassVar[object] + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def copy_context() -> Context: ... + +# It doesn't make sense to make this generic, because for most Contexts each ContextVar will have +# a different value. +@final +class Context(Mapping[ContextVar[Any], Any]): + def __init__(self) -> None: ... + @overload + def get(self, __key: ContextVar[_T]) -> _T | None: ... + @overload + def get(self, __key: ContextVar[_T], __default: _D) -> _T | _D: ... + def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + def copy(self) -> Context: ... + def __getitem__(self, __key: ContextVar[_T]) -> _T: ... + def __iter__(self) -> Iterator[ContextVar[Any]]: ... + def __len__(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/copy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/copy.pyi new file mode 100644 index 00000000..f68965d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/copy.pyi @@ -0,0 +1,16 @@ +from typing import Any, TypeVar + +__all__ = ["Error", "copy", "deepcopy"] + +_T = TypeVar("_T") + +# None in CPython but non-None in Jython +PyStringMap: Any + +# Note: memo and _nil are internal kwargs. +def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = ...) -> _T: ... +def copy(x: _T) -> _T: ... + +class Error(Exception): ... + +error = Error diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/copyreg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/copyreg.pyi new file mode 100644 index 00000000..8f7fd957 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/copyreg.pyi @@ -0,0 +1,21 @@ +from collections.abc import Callable, Hashable +from typing import Any, SupportsInt, TypeVar +from typing_extensions import TypeAlias + +_T = TypeVar("_T") +_Reduce: TypeAlias = tuple[Callable[..., _T], tuple[Any, ...]] | tuple[Callable[..., _T], tuple[Any, ...], Any | None] + +__all__ = ["pickle", "constructor", "add_extension", "remove_extension", "clear_extension_cache"] + +def pickle( + ob_type: type[_T], + pickle_function: Callable[[_T], str | _Reduce[_T]], + constructor_ob: Callable[[_Reduce[_T]], _T] | None = None, +) -> None: ... +def constructor(object: Callable[[_Reduce[_T]], _T]) -> None: ... +def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: ... +def remove_extension(module: Hashable, name: Hashable, code: int) -> None: ... +def clear_extension_cache() -> None: ... 
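To make the _Reduce shape above concrete, here is a minimal sketch of registering a reduction function with copyreg.pickle (illustrative only, not part of the vendored stub; Point and reduce_point are invented names):

import copyreg
import pickle

class Point:
    def __init__(self, x: int, y: int) -> None:
        self.x, self.y = x, y

def reduce_point(p: Point) -> tuple:
    return (Point, (p.x, p.y))   # the (callable, args) form of _Reduce

copyreg.pickle(Point, reduce_point)
restored = pickle.loads(pickle.dumps(Point(1, 2)))
assert (restored.x, restored.y) == (1, 2)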
+ +_DispatchTableType: TypeAlias = dict[type, Callable[[Any], str | _Reduce[Any]]] # imported by multiprocessing.reduction +dispatch_table: _DispatchTableType # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/crypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/crypt.pyi new file mode 100644 index 00000000..1ad0a384 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/crypt.pyi @@ -0,0 +1,12 @@ +import sys + +if sys.platform != "win32": + class _Method: ... + METHOD_CRYPT: _Method + METHOD_MD5: _Method + METHOD_SHA256: _Method + METHOD_SHA512: _Method + METHOD_BLOWFISH: _Method + methods: list[_Method] + def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: ... + def crypt(word: str, salt: str | _Method | None = None) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/csv.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/csv.pyi new file mode 100644 index 00000000..59f2e7a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/csv.pyi @@ -0,0 +1,150 @@ +import sys + +# actually csv.Dialect is a different class to _csv.Dialect at runtime, but for typing purposes, they're identical +from _csv import ( + QUOTE_ALL as QUOTE_ALL, + QUOTE_MINIMAL as QUOTE_MINIMAL, + QUOTE_NONE as QUOTE_NONE, + QUOTE_NONNUMERIC as QUOTE_NONNUMERIC, + Dialect as Dialect, + Error as Error, + __version__ as __version__, + _DialectLike, + _QuotingType, + _reader, + _writer, + field_size_limit as field_size_limit, + get_dialect as get_dialect, + list_dialects as list_dialects, + reader as reader, + register_dialect as register_dialect, + unregister_dialect as unregister_dialect, + writer as writer, +) +from _typeshed import SupportsWrite +from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Literal, Self + +if sys.version_info >= (3, 8): + from builtins import dict as _DictReadMapping +else: + from collections import OrderedDict as _DictReadMapping + +if sys.version_info >= (3, 12): + from types import GenericAlias + +__all__ = [ + "QUOTE_MINIMAL", + "QUOTE_ALL", + "QUOTE_NONNUMERIC", + "QUOTE_NONE", + "Error", + "Dialect", + "__doc__", + "excel", + "excel_tab", + "field_size_limit", + "reader", + "writer", + "register_dialect", + "get_dialect", + "list_dialects", + "Sniffer", + "unregister_dialect", + "__version__", + "DictReader", + "DictWriter", + "unix_dialect", +] + +_T = TypeVar("_T") + +class excel(Dialect): ... +class excel_tab(excel): ... +class unix_dialect(Dialect): ... + +class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): + fieldnames: Sequence[_T] | None + restkey: str | None + restval: str | None + reader: _reader + dialect: _DialectLike + line_num: int + @overload + def __init__( + self, + f: Iterable[str], + fieldnames: Sequence[_T], + restkey: str | None = None, + restval: str | None = None, + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, + ) -> None: ... 
+ @overload + def __init__( + self: DictReader[str], + f: Iterable[str], + fieldnames: Sequence[str] | None = None, + restkey: str | None = None, + restval: str | None = None, + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, + ) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _DictReadMapping[_T | Any, str | Any]: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class DictWriter(Generic[_T]): + fieldnames: Collection[_T] + restval: Any | None + extrasaction: Literal["raise", "ignore"] + writer: _writer + def __init__( + self, + f: SupportsWrite[str], + fieldnames: Collection[_T], + restval: Any | None = "", + extrasaction: Literal["raise", "ignore"] = "raise", + dialect: _DialectLike = "excel", + *, + delimiter: str = ",", + quotechar: str | None = '"', + escapechar: str | None = None, + doublequote: bool = True, + skipinitialspace: bool = False, + lineterminator: str = "\r\n", + quoting: _QuotingType = 0, + strict: bool = False, + ) -> None: ... + if sys.version_info >= (3, 8): + def writeheader(self) -> Any: ... + else: + def writeheader(self) -> None: ... + + def writerow(self, rowdict: Mapping[_T, Any]) -> Any: ... + def writerows(self, rowdicts: Iterable[Mapping[_T, Any]]) -> None: ... + if sys.version_info >= (3, 12): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class Sniffer: + preferred: list[str] + def sniff(self, sample: str, delimiters: str | None = None) -> type[Dialect]: ... + def has_header(self, sample: str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ctypes/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ctypes/__init__.pyi new file mode 100644 index 00000000..aaaacf28 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ctypes/__init__.pyi @@ -0,0 +1,306 @@ +import sys +from _ctypes import RTLD_GLOBAL as RTLD_GLOBAL, RTLD_LOCAL as RTLD_LOCAL +from _typeshed import ReadableBuffer, WriteableBuffer +from abc import abstractmethod +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from typing import Any, ClassVar, Generic, TypeVar, overload +from typing_extensions import Self, TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") +_DLLT = TypeVar("_DLLT", bound=CDLL) +_CT = TypeVar("_CT", bound=_CData) + +DEFAULT_MODE: int + +class CDLL: + _func_flags_: ClassVar[int] + _func_restype_: ClassVar[_CData] + _name: str + _handle: int + _FuncPtr: type[_FuncPointer] + if sys.version_info >= (3, 8): + def __init__( + self, + name: str | None, + mode: int = ..., + handle: int | None = None, + use_errno: bool = False, + use_last_error: bool = False, + winmode: int | None = None, + ) -> None: ... + else: + def __init__( + self, + name: str | None, + mode: int = ..., + handle: int | None = None, + use_errno: bool = False, + use_last_error: bool = False, + ) -> None: ... + + def __getattr__(self, name: str) -> _NamedFuncPointer: ... + def __getitem__(self, name_or_ordinal: str) -> _NamedFuncPointer: ... + +if sys.platform == "win32": + class OleDLL(CDLL): ... + class WinDLL(CDLL): ... + +class PyDLL(CDLL): ... 
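A minimal sketch of loading a shared library through CDLL and declaring a signature on the function pointer returned by attribute access (illustrative only, not part of the vendored stub; assumes a platform where find_library("m") resolves the C math library):

import ctypes
import ctypes.util

libm_name = ctypes.util.find_library("m")   # may be None, e.g. on Windows
if libm_name is not None:
    libm = ctypes.CDLL(libm_name)
    libm.cos.restype = ctypes.c_double      # declare the C return type
    libm.cos.argtypes = [ctypes.c_double]   # and the argument types
    assert libm.cos(0.0) == 1.0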
+ +class LibraryLoader(Generic[_DLLT]): + def __init__(self, dlltype: type[_DLLT]) -> None: ... + def __getattr__(self, name: str) -> _DLLT: ... + def __getitem__(self, name: str) -> _DLLT: ... + def LoadLibrary(self, name: str) -> _DLLT: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +cdll: LibraryLoader[CDLL] +if sys.platform == "win32": + windll: LibraryLoader[WinDLL] + oledll: LibraryLoader[OleDLL] +pydll: LibraryLoader[PyDLL] +pythonapi: PyDLL + +class _CDataMeta(type): + # By default mypy complains about the following two methods, because strictly speaking cls + # might not be a Type[_CT]. However this can never actually happen, because the only class that + # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here. + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + +class _CData(metaclass=_CDataMeta): + _b_base_: int + _b_needsfree_: bool + _objects: Mapping[Any, int] | None + @classmethod + def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ... + @classmethod + def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ... + @classmethod + def from_address(cls, address: int) -> Self: ... + @classmethod + def from_param(cls, obj: Any) -> Self | _CArgObject: ... + @classmethod + def in_dll(cls, library: CDLL, name: str) -> Self: ... + +class _CanCastTo(_CData): ... +class _PointerLike(_CanCastTo): ... + +_ECT: TypeAlias = Callable[[type[_CData] | None, _FuncPointer, tuple[_CData, ...]], _CData] +_PF: TypeAlias = tuple[int] | tuple[int, str] | tuple[int, str, Any] + +class _FuncPointer(_PointerLike, _CData): + restype: type[_CData] | Callable[[int], Any] | None + argtypes: Sequence[type[_CData]] + errcheck: _ECT + @overload + def __init__(self, address: int) -> None: ... + @overload + def __init__(self, callable: Callable[..., Any]) -> None: ... + @overload + def __init__(self, func_spec: tuple[str | int, CDLL], paramflags: tuple[_PF, ...] = ...) -> None: ... + @overload + def __init__(self, vtlb_index: int, name: str, paramflags: tuple[_PF, ...] = ..., iid: _Pointer[c_int] = ...) -> None: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +class _NamedFuncPointer(_FuncPointer): + __name__: str + +class ArgumentError(Exception): ... + +def CFUNCTYPE( + restype: type[_CData] | None, *argtypes: type[_CData], use_errno: bool = ..., use_last_error: bool = ... +) -> type[_FuncPointer]: ... + +if sys.platform == "win32": + def WINFUNCTYPE( + restype: type[_CData] | None, *argtypes: type[_CData], use_errno: bool = ..., use_last_error: bool = ... + ) -> type[_FuncPointer]: ... + +def PYFUNCTYPE(restype: type[_CData] | None, *argtypes: type[_CData]) -> type[_FuncPointer]: ... + +class _CArgObject: ... + +# Any type that can be implicitly converted to c_void_p when passed as a C function argument. +# (bytes is not included here, see below.) +_CVoidPLike: TypeAlias = _PointerLike | Array[Any] | _CArgObject | int +# Same as above, but including types known to be read-only (i. e. bytes). +# This distinction is not strictly necessary (ctypes doesn't differentiate between const +# and non-const pointers), but it catches errors like memmove(b'foo', buf, 4) +# when memmove(buf, b'foo', 4) was intended. 
+_CVoidConstPLike: TypeAlias = _CVoidPLike | bytes + +def addressof(obj: _CData) -> int: ... +def alignment(obj_or_type: _CData | type[_CData]) -> int: ... +def byref(obj: _CData, offset: int = ...) -> _CArgObject: ... + +_CastT = TypeVar("_CastT", bound=_CanCastTo) + +def cast(obj: _CData | _CArgObject | int, typ: type[_CastT]) -> _CastT: ... +def create_string_buffer(init: int | bytes, size: int | None = None) -> Array[c_char]: ... + +c_buffer = create_string_buffer + +def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: ... + +if sys.platform == "win32": + def DllCanUnloadNow() -> int: ... + def DllGetClassObject(rclsid: Any, riid: Any, ppv: Any) -> int: ... # TODO not documented + def FormatError(code: int = ...) -> str: ... + def GetLastError() -> int: ... + +def get_errno() -> int: ... + +if sys.platform == "win32": + def get_last_error() -> int: ... + +def memmove(dst: _CVoidPLike, src: _CVoidConstPLike, count: int) -> int: ... +def memset(dst: _CVoidPLike, c: int, count: int) -> int: ... +def POINTER(type: type[_CT]) -> type[_Pointer[_CT]]: ... + +class _Pointer(Generic[_CT], _PointerLike, _CData): + _type_: type[_CT] + contents: _CT + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, arg: _CT) -> None: ... + @overload + def __getitem__(self, __i: int) -> Any: ... + @overload + def __getitem__(self, __s: slice) -> list[Any]: ... + def __setitem__(self, __i: int, __o: Any) -> None: ... + +def pointer(__arg: _CT) -> _Pointer[_CT]: ... +def resize(obj: _CData, size: int) -> None: ... +def set_errno(value: int) -> int: ... + +if sys.platform == "win32": + def set_last_error(value: int) -> int: ... + +def sizeof(obj_or_type: _CData | type[_CData]) -> int: ... +def string_at(address: _CVoidConstPLike, size: int = -1) -> bytes: ... + +if sys.platform == "win32": + def WinError(code: int | None = None, descr: str | None = None) -> OSError: ... + +def wstring_at(address: _CVoidConstPLike, size: int = -1) -> str: ... + +class _SimpleCData(Generic[_T], _CData): + value: _T + # The TypeVar can be unsolved here, + # but we can't use overloads without creating many, many mypy false-positive errors + def __init__(self, value: _T = ...) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] + +class c_byte(_SimpleCData[int]): ... + +class c_char(_SimpleCData[bytes]): + def __init__(self, value: int | bytes | bytearray = ...) -> None: ... + +class c_char_p(_PointerLike, _SimpleCData[bytes | None]): + def __init__(self, value: int | bytes | None = ...) -> None: ... + +class c_double(_SimpleCData[float]): ... +class c_longdouble(_SimpleCData[float]): ... +class c_float(_SimpleCData[float]): ... +class c_int(_SimpleCData[int]): ... +class c_int8(_SimpleCData[int]): ... +class c_int16(_SimpleCData[int]): ... +class c_int32(_SimpleCData[int]): ... +class c_int64(_SimpleCData[int]): ... +class c_long(_SimpleCData[int]): ... +class c_longlong(_SimpleCData[int]): ... +class c_short(_SimpleCData[int]): ... +class c_size_t(_SimpleCData[int]): ... +class c_ssize_t(_SimpleCData[int]): ... +class c_ubyte(_SimpleCData[int]): ... +class c_uint(_SimpleCData[int]): ... +class c_uint8(_SimpleCData[int]): ... +class c_uint16(_SimpleCData[int]): ... +class c_uint32(_SimpleCData[int]): ... +class c_uint64(_SimpleCData[int]): ... +class c_ulong(_SimpleCData[int]): ... +class c_ulonglong(_SimpleCData[int]): ... +class c_ushort(_SimpleCData[int]): ... +class c_void_p(_PointerLike, _SimpleCData[int | None]): ... +class c_wchar(_SimpleCData[str]): ... 
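A minimal sketch of the pointer API annotated above: pointer() wraps an existing object, indexing reads and writes through it, and cast()/addressof() relate the pointer back to a raw address (illustrative only, not part of the vendored stub):

import ctypes

i = ctypes.c_int(42)
pi = ctypes.pointer(i)        # _Pointer[c_int]
assert pi[0] == 42            # __getitem__ reads the pointed-to value
pi[0] = 22                    # __setitem__ writes through the pointer
assert i.value == 22
assert ctypes.cast(pi, ctypes.c_void_p).value == ctypes.addressof(i)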
+ +class c_wchar_p(_PointerLike, _SimpleCData[str | None]): + def __init__(self, value: int | str | None = ...) -> None: ... + +class c_bool(_SimpleCData[bool]): + def __init__(self, value: bool = ...) -> None: ... + +if sys.platform == "win32": + class HRESULT(_SimpleCData[int]): ... # TODO undocumented + +class py_object(_CanCastTo, _SimpleCData[_T]): ... + +class _CField: + offset: int + size: int + +class _StructUnionMeta(_CDataMeta): + _fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]] + _pack_: int + _anonymous_: Sequence[str] + def __getattr__(self, name: str) -> _CField: ... + +class _StructUnionBase(_CData, metaclass=_StructUnionMeta): + def __init__(self, *args: Any, **kw: Any) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __setattr__(self, name: str, value: Any) -> None: ... + +class Union(_StructUnionBase): ... +class Structure(_StructUnionBase): ... +class BigEndianStructure(Structure): ... +class LittleEndianStructure(Structure): ... + +class Array(Generic[_CT], _CData): + @property + @abstractmethod + def _length_(self) -> int: ... + @_length_.setter + def _length_(self, value: int) -> None: ... + @property + @abstractmethod + def _type_(self) -> type[_CT]: ... + @_type_.setter + def _type_(self, value: type[_CT]) -> None: ... + # Note: only available if _CT == c_char + @property + def raw(self) -> bytes: ... + @raw.setter + def raw(self, value: ReadableBuffer) -> None: ... + value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise + # TODO These methods cannot be annotated correctly at the moment. + # All of these "Any"s stand for the array's element type, but it's not possible to use _CT + # here, because of a special feature of ctypes. + # By default, when accessing an element of an Array[_CT], the returned object has type _CT. + # However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object + # and converts it to the corresponding Python primitive. For example, when accessing an element + # of an Array[c_int], a Python int object is returned, not a c_int. + # This behavior does *not* apply to subclasses of "simple types". + # If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns + # a MyInt, not an int. + # This special behavior is not easy to model in a stub, so for now all places where + # the array element type would belong are annotated with Any instead. + def __init__(self, *args: Any) -> None: ... + @overload + def __getitem__(self, __i: int) -> Any: ... + @overload + def __getitem__(self, __s: slice) -> list[Any]: ... + @overload + def __setitem__(self, __i: int, __o: Any) -> None: ... + @overload + def __setitem__(self, __s: slice, __o: Iterable[Any]) -> None: ... + def __iter__(self) -> Iterator[Any]: ... + # Can't inherit from Sized because the metaclass conflict between + # Sized and _CData prevents using _CDataMeta. + def __len__(self) -> int: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ctypes/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ctypes/util.pyi new file mode 100644 index 00000000..c0274f5e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ctypes/util.pyi @@ -0,0 +1,6 @@ +import sys + +def find_library(name: str) -> str | None: ... 
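The long comment at the end of ctypes/__init__.pyi above explains why Array element access is annotated as Any; here is a minimal sketch of that runtime behaviour (illustrative only, not part of the vendored stub; IntArray4 and MyInt are invented names):

import ctypes

IntArray4 = ctypes.c_int * 4            # _CDataMeta.__mul__ -> type[Array[c_int]]
arr = IntArray4(1, 2, 3, 4)
assert arr[0] == 1 and len(arr) == 4    # simple types are "unboxed" to plain int
assert list(arr) == [1, 2, 3, 4]        # __iter__ yields unboxed values too

class MyInt(ctypes.c_int):              # a *subclass* of a simple type
    pass

assert type((MyInt * 2)(1, 2)[0]) is MyInt   # subclasses are not unboxed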
+ +if sys.platform == "win32": + def find_msvcrt() -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ctypes/wintypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ctypes/wintypes.pyi new file mode 100644 index 00000000..3bd27934 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ctypes/wintypes.pyi @@ -0,0 +1,235 @@ +from ctypes import ( + Array, + Structure, + _Pointer, + _SimpleCData, + c_byte, + c_char, + c_char_p, + c_double, + c_float, + c_int, + c_long, + c_longlong, + c_short, + c_uint, + c_ulong, + c_ulonglong, + c_ushort, + c_void_p, + c_wchar, + c_wchar_p, +) +from typing_extensions import TypeAlias + +BYTE = c_byte +WORD = c_ushort +DWORD = c_ulong +CHAR = c_char +WCHAR = c_wchar +UINT = c_uint +INT = c_int +DOUBLE = c_double +FLOAT = c_float +BOOLEAN = BYTE +BOOL = c_long + +class VARIANT_BOOL(_SimpleCData[bool]): ... + +ULONG = c_ulong +LONG = c_long +USHORT = c_ushort +SHORT = c_short +LARGE_INTEGER = c_longlong +_LARGE_INTEGER = c_longlong +ULARGE_INTEGER = c_ulonglong +_ULARGE_INTEGER = c_ulonglong + +OLESTR = c_wchar_p +LPOLESTR = c_wchar_p +LPCOLESTR = c_wchar_p +LPWSTR = c_wchar_p +LPCWSTR = c_wchar_p +LPSTR = c_char_p +LPCSTR = c_char_p +LPVOID = c_void_p +LPCVOID = c_void_p + +# These two types are pointer-sized unsigned and signed ints, respectively. +# At runtime, they are either c_[u]long or c_[u]longlong, depending on the host's pointer size +# (they are not really separate classes). +class WPARAM(_SimpleCData[int]): ... +class LPARAM(_SimpleCData[int]): ... + +ATOM = WORD +LANGID = WORD +COLORREF = DWORD +LGRPID = DWORD +LCTYPE = DWORD +LCID = DWORD + +HANDLE = c_void_p +HACCEL = HANDLE +HBITMAP = HANDLE +HBRUSH = HANDLE +HCOLORSPACE = HANDLE +HDC = HANDLE +HDESK = HANDLE +HDWP = HANDLE +HENHMETAFILE = HANDLE +HFONT = HANDLE +HGDIOBJ = HANDLE +HGLOBAL = HANDLE +HHOOK = HANDLE +HICON = HANDLE +HINSTANCE = HANDLE +HKEY = HANDLE +HKL = HANDLE +HLOCAL = HANDLE +HMENU = HANDLE +HMETAFILE = HANDLE +HMODULE = HANDLE +HMONITOR = HANDLE +HPALETTE = HANDLE +HPEN = HANDLE +HRGN = HANDLE +HRSRC = HANDLE +HSTR = HANDLE +HTASK = HANDLE +HWINSTA = HANDLE +HWND = HANDLE +SC_HANDLE = HANDLE +SERVICE_STATUS_HANDLE = HANDLE + +class RECT(Structure): + left: LONG + top: LONG + right: LONG + bottom: LONG + +RECTL = RECT +_RECTL = RECT +tagRECT = RECT + +class _SMALL_RECT(Structure): + Left: SHORT + Top: SHORT + Right: SHORT + Bottom: SHORT + +SMALL_RECT = _SMALL_RECT + +class _COORD(Structure): + X: SHORT + Y: SHORT + +class POINT(Structure): + x: LONG + y: LONG + +POINTL = POINT +_POINTL = POINT +tagPOINT = POINT + +class SIZE(Structure): + cx: LONG + cy: LONG + +SIZEL = SIZE +tagSIZE = SIZE + +def RGB(red: int, green: int, blue: int) -> int: ... 
+ +class FILETIME(Structure): + dwLowDateTime: DWORD + dwHighDateTime: DWORD + +_FILETIME = FILETIME + +class MSG(Structure): + hWnd: HWND + message: UINT + wParam: WPARAM + lParam: LPARAM + time: DWORD + pt: POINT + +tagMSG = MSG +MAX_PATH: int + +class WIN32_FIND_DATAA(Structure): + dwFileAttributes: DWORD + ftCreationTime: FILETIME + ftLastAccessTime: FILETIME + ftLastWriteTime: FILETIME + nFileSizeHigh: DWORD + nFileSizeLow: DWORD + dwReserved0: DWORD + dwReserved1: DWORD + cFileName: Array[CHAR] + cAlternateFileName: Array[CHAR] + +class WIN32_FIND_DATAW(Structure): + dwFileAttributes: DWORD + ftCreationTime: FILETIME + ftLastAccessTime: FILETIME + ftLastWriteTime: FILETIME + nFileSizeHigh: DWORD + nFileSizeLow: DWORD + dwReserved0: DWORD + dwReserved1: DWORD + cFileName: Array[WCHAR] + cAlternateFileName: Array[WCHAR] + +# These pointer type definitions use _Pointer[...] instead of POINTER(...), to allow them +# to be used in type annotations. +PBOOL: TypeAlias = _Pointer[BOOL] +LPBOOL: TypeAlias = _Pointer[BOOL] +PBOOLEAN: TypeAlias = _Pointer[BOOLEAN] +PBYTE: TypeAlias = _Pointer[BYTE] +LPBYTE: TypeAlias = _Pointer[BYTE] +PCHAR: TypeAlias = _Pointer[CHAR] +LPCOLORREF: TypeAlias = _Pointer[COLORREF] +PDWORD: TypeAlias = _Pointer[DWORD] +LPDWORD: TypeAlias = _Pointer[DWORD] +PFILETIME: TypeAlias = _Pointer[FILETIME] +LPFILETIME: TypeAlias = _Pointer[FILETIME] +PFLOAT: TypeAlias = _Pointer[FLOAT] +PHANDLE: TypeAlias = _Pointer[HANDLE] +LPHANDLE: TypeAlias = _Pointer[HANDLE] +PHKEY: TypeAlias = _Pointer[HKEY] +LPHKL: TypeAlias = _Pointer[HKL] +PINT: TypeAlias = _Pointer[INT] +LPINT: TypeAlias = _Pointer[INT] +PLARGE_INTEGER: TypeAlias = _Pointer[LARGE_INTEGER] +PLCID: TypeAlias = _Pointer[LCID] +PLONG: TypeAlias = _Pointer[LONG] +LPLONG: TypeAlias = _Pointer[LONG] +PMSG: TypeAlias = _Pointer[MSG] +LPMSG: TypeAlias = _Pointer[MSG] +PPOINT: TypeAlias = _Pointer[POINT] +LPPOINT: TypeAlias = _Pointer[POINT] +PPOINTL: TypeAlias = _Pointer[POINTL] +PRECT: TypeAlias = _Pointer[RECT] +LPRECT: TypeAlias = _Pointer[RECT] +PRECTL: TypeAlias = _Pointer[RECTL] +LPRECTL: TypeAlias = _Pointer[RECTL] +LPSC_HANDLE: TypeAlias = _Pointer[SC_HANDLE] +PSHORT: TypeAlias = _Pointer[SHORT] +PSIZE: TypeAlias = _Pointer[SIZE] +LPSIZE: TypeAlias = _Pointer[SIZE] +PSIZEL: TypeAlias = _Pointer[SIZEL] +LPSIZEL: TypeAlias = _Pointer[SIZEL] +PSMALL_RECT: TypeAlias = _Pointer[SMALL_RECT] +PUINT: TypeAlias = _Pointer[UINT] +LPUINT: TypeAlias = _Pointer[UINT] +PULARGE_INTEGER: TypeAlias = _Pointer[ULARGE_INTEGER] +PULONG: TypeAlias = _Pointer[ULONG] +PUSHORT: TypeAlias = _Pointer[USHORT] +PWCHAR: TypeAlias = _Pointer[WCHAR] +PWIN32_FIND_DATAA: TypeAlias = _Pointer[WIN32_FIND_DATAA] +LPWIN32_FIND_DATAA: TypeAlias = _Pointer[WIN32_FIND_DATAA] +PWIN32_FIND_DATAW: TypeAlias = _Pointer[WIN32_FIND_DATAW] +LPWIN32_FIND_DATAW: TypeAlias = _Pointer[WIN32_FIND_DATAW] +PWORD: TypeAlias = _Pointer[WORD] +LPWORD: TypeAlias = _Pointer[WORD] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/__init__.pyi new file mode 100644 index 00000000..db44fa6a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/__init__.pyi @@ -0,0 +1,21 @@ +import sys +from collections.abc import Callable +from typing import TypeVar +from typing_extensions import Concatenate, ParamSpec + +if sys.platform != "win32": + from _curses import * + from _curses import 
_CursesWindow as _CursesWindow + + _T = TypeVar("_T") + _P = ParamSpec("_P") + + # available after calling `curses.initscr()` + LINES: int + COLS: int + + # available after calling `curses.start_color()` + COLORS: int + COLOR_PAIRS: int + + def wrapper(__func: Callable[Concatenate[_CursesWindow, _P], _T], *arg: _P.args, **kwds: _P.kwargs) -> _T: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/ascii.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/ascii.pyi new file mode 100644 index 00000000..25de8f60 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/ascii.pyi @@ -0,0 +1,63 @@ +import sys +from typing import TypeVar + +if sys.platform != "win32": + _CharT = TypeVar("_CharT", str, int) + + NUL: int + SOH: int + STX: int + ETX: int + EOT: int + ENQ: int + ACK: int + BEL: int + BS: int + TAB: int + HT: int + LF: int + NL: int + VT: int + FF: int + CR: int + SO: int + SI: int + DLE: int + DC1: int + DC2: int + DC3: int + DC4: int + NAK: int + SYN: int + ETB: int + CAN: int + EM: int + SUB: int + ESC: int + FS: int + GS: int + RS: int + US: int + SP: int + DEL: int + + controlnames: list[int] + def isalnum(c: str | int) -> bool: ... + def isalpha(c: str | int) -> bool: ... + def isascii(c: str | int) -> bool: ... + def isblank(c: str | int) -> bool: ... + def iscntrl(c: str | int) -> bool: ... + def isdigit(c: str | int) -> bool: ... + def isgraph(c: str | int) -> bool: ... + def islower(c: str | int) -> bool: ... + def isprint(c: str | int) -> bool: ... + def ispunct(c: str | int) -> bool: ... + def isspace(c: str | int) -> bool: ... + def isupper(c: str | int) -> bool: ... + def isxdigit(c: str | int) -> bool: ... + def isctrl(c: str | int) -> bool: ... + def ismeta(c: str | int) -> bool: ... + def ascii(c: _CharT) -> _CharT: ... + def ctrl(c: _CharT) -> _CharT: ... + def alt(c: _CharT) -> _CharT: ... + def unctrl(c: str | int) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/has_key.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/has_key.pyi new file mode 100644 index 00000000..ff728aed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/has_key.pyi @@ -0,0 +1,4 @@ +import sys + +if sys.platform != "win32": + def has_key(ch: int | str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/panel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/panel.pyi new file mode 100644 index 00000000..30803791 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/panel.pyi @@ -0,0 +1,25 @@ +import sys + +if sys.platform != "win32": + from _curses import _CursesWindow + + version: str + + class _Curses_Panel: # type is (note the space in the class name) + def above(self) -> _Curses_Panel: ... + def below(self) -> _Curses_Panel: ... + def bottom(self) -> None: ... + def hidden(self) -> bool: ... + def hide(self) -> None: ... + def move(self, y: int, x: int) -> None: ... + def replace(self, win: _CursesWindow) -> None: ... + def set_userptr(self, obj: object) -> None: ... + def show(self) -> None: ... + def top(self) -> None: ... + def userptr(self) -> object: ... + def window(self) -> _CursesWindow: ... + + def bottom_panel() -> _Curses_Panel: ... 
+ def new_panel(__win: _CursesWindow) -> _Curses_Panel: ... + def top_panel() -> _Curses_Panel: ... + def update_panels() -> _Curses_Panel: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/textpad.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/textpad.pyi new file mode 100644 index 00000000..4d28b4df --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/curses/textpad.pyi @@ -0,0 +1,13 @@ +import sys +from collections.abc import Callable + +if sys.platform != "win32": + from _curses import _CursesWindow + def rectangle(win: _CursesWindow, uly: int, ulx: int, lry: int, lrx: int) -> None: ... + + class Textbox: + stripspaces: bool + def __init__(self, win: _CursesWindow, insert_mode: bool = False) -> None: ... + def edit(self, validate: Callable[[int], int] | None = None) -> str: ... + def do_command(self, ch: str | int) -> None: ... + def gather(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dataclasses.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dataclasses.pyi new file mode 100644 index 00000000..d254a594 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dataclasses.pyi @@ -0,0 +1,302 @@ +import enum +import sys +import types +from _typeshed import DataclassInstance +from builtins import type as Type # alias to avoid name clashes with fields named "type" +from collections.abc import Callable, Iterable, Mapping +from typing import Any, Generic, Protocol, TypeVar, overload +from typing_extensions import Literal, TypeAlias, TypeGuard + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + +__all__ = [ + "dataclass", + "field", + "Field", + "FrozenInstanceError", + "InitVar", + "MISSING", + "fields", + "asdict", + "astuple", + "make_dataclass", + "replace", + "is_dataclass", +] + +if sys.version_info >= (3, 10): + __all__ += ["KW_ONLY"] + +_DataclassT = TypeVar("_DataclassT", bound=DataclassInstance) + +# define _MISSING_TYPE as an enum within the type stubs, +# even though that is not really its type at runtime +# this allows us to use Literal[_MISSING_TYPE.MISSING] +# for background, see: +# https://github.com/python/typeshed/pull/5900#issuecomment-895513797 +class _MISSING_TYPE(enum.Enum): + MISSING = enum.auto() + +MISSING = _MISSING_TYPE.MISSING + +if sys.version_info >= (3, 10): + class KW_ONLY: ... + +@overload +def asdict(obj: DataclassInstance) -> dict[str, Any]: ... +@overload +def asdict(obj: DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... +@overload +def astuple(obj: DataclassInstance) -> tuple[Any, ...]: ... +@overload +def astuple(obj: DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... + +if sys.version_info >= (3, 8): + # cls argument is now positional-only + @overload + def dataclass(__cls: None) -> Callable[[type[_T]], type[_T]]: ... + @overload + def dataclass(__cls: type[_T]) -> type[_T]: ... + +else: + @overload + def dataclass(_cls: None) -> Callable[[type[_T]], type[_T]]: ... + @overload + def dataclass(_cls: type[_T]) -> type[_T]: ... 
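A minimal sketch of the dataclass()/field() API described by the overloads above and below (illustrative only, not part of the vendored stub; assumes Python 3.9+ for the list[str] annotation; Job is an invented name):

from dataclasses import asdict, dataclass, field

@dataclass
class Job:
    name: str
    tags: list[str] = field(default_factory=list)   # default_factory overload
    retries: int = 3                                 # plain default

job = Job("build")
assert asdict(job) == {"name": "build", "tags": [], "retries": 3}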
+ +if sys.version_info >= (3, 11): + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> Callable[[type[_T]], type[_T]]: ... + +elif sys.version_info >= (3, 10): + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + ) -> Callable[[type[_T]], type[_T]]: ... + +else: + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + ) -> Callable[[type[_T]], type[_T]]: ... + +# See https://github.com/python/mypy/issues/10750 +class _DefaultFactory(Protocol[_T_co]): + def __call__(self) -> _T_co: ... + +class Field(Generic[_T]): + name: str + type: Type[_T] + default: _T | Literal[_MISSING_TYPE.MISSING] + default_factory: _DefaultFactory[_T] | Literal[_MISSING_TYPE.MISSING] + repr: bool + hash: bool | None + init: bool + compare: bool + metadata: types.MappingProxyType[Any, Any] + if sys.version_info >= (3, 10): + kw_only: bool | Literal[_MISSING_TYPE.MISSING] + def __init__( + self, + default: _T, + default_factory: Callable[[], _T], + init: bool, + repr: bool, + hash: bool | None, + compare: bool, + metadata: Mapping[Any, Any], + kw_only: bool, + ) -> None: ... + else: + def __init__( + self, + default: _T, + default_factory: Callable[[], _T], + init: bool, + repr: bool, + hash: bool | None, + compare: bool, + metadata: Mapping[Any, Any], + ) -> None: ... + + def __set_name__(self, owner: Type[Any], name: str) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# NOTE: Actual return type is 'Field[_T]', but we want to help type checkers +# to understand the magic that happens at runtime. +if sys.version_info >= (3, 10): + @overload # `default` and `default_factory` are optional and mutually exclusive. + def field( + *, + default: _T, + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool = ..., + ) -> _T: ... + @overload + def field( + *, + default_factory: Callable[[], _T], + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool = ..., + ) -> _T: ... + @overload + def field( + *, + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool = ..., + ) -> Any: ... + +else: + @overload # `default` and `default_factory` are optional and mutually exclusive. + def field( + *, + default: _T, + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + ) -> _T: ... + @overload + def field( + *, + default_factory: Callable[[], _T], + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + ) -> _T: ... 
+ @overload + def field( + *, + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + ) -> Any: ... + +def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ... +@overload +def is_dataclass(obj: DataclassInstance) -> Literal[True]: ... +@overload +def is_dataclass(obj: type) -> TypeGuard[type[DataclassInstance]]: ... +@overload +def is_dataclass(obj: object) -> TypeGuard[DataclassInstance | type[DataclassInstance]]: ... + +class FrozenInstanceError(AttributeError): ... + +if sys.version_info >= (3, 9): + _InitVarMeta: TypeAlias = type +else: + class _InitVarMeta(type): + # Not used, instead `InitVar.__class_getitem__` is called. + def __getitem__(self, params: Any) -> InitVar[Any]: ... + +class InitVar(Generic[_T], metaclass=_InitVarMeta): + type: Type[_T] + def __init__(self, type: Type[_T]) -> None: ... + if sys.version_info >= (3, 9): + @overload + def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... + @overload + def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... + +if sys.version_info >= (3, 11): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], + *, + bases: tuple[type, ...] = ..., + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> type: ... + +elif sys.version_info >= (3, 10): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], + *, + bases: tuple[type, ...] = ..., + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + ) -> type: ... + +else: + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], + *, + bases: tuple[type, ...] = ..., + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + ) -> type: ... + +def replace(__obj: _DataclassT, **changes: Any) -> _DataclassT: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/datetime.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/datetime.pyi new file mode 100644 index 00000000..4da5501c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/datetime.pyi @@ -0,0 +1,321 @@ +import sys +from abc import abstractmethod +from time import struct_time +from typing import ClassVar, NamedTuple, NoReturn, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias, final + +if sys.version_info >= (3, 11): + __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR", "UTC") +elif sys.version_info >= (3, 9): + __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR") + +_D = TypeVar("_D", bound=date) + +MINYEAR: Literal[1] +MAXYEAR: Literal[9999] + +class tzinfo: + @abstractmethod + def tzname(self, __dt: datetime | None) -> str | None: ... 
+ @abstractmethod + def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... + @abstractmethod + def dst(self, __dt: datetime | None) -> timedelta | None: ... + def fromutc(self, __dt: datetime) -> datetime: ... + +# Alias required to avoid name conflicts with date(time).tzinfo. +_TzInfo: TypeAlias = tzinfo + +@final +class timezone(tzinfo): + utc: ClassVar[timezone] + min: ClassVar[timezone] + max: ClassVar[timezone] + def __init__(self, offset: timedelta, name: str = ...) -> None: ... + def tzname(self, __dt: datetime | None) -> str: ... + def utcoffset(self, __dt: datetime | None) -> timedelta: ... + def dst(self, __dt: datetime | None) -> None: ... + +if sys.version_info >= (3, 11): + UTC: timezone + +if sys.version_info >= (3, 9): + class _IsoCalendarDate(NamedTuple): + year: int + week: int + weekday: int + +class date: + min: ClassVar[date] + max: ClassVar[date] + resolution: ClassVar[timedelta] + def __new__(cls, year: int, month: int, day: int) -> Self: ... + @classmethod + def fromtimestamp(cls, __timestamp: float) -> Self: ... + @classmethod + def today(cls) -> Self: ... + @classmethod + def fromordinal(cls, __n: int) -> Self: ... + @classmethod + def fromisoformat(cls, __date_string: str) -> Self: ... + if sys.version_info >= (3, 8): + @classmethod + def fromisocalendar(cls, year: int, week: int, day: int) -> Self: ... + + @property + def year(self) -> int: ... + @property + def month(self) -> int: ... + @property + def day(self) -> int: ... + def ctime(self) -> str: ... + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, __format: str) -> str: ... + + def __format__(self, __fmt: str) -> str: ... + def isoformat(self) -> str: ... + def timetuple(self) -> struct_time: ... + def toordinal(self) -> int: ... + def replace(self, year: int = ..., month: int = ..., day: int = ...) -> Self: ... + def __le__(self, __other: date) -> bool: ... + def __lt__(self, __other: date) -> bool: ... + def __ge__(self, __other: date) -> bool: ... + def __gt__(self, __other: date) -> bool: ... + if sys.version_info >= (3, 8): + def __add__(self, __other: timedelta) -> Self: ... + def __radd__(self, __other: timedelta) -> Self: ... + @overload + def __sub__(self, __other: timedelta) -> Self: ... + @overload + def __sub__(self, __other: datetime) -> NoReturn: ... + @overload + def __sub__(self: _D, __other: _D) -> timedelta: ... + else: + # Prior to Python 3.8, arithmetic operations always returned `date`, even in subclasses + def __add__(self, __other: timedelta) -> date: ... + def __radd__(self, __other: timedelta) -> date: ... + @overload + def __sub__(self, __other: timedelta) -> date: ... + @overload + def __sub__(self, __other: datetime) -> NoReturn: ... + @overload + def __sub__(self, __other: date) -> timedelta: ... + + def weekday(self) -> int: ... + def isoweekday(self) -> int: ... + if sys.version_info >= (3, 9): + def isocalendar(self) -> _IsoCalendarDate: ... + else: + def isocalendar(self) -> tuple[int, int, int]: ... + +class time: + min: ClassVar[time] + max: ClassVar[time] + resolution: ClassVar[timedelta] + def __new__( + cls, + hour: int = ..., + minute: int = ..., + second: int = ..., + microsecond: int = ..., + tzinfo: _TzInfo | None = ..., + *, + fold: int = ..., + ) -> Self: ... + @property + def hour(self) -> int: ... 
+ @property + def minute(self) -> int: ... + @property + def second(self) -> int: ... + @property + def microsecond(self) -> int: ... + @property + def tzinfo(self) -> _TzInfo | None: ... + @property + def fold(self) -> int: ... + def __le__(self, __other: time) -> bool: ... + def __lt__(self, __other: time) -> bool: ... + def __ge__(self, __other: time) -> bool: ... + def __gt__(self, __other: time) -> bool: ... + def isoformat(self, timespec: str = ...) -> str: ... + @classmethod + def fromisoformat(cls, __time_string: str) -> Self: ... + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, __format: str) -> str: ... + + def __format__(self, __fmt: str) -> str: ... + def utcoffset(self) -> timedelta | None: ... + def tzname(self) -> str | None: ... + def dst(self) -> timedelta | None: ... + def replace( + self, + hour: int = ..., + minute: int = ..., + second: int = ..., + microsecond: int = ..., + tzinfo: _TzInfo | None = ..., + *, + fold: int = ..., + ) -> Self: ... + +_Date: TypeAlias = date +_Time: TypeAlias = time + +class timedelta: + min: ClassVar[timedelta] + max: ClassVar[timedelta] + resolution: ClassVar[timedelta] + def __new__( + cls, + days: float = ..., + seconds: float = ..., + microseconds: float = ..., + milliseconds: float = ..., + minutes: float = ..., + hours: float = ..., + weeks: float = ..., + ) -> Self: ... + @property + def days(self) -> int: ... + @property + def seconds(self) -> int: ... + @property + def microseconds(self) -> int: ... + def total_seconds(self) -> float: ... + def __add__(self, __other: timedelta) -> timedelta: ... + def __radd__(self, __other: timedelta) -> timedelta: ... + def __sub__(self, __other: timedelta) -> timedelta: ... + def __rsub__(self, __other: timedelta) -> timedelta: ... + def __neg__(self) -> timedelta: ... + def __pos__(self) -> timedelta: ... + def __abs__(self) -> timedelta: ... + def __mul__(self, __other: float) -> timedelta: ... + def __rmul__(self, __other: float) -> timedelta: ... + @overload + def __floordiv__(self, __other: timedelta) -> int: ... + @overload + def __floordiv__(self, __other: int) -> timedelta: ... + @overload + def __truediv__(self, __other: timedelta) -> float: ... + @overload + def __truediv__(self, __other: float) -> timedelta: ... + def __mod__(self, __other: timedelta) -> timedelta: ... + def __divmod__(self, __other: timedelta) -> tuple[int, timedelta]: ... + def __le__(self, __other: timedelta) -> bool: ... + def __lt__(self, __other: timedelta) -> bool: ... + def __ge__(self, __other: timedelta) -> bool: ... + def __gt__(self, __other: timedelta) -> bool: ... + def __bool__(self) -> bool: ... + +class datetime(date): + min: ClassVar[datetime] + max: ClassVar[datetime] + def __new__( + cls, + year: int, + month: int, + day: int, + hour: int = ..., + minute: int = ..., + second: int = ..., + microsecond: int = ..., + tzinfo: _TzInfo | None = ..., + *, + fold: int = ..., + ) -> Self: ... + @property + def hour(self) -> int: ... + @property + def minute(self) -> int: ... + @property + def second(self) -> int: ... + @property + def microsecond(self) -> int: ... + @property + def tzinfo(self) -> _TzInfo | None: ... + @property + def fold(self) -> int: ... 
+ # On <3.12, the name of the first parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + @classmethod + def fromtimestamp(cls, timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + else: + @classmethod + def fromtimestamp(cls, __timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + + @classmethod + def utcfromtimestamp(cls, __t: float) -> Self: ... + if sys.version_info >= (3, 8): + @classmethod + def now(cls, tz: _TzInfo | None = None) -> Self: ... + else: + @overload + @classmethod + def now(cls, tz: None = None) -> Self: ... + @overload + @classmethod + def now(cls, tz: _TzInfo) -> datetime: ... + + @classmethod + def utcnow(cls) -> Self: ... + @classmethod + def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: ... + def timestamp(self) -> float: ... + def utctimetuple(self) -> struct_time: ... + def date(self) -> _Date: ... + def time(self) -> _Time: ... + def timetz(self) -> _Time: ... + def replace( + self, + year: int = ..., + month: int = ..., + day: int = ..., + hour: int = ..., + minute: int = ..., + second: int = ..., + microsecond: int = ..., + tzinfo: _TzInfo | None = ..., + *, + fold: int = ..., + ) -> Self: ... + if sys.version_info >= (3, 8): + def astimezone(self, tz: _TzInfo | None = ...) -> Self: ... + else: + def astimezone(self, tz: _TzInfo | None = ...) -> datetime: ... + + def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ... + @classmethod + def strptime(cls, __date_string: str, __format: str) -> Self: ... + def utcoffset(self) -> timedelta | None: ... + def tzname(self) -> str | None: ... + def dst(self) -> timedelta | None: ... + def __le__(self, __other: datetime) -> bool: ... # type: ignore[override] + def __lt__(self, __other: datetime) -> bool: ... # type: ignore[override] + def __ge__(self, __other: datetime) -> bool: ... # type: ignore[override] + def __gt__(self, __other: datetime) -> bool: ... # type: ignore[override] + if sys.version_info >= (3, 8): + @overload # type: ignore[override] + def __sub__(self, __other: timedelta) -> Self: ... + @overload + def __sub__(self: _D, __other: _D) -> timedelta: ... + else: + # Prior to Python 3.8, arithmetic operations always returned `datetime`, even in subclasses + def __add__(self, __other: timedelta) -> datetime: ... + def __radd__(self, __other: timedelta) -> datetime: ... + @overload # type: ignore[override] + def __sub__(self, __other: datetime) -> timedelta: ... + @overload + def __sub__(self, __other: timedelta) -> datetime: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/__init__.pyi new file mode 100644 index 00000000..0068d67b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/__init__.pyi @@ -0,0 +1,94 @@ +from collections.abc import Iterator, MutableMapping +from types import TracebackType +from typing_extensions import Literal, Self, TypeAlias + +__all__ = ["open", "whichdb", "error"] + +_KeyType: TypeAlias = str | bytes +_ValueType: TypeAlias = str | bytes | bytearray +_TFlags: TypeAlias = Literal[ + "r", + "w", + "c", + "n", + "rf", + "wf", + "cf", + "nf", + "rs", + "ws", + "cs", + "ns", + "ru", + "wu", + "cu", + "nu", + "rfs", + "wfs", + "cfs", + "nfs", + "rfu", + "wfu", + "cfu", + "nfu", + "rsf", + "wsf", + "csf", + "nsf", + "rsu", + "wsu", + "csu", + "nsu", + "ruf", + "wuf", + "cuf", + "nuf", + "rus", + "wus", + "cus", + "nus", + "rfsu", + "wfsu", + "cfsu", + "nfsu", + "rfus", + "wfus", + "cfus", + "nfus", + "rsfu", + "wsfu", + "csfu", + "nsfu", + "rsuf", + "wsuf", + "csuf", + "nsuf", + "rufs", + "wufs", + "cufs", + "nufs", + "rusf", + "wusf", + "cusf", + "nusf", +] + +class _Database(MutableMapping[_KeyType, bytes]): + def close(self) -> None: ... + def __getitem__(self, key: _KeyType) -> bytes: ... + def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ... + def __delitem__(self, key: _KeyType) -> None: ... + def __iter__(self) -> Iterator[bytes]: ... + def __len__(self) -> int: ... + def __del__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +class _error(Exception): ... + +error: tuple[type[_error], type[OSError]] + +def whichdb(filename: str) -> str: ... +def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/dumb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/dumb.pyi new file mode 100644 index 00000000..1fc68cf7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/dumb.pyi @@ -0,0 +1,31 @@ +from collections.abc import Iterator, MutableMapping +from types import TracebackType +from typing_extensions import Self, TypeAlias + +__all__ = ["error", "open"] + +_KeyType: TypeAlias = str | bytes +_ValueType: TypeAlias = str | bytes + +error = OSError + +# This class doesn't exist at runtime. open() can return an instance of +# any of the three implementations of dbm (dumb, gnu, ndbm), and this +# class is intended to represent the common interface supported by all three. +class _Database(MutableMapping[_KeyType, bytes]): + def __init__(self, filebasename: str, mode: str, flag: str = "c") -> None: ... + def sync(self) -> None: ... + def iterkeys(self) -> Iterator[bytes]: ... # undocumented + def close(self) -> None: ... + def __getitem__(self, key: _KeyType) -> bytes: ... + def __setitem__(self, key: _KeyType, val: _ValueType) -> None: ... + def __delitem__(self, key: _KeyType) -> None: ... + def __iter__(self) -> Iterator[bytes]: ... + def __len__(self) -> int: ... + def __del__(self) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/gnu.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/gnu.pyi new file mode 100644 index 00000000..3dc66a30 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/gnu.pyi @@ -0,0 +1,40 @@ +import sys +from _typeshed import ReadOnlyBuffer +from types import TracebackType +from typing import TypeVar, overload +from typing_extensions import Self, TypeAlias + +if sys.platform != "win32": + _T = TypeVar("_T") + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer + + open_flags: str + + class error(OSError): ... + # Actual typename gdbm, not exposed by the implementation + class _gdbm: + def firstkey(self) -> bytes | None: ... + def nextkey(self, key: _KeyType) -> bytes | None: ... + def reorganize(self) -> None: ... + def sync(self) -> None: ... + def close(self) -> None: ... + def __getitem__(self, item: _KeyType) -> bytes: ... + def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ... + def __delitem__(self, key: _KeyType) -> None: ... + def __contains__(self, key: _KeyType) -> bool: ... + def __len__(self) -> int: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + @overload + def get(self, k: _KeyType) -> bytes | None: ... + @overload + def get(self, k: _KeyType, default: _T) -> bytes | _T: ... + def keys(self) -> list[bytes]: ... + def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + def open(__filename: str, __flags: str = "r", __mode: int = 0o666) -> _gdbm: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/ndbm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/ndbm.pyi new file mode 100644 index 00000000..1106fb2a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dbm/ndbm.pyi @@ -0,0 +1,36 @@ +import sys +from _typeshed import ReadOnlyBuffer +from types import TracebackType +from typing import TypeVar, overload +from typing_extensions import Self, TypeAlias + +if sys.platform != "win32": + _T = TypeVar("_T") + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer + + class error(OSError): ... + library: str + + # Actual typename dbm, not exposed by the implementation + class _dbm: + def close(self) -> None: ... + def __getitem__(self, item: _KeyType) -> bytes: ... + def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ... + def __delitem__(self, key: _KeyType) -> None: ... + def __len__(self) -> int: ... + def __del__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + @overload + def get(self, k: _KeyType) -> bytes | None: ... + @overload + def get(self, k: _KeyType, default: _T) -> bytes | _T: ... + def keys(self) -> list[bytes]: ... 
+ def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... + # Don't exist at runtime + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] + def open(__filename: str, __flags: str = "r", __mode: int = 0o666) -> _dbm: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/decimal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/decimal.pyi new file mode 100644 index 00000000..35fc4405 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/decimal.pyi @@ -0,0 +1,2 @@ +from _decimal import * +from _decimal import __libmpdec_version__ as __libmpdec_version__, __version__ as __version__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/difflib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/difflib.pyi new file mode 100644 index 00000000..31051960 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/difflib.pyi @@ -0,0 +1,141 @@ +import sys +from collections.abc import Callable, Iterable, Iterator, Sequence +from typing import Any, AnyStr, Generic, NamedTuple, TypeVar, overload + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "get_close_matches", + "ndiff", + "restore", + "SequenceMatcher", + "Differ", + "IS_CHARACTER_JUNK", + "IS_LINE_JUNK", + "context_diff", + "unified_diff", + "diff_bytes", + "HtmlDiff", + "Match", +] + +_T = TypeVar("_T") + +class Match(NamedTuple): + a: int + b: int + size: int + +class SequenceMatcher(Generic[_T]): + @overload + def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... + @overload + def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... + @overload + def __init__( + self: SequenceMatcher[str], + isjunk: Callable[[str], bool] | None = None, + a: Sequence[str] = "", + b: Sequence[str] = "", + autojunk: bool = True, + ) -> None: ... + def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... + def set_seq1(self, a: Sequence[_T]) -> None: ... + def set_seq2(self, b: Sequence[_T]) -> None: ... + if sys.version_info >= (3, 9): + def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: ... + else: + def find_longest_match(self, alo: int, ahi: int, blo: int, bhi: int) -> Match: ... + + def get_matching_blocks(self) -> list[Match]: ... + def get_opcodes(self) -> list[tuple[str, int, int, int, int]]: ... + def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: ... + def ratio(self) -> float: ... + def quick_ratio(self) -> float: ... + def real_quick_ratio(self) -> float: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# mypy thinks the signatures of the overloads overlap, but the types still work fine +@overload +def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: ... # type: ignore[misc] +@overload +def get_close_matches( + word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = 3, cutoff: float = 0.6 +) -> list[Sequence[_T]]: ... + +class Differ: + def __init__(self, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = None) -> None: ... 
+ def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ... + +def IS_LINE_JUNK(line: str, pat: Any = ...) -> bool: ... # pat is undocumented +def IS_CHARACTER_JUNK(ch: str, ws: str = " \t") -> bool: ... # ws is undocumented +def unified_diff( + a: Sequence[str], + b: Sequence[str], + fromfile: str = "", + tofile: str = "", + fromfiledate: str = "", + tofiledate: str = "", + n: int = 3, + lineterm: str = "\n", +) -> Iterator[str]: ... +def context_diff( + a: Sequence[str], + b: Sequence[str], + fromfile: str = "", + tofile: str = "", + fromfiledate: str = "", + tofiledate: str = "", + n: int = 3, + lineterm: str = "\n", +) -> Iterator[str]: ... +def ndiff( + a: Sequence[str], + b: Sequence[str], + linejunk: Callable[[str], bool] | None = None, + charjunk: Callable[[str], bool] | None = ..., +) -> Iterator[str]: ... + +class HtmlDiff: + def __init__( + self, + tabsize: int = 8, + wrapcolumn: int | None = None, + linejunk: Callable[[str], bool] | None = None, + charjunk: Callable[[str], bool] | None = ..., + ) -> None: ... + def make_file( + self, + fromlines: Sequence[str], + tolines: Sequence[str], + fromdesc: str = "", + todesc: str = "", + context: bool = False, + numlines: int = 5, + *, + charset: str = "utf-8", + ) -> str: ... + def make_table( + self, + fromlines: Sequence[str], + tolines: Sequence[str], + fromdesc: str = "", + todesc: str = "", + context: bool = False, + numlines: int = 5, + ) -> str: ... + +def restore(delta: Iterable[str], which: int) -> Iterator[str]: ... +def diff_bytes( + dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], + a: Iterable[bytes | bytearray], + b: Iterable[bytes | bytearray], + fromfile: bytes | bytearray = b"", + tofile: bytes | bytearray = b"", + fromfiledate: bytes | bytearray = b"", + tofiledate: bytes | bytearray = b"", + n: int = 3, + lineterm: bytes | bytearray = b"\n", +) -> Iterator[bytes]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dis.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dis.pyi new file mode 100644 index 00000000..ac0c5356 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dis.pyi @@ -0,0 +1,138 @@ +import sys +import types +from collections.abc import Callable, Iterator +from opcode import * # `dis` re-exports it as a part of public API +from typing import IO, Any, NamedTuple +from typing_extensions import Self, TypeAlias + +__all__ = [ + "code_info", + "dis", + "disassemble", + "distb", + "disco", + "findlinestarts", + "findlabels", + "show_code", + "get_instructions", + "Instruction", + "Bytecode", + "cmp_op", + "hasconst", + "hasname", + "hasjrel", + "hasjabs", + "haslocal", + "hascompare", + "hasfree", + "opname", + "opmap", + "HAVE_ARGUMENT", + "EXTENDED_ARG", + "hasnargs", + "stack_effect", +] + +# Strictly this should not have to include Callable, but mypy doesn't use FunctionType +# for functions (python/mypy#3171) +_HaveCodeType: TypeAlias = types.MethodType | types.FunctionType | types.CodeType | type | Callable[..., Any] + +if sys.version_info >= (3, 11): + class Positions(NamedTuple): + lineno: int | None = ... + end_lineno: int | None = ... + col_offset: int | None = ... + end_col_offset: int | None = ... 
+ +if sys.version_info >= (3, 11): + class Instruction(NamedTuple): + opname: str + opcode: int + arg: int | None + argval: Any + argrepr: str + offset: int + starts_line: int | None + is_jump_target: bool + positions: Positions | None = ... + +else: + class Instruction(NamedTuple): + opname: str + opcode: int + arg: int | None + argval: Any + argrepr: str + offset: int + starts_line: int | None + is_jump_target: bool + +class Bytecode: + codeobj: types.CodeType + first_line: int + if sys.version_info >= (3, 11): + def __init__( + self, + x: _HaveCodeType | str, + *, + first_line: int | None = None, + current_offset: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + ) -> None: ... + @classmethod + def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: ... + else: + def __init__( + self, x: _HaveCodeType | str, *, first_line: int | None = None, current_offset: int | None = None + ) -> None: ... + @classmethod + def from_traceback(cls, tb: types.TracebackType) -> Self: ... + + def __iter__(self) -> Iterator[Instruction]: ... + def info(self) -> str: ... + def dis(self) -> str: ... + +COMPILER_FLAG_NAMES: dict[int, str] + +def findlabels(code: _HaveCodeType) -> list[int]: ... +def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ... +def pretty_flags(flags: int) -> str: ... +def code_info(x: _HaveCodeType | str) -> str: ... + +if sys.version_info >= (3, 11): + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, + *, + file: IO[str] | None = None, + depth: int | None = None, + show_caches: bool = False, + adaptive: bool = False, + ) -> None: ... + +else: + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None + ) -> None: ... + +if sys.version_info >= (3, 11): + def disassemble( + co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False + ) -> None: ... + def disco( + co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False + ) -> None: ... + def distb( + tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False + ) -> None: ... + def get_instructions( + x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False + ) -> Iterator[Instruction]: ... + +else: + def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... + def disco(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... + def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: ... + def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: ... + +def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/archive_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/archive_util.pyi new file mode 100644 index 00000000..a8947ce3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/archive_util.pyi @@ -0,0 +1,20 @@ +def make_archive( + base_name: str, + format: str, + root_dir: str | None = None, + base_dir: str | None = None, + verbose: int = 0, + dry_run: int = 0, + owner: str | None = None, + group: str | None = None, +) -> str: ... +def make_tarball( + base_name: str, + base_dir: str, + compress: str | None = "gzip", + verbose: int = 0, + dry_run: int = 0, + owner: str | None = None, + group: str | None = None, +) -> str: ... +def make_zipfile(base_name: str, base_dir: str, verbose: int = 0, dry_run: int = 0) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/bcppcompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/bcppcompiler.pyi new file mode 100644 index 00000000..3e432f94 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/bcppcompiler.pyi @@ -0,0 +1,3 @@ +from distutils.ccompiler import CCompiler + +class BCPPCompiler(CCompiler): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/ccompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/ccompiler.pyi new file mode 100644 index 00000000..e7277aa3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/ccompiler.pyi @@ -0,0 +1,152 @@ +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +_Macro: TypeAlias = tuple[str] | tuple[str, str | None] + +def gen_lib_options( + compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] +) -> list[str]: ... +def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: ... +def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: ... +def new_compiler( + plat: str | None = None, compiler: str | None = None, verbose: int = 0, dry_run: int = 0, force: int = 0 +) -> CCompiler: ... +def show_compilers() -> None: ... + +class CCompiler: + dry_run: bool + force: bool + verbose: bool + output_dir: str | None + macros: list[_Macro] + include_dirs: list[str] + libraries: list[str] + library_dirs: list[str] + runtime_library_dirs: list[str] + objects: list[str] + def __init__(self, verbose: int = 0, dry_run: int = 0, force: int = 0) -> None: ... + def add_include_dir(self, dir: str) -> None: ... + def set_include_dirs(self, dirs: list[str]) -> None: ... + def add_library(self, libname: str) -> None: ... + def set_libraries(self, libnames: list[str]) -> None: ... + def add_library_dir(self, dir: str) -> None: ... + def set_library_dirs(self, dirs: list[str]) -> None: ... + def add_runtime_library_dir(self, dir: str) -> None: ... + def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... 
+ def define_macro(self, name: str, value: str | None = None) -> None: ... + def undefine_macro(self, name: str) -> None: ... + def add_link_object(self, object: str) -> None: ... + def set_link_objects(self, objects: list[str]) -> None: ... + def detect_language(self, sources: str | list[str]) -> str | None: ... + def find_library_file(self, dirs: list[str], lib: str, debug: bool = ...) -> str | None: ... + def has_function( + self, + funcname: str, + includes: list[str] | None = None, + include_dirs: list[str] | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + ) -> bool: ... + def library_dir_option(self, dir: str) -> str: ... + def library_option(self, lib: str) -> str: ... + def runtime_library_dir_option(self, dir: str) -> str: ... + def set_executables(self, **args: str) -> None: ... + def compile( + self, + sources: list[str], + output_dir: str | None = None, + macros: _Macro | None = None, + include_dirs: list[str] | None = None, + debug: bool = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + depends: list[str] | None = None, + ) -> list[str]: ... + def create_static_lib( + self, + objects: list[str], + output_libname: str, + output_dir: str | None = None, + debug: bool = ..., + target_lang: str | None = None, + ) -> None: ... + def link( + self, + target_desc: str, + objects: list[str], + output_filename: str, + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, + debug: bool = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, + ) -> None: ... + def link_executable( + self, + objects: list[str], + output_progname: str, + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + debug: bool = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + target_lang: str | None = None, + ) -> None: ... + def link_shared_lib( + self, + objects: list[str], + output_libname: str, + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, + debug: bool = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, + ) -> None: ... + def link_shared_object( + self, + objects: list[str], + output_filename: str, + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, + debug: bool = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, + ) -> None: ... + def preprocess( + self, + source: str, + output_file: str | None = None, + macros: list[_Macro] | None = None, + include_dirs: list[str] | None = None, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + ) -> None: ... + def executable_filename(self, basename: str, strip_dir: int = 0, output_dir: str = "") -> str: ... 
+ def library_filename(self, libname: str, lib_type: str = "static", strip_dir: int = 0, output_dir: str = "") -> str: ... + def object_filenames(self, source_filenames: list[str], strip_dir: int = 0, output_dir: str = "") -> list[str]: ... + def shared_object_filename(self, basename: str, strip_dir: int = 0, output_dir: str = "") -> str: ... + def execute(self, func: Callable[..., object], args: tuple[Any, ...], msg: str | None = None, level: int = 1) -> None: ... + def spawn(self, cmd: list[str]) -> None: ... + def mkpath(self, name: str, mode: int = 0o777) -> None: ... + def move_file(self, src: str, dst: str) -> str: ... + def announce(self, msg: str, level: int = 1) -> None: ... + def warn(self, msg: str) -> None: ... + def debug_print(self, msg: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/cmd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/cmd.pyi new file mode 100644 index 00000000..a9aade02 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/cmd.pyi @@ -0,0 +1,65 @@ +from _typeshed import Incomplete +from abc import abstractmethod +from collections.abc import Callable, Iterable +from distutils.dist import Distribution +from typing import Any + +class Command: + sub_commands: list[tuple[str, Callable[[Command], bool] | None]] + def __init__(self, dist: Distribution) -> None: ... + @abstractmethod + def initialize_options(self) -> None: ... + @abstractmethod + def finalize_options(self) -> None: ... + @abstractmethod + def run(self) -> None: ... + def announce(self, msg: str, level: int = 1) -> None: ... + def debug_print(self, msg: str) -> None: ... + def ensure_string(self, option: str, default: str | None = None) -> None: ... + def ensure_string_list(self, option: str | list[str]) -> None: ... + def ensure_filename(self, option: str) -> None: ... + def ensure_dirname(self, option: str) -> None: ... + def get_command_name(self) -> str: ... + def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ... + def get_finalized_command(self, command: str, create: int = 1) -> Command: ... + def reinitialize_command(self, command: Command | str, reinit_subcommands: int = 0) -> Command: ... + def run_command(self, command: str) -> None: ... + def get_sub_commands(self) -> list[str]: ... + def warn(self, msg: str) -> None: ... + def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = None, level: int = 1) -> None: ... + def mkpath(self, name: str, mode: int = 0o777) -> None: ... + def copy_file( + self, infile: str, outfile: str, preserve_mode: int = 1, preserve_times: int = 1, link: str | None = None, level: Any = 1 + ) -> tuple[str, bool]: ... # level is not used + def copy_tree( + self, + infile: str, + outfile: str, + preserve_mode: int = 1, + preserve_times: int = 1, + preserve_symlinks: int = 0, + level: Any = 1, + ) -> list[str]: ... # level is not used + def move_file(self, src: str, dst: str, level: Any = 1) -> str: ... # level is not used + def spawn(self, cmd: Iterable[str], search_path: int = 1, level: Any = 1) -> None: ... # level is not used + def make_archive( + self, + base_name: str, + format: str, + root_dir: str | None = None, + base_dir: str | None = None, + owner: str | None = None, + group: str | None = None, + ) -> str: ... 
+ def make_file( + self, + infiles: str | list[str] | tuple[str, ...], + outfile: str, + func: Callable[..., object], + args: list[Any], + exec_msg: str | None = None, + skip_msg: str | None = None, + level: Any = 1, + ) -> None: ... # level is not used + def ensure_finalized(self) -> None: ... + def dump_options(self, header: Incomplete | None = None, indent: str = "") -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist.pyi new file mode 100644 index 00000000..e1f141d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist.pyi @@ -0,0 +1,25 @@ +from typing import Any + +from ..cmd import Command + +def show_formats() -> None: ... + +class bdist(Command): + description: str + user_options: Any + boolean_options: Any + help_options: Any + no_format_option: Any + default_format: Any + format_commands: Any + format_command: Any + bdist_base: Any + plat_name: Any + formats: Any + dist_dir: Any + skip_build: int + group: Any + owner: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_dumb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_dumb.pyi new file mode 100644 index 00000000..74cca4d1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_dumb.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from ..cmd import Command + +class bdist_dumb(Command): + description: str + user_options: Any + boolean_options: Any + default_format: Any + bdist_dir: Any + plat_name: Any + format: Any + keep_temp: int + dist_dir: Any + skip_build: Any + relative: int + owner: Any + group: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_msi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_msi.pyi new file mode 100644 index 00000000..fa98e86d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_msi.pyi @@ -0,0 +1,45 @@ +import sys +from typing import Any + +from ..cmd import Command + +if sys.platform == "win32": + from msilib import Dialog + + class PyDialog(Dialog): + def __init__(self, *args, **kw) -> None: ... + def title(self, title) -> None: ... + def back(self, title, next, name: str = "Back", active: int = 1): ... + def cancel(self, title, next, name: str = "Cancel", active: int = 1): ... + def next(self, title, next, name: str = "Next", active: int = 1): ... + def xbutton(self, name, title, next, xpos): ... 
+ + class bdist_msi(Command): + description: str + user_options: Any + boolean_options: Any + all_versions: Any + other_version: str + if sys.version_info >= (3, 9): + def __init__(self, *args, **kw) -> None: ... + bdist_dir: Any + plat_name: Any + keep_temp: int + no_target_compile: int + no_target_optimize: int + target_version: Any + dist_dir: Any + skip_build: Any + install_script: Any + pre_install_script: Any + versions: Any + def initialize_options(self) -> None: ... + install_script_key: Any + def finalize_options(self) -> None: ... + db: Any + def run(self) -> None: ... + def add_files(self) -> None: ... + def add_find_python(self) -> None: ... + def add_scripts(self) -> None: ... + def add_ui(self) -> None: ... + def get_installer_filename(self, fullname): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_packager.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_packager.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_rpm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_rpm.pyi new file mode 100644 index 00000000..76691310 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_rpm.pyi @@ -0,0 +1,52 @@ +from typing import Any + +from ..cmd import Command + +class bdist_rpm(Command): + description: str + user_options: Any + boolean_options: Any + negative_opt: Any + bdist_base: Any + rpm_base: Any + dist_dir: Any + python: Any + fix_python: Any + spec_only: Any + binary_only: Any + source_only: Any + use_bzip2: Any + distribution_name: Any + group: Any + release: Any + serial: Any + vendor: Any + packager: Any + doc_files: Any + changelog: Any + icon: Any + prep_script: Any + build_script: Any + install_script: Any + clean_script: Any + verify_script: Any + pre_install: Any + post_install: Any + pre_uninstall: Any + post_uninstall: Any + prep: Any + provides: Any + requires: Any + conflicts: Any + build_requires: Any + obsoletes: Any + keep_temp: int + use_rpm_opt_flags: int + rpm3_mode: int + no_autoreq: int + force_arch: Any + quiet: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def finalize_package_data(self) -> None: ... + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_wininst.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_wininst.pyi new file mode 100644 index 00000000..8491d312 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/bdist_wininst.pyi @@ -0,0 +1,16 @@ +from _typeshed import StrOrBytesPath +from distutils.cmd import Command +from typing import Any, ClassVar + +class bdist_wininst(Command): + description: ClassVar[str] + user_options: ClassVar[list[tuple[Any, ...]]] + boolean_options: ClassVar[list[str]] + + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_inidata(self) -> str: ... + def create_exe(self, arcname: StrOrBytesPath, fullname: str, bitmap: StrOrBytesPath | None = None) -> None: ... + def get_installer_filename(self, fullname: str) -> str: ... 
+ def get_exe_bytes(self) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build.pyi new file mode 100644 index 00000000..cf3c8a56 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build.pyi @@ -0,0 +1,31 @@ +from typing import Any + +from ..cmd import Command + +def show_compilers() -> None: ... + +class build(Command): + description: str + user_options: Any + boolean_options: Any + help_options: Any + build_base: str + build_purelib: Any + build_platlib: Any + build_lib: Any + build_temp: Any + build_scripts: Any + compiler: Any + plat_name: Any + debug: Any + force: int + executable: Any + parallel: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def has_pure_modules(self): ... + def has_c_libraries(self): ... + def has_ext_modules(self): ... + def has_scripts(self): ... + sub_commands: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_clib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_clib.pyi new file mode 100644 index 00000000..32ab182b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_clib.pyi @@ -0,0 +1,27 @@ +from typing import Any + +from ..cmd import Command + +def show_compilers() -> None: ... + +class build_clib(Command): + description: str + user_options: Any + boolean_options: Any + help_options: Any + build_clib: Any + build_temp: Any + libraries: Any + include_dirs: Any + define: Any + undef: Any + debug: Any + force: int + compiler: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def check_library_list(self, libraries) -> None: ... + def get_library_names(self): ... + def get_source_files(self): ... + def build_libraries(self, libraries) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_ext.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_ext.pyi new file mode 100644 index 00000000..80cd7893 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_ext.pyi @@ -0,0 +1,50 @@ +from typing import Any + +from ..cmd import Command + +extension_name_re: Any + +def show_compilers() -> None: ... + +class build_ext(Command): + description: str + sep_by: Any + user_options: Any + boolean_options: Any + help_options: Any + extensions: Any + build_lib: Any + plat_name: Any + build_temp: Any + inplace: int + package: Any + include_dirs: Any + define: Any + undef: Any + libraries: Any + library_dirs: Any + rpath: Any + link_objects: Any + debug: Any + force: Any + compiler: Any + swig: Any + swig_cpp: Any + swig_opts: Any + user: Any + parallel: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def check_extensions_list(self, extensions) -> None: ... + def get_source_files(self): ... + def get_outputs(self): ... + def build_extensions(self) -> None: ... + def build_extension(self, ext) -> None: ... + def swig_sources(self, sources, extension): ... 
+ def find_swig(self): ... + def get_ext_fullpath(self, ext_name): ... + def get_ext_fullname(self, ext_name): ... + def get_ext_filename(self, ext_name): ... + def get_export_symbols(self, ext): ... + def get_libraries(self, ext): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_py.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_py.pyi new file mode 100644 index 00000000..ca4e4ed7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_py.pyi @@ -0,0 +1,44 @@ +from typing import Any + +from ..cmd import Command +from ..util import Mixin2to3 as Mixin2to3 + +class build_py(Command): + description: str + user_options: Any + boolean_options: Any + negative_opt: Any + build_lib: Any + py_modules: Any + package: Any + package_data: Any + package_dir: Any + compile: int + optimize: int + force: Any + def initialize_options(self) -> None: ... + packages: Any + data_files: Any + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_data_files(self): ... + def find_data_files(self, package, src_dir): ... + def build_package_data(self) -> None: ... + def get_package_dir(self, package): ... + def check_package(self, package, package_dir): ... + def check_module(self, module, module_file): ... + def find_package_modules(self, package, package_dir): ... + def find_modules(self): ... + def find_all_modules(self): ... + def get_source_files(self): ... + def get_module_outfile(self, build_dir, package, module): ... + def get_outputs(self, include_bytecode: int = 1): ... + def build_module(self, module, module_file, package): ... + def build_modules(self) -> None: ... + def build_packages(self) -> None: ... + def byte_compile(self, files) -> None: ... + +class build_py_2to3(build_py, Mixin2to3): + updated_files: Any + def run(self) -> None: ... + def build_module(self, module, module_file, package): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_scripts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_scripts.pyi new file mode 100644 index 00000000..42135ece --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/build_scripts.pyi @@ -0,0 +1,24 @@ +from typing import Any + +from ..cmd import Command +from ..util import Mixin2to3 as Mixin2to3 + +first_line_re: Any + +class build_scripts(Command): + description: str + user_options: Any + boolean_options: Any + build_dir: Any + scripts: Any + force: Any + executable: Any + outfiles: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def get_source_files(self): ... + def run(self) -> None: ... + def copy_scripts(self): ... + +class build_scripts_2to3(build_scripts, Mixin2to3): + def copy_scripts(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/check.pyi new file mode 100644 index 00000000..9cbcc6c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/check.pyi @@ -0,0 +1,39 @@ +from typing import Any +from typing_extensions import TypeAlias + +from ..cmd import Command + +_Reporter: TypeAlias = Any # really docutils.utils.Reporter + +# Only defined if docutils is installed. +# Depends on a third-party stub. Since distutils is deprecated anyway, +# it's easier to just suppress the "any subclassing" error. +class SilentReporter(_Reporter): + messages: Any + def __init__( + self, + source, + report_level, + halt_level, + stream: Any | None = ..., + debug: int = ..., + encoding: str = ..., + error_handler: str = ..., + ) -> None: ... + def system_message(self, level, message, *children, **kwargs): ... + +HAS_DOCUTILS: bool + +class check(Command): + description: str + user_options: Any + boolean_options: Any + restructuredtext: int + metadata: int + strict: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def warn(self, msg): ... + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def check_restructuredtext(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/clean.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/clean.pyi new file mode 100644 index 00000000..99560aa8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/clean.pyi @@ -0,0 +1,17 @@ +from typing import Any + +from ..cmd import Command + +class clean(Command): + description: str + user_options: Any + boolean_options: Any + build_base: Any + build_lib: Any + build_temp: Any + build_scripts: Any + bdist_base: Any + all: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/config.pyi new file mode 100644 index 00000000..81fdf76b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/config.pyi @@ -0,0 +1,83 @@ +from collections.abc import Sequence +from re import Pattern +from typing import Any + +from ..ccompiler import CCompiler +from ..cmd import Command + +LANG_EXT: dict[str, str] + +class config(Command): + description: str + # Tuple is full name, short name, description + user_options: Sequence[tuple[str, str | None, str]] + compiler: str | CCompiler + cc: str | None + include_dirs: Sequence[str] | None + libraries: Sequence[str] | None + library_dirs: Sequence[str] | None + noisy: int + dump_source: int + temp_files: Sequence[str] + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def try_cpp( + self, + body: str | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + lang: str = "c", + ) -> bool: ... 
+ def search_cpp( + self, + pattern: Pattern[str] | str, + body: str | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + lang: str = "c", + ) -> bool: ... + def try_compile( + self, body: str, headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c" + ) -> bool: ... + def try_link( + self, + body: str, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + lang: str = "c", + ) -> bool: ... + def try_run( + self, + body: str, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + lang: str = "c", + ) -> bool: ... + def check_func( + self, + func: str, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + decl: int = 0, + call: int = 0, + ) -> bool: ... + def check_lib( + self, + library: str, + library_dirs: Sequence[str] | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + other_libraries: list[str] = ..., + ) -> bool: ... + def check_header( + self, header: str, include_dirs: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c" + ) -> bool: ... + +def dump_file(filename: str, head: Any | None = None) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install.pyi new file mode 100644 index 00000000..661d256e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install.pyi @@ -0,0 +1,63 @@ +from typing import Any + +from ..cmd import Command + +HAS_USER_SITE: bool +SCHEME_KEYS: tuple[str, ...] +INSTALL_SCHEMES: dict[str, dict[Any, Any]] + +class install(Command): + description: str + user_options: Any + boolean_options: Any + negative_opt: Any + prefix: str | None + exec_prefix: Any + home: str | None + user: bool + install_base: Any + install_platbase: Any + root: str | None + install_purelib: Any + install_platlib: Any + install_headers: Any + install_lib: str | None + install_scripts: Any + install_data: Any + install_userbase: Any + install_usersite: Any + compile: Any + optimize: Any + extra_path: Any + install_path_file: int + force: int + skip_build: int + warn_dir: int + build_base: Any + build_lib: Any + record: Any + def initialize_options(self) -> None: ... + config_vars: Any + install_libbase: Any + def finalize_options(self) -> None: ... + def dump_dirs(self, msg) -> None: ... + def finalize_unix(self) -> None: ... + def finalize_other(self) -> None: ... + def select_scheme(self, name) -> None: ... + def expand_basedirs(self) -> None: ... + def expand_dirs(self) -> None: ... + def convert_paths(self, *names) -> None: ... + path_file: Any + extra_dirs: Any + def handle_extra_path(self) -> None: ... + def change_roots(self, *names) -> None: ... + def create_home_path(self) -> None: ... + def run(self) -> None: ... + def create_path_file(self) -> None: ... + def get_outputs(self): ... + def get_inputs(self): ... + def has_lib(self): ... + def has_headers(self): ... + def has_scripts(self): ... + def has_data(self): ... 
+ sub_commands: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_data.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_data.pyi new file mode 100644 index 00000000..6cc9b528 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_data.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from ..cmd import Command + +class install_data(Command): + description: str + user_options: Any + boolean_options: Any + install_dir: Any + outfiles: Any + root: Any + force: int + data_files: Any + warn_dir: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_inputs(self): ... + def get_outputs(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_egg_info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_egg_info.pyi new file mode 100644 index 00000000..776eafc1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_egg_info.pyi @@ -0,0 +1,18 @@ +from typing import Any, ClassVar + +from ..cmd import Command + +class install_egg_info(Command): + description: ClassVar[str] + user_options: ClassVar[list[tuple[str, str | None, str]]] + install_dir: Any + def initialize_options(self) -> None: ... + target: Any + outputs: Any + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_outputs(self) -> list[str]: ... + +def safe_name(name): ... +def safe_version(version): ... +def to_filename(name): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_headers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_headers.pyi new file mode 100644 index 00000000..795bd1cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_headers.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from ..cmd import Command + +class install_headers(Command): + description: str + user_options: Any + boolean_options: Any + install_dir: Any + force: int + outfiles: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_inputs(self): ... + def get_outputs(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_lib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_lib.pyi new file mode 100644 index 00000000..a6a5e4e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_lib.pyi @@ -0,0 +1,25 @@ +from typing import Any + +from ..cmd import Command + +PYTHON_SOURCE_EXTENSION: str + +class install_lib(Command): + description: str + user_options: Any + boolean_options: Any + negative_opt: Any + install_dir: Any + build_dir: Any + force: int + compile: Any + optimize: Any + skip_build: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def build(self) -> None: ... + def install(self): ... + def byte_compile(self, files) -> None: ... + def get_outputs(self): ... 
+ def get_inputs(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_scripts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_scripts.pyi new file mode 100644 index 00000000..92728a16 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/install_scripts.pyi @@ -0,0 +1,18 @@ +from typing import Any + +from ..cmd import Command + +class install_scripts(Command): + description: str + user_options: Any + boolean_options: Any + install_dir: Any + force: int + build_dir: Any + skip_build: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + outfiles: Any + def run(self) -> None: ... + def get_inputs(self): ... + def get_outputs(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/register.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/register.pyi new file mode 100644 index 00000000..f88b9411 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/register.pyi @@ -0,0 +1,18 @@ +from typing import Any + +from ..config import PyPIRCCommand + +class register(PyPIRCCommand): + description: str + sub_commands: Any + list_classifiers: int + strict: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def classifiers(self) -> None: ... + def verify_metadata(self) -> None: ... + def send_metadata(self) -> None: ... + def build_post_data(self, action): ... + def post_to_server(self, data, auth: Any | None = None): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/sdist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/sdist.pyi new file mode 100644 index 00000000..636c4a35 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/sdist.pyi @@ -0,0 +1,42 @@ +from typing import Any + +from ..cmd import Command + +def show_formats() -> None: ... + +class sdist(Command): + description: str + def checking_metadata(self): ... + user_options: Any + boolean_options: Any + help_options: Any + negative_opt: Any + sub_commands: Any + READMES: Any + template: Any + manifest: Any + use_defaults: int + prune: int + manifest_only: int + force_manifest: int + formats: Any + keep_temp: int + dist_dir: Any + archive_files: Any + metadata_check: int + owner: Any + group: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + filelist: Any + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def get_file_list(self) -> None: ... + def add_defaults(self) -> None: ... + def read_template(self) -> None: ... + def prune_file_list(self) -> None: ... + def write_manifest(self) -> None: ... + def read_manifest(self) -> None: ... + def make_release_tree(self, base_dir, files) -> None: ... + def make_distribution(self) -> None: ... + def get_archive_files(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/upload.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/upload.pyi new file mode 100644 index 00000000..e6b77825 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/command/upload.pyi @@ -0,0 +1,17 @@ +from typing import Any, ClassVar + +from ..config import PyPIRCCommand + +class upload(PyPIRCCommand): + description: ClassVar[str] + username: str + password: str + show_response: int + sign: bool + identity: Any + def initialize_options(self) -> None: ... + repository: Any + realm: Any + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def upload_file(self, command: str, pyversion: str, filename: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/config.pyi new file mode 100644 index 00000000..5814a828 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/config.pyi @@ -0,0 +1,17 @@ +from abc import abstractmethod +from distutils.cmd import Command +from typing import ClassVar + +DEFAULT_PYPIRC: str + +class PyPIRCCommand(Command): + DEFAULT_REPOSITORY: ClassVar[str] + DEFAULT_REALM: ClassVar[str] + repository: None + realm: None + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + @abstractmethod + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/core.pyi new file mode 100644 index 00000000..7b0bdd1b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/core.pyi @@ -0,0 +1,57 @@ +from _typeshed import StrOrBytesPath +from collections.abc import Mapping +from distutils.cmd import Command as Command +from distutils.dist import Distribution as Distribution +from distutils.extension import Extension as Extension +from typing import Any + +USAGE: str + +def gen_usage(script_name: StrOrBytesPath) -> str: ... + +setup_keywords: tuple[str, ...] +extension_keywords: tuple[str, ...] 
+ +def setup( + *, + name: str = ..., + version: str = ..., + description: str = ..., + long_description: str = ..., + author: str = ..., + author_email: str = ..., + maintainer: str = ..., + maintainer_email: str = ..., + url: str = ..., + download_url: str = ..., + packages: list[str] = ..., + py_modules: list[str] = ..., + scripts: list[str] = ..., + ext_modules: list[Extension] = ..., + classifiers: list[str] = ..., + distclass: type[Distribution] = ..., + script_name: str = ..., + script_args: list[str] = ..., + options: Mapping[str, Any] = ..., + license: str = ..., + keywords: list[str] | str = ..., + platforms: list[str] | str = ..., + cmdclass: Mapping[str, type[Command]] = ..., + data_files: list[tuple[str, list[str]]] = ..., + package_dir: Mapping[str, str] = ..., + obsoletes: list[str] = ..., + provides: list[str] = ..., + requires: list[str] = ..., + command_packages: list[str] = ..., + command_options: Mapping[str, Mapping[str, tuple[Any, Any]]] = ..., + package_data: Mapping[str, list[str]] = ..., + include_package_data: bool = ..., + libraries: list[str] = ..., + headers: list[str] = ..., + ext_package: str = ..., + include_dirs: list[str] = ..., + password: str = ..., + fullname: str = ..., + **attrs: Any, +) -> None: ... +def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/cygwinccompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/cygwinccompiler.pyi new file mode 100644 index 00000000..a990c3e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/cygwinccompiler.pyi @@ -0,0 +1,20 @@ +from distutils.unixccompiler import UnixCCompiler +from distutils.version import LooseVersion +from re import Pattern +from typing_extensions import Literal + +def get_msvcr() -> list[str] | None: ... + +class CygwinCCompiler(UnixCCompiler): ... +class Mingw32CCompiler(CygwinCCompiler): ... + +CONFIG_H_OK: str +CONFIG_H_NOTOK: str +CONFIG_H_UNCERTAIN: str + +def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: ... + +RE_VERSION: Pattern[bytes] + +def get_versions() -> tuple[LooseVersion | None, ...]: ... +def is_cygwingcc() -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/debug.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/debug.pyi new file mode 100644 index 00000000..11f28a8b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/debug.pyi @@ -0,0 +1 @@ +DEBUG: bool | None diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/dep_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/dep_util.pyi new file mode 100644 index 00000000..096ce19d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/dep_util.pyi @@ -0,0 +1,3 @@ +def newer(source: str, target: str) -> bool: ... +def newer_pairwise(sources: list[str], targets: list[str]) -> list[tuple[str, str]]: ... +def newer_group(sources: list[str], target: str, missing: str = "error") -> bool: ... 
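The setup() signature above is keyword-only in the stub. A minimal sketch of a setup script that stays within those keywords; the project name, package and file paths are invented, and since distutils is deprecated (removed in Python 3.12) this is illustrative only. It could be exercised without touching any files via `python setup.py --name --version`:

    from distutils.core import Extension, setup

    setup(
        name="example",                      # str
        version="0.1",                       # str
        packages=["example"],                # list[str]
        ext_modules=[                        # list[Extension]
            Extension("example._speedups", sources=["example/_speedups.c"]),
        ],
        data_files=[("share/example", ["data/example.cfg"])],  # list[tuple[str, list[str]]]
    )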
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/dir_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/dir_util.pyi new file mode 100644 index 00000000..2324a2d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/dir_util.pyi @@ -0,0 +1,13 @@ +def mkpath(name: str, mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> list[str]: ... +def create_tree(base_dir: str, files: list[str], mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> None: ... +def copy_tree( + src: str, + dst: str, + preserve_mode: int = 1, + preserve_times: int = 1, + preserve_symlinks: int = 0, + update: int = 0, + verbose: int = 1, + dry_run: int = 0, +) -> list[str]: ... +def remove_tree(directory: str, verbose: int = 1, dry_run: int = 0) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/dist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/dist.pyi new file mode 100644 index 00000000..4641636f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/dist.pyi @@ -0,0 +1,116 @@ +from _typeshed import FileDescriptorOrPath, Incomplete, SupportsWrite +from collections.abc import Iterable, Mapping +from distutils.cmd import Command +from re import Pattern +from typing import IO, Any + +command_re: Pattern[str] + +class DistributionMetadata: + def __init__(self, path: FileDescriptorOrPath | None = None) -> None: ... + name: str | None + version: str | None + author: str | None + author_email: str | None + maintainer: str | None + maintainer_email: str | None + url: str | None + license: str | None + description: str | None + long_description: str | None + keywords: str | list[str] | None + platforms: str | list[str] | None + classifiers: str | list[str] | None + download_url: str | None + provides: list[str] | None + requires: list[str] | None + obsoletes: list[str] | None + def read_pkg_file(self, file: IO[str]) -> None: ... + def write_pkg_info(self, base_dir: str) -> None: ... + def write_pkg_file(self, file: SupportsWrite[str]) -> None: ... + def get_name(self) -> str: ... + def get_version(self) -> str: ... + def get_fullname(self) -> str: ... + def get_author(self) -> str: ... + def get_author_email(self) -> str: ... + def get_maintainer(self) -> str: ... + def get_maintainer_email(self) -> str: ... + def get_contact(self) -> str: ... + def get_contact_email(self) -> str: ... + def get_url(self) -> str: ... + def get_license(self) -> str: ... + def get_licence(self) -> str: ... + def get_description(self) -> str: ... + def get_long_description(self) -> str: ... + def get_keywords(self) -> str | list[str]: ... + def get_platforms(self) -> str | list[str]: ... + def get_classifiers(self) -> str | list[str]: ... + def get_download_url(self) -> str: ... + def get_requires(self) -> list[str]: ... + def set_requires(self, value: Iterable[str]) -> None: ... + def get_provides(self) -> list[str]: ... + def set_provides(self, value: Iterable[str]) -> None: ... + def get_obsoletes(self) -> list[str]: ... + def set_obsoletes(self, value: Iterable[str]) -> None: ... + +class Distribution: + cmdclass: dict[str, type[Command]] + metadata: DistributionMetadata + def __init__(self, attrs: Mapping[str, Any] | None = None) -> None: ... + def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... 
+ def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ... + def get_command_obj(self, command: str, create: bool = ...) -> Command | None: ... + global_options: Incomplete + common_usage: str + display_options: Incomplete + display_option_names: Incomplete + negative_opt: Incomplete + verbose: int + dry_run: int + help: int + command_packages: Incomplete + script_name: Incomplete + script_args: Incomplete + command_options: Incomplete + dist_files: Incomplete + packages: Incomplete + package_data: Incomplete + package_dir: Incomplete + py_modules: Incomplete + libraries: Incomplete + headers: Incomplete + ext_modules: Incomplete + ext_package: Incomplete + include_dirs: Incomplete + extra_path: Incomplete + scripts: Incomplete + data_files: Incomplete + password: str + command_obj: Incomplete + have_run: Incomplete + want_user_cfg: bool + def dump_option_dicts( + self, header: Incomplete | None = None, commands: Incomplete | None = None, indent: str = "" + ) -> None: ... + def find_config_files(self): ... + commands: Incomplete + def parse_command_line(self): ... + def finalize_options(self) -> None: ... + def handle_display_options(self, option_order): ... + def print_command_list(self, commands, header, max_length) -> None: ... + def print_commands(self) -> None: ... + def get_command_list(self): ... + def get_command_packages(self): ... + def get_command_class(self, command): ... + def reinitialize_command(self, command, reinit_subcommands: int = 0): ... + def announce(self, msg, level=2) -> None: ... + def run_commands(self) -> None: ... + def run_command(self, command) -> None: ... + def has_pure_modules(self): ... + def has_ext_modules(self): ... + def has_c_libraries(self): ... + def has_modules(self): ... + def has_headers(self): ... + def has_scripts(self): ... + def has_data_files(self): ... + def is_pure(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/errors.pyi new file mode 100644 index 00000000..e483362b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/errors.pyi @@ -0,0 +1,19 @@ +class DistutilsError(Exception): ... +class DistutilsModuleError(DistutilsError): ... +class DistutilsClassError(DistutilsError): ... +class DistutilsGetoptError(DistutilsError): ... +class DistutilsArgError(DistutilsError): ... +class DistutilsFileError(DistutilsError): ... +class DistutilsOptionError(DistutilsError): ... +class DistutilsSetupError(DistutilsError): ... +class DistutilsPlatformError(DistutilsError): ... +class DistutilsExecError(DistutilsError): ... +class DistutilsInternalError(DistutilsError): ... +class DistutilsTemplateError(DistutilsError): ... +class DistutilsByteCompileError(DistutilsError): ... +class CCompilerError(Exception): ... +class PreprocessError(CCompilerError): ... +class CompileError(CCompilerError): ... +class LibError(CCompilerError): ... +class LinkError(CCompilerError): ... +class UnknownFileError(CCompilerError): ... 
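The dir_util helpers above return the directories/files they touched, and failures surface as the DistutilsFileError subclass declared in errors.pyi. A small sketch, assuming a hypothetical local `docs/` tree:

    from distutils.dir_util import copy_tree, mkpath, remove_tree
    from distutils.errors import DistutilsFileError

    mkpath("build/staging/docs", mode=0o755)              # -> list of created dirs
    try:
        copied = copy_tree("docs", "build/staging/docs")  # -> list of copied files
        print("copied %d files" % len(copied))
    except DistutilsFileError as exc:                     # e.g. "docs" does not exist
        print("copy failed:", exc)
    remove_tree("build/staging", verbose=1)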
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/extension.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/extension.pyi new file mode 100644 index 00000000..789bbf6e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/extension.pyi @@ -0,0 +1,36 @@ +class Extension: + name: str + sources: list[str] + include_dirs: list[str] + define_macros: list[tuple[str, str | None]] + undef_macros: list[str] + library_dirs: list[str] + libraries: list[str] + runtime_library_dirs: list[str] + extra_objects: list[str] + extra_compile_args: list[str] + extra_link_args: list[str] + export_symbols: list[str] + swig_opts: list[str] + depends: list[str] + language: str | None + optional: bool | None + def __init__( + self, + name: str, + sources: list[str], + include_dirs: list[str] | None = None, + define_macros: list[tuple[str, str | None]] | None = None, + undef_macros: list[str] | None = None, + library_dirs: list[str] | None = None, + libraries: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + extra_objects: list[str] | None = None, + extra_compile_args: list[str] | None = None, + extra_link_args: list[str] | None = None, + export_symbols: list[str] | None = None, + swig_opts: list[str] | None = None, + depends: list[str] | None = None, + language: str | None = None, + optional: bool | None = None, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/fancy_getopt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/fancy_getopt.pyi new file mode 100644 index 00000000..c15bb8a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/fancy_getopt.pyi @@ -0,0 +1,34 @@ +from collections.abc import Iterable, Mapping +from re import Pattern +from typing import Any, overload +from typing_extensions import TypeAlias + +_Option: TypeAlias = tuple[str, str | None, str] +_GR: TypeAlias = tuple[list[str], OptionDummy] + +longopt_pat: str +longopt_re: Pattern[str] +neg_alias_re: Pattern[str] +longopt_xlate: dict[int, int] + +class FancyGetopt: + def __init__(self, option_table: list[_Option] | None = None) -> None: ... + # TODO kinda wrong, `getopt(object=object())` is invalid + @overload + def getopt(self, args: list[str] | None = None) -> _GR: ... + @overload + def getopt(self, args: list[str] | None, object: Any) -> list[str]: ... + def get_option_order(self) -> list[tuple[str, str]]: ... + def generate_help(self, header: str | None = None) -> list[str]: ... + +def fancy_getopt( + options: list[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: list[str] | None +) -> list[str] | _GR: ... + +WS_TRANS: dict[int, str] + +def wrap_text(text: str, width: int) -> list[str]: ... +def translate_longopt(opt: str) -> str: ... + +class OptionDummy: + def __init__(self, options: Iterable[str] = ...) -> None: ... 
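FancyGetopt above is typed around _Option tuples of (long_name, short_name | None, help); a long name ending in "=" takes an argument, and with no target object getopt() returns (remaining_args, OptionDummy). A rough sketch with made-up option names:

    from distutils.fancy_getopt import FancyGetopt, translate_longopt, wrap_text

    options = [
        ("verbose", "v", "run verbosely"),           # boolean option
        ("output=", "o", "write results to FILE"),   # takes an argument
    ]
    parser = FancyGetopt(options)
    args, opts = parser.getopt(["-v", "--output=dist/out.txt", "leftover"])
    print(args)                        # remaining positional arguments
    print(opts.verbose, opts.output)   # values attached to the OptionDummy

    print(translate_longopt("dry-run"))                   # "dry_run"
    print("\n".join(wrap_text("a long help string " * 5, 40)))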
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/file_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/file_util.pyi new file mode 100644 index 00000000..a97dfca6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/file_util.pyi @@ -0,0 +1,14 @@ +from collections.abc import Sequence + +def copy_file( + src: str, + dst: str, + preserve_mode: bool = ..., + preserve_times: bool = ..., + update: bool = ..., + link: str | None = None, + verbose: bool = ..., + dry_run: bool = ..., +) -> tuple[str, str]: ... +def move_file(src: str, dst: str, verbose: bool = ..., dry_run: bool = ...) -> str: ... +def write_file(filename: str, contents: Sequence[str]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/filelist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/filelist.pyi new file mode 100644 index 00000000..bea48ac1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/filelist.pyi @@ -0,0 +1,51 @@ +from collections.abc import Iterable +from re import Pattern +from typing import overload +from typing_extensions import Literal + +# class is entirely undocumented +class FileList: + allfiles: Iterable[str] | None + files: list[str] + def __init__(self, warn: None = None, debug_print: None = None) -> None: ... + def set_allfiles(self, allfiles: Iterable[str]) -> None: ... + def findall(self, dir: str = ".") -> None: ... + def debug_print(self, msg: str) -> None: ... + def append(self, item: str) -> None: ... + def extend(self, items: Iterable[str]) -> None: ... + def sort(self) -> None: ... + def remove_duplicates(self) -> None: ... + def process_template_line(self, line: str) -> None: ... + @overload + def include_pattern( + self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 + ) -> bool: ... + @overload + def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... + @overload + def include_pattern( + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: int = 0 + ) -> bool: ... + @overload + def exclude_pattern( + self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 + ) -> bool: ... + @overload + def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... + @overload + def exclude_pattern( + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: int = 0 + ) -> bool: ... + +def findall(dir: str = ".") -> list[str]: ... +def glob_to_re(pattern: str) -> str: ... +@overload +def translate_pattern( + pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[False, 0] = 0 +) -> Pattern[str]: ... +@overload +def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> Pattern[str]: ... +@overload +def translate_pattern( + pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: int = 0 +) -> Pattern[str]: ... 
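FileList above is the MANIFEST-building helper: findall() seeds allfiles, and the include/exclude overloads accept either glob-style strings or regex patterns (is_regex). A sketch using an invented source directory:

    from distutils.filelist import FileList

    fl = FileList()
    fl.findall("src")                                # walk a (hypothetical) source tree
    fl.include_pattern("*.py", anchor=0)             # glob match anywhere under src/
    fl.exclude_pattern(r".*_test\.py$", is_regex=1)  # regex-based exclusion
    fl.sort()
    fl.remove_duplicates()
    print(fl.files)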
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/log.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/log.pyi new file mode 100644 index 00000000..14ed8d8a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/log.pyi @@ -0,0 +1,25 @@ +from typing import Any + +DEBUG: int +INFO: int +WARN: int +ERROR: int +FATAL: int + +class Log: + def __init__(self, threshold: int = 3) -> None: ... + def log(self, level: int, msg: str, *args: Any) -> None: ... + def debug(self, msg: str, *args: Any) -> None: ... + def info(self, msg: str, *args: Any) -> None: ... + def warn(self, msg: str, *args: Any) -> None: ... + def error(self, msg: str, *args: Any) -> None: ... + def fatal(self, msg: str, *args: Any) -> None: ... + +def log(level: int, msg: str, *args: Any) -> None: ... +def debug(msg: str, *args: Any) -> None: ... +def info(msg: str, *args: Any) -> None: ... +def warn(msg: str, *args: Any) -> None: ... +def error(msg: str, *args: Any) -> None: ... +def fatal(msg: str, *args: Any) -> None: ... +def set_threshold(level: int) -> int: ... +def set_verbosity(v: int) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/msvccompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/msvccompiler.pyi new file mode 100644 index 00000000..80872a6b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/msvccompiler.pyi @@ -0,0 +1,3 @@ +from distutils.ccompiler import CCompiler + +class MSVCCompiler(CCompiler): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/spawn.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/spawn.pyi new file mode 100644 index 00000000..a8a2c414 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/spawn.pyi @@ -0,0 +1,2 @@ +def spawn(cmd: list[str], search_path: bool = ..., verbose: bool = ..., dry_run: bool = ...) -> None: ... +def find_executable(executable: str, path: str | None = None) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/sysconfig.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/sysconfig.pyi new file mode 100644 index 00000000..464cfb63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/sysconfig.pyi @@ -0,0 +1,22 @@ +import sys +from collections.abc import Mapping +from distutils.ccompiler import CCompiler + +PREFIX: str +EXEC_PREFIX: str +BASE_PREFIX: str +BASE_EXEC_PREFIX: str +project_base: str +python_build: bool + +def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: ... +def get_config_var(name: str) -> int | str | None: ... +def get_config_vars(*args: str) -> Mapping[str, int | str]: ... +def get_config_h_filename() -> str: ... +def get_makefile_filename() -> str: ... +def get_python_inc(plat_specific: bool = ..., prefix: str | None = None) -> str: ... +def get_python_lib(plat_specific: bool = ..., standard_lib: bool = ..., prefix: str | None = None) -> str: ... +def customize_compiler(compiler: CCompiler) -> None: ... + +if sys.version_info < (3, 10): + def get_python_version() -> str: ... 
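distutils.log above mirrors a tiny subset of the logging module (printf-style *args), and distutils.sysconfig exposes the interpreter's build configuration. A short sketch combining the two:

    from distutils import log, sysconfig

    log.set_verbosity(1)
    include_dir = sysconfig.get_python_inc()                  # platform-independent headers
    stdlib_dir = sysconfig.get_python_lib(standard_lib=True)  # pure-Python stdlib location
    soabi = sysconfig.get_config_var("SOABI")                 # int | str | None
    log.info("headers: %s, stdlib: %s, SOABI: %s", include_dir, stdlib_dir, soabi)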
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/text_file.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/text_file.pyi new file mode 100644 index 00000000..4a6cf1db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/text_file.pyi @@ -0,0 +1,21 @@ +from typing import IO + +class TextFile: + def __init__( + self, + filename: str | None = None, + file: IO[str] | None = None, + *, + strip_comments: bool = ..., + lstrip_ws: bool = ..., + rstrip_ws: bool = ..., + skip_blanks: bool = ..., + join_lines: bool = ..., + collapse_join: bool = ..., + ) -> None: ... + def open(self, filename: str) -> None: ... + def close(self) -> None: ... + def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: ... + def readline(self) -> str | None: ... + def readlines(self) -> list[str]: ... + def unreadline(self, line: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/unixccompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/unixccompiler.pyi new file mode 100644 index 00000000..e1d44347 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/unixccompiler.pyi @@ -0,0 +1,3 @@ +from distutils.ccompiler import CCompiler + +class UnixCCompiler(CCompiler): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/util.pyi new file mode 100644 index 00000000..83b03747 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/util.pyi @@ -0,0 +1,50 @@ +import sys +from _typeshed import StrPath, Unused +from collections.abc import Callable, Container, Iterable, Mapping +from typing import Any +from typing_extensions import Literal + +if sys.version_info >= (3, 8): + def get_host_platform() -> str: ... + +def get_platform() -> str: ... +def convert_path(pathname: str) -> str: ... +def change_root(new_root: str, pathname: str) -> str: ... +def check_environ() -> None: ... +def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... +def split_quoted(s: str) -> list[str]: ... +def execute( + func: Callable[..., object], args: tuple[Any, ...], msg: str | None = None, verbose: bool = ..., dry_run: bool = ... +) -> None: ... +def strtobool(val: str) -> Literal[0, 1]: ... +def byte_compile( + py_files: list[str], + optimize: int = 0, + force: bool = ..., + prefix: str | None = None, + base_dir: str | None = None, + verbose: bool = ..., + dry_run: bool = ..., + direct: bool | None = None, +) -> None: ... +def rfc822_escape(header: str) -> str: ... +def run_2to3( + files: Iterable[str], + fixer_names: Iterable[str] | None = None, + options: Mapping[str, Any] | None = None, + explicit: Unused = None, +) -> None: ... +def copydir_run_2to3( + src: StrPath, + dest: StrPath, + template: str | None = None, + fixer_names: Iterable[str] | None = None, + options: Mapping[str, Any] | None = None, + explicit: Container[str] | None = None, +) -> list[str]: ... + +class Mixin2to3: + fixer_names: Iterable[str] | None + options: Mapping[str, Any] | None + explicit: Container[str] | None + def run_2to3(self, files: Iterable[str]) -> None: ... 
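The util.pyi helpers above are small path/string utilities; a few one-liners showing the typed return values (the quoted command line is made up):

    from distutils.util import convert_path, get_platform, split_quoted, strtobool

    print(get_platform())                     # e.g. "linux-x86_64" or "win-amd64"
    print(convert_path("pkg/data/file.txt"))  # '/'-separated path -> os.sep
    print(split_quoted('gcc -DNDEBUG "-I/opt/some include"'))
    print(strtobool("yes"), strtobool("0"))   # Literal[0, 1] -> 1 0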
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/version.pyi new file mode 100644 index 00000000..47da65ef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/distutils/version.pyi @@ -0,0 +1,36 @@ +from abc import abstractmethod +from re import Pattern +from typing_extensions import Self + +class Version: + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Self | str) -> bool: ... + def __le__(self, other: Self | str) -> bool: ... + def __gt__(self, other: Self | str) -> bool: ... + def __ge__(self, other: Self | str) -> bool: ... + @abstractmethod + def __init__(self, vstring: str | None = None) -> None: ... + @abstractmethod + def parse(self, vstring: str) -> Self: ... + @abstractmethod + def __str__(self) -> str: ... + @abstractmethod + def _cmp(self, other: Self | str) -> bool: ... + +class StrictVersion(Version): + version_re: Pattern[str] + version: tuple[int, int, int] + prerelease: tuple[str, int] | None + def __init__(self, vstring: str | None = None) -> None: ... + def parse(self, vstring: str) -> Self: ... + def __str__(self) -> str: ... # noqa: Y029 + def _cmp(self, other: Self | str) -> bool: ... + +class LooseVersion(Version): + component_re: Pattern[str] + vstring: str + version: tuple[str | int, ...] + def __init__(self, vstring: str | None = None) -> None: ... + def parse(self, vstring: str) -> Self: ... + def __str__(self) -> str: ... # noqa: Y029 + def _cmp(self, other: Self | str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/doctest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/doctest.pyi new file mode 100644 index 00000000..88d066fd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/doctest.pyi @@ -0,0 +1,245 @@ +import types +import unittest +from _typeshed import ExcInfo +from collections.abc import Callable +from typing import Any, NamedTuple +from typing_extensions import TypeAlias + +__all__ = [ + "register_optionflag", + "DONT_ACCEPT_TRUE_FOR_1", + "DONT_ACCEPT_BLANKLINE", + "NORMALIZE_WHITESPACE", + "ELLIPSIS", + "SKIP", + "IGNORE_EXCEPTION_DETAIL", + "COMPARISON_FLAGS", + "REPORT_UDIFF", + "REPORT_CDIFF", + "REPORT_NDIFF", + "REPORT_ONLY_FIRST_FAILURE", + "REPORTING_FLAGS", + "FAIL_FAST", + "Example", + "DocTest", + "DocTestParser", + "DocTestFinder", + "DocTestRunner", + "OutputChecker", + "DocTestFailure", + "UnexpectedException", + "DebugRunner", + "testmod", + "testfile", + "run_docstring_examples", + "DocTestSuite", + "DocFileSuite", + "set_unittest_reportflags", + "script_from_examples", + "testsource", + "debug_src", + "debug", +] + +class TestResults(NamedTuple): + failed: int + attempted: int + +OPTIONFLAGS_BY_NAME: dict[str, int] + +def register_optionflag(name: str) -> int: ... 
+ +DONT_ACCEPT_TRUE_FOR_1: int +DONT_ACCEPT_BLANKLINE: int +NORMALIZE_WHITESPACE: int +ELLIPSIS: int +SKIP: int +IGNORE_EXCEPTION_DETAIL: int + +COMPARISON_FLAGS: int + +REPORT_UDIFF: int +REPORT_CDIFF: int +REPORT_NDIFF: int +REPORT_ONLY_FIRST_FAILURE: int +FAIL_FAST: int + +REPORTING_FLAGS: int + +BLANKLINE_MARKER: str +ELLIPSIS_MARKER: str + +class Example: + source: str + want: str + exc_msg: str | None + lineno: int + indent: int + options: dict[int, bool] + def __init__( + self, + source: str, + want: str, + exc_msg: str | None = None, + lineno: int = 0, + indent: int = 0, + options: dict[int, bool] | None = None, + ) -> None: ... + def __eq__(self, other: object) -> bool: ... + +class DocTest: + examples: list[Example] + globs: dict[str, Any] + name: str + filename: str | None + lineno: int | None + docstring: str | None + def __init__( + self, + examples: list[Example], + globs: dict[str, Any], + name: str, + filename: str | None, + lineno: int | None, + docstring: str | None, + ) -> None: ... + def __lt__(self, other: DocTest) -> bool: ... + def __eq__(self, other: object) -> bool: ... + +class DocTestParser: + def parse(self, string: str, name: str = "") -> list[str | Example]: ... + def get_doctest(self, string: str, globs: dict[str, Any], name: str, filename: str | None, lineno: int | None) -> DocTest: ... + def get_examples(self, string: str, name: str = "") -> list[Example]: ... + +class DocTestFinder: + def __init__( + self, verbose: bool = False, parser: DocTestParser = ..., recurse: bool = True, exclude_empty: bool = True + ) -> None: ... + def find( + self, + obj: object, + name: str | None = None, + module: None | bool | types.ModuleType = None, + globs: dict[str, Any] | None = None, + extraglobs: dict[str, Any] | None = None, + ) -> list[DocTest]: ... + +_Out: TypeAlias = Callable[[str], object] + +class DocTestRunner: + DIVIDER: str + optionflags: int + original_optionflags: int + tries: int + failures: int + test: DocTest + def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: ... + def report_start(self, out: _Out, test: DocTest, example: Example) -> None: ... + def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... + def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... + def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... + def run( + self, test: DocTest, compileflags: int | None = None, out: _Out | None = None, clear_globs: bool = True + ) -> TestResults: ... + def summarize(self, verbose: bool | None = None) -> TestResults: ... + def merge(self, other: DocTestRunner) -> None: ... + +class OutputChecker: + def check_output(self, want: str, got: str, optionflags: int) -> bool: ... + def output_difference(self, example: Example, got: str, optionflags: int) -> str: ... + +class DocTestFailure(Exception): + test: DocTest + example: Example + got: str + def __init__(self, test: DocTest, example: Example, got: str) -> None: ... + +class UnexpectedException(Exception): + test: DocTest + example: Example + exc_info: ExcInfo + def __init__(self, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... + +class DebugRunner(DocTestRunner): ... 
+ +master: DocTestRunner | None + +def testmod( + m: types.ModuleType | None = None, + name: str | None = None, + globs: dict[str, Any] | None = None, + verbose: bool | None = None, + report: bool = True, + optionflags: int = 0, + extraglobs: dict[str, Any] | None = None, + raise_on_error: bool = False, + exclude_empty: bool = False, +) -> TestResults: ... +def testfile( + filename: str, + module_relative: bool = True, + name: str | None = None, + package: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + verbose: bool | None = None, + report: bool = True, + optionflags: int = 0, + extraglobs: dict[str, Any] | None = None, + raise_on_error: bool = False, + parser: DocTestParser = ..., + encoding: str | None = None, +) -> TestResults: ... +def run_docstring_examples( + f: object, + globs: dict[str, Any], + verbose: bool = False, + name: str = "NoName", + compileflags: int | None = None, + optionflags: int = 0, +) -> None: ... +def set_unittest_reportflags(flags: int) -> int: ... + +class DocTestCase(unittest.TestCase): + def __init__( + self, + test: DocTest, + optionflags: int = 0, + setUp: Callable[[DocTest], Any] | None = None, + tearDown: Callable[[DocTest], Any] | None = None, + checker: OutputChecker | None = None, + ) -> None: ... + def runTest(self) -> None: ... + def format_failure(self, err: str) -> str: ... + def __eq__(self, other: object) -> bool: ... + +class SkipDocTestCase(DocTestCase): + def __init__(self, module: types.ModuleType) -> None: ... + def test_skip(self) -> None: ... + +class _DocTestSuite(unittest.TestSuite): ... + +def DocTestSuite( + module: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + extraglobs: dict[str, Any] | None = None, + test_finder: DocTestFinder | None = None, + **options: Any, +) -> _DocTestSuite: ... + +class DocFileCase(DocTestCase): ... + +def DocFileTest( + path: str, + module_relative: bool = True, + package: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + parser: DocTestParser = ..., + encoding: str | None = None, + **options: Any, +) -> DocFileCase: ... +def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: ... +def script_from_examples(s: str) -> str: ... +def testsource(module: None | str | types.ModuleType, name: str) -> str: ... +def debug_src(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug_script(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug(module: None | str | types.ModuleType, name: str, pm: bool = False) -> None: ... 
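The doctest stub above covers both the convenience functions and the lower-level Finder/Runner pair; a self-contained sketch exercising both paths:

    import doctest

    def add(a: int, b: int) -> int:
        """
        >>> add(2, 3)
        5
        """
        return a + b

    # High-level entry point: scan this module's docstrings.
    results = doctest.testmod(verbose=False, optionflags=doctest.ELLIPSIS)
    print(results.failed, results.attempted)       # TestResults(failed, attempted)

    # Lower-level: find and run DocTests explicitly.
    finder = doctest.DocTestFinder()
    runner = doctest.DocTestRunner(optionflags=doctest.NORMALIZE_WHITESPACE)
    for test in finder.find(add, name="add"):
        runner.run(test)
    print(runner.summarize(verbose=False))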
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dummy_threading.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dummy_threading.pyi new file mode 100644 index 00000000..757cb8d4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/dummy_threading.pyi @@ -0,0 +1,2 @@ +from _dummy_threading import * +from _dummy_threading import __all__ as __all__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/__init__.pyi new file mode 100644 index 00000000..fca302f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/__init__.pyi @@ -0,0 +1,29 @@ +from collections.abc import Callable +from email.message import Message +from email.policy import Policy +from typing import IO +from typing_extensions import TypeAlias + +# Definitions imported by multiple submodules in typeshed +_ParamType: TypeAlias = str | tuple[str | None, str | None, str] # noqa: Y047 +_ParamsType: TypeAlias = str | None | tuple[str, str | None, str] # noqa: Y047 + +def message_from_string(s: str, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... +def message_from_bytes(s: bytes | bytearray, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... +def message_from_file(fp: IO[str], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... +def message_from_binary_file(fp: IO[bytes], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... + +# Names in __all__ with no definition: +# base64mime +# charset +# encoders +# errors +# feedparser +# generator +# header +# iterators +# message +# mime +# parser +# quoprimime +# utils diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/_header_value_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/_header_value_parser.pyi new file mode 100644 index 00000000..97008140 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/_header_value_parser.pyi @@ -0,0 +1,398 @@ +import sys +from collections.abc import Iterable, Iterator +from email.errors import HeaderParseError, MessageDefect +from email.policy import Policy +from re import Pattern +from typing import Any +from typing_extensions import Final, Self + +WSP: Final[set[str]] +CFWS_LEADER: Final[set[str]] +SPECIALS: Final[set[str]] +ATOM_ENDS: Final[set[str]] +DOT_ATOM_ENDS: Final[set[str]] +PHRASE_ENDS: Final[set[str]] +TSPECIALS: Final[set[str]] +TOKEN_ENDS: Final[set[str]] +ASPECIALS: Final[set[str]] +ATTRIBUTE_ENDS: Final[set[str]] +EXTENDED_ATTRIBUTE_ENDS: Final[set[str]] + +def quote_string(value: Any) -> str: ... + +rfc2047_matcher: Pattern[str] + +class TokenList(list[TokenList | Terminal]): + token_type: str | None + syntactic_break: bool + ew_combine_allowed: bool + defects: list[MessageDefect] + def __init__(self, *args: Any, **kw: Any) -> None: ... + @property + def value(self) -> str: ... + @property + def all_defects(self) -> list[MessageDefect]: ... + def startswith_fws(self) -> bool: ... + @property + def as_ew_allowed(self) -> bool: ... + @property + def comments(self) -> list[str]: ... + def fold(self, *, policy: Policy) -> str: ... + def pprint(self, indent: str = "") -> None: ... 
+ def ppstr(self, indent: str = "") -> str: ... + +class WhiteSpaceTokenList(TokenList): ... + +class UnstructuredTokenList(TokenList): + token_type: str + +class Phrase(TokenList): + token_type: str + +class Word(TokenList): + token_type: str + +class CFWSList(WhiteSpaceTokenList): + token_type: str + +class Atom(TokenList): + token_type: str + +class Token(TokenList): + token_type: str + encode_as_ew: bool + +class EncodedWord(TokenList): + token_type: str + cte: str | None + charset: str | None + lang: str | None + +class QuotedString(TokenList): + token_type: str + @property + def content(self) -> str: ... + @property + def quoted_value(self) -> str: ... + @property + def stripped_value(self) -> str: ... + +class BareQuotedString(QuotedString): + token_type: str + +class Comment(WhiteSpaceTokenList): + token_type: str + def quote(self, value: Any) -> str: ... + @property + def content(self) -> str: ... + +class AddressList(TokenList): + token_type: str + @property + def addresses(self) -> list[Address]: ... + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... + +class Address(TokenList): + token_type: str + @property + def display_name(self) -> str: ... + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... + +class MailboxList(TokenList): + token_type: str + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... + +class GroupList(TokenList): + token_type: str + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... + +class Group(TokenList): + token_type: str + @property + def mailboxes(self) -> list[Mailbox]: ... + @property + def all_mailboxes(self) -> list[Mailbox]: ... + @property + def display_name(self) -> str: ... + +class NameAddr(TokenList): + token_type: str + @property + def display_name(self) -> str: ... + @property + def local_part(self) -> str: ... + @property + def domain(self) -> str: ... + @property + def route(self) -> list[Domain] | None: ... + @property + def addr_spec(self) -> str: ... + +class AngleAddr(TokenList): + token_type: str + @property + def local_part(self) -> str: ... + @property + def domain(self) -> str: ... + @property + def route(self) -> list[Domain] | None: ... + @property + def addr_spec(self) -> str: ... + +class ObsRoute(TokenList): + token_type: str + @property + def domains(self) -> list[Domain]: ... + +class Mailbox(TokenList): + token_type: str + @property + def display_name(self) -> str: ... + @property + def local_part(self) -> str: ... + @property + def domain(self) -> str: ... + @property + def route(self) -> list[str]: ... + @property + def addr_spec(self) -> str: ... + +class InvalidMailbox(TokenList): + token_type: str + @property + def display_name(self) -> None: ... + @property + def local_part(self) -> None: ... + @property + def domain(self) -> None: ... + @property + def route(self) -> None: ... + @property + def addr_spec(self) -> None: ... + +class Domain(TokenList): + token_type: str + as_ew_allowed: bool + @property + def domain(self) -> str: ... + +class DotAtom(TokenList): + token_type: str + +class DotAtomText(TokenList): + token_type: str + as_ew_allowed: bool + +if sys.version_info >= (3, 8): + class NoFoldLiteral(TokenList): + token_type: str + as_ew_allowed: bool + +class AddrSpec(TokenList): + token_type: str + as_ew_allowed: bool + @property + def local_part(self) -> str: ... 
+ @property + def domain(self) -> str: ... + @property + def addr_spec(self) -> str: ... + +class ObsLocalPart(TokenList): + token_type: str + as_ew_allowed: bool + +class DisplayName(Phrase): + token_type: str + @property + def display_name(self) -> str: ... + +class LocalPart(TokenList): + token_type: str + as_ew_allowed: bool + @property + def local_part(self) -> str: ... + +class DomainLiteral(TokenList): + token_type: str + as_ew_allowed: bool + @property + def domain(self) -> str: ... + @property + def ip(self) -> str: ... + +class MIMEVersion(TokenList): + token_type: str + major: int | None + minor: int | None + +class Parameter(TokenList): + token_type: str + sectioned: bool + extended: bool + charset: str + @property + def section_number(self) -> int: ... + @property + def param_value(self) -> str: ... + +class InvalidParameter(Parameter): + token_type: str + +class Attribute(TokenList): + token_type: str + @property + def stripped_value(self) -> str: ... + +class Section(TokenList): + token_type: str + number: int | None + +class Value(TokenList): + token_type: str + @property + def stripped_value(self) -> str: ... + +class MimeParameters(TokenList): + token_type: str + syntactic_break: bool + @property + def params(self) -> Iterator[tuple[str, str]]: ... + +class ParameterizedHeaderValue(TokenList): + syntactic_break: bool + @property + def params(self) -> Iterable[tuple[str, str]]: ... + +class ContentType(ParameterizedHeaderValue): + token_type: str + as_ew_allowed: bool + maintype: str + subtype: str + +class ContentDisposition(ParameterizedHeaderValue): + token_type: str + as_ew_allowed: bool + content_disposition: Any + +class ContentTransferEncoding(TokenList): + token_type: str + as_ew_allowed: bool + cte: str + +class HeaderLabel(TokenList): + token_type: str + as_ew_allowed: bool + +if sys.version_info >= (3, 8): + class MsgID(TokenList): + token_type: str + as_ew_allowed: bool + def fold(self, policy: Policy) -> str: ... + + class MessageID(MsgID): + token_type: str + + class InvalidMessageID(MessageID): + token_type: str + +class Header(TokenList): + token_type: str + +class Terminal(str): + as_ew_allowed: bool + ew_combine_allowed: bool + syntactic_break: bool + token_type: str + defects: list[MessageDefect] + def __new__(cls, value: str, token_type: str) -> Self: ... + def pprint(self) -> None: ... + @property + def all_defects(self) -> list[MessageDefect]: ... + def pop_trailing_ws(self) -> None: ... + @property + def comments(self) -> list[str]: ... + def __getnewargs__(self) -> tuple[str, str]: ... # type: ignore[override] + +class WhiteSpaceTerminal(Terminal): + @property + def value(self) -> str: ... + def startswith_fws(self) -> bool: ... + +class ValueTerminal(Terminal): + @property + def value(self) -> ValueTerminal: ... + def startswith_fws(self) -> bool: ... + +class EWWhiteSpaceTerminal(WhiteSpaceTerminal): ... +class _InvalidEwError(HeaderParseError): ... + +DOT: Final[ValueTerminal] +ListSeparator: Final[ValueTerminal] +RouteComponentMarker: Final[ValueTerminal] + +def get_fws(value: str) -> tuple[WhiteSpaceTerminal, str]: ... +def get_encoded_word(value: str) -> tuple[EncodedWord, str]: ... +def get_unstructured(value: str) -> UnstructuredTokenList: ... +def get_qp_ctext(value: str) -> tuple[WhiteSpaceTerminal, str]: ... +def get_qcontent(value: str) -> tuple[ValueTerminal, str]: ... +def get_atext(value: str) -> tuple[ValueTerminal, str]: ... +def get_bare_quoted_string(value: str) -> tuple[BareQuotedString, str]: ... 
+def get_comment(value: str) -> tuple[Comment, str]: ... +def get_cfws(value: str) -> tuple[CFWSList, str]: ... +def get_quoted_string(value: str) -> tuple[QuotedString, str]: ... +def get_atom(value: str) -> tuple[Atom, str]: ... +def get_dot_atom_text(value: str) -> tuple[DotAtomText, str]: ... +def get_dot_atom(value: str) -> tuple[DotAtom, str]: ... +def get_word(value: str) -> tuple[Any, str]: ... +def get_phrase(value: str) -> tuple[Phrase, str]: ... +def get_local_part(value: str) -> tuple[LocalPart, str]: ... +def get_obs_local_part(value: str) -> tuple[ObsLocalPart, str]: ... +def get_dtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_domain_literal(value: str) -> tuple[DomainLiteral, str]: ... +def get_domain(value: str) -> tuple[Domain, str]: ... +def get_addr_spec(value: str) -> tuple[AddrSpec, str]: ... +def get_obs_route(value: str) -> tuple[ObsRoute, str]: ... +def get_angle_addr(value: str) -> tuple[AngleAddr, str]: ... +def get_display_name(value: str) -> tuple[DisplayName, str]: ... +def get_name_addr(value: str) -> tuple[NameAddr, str]: ... +def get_mailbox(value: str) -> tuple[Mailbox, str]: ... +def get_invalid_mailbox(value: str, endchars: str) -> tuple[InvalidMailbox, str]: ... +def get_mailbox_list(value: str) -> tuple[MailboxList, str]: ... +def get_group_list(value: str) -> tuple[GroupList, str]: ... +def get_group(value: str) -> tuple[Group, str]: ... +def get_address(value: str) -> tuple[Address, str]: ... +def get_address_list(value: str) -> tuple[AddressList, str]: ... + +if sys.version_info >= (3, 8): + def get_no_fold_literal(value: str) -> tuple[NoFoldLiteral, str]: ... + def get_msg_id(value: str) -> tuple[MsgID, str]: ... + def parse_message_id(value: str) -> MessageID: ... + +def parse_mime_version(value: str) -> MIMEVersion: ... +def get_invalid_parameter(value: str) -> tuple[InvalidParameter, str]: ... +def get_ttext(value: str) -> tuple[ValueTerminal, str]: ... +def get_token(value: str) -> tuple[Token, str]: ... +def get_attrtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_attribute(value: str) -> tuple[Attribute, str]: ... +def get_extended_attrtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_extended_attribute(value: str) -> tuple[Attribute, str]: ... +def get_section(value: str) -> tuple[Section, str]: ... +def get_value(value: str) -> tuple[Value, str]: ... +def get_parameter(value: str) -> tuple[Parameter, str]: ... +def parse_mime_parameters(value: str) -> MimeParameters: ... +def parse_content_type_header(value: str) -> ContentType: ... +def parse_content_disposition_header(value: str) -> ContentDisposition: ... +def parse_content_transfer_encoding_header(value: str) -> ContentTransferEncoding: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/base64mime.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/base64mime.pyi new file mode 100644 index 00000000..563cd7f6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/base64mime.pyi @@ -0,0 +1,13 @@ +__all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encode", "header_length"] + +from _typeshed import ReadableBuffer + +def header_length(bytearray: str | bytes | bytearray) -> int: ... +def header_encode(header_bytes: str | ReadableBuffer, charset: str = "iso-8859-1") -> str: ... + +# First argument should be a buffer that supports slicing and len(). 
+def body_encode(s: bytes | bytearray, maxlinelen: int = 76, eol: str = "\n") -> str: ... +def decode(string: str | ReadableBuffer) -> bytes: ... + +body_decode = decode +decodestring = decode diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/charset.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/charset.pyi new file mode 100644 index 00000000..24b8fd76 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/charset.pyi @@ -0,0 +1,29 @@ +from collections.abc import Iterator + +__all__ = ["Charset", "add_alias", "add_charset", "add_codec"] + +QP: int # undocumented +BASE64: int # undocumented +SHORTEST: int # undocumented + +class Charset: + input_charset: str + header_encoding: int + body_encoding: int + output_charset: str | None + input_codec: str | None + output_codec: str | None + def __init__(self, input_charset: str = "us-ascii") -> None: ... + def get_body_encoding(self) -> str: ... + def get_output_charset(self) -> str | None: ... + def header_encode(self, string: str) -> str: ... + def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str]: ... + def body_encode(self, string: str) -> str: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... + +def add_charset( + charset: str, header_enc: int | None = None, body_enc: int | None = None, output_charset: str | None = None +) -> None: ... +def add_alias(alias: str, canonical: str) -> None: ... +def add_codec(charset: str, codecname: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/contentmanager.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/contentmanager.pyi new file mode 100644 index 00000000..3214f1a4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/contentmanager.pyi @@ -0,0 +1,11 @@ +from collections.abc import Callable +from email.message import Message +from typing import Any + +class ContentManager: + def get_content(self, msg: Message, *args: Any, **kw: Any) -> Any: ... + def set_content(self, msg: Message, obj: Any, *args: Any, **kw: Any) -> Any: ... + def add_get_handler(self, key: str, handler: Callable[..., Any]) -> None: ... + def add_set_handler(self, typekey: type, handler: Callable[..., Any]) -> None: ... + +raw_data_manager: ContentManager diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/encoders.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/encoders.pyi new file mode 100644 index 00000000..55223bdc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/encoders.pyi @@ -0,0 +1,8 @@ +from email.message import Message + +__all__ = ["encode_7or8bit", "encode_base64", "encode_noop", "encode_quopri"] + +def encode_base64(msg: Message) -> None: ... +def encode_quopri(msg: Message) -> None: ... +def encode_7or8bit(msg: Message) -> None: ... +def encode_noop(msg: Message) -> None: ... 
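email.charset decides header/body encodings per character set, and email.encoders apply a Content-Transfer-Encoding to an existing Message. A small sketch of both (the payload bytes are arbitrary):

    from email.charset import Charset
    from email.encoders import encode_base64
    from email.message import Message

    utf8 = Charset("utf-8")
    print(utf8.get_body_encoding())          # "base64" for utf-8
    print(utf8.header_encode("Grüße"))       # RFC 2047 encoded-word

    msg = Message()
    msg.set_payload(b"\x00\x01 arbitrary bytes")
    encode_base64(msg)                       # re-encodes payload, sets CTE header
    print(msg["Content-Transfer-Encoding"])  # "base64"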
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/errors.pyi new file mode 100644 index 00000000..c54f1560 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/errors.pyi @@ -0,0 +1,39 @@ +import sys + +class MessageError(Exception): ... +class MessageParseError(MessageError): ... +class HeaderParseError(MessageParseError): ... +class BoundaryError(MessageParseError): ... +class MultipartConversionError(MessageError, TypeError): ... +class CharsetError(MessageError): ... + +class MessageDefect(ValueError): + def __init__(self, line: str | None = None) -> None: ... + +class NoBoundaryInMultipartDefect(MessageDefect): ... +class StartBoundaryNotFoundDefect(MessageDefect): ... +class FirstHeaderLineIsContinuationDefect(MessageDefect): ... +class MisplacedEnvelopeHeaderDefect(MessageDefect): ... +class MultipartInvariantViolationDefect(MessageDefect): ... +class InvalidMultipartContentTransferEncodingDefect(MessageDefect): ... +class UndecodableBytesDefect(MessageDefect): ... +class InvalidBase64PaddingDefect(MessageDefect): ... +class InvalidBase64CharactersDefect(MessageDefect): ... +class InvalidBase64LengthDefect(MessageDefect): ... +class CloseBoundaryNotFoundDefect(MessageDefect): ... +class MissingHeaderBodySeparatorDefect(MessageDefect): ... + +MalformedHeaderDefect = MissingHeaderBodySeparatorDefect + +class HeaderDefect(MessageDefect): ... +class InvalidHeaderDefect(HeaderDefect): ... +class HeaderMissingRequiredValue(HeaderDefect): ... + +class NonPrintableDefect(HeaderDefect): + def __init__(self, non_printables: str | None) -> None: ... + +class ObsoleteHeaderDefect(HeaderDefect): ... +class NonASCIILocalPartDefect(HeaderDefect): ... + +if sys.version_info >= (3, 10): + class InvalidDateDefect(HeaderDefect): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/feedparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/feedparser.pyi new file mode 100644 index 00000000..4b7f73b9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/feedparser.pyi @@ -0,0 +1,24 @@ +from collections.abc import Callable +from email.message import Message +from email.policy import Policy +from typing import Generic, TypeVar, overload + +__all__ = ["FeedParser", "BytesFeedParser"] + +_MessageT = TypeVar("_MessageT", bound=Message) + +class FeedParser(Generic[_MessageT]): + @overload + def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy = ...) -> None: ... + @overload + def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: ... + def feed(self, data: str) -> None: ... + def close(self) -> _MessageT: ... + +class BytesFeedParser(Generic[_MessageT]): + @overload + def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy = ...) -> None: ... + @overload + def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: ... + def feed(self, data: bytes | bytearray) -> None: ... + def close(self) -> _MessageT: ... 
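FeedParser/BytesFeedParser above are the incremental parsers behind email.parser; a minimal sketch feeding a message in two chunks:

    from email import policy
    from email.feedparser import BytesFeedParser

    parser = BytesFeedParser(policy=policy.default)
    parser.feed(b"Subject: hello\r\nFrom: a@example.com\r\n")
    parser.feed(b"\r\nbody text\r\n")
    msg = parser.close()                     # an EmailMessage under policy.default
    print(msg["Subject"], msg.get_content())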
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/generator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/generator.pyi new file mode 100644 index 00000000..8362dd9c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/generator.pyi @@ -0,0 +1,42 @@ +from _typeshed import SupportsWrite +from email.message import Message +from email.policy import Policy + +__all__ = ["Generator", "DecodedGenerator", "BytesGenerator"] + +class Generator: + def clone(self, fp: SupportsWrite[str]) -> Generator: ... + def write(self, s: str) -> None: ... + def __init__( + self, + outfp: SupportsWrite[str], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + *, + policy: Policy | None = None, + ) -> None: ... + def flatten(self, msg: Message, unixfrom: bool = False, linesep: str | None = None) -> None: ... + +class BytesGenerator: + def clone(self, fp: SupportsWrite[bytes]) -> BytesGenerator: ... + def write(self, s: str) -> None: ... + def __init__( + self, + outfp: SupportsWrite[bytes], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + *, + policy: Policy | None = None, + ) -> None: ... + def flatten(self, msg: Message, unixfrom: bool = False, linesep: str | None = None) -> None: ... + +class DecodedGenerator(Generator): + def __init__( + self, + outfp: SupportsWrite[str], + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + fmt: str | None = None, + *, + policy: Policy | None = None, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/header.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/header.pyi new file mode 100644 index 00000000..c6f0c6fb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/header.pyi @@ -0,0 +1,31 @@ +from collections.abc import Iterable +from email.charset import Charset +from typing import Any + +__all__ = ["Header", "decode_header", "make_header"] + +class Header: + def __init__( + self, + s: bytes | bytearray | str | None = None, + charset: Charset | str | None = None, + maxlinelen: int | None = None, + header_name: str | None = None, + continuation_ws: str = " ", + errors: str = "strict", + ) -> None: ... + def append(self, s: bytes | bytearray | str, charset: Charset | str | None = None, errors: str = "strict") -> None: ... + def encode(self, splitchars: str = ";, \t", maxlinelen: int | None = None, linesep: str = "\n") -> str: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... + +# decode_header() either returns list[tuple[str, None]] if the header +# contains no encoded parts, or list[tuple[bytes, str | None]] if the header +# contains at least one encoded part. +def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: ... +def make_header( + decoded_seq: Iterable[tuple[bytes | bytearray | str, str | None]], + maxlinelen: int | None = None, + header_name: str | None = None, + continuation_ws: str = " ", +) -> Header: ... 
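email.header round-trips non-ASCII header values through RFC 2047 encoded-words; a quick sketch of the three public names stubbed above:

    from email.header import Header, decode_header, make_header

    h = Header("Grüße aus Zürich", charset="utf-8", header_name="Subject")
    wire = h.encode()               # folded, RFC 2047 encoded-words
    parts = decode_header(wire)     # [(bytes, "utf-8"), ...] for encoded chunks
    print(str(make_header(parts)))  # decodes back to the original text (modulo folding)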
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/headerregistry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/headerregistry.pyi new file mode 100644 index 00000000..e158e898 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/headerregistry.pyi @@ -0,0 +1,181 @@ +import sys +import types +from collections.abc import Iterable, Mapping +from datetime import datetime as _datetime +from email._header_value_parser import ( + AddressList, + ContentDisposition, + ContentTransferEncoding, + ContentType, + MIMEVersion, + TokenList, + UnstructuredTokenList, +) +from email.errors import MessageDefect +from email.policy import Policy +from typing import Any, ClassVar, Protocol +from typing_extensions import Literal, Self + +class BaseHeader(str): + # max_count is actually more of an abstract ClassVar (not defined on the base class, but expected to be defined in subclasses) + max_count: ClassVar[Literal[1] | None] + @property + def name(self) -> str: ... + @property + def defects(self) -> tuple[MessageDefect, ...]: ... + def __new__(cls, name: str, value: Any) -> Self: ... + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... + def fold(self, *, policy: Policy) -> str: ... + +class UnstructuredHeader: + max_count: ClassVar[Literal[1] | None] + @staticmethod + def value_parser(value: str) -> UnstructuredTokenList: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + +class UniqueUnstructuredHeader(UnstructuredHeader): + max_count: ClassVar[Literal[1]] + +class DateHeader: + max_count: ClassVar[Literal[1] | None] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], datetime: _datetime) -> None: ... + @property + def datetime(self) -> _datetime: ... + @staticmethod + def value_parser(value: str) -> UnstructuredTokenList: ... + @classmethod + def parse(cls, value: str | _datetime, kwds: dict[str, Any]) -> None: ... + +class UniqueDateHeader(DateHeader): + max_count: ClassVar[Literal[1]] + +class AddressHeader: + max_count: ClassVar[Literal[1] | None] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], groups: Iterable[Group]) -> None: ... + @property + def groups(self) -> tuple[Group, ...]: ... + @property + def addresses(self) -> tuple[Address, ...]: ... + @staticmethod + def value_parser(value: str) -> AddressList: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + +class UniqueAddressHeader(AddressHeader): + max_count: ClassVar[Literal[1]] + +class SingleAddressHeader(AddressHeader): + @property + def address(self) -> Address: ... + +class UniqueSingleAddressHeader(SingleAddressHeader): + max_count: ClassVar[Literal[1]] + +class MIMEVersionHeader: + max_count: ClassVar[Literal[1]] + def init( + self, + name: str, + *, + parse_tree: TokenList, + defects: Iterable[MessageDefect], + version: str | None, + major: int | None, + minor: int | None, + ) -> None: ... + @property + def version(self) -> str | None: ... + @property + def major(self) -> int | None: ... + @property + def minor(self) -> int | None: ... + @staticmethod + def value_parser(value: str) -> MIMEVersion: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... 
+ +class ParameterizedMIMEHeader: + max_count: ClassVar[Literal[1]] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], params: Mapping[str, Any]) -> None: ... + @property + def params(self) -> types.MappingProxyType[str, Any]: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + +class ContentTypeHeader(ParameterizedMIMEHeader): + @property + def content_type(self) -> str: ... + @property + def maintype(self) -> str: ... + @property + def subtype(self) -> str: ... + @staticmethod + def value_parser(value: str) -> ContentType: ... + +class ContentDispositionHeader(ParameterizedMIMEHeader): + # init is redefined but has the same signature as parent class, so is omitted from the stub + @property + def content_disposition(self) -> str | None: ... + @staticmethod + def value_parser(value: str) -> ContentDisposition: ... + +class ContentTransferEncodingHeader: + max_count: ClassVar[Literal[1]] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... + @property + def cte(self) -> str: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + @staticmethod + def value_parser(value: str) -> ContentTransferEncoding: ... + +if sys.version_info >= (3, 8): + from email._header_value_parser import MessageID + + class MessageIDHeader: + max_count: ClassVar[Literal[1]] + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + @staticmethod + def value_parser(value: str) -> MessageID: ... + +class _HeaderParser(Protocol): + max_count: ClassVar[Literal[1] | None] + @staticmethod + def value_parser(value: str) -> TokenList: ... + @classmethod + def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... + +class HeaderRegistry: + registry: dict[str, type[_HeaderParser]] + base_class: type[BaseHeader] + default_class: type[_HeaderParser] + def __init__( + self, base_class: type[BaseHeader] = ..., default_class: type[_HeaderParser] = ..., use_default_map: bool = True + ) -> None: ... + def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: ... + def __getitem__(self, name: str) -> type[BaseHeader]: ... + def __call__(self, name: str, value: Any) -> BaseHeader: ... + +class Address: + @property + def display_name(self) -> str: ... + @property + def username(self) -> str: ... + @property + def domain(self) -> str: ... + @property + def addr_spec(self) -> str: ... + def __init__( + self, display_name: str = "", username: str | None = "", domain: str | None = "", addr_spec: str | None = None + ) -> None: ... + def __eq__(self, other: object) -> bool: ... + +class Group: + @property + def display_name(self) -> str | None: ... + @property + def addresses(self) -> tuple[Address, ...]: ... + def __init__(self, display_name: str | None = None, addresses: Iterable[Address] | None = None) -> None: ... + def __eq__(self, other: object) -> bool: ... 
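Note: a small usage sketch for the Address/Group/HeaderRegistry surface stubbed above, standard library only (the names and domain are illustrative sample data):

from email.headerregistry import Address, Group, HeaderRegistry

addr = Address(display_name="Jane Doe", username="jane", domain="example.com")
print(addr.addr_spec)     # -> jane@example.com
print(str(addr))          # -> Jane Doe <jane@example.com>

group = Group("Team", (addr,))
print(group.addresses[0].username)       # -> jane

# HeaderRegistry maps header names to BaseHeader subclasses and, when
# called, parses a raw value into the appropriate header type.
registry = HeaderRegistry()
to_header = registry("To", "Jane Doe <jane@example.com>")
print(type(to_header).__name__)          # e.g. _UniqueAddressHeader
print(to_header.addresses[0].addr_spec)  # -> jane@example.com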
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/iterators.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/iterators.pyi new file mode 100644 index 00000000..d964d684 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/iterators.pyi @@ -0,0 +1,12 @@ +from _typeshed import SupportsWrite +from collections.abc import Iterator +from email.message import Message + +__all__ = ["body_line_iterator", "typed_subpart_iterator", "walk"] + +def body_line_iterator(msg: Message, decode: bool = False) -> Iterator[str]: ... +def typed_subpart_iterator(msg: Message, maintype: str = "text", subtype: str | None = None) -> Iterator[str]: ... +def walk(self: Message) -> Iterator[Message]: ... + +# We include the seemingly private function because it is documented in the stdlib documentation. +def _structure(msg: Message, fp: SupportsWrite[str] | None = None, level: int = 0, include_default: bool = False) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/message.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/message.pyi new file mode 100644 index 00000000..14e01807 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/message.pyi @@ -0,0 +1,124 @@ +from collections.abc import Generator, Iterator, Sequence +from email import _ParamsType, _ParamType +from email.charset import Charset +from email.contentmanager import ContentManager +from email.errors import MessageDefect +from email.policy import Policy +from typing import Any, TypeVar, overload +from typing_extensions import Self, TypeAlias + +__all__ = ["Message", "EmailMessage"] + +_T = TypeVar("_T") + +_PayloadType: TypeAlias = list[Message] | str | bytes | bytearray +_CharsetType: TypeAlias = Charset | str | None +_HeaderType: TypeAlias = Any + +class Message: + policy: Policy # undocumented + preamble: str | None + epilogue: str | None + defects: list[MessageDefect] + def is_multipart(self) -> bool: ... + def set_unixfrom(self, unixfrom: str) -> None: ... + def get_unixfrom(self) -> str | None: ... + def attach(self, payload: Message) -> None: ... + def get_payload(self, i: int | None = None, decode: bool = False) -> Any: ... # returns _PayloadType | None + def set_payload(self, payload: _PayloadType, charset: _CharsetType = None) -> None: ... + def set_charset(self, charset: _CharsetType) -> None: ... + def get_charset(self) -> _CharsetType: ... + def __len__(self) -> int: ... + def __contains__(self, name: str) -> bool: ... + def __iter__(self) -> Iterator[str]: ... + def __getitem__(self, name: str) -> _HeaderType: ... + def __setitem__(self, name: str, val: _HeaderType) -> None: ... + def __delitem__(self, name: str) -> None: ... + def keys(self) -> list[str]: ... + def values(self) -> list[_HeaderType]: ... + def items(self) -> list[tuple[str, _HeaderType]]: ... + @overload + def get(self, name: str, failobj: None = None) -> _HeaderType | None: ... + @overload + def get(self, name: str, failobj: _T) -> _HeaderType | _T: ... + @overload + def get_all(self, name: str, failobj: None = None) -> list[_HeaderType] | None: ... + @overload + def get_all(self, name: str, failobj: _T) -> list[_HeaderType] | _T: ... + def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... + def replace_header(self, _name: str, _value: _HeaderType) -> None: ... 
+ def get_content_type(self) -> str: ... + def get_content_maintype(self) -> str: ... + def get_content_subtype(self) -> str: ... + def get_default_type(self) -> str: ... + def set_default_type(self, ctype: str) -> None: ... + @overload + def get_params( + self, failobj: None = None, header: str = "content-type", unquote: bool = True + ) -> list[tuple[str, str]] | None: ... + @overload + def get_params(self, failobj: _T, header: str = "content-type", unquote: bool = True) -> list[tuple[str, str]] | _T: ... + @overload + def get_param( + self, param: str, failobj: None = None, header: str = "content-type", unquote: bool = True + ) -> _ParamType | None: ... + @overload + def get_param(self, param: str, failobj: _T, header: str = "content-type", unquote: bool = True) -> _ParamType | _T: ... + def del_param(self, param: str, header: str = "content-type", requote: bool = True) -> None: ... + def set_type(self, type: str, header: str = "Content-Type", requote: bool = True) -> None: ... + @overload + def get_filename(self, failobj: None = None) -> str | None: ... + @overload + def get_filename(self, failobj: _T) -> str | _T: ... + @overload + def get_boundary(self, failobj: None = None) -> str | None: ... + @overload + def get_boundary(self, failobj: _T) -> str | _T: ... + def set_boundary(self, boundary: str) -> None: ... + @overload + def get_content_charset(self) -> str | None: ... + @overload + def get_content_charset(self, failobj: _T) -> str | _T: ... + @overload + def get_charsets(self, failobj: None = None) -> list[str] | None: ... + @overload + def get_charsets(self, failobj: _T) -> list[str] | _T: ... + def walk(self) -> Generator[Self, None, None]: ... + def get_content_disposition(self) -> str | None: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy | None = None) -> str: ... + def as_bytes(self, unixfrom: bool = False, policy: Policy | None = None) -> bytes: ... + def __bytes__(self) -> bytes: ... + def set_param( + self, + param: str, + value: str, + header: str = "Content-Type", + requote: bool = True, + charset: str | None = None, + language: str = "", + replace: bool = False, + ) -> None: ... + def __init__(self, policy: Policy = ...) -> None: ... + # The following two methods are undocumented, but a source code comment states that they are public API + def set_raw(self, name: str, value: _HeaderType) -> None: ... + def raw_items(self) -> Iterator[tuple[str, _HeaderType]]: ... + +class MIMEPart(Message): + def __init__(self, policy: Policy | None = None) -> None: ... + def get_body(self, preferencelist: Sequence[str] = ...) -> Message | None: ... + def iter_attachments(self) -> Iterator[Message]: ... + def iter_parts(self) -> Iterator[Message]: ... + def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ... + def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ... + def make_related(self, boundary: str | None = None) -> None: ... + def make_alternative(self, boundary: str | None = None) -> None: ... + def make_mixed(self, boundary: str | None = None) -> None: ... + def add_related(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... + def add_alternative(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... + def add_attachment(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... + def clear(self) -> None: ... 
+ def clear_content(self) -> None: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy | None = None) -> str: ... + def is_attachment(self) -> bool: ... + +class EmailMessage(MIMEPart): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/application.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/application.pyi new file mode 100644 index 00000000..a7ab9dc7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/application.pyi @@ -0,0 +1,17 @@ +from collections.abc import Callable +from email import _ParamsType +from email.mime.nonmultipart import MIMENonMultipart +from email.policy import Policy + +__all__ = ["MIMEApplication"] + +class MIMEApplication(MIMENonMultipart): + def __init__( + self, + _data: str | bytes | bytearray, + _subtype: str = "octet-stream", + _encoder: Callable[[MIMEApplication], object] = ..., + *, + policy: Policy | None = None, + **_params: _ParamsType, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/audio.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/audio.pyi new file mode 100644 index 00000000..090dfb96 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/audio.pyi @@ -0,0 +1,17 @@ +from collections.abc import Callable +from email import _ParamsType +from email.mime.nonmultipart import MIMENonMultipart +from email.policy import Policy + +__all__ = ["MIMEAudio"] + +class MIMEAudio(MIMENonMultipart): + def __init__( + self, + _audiodata: str | bytes | bytearray, + _subtype: str | None = None, + _encoder: Callable[[MIMEAudio], object] = ..., + *, + policy: Policy | None = None, + **_params: _ParamsType, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/base.pyi new file mode 100644 index 00000000..b733709f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/base.pyi @@ -0,0 +1,8 @@ +import email.message +from email import _ParamsType +from email.policy import Policy + +__all__ = ["MIMEBase"] + +class MIMEBase(email.message.Message): + def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = None, **_params: _ParamsType) -> None: ... 
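Note: the Message/MIMEPart/EmailMessage methods stubbed above (mapping-style header access, set_content, add_attachment, walk) can be exercised like this; a minimal sketch with made-up addresses and payload data:

from email.message import EmailMessage

msg = EmailMessage()
msg["From"] = "jane@example.com"       # mapping-style header access
msg["To"] = "joe@example.com"
msg["Subject"] = "report"
msg.set_content("plain-text body\n")   # MIMEPart.set_content from the stub
msg.add_attachment(b"\x00\x01", maintype="application",
                   subtype="octet-stream", filename="blob.bin")

print(msg.is_multipart())              # -> True (body plus attachment)
for part in msg.walk():                # walk() yields the part tree
    print(part.get_content_type())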
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/image.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/image.pyi new file mode 100644 index 00000000..b47afa6c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/image.pyi @@ -0,0 +1,17 @@ +from collections.abc import Callable +from email import _ParamsType +from email.mime.nonmultipart import MIMENonMultipart +from email.policy import Policy + +__all__ = ["MIMEImage"] + +class MIMEImage(MIMENonMultipart): + def __init__( + self, + _imagedata: str | bytes | bytearray, + _subtype: str | None = None, + _encoder: Callable[[MIMEImage], object] = ..., + *, + policy: Policy | None = None, + **_params: _ParamsType, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/message.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/message.pyi new file mode 100644 index 00000000..23cf5861 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/message.pyi @@ -0,0 +1,8 @@ +from email.message import Message +from email.mime.nonmultipart import MIMENonMultipart +from email.policy import Policy + +__all__ = ["MIMEMessage"] + +class MIMEMessage(MIMENonMultipart): + def __init__(self, _msg: Message, _subtype: str = "rfc822", *, policy: Policy | None = None) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/multipart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/multipart.pyi new file mode 100644 index 00000000..6163810e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/multipart.pyi @@ -0,0 +1,18 @@ +from collections.abc import Sequence +from email import _ParamsType +from email.message import Message +from email.mime.base import MIMEBase +from email.policy import Policy + +__all__ = ["MIMEMultipart"] + +class MIMEMultipart(MIMEBase): + def __init__( + self, + _subtype: str = "mixed", + boundary: str | None = None, + _subparts: Sequence[Message] | None = None, + *, + policy: Policy | None = None, + **_params: _ParamsType, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/nonmultipart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/nonmultipart.pyi new file mode 100644 index 00000000..5497d89b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/nonmultipart.pyi @@ -0,0 +1,5 @@ +from email.mime.base import MIMEBase + +__all__ = ["MIMENonMultipart"] + +class MIMENonMultipart(MIMEBase): ... 
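Note: the legacy email.mime.* constructors stubbed above compose in the usual way; a short sketch using MIMEMultipart together with MIMEText (whose stub appears just below), with sample body text:

from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

outer = MIMEMultipart("alternative")       # _subtype defaults to "mixed"
outer["Subject"] = "greetings"
outer.attach(MIMEText("hello in plain text", "plain"))
outer.attach(MIMEText("<p>hello in HTML</p>", "html"))

print(outer.get_content_type())            # -> multipart/alternative
print(len(outer.get_payload()))            # -> 2 attached parts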
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/text.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/text.pyi new file mode 100644 index 00000000..74d5ef4c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/mime/text.pyi @@ -0,0 +1,9 @@ +from email.mime.nonmultipart import MIMENonMultipart +from email.policy import Policy + +__all__ = ["MIMEText"] + +class MIMEText(MIMENonMultipart): + def __init__( + self, _text: str, _subtype: str = "plain", _charset: str | None = None, *, policy: Policy | None = None + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/parser.pyi new file mode 100644 index 00000000..28b6aca8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/parser.pyi @@ -0,0 +1,26 @@ +from _typeshed import SupportsRead +from collections.abc import Callable +from email.feedparser import BytesFeedParser as BytesFeedParser, FeedParser as FeedParser +from email.message import Message +from email.policy import Policy +from typing import IO + +__all__ = ["Parser", "HeaderParser", "BytesParser", "BytesHeaderParser", "FeedParser", "BytesFeedParser"] + +class Parser: + def __init__(self, _class: Callable[[], Message] | None = None, *, policy: Policy = ...) -> None: ... + def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> Message: ... + def parsestr(self, text: str, headersonly: bool = False) -> Message: ... + +class HeaderParser(Parser): + def parse(self, fp: SupportsRead[str], headersonly: bool = True) -> Message: ... + def parsestr(self, text: str, headersonly: bool = True) -> Message: ... + +class BytesParser: + def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... + def parse(self, fp: IO[bytes], headersonly: bool = False) -> Message: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> Message: ... + +class BytesHeaderParser(BytesParser): + def parse(self, fp: IO[bytes], headersonly: bool = True) -> Message: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = True) -> Message: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/policy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/policy.pyi new file mode 100644 index 00000000..4df3c1e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/policy.pyi @@ -0,0 +1,81 @@ +from abc import ABCMeta, abstractmethod +from collections.abc import Callable +from email.contentmanager import ContentManager +from email.errors import MessageDefect +from email.header import Header +from email.message import Message +from typing import Any + +__all__ = ["Compat32", "compat32", "Policy", "EmailPolicy", "default", "strict", "SMTP", "HTTP"] + +class Policy(metaclass=ABCMeta): + max_line_length: int | None + linesep: str + cte_type: str + raise_on_defect: bool + mangle_from_: bool + message_factory: Callable[[Policy], Message] | None + def __init__( + self, + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: Callable[[Policy], Message] | None = ..., + ) -> None: ... 
+ def clone(self, **kw: Any) -> Policy: ... + def handle_defect(self, obj: Message, defect: MessageDefect) -> None: ... + def register_defect(self, obj: Message, defect: MessageDefect) -> None: ... + def header_max_count(self, name: str) -> int | None: ... + @abstractmethod + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + @abstractmethod + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... + @abstractmethod + def header_fetch_parse(self, name: str, value: str) -> str: ... + @abstractmethod + def fold(self, name: str, value: str) -> str: ... + @abstractmethod + def fold_binary(self, name: str, value: str) -> bytes: ... + +class Compat32(Policy): + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... + def header_fetch_parse(self, name: str, value: str) -> str | Header: ... # type: ignore[override] + def fold(self, name: str, value: str) -> str: ... + def fold_binary(self, name: str, value: str) -> bytes: ... + +compat32: Compat32 + +class EmailPolicy(Policy): + utf8: bool + refold_source: str + header_factory: Callable[[str, str], str] + content_manager: ContentManager + def __init__( + self, + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: Callable[[Policy], Message] | None = ..., + utf8: bool = ..., + refold_source: str = ..., + header_factory: Callable[[str, str], str] = ..., + content_manager: ContentManager = ..., + ) -> None: ... + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... + def header_fetch_parse(self, name: str, value: str) -> str: ... + def fold(self, name: str, value: str) -> str: ... + def fold_binary(self, name: str, value: str) -> bytes: ... + +default: EmailPolicy +SMTP: EmailPolicy +SMTPUTF8: EmailPolicy +HTTP: EmailPolicy +strict: EmailPolicy diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/quoprimime.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/quoprimime.pyi new file mode 100644 index 00000000..87d08eec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/quoprimime.pyi @@ -0,0 +1,28 @@ +from collections.abc import Iterable + +__all__ = [ + "body_decode", + "body_encode", + "body_length", + "decode", + "decodestring", + "header_decode", + "header_encode", + "header_length", + "quote", + "unquote", +] + +def header_check(octet: int) -> bool: ... +def body_check(octet: int) -> bool: ... +def header_length(bytearray: Iterable[int]) -> int: ... +def body_length(bytearray: Iterable[int]) -> int: ... +def unquote(s: str | bytes | bytearray) -> str: ... +def quote(c: str | bytes | bytearray) -> str: ... +def header_encode(header_bytes: bytes | bytearray, charset: str = "iso-8859-1") -> str: ... +def body_encode(body: str, maxlinelen: int = 76, eol: str = "\n") -> str: ... +def decode(encoded: str, eol: str = "\n") -> str: ... +def header_decode(s: str) -> str: ... 
+ +body_decode = decode +decodestring = decode diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/utils.pyi new file mode 100644 index 00000000..090ddf9e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/email/utils.pyi @@ -0,0 +1,59 @@ +import datetime +import sys +from email import _ParamType +from email.charset import Charset +from typing import overload +from typing_extensions import TypeAlias + +__all__ = [ + "collapse_rfc2231_value", + "decode_params", + "decode_rfc2231", + "encode_rfc2231", + "formataddr", + "formatdate", + "format_datetime", + "getaddresses", + "make_msgid", + "mktime_tz", + "parseaddr", + "parsedate", + "parsedate_tz", + "parsedate_to_datetime", + "unquote", +] + +_PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None] + +def quote(str: str) -> str: ... +def unquote(str: str) -> str: ... +def parseaddr(addr: str | None) -> tuple[str, str]: ... +def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: ... +def getaddresses(fieldvalues: list[str]) -> list[tuple[str, str]]: ... +@overload +def parsedate(data: None) -> None: ... +@overload +def parsedate(data: str) -> tuple[int, int, int, int, int, int, int, int, int] | None: ... +@overload +def parsedate_tz(data: None) -> None: ... +@overload +def parsedate_tz(data: str) -> _PDTZ | None: ... + +if sys.version_info >= (3, 10): + @overload + def parsedate_to_datetime(data: None) -> None: ... + @overload + def parsedate_to_datetime(data: str) -> datetime.datetime: ... + +else: + def parsedate_to_datetime(data: str) -> datetime.datetime: ... + +def mktime_tz(data: _PDTZ) -> int: ... +def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: ... +def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: ... +def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: ... +def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: ... +def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ... +def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: ... +def collapse_rfc2231_value(value: _ParamType, errors: str = "replace", fallback_charset: str = "us-ascii") -> str: ... +def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/encodings/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/encodings/__init__.pyi new file mode 100644 index 00000000..2e83f0f6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/encodings/__init__.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete +from codecs import CodecInfo + +class CodecRegistryError(LookupError, SystemError): ... + +def normalize_encoding(encoding: str | bytes) -> str: ... +def search_function(encoding: str) -> CodecInfo | None: ... + +# Needed for submodules +def __getattr__(name: str) -> Incomplete: ... 
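Note: a few of the email.utils helpers stubbed above, shown end to end with standard-library calls only (the addresses are sample data):

from email.utils import (
    formataddr, parseaddr, formatdate, parsedate_to_datetime, make_msgid,
)

print(formataddr(("Jane Doe", "jane@example.com")))
# -> Jane Doe <jane@example.com>
print(parseaddr("Jane Doe <jane@example.com>"))
# -> ('Jane Doe', 'jane@example.com')

stamp = formatdate(usegmt=True)          # RFC 2822 date string ending in "GMT"
print(parsedate_to_datetime(stamp))      # round-trips to an aware datetime

print(make_msgid(domain="example.com"))  # -> <...@example.com>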
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/encodings/utf_8.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/encodings/utf_8.pyi new file mode 100644 index 00000000..0de51026 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/encodings/utf_8.pyi @@ -0,0 +1,21 @@ +import codecs +from _typeshed import ReadableBuffer + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input: str, final: bool = False) -> bytes: ... + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + @staticmethod + def _buffer_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + @staticmethod + def encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + @staticmethod + def decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... +def encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/encodings/utf_8_sig.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/encodings/utf_8_sig.pyi new file mode 100644 index 00000000..150fe22f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/encodings/utf_8_sig.pyi @@ -0,0 +1,22 @@ +import codecs +from _typeshed import ReadableBuffer + +class IncrementalEncoder(codecs.IncrementalEncoder): + def __init__(self, errors: str = "strict") -> None: ... + def encode(self, input: str, final: bool = False) -> bytes: ... + def getstate(self) -> int: ... # type: ignore[override] + def setstate(self, state: int) -> None: ... # type: ignore[override] + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def __init__(self, errors: str = "strict") -> None: ... + def _buffer_decode(self, input: ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... + +class StreamWriter(codecs.StreamWriter): + def encode(self, input: str, errors: str | None = "strict") -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + def decode(self, input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... + +def getregentry() -> codecs.CodecInfo: ... +def encode(input: str, errors: str | None = "strict") -> tuple[bytes, int]: ... +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ensurepip/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ensurepip/__init__.pyi new file mode 100644 index 00000000..332fb184 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ensurepip/__init__.pyi @@ -0,0 +1,12 @@ +__all__ = ["version", "bootstrap"] + +def version() -> str: ... +def bootstrap( + *, + root: str | None = None, + upgrade: bool = False, + user: bool = False, + altinstall: bool = False, + default_pip: bool = False, + verbosity: int = 0, +) -> None: ... 
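Note: the utf_8_sig codec stubbed above prepends a BOM on encode and strips it on decode; a quick check using only the standard library:

import codecs

data = "héllo".encode("utf-8-sig")
print(data.startswith(codecs.BOM_UTF8))   # -> True
print(data.decode("utf-8-sig"))           # -> héllo  (BOM removed)

# The incremental classes are what codecs.iterencode/iterdecode build on.
decoder = codecs.getincrementaldecoder("utf-8-sig")()
print(decoder.decode(codecs.BOM_UTF8 + b"abc", final=True))  # -> abc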
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/enum.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/enum.pyi new file mode 100644 index 00000000..b46fe429 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/enum.pyi @@ -0,0 +1,283 @@ +import _typeshed +import sys +import types +from _typeshed import SupportsKeysAndGetItem, Unused +from abc import ABCMeta +from builtins import property as _builtins_property +from collections.abc import Iterable, Iterator, Mapping +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +__all__ = ["EnumMeta", "Enum", "IntEnum", "Flag", "IntFlag", "auto", "unique"] + +if sys.version_info >= (3, 11): + __all__ += [ + "CONFORM", + "CONTINUOUS", + "EJECT", + "EnumCheck", + "EnumType", + "FlagBoundary", + "KEEP", + "NAMED_FLAGS", + "ReprEnum", + "STRICT", + "StrEnum", + "UNIQUE", + "global_enum", + "global_enum_repr", + "global_flag_repr", + "global_str", + "member", + "nonmember", + "property", + "verify", + ] + +_EnumMemberT = TypeVar("_EnumMemberT") +_EnumerationT = TypeVar("_EnumerationT", bound=type[Enum]) + +# The following all work: +# >>> from enum import Enum +# >>> from string import ascii_lowercase +# >>> Enum('Foo', names='RED YELLOW GREEN') +# +# >>> Enum('Foo', names=[('RED', 1), ('YELLOW, 2)]) +# +# >>> Enum('Foo', names=((x for x in (ascii_lowercase[i], i)) for i in range(5))) +# +# >>> Enum('Foo', names={'RED': 1, 'YELLOW': 2}) +# +_EnumNames: TypeAlias = str | Iterable[str] | Iterable[Iterable[str | Any]] | Mapping[str, Any] + +if sys.version_info >= (3, 11): + class nonmember(Generic[_EnumMemberT]): + value: _EnumMemberT + def __init__(self, value: _EnumMemberT) -> None: ... + + class member(Generic[_EnumMemberT]): + value: _EnumMemberT + def __init__(self, value: _EnumMemberT) -> None: ... + +class _EnumDict(dict[str, Any]): + def __init__(self) -> None: ... + def __setitem__(self, key: str, value: Any) -> None: ... + if sys.version_info >= (3, 11): + # See comment above `typing.MutableMapping.update` + # for why overloads are preferable to a Union here + # + # Unlike with MutableMapping.update(), the first argument is required, + # hence the type: ignore + @overload # type: ignore[override] + def update(self, members: SupportsKeysAndGetItem[str, Any], **more_members: Any) -> None: ... + @overload + def update(self, members: Iterable[tuple[str, Any]], **more_members: Any) -> None: ... + +# Note: EnumMeta actually subclasses type directly, not ABCMeta. +# This is a temporary workaround to allow multiple creation of enums with builtins +# such as str as mixins, which due to the handling of ABCs of builtin types, cause +# spurious inconsistent metaclass structure. See #1595. +# Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself +class EnumMeta(ABCMeta): + if sys.version_info >= (3, 11): + def __new__( + metacls: type[_typeshed.Self], + cls: str, + bases: tuple[type, ...], + classdict: _EnumDict, + *, + boundary: FlagBoundary | None = None, + _simple: bool = False, + **kwds: Any, + ) -> _typeshed.Self: ... + elif sys.version_info >= (3, 9): + def __new__( + metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any + ) -> _typeshed.Self: ... + else: + def __new__(metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict) -> _typeshed.Self: ... 
+ + if sys.version_info >= (3, 9): + @classmethod + def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override] + else: + @classmethod + def __prepare__(metacls, cls: str, bases: tuple[type, ...]) -> _EnumDict: ... # type: ignore[override] + + def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + def __contains__(self: type[Any], obj: object) -> bool: ... + def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: ... + @_builtins_property + def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: ... + def __len__(self) -> int: ... + def __bool__(self) -> Literal[True]: ... + def __dir__(self) -> list[str]: ... + # Simple value lookup + @overload # type: ignore[override] + def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ... + # Functional Enum API + if sys.version_info >= (3, 11): + @overload + def __call__( + cls, + value: str, + names: _EnumNames, + *, + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, + boundary: FlagBoundary | None = None, + ) -> type[Enum]: ... + else: + @overload + def __call__( + cls, + value: str, + names: _EnumNames, + *, + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, + ) -> type[Enum]: ... + _member_names_: list[str] # undocumented + _member_map_: dict[str, Enum] # undocumented + _value2member_map_: dict[Any, Enum] # undocumented + +if sys.version_info >= (3, 11): + # In 3.11 `EnumMeta` metaclass is renamed to `EnumType`, but old name also exists. + EnumType = EnumMeta + + class property(types.DynamicClassAttribute): + def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ... + name: str + clsname: str + _magic_enum_attr = property +else: + _magic_enum_attr = types.DynamicClassAttribute + +class Enum(metaclass=EnumMeta): + @_magic_enum_attr + def name(self) -> str: ... + @_magic_enum_attr + def value(self) -> Any: ... + _name_: str + _value_: Any + _ignore_: str | list[str] + _order_: str + __order__: str + @classmethod + def _missing_(cls, value: object) -> Any: ... + @staticmethod + def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: ... + # It's not true that `__new__` will accept any argument type, + # so ideally we'd use `Any` to indicate that the argument type is inexpressible. + # However, using `Any` causes too many false-positives for those using mypy's `--disallow-any-expr` + # (see #7752, #2539, mypy/#5788), + # and in practice using `object` here has the same effect as using `Any`. + def __new__(cls, value: object) -> Self: ... + def __dir__(self) -> list[str]: ... + def __format__(self, format_spec: str) -> str: ... + def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... + +if sys.version_info >= (3, 11): + class ReprEnum(Enum): ... + +if sys.version_info >= (3, 11): + _IntEnumBase = ReprEnum +else: + _IntEnumBase = Enum + +class IntEnum(int, _IntEnumBase): + _value_: int + @_magic_enum_attr + def value(self) -> int: ... + def __new__(cls, value: int) -> Self: ... + +def unique(enumeration: _EnumerationT) -> _EnumerationT: ... + +_auto_null: Any + +# subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto() +class auto(IntFlag): + _value_: Any + @_magic_enum_attr + def value(self) -> Any: ... 
+ def __new__(cls) -> Self: ... + +class Flag(Enum): + _name_: str | None # type: ignore[assignment] + _value_: int + @_magic_enum_attr + def name(self) -> str | None: ... # type: ignore[override] + @_magic_enum_attr + def value(self) -> int: ... + def __contains__(self, other: Self) -> bool: ... + def __bool__(self) -> bool: ... + def __or__(self, other: Self) -> Self: ... + def __and__(self, other: Self) -> Self: ... + def __xor__(self, other: Self) -> Self: ... + def __invert__(self) -> Self: ... + if sys.version_info >= (3, 11): + def __iter__(self) -> Iterator[Self]: ... + def __len__(self) -> int: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +if sys.version_info >= (3, 11): + # The body of the class is the same, but the base classes are different. + class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +else: + class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +if sys.version_info >= (3, 11): + class StrEnum(str, ReprEnum): + def __new__(cls, value: str) -> Self: ... + _value_: str + @_magic_enum_attr + def value(self) -> str: ... + + class EnumCheck(StrEnum): + CONTINUOUS: str + NAMED_FLAGS: str + UNIQUE: str + CONTINUOUS = EnumCheck.CONTINUOUS + NAMED_FLAGS = EnumCheck.NAMED_FLAGS + UNIQUE = EnumCheck.UNIQUE + + class verify: + def __init__(self, *checks: EnumCheck) -> None: ... + def __call__(self, enumeration: _EnumerationT) -> _EnumerationT: ... + + class FlagBoundary(StrEnum): + STRICT: str + CONFORM: str + EJECT: str + KEEP: str + STRICT = FlagBoundary.STRICT + CONFORM = FlagBoundary.CONFORM + EJECT = FlagBoundary.EJECT + KEEP = FlagBoundary.KEEP + + def global_str(self: Enum) -> str: ... + def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ... + def global_enum_repr(self: Enum) -> str: ... + def global_flag_repr(self: Flag) -> str: ... 
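Note: the _EnumNames comment above lists the accepted shapes for the functional Enum() API; a short standard-library sketch of those forms plus a Flag round-trip (member names here are arbitrary examples):

from enum import Enum, IntFlag, auto, unique

Color = Enum("Color", "RED GREEN BLUE")          # names as a space-separated str
print(list(Color))                               # -> [Color.RED, Color.GREEN, Color.BLUE]
print(Color["RED"].value)                        # -> 1 (start defaults to 1)

Status = Enum("Status", {"OK": 0, "FAIL": 1})    # names as a mapping
print(Status(0))                                 # -> Status.OK

@unique
class Perm(IntFlag):
    READ = auto()
    WRITE = auto()
    EXEC = auto()

mode = Perm.READ | Perm.WRITE
print(Perm.READ in mode, int(mode))              # -> True 3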
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/errno.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/errno.pyi new file mode 100644 index 00000000..28874d44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/errno.pyi @@ -0,0 +1,220 @@ +import sys +from collections.abc import Mapping + +errorcode: Mapping[int, str] + +EPERM: int +ENOENT: int +ESRCH: int +EINTR: int +EIO: int +ENXIO: int +E2BIG: int +ENOEXEC: int +EBADF: int +ECHILD: int +EAGAIN: int +ENOMEM: int +EACCES: int +EFAULT: int +EBUSY: int +EEXIST: int +EXDEV: int +ENODEV: int +ENOTDIR: int +EISDIR: int +EINVAL: int +ENFILE: int +EMFILE: int +ENOTTY: int +ETXTBSY: int +EFBIG: int +ENOSPC: int +ESPIPE: int +EROFS: int +EMLINK: int +EPIPE: int +EDOM: int +ERANGE: int +EDEADLK: int +ENAMETOOLONG: int +ENOLCK: int +ENOSYS: int +ENOTEMPTY: int +ELOOP: int +EWOULDBLOCK: int +ENOMSG: int +EIDRM: int +ENOSTR: int +ENODATA: int +ETIME: int +ENOSR: int +EREMOTE: int +ENOLINK: int +EPROTO: int +EBADMSG: int +EOVERFLOW: int +EILSEQ: int +EUSERS: int +ENOTSOCK: int +EDESTADDRREQ: int +EMSGSIZE: int +EPROTOTYPE: int +ENOPROTOOPT: int +EPROTONOSUPPORT: int +ESOCKTNOSUPPORT: int +ENOTSUP: int +EOPNOTSUPP: int +EPFNOSUPPORT: int +EAFNOSUPPORT: int +EADDRINUSE: int +EADDRNOTAVAIL: int +ENETDOWN: int +ENETUNREACH: int +ENETRESET: int +ECONNABORTED: int +ECONNRESET: int +ENOBUFS: int +EISCONN: int +ENOTCONN: int +ESHUTDOWN: int +ETOOMANYREFS: int +ETIMEDOUT: int +ECONNREFUSED: int +EHOSTDOWN: int +EHOSTUNREACH: int +EALREADY: int +EINPROGRESS: int +ESTALE: int +EDQUOT: int +ECANCELED: int # undocumented +ENOTRECOVERABLE: int # undocumented +EOWNERDEAD: int # undocumented + +if sys.platform != "win32": + ENOTBLK: int + EMULTIHOP: int + # All of the below are undocumented + EAUTH: int + EBADARCH: int + EBADEXEC: int + EBADMACHO: int + EBADRPC: int + EDEVERR: int + EFTYPE: int + ENEEDAUTH: int + ENOATTR: int + ENOPOLICY: int + EPROCLIM: int + EPROCUNAVAIL: int + EPROGMISMATCH: int + EPROGUNAVAIL: int + EPWROFF: int + ERPCMISMATCH: int + ESHLIBVERS: int + + if sys.platform != "darwin" or sys.version_info >= (3, 11): + EQFULL: int # undocumented + +if sys.platform != "darwin": + EDEADLOCK: int + +if sys.platform != "win32" and sys.platform != "darwin": + ECHRNG: int + EL2NSYNC: int + EL3HLT: int + EL3RST: int + ELNRNG: int + EUNATCH: int + ENOCSI: int + EL2HLT: int + EBADE: int + EBADR: int + EXFULL: int + ENOANO: int + EBADRQC: int + EBADSLT: int + EBFONT: int + ENONET: int + ENOPKG: int + EADV: int + ESRMNT: int + ECOMM: int + EDOTDOT: int + ENOTUNIQ: int + EBADFD: int + EREMCHG: int + ELIBACC: int + ELIBBAD: int + ELIBSCN: int + ELIBMAX: int + ELIBEXEC: int + ERESTART: int + ESTRPIPE: int + EUCLEAN: int + ENOTNAM: int + ENAVAIL: int + EISNAM: int + EREMOTEIO: int + # All of the below are undocumented + EKEYEXPIRED: int + EKEYREJECTED: int + EKEYREVOKED: int + EMEDIUMTYPE: int + ENOKEY: int + ENOMEDIUM: int + ERFKILL: int + EL: int + ELOCKUNMAPPED: int + ENOTACTIVE: int + +if sys.platform == "win32": + # All of these are undocumented + WSABASEERR: int + WSAEACCES: int + WSAEADDRINUSE: int + WSAEADDRNOTAVAIL: int + WSAEAFNOSUPPORT: int + WSAEALREADY: int + WSAEBADF: int + WSAECONNABORTED: int + WSAECONNREFUSED: int + WSAECONNRESET: int + WSAEDESTADDRREQ: int + WSAEDISCON: int + WSAEDQUOT: int + WSAEFAULT: int + WSAEHOSTDOWN: int + WSAEHOSTUNREACH: int + WSAEINPROGRESS: int + WSAEINTR: int + WSAEINVAL: int + WSAEISCONN: int + WSAELOOP: 
int + WSAEMFILE: int + WSAEMSGSIZE: int + WSAENAMETOOLONG: int + WSAENETDOWN: int + WSAENETRESET: int + WSAENETUNREACH: int + WSAENOBUFS: int + WSAENOPROTOOPT: int + WSAENOTCONN: int + WSAENOTEMPTY: int + WSAENOTSOCK: int + WSAEOPNOTSUPP: int + WSAEPFNOSUPPORT: int + WSAEPROCLIM: int + WSAEPROTONOSUPPORT: int + WSAEPROTOTYPE: int + WSAEREMOTE: int + WSAESHUTDOWN: int + WSAESOCKTNOSUPPORT: int + WSAESTALE: int + WSAETIMEDOUT: int + WSAETOOMANYREFS: int + WSAEUSERS: int + WSAEWOULDBLOCK: int + WSANOTINITIALISED: int + WSASYSNOTREADY: int + WSAVERNOTSUPPORTED: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/faulthandler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/faulthandler.pyi new file mode 100644 index 00000000..7b42b8ec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/faulthandler.pyi @@ -0,0 +1,13 @@ +import sys +from _typeshed import FileDescriptorLike + +def cancel_dump_traceback_later() -> None: ... +def disable() -> None: ... +def dump_traceback(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: ... +def dump_traceback_later(timeout: float, repeat: bool = ..., file: FileDescriptorLike = ..., exit: bool = ...) -> None: ... +def enable(file: FileDescriptorLike = ..., all_threads: bool = ...) -> None: ... +def is_enabled() -> bool: ... + +if sys.platform != "win32": + def register(signum: int, file: FileDescriptorLike = ..., all_threads: bool = ..., chain: bool = ...) -> None: ... + def unregister(signum: int) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fcntl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fcntl.pyi new file mode 100644 index 00000000..90676e36 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fcntl.pyi @@ -0,0 +1,116 @@ +import sys +from _typeshed import FileDescriptorLike, ReadOnlyBuffer, WriteableBuffer +from typing import Any, overload +from typing_extensions import Literal + +if sys.platform != "win32": + FASYNC: int + FD_CLOEXEC: int + F_DUPFD: int + F_DUPFD_CLOEXEC: int + F_GETFD: int + F_GETFL: int + F_GETLK: int + F_GETOWN: int + F_RDLCK: int + F_SETFD: int + F_SETFL: int + F_SETLK: int + F_SETLKW: int + F_SETOWN: int + F_UNLCK: int + F_WRLCK: int + if sys.platform == "darwin": + F_FULLFSYNC: int + F_NOCACHE: int + if sys.version_info >= (3, 9): + F_GETPATH: int + if sys.platform == "linux": + F_SETLKW64: int + F_SETSIG: int + F_SHLCK: int + F_SETLK64: int + F_SETLEASE: int + F_GETSIG: int + F_NOTIFY: int + F_EXLCK: int + F_GETLEASE: int + F_GETLK64: int + if sys.version_info >= (3, 8): + F_ADD_SEALS: int + F_GET_SEALS: int + F_SEAL_GROW: int + F_SEAL_SEAL: int + F_SEAL_SHRINK: int + F_SEAL_WRITE: int + if sys.version_info >= (3, 9): + F_OFD_GETLK: int + F_OFD_SETLK: int + F_OFD_SETLKW: int + if sys.version_info >= (3, 10): + F_GETPIPE_SZ: int + F_SETPIPE_SZ: int + + DN_ACCESS: int + DN_ATTRIB: int + DN_CREATE: int + DN_DELETE: int + DN_MODIFY: int + DN_MULTISHOT: int + DN_RENAME: int + + LOCK_EX: int + LOCK_NB: int + LOCK_SH: int + LOCK_UN: int + if sys.platform == "linux": + LOCK_MAND: int + LOCK_READ: int + LOCK_RW: int + LOCK_WRITE: int + + # These are highly problematic, they might be present or not, depends on the specific OS. 
+ if sys.platform == "linux": + I_ATMARK: int + I_CANPUT: int + I_CKBAND: int + I_FDINSERT: int + I_FIND: int + I_FLUSH: int + I_FLUSHBAND: int + I_GETBAND: int + I_GETCLTIME: int + I_GETSIG: int + I_GRDOPT: int + I_GWROPT: int + I_LINK: int + I_LIST: int + I_LOOK: int + I_NREAD: int + I_PEEK: int + I_PLINK: int + I_POP: int + I_PUNLINK: int + I_PUSH: int + I_RECVFD: int + I_SENDFD: int + I_SETCLTIME: int + I_SETSIG: int + I_SRDOPT: int + I_STR: int + I_SWROPT: int + I_UNLINK: int + @overload + def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = 0) -> int: ... + @overload + def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: str | ReadOnlyBuffer) -> bytes: ... + @overload + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: int = 0, __mutate_flag: bool = True) -> int: ... + @overload + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[True] = True) -> int: ... + @overload + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[False]) -> bytes: ... + @overload + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: ReadOnlyBuffer, __mutate_flag: bool = True) -> bytes: ... + def flock(__fd: FileDescriptorLike, __operation: int) -> None: ... + def lockf(__fd: FileDescriptorLike, __cmd: int, __len: int = 0, __start: int = 0, __whence: int = 0) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/filecmp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/filecmp.pyi new file mode 100644 index 00000000..008d7a44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/filecmp.pyi @@ -0,0 +1,58 @@ +import sys +from _typeshed import GenericPath, StrOrBytesPath +from collections.abc import Callable, Iterable, Sequence +from typing import Any, AnyStr, Generic +from typing_extensions import Literal + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] + +DEFAULT_IGNORES: list[str] +BUFSIZE: Literal[8192] + +def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: ... +def cmpfiles( + a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = True +) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ... + +class dircmp(Generic[AnyStr]): + def __init__( + self, + a: GenericPath[AnyStr], + b: GenericPath[AnyStr], + ignore: Sequence[AnyStr] | None = None, + hide: Sequence[AnyStr] | None = None, + ) -> None: ... + left: AnyStr + right: AnyStr + hide: Sequence[AnyStr] + ignore: Sequence[AnyStr] + # These properties are created at runtime by __getattr__ + subdirs: dict[AnyStr, dircmp[AnyStr]] + same_files: list[AnyStr] + diff_files: list[AnyStr] + funny_files: list[AnyStr] + common_dirs: list[AnyStr] + common_files: list[AnyStr] + common_funny: list[AnyStr] + common: list[AnyStr] + left_only: list[AnyStr] + right_only: list[AnyStr] + left_list: list[AnyStr] + right_list: list[AnyStr] + def report(self) -> None: ... + def report_partial_closure(self) -> None: ... + def report_full_closure(self) -> None: ... + methodmap: dict[str, Callable[[], None]] + def phase0(self) -> None: ... + def phase1(self) -> None: ... + def phase2(self) -> None: ... + def phase3(self) -> None: ... + def phase4(self) -> None: ... + def phase4_closure(self) -> None: ... 
+ if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def clear_cache() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fileinput.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fileinput.pyi new file mode 100644 index 00000000..e9f3713b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fileinput.pyi @@ -0,0 +1,320 @@ +import sys +from _typeshed import AnyStr_co, StrOrBytesPath +from collections.abc import Callable, Iterable, Iterator +from types import TracebackType +from typing import IO, Any, AnyStr, Generic, Protocol, overload +from typing_extensions import Literal, Self, TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "input", + "close", + "nextfile", + "filename", + "lineno", + "filelineno", + "fileno", + "isfirstline", + "isstdin", + "FileInput", + "hook_compressed", + "hook_encoded", +] + +if sys.version_info >= (3, 11): + _TextMode: TypeAlias = Literal["r"] +else: + _TextMode: TypeAlias = Literal["r", "rU", "U"] + +class _HasReadlineAndFileno(Protocol[AnyStr_co]): + def readline(self) -> AnyStr_co: ... + def fileno(self) -> int: ... + +if sys.version_info >= (3, 10): + # encoding and errors are added + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> FileInput[str]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + encoding: None = None, + errors: None = None, + ) -> FileInput[bytes]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> FileInput[Any]: ... + +elif sys.version_info >= (3, 8): + # bufsize is dropped and mode and openhook become keyword-only + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + ) -> FileInput[str]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + ) -> FileInput[bytes]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + ) -> FileInput[Any]: ... 
+ +else: + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + ) -> FileInput[str]: ... + # Because mode isn't keyword-only here yet, we need two overloads each for + # the bytes case and the fallback case. + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + ) -> FileInput[bytes]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None, + inplace: bool, + backup: str, + bufsize: int, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + ) -> FileInput[bytes]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + ) -> FileInput[Any]: ... + @overload + def input( + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None, + inplace: bool, + backup: str, + bufsize: int, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + ) -> FileInput[Any]: ... + +def close() -> None: ... +def nextfile() -> None: ... +def filename() -> str: ... +def lineno() -> int: ... +def filelineno() -> int: ... +def fileno() -> int: ... +def isfirstline() -> bool: ... +def isstdin() -> bool: ... + +class FileInput(Iterator[AnyStr], Generic[AnyStr]): + if sys.version_info >= (3, 10): + # encoding and errors are added + @overload + def __init__( + self: FileInput[str], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + encoding: None = None, + errors: None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> None: ... + + elif sys.version_info >= (3, 8): + # bufsize is dropped and mode and openhook become keyword-only + @overload + def __init__( + self: FileInput[str], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + ) -> None: ... + + else: + @overload + def __init__( + self: FileInput[str], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + ) -> None: ... + # Because mode isn't keyword-only here yet, we need two overloads each for + # the bytes case and the fallback case. + @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + *, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[bytes], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None, + inplace: bool, + backup: str, + bufsize: int, + mode: Literal["rb"], + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + *, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + ) -> None: ... + @overload + def __init__( + self: FileInput[Any], + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None, + inplace: bool, + backup: str, + bufsize: int, + mode: str, + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + ) -> None: ... + + def __del__(self) -> None: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> AnyStr: ... + if sys.version_info < (3, 11): + def __getitem__(self, i: int) -> AnyStr: ... + + def nextfile(self) -> None: ... + def readline(self) -> AnyStr: ... + def filename(self) -> str: ... + def lineno(self) -> int: ... + def filelineno(self) -> int: ... + def fileno(self) -> int: ... + def isfirstline(self) -> bool: ... + def isstdin(self) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +if sys.version_info >= (3, 10): + def hook_compressed( + filename: StrOrBytesPath, mode: str, *, encoding: str | None = None, errors: str | None = None + ) -> IO[Any]: ... + +else: + def hook_compressed(filename: StrOrBytesPath, mode: str) -> IO[Any]: ... + +def hook_encoded(encoding: str, errors: str | None = None) -> Callable[[StrOrBytesPath, str], IO[Any]]: ... 
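The input() and FileInput overloads above are easier to follow with a concrete run. A minimal sketch, assuming a couple of throwaway temporary files (names are generated and not significant), showing how mode selects between the str and bytes variants:

import fileinput
import os
import tempfile

# Create two small scratch files to iterate over.
paths = []
for i in range(2):
    fd, path = tempfile.mkstemp(suffix=".txt")
    with os.fdopen(fd, "w") as f:
        f.write(f"first line of file {i}\nsecond line of file {i}\n")
    paths.append(path)

# Default text mode ("r") matches the FileInput[str] overload: lines are str.
with fileinput.input(files=paths) as fi:
    for line in fi:
        print(fi.filename(), fi.filelineno(), line.rstrip())

# mode="rb" selects the FileInput[bytes] overload: lines are bytes.
with fileinput.input(files=paths, mode="rb") as fi:
    for raw in fi:
        assert isinstance(raw, bytes)

for path in paths:
    os.remove(path)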
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fnmatch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fnmatch.pyi new file mode 100644 index 00000000..7051c999 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fnmatch.pyi @@ -0,0 +1,9 @@ +from collections.abc import Iterable +from typing import AnyStr + +__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] + +def fnmatch(name: AnyStr, pat: AnyStr) -> bool: ... +def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: ... +def filter(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... +def translate(pat: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/formatter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/formatter.pyi new file mode 100644 index 00000000..05c3c8b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/formatter.pyi @@ -0,0 +1,88 @@ +from collections.abc import Iterable +from typing import IO, Any +from typing_extensions import TypeAlias + +AS_IS: None +_FontType: TypeAlias = tuple[str, bool, bool, bool] +_StylesType: TypeAlias = tuple[Any, ...] + +class NullFormatter: + writer: NullWriter | None + def __init__(self, writer: NullWriter | None = None) -> None: ... + def end_paragraph(self, blankline: int) -> None: ... + def add_line_break(self) -> None: ... + def add_hor_rule(self, *args: Any, **kw: Any) -> None: ... + def add_label_data(self, format: str, counter: int, blankline: int | None = None) -> None: ... + def add_flowing_data(self, data: str) -> None: ... + def add_literal_data(self, data: str) -> None: ... + def flush_softspace(self) -> None: ... + def push_alignment(self, align: str | None) -> None: ... + def pop_alignment(self) -> None: ... + def push_font(self, x: _FontType) -> None: ... + def pop_font(self) -> None: ... + def push_margin(self, margin: int) -> None: ... + def pop_margin(self) -> None: ... + def set_spacing(self, spacing: str | None) -> None: ... + def push_style(self, *styles: _StylesType) -> None: ... + def pop_style(self, n: int = 1) -> None: ... + def assert_line_data(self, flag: int = 1) -> None: ... + +class AbstractFormatter: + writer: NullWriter + align: str | None + align_stack: list[str | None] + font_stack: list[_FontType] + margin_stack: list[int] + spacing: str | None + style_stack: Any + nospace: int + softspace: int + para_end: int + parskip: int + hard_break: int + have_label: int + def __init__(self, writer: NullWriter) -> None: ... + def end_paragraph(self, blankline: int) -> None: ... + def add_line_break(self) -> None: ... + def add_hor_rule(self, *args: Any, **kw: Any) -> None: ... + def add_label_data(self, format: str, counter: int, blankline: int | None = None) -> None: ... + def format_counter(self, format: Iterable[str], counter: int) -> str: ... + def format_letter(self, case: str, counter: int) -> str: ... + def format_roman(self, case: str, counter: int) -> str: ... + def add_flowing_data(self, data: str) -> None: ... + def add_literal_data(self, data: str) -> None: ... + def flush_softspace(self) -> None: ... + def push_alignment(self, align: str | None) -> None: ... + def pop_alignment(self) -> None: ... + def push_font(self, font: _FontType) -> None: ... + def pop_font(self) -> None: ... + def push_margin(self, margin: int) -> None: ... + def pop_margin(self) -> None: ... 
+ def set_spacing(self, spacing: str | None) -> None: ... + def push_style(self, *styles: _StylesType) -> None: ... + def pop_style(self, n: int = 1) -> None: ... + def assert_line_data(self, flag: int = 1) -> None: ... + +class NullWriter: + def flush(self) -> None: ... + def new_alignment(self, align: str | None) -> None: ... + def new_font(self, font: _FontType) -> None: ... + def new_margin(self, margin: int, level: int) -> None: ... + def new_spacing(self, spacing: str | None) -> None: ... + def new_styles(self, styles: tuple[Any, ...]) -> None: ... + def send_paragraph(self, blankline: int) -> None: ... + def send_line_break(self) -> None: ... + def send_hor_rule(self, *args: Any, **kw: Any) -> None: ... + def send_label_data(self, data: str) -> None: ... + def send_flowing_data(self, data: str) -> None: ... + def send_literal_data(self, data: str) -> None: ... + +class AbstractWriter(NullWriter): ... + +class DumbWriter(NullWriter): + file: IO[str] + maxcol: int + def __init__(self, file: IO[str] | None = None, maxcol: int = 72) -> None: ... + def reset(self) -> None: ... + +def test(file: str | None = None) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fractions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fractions.pyi new file mode 100644 index 00000000..97cefc91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/fractions.pyi @@ -0,0 +1,151 @@ +import sys +from collections.abc import Callable +from decimal import Decimal +from numbers import Integral, Rational, Real +from typing import Any, overload +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias + +_ComparableNum: TypeAlias = int | float | Decimal | Real + +if sys.version_info >= (3, 9): + __all__ = ["Fraction"] +else: + __all__ = ["Fraction", "gcd"] + @overload + def gcd(a: int, b: int) -> int: ... + @overload + def gcd(a: Integral, b: int) -> Integral: ... + @overload + def gcd(a: int, b: Integral) -> Integral: ... + @overload + def gcd(a: Integral, b: Integral) -> Integral: ... + +class Fraction(Rational): + @overload + def __new__( + cls, numerator: int | Rational = 0, denominator: int | Rational | None = None, *, _normalize: bool = True + ) -> Self: ... + @overload + def __new__(cls, __value: float | Decimal | str, *, _normalize: bool = True) -> Self: ... + @classmethod + def from_float(cls, f: float) -> Self: ... + @classmethod + def from_decimal(cls, dec: Decimal) -> Self: ... + def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: ... + if sys.version_info >= (3, 8): + def as_integer_ratio(self) -> tuple[int, int]: ... + + @property + def numerator(a) -> int: ... + @property + def denominator(a) -> int: ... + @overload + def __add__(a, b: int | Fraction) -> Fraction: ... + @overload + def __add__(a, b: float) -> float: ... + @overload + def __add__(a, b: complex) -> complex: ... + @overload + def __radd__(b, a: int | Fraction) -> Fraction: ... + @overload + def __radd__(b, a: float) -> float: ... + @overload + def __radd__(b, a: complex) -> complex: ... + @overload + def __sub__(a, b: int | Fraction) -> Fraction: ... + @overload + def __sub__(a, b: float) -> float: ... + @overload + def __sub__(a, b: complex) -> complex: ... + @overload + def __rsub__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rsub__(b, a: float) -> float: ... + @overload + def __rsub__(b, a: complex) -> complex: ... 
+ @overload + def __mul__(a, b: int | Fraction) -> Fraction: ... + @overload + def __mul__(a, b: float) -> float: ... + @overload + def __mul__(a, b: complex) -> complex: ... + @overload + def __rmul__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rmul__(b, a: float) -> float: ... + @overload + def __rmul__(b, a: complex) -> complex: ... + @overload + def __truediv__(a, b: int | Fraction) -> Fraction: ... + @overload + def __truediv__(a, b: float) -> float: ... + @overload + def __truediv__(a, b: complex) -> complex: ... + @overload + def __rtruediv__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rtruediv__(b, a: float) -> float: ... + @overload + def __rtruediv__(b, a: complex) -> complex: ... + @overload + def __floordiv__(a, b: int | Fraction) -> int: ... + @overload + def __floordiv__(a, b: float) -> float: ... + @overload + def __rfloordiv__(b, a: int | Fraction) -> int: ... + @overload + def __rfloordiv__(b, a: float) -> float: ... + @overload + def __mod__(a, b: int | Fraction) -> Fraction: ... + @overload + def __mod__(a, b: float) -> float: ... + @overload + def __rmod__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rmod__(b, a: float) -> float: ... + @overload + def __divmod__(a, b: int | Fraction) -> tuple[int, Fraction]: ... + @overload + def __divmod__(a, b: float) -> tuple[float, Fraction]: ... + @overload + def __rdivmod__(b, a: int | Fraction) -> tuple[int, Fraction]: ... + @overload + def __rdivmod__(b, a: float) -> tuple[float, Fraction]: ... + @overload + def __pow__(a, b: int) -> Fraction: ... + @overload + def __pow__(a, b: float | Fraction) -> float: ... + @overload + def __pow__(a, b: complex) -> complex: ... + @overload + def __rpow__(b, a: float | Fraction) -> float: ... + @overload + def __rpow__(b, a: complex) -> complex: ... + def __pos__(a) -> Fraction: ... + def __neg__(a) -> Fraction: ... + def __abs__(a) -> Fraction: ... + def __trunc__(a) -> int: ... + def __floor__(a) -> int: ... + def __ceil__(a) -> int: ... + @overload + def __round__(self, ndigits: None = None) -> int: ... + @overload + def __round__(self, ndigits: int) -> Fraction: ... + def __hash__(self) -> int: ... + def __eq__(a, b: object) -> bool: ... + def __lt__(a, b: _ComparableNum) -> bool: ... + def __gt__(a, b: _ComparableNum) -> bool: ... + def __le__(a, b: _ComparableNum) -> bool: ... + def __ge__(a, b: _ComparableNum) -> bool: ... + def __bool__(a) -> bool: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... + if sys.version_info >= (3, 11): + def __int__(a, _index: Callable[[SupportsIndex], int] = ...) -> int: ... + # Not actually defined within fractions.py, but provides more useful + # overrides + @property + def real(self) -> Fraction: ... + @property + def imag(self) -> Literal[0]: ... + def conjugate(self) -> Fraction: ... 
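As a quick illustration of the arithmetic overloads above (values chosen purely for the example), Fraction stays exact when combined with ints or other Fractions and falls back to float once a float is involved:

from fractions import Fraction

a = Fraction(3, 4)
b = Fraction("1/3")          # string form accepted by the __new__ overloads
c = Fraction(0.5)            # exact value of the binary float 0.5

print(a + b)                 # 13/12  -> still a Fraction
print(a + 0.25)              # 1.0    -> float once a float is involved
print(a ** 2)                # 9/16   -> Fraction for an int exponent
print(a ** 0.5)              # ~0.866 -> float for a non-int exponent
print(divmod(a, b))          # (2, Fraction(1, 12)), matching __divmod__ above

# limit_denominator() finds the closest Fraction with a bounded denominator.
print(Fraction(3.141592653589793).limit_denominator(1000))   # 355/113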
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ftplib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ftplib.pyi new file mode 100644 index 00000000..76d9dc02 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ftplib.pyi @@ -0,0 +1,165 @@ +import sys +from _typeshed import SupportsRead, SupportsReadline +from collections.abc import Callable, Iterable, Iterator +from socket import socket +from ssl import SSLContext +from types import TracebackType +from typing import Any, TextIO +from typing_extensions import Literal, Self + +__all__ = ["FTP", "error_reply", "error_temp", "error_perm", "error_proto", "all_errors", "FTP_TLS"] + +MSG_OOB: Literal[1] +FTP_PORT: Literal[21] +MAXLINE: Literal[8192] +CRLF: Literal["\r\n"] +B_CRLF: Literal[b"\r\n"] + +class Error(Exception): ... +class error_reply(Error): ... +class error_temp(Error): ... +class error_perm(Error): ... +class error_proto(Error): ... + +all_errors: tuple[type[Exception], ...] + +class FTP: + debugging: int + host: str + port: int + maxline: int + sock: socket | None + welcome: str | None + passiveserver: int + timeout: int + af: int + lastresp: str + file: TextIO | None + encoding: str + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + source_address: tuple[str, int] | None + if sys.version_info >= (3, 9): + def __init__( + self, + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + timeout: float = ..., + source_address: tuple[str, int] | None = None, + *, + encoding: str = "utf-8", + ) -> None: ... + else: + def __init__( + self, + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + timeout: float = ..., + source_address: tuple[str, int] | None = None, + ) -> None: ... + + def connect( + self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None + ) -> str: ... + def getwelcome(self) -> str: ... + def set_debuglevel(self, level: int) -> None: ... + def debug(self, level: int) -> None: ... + def set_pasv(self, val: bool | Literal[0, 1]) -> None: ... + def sanitize(self, s: str) -> str: ... + def putline(self, line: str) -> None: ... + def putcmd(self, line: str) -> None: ... + def getline(self) -> str: ... + def getmultiline(self) -> str: ... + def getresp(self) -> str: ... + def voidresp(self) -> str: ... + def abort(self) -> str: ... + def sendcmd(self, cmd: str) -> str: ... + def voidcmd(self, cmd: str) -> str: ... + def sendport(self, host: str, port: int) -> str: ... + def sendeprt(self, host: str, port: int) -> str: ... + def makeport(self) -> socket: ... + def makepasv(self) -> tuple[str, int]: ... + def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ... + # In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers. + def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int]: ... + def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ... + def retrbinary( + self, cmd: str, callback: Callable[[bytes], object], blocksize: int = 8192, rest: int | str | None = None + ) -> str: ... 
+ def storbinary( + self, + cmd: str, + fp: SupportsRead[bytes], + blocksize: int = 8192, + callback: Callable[[bytes], object] | None = None, + rest: int | str | None = None, + ) -> str: ... + def retrlines(self, cmd: str, callback: Callable[[str], object] | None = None) -> str: ... + def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = None) -> str: ... + def acct(self, password: str) -> str: ... + def nlst(self, *args: str) -> list[str]: ... + # Technically only the last arg can be a Callable but ... + def dir(self, *args: str | Callable[[str], object]) -> None: ... + def mlsd(self, path: str = "", facts: Iterable[str] = ...) -> Iterator[tuple[str, dict[str, str]]]: ... + def rename(self, fromname: str, toname: str) -> str: ... + def delete(self, filename: str) -> str: ... + def cwd(self, dirname: str) -> str: ... + def size(self, filename: str) -> int | None: ... + def mkd(self, dirname: str) -> str: ... + def rmd(self, dirname: str) -> str: ... + def pwd(self) -> str: ... + def quit(self) -> str: ... + def close(self) -> None: ... + +class FTP_TLS(FTP): + if sys.version_info >= (3, 9): + def __init__( + self, + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + keyfile: str | None = None, + certfile: str | None = None, + context: SSLContext | None = None, + timeout: float = ..., + source_address: tuple[str, int] | None = None, + *, + encoding: str = "utf-8", + ) -> None: ... + else: + def __init__( + self, + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + keyfile: str | None = None, + certfile: str | None = None, + context: SSLContext | None = None, + timeout: float = ..., + source_address: tuple[str, int] | None = None, + ) -> None: ... + ssl_version: int + keyfile: str | None + certfile: str | None + context: SSLContext + def login(self, user: str = "", passwd: str = "", acct: str = "", secure: bool = True) -> str: ... + def auth(self) -> str: ... + def prot_p(self) -> str: ... + def prot_c(self) -> str: ... + def ccc(self) -> str: ... + +def parse150(resp: str) -> int | None: ... # undocumented +def parse227(resp: str) -> tuple[str, int]: ... # undocumented +def parse229(resp: str, peer: Any) -> tuple[str, int]: ... # undocumented +def parse257(resp: str) -> str: ... # undocumented +def ftpcp( + source: FTP, sourcename: str, target: FTP, targetname: str = "", type: Literal["A", "I"] = "I" +) -> None: ... 
# undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/functools.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/functools.pyi new file mode 100644 index 00000000..fe36a134 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/functools.pyi @@ -0,0 +1,168 @@ +import sys +import types +from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems +from collections.abc import Callable, Hashable, Iterable, Sequence, Sized +from typing import Any, Generic, NamedTuple, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "update_wrapper", + "wraps", + "WRAPPER_ASSIGNMENTS", + "WRAPPER_UPDATES", + "total_ordering", + "cmp_to_key", + "lru_cache", + "reduce", + "partial", + "partialmethod", + "singledispatch", +] + +if sys.version_info >= (3, 8): + __all__ += ["cached_property", "singledispatchmethod"] + +if sys.version_info >= (3, 9): + __all__ += ["cache"] + +_AnyCallable: TypeAlias = Callable[..., object] + +_T = TypeVar("_T") +_S = TypeVar("_S") + +@overload +def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... +@overload +def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ... + +class _CacheInfo(NamedTuple): + hits: int + misses: int + maxsize: int | None + currsize: int + +@final +class _lru_cache_wrapper(Generic[_T]): + __wrapped__: Callable[..., _T] + def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: ... + def cache_info(self) -> _CacheInfo: ... + def cache_clear(self) -> None: ... + def __copy__(self) -> _lru_cache_wrapper[_T]: ... + def __deepcopy__(self, __memo: Any) -> _lru_cache_wrapper[_T]: ... + +if sys.version_info >= (3, 8): + @overload + def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... + @overload + def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wrapper[_T]: ... + +else: + def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... + +WRAPPER_ASSIGNMENTS: tuple[ + Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"] +] +WRAPPER_UPDATES: tuple[Literal["__dict__"]] + +def update_wrapper(wrapper: _T, wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> _T: ... +def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> IdentityFunction: ... +def total_ordering(cls: type[_T]) -> type[_T]: ... +def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... + +class partial(Generic[_T]): + @property + def func(self) -> Callable[..., _T]: ... + @property + def args(self) -> tuple[Any, ...]: ... + @property + def keywords(self) -> dict[str, Any]: ... + def __new__(cls, __func: Callable[..., _T], *args: Any, **kwargs: Any) -> Self: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# With protocols, this could change into a generic protocol that defines __get__ and returns _T +_Descriptor: TypeAlias = Any + +class partialmethod(Generic[_T]): + func: Callable[..., _T] | _Descriptor + args: tuple[Any, ...] 
+ keywords: dict[str, Any] + @overload + def __init__(self, __func: Callable[..., _T], *args: Any, **keywords: Any) -> None: ... + @overload + def __init__(self, __func: _Descriptor, *args: Any, **keywords: Any) -> None: ... + if sys.version_info >= (3, 8): + def __get__(self, obj: Any, cls: type[Any] | None = None) -> Callable[..., _T]: ... + else: + def __get__(self, obj: Any, cls: type[Any] | None) -> Callable[..., _T]: ... + + @property + def __isabstractmethod__(self) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class _SingleDispatchCallable(Generic[_T]): + registry: types.MappingProxyType[Any, Callable[..., _T]] + def dispatch(self, cls: Any) -> Callable[..., _T]: ... + # @fun.register(complex) + # def _(arg, verbose=False): ... + @overload + def register(self, cls: type[Any], func: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + # @fun.register + # def _(arg: int, verbose=False): + @overload + def register(self, cls: Callable[..., _T], func: None = None) -> Callable[..., _T]: ... + # fun.register(int, lambda x: x) + @overload + def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... + def _clear_cache(self) -> None: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... + +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... + +if sys.version_info >= (3, 8): + class singledispatchmethod(Generic[_T]): + dispatcher: _SingleDispatchCallable[_T] + func: Callable[..., _T] + def __init__(self, func: Callable[..., _T]) -> None: ... + @property + def __isabstractmethod__(self) -> bool: ... + @overload + def register(self, cls: type[Any], method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + @overload + def register(self, cls: Callable[..., _T], method: None = None) -> Callable[..., _T]: ... + @overload + def register(self, cls: type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... + def __get__(self, obj: _S, cls: type[_S] | None = None) -> Callable[..., _T]: ... + + class cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + def __init__(self, func: Callable[[Any], _T]) -> None: ... + @overload + def __get__(self, instance: None, owner: type[Any] | None = None) -> cached_property[_T]: ... + @overload + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... + def __set_name__(self, owner: type[Any], name: str) -> None: ... + # __set__ is not defined at runtime, but @cached_property is designed to be settable + def __set__(self, instance: object, value: _T) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +if sys.version_info >= (3, 9): + def cache(__user_function: Callable[..., _T]) -> _lru_cache_wrapper[_T]: ... + +def _make_key( + args: tuple[Hashable, ...], + kwds: SupportsItems[Any, Any], + typed: bool, + kwd_mark: tuple[object, ...] = ..., + fasttypes: set[type] = ..., + tuple: type = ..., + type: Any = ..., + len: Callable[[Sized], int] = ..., +) -> Hashable: ... 
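A short sketch of how the main objects stubbed above behave at runtime (the function names here are invented for the example):

import functools

@functools.lru_cache(maxsize=128)
def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(30))
print(fib.cache_info())        # hits/misses come from the _lru_cache_wrapper API

@functools.singledispatch
def describe(value: object) -> str:
    return f"object: {value!r}"

@describe.register             # bare-decorator form of register(), typed above
def _(value: int) -> str:
    return f"int: {value}"

print(describe(3), describe("x"))

double = functools.partial(lambda x, y: x * y, 2)
print(double(21))              # partial pins the first positional argument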
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/gc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/gc.pyi new file mode 100644 index 00000000..27cee726 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/gc.pyi @@ -0,0 +1,43 @@ +import sys +from collections.abc import Callable +from typing import Any +from typing_extensions import Literal, TypeAlias + +DEBUG_COLLECTABLE: Literal[2] +DEBUG_LEAK: Literal[38] +DEBUG_SAVEALL: Literal[32] +DEBUG_STATS: Literal[1] +DEBUG_UNCOLLECTABLE: Literal[4] + +_CallbackType: TypeAlias = Callable[[Literal["start", "stop"], dict[str, int]], object] + +callbacks: list[_CallbackType] +garbage: list[Any] + +def collect(generation: int = 2) -> int: ... +def disable() -> None: ... +def enable() -> None: ... +def get_count() -> tuple[int, int, int]: ... +def get_debug() -> int: ... + +if sys.version_info >= (3, 8): + def get_objects(generation: int | None = None) -> list[Any]: ... + +else: + def get_objects() -> list[Any]: ... + +def freeze() -> None: ... +def unfreeze() -> None: ... +def get_freeze_count() -> int: ... +def get_referents(*objs: Any) -> list[Any]: ... +def get_referrers(*objs: Any) -> list[Any]: ... +def get_stats() -> list[dict[str, Any]]: ... +def get_threshold() -> tuple[int, int, int]: ... +def is_tracked(__obj: Any) -> bool: ... + +if sys.version_info >= (3, 9): + def is_finalized(__obj: Any) -> bool: ... + +def isenabled() -> bool: ... +def set_debug(__flags: int) -> None: ... +def set_threshold(threshold0: int, threshold1: int = ..., threshold2: int = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/genericpath.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/genericpath.pyi new file mode 100644 index 00000000..46426b63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/genericpath.pyi @@ -0,0 +1,46 @@ +import os +from _typeshed import BytesPath, FileDescriptorOrPath, StrPath, SupportsRichComparisonT +from collections.abc import Sequence +from typing import overload +from typing_extensions import Literal, LiteralString + +__all__ = [ + "commonprefix", + "exists", + "getatime", + "getctime", + "getmtime", + "getsize", + "isdir", + "isfile", + "samefile", + "sameopenfile", + "samestat", +] + +# All overloads can return empty string. Ideally, Literal[""] would be a valid +# Iterable[T], so that list[T] | Literal[""] could be used as a return +# type. But because this only works when T is str, we need Sequence[T] instead. +@overload +def commonprefix(m: Sequence[LiteralString]) -> LiteralString: ... +@overload +def commonprefix(m: Sequence[StrPath]) -> str: ... +@overload +def commonprefix(m: Sequence[BytesPath]) -> bytes | Literal[""]: ... +@overload +def commonprefix(m: Sequence[list[SupportsRichComparisonT]]) -> Sequence[SupportsRichComparisonT]: ... +@overload +def commonprefix(m: Sequence[tuple[SupportsRichComparisonT, ...]]) -> Sequence[SupportsRichComparisonT]: ... +def exists(path: FileDescriptorOrPath) -> bool: ... +def getsize(filename: FileDescriptorOrPath) -> int: ... +def isfile(path: FileDescriptorOrPath) -> bool: ... +def isdir(s: FileDescriptorOrPath) -> bool: ... + +# These return float if os.stat_float_times() == True, +# but int is a subclass of float. +def getatime(filename: FileDescriptorOrPath) -> float: ... +def getmtime(filename: FileDescriptorOrPath) -> float: ... 
+def getctime(filename: FileDescriptorOrPath) -> float: ... +def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: ... +def sameopenfile(fp1: int, fp2: int) -> bool: ... +def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/getopt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/getopt.pyi new file mode 100644 index 00000000..14d63dbd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/getopt.pyi @@ -0,0 +1,11 @@ +__all__ = ["GetoptError", "error", "getopt", "gnu_getopt"] + +def getopt(args: list[str], shortopts: str, longopts: list[str] = ...) -> tuple[list[tuple[str, str]], list[str]]: ... +def gnu_getopt(args: list[str], shortopts: str, longopts: list[str] = ...) -> tuple[list[tuple[str, str]], list[str]]: ... + +class GetoptError(Exception): + msg: str + opt: str + def __init__(self, msg: str, opt: str = "") -> None: ... + +error = GetoptError diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/getpass.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/getpass.pyi new file mode 100644 index 00000000..6104e0de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/getpass.pyi @@ -0,0 +1,8 @@ +from typing import TextIO + +__all__ = ["getpass", "getuser", "GetPassWarning"] + +def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: ... +def getuser() -> str: ... + +class GetPassWarning(UserWarning): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/gettext.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/gettext.pyi new file mode 100644 index 00000000..5d98227e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/gettext.pyi @@ -0,0 +1,173 @@ +import io +import sys +from _typeshed import StrPath +from collections.abc import Callable, Container, Iterable, Sequence +from typing import Any, Protocol, TypeVar, overload +from typing_extensions import Final, Literal + +__all__ = [ + "NullTranslations", + "GNUTranslations", + "Catalog", + "find", + "translation", + "install", + "textdomain", + "bindtextdomain", + "dgettext", + "dngettext", + "gettext", + "ngettext", +] + +if sys.version_info < (3, 11): + __all__ += ["bind_textdomain_codeset", "ldgettext", "ldngettext", "lgettext", "lngettext"] + +if sys.version_info >= (3, 8): + __all__ += ["dnpgettext", "dpgettext", "npgettext", "pgettext"] + +class _TranslationsReader(Protocol): + def read(self) -> bytes: ... + # optional: + # name: str + +class NullTranslations: + def __init__(self, fp: _TranslationsReader | None = None) -> None: ... + def _parse(self, fp: _TranslationsReader) -> None: ... + def add_fallback(self, fallback: NullTranslations) -> None: ... + def gettext(self, message: str) -> str: ... + def ngettext(self, msgid1: str, msgid2: str, n: int) -> str: ... + if sys.version_info >= (3, 8): + def pgettext(self, context: str, message: str) -> str: ... + def npgettext(self, context: str, msgid1: str, msgid2: str, n: int) -> str: ... + + def info(self) -> dict[str, str]: ... + def charset(self) -> str | None: ... + if sys.version_info < (3, 11): + def output_charset(self) -> str | None: ... + def set_output_charset(self, charset: str) -> None: ... + def lgettext(self, message: str) -> str: ... 
+ def lngettext(self, msgid1: str, msgid2: str, n: int) -> str: ... + + def install(self, names: Container[str] | None = None) -> None: ... + +class GNUTranslations(NullTranslations): + LE_MAGIC: Final[int] + BE_MAGIC: Final[int] + CONTEXT: str + VERSIONS: Sequence[int] + +@overload # ignores incompatible overloads +def find( # type: ignore[misc] + domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, all: Literal[False] = False +) -> str | None: ... +@overload +def find( + domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, *, all: Literal[True] +) -> list[str]: ... +@overload +def find(domain: str, localedir: StrPath | None, languages: Iterable[str] | None, all: Literal[True]) -> list[str]: ... +@overload +def find(domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, all: bool = False) -> Any: ... + +_NullTranslationsT = TypeVar("_NullTranslationsT", bound=NullTranslations) + +if sys.version_info >= (3, 11): + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: None = None, + fallback: Literal[False] = False, + ) -> GNUTranslations: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + *, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = False, + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None, + languages: Iterable[str] | None, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = False, + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: Callable[[io.BufferedReader], NullTranslations] | None = None, + fallback: bool = False, + ) -> NullTranslations: ... + def install(domain: str, localedir: StrPath | None = None, *, names: Container[str] | None = None) -> None: ... + +else: + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: None = None, + fallback: Literal[False] = False, + codeset: str | None = None, + ) -> GNUTranslations: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + *, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = False, + codeset: str | None = None, + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None, + languages: Iterable[str] | None, + class_: Callable[[io.BufferedReader], _NullTranslationsT], + fallback: Literal[False] = False, + codeset: str | None = None, + ) -> _NullTranslationsT: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: Callable[[io.BufferedReader], NullTranslations] | None = None, + fallback: bool = False, + codeset: str | None = None, + ) -> NullTranslations: ... + def install( + domain: str, localedir: StrPath | None = None, codeset: str | None = None, names: Container[str] | None = None + ) -> None: ... + +def textdomain(domain: str | None = None) -> str: ... +def bindtextdomain(domain: str, localedir: StrPath | None = None) -> str: ... +def dgettext(domain: str, message: str) -> str: ... 
+def dngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... +def gettext(message: str) -> str: ... +def ngettext(msgid1: str, msgid2: str, n: int) -> str: ... + +if sys.version_info >= (3, 8): + def pgettext(context: str, message: str) -> str: ... + def dpgettext(domain: str, context: str, message: str) -> str: ... + def npgettext(context: str, msgid1: str, msgid2: str, n: int) -> str: ... + def dnpgettext(domain: str, context: str, msgid1: str, msgid2: str, n: int) -> str: ... + +if sys.version_info < (3, 11): + def lgettext(message: str) -> str: ... + def ldgettext(domain: str, message: str) -> str: ... + def lngettext(msgid1: str, msgid2: str, n: int) -> str: ... + def ldngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... + def bind_textdomain_codeset(domain: str, codeset: str | None = None) -> str: ... + +Catalog = translation diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/glob.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/glob.pyi new file mode 100644 index 00000000..914ccc12 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/glob.pyi @@ -0,0 +1,42 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Iterator +from typing import AnyStr + +__all__ = ["escape", "glob", "iglob"] + +def glob0(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... +def glob1(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... + +if sys.version_info >= (3, 11): + def glob( + pathname: AnyStr, + *, + root_dir: StrOrBytesPath | None = None, + dir_fd: int | None = None, + recursive: bool = False, + include_hidden: bool = False, + ) -> list[AnyStr]: ... + def iglob( + pathname: AnyStr, + *, + root_dir: StrOrBytesPath | None = None, + dir_fd: int | None = None, + recursive: bool = False, + include_hidden: bool = False, + ) -> Iterator[AnyStr]: ... + +elif sys.version_info >= (3, 10): + def glob( + pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False + ) -> list[AnyStr]: ... + def iglob( + pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False + ) -> Iterator[AnyStr]: ... + +else: + def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: ... + def iglob(pathname: AnyStr, *, recursive: bool = False) -> Iterator[AnyStr]: ... + +def escape(pathname: AnyStr) -> AnyStr: ... +def has_magic(s: str | bytes) -> bool: ... # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/graphlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/graphlib.pyi new file mode 100644 index 00000000..c02d447a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/graphlib.pyi @@ -0,0 +1,28 @@ +import sys +from _typeshed import SupportsItems +from collections.abc import Iterable +from typing import Any, Generic, TypeVar, overload + +__all__ = ["TopologicalSorter", "CycleError"] + +_T = TypeVar("_T") + +if sys.version_info >= (3, 11): + from types import GenericAlias + +class TopologicalSorter(Generic[_T]): + @overload + def __init__(self, graph: None = None) -> None: ... + @overload + def __init__(self, graph: SupportsItems[_T, Iterable[_T]]) -> None: ... + def add(self, node: _T, *predecessors: _T) -> None: ... + def prepare(self) -> None: ... + def is_active(self) -> bool: ... + def __bool__(self) -> bool: ... 
+ def done(self, *nodes: _T) -> None: ... + def get_ready(self) -> tuple[_T, ...]: ... + def static_order(self) -> Iterable[_T]: ... + if sys.version_info >= (3, 11): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class CycleError(ValueError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/grp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/grp.pyi new file mode 100644 index 00000000..4b66b84b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/grp.pyi @@ -0,0 +1,22 @@ +import sys +from _typeshed import structseq +from typing import Any +from typing_extensions import Final, final + +if sys.platform != "win32": + @final + class struct_group(structseq[Any], tuple[str, str | None, int, list[str]]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("gr_name", "gr_passwd", "gr_gid", "gr_mem") + @property + def gr_name(self) -> str: ... + @property + def gr_passwd(self) -> str | None: ... + @property + def gr_gid(self) -> int: ... + @property + def gr_mem(self) -> list[str]: ... + + def getgrall() -> list[struct_group]: ... + def getgrgid(id: int) -> struct_group: ... + def getgrnam(name: str) -> struct_group: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/gzip.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/gzip.pyi new file mode 100644 index 00000000..6a794f38 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/gzip.pyi @@ -0,0 +1,167 @@ +import _compression +import sys +import zlib +from _typeshed import ReadableBuffer, StrOrBytesPath, _BufferWithLen +from io import FileIO +from typing import Protocol, TextIO, overload +from typing_extensions import Literal, TypeAlias + +if sys.version_info >= (3, 8): + __all__ = ["BadGzipFile", "GzipFile", "open", "compress", "decompress"] +else: + __all__ = ["GzipFile", "open", "compress", "decompress"] + +_ReadBinaryMode: TypeAlias = Literal["r", "rb"] +_WriteBinaryMode: TypeAlias = Literal["a", "ab", "w", "wb", "x", "xb"] +_OpenTextMode: TypeAlias = Literal["rt", "at", "wt", "xt"] + +READ: Literal[1] # undocumented +WRITE: Literal[2] # undocumented + +FTEXT: int # actually Literal[1] # undocumented +FHCRC: int # actually Literal[2] # undocumented +FEXTRA: int # actually Literal[4] # undocumented +FNAME: int # actually Literal[8] # undocumented +FCOMMENT: int # actually Literal[16] # undocumented + +class _ReadableFileobj(Protocol): + def read(self, __n: int) -> bytes: ... + def seek(self, __n: int) -> object: ... + # The following attributes and methods are optional: + # name: str + # mode: str + # def fileno() -> int: ... + +class _WritableFileobj(Protocol): + def write(self, __b: bytes) -> object: ... + def flush(self) -> object: ... + # The following attributes and methods are optional: + # name: str + # mode: str + # def fileno() -> int: ... + +@overload +def open( + filename: StrOrBytesPath | _ReadableFileobj, + mode: _ReadBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> GzipFile: ... +@overload +def open( + filename: StrOrBytesPath | _WritableFileobj, + mode: _WriteBinaryMode, + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> GzipFile: ... 
+@overload +def open( + filename: StrOrBytesPath, + mode: _OpenTextMode, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIO: ... +@overload +def open( + filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, + mode: str, + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> GzipFile | TextIO: ... + +class _PaddedFile: + file: _ReadableFileobj + def __init__(self, f: _ReadableFileobj, prepend: bytes = b"") -> None: ... + def read(self, size: int) -> bytes: ... + def prepend(self, prepend: bytes = b"") -> None: ... + def seek(self, off: int) -> int: ... + def seekable(self) -> bool: ... + +if sys.version_info >= (3, 8): + class BadGzipFile(OSError): ... + +class GzipFile(_compression.BaseStream): + myfileobj: FileIO | None + mode: Literal[1, 2] + name: str + compress: zlib._Compress + fileobj: _ReadableFileobj | _WritableFileobj + @overload + def __init__( + self, + filename: StrOrBytesPath | None, + mode: _ReadBinaryMode, + compresslevel: int = 9, + fileobj: _ReadableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... + @overload + def __init__( + self, + *, + mode: _ReadBinaryMode, + compresslevel: int = 9, + fileobj: _ReadableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... + @overload + def __init__( + self, + filename: StrOrBytesPath | None, + mode: _WriteBinaryMode, + compresslevel: int = 9, + fileobj: _WritableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... + @overload + def __init__( + self, + *, + mode: _WriteBinaryMode, + compresslevel: int = 9, + fileobj: _WritableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... + @overload + def __init__( + self, + filename: StrOrBytesPath | None = None, + mode: str | None = None, + compresslevel: int = 9, + fileobj: _ReadableFileobj | _WritableFileobj | None = None, + mtime: float | None = None, + ) -> None: ... + @property + def filename(self) -> str: ... + @property + def mtime(self) -> int | None: ... + crc: int + def write(self, data: ReadableBuffer) -> int: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def peek(self, n: int) -> bytes: ... + def close(self) -> None: ... + def flush(self, zlib_mode: int = 2) -> None: ... + def fileno(self) -> int: ... + def rewind(self) -> None: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + def readline(self, size: int | None = -1) -> bytes: ... + +class _GzipReader(_compression.DecompressReader): + def __init__(self, fp: _ReadableFileobj) -> None: ... + +if sys.version_info >= (3, 8): + def compress(data: _BufferWithLen, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: ... + +else: + def compress(data: _BufferWithLen, compresslevel: int = 9) -> bytes: ... + +def decompress(data: ReadableBuffer) -> bytes: ... 
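To make the GzipFile overloads above concrete, a small round-trip sketch over an in-memory buffer (the payload contents are arbitrary):

import gzip
import io

payload = b"hello gzip " * 100

# Module-level helpers: compress()/decompress() round-trip.
blob = gzip.compress(payload, compresslevel=9)
assert gzip.decompress(blob) == payload

# GzipFile with fileobj=..., matching the keyword-only __init__ overloads.
buf = io.BytesIO()
with gzip.GzipFile(fileobj=buf, mode="wb") as gz:
    gz.write(payload)
buf.seek(0)
with gzip.GzipFile(fileobj=buf, mode="rb") as gz:
    assert gz.read() == payload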
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/hashlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/hashlib.pyi new file mode 100644 index 00000000..18b1ab54 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/hashlib.pyi @@ -0,0 +1,181 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Callable, Set as AbstractSet +from typing import Protocol +from typing_extensions import Self, final + +if sys.version_info >= (3, 11): + __all__ = ( + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "blake2b", + "blake2s", + "sha3_224", + "sha3_256", + "sha3_384", + "sha3_512", + "shake_128", + "shake_256", + "new", + "algorithms_guaranteed", + "algorithms_available", + "pbkdf2_hmac", + "file_digest", + ) +else: + __all__ = ( + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "blake2b", + "blake2s", + "sha3_224", + "sha3_256", + "sha3_384", + "sha3_512", + "shake_128", + "shake_256", + "new", + "algorithms_guaranteed", + "algorithms_available", + "pbkdf2_hmac", + ) + +class _Hash: + @property + def digest_size(self) -> int: ... + @property + def block_size(self) -> int: ... + @property + def name(self) -> str: ... + def __init__(self, data: ReadableBuffer = ...) -> None: ... + def copy(self) -> Self: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def update(self, __data: ReadableBuffer) -> None: ... + +if sys.version_info >= (3, 9): + def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> _Hash: ... + def md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + +elif sys.version_info >= (3, 8): + def new(name: str, data: ReadableBuffer = b"") -> _Hash: ... + def md5(string: ReadableBuffer = b"") -> _Hash: ... + def sha1(string: ReadableBuffer = b"") -> _Hash: ... + def sha224(string: ReadableBuffer = b"") -> _Hash: ... + def sha256(string: ReadableBuffer = b"") -> _Hash: ... + def sha384(string: ReadableBuffer = b"") -> _Hash: ... + def sha512(string: ReadableBuffer = b"") -> _Hash: ... + +else: + def new(name: str, data: ReadableBuffer = b"") -> _Hash: ... + def md5(__string: ReadableBuffer = ...) -> _Hash: ... + def sha1(__string: ReadableBuffer = ...) -> _Hash: ... + def sha224(__string: ReadableBuffer = ...) -> _Hash: ... + def sha256(__string: ReadableBuffer = ...) -> _Hash: ... + def sha384(__string: ReadableBuffer = ...) -> _Hash: ... + def sha512(__string: ReadableBuffer = ...) -> _Hash: ... + +algorithms_guaranteed: AbstractSet[str] +algorithms_available: AbstractSet[str] + +def pbkdf2_hmac( + hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = None +) -> bytes: ... + +class _VarLenHash: + digest_size: int + block_size: int + name: str + def __init__(self, data: ReadableBuffer = ...) -> None: ... + def copy(self) -> _VarLenHash: ... + def digest(self, __length: int) -> bytes: ... + def hexdigest(self, __length: int) -> str: ... 
+ def update(self, __data: ReadableBuffer) -> None: ... + +sha3_224 = _Hash +sha3_256 = _Hash +sha3_384 = _Hash +sha3_512 = _Hash +shake_128 = _VarLenHash +shake_256 = _VarLenHash + +def scrypt( + password: ReadableBuffer, + *, + salt: ReadableBuffer | None = None, + n: int | None = None, + r: int | None = None, + p: int | None = None, + maxmem: int = 0, + dklen: int = 64, +) -> bytes: ... +@final +class _BlakeHash(_Hash): + MAX_DIGEST_SIZE: int + MAX_KEY_SIZE: int + PERSON_SIZE: int + SALT_SIZE: int + + if sys.version_info >= (3, 9): + def __init__( + self, + __data: ReadableBuffer = ..., + *, + digest_size: int = ..., + key: ReadableBuffer = ..., + salt: ReadableBuffer = ..., + person: ReadableBuffer = ..., + fanout: int = ..., + depth: int = ..., + leaf_size: int = ..., + node_offset: int = ..., + node_depth: int = ..., + inner_size: int = ..., + last_node: bool = ..., + usedforsecurity: bool = ..., + ) -> None: ... + else: + def __init__( + self, + __data: ReadableBuffer = ..., + *, + digest_size: int = ..., + key: ReadableBuffer = ..., + salt: ReadableBuffer = ..., + person: ReadableBuffer = ..., + fanout: int = ..., + depth: int = ..., + leaf_size: int = ..., + node_offset: int = ..., + node_depth: int = ..., + inner_size: int = ..., + last_node: bool = ..., + ) -> None: ... + +blake2b = _BlakeHash +blake2s = _BlakeHash + +if sys.version_info >= (3, 11): + class _BytesIOLike(Protocol): + def getbuffer(self) -> ReadableBuffer: ... + + class _FileDigestFileObj(Protocol): + def readinto(self, __buf: bytearray) -> int: ... + def readable(self) -> bool: ... + + def file_digest( + __fileobj: _BytesIOLike | _FileDigestFileObj, __digest: str | Callable[[], _Hash], *, _bufsize: int = 262144 + ) -> _Hash: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/heapq.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/heapq.pyi new file mode 100644 index 00000000..61418b37 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/heapq.pyi @@ -0,0 +1,18 @@ +from _heapq import * +from _typeshed import SupportsRichComparison +from collections.abc import Callable, Iterable +from typing import Any, TypeVar +from typing_extensions import Final + +__all__ = ["heappush", "heappop", "heapify", "heapreplace", "merge", "nlargest", "nsmallest", "heappushpop"] + +_S = TypeVar("_S") + +__about__: Final[str] + +def merge( + *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None, reverse: bool = False +) -> Iterable[_S]: ... +def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... +def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... +def _heapify_max(__x: list[Any]) -> None: ... 
# undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/hmac.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/hmac.pyi new file mode 100644 index 00000000..ee8af1b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/hmac.pyi @@ -0,0 +1,49 @@ +import sys +from _typeshed import ReadableBuffer, _BufferWithLen +from collections.abc import Callable +from types import ModuleType +from typing import Any, AnyStr, overload +from typing_extensions import TypeAlias + +# TODO more precise type for object of hashlib +_Hash: TypeAlias = Any +_DigestMod: TypeAlias = str | Callable[[], _Hash] | ModuleType + +trans_5C: bytes +trans_36: bytes + +digest_size: None + +if sys.version_info >= (3, 8): + # In reality digestmod has a default value, but the function always throws an error + # if the argument is not given, so we pretend it is a required argument. + @overload + def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... + @overload + def new(key: bytes | bytearray, *, digestmod: _DigestMod) -> HMAC: ... + +else: + def new(key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod | None = None) -> HMAC: ... + +class HMAC: + digest_size: int + block_size: int + @property + def name(self) -> str: ... + if sys.version_info >= (3, 8): + def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod = "") -> None: ... + else: + def __init__( + self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod | None = None + ) -> None: ... + + def update(self, msg: ReadableBuffer) -> None: ... + def digest(self) -> bytes: ... + def hexdigest(self) -> str: ... + def copy(self) -> HMAC: ... + +@overload +def compare_digest(__a: ReadableBuffer, __b: ReadableBuffer) -> bool: ... +@overload +def compare_digest(__a: AnyStr, __b: AnyStr) -> bool: ... +def digest(key: _BufferWithLen, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/html/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/html/__init__.pyi new file mode 100644 index 00000000..afba9083 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/html/__init__.pyi @@ -0,0 +1,6 @@ +from typing import AnyStr + +__all__ = ["escape", "unescape"] + +def escape(s: AnyStr, quote: bool = True) -> AnyStr: ... +def unescape(s: AnyStr) -> AnyStr: ... 
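The hmac and html stubs above are small enough to demonstrate together; a brief sketch with placeholder key and message values:

import hashlib
import hmac
import html

# hmac.new() needs an explicit digestmod on 3.8+, as the overloads above enforce.
mac = hmac.new(b"placeholder-key", b"some message", digestmod=hashlib.sha256)
digest_hex = mac.hexdigest()
print(digest_hex)
print(hmac.compare_digest(digest_hex, digest_hex))   # constant-time comparison

# html.escape()/unescape() round-trip markup-significant characters.
raw = '<a href="x.html">Fish & Chips</a>'
escaped = html.escape(raw)        # quote=True also escapes " and '
print(escaped)
assert html.unescape(escaped) == raw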
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/html/entities.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/html/entities.pyi new file mode 100644 index 00000000..be83fd11 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/html/entities.pyi @@ -0,0 +1,6 @@ +__all__ = ["html5", "name2codepoint", "codepoint2name", "entitydefs"] + +name2codepoint: dict[str, int] +html5: dict[str, str] +codepoint2name: dict[int, str] +entitydefs: dict[str, str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/html/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/html/parser.pyi new file mode 100644 index 00000000..d322ade9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/html/parser.pyi @@ -0,0 +1,34 @@ +from _markupbase import ParserBase +from re import Pattern + +__all__ = ["HTMLParser"] + +class HTMLParser(ParserBase): + def __init__(self, *, convert_charrefs: bool = True) -> None: ... + def feed(self, data: str) -> None: ... + def close(self) -> None: ... + def get_starttag_text(self) -> str | None: ... + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... + def handle_endtag(self, tag: str) -> None: ... + def handle_startendtag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... + def handle_data(self, data: str) -> None: ... + def handle_entityref(self, name: str) -> None: ... + def handle_charref(self, name: str) -> None: ... + def handle_comment(self, data: str) -> None: ... + def handle_decl(self, decl: str) -> None: ... + def handle_pi(self, data: str) -> None: ... + CDATA_CONTENT_ELEMENTS: tuple[str, ...] + def check_for_whole_start_tag(self, i: int) -> int: ... # undocumented + def clear_cdata_mode(self) -> None: ... # undocumented + def goahead(self, end: bool) -> None: ... # undocumented + def parse_bogus_comment(self, i: int, report: bool = ...) -> int: ... # undocumented + def parse_endtag(self, i: int) -> int: ... # undocumented + def parse_html_declaration(self, i: int) -> int: ... # undocumented + def parse_pi(self, i: int) -> int: ... # undocumented + def parse_starttag(self, i: int) -> int: ... # undocumented + def set_cdata_mode(self, elem: str) -> None: ... # undocumented + rawdata: str # undocumented + cdata_elem: str | None # undocumented + convert_charrefs: bool # undocumented + interesting: Pattern[str] # undocumented + lasttag: str # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/__init__.pyi new file mode 100644 index 00000000..d4b44f2e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/__init__.pyi @@ -0,0 +1,95 @@ +import sys +from enum import IntEnum +from typing_extensions import Literal + +if sys.version_info >= (3, 11): + from enum import StrEnum + +if sys.version_info >= (3, 11): + __all__ = ["HTTPStatus", "HTTPMethod"] +else: + __all__ = ["HTTPStatus"] + +class HTTPStatus(IntEnum): + @property + def phrase(self) -> str: ... + @property + def description(self) -> str: ... 
+ CONTINUE: int + SWITCHING_PROTOCOLS: int + PROCESSING: int + OK: int + CREATED: int + ACCEPTED: int + NON_AUTHORITATIVE_INFORMATION: int + NO_CONTENT: int + RESET_CONTENT: int + PARTIAL_CONTENT: int + MULTI_STATUS: int + ALREADY_REPORTED: int + IM_USED: int + MULTIPLE_CHOICES: int + MOVED_PERMANENTLY: int + FOUND: int + SEE_OTHER: int + NOT_MODIFIED: int + USE_PROXY: int + TEMPORARY_REDIRECT: int + PERMANENT_REDIRECT: int + BAD_REQUEST: int + UNAUTHORIZED: int + PAYMENT_REQUIRED: int + FORBIDDEN: int + NOT_FOUND: int + METHOD_NOT_ALLOWED: int + NOT_ACCEPTABLE: int + PROXY_AUTHENTICATION_REQUIRED: int + REQUEST_TIMEOUT: int + CONFLICT: int + GONE: int + LENGTH_REQUIRED: int + PRECONDITION_FAILED: int + REQUEST_ENTITY_TOO_LARGE: int + REQUEST_URI_TOO_LONG: int + UNSUPPORTED_MEDIA_TYPE: int + REQUESTED_RANGE_NOT_SATISFIABLE: int + EXPECTATION_FAILED: int + UNPROCESSABLE_ENTITY: int + LOCKED: int + FAILED_DEPENDENCY: int + UPGRADE_REQUIRED: int + PRECONDITION_REQUIRED: int + TOO_MANY_REQUESTS: int + REQUEST_HEADER_FIELDS_TOO_LARGE: int + INTERNAL_SERVER_ERROR: int + NOT_IMPLEMENTED: int + BAD_GATEWAY: int + SERVICE_UNAVAILABLE: int + GATEWAY_TIMEOUT: int + HTTP_VERSION_NOT_SUPPORTED: int + VARIANT_ALSO_NEGOTIATES: int + INSUFFICIENT_STORAGE: int + LOOP_DETECTED: int + NOT_EXTENDED: int + NETWORK_AUTHENTICATION_REQUIRED: int + MISDIRECTED_REQUEST: int + if sys.version_info >= (3, 8): + UNAVAILABLE_FOR_LEGAL_REASONS: int + if sys.version_info >= (3, 9): + EARLY_HINTS: Literal[103] + IM_A_TEAPOT: Literal[418] + TOO_EARLY: Literal[425] + +if sys.version_info >= (3, 11): + class HTTPMethod(StrEnum): + @property + def description(self) -> str: ... + CONNECT: str + DELETE: str + GET: str + HEAD: str + OPTIONS: str + PATCH: str + POST: str + PUT: str + TRACE: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/client.pyi new file mode 100644 index 00000000..b1506b50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/client.pyi @@ -0,0 +1,221 @@ +import email.message +import io +import ssl +import types +from _typeshed import ReadableBuffer, SupportsRead, WriteableBuffer +from collections.abc import Callable, Iterable, Iterator, Mapping +from socket import socket +from typing import Any, BinaryIO, TypeVar, overload +from typing_extensions import Self, TypeAlias + +__all__ = [ + "HTTPResponse", + "HTTPConnection", + "HTTPException", + "NotConnected", + "UnknownProtocol", + "UnknownTransferEncoding", + "UnimplementedFileMode", + "IncompleteRead", + "InvalidURL", + "ImproperConnectionState", + "CannotSendRequest", + "CannotSendHeader", + "ResponseNotReady", + "BadStatusLine", + "LineTooLong", + "RemoteDisconnected", + "error", + "responses", + "HTTPSConnection", +] + +_DataType: TypeAlias = SupportsRead[bytes] | Iterable[ReadableBuffer] | ReadableBuffer +_T = TypeVar("_T") + +HTTP_PORT: int +HTTPS_PORT: int + +CONTINUE: int +SWITCHING_PROTOCOLS: int +PROCESSING: int + +OK: int +CREATED: int +ACCEPTED: int +NON_AUTHORITATIVE_INFORMATION: int +NO_CONTENT: int +RESET_CONTENT: int +PARTIAL_CONTENT: int +MULTI_STATUS: int +IM_USED: int + +MULTIPLE_CHOICES: int +MOVED_PERMANENTLY: int +FOUND: int +SEE_OTHER: int +NOT_MODIFIED: int +USE_PROXY: int +TEMPORARY_REDIRECT: int + +BAD_REQUEST: int +UNAUTHORIZED: int +PAYMENT_REQUIRED: int +FORBIDDEN: int +NOT_FOUND: int +METHOD_NOT_ALLOWED: int +NOT_ACCEPTABLE: int 
+PROXY_AUTHENTICATION_REQUIRED: int +REQUEST_TIMEOUT: int +CONFLICT: int +GONE: int +LENGTH_REQUIRED: int +PRECONDITION_FAILED: int +REQUEST_ENTITY_TOO_LARGE: int +REQUEST_URI_TOO_LONG: int +UNSUPPORTED_MEDIA_TYPE: int +REQUESTED_RANGE_NOT_SATISFIABLE: int +EXPECTATION_FAILED: int +UNPROCESSABLE_ENTITY: int +LOCKED: int +FAILED_DEPENDENCY: int +UPGRADE_REQUIRED: int +PRECONDITION_REQUIRED: int +TOO_MANY_REQUESTS: int +REQUEST_HEADER_FIELDS_TOO_LARGE: int + +INTERNAL_SERVER_ERROR: int +NOT_IMPLEMENTED: int +BAD_GATEWAY: int +SERVICE_UNAVAILABLE: int +GATEWAY_TIMEOUT: int +HTTP_VERSION_NOT_SUPPORTED: int +INSUFFICIENT_STORAGE: int +NOT_EXTENDED: int +NETWORK_AUTHENTICATION_REQUIRED: int + +responses: dict[int, str] + +class HTTPMessage(email.message.Message): + def getallmatchingheaders(self, name: str) -> list[str]: ... # undocumented + +def parse_headers(fp: io.BufferedIOBase, _class: Callable[[], email.message.Message] = ...) -> HTTPMessage: ... + +class HTTPResponse(io.BufferedIOBase, BinaryIO): + msg: HTTPMessage + headers: HTTPMessage + version: int + debuglevel: int + fp: io.BufferedReader + closed: bool + status: int + reason: str + chunked: bool + chunk_left: int | None + length: int | None + will_close: bool + def __init__(self, sock: socket, debuglevel: int = 0, method: str | None = None, url: str | None = None) -> None: ... + def peek(self, n: int = -1) -> bytes: ... + def read(self, amt: int | None = None) -> bytes: ... + def read1(self, n: int = -1) -> bytes: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] + @overload + def getheader(self, name: str) -> str | None: ... + @overload + def getheader(self, name: str, default: _T) -> str | _T: ... + def getheaders(self) -> list[tuple[str, str]]: ... + def isclosed(self) -> bool: ... + def __iter__(self) -> Iterator[bytes]: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def info(self) -> email.message.Message: ... + def geturl(self) -> str: ... + def getcode(self) -> int: ... + def begin(self) -> None: ... + +class HTTPConnection: + auto_open: int # undocumented + debuglevel: int + default_port: int # undocumented + response_class: type[HTTPResponse] # undocumented + timeout: float | None + host: str + port: int + sock: socket | Any # can be `None` if `.connect()` was not called + def __init__( + self, + host: str, + port: int | None = None, + timeout: float | None = ..., + source_address: tuple[str, int] | None = None, + blocksize: int = 8192, + ) -> None: ... + def request( + self, + method: str, + url: str, + body: _DataType | str | None = None, + headers: Mapping[str, str] = ..., + *, + encode_chunked: bool = False, + ) -> None: ... + def getresponse(self) -> HTTPResponse: ... + def set_debuglevel(self, level: int) -> None: ... + def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: ... + def connect(self) -> None: ... + def close(self) -> None: ... + def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: ... + def putheader(self, header: str, *argument: str) -> None: ... + def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: ... + def send(self, data: _DataType | str) -> None: ... 
+ +class HTTPSConnection(HTTPConnection): + # Can be `None` if `.connect()` was not called: + sock: ssl.SSLSocket | Any # type: ignore[override] + def __init__( + self, + host: str, + port: int | None = None, + key_file: str | None = None, + cert_file: str | None = None, + timeout: float | None = ..., + source_address: tuple[str, int] | None = None, + *, + context: ssl.SSLContext | None = None, + check_hostname: bool | None = None, + blocksize: int = 8192, + ) -> None: ... + +class HTTPException(Exception): ... + +error = HTTPException + +class NotConnected(HTTPException): ... +class InvalidURL(HTTPException): ... + +class UnknownProtocol(HTTPException): + def __init__(self, version: str) -> None: ... + +class UnknownTransferEncoding(HTTPException): ... +class UnimplementedFileMode(HTTPException): ... + +class IncompleteRead(HTTPException): + def __init__(self, partial: bytes, expected: int | None = None) -> None: ... + partial: bytes + expected: int | None + +class ImproperConnectionState(HTTPException): ... +class CannotSendRequest(ImproperConnectionState): ... +class CannotSendHeader(ImproperConnectionState): ... +class ResponseNotReady(ImproperConnectionState): ... + +class BadStatusLine(HTTPException): + def __init__(self, line: str) -> None: ... + +class LineTooLong(HTTPException): + def __init__(self, line_type: str) -> None: ... + +class RemoteDisconnected(ConnectionResetError, BadStatusLine): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/cookiejar.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/cookiejar.pyi new file mode 100644 index 00000000..7f2c9c6c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/cookiejar.pyi @@ -0,0 +1,183 @@ +import sys +from _typeshed import StrPath +from collections.abc import Iterable, Iterator, Sequence +from http.client import HTTPResponse +from re import Pattern +from typing import ClassVar, TypeVar, overload +from urllib.request import Request + +__all__ = [ + "Cookie", + "CookieJar", + "CookiePolicy", + "DefaultCookiePolicy", + "FileCookieJar", + "LWPCookieJar", + "LoadError", + "MozillaCookieJar", +] + +_T = TypeVar("_T") + +class LoadError(OSError): ... + +class CookieJar(Iterable[Cookie]): + non_word_re: ClassVar[Pattern[str]] # undocumented + quote_re: ClassVar[Pattern[str]] # undocumented + strict_domain_re: ClassVar[Pattern[str]] # undocumented + domain_re: ClassVar[Pattern[str]] # undocumented + dots_re: ClassVar[Pattern[str]] # undocumented + magic_re: ClassVar[Pattern[str]] # undocumented + def __init__(self, policy: CookiePolicy | None = None) -> None: ... + def add_cookie_header(self, request: Request) -> None: ... + def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ... + def set_policy(self, policy: CookiePolicy) -> None: ... + def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: ... + def set_cookie(self, cookie: Cookie) -> None: ... + def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: ... + def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: ... + def clear_session_cookies(self) -> None: ... + def clear_expired_cookies(self) -> None: ... # undocumented + def __iter__(self) -> Iterator[Cookie]: ... + def __len__(self) -> int: ... 
+ +class FileCookieJar(CookieJar): + filename: str + delayload: bool + if sys.version_info >= (3, 8): + def __init__( + self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None + ) -> None: ... + else: + def __init__(self, filename: str | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: ... + + def save(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def load(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def revert(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + +class MozillaCookieJar(FileCookieJar): + if sys.version_info < (3, 10): + header: ClassVar[str] # undocumented + +class LWPCookieJar(FileCookieJar): + def as_lwp_str(self, ignore_discard: bool = True, ignore_expires: bool = True) -> str: ... # undocumented + +class CookiePolicy: + netscape: bool + rfc2965: bool + hide_cookie2: bool + def set_ok(self, cookie: Cookie, request: Request) -> bool: ... + def return_ok(self, cookie: Cookie, request: Request) -> bool: ... + def domain_return_ok(self, domain: str, request: Request) -> bool: ... + def path_return_ok(self, path: str, request: Request) -> bool: ... + +class DefaultCookiePolicy(CookiePolicy): + rfc2109_as_netscape: bool + strict_domain: bool + strict_rfc2965_unverifiable: bool + strict_ns_unverifiable: bool + strict_ns_domain: int + strict_ns_set_initial_dollar: bool + strict_ns_set_path: bool + DomainStrictNoDots: ClassVar[int] + DomainStrictNonDomain: ClassVar[int] + DomainRFC2965Match: ClassVar[int] + DomainLiberal: ClassVar[int] + DomainStrict: ClassVar[int] + if sys.version_info >= (3, 8): + def __init__( + self, + blocked_domains: Sequence[str] | None = None, + allowed_domains: Sequence[str] | None = None, + netscape: bool = True, + rfc2965: bool = False, + rfc2109_as_netscape: bool | None = None, + hide_cookie2: bool = False, + strict_domain: bool = False, + strict_rfc2965_unverifiable: bool = True, + strict_ns_unverifiable: bool = False, + strict_ns_domain: int = 0, + strict_ns_set_initial_dollar: bool = False, + strict_ns_set_path: bool = False, + secure_protocols: Sequence[str] = ..., + ) -> None: ... + else: + def __init__( + self, + blocked_domains: Sequence[str] | None = None, + allowed_domains: Sequence[str] | None = None, + netscape: bool = True, + rfc2965: bool = False, + rfc2109_as_netscape: bool | None = None, + hide_cookie2: bool = False, + strict_domain: bool = False, + strict_rfc2965_unverifiable: bool = True, + strict_ns_unverifiable: bool = False, + strict_ns_domain: int = 0, + strict_ns_set_initial_dollar: bool = False, + strict_ns_set_path: bool = False, + ) -> None: ... + + def blocked_domains(self) -> tuple[str, ...]: ... + def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: ... + def is_blocked(self, domain: str) -> bool: ... + def allowed_domains(self) -> tuple[str, ...] | None: ... + def set_allowed_domains(self, allowed_domains: Sequence[str] | None) -> None: ... + def is_not_allowed(self, domain: str) -> bool: ... + def set_ok_version(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def set_ok_verifiability(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def set_ok_name(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def set_ok_path(self, cookie: Cookie, request: Request) -> bool: ... 
# undocumented + def set_ok_domain(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def set_ok_port(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_version(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_verifiability(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_secure(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_expires(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_port(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + def return_ok_domain(self, cookie: Cookie, request: Request) -> bool: ... # undocumented + +class Cookie: + version: int | None + name: str + value: str | None + port: str | None + path: str + path_specified: bool + secure: bool + expires: int | None + discard: bool + comment: str | None + comment_url: str | None + rfc2109: bool + port_specified: bool + domain: str # undocumented + domain_specified: bool + domain_initial_dot: bool + def __init__( + self, + version: int | None, + name: str, + value: str | None, # undocumented + port: str | None, + port_specified: bool, + domain: str, + domain_specified: bool, + domain_initial_dot: bool, + path: str, + path_specified: bool, + secure: bool, + expires: int | None, + discard: bool, + comment: str | None, + comment_url: str | None, + rest: dict[str, str], + rfc2109: bool = False, + ) -> None: ... + def has_nonstandard_attr(self, name: str) -> bool: ... + @overload + def get_nonstandard_attr(self, name: str) -> str | None: ... + @overload + def get_nonstandard_attr(self, name: str, default: _T) -> str | _T: ... + def set_nonstandard_attr(self, name: str, value: str) -> None: ... + def is_expired(self, now: int | None = None) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/cookies.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/cookies.pyi new file mode 100644 index 00000000..e24ef9cb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/cookies.pyi @@ -0,0 +1,60 @@ +import sys +from collections.abc import Iterable, Mapping +from typing import Any, Generic, TypeVar, overload +from typing_extensions import TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ["CookieError", "BaseCookie", "SimpleCookie"] + +_DataType: TypeAlias = str | Mapping[str, str | Morsel[Any]] +_T = TypeVar("_T") + +@overload +def _quote(str: None) -> None: ... +@overload +def _quote(str: str) -> str: ... +@overload +def _unquote(str: None) -> None: ... +@overload +def _unquote(str: str) -> str: ... + +class CookieError(Exception): ... + +class Morsel(dict[str, Any], Generic[_T]): + @property + def value(self) -> str: ... + @property + def coded_value(self) -> _T: ... + @property + def key(self) -> str: ... + def __init__(self) -> None: ... + def set(self, key: str, val: str, coded_val: _T) -> None: ... + def setdefault(self, key: str, val: str | None = None) -> str: ... + # The dict update can also get a keywords argument so this is incompatible + @overload # type: ignore[override] + def update(self, values: Mapping[str, str]) -> None: ... + @overload + def update(self, values: Iterable[tuple[str, str]]) -> None: ... + def isReservedKey(self, K: str) -> bool: ... + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:") -> str: ... 
+ __str__ = output + def js_output(self, attrs: list[str] | None = None) -> str: ... + def OutputString(self, attrs: list[str] | None = None) -> str: ... + def __eq__(self, morsel: object) -> bool: ... + def __setitem__(self, K: str, V: Any) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): + def __init__(self, input: _DataType | None = None) -> None: ... + def value_decode(self, val: str) -> _T: ... + def value_encode(self, val: _T) -> str: ... + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: ... + __str__ = output + def js_output(self, attrs: list[str] | None = None) -> str: ... + def load(self, rawdata: _DataType) -> None: ... + def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: ... + +class SimpleCookie(BaseCookie[_T], Generic[_T]): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/server.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/server.pyi new file mode 100644 index 00000000..c9700f70 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/http/server.pyi @@ -0,0 +1,82 @@ +import _socket +import email.message +import io +import socketserver +import sys +from _typeshed import StrPath, SupportsRead, SupportsWrite +from collections.abc import Mapping, Sequence +from typing import Any, AnyStr, BinaryIO, ClassVar + +__all__ = ["HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler"] + +class HTTPServer(socketserver.TCPServer): + server_name: str + server_port: int + +class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer): ... + +class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): + client_address: tuple[str, int] + close_connection: bool + requestline: str + command: str + path: str + request_version: str + headers: email.message.Message + server_version: str + sys_version: str + error_message_format: str + error_content_type: str + protocol_version: str + MessageClass: type + responses: Mapping[int, tuple[str, str]] + default_request_version: str # undocumented + weekdayname: ClassVar[Sequence[str]] # undocumented + monthname: ClassVar[Sequence[str | None]] # undocumented + def handle_one_request(self) -> None: ... + def handle_expect_100(self) -> bool: ... + def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: ... + def send_response(self, code: int, message: str | None = None) -> None: ... + def send_header(self, keyword: str, value: str) -> None: ... + def send_response_only(self, code: int, message: str | None = None) -> None: ... + def end_headers(self) -> None: ... + def flush_headers(self) -> None: ... + def log_request(self, code: int | str = "-", size: int | str = "-") -> None: ... + def log_error(self, format: str, *args: Any) -> None: ... + def log_message(self, format: str, *args: Any) -> None: ... + def version_string(self) -> str: ... + def date_time_string(self, timestamp: int | None = None) -> str: ... + def log_date_time_string(self) -> str: ... + def address_string(self) -> str: ... + def parse_request(self) -> bool: ... 
# undocumented + +class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): + extensions_map: dict[str, str] + if sys.version_info >= (3, 12): + index_pages: ClassVar[tuple[str, ...]] + def __init__( + self, + request: socketserver._RequestType, + client_address: _socket._RetAddress, + server: socketserver.BaseServer, + *, + directory: str | None = None, + ) -> None: ... + def do_GET(self) -> None: ... + def do_HEAD(self) -> None: ... + def send_head(self) -> io.BytesIO | BinaryIO | None: ... # undocumented + def list_directory(self, path: StrPath) -> io.BytesIO | None: ... # undocumented + def translate_path(self, path: str) -> str: ... # undocumented + def copyfile(self, source: SupportsRead[AnyStr], outputfile: SupportsWrite[AnyStr]) -> None: ... # undocumented + def guess_type(self, path: StrPath) -> str: ... # undocumented + +def executable(path: StrPath) -> bool: ... # undocumented + +class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): + cgi_directories: list[str] + have_fork: bool # undocumented + def do_POST(self) -> None: ... + def is_cgi(self) -> bool: ... # undocumented + def is_executable(self, path: StrPath) -> bool: ... # undocumented + def is_python(self, path: StrPath) -> bool: ... # undocumented + def run_cgi(self) -> None: ... # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/imaplib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/imaplib.pyi new file mode 100644 index 00000000..1c2112dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/imaplib.pyi @@ -0,0 +1,163 @@ +import subprocess +import sys +import time +from _typeshed import ReadableBuffer, _BufferWithLen +from builtins import list as _list # conflicts with a method named "list" +from collections.abc import Callable +from datetime import datetime +from re import Pattern +from socket import socket as _socket +from ssl import SSLContext, SSLSocket +from types import TracebackType +from typing import IO, Any, SupportsAbs, SupportsInt +from typing_extensions import Literal, Self, TypeAlias + +__all__ = ["IMAP4", "IMAP4_stream", "Internaldate2tuple", "Int2AP", "ParseFlags", "Time2Internaldate", "IMAP4_SSL"] + +# TODO: Commands should use their actual return types, not this type alias. +# E.g. Tuple[Literal["OK"], List[bytes]] +_CommandResults: TypeAlias = tuple[str, list[Any]] + +_AnyResponseData: TypeAlias = list[None] | list[bytes | tuple[bytes, bytes]] + +Commands: dict[str, tuple[str, ...]] + +class IMAP4: + class error(Exception): ... + class abort(error): ... + class readonly(abort): ... + mustquote: Pattern[str] + debug: int + state: str + literal: str | None + tagged_commands: dict[bytes, _list[bytes] | None] + untagged_responses: dict[str, _list[bytes | tuple[bytes, bytes]]] + continuation_response: str + is_readonly: bool + tagnum: int + tagpre: str + tagre: Pattern[str] + welcome: bytes + capabilities: tuple[str, ...] + PROTOCOL_VERSION: str + if sys.version_info >= (3, 9): + def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... + def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... + else: + def __init__(self, host: str = "", port: int = 143) -> None: ... + def open(self, host: str = "", port: int = 143) -> None: ... + + def __getattr__(self, attr: str) -> Any: ... + host: str + port: int + sock: _socket + file: IO[str] | IO[bytes] + def read(self, size: int) -> bytes: ... 
+ def readline(self) -> bytes: ... + def send(self, data: ReadableBuffer) -> None: ... + def shutdown(self) -> None: ... + def socket(self) -> _socket: ... + def recent(self) -> _CommandResults: ... + def response(self, code: str) -> _CommandResults: ... + def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: ... + def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: ... + def capability(self) -> _CommandResults: ... + def check(self) -> _CommandResults: ... + def close(self) -> _CommandResults: ... + def copy(self, message_set: str, new_mailbox: str) -> _CommandResults: ... + def create(self, mailbox: str) -> _CommandResults: ... + def delete(self, mailbox: str) -> _CommandResults: ... + def deleteacl(self, mailbox: str, who: str) -> _CommandResults: ... + def enable(self, capability: str) -> _CommandResults: ... + def __enter__(self) -> Self: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def expunge(self) -> _CommandResults: ... + def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: ... + def getacl(self, mailbox: str) -> _CommandResults: ... + def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: ... + def getquota(self, root: str) -> _CommandResults: ... + def getquotaroot(self, mailbox: str) -> _CommandResults: ... + def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: ... + def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: ... + def login_cram_md5(self, user: str, password: str) -> _CommandResults: ... + def logout(self) -> tuple[str, _AnyResponseData]: ... + def lsub(self, directory: str = '""', pattern: str = "*") -> _CommandResults: ... + def myrights(self, mailbox: str) -> _CommandResults: ... + def namespace(self) -> _CommandResults: ... + def noop(self) -> tuple[str, _list[bytes]]: ... + def partial(self, message_num: str, message_part: str, start: str, length: str) -> _CommandResults: ... + def proxyauth(self, user: str) -> _CommandResults: ... + def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: ... + def search(self, charset: str | None, *criteria: str) -> _CommandResults: ... + def select(self, mailbox: str = "INBOX", readonly: bool = False) -> tuple[str, _list[bytes | None]]: ... + def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: ... + def setannotation(self, *args: str) -> _CommandResults: ... + def setquota(self, root: str, limits: str) -> _CommandResults: ... + def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: ... + def starttls(self, ssl_context: Any | None = None) -> tuple[Literal["OK"], _list[None]]: ... + def status(self, mailbox: str, names: str) -> _CommandResults: ... + def store(self, message_set: str, command: str, flags: str) -> _CommandResults: ... + def subscribe(self, mailbox: str) -> _CommandResults: ... + def thread(self, threading_algorithm: str, charset: str, *search_criteria: str) -> _CommandResults: ... + def uid(self, command: str, *args: str) -> _CommandResults: ... + def unsubscribe(self, mailbox: str) -> _CommandResults: ... + if sys.version_info >= (3, 9): + def unselect(self) -> _CommandResults: ... + + def xatom(self, name: str, *args: str) -> _CommandResults: ... + def print_log(self) -> None: ... 
+ +class IMAP4_SSL(IMAP4): + keyfile: str + certfile: str + if sys.version_info >= (3, 9): + def __init__( + self, + host: str = "", + port: int = 993, + keyfile: str | None = None, + certfile: str | None = None, + ssl_context: SSLContext | None = None, + timeout: float | None = None, + ) -> None: ... + else: + def __init__( + self, + host: str = "", + port: int = 993, + keyfile: str | None = None, + certfile: str | None = None, + ssl_context: SSLContext | None = None, + ) -> None: ... + sslobj: SSLSocket + file: IO[Any] + if sys.version_info >= (3, 9): + def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: ... + else: + def open(self, host: str = "", port: int | None = 993) -> None: ... + + def ssl(self) -> SSLSocket: ... + +class IMAP4_stream(IMAP4): + command: str + def __init__(self, command: str) -> None: ... + file: IO[Any] + process: subprocess.Popen[bytes] + writefile: IO[Any] + readfile: IO[Any] + if sys.version_info >= (3, 9): + def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: ... + else: + def open(self, host: str | None = None, port: int | None = None) -> None: ... + +class _Authenticator: + mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] + def __init__(self, mechinst: Callable[[bytes], bytes | bytearray | memoryview | str | None]) -> None: ... + def process(self, data: str) -> str: ... + def encode(self, inp: bytes | bytearray | memoryview) -> str: ... + def decode(self, inp: str | _BufferWithLen) -> bytes: ... + +def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: ... +def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: ... +def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: ... +def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/imghdr.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/imghdr.pyi new file mode 100644 index 00000000..ed3647f2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/imghdr.pyi @@ -0,0 +1,17 @@ +from _typeshed import StrPath +from collections.abc import Callable +from typing import Any, BinaryIO, Protocol, overload + +__all__ = ["what"] + +class _ReadableBinary(Protocol): + def tell(self) -> int: ... + def read(self, size: int) -> bytes: ... + def seek(self, offset: int) -> Any: ... + +@overload +def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: ... +@overload +def what(file: Any, h: bytes) -> str | None: ... 
+ +tests: list[Callable[[bytes, BinaryIO | None], str | None]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/imp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/imp.pyi new file mode 100644 index 00000000..3f2920de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/imp.pyi @@ -0,0 +1,62 @@ +import types +from _imp import ( + acquire_lock as acquire_lock, + create_dynamic as create_dynamic, + get_frozen_object as get_frozen_object, + init_frozen as init_frozen, + is_builtin as is_builtin, + is_frozen as is_frozen, + is_frozen_package as is_frozen_package, + lock_held as lock_held, + release_lock as release_lock, +) +from _typeshed import StrPath +from os import PathLike +from types import TracebackType +from typing import IO, Any, Protocol + +SEARCH_ERROR: int +PY_SOURCE: int +PY_COMPILED: int +C_EXTENSION: int +PY_RESOURCE: int +PKG_DIRECTORY: int +C_BUILTIN: int +PY_FROZEN: int +PY_CODERESOURCE: int +IMP_HOOK: int + +def new_module(name: str) -> types.ModuleType: ... +def get_magic() -> bytes: ... +def get_tag() -> str: ... +def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: ... +def source_from_cache(path: StrPath) -> str: ... +def get_suffixes() -> list[tuple[str, str, int]]: ... + +class NullImporter: + def __init__(self, path: StrPath) -> None: ... + def find_module(self, fullname: Any) -> None: ... + +# Technically, a text file has to support a slightly different set of operations than a binary file, +# but we ignore that here. +class _FileLike(Protocol): + closed: bool + mode: str + def read(self) -> str | bytes: ... + def close(self) -> Any: ... + def __enter__(self) -> Any: ... + def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> Any: ... + +# PathLike doesn't work for the pathname argument here +def load_source(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... +def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... +def load_package(name: str, path: StrPath) -> types.ModuleType: ... +def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: ... + +# IO[Any] is a TextIOWrapper if name is a .py file, and a FileIO otherwise. +def find_module( + name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = None +) -> tuple[IO[Any], str, tuple[str, str, int]]: ... +def reload(module: types.ModuleType) -> types.ModuleType: ... +def init_builtin(name: str) -> types.ModuleType | None: ... +def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: ... 
# file argument is ignored diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/__init__.pyi new file mode 100644 index 00000000..1747b274 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/__init__.pyi @@ -0,0 +1,20 @@ +from collections.abc import Mapping, Sequence +from importlib.abc import Loader +from types import ModuleType + +__all__ = ["__import__", "import_module", "invalidate_caches", "reload"] + +# Signature of `builtins.__import__` should be kept identical to `importlib.__import__` +def __import__( + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] = ..., + level: int = 0, +) -> ModuleType: ... + +# `importlib.import_module` return type should be kept the same as `builtins.__import__` +def import_module(name: str, package: str | None = None) -> ModuleType: ... +def find_loader(name: str, path: str | None = None) -> Loader | None: ... +def invalidate_caches() -> None: ... +def reload(module: ModuleType) -> ModuleType: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/abc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/abc.pyi new file mode 100644 index 00000000..3d0c2d38 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/abc.pyi @@ -0,0 +1,197 @@ +import sys +import types +from _typeshed import ( + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, +) +from abc import ABCMeta, abstractmethod +from collections.abc import Iterator, Mapping, Sequence +from importlib.machinery import ModuleSpec +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper +from typing import IO, Any, BinaryIO, NoReturn, Protocol, overload, runtime_checkable +from typing_extensions import Literal + +if sys.version_info >= (3, 11): + __all__ = [ + "Loader", + "Finder", + "MetaPathFinder", + "PathEntryFinder", + "ResourceLoader", + "InspectLoader", + "ExecutionLoader", + "FileLoader", + "SourceLoader", + "ResourceReader", + "Traversable", + "TraversableResources", + ] + +class Finder(metaclass=ABCMeta): ... + +class Loader(metaclass=ABCMeta): + def load_module(self, fullname: str) -> types.ModuleType: ... + def module_repr(self, module: types.ModuleType) -> str: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... + # Not defined on the actual class for backwards-compatibility reasons, + # but expected in new code. + def exec_module(self, module: types.ModuleType) -> None: ... + +class ResourceLoader(Loader): + @abstractmethod + def get_data(self, path: str) -> bytes: ... + +class InspectLoader(Loader): + def is_package(self, fullname: str) -> bool: ... + def get_code(self, fullname: str) -> types.CodeType | None: ... + @abstractmethod + def get_source(self, fullname: str) -> str | None: ... + def exec_module(self, module: types.ModuleType) -> None: ... + @staticmethod + def source_to_code(data: ReadableBuffer | str, path: str = "") -> types.CodeType: ... + +class ExecutionLoader(InspectLoader): + @abstractmethod + def get_filename(self, fullname: str) -> str: ... 
+ +class SourceLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): + def path_mtime(self, path: str) -> float: ... + def set_data(self, path: str, data: bytes) -> None: ... + def get_source(self, fullname: str) -> str | None: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... + +# Please keep in sync with sys._MetaPathFinder +class MetaPathFinder(Finder): + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec( + self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ... + ) -> ModuleSpec | None: ... + +class PathEntryFinder(Finder): + def find_module(self, fullname: str) -> Loader | None: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... + def invalidate_caches(self) -> None: ... + # Not defined on the actual class, but expected to exist. + def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... + +class FileLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): + name: str + path: str + def __init__(self, fullname: str, path: str) -> None: ... + def get_data(self, path: str) -> bytes: ... + def get_filename(self, name: str | None = None) -> str: ... + def load_module(self, name: str | None = None) -> types.ModuleType: ... + +class ResourceReader(metaclass=ABCMeta): + @abstractmethod + def open_resource(self, resource: str) -> IO[bytes]: ... + @abstractmethod + def resource_path(self, resource: str) -> str: ... + if sys.version_info >= (3, 10): + @abstractmethod + def is_resource(self, path: str) -> bool: ... + else: + @abstractmethod + def is_resource(self, name: str) -> bool: ... + + @abstractmethod + def contents(self) -> Iterator[str]: ... + +if sys.version_info >= (3, 9): + @runtime_checkable + class Traversable(Protocol): + @abstractmethod + def is_dir(self) -> bool: ... + @abstractmethod + def is_file(self) -> bool: ... + @abstractmethod + def iterdir(self) -> Iterator[Traversable]: ... + if sys.version_info >= (3, 11): + @abstractmethod + def joinpath(self, *descendants: str) -> Traversable: ... + else: + @abstractmethod + def joinpath(self, child: str) -> Traversable: ... + # The .open method comes from pathlib.pyi and should be kept in sync. + @overload + @abstractmethod + def open( + self, + mode: OpenTextMode = "r", + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> TextIOWrapper: ... + # Unbuffered binary mode: returns a FileIO + @overload + @abstractmethod + def open( + self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = None, errors: None = None, newline: None = None + ) -> FileIO: ... + # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter + @overload + @abstractmethod + def open( + self, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = ..., + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> BufferedRandom: ... + @overload + @abstractmethod + def open( + self, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = ..., + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> BufferedWriter: ... + @overload + @abstractmethod + def open( + self, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = ..., + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> BufferedReader: ... 
+ # Buffering cannot be determined: fall back to BinaryIO + @overload + @abstractmethod + def open( + self, mode: OpenBinaryMode, buffering: int = ..., encoding: None = None, errors: None = None, newline: None = None + ) -> BinaryIO: ... + # Fallback if mode is not specified + @overload + @abstractmethod + def open( + self, mode: str, buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... + ) -> IO[Any]: ... + @property + @abstractmethod + def name(self) -> str: ... + @abstractmethod + def __truediv__(self, child: str) -> Traversable: ... + @abstractmethod + def read_bytes(self) -> bytes: ... + @abstractmethod + def read_text(self, encoding: str | None = None) -> str: ... + + class TraversableResources(ResourceReader): + @abstractmethod + def files(self) -> Traversable: ... + def open_resource(self, resource: str) -> BufferedReader: ... # type: ignore[override] + def resource_path(self, resource: Any) -> NoReturn: ... + def is_resource(self, path: str) -> bool: ... + def contents(self) -> Iterator[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/machinery.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/machinery.pyi new file mode 100644 index 00000000..5aaefce8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/machinery.pyi @@ -0,0 +1,150 @@ +import importlib.abc +import sys +import types +from _typeshed import ReadableBuffer +from collections.abc import Callable, Iterable, Sequence +from typing import Any + +if sys.version_info >= (3, 8): + from importlib.metadata import DistributionFinder, PathDistribution + +class ModuleSpec: + def __init__( + self, + name: str, + loader: importlib.abc.Loader | None, + *, + origin: str | None = None, + loader_state: Any = None, + is_package: bool | None = None, + ) -> None: ... + name: str + loader: importlib.abc.Loader | None + origin: str | None + submodule_search_locations: list[str] | None + loader_state: Any + cached: str | None + @property + def parent(self) -> str | None: ... + has_location: bool + def __eq__(self, other: object) -> bool: ... + +class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): + # MetaPathFinder + @classmethod + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + @classmethod + def find_spec( + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None + ) -> ModuleSpec | None: ... + # InspectLoader + @classmethod + def is_package(cls, fullname: str) -> bool: ... + @classmethod + def load_module(cls, fullname: str) -> types.ModuleType: ... + @classmethod + def get_code(cls, fullname: str) -> None: ... + @classmethod + def get_source(cls, fullname: str) -> None: ... + # Loader + @staticmethod + def module_repr(module: types.ModuleType) -> str: ... + if sys.version_info >= (3, 10): + @staticmethod + def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... + @staticmethod + def exec_module(module: types.ModuleType) -> None: ... + else: + @classmethod + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... + @classmethod + def exec_module(cls, module: types.ModuleType) -> None: ... 
+ +class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): + # MetaPathFinder + @classmethod + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + @classmethod + def find_spec( + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None + ) -> ModuleSpec | None: ... + # InspectLoader + @classmethod + def is_package(cls, fullname: str) -> bool: ... + @classmethod + def load_module(cls, fullname: str) -> types.ModuleType: ... + @classmethod + def get_code(cls, fullname: str) -> None: ... + @classmethod + def get_source(cls, fullname: str) -> None: ... + # Loader + @staticmethod + def module_repr(m: types.ModuleType) -> str: ... + if sys.version_info >= (3, 10): + @staticmethod + def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... + else: + @classmethod + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... + + @staticmethod + def exec_module(module: types.ModuleType) -> None: ... + +class WindowsRegistryFinder(importlib.abc.MetaPathFinder): + @classmethod + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + @classmethod + def find_spec( + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None + ) -> ModuleSpec | None: ... + +class PathFinder: + if sys.version_info >= (3, 10): + @staticmethod + def invalidate_caches() -> None: ... + else: + @classmethod + def invalidate_caches(cls) -> None: ... + if sys.version_info >= (3, 10): + @staticmethod + def find_distributions(context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + elif sys.version_info >= (3, 8): + @classmethod + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + + @classmethod + def find_spec( + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None + ) -> ModuleSpec | None: ... + @classmethod + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... + +SOURCE_SUFFIXES: list[str] +DEBUG_BYTECODE_SUFFIXES: list[str] +OPTIMIZED_BYTECODE_SUFFIXES: list[str] +BYTECODE_SUFFIXES: list[str] +EXTENSION_SUFFIXES: list[str] + +def all_suffixes() -> list[str]: ... + +class FileFinder(importlib.abc.PathEntryFinder): + path: str + def __init__(self, path: str, *loader_details: tuple[type[importlib.abc.Loader], list[str]]) -> None: ... + @classmethod + def path_hook( + cls, *loader_details: tuple[type[importlib.abc.Loader], list[str]] + ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... + +class SourceFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): + def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: ... + +class SourcelessFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... + +class ExtensionFileLoader(importlib.abc.ExecutionLoader): + def __init__(self, name: str, path: str) -> None: ... + def get_filename(self, name: str | None = None) -> str: ... + def get_source(self, fullname: str) -> None: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType: ... + def exec_module(self, module: types.ModuleType) -> None: ... + def get_code(self, fullname: str) -> None: ... + def __eq__(self, other: object) -> bool: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/metadata/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/metadata/__init__.pyi new file mode 100644 index 00000000..083453cd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -0,0 +1,207 @@ +import abc +import pathlib +import sys +from _typeshed import StrPath +from collections.abc import Iterable, Mapping +from email.message import Message +from importlib.abc import MetaPathFinder +from os import PathLike +from pathlib import Path +from re import Pattern +from typing import Any, ClassVar, NamedTuple, overload +from typing_extensions import Self + +__all__ = [ + "Distribution", + "DistributionFinder", + "PackageNotFoundError", + "distribution", + "distributions", + "entry_points", + "files", + "metadata", + "requires", + "version", +] + +if sys.version_info >= (3, 10): + __all__ += ["PackageMetadata", "packages_distributions"] + +if sys.version_info >= (3, 10): + from importlib.metadata._meta import PackageMetadata as PackageMetadata + def packages_distributions() -> Mapping[str, list[str]]: ... + +class PackageNotFoundError(ModuleNotFoundError): + @property + def name(self) -> str: ... # type: ignore[override] + +class _EntryPointBase(NamedTuple): + name: str + value: str + group: str + +class EntryPoint(_EntryPointBase): + pattern: ClassVar[Pattern[str]] + if sys.version_info >= (3, 11): + def __init__(self, name: str, value: str, group: str) -> None: ... + + def load(self) -> Any: ... # Callable[[], Any] or an importable module + @property + def extras(self) -> list[str]: ... + if sys.version_info >= (3, 9): + @property + def module(self) -> str: ... + @property + def attr(self) -> str: ... + if sys.version_info >= (3, 10): + dist: ClassVar[Distribution | None] + def matches( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> bool: ... # undocumented + +if sys.version_info >= (3, 10): + class EntryPoints(list[EntryPoint]): # use as list is deprecated since 3.10 + # int argument is deprecated since 3.10 + def __getitem__(self, name: int | str) -> EntryPoint: ... # type: ignore[override] + def select( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> EntryPoints: ... + @property + def names(self) -> set[str]: ... + @property + def groups(self) -> set[str]: ... + + class SelectableGroups(dict[str, EntryPoints]): # use as dict is deprecated since 3.10 + @classmethod + def load(cls, eps: Iterable[EntryPoint]) -> Self: ... + @property + def groups(self) -> set[str]: ... + @property + def names(self) -> set[str]: ... + @overload + def select(self) -> Self: ... # type: ignore[misc] + @overload + def select( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> EntryPoints: ... + +class PackagePath(pathlib.PurePosixPath): + def read_text(self, encoding: str = "utf-8") -> str: ... + def read_binary(self) -> bytes: ... + def locate(self) -> PathLike[str]: ... 
+ # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: + hash: FileHash | None + size: int | None + dist: Distribution + +class FileHash: + mode: str + value: str + def __init__(self, spec: str) -> None: ... + +class Distribution: + @abc.abstractmethod + def read_text(self, filename: str) -> str | None: ... + @abc.abstractmethod + def locate_file(self, path: StrPath) -> PathLike[str]: ... + @classmethod + def from_name(cls, name: str) -> Distribution: ... + @overload + @classmethod + def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: ... + @overload + @classmethod + def discover( + cls, *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any + ) -> Iterable[Distribution]: ... + @staticmethod + def at(path: StrPath) -> PathDistribution: ... + + if sys.version_info >= (3, 10): + @property + def metadata(self) -> PackageMetadata: ... + @property + def entry_points(self) -> EntryPoints: ... + else: + @property + def metadata(self) -> Message: ... + @property + def entry_points(self) -> list[EntryPoint]: ... + + @property + def version(self) -> str: ... + @property + def files(self) -> list[PackagePath] | None: ... + @property + def requires(self) -> list[str] | None: ... + if sys.version_info >= (3, 10): + @property + def name(self) -> str: ... + +class DistributionFinder(MetaPathFinder): + class Context: + name: str | None + def __init__(self, *, name: str | None = ..., path: list[str] = ..., **kwargs: Any) -> None: ... + @property + def path(self) -> list[str]: ... + + @abc.abstractmethod + def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: ... + +class MetadataPathFinder(DistributionFinder): + @classmethod + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + if sys.version_info >= (3, 10): + # Yes, this is an instance method that has an argument named "cls" + def invalidate_caches(cls) -> None: ... + +class PathDistribution(Distribution): + def __init__(self, path: Path) -> None: ... + def read_text(self, filename: StrPath) -> str: ... + def locate_file(self, path: StrPath) -> PathLike[str]: ... + +def distribution(distribution_name: str) -> Distribution: ... +@overload +def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... +@overload +def distributions( + *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any +) -> Iterable[Distribution]: ... + +if sys.version_info >= (3, 10): + def metadata(distribution_name: str) -> PackageMetadata: ... + @overload + def entry_points() -> SelectableGroups: ... # type: ignore[misc] + @overload + def entry_points( + *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... + ) -> EntryPoints: ... + +else: + def metadata(distribution_name: str) -> Message: ... + def entry_points() -> dict[str, list[EntryPoint]]: ... + +def version(distribution_name: str) -> str: ... +def files(distribution_name: str) -> list[PackagePath] | None: ... +def requires(distribution_name: str) -> list[str] | None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/metadata/_meta.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/metadata/_meta.pyi new file mode 100644 index 00000000..e3504fe4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/metadata/_meta.pyi @@ -0,0 +1,22 @@ +from collections.abc import Iterator +from typing import Any, Protocol, TypeVar + +_T = TypeVar("_T") + +class PackageMetadata(Protocol): + def __len__(self) -> int: ... + def __contains__(self, item: str) -> bool: ... + def __getitem__(self, key: str) -> str: ... + def __iter__(self) -> Iterator[str]: ... + def get_all(self, name: str, failobj: _T = ...) -> list[Any] | _T: ... + @property + def json(self) -> dict[str, str | list[str]]: ... + +class SimplePath(Protocol): + def joinpath(self) -> SimplePath: ... + def parent(self) -> SimplePath: ... + def read_text(self) -> str: ... + # There was a bug in `SimplePath` definition in cpython, see #8451 + # Strictly speaking `__div__` was defined in 3.10, not __truediv__, + # but it should have always been `__truediv__`. + def __truediv__(self) -> SimplePath: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/resources.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/resources.pyi new file mode 100644 index 00000000..ba3d9b08 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/resources.pyi @@ -0,0 +1,39 @@ +import os +import sys +from collections.abc import Iterator +from contextlib import AbstractContextManager +from pathlib import Path +from types import ModuleType +from typing import Any, BinaryIO, TextIO +from typing_extensions import TypeAlias + +__all__ = ["Package", "Resource", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"] + +if sys.version_info >= (3, 9): + __all__ += ["as_file", "files"] + +if sys.version_info >= (3, 10): + __all__ += ["ResourceReader"] + +Package: TypeAlias = str | ModuleType + +if sys.version_info >= (3, 11): + Resource: TypeAlias = str +else: + Resource: TypeAlias = str | os.PathLike[Any] + +def open_binary(package: Package, resource: Resource) -> BinaryIO: ... +def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: ... +def read_binary(package: Package, resource: Resource) -> bytes: ... +def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: ... +def path(package: Package, resource: Resource) -> AbstractContextManager[Path]: ... +def is_resource(package: Package, name: str) -> bool: ... +def contents(package: Package) -> Iterator[str]: ... + +if sys.version_info >= (3, 9): + from importlib.abc import Traversable + def files(package: Package) -> Traversable: ... + def as_file(path: Traversable) -> AbstractContextManager[Path]: ... 
+ +if sys.version_info >= (3, 10): + from importlib.abc import ResourceReader as ResourceReader diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/util.pyi new file mode 100644 index 00000000..f988eb27 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/importlib/util.pyi @@ -0,0 +1,40 @@ +import importlib.abc +import importlib.machinery +import types +from _typeshed import ReadableBuffer, StrOrBytesPath +from collections.abc import Callable +from typing import Any +from typing_extensions import ParamSpec + +_P = ParamSpec("_P") + +def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... +def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... +def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... +def resolve_name(name: str, package: str | None) -> str: ... + +MAGIC_NUMBER: bytes + +def cache_from_source(path: str, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: ... +def source_from_cache(path: str) -> str: ... +def decode_source(source_bytes: ReadableBuffer) -> str: ... +def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: ... +def spec_from_loader( + name: str, loader: importlib.abc.Loader | None, *, origin: str | None = None, is_package: bool | None = None +) -> importlib.machinery.ModuleSpec | None: ... +def spec_from_file_location( + name: str, + location: StrOrBytesPath | None = None, + *, + loader: importlib.abc.Loader | None = None, + submodule_search_locations: list[str] | None = ..., +) -> importlib.machinery.ModuleSpec | None: ... +def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: ... + +class LazyLoader(importlib.abc.Loader): + def __init__(self, loader: importlib.abc.Loader) -> None: ... + @classmethod + def factory(cls, loader: importlib.abc.Loader) -> Callable[..., LazyLoader]: ... + def exec_module(self, module: types.ModuleType) -> None: ... + +def source_hash(source_bytes: ReadableBuffer) -> int: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/inspect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/inspect.pyi new file mode 100644 index 00000000..2525ef49 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/inspect.pyi @@ -0,0 +1,592 @@ +import dis +import enum +import sys +import types +from collections import OrderedDict +from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine, Generator, Mapping, Sequence, Set as AbstractSet +from types import ( + AsyncGeneratorType, + BuiltinFunctionType, + BuiltinMethodType, + ClassMethodDescriptorType, + CodeType, + CoroutineType, + FrameType, + FunctionType, + GeneratorType, + GetSetDescriptorType, + LambdaType, + MemberDescriptorType, + MethodDescriptorType, + MethodType, + MethodWrapperType, + ModuleType, + TracebackType, + WrapperDescriptorType, +) +from typing import Any, ClassVar, NamedTuple, Protocol, TypeVar, overload +from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypeGuard + +if sys.version_info >= (3, 11): + __all__ = [ + "ArgInfo", + "Arguments", + "Attribute", + "BlockFinder", + "BoundArguments", + "CORO_CLOSED", + "CORO_CREATED", + "CORO_RUNNING", + "CORO_SUSPENDED", + "CO_ASYNC_GENERATOR", + "CO_COROUTINE", + "CO_GENERATOR", + "CO_ITERABLE_COROUTINE", + "CO_NESTED", + "CO_NEWLOCALS", + "CO_NOFREE", + "CO_OPTIMIZED", + "CO_VARARGS", + "CO_VARKEYWORDS", + "ClassFoundException", + "ClosureVars", + "EndOfBlock", + "FrameInfo", + "FullArgSpec", + "GEN_CLOSED", + "GEN_CREATED", + "GEN_RUNNING", + "GEN_SUSPENDED", + "Parameter", + "Signature", + "TPFLAGS_IS_ABSTRACT", + "Traceback", + "classify_class_attrs", + "cleandoc", + "currentframe", + "findsource", + "formatannotation", + "formatannotationrelativeto", + "formatargvalues", + "get_annotations", + "getabsfile", + "getargs", + "getargvalues", + "getattr_static", + "getblock", + "getcallargs", + "getclasstree", + "getclosurevars", + "getcomments", + "getcoroutinelocals", + "getcoroutinestate", + "getdoc", + "getfile", + "getframeinfo", + "getfullargspec", + "getgeneratorlocals", + "getgeneratorstate", + "getinnerframes", + "getlineno", + "getmembers", + "getmembers_static", + "getmodule", + "getmodulename", + "getmro", + "getouterframes", + "getsource", + "getsourcefile", + "getsourcelines", + "indentsize", + "isabstract", + "isasyncgen", + "isasyncgenfunction", + "isawaitable", + "isbuiltin", + "isclass", + "iscode", + "iscoroutine", + "iscoroutinefunction", + "isdatadescriptor", + "isframe", + "isfunction", + "isgenerator", + "isgeneratorfunction", + "isgetsetdescriptor", + "ismemberdescriptor", + "ismethod", + "ismethoddescriptor", + "ismethodwrapper", + "ismodule", + "isroutine", + "istraceback", + "signature", + "stack", + "trace", + "unwrap", + "walktree", + ] + +_P = ParamSpec("_P") +_T = TypeVar("_T") +_T_cont = TypeVar("_T_cont", contravariant=True) +_V_cont = TypeVar("_V_cont", contravariant=True) + +# +# Types and members +# +class EndOfBlock(Exception): ... + +class BlockFinder: + indent: int + islambda: bool + started: bool + passline: bool + indecorator: bool + decoratorhasargs: bool + last: int + def tokeneater(self, type: int, token: str, srowcol: tuple[int, int], erowcol: tuple[int, int], line: str) -> None: ... 
+ +CO_OPTIMIZED: Literal[1] +CO_NEWLOCALS: Literal[2] +CO_VARARGS: Literal[4] +CO_VARKEYWORDS: Literal[8] +CO_NESTED: Literal[16] +CO_GENERATOR: Literal[32] +CO_NOFREE: Literal[64] +CO_COROUTINE: Literal[128] +CO_ITERABLE_COROUTINE: Literal[256] +CO_ASYNC_GENERATOR: Literal[512] +TPFLAGS_IS_ABSTRACT: Literal[1048576] + +modulesbyfile: dict[str, Any] + +_GetMembersPredicate: TypeAlias = Callable[[Any], bool] +_GetMembersReturn: TypeAlias = list[tuple[str, Any]] + +def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ... + +if sys.version_info >= (3, 11): + def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ... + +def getmodulename(path: str) -> str | None: ... +def ismodule(object: object) -> TypeGuard[ModuleType]: ... +def isclass(object: object) -> TypeGuard[type[Any]]: ... +def ismethod(object: object) -> TypeGuard[MethodType]: ... +def isfunction(object: object) -> TypeGuard[FunctionType]: ... + +if sys.version_info >= (3, 8): + @overload + def isgeneratorfunction(obj: Callable[..., Generator[Any, Any, Any]]) -> bool: ... + @overload + def isgeneratorfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, GeneratorType[Any, Any, Any]]]: ... + @overload + def isgeneratorfunction(obj: object) -> TypeGuard[Callable[..., GeneratorType[Any, Any, Any]]]: ... + @overload + def iscoroutinefunction(obj: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... + @overload + def iscoroutinefunction(obj: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, _T]]]: ... + @overload + def iscoroutinefunction(obj: Callable[_P, object]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, Any]]]: ... + @overload + def iscoroutinefunction(obj: object) -> TypeGuard[Callable[..., CoroutineType[Any, Any, Any]]]: ... + +else: + @overload + def isgeneratorfunction(object: Callable[..., Generator[Any, Any, Any]]) -> bool: ... + @overload + def isgeneratorfunction(object: Callable[_P, Any]) -> TypeGuard[Callable[_P, GeneratorType[Any, Any, Any]]]: ... + @overload + def isgeneratorfunction(object: object) -> TypeGuard[Callable[..., GeneratorType[Any, Any, Any]]]: ... + @overload + def iscoroutinefunction(object: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... + @overload + def iscoroutinefunction(object: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, _T]]]: ... + @overload + def iscoroutinefunction(object: Callable[_P, Any]) -> TypeGuard[Callable[_P, CoroutineType[Any, Any, Any]]]: ... + @overload + def iscoroutinefunction(object: object) -> TypeGuard[Callable[..., CoroutineType[Any, Any, Any]]]: ... + +def isgenerator(object: object) -> TypeGuard[GeneratorType[Any, Any, Any]]: ... +def iscoroutine(object: object) -> TypeGuard[CoroutineType[Any, Any, Any]]: ... +def isawaitable(object: object) -> TypeGuard[Awaitable[Any]]: ... + +if sys.version_info >= (3, 8): + @overload + def isasyncgenfunction(obj: Callable[..., AsyncGenerator[Any, Any]]) -> bool: ... + @overload + def isasyncgenfunction(obj: Callable[_P, Any]) -> TypeGuard[Callable[_P, AsyncGeneratorType[Any, Any]]]: ... + @overload + def isasyncgenfunction(obj: object) -> TypeGuard[Callable[..., AsyncGeneratorType[Any, Any]]]: ... + +else: + @overload + def isasyncgenfunction(object: Callable[..., AsyncGenerator[Any, Any]]) -> bool: ... + @overload + def isasyncgenfunction(object: Callable[_P, Any]) -> TypeGuard[Callable[_P, AsyncGeneratorType[Any, Any]]]: ... 
+ @overload + def isasyncgenfunction(object: object) -> TypeGuard[Callable[..., AsyncGeneratorType[Any, Any]]]: ... + +class _SupportsSet(Protocol[_T_cont, _V_cont]): + def __set__(self, __instance: _T_cont, __value: _V_cont) -> None: ... + +class _SupportsDelete(Protocol[_T_cont]): + def __delete__(self, __instance: _T_cont) -> None: ... + +def isasyncgen(object: object) -> TypeGuard[AsyncGeneratorType[Any, Any]]: ... +def istraceback(object: object) -> TypeGuard[TracebackType]: ... +def isframe(object: object) -> TypeGuard[FrameType]: ... +def iscode(object: object) -> TypeGuard[CodeType]: ... +def isbuiltin(object: object) -> TypeGuard[BuiltinFunctionType]: ... + +if sys.version_info >= (3, 11): + def ismethodwrapper(object: object) -> TypeGuard[MethodWrapperType]: ... + +def isroutine( + object: object, +) -> TypeGuard[ + FunctionType + | LambdaType + | MethodType + | BuiltinFunctionType + | BuiltinMethodType + | WrapperDescriptorType + | MethodDescriptorType + | ClassMethodDescriptorType +]: ... +def ismethoddescriptor(object: object) -> TypeGuard[MethodDescriptorType]: ... +def ismemberdescriptor(object: object) -> TypeGuard[MemberDescriptorType]: ... +def isabstract(object: object) -> bool: ... +def isgetsetdescriptor(object: object) -> TypeGuard[GetSetDescriptorType]: ... +def isdatadescriptor(object: object) -> TypeGuard[_SupportsSet[Any, Any] | _SupportsDelete[Any]]: ... + +# +# Retrieving source code +# +_SourceObjectType: TypeAlias = ( + ModuleType | type[Any] | MethodType | FunctionType | TracebackType | FrameType | CodeType | Callable[..., Any] +) + +def findsource(object: _SourceObjectType) -> tuple[list[str], int]: ... +def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: ... +def getblock(lines: Sequence[str]) -> Sequence[str]: ... +def getdoc(object: object) -> str | None: ... +def getcomments(object: object) -> str | None: ... +def getfile(object: _SourceObjectType) -> str: ... +def getmodule(object: object, _filename: str | None = None) -> ModuleType | None: ... +def getsourcefile(object: _SourceObjectType) -> str | None: ... +def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: ... +def getsource(object: _SourceObjectType) -> str: ... +def cleandoc(doc: str) -> str: ... +def indentsize(line: str) -> int: ... + +_IntrospectableCallable: TypeAlias = Callable[..., Any] + +# +# Introspecting callables with the Signature object +# +if sys.version_info >= (3, 10): + def signature( + obj: _IntrospectableCallable, + *, + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, + ) -> Signature: ... + +else: + def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Signature: ... + +class _void: ... +class _empty: ... + +class Signature: + def __init__( + self, parameters: Sequence[Parameter] | None = None, *, return_annotation: Any = ..., __validate_parameters__: bool = True + ) -> None: ... + empty = _empty + @property + def parameters(self) -> types.MappingProxyType[str, Parameter]: ... + @property + def return_annotation(self) -> Any: ... + def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: ... + def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: ... + def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: ... 
+ if sys.version_info >= (3, 10): + @classmethod + def from_callable( + cls, + obj: _IntrospectableCallable, + *, + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, + ) -> Self: ... + else: + @classmethod + def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... + + def __eq__(self, other: object) -> bool: ... + +if sys.version_info >= (3, 10): + def get_annotations( + obj: Callable[..., object] | type[Any] | ModuleType, + *, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, + ) -> dict[str, Any]: ... + +# The name is the same as the enum's name in CPython +class _ParameterKind(enum.IntEnum): + POSITIONAL_ONLY: int + POSITIONAL_OR_KEYWORD: int + VAR_POSITIONAL: int + KEYWORD_ONLY: int + VAR_KEYWORD: int + + if sys.version_info >= (3, 8): + @property + def description(self) -> str: ... + +class Parameter: + def __init__(self, name: str, kind: _ParameterKind, *, default: Any = ..., annotation: Any = ...) -> None: ... + empty = _empty + + POSITIONAL_ONLY: ClassVar[Literal[_ParameterKind.POSITIONAL_ONLY]] + POSITIONAL_OR_KEYWORD: ClassVar[Literal[_ParameterKind.POSITIONAL_OR_KEYWORD]] + VAR_POSITIONAL: ClassVar[Literal[_ParameterKind.VAR_POSITIONAL]] + KEYWORD_ONLY: ClassVar[Literal[_ParameterKind.KEYWORD_ONLY]] + VAR_KEYWORD: ClassVar[Literal[_ParameterKind.VAR_KEYWORD]] + @property + def name(self) -> str: ... + @property + def default(self) -> Any: ... + @property + def kind(self) -> _ParameterKind: ... + @property + def annotation(self) -> Any: ... + def replace( + self, + *, + name: str | type[_void] = ..., + kind: _ParameterKind | type[_void] = ..., + default: Any = ..., + annotation: Any = ..., + ) -> Self: ... + def __eq__(self, other: object) -> bool: ... + +class BoundArguments: + arguments: OrderedDict[str, Any] + @property + def args(self) -> tuple[Any, ...]: ... + @property + def kwargs(self) -> dict[str, Any]: ... + @property + def signature(self) -> Signature: ... + def __init__(self, signature: Signature, arguments: OrderedDict[str, Any]) -> None: ... + def apply_defaults(self) -> None: ... + def __eq__(self, other: object) -> bool: ... + +# +# Classes and functions +# + +# TODO: The actual return type should be list[_ClassTreeItem] but mypy doesn't +# seem to be supporting this at the moment: +# _ClassTreeItem = list[_ClassTreeItem] | Tuple[type, Tuple[type, ...]] +def getclasstree(classes: list[type], unique: bool = False) -> list[Any]: ... +def walktree(classes: list[type], children: Mapping[type[Any], list[type]], parent: type[Any] | None) -> list[Any]: ... + +class Arguments(NamedTuple): + args: list[str] + varargs: str | None + varkw: str | None + +def getargs(co: CodeType) -> Arguments: ... + +if sys.version_info < (3, 11): + class ArgSpec(NamedTuple): + args: list[str] + varargs: str | None + keywords: str | None + defaults: tuple[Any, ...] + def getargspec(func: object) -> ArgSpec: ... + +class FullArgSpec(NamedTuple): + args: list[str] + varargs: str | None + varkw: str | None + defaults: tuple[Any, ...] | None + kwonlyargs: list[str] + kwonlydefaults: dict[str, Any] | None + annotations: dict[str, Any] + +def getfullargspec(func: object) -> FullArgSpec: ... + +class ArgInfo(NamedTuple): + args: list[str] + varargs: str | None + keywords: str | None + locals: dict[str, Any] + +def getargvalues(frame: FrameType) -> ArgInfo: ... 
+def formatannotation(annotation: object, base_module: str | None = None) -> str: ... +def formatannotationrelativeto(object: object) -> Callable[[object], str]: ... + +if sys.version_info < (3, 11): + def formatargspec( + args: list[str], + varargs: str | None = None, + varkw: str | None = None, + defaults: tuple[Any, ...] | None = None, + kwonlyargs: Sequence[str] | None = ..., + kwonlydefaults: Mapping[str, Any] | None = ..., + annotations: Mapping[str, Any] = ..., + formatarg: Callable[[str], str] = ..., + formatvarargs: Callable[[str], str] = ..., + formatvarkw: Callable[[str], str] = ..., + formatvalue: Callable[[Any], str] = ..., + formatreturns: Callable[[Any], str] = ..., + formatannotation: Callable[[Any], str] = ..., + ) -> str: ... + +def formatargvalues( + args: list[str], + varargs: str | None, + varkw: str | None, + locals: Mapping[str, Any] | None, + formatarg: Callable[[str], str] | None = ..., + formatvarargs: Callable[[str], str] | None = ..., + formatvarkw: Callable[[str], str] | None = ..., + formatvalue: Callable[[Any], str] | None = ..., +) -> str: ... +def getmro(cls: type) -> tuple[type, ...]: ... +def getcallargs(__func: Callable[_P, Any], *args: _P.args, **kwds: _P.kwargs) -> dict[str, Any]: ... + +class ClosureVars(NamedTuple): + nonlocals: Mapping[str, Any] + globals: Mapping[str, Any] + builtins: Mapping[str, Any] + unbound: AbstractSet[str] + +def getclosurevars(func: _IntrospectableCallable) -> ClosureVars: ... +def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = None) -> Any: ... + +# +# The interpreter stack +# + +if sys.version_info >= (3, 11): + class _Traceback(NamedTuple): + filename: str + lineno: int + function: str + code_context: list[str] | None + index: int | None # type: ignore[assignment] + + class Traceback(_Traceback): + positions: dis.Positions | None + def __new__( + cls, + filename: str, + lineno: int, + function: str, + code_context: list[str] | None, + index: int | None, + *, + positions: dis.Positions | None = None, + ) -> Self: ... + + class _FrameInfo(NamedTuple): + frame: FrameType + filename: str + lineno: int + function: str + code_context: list[str] | None + index: int | None # type: ignore[assignment] + + class FrameInfo(_FrameInfo): + positions: dis.Positions | None + def __new__( + cls, + frame: FrameType, + filename: str, + lineno: int, + function: str, + code_context: list[str] | None, + index: int | None, + *, + positions: dis.Positions | None = None, + ) -> Self: ... + +else: + class Traceback(NamedTuple): + filename: str + lineno: int + function: str + code_context: list[str] | None + index: int | None # type: ignore[assignment] + + class FrameInfo(NamedTuple): + frame: FrameType + filename: str + lineno: int + function: str + code_context: list[str] | None + index: int | None # type: ignore[assignment] + +def getframeinfo(frame: FrameType | TracebackType, context: int = 1) -> Traceback: ... +def getouterframes(frame: Any, context: int = 1) -> list[FrameInfo]: ... +def getinnerframes(tb: TracebackType, context: int = 1) -> list[FrameInfo]: ... +def getlineno(frame: FrameType) -> int: ... +def currentframe() -> FrameType | None: ... +def stack(context: int = 1) -> list[FrameInfo]: ... +def trace(context: int = 1) -> list[FrameInfo]: ... + +# +# Fetching attributes statically +# + +def getattr_static(obj: object, attr: str, default: Any | None = ...) -> Any: ... 
+ +# +# Current State of Generators and Coroutines +# + +GEN_CREATED: Literal["GEN_CREATED"] +GEN_RUNNING: Literal["GEN_RUNNING"] +GEN_SUSPENDED: Literal["GEN_SUSPENDED"] +GEN_CLOSED: Literal["GEN_CLOSED"] + +def getgeneratorstate( + generator: Generator[Any, Any, Any] +) -> Literal["GEN_CREATED", "GEN_RUNNING", "GEN_SUSPENDED", "GEN_CLOSED"]: ... + +CORO_CREATED: Literal["CORO_CREATED"] +CORO_RUNNING: Literal["CORO_RUNNING"] +CORO_SUSPENDED: Literal["CORO_SUSPENDED"] +CORO_CLOSED: Literal["CORO_CLOSED"] + +def getcoroutinestate( + coroutine: Coroutine[Any, Any, Any] +) -> Literal["CORO_CREATED", "CORO_RUNNING", "CORO_SUSPENDED", "CORO_CLOSED"]: ... +def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> dict[str, Any]: ... +def getcoroutinelocals(coroutine: Coroutine[Any, Any, Any]) -> dict[str, Any]: ... + +# Create private type alias to avoid conflict with symbol of same +# name created in Attribute class. +_Object: TypeAlias = object + +class Attribute(NamedTuple): + name: str + kind: Literal["class method", "static method", "property", "method", "data"] + defining_class: type + object: _Object + +def classify_class_attrs(cls: type) -> list[Attribute]: ... + +if sys.version_info >= (3, 9): + class ClassFoundException(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/io.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/io.pyi new file mode 100644 index 00000000..c3e07bac --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/io.pyi @@ -0,0 +1,196 @@ +import abc +import builtins +import codecs +import sys +from _typeshed import FileDescriptorOrPath, ReadableBuffer, WriteableBuffer +from collections.abc import Callable, Iterable, Iterator +from os import _Opener +from types import TracebackType +from typing import IO, Any, BinaryIO, TextIO +from typing_extensions import Literal, Self + +__all__ = [ + "BlockingIOError", + "open", + "IOBase", + "RawIOBase", + "FileIO", + "BytesIO", + "StringIO", + "BufferedIOBase", + "BufferedReader", + "BufferedWriter", + "BufferedRWPair", + "BufferedRandom", + "TextIOBase", + "TextIOWrapper", + "UnsupportedOperation", + "SEEK_SET", + "SEEK_CUR", + "SEEK_END", +] + +if sys.version_info >= (3, 8): + __all__ += ["open_code"] + +DEFAULT_BUFFER_SIZE: Literal[8192] + +SEEK_SET: Literal[0] +SEEK_CUR: Literal[1] +SEEK_END: Literal[2] + +open = builtins.open + +if sys.version_info >= (3, 8): + def open_code(path: str) -> IO[bytes]: ... + +BlockingIOError = builtins.BlockingIOError + +class UnsupportedOperation(OSError, ValueError): ... + +class IOBase(metaclass=abc.ABCMeta): + def __iter__(self) -> Iterator[bytes]: ... + def __next__(self) -> bytes: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... + read: Callable[..., Any] + def readlines(self, __hint: int = -1) -> list[bytes]: ... + def seek(self, __offset: int, __whence: int = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def truncate(self, __size: int | None = ...) -> int: ... + def writable(self) -> bool: ... + write: Callable[..., Any] + def writelines(self, __lines: Iterable[ReadableBuffer]) -> None: ... 
+ def readline(self, __size: int | None = -1) -> bytes: ... + def __del__(self) -> None: ... + @property + def closed(self) -> bool: ... + def _checkClosed(self, msg: str | None = ...) -> None: ... # undocumented + +class RawIOBase(IOBase): + def readall(self) -> bytes: ... + def readinto(self, __buffer: WriteableBuffer) -> int | None: ... + def write(self, __b: ReadableBuffer) -> int | None: ... + def read(self, __size: int = -1) -> bytes | None: ... + +class BufferedIOBase(IOBase): + raw: RawIOBase # This is not part of the BufferedIOBase API and may not exist on some implementations. + def detach(self) -> RawIOBase: ... + def readinto(self, __buffer: WriteableBuffer) -> int: ... + def write(self, __buffer: ReadableBuffer) -> int: ... + def readinto1(self, __buffer: WriteableBuffer) -> int: ... + def read(self, __size: int | None = ...) -> bytes: ... + def read1(self, __size: int = ...) -> bytes: ... + +class FileIO(RawIOBase, BinaryIO): + mode: str + name: FileDescriptorOrPath # type: ignore[assignment] + def __init__( + self, file: FileDescriptorOrPath, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ... + ) -> None: ... + @property + def closefd(self) -> bool: ... + def write(self, __b: ReadableBuffer) -> int: ... + def read(self, __size: int = -1) -> bytes: ... + def __enter__(self) -> Self: ... + +class BytesIO(BufferedIOBase, BinaryIO): + def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ... + # BytesIO does not contain a "name" field. This workaround is necessary + # to allow BytesIO sub-classes to add this field, as it is defined + # as a read-only property on IO[]. + name: Any + def __enter__(self) -> Self: ... + def getvalue(self) -> bytes: ... + def getbuffer(self) -> memoryview: ... + def read1(self, __size: int | None = -1) -> bytes: ... + +class BufferedReader(BufferedIOBase, BinaryIO): + def __enter__(self) -> Self: ... + def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... + def peek(self, __size: int = 0) -> bytes: ... + +class BufferedWriter(BufferedIOBase, BinaryIO): + def __enter__(self) -> Self: ... + def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... + def write(self, __buffer: ReadableBuffer) -> int: ... + +class BufferedRandom(BufferedReader, BufferedWriter): + def __enter__(self) -> Self: ... + def seek(self, __target: int, __whence: int = 0) -> int: ... # stubtest needs this + +class BufferedRWPair(BufferedIOBase): + def __init__(self, reader: RawIOBase, writer: RawIOBase, buffer_size: int = ...) -> None: ... + def peek(self, __size: int = ...) -> bytes: ... + +class TextIOBase(IOBase): + encoding: str + errors: str | None + newlines: str | tuple[str, ...] | None + def __iter__(self) -> Iterator[str]: ... # type: ignore[override] + def __next__(self) -> str: ... # type: ignore[override] + def detach(self) -> BinaryIO: ... + def write(self, __s: str) -> int: ... + def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] + def readline(self, __size: int = ...) -> str: ... # type: ignore[override] + def readlines(self, __hint: int = -1) -> list[str]: ... # type: ignore[override] + def read(self, __size: int | None = ...) -> str: ... + +class TextIOWrapper(TextIOBase, TextIO): + def __init__( + self, + buffer: IO[bytes], + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + line_buffering: bool = ..., + write_through: bool = ..., + ) -> None: ... + @property + def buffer(self) -> BinaryIO: ... + @property + def closed(self) -> bool: ... 
+ @property + def line_buffering(self) -> bool: ... + @property + def write_through(self) -> bool: ... + def reconfigure( + self, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool | None = None, + write_through: bool | None = None, + ) -> None: ... + # These are inherited from TextIOBase, but must exist in the stub to satisfy mypy. + def __enter__(self) -> Self: ... + def __iter__(self) -> Iterator[str]: ... # type: ignore[override] + def __next__(self) -> str: ... # type: ignore[override] + def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] + def readline(self, __size: int = -1) -> str: ... # type: ignore[override] + def readlines(self, __hint: int = -1) -> list[str]: ... # type: ignore[override] + def seek(self, __cookie: int, __whence: int = 0) -> int: ... # stubtest needs this + +class StringIO(TextIOWrapper): + def __init__(self, initial_value: str | None = ..., newline: str | None = ...) -> None: ... + # StringIO does not contain a "name" field. This workaround is necessary + # to allow StringIO sub-classes to add this field, as it is defined + # as a read-only property on IO[]. + name: Any + def getvalue(self) -> str: ... + +class IncrementalNewlineDecoder(codecs.IncrementalDecoder): + def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = ...) -> None: ... + def decode(self, input: ReadableBuffer | str, final: bool = False) -> str: ... + @property + def newlines(self) -> str | tuple[str, ...] | None: ... + def setstate(self, __state: tuple[bytes, int]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ipaddress.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ipaddress.pyi new file mode 100644 index 00000000..9f966213 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ipaddress.pyi @@ -0,0 +1,196 @@ +import sys +from collections.abc import Container, Iterable, Iterator +from typing import Any, Generic, SupportsInt, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +# Undocumented length constants +IPV4LENGTH: Literal[32] +IPV6LENGTH: Literal[128] + +_A = TypeVar("_A", IPv4Address, IPv6Address) +_N = TypeVar("_N", IPv4Network, IPv6Network) + +_RawIPAddress: TypeAlias = int | str | bytes | IPv4Address | IPv6Address +_RawNetworkPart: TypeAlias = IPv4Network | IPv6Network | IPv4Interface | IPv6Interface + +def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: ... +def ip_network( + address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = True +) -> IPv4Network | IPv6Network: ... +def ip_interface( + address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int] +) -> IPv4Interface | IPv6Interface: ... + +class _IPAddressBase: + @property + def compressed(self) -> str: ... + @property + def exploded(self) -> str: ... + @property + def reverse_pointer(self) -> str: ... + @property + def version(self) -> int: ... + +class _BaseAddress(_IPAddressBase, SupportsInt): + def __init__(self, address: object) -> None: ... + def __add__(self, other: int) -> Self: ... + def __int__(self) -> int: ... + def __sub__(self, other: int) -> Self: ... + def __format__(self, fmt: str) -> str: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Self) -> bool: ... 
+ if sys.version_info >= (3, 11): + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... + else: + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + + @property + def is_global(self) -> bool: ... + @property + def is_link_local(self) -> bool: ... + @property + def is_loopback(self) -> bool: ... + @property + def is_multicast(self) -> bool: ... + @property + def is_private(self) -> bool: ... + @property + def is_reserved(self) -> bool: ... + @property + def is_unspecified(self) -> bool: ... + @property + def max_prefixlen(self) -> int: ... + @property + def packed(self) -> bytes: ... + +class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): + network_address: _A + netmask: _A + def __init__(self, address: object, strict: bool = ...) -> None: ... + def __contains__(self, other: Any) -> bool: ... + def __getitem__(self, n: int) -> _A: ... + def __iter__(self) -> Iterator[_A]: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Self) -> bool: ... + if sys.version_info >= (3, 11): + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... + else: + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + + def address_exclude(self, other: Self) -> Iterator[Self]: ... + @property + def broadcast_address(self) -> _A: ... + def compare_networks(self, other: Self) -> int: ... + def hosts(self) -> Iterator[_A]: ... + @property + def is_global(self) -> bool: ... + @property + def is_link_local(self) -> bool: ... + @property + def is_loopback(self) -> bool: ... + @property + def is_multicast(self) -> bool: ... + @property + def is_private(self) -> bool: ... + @property + def is_reserved(self) -> bool: ... + @property + def is_unspecified(self) -> bool: ... + @property + def max_prefixlen(self) -> int: ... + @property + def num_addresses(self) -> int: ... + def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: ... + @property + def prefixlen(self) -> int: ... + def subnet_of(self, other: Self) -> bool: ... + def supernet_of(self, other: Self) -> bool: ... + def subnets(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: ... + def supernet(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: ... + @property + def with_hostmask(self) -> str: ... + @property + def with_netmask(self) -> str: ... + @property + def with_prefixlen(self) -> str: ... + @property + def hostmask(self) -> _A: ... + +class _BaseInterface(_BaseAddress, Generic[_A, _N]): + hostmask: _A + netmask: _A + network: _N + @property + def ip(self) -> _A: ... + @property + def with_hostmask(self) -> str: ... + @property + def with_netmask(self) -> str: ... + @property + def with_prefixlen(self) -> str: ... + +class _BaseV4: + @property + def version(self) -> Literal[4]: ... + @property + def max_prefixlen(self) -> Literal[32]: ... + +class IPv4Address(_BaseV4, _BaseAddress): ... +class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): ... +class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): ... 
+ +class _BaseV6: + @property + def version(self) -> Literal[6]: ... + @property + def max_prefixlen(self) -> Literal[128]: ... + +class IPv6Address(_BaseV6, _BaseAddress): + @property + def ipv4_mapped(self) -> IPv4Address | None: ... + @property + def is_site_local(self) -> bool: ... + @property + def sixtofour(self) -> IPv4Address | None: ... + @property + def teredo(self) -> tuple[IPv4Address, IPv4Address] | None: ... + if sys.version_info >= (3, 9): + @property + def scope_id(self) -> str | None: ... + +class IPv6Network(_BaseV6, _BaseNetwork[IPv6Address]): + @property + def is_site_local(self) -> bool: ... + +class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): ... + +def v4_int_to_packed(address: int) -> bytes: ... +def v6_int_to_packed(address: int) -> bytes: ... + +# Third overload is technically incorrect, but convenient when first and last are return values of ip_address() +@overload +def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ... +@overload +def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ... +@overload +def summarize_address_range( + first: IPv4Address | IPv6Address, last: IPv4Address | IPv6Address +) -> Iterator[IPv4Network] | Iterator[IPv6Network]: ... +def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ... +@overload +def get_mixed_type_key(obj: _A) -> tuple[int, _A]: ... +@overload +def get_mixed_type_key(obj: IPv4Network) -> tuple[int, IPv4Address, IPv4Address]: ... +@overload +def get_mixed_type_key(obj: IPv6Network) -> tuple[int, IPv6Address, IPv6Address]: ... + +class AddressValueError(ValueError): ... +class NetmaskValueError(ValueError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/itertools.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/itertools.pyi new file mode 100644 index 00000000..c7b92c3a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/itertools.pyi @@ -0,0 +1,278 @@ +import sys +from collections.abc import Callable, Iterable, Iterator +from typing import Any, Generic, SupportsComplex, SupportsFloat, SupportsInt, TypeVar, overload +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") +_S = TypeVar("_S") +_N = TypeVar("_N", int, float, SupportsFloat, SupportsInt, SupportsIndex, SupportsComplex) +_T_co = TypeVar("_T_co", covariant=True) +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_T5 = TypeVar("_T5") +_T6 = TypeVar("_T6") + +_Step: TypeAlias = SupportsFloat | SupportsInt | SupportsIndex | SupportsComplex + +_Predicate: TypeAlias = Callable[[_T], object] + +# Technically count can take anything that implements a number protocol and has an add method +# but we can't enforce the add method +class count(Iterator[_N], Generic[_N]): + @overload + def __new__(cls) -> count[int]: ... + @overload + def __new__(cls, start: _N, step: _Step = ...) -> count[_N]: ... + @overload + def __new__(cls, *, step: _N) -> count[_N]: ... + def __next__(self) -> _N: ... + def __iter__(self) -> Self: ... + +class cycle(Iterator[_T], Generic[_T]): + def __init__(self, __iterable: Iterable[_T]) -> None: ... + def __next__(self) -> _T: ... + def __iter__(self) -> Self: ... + +class repeat(Iterator[_T], Generic[_T]): + @overload + def __init__(self, object: _T) -> None: ... 
+ @overload + def __init__(self, object: _T, times: int) -> None: ... + def __next__(self) -> _T: ... + def __iter__(self) -> Self: ... + def __length_hint__(self) -> int: ... + +class accumulate(Iterator[_T], Generic[_T]): + if sys.version_info >= (3, 8): + @overload + def __init__(self, iterable: Iterable[_T], func: None = None, *, initial: _T | None = ...) -> None: ... + @overload + def __init__(self, iterable: Iterable[_S], func: Callable[[_T, _S], _T], *, initial: _T | None = ...) -> None: ... + else: + def __init__(self, iterable: Iterable[_T], func: Callable[[_T, _T], _T] | None = ...) -> None: ... + + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +class chain(Iterator[_T], Generic[_T]): + def __init__(self, *iterables: Iterable[_T]) -> None: ... + def __next__(self) -> _T: ... + def __iter__(self) -> Self: ... + @classmethod + # We use type[Any] and not type[_S] to not lose the type inference from __iterable + def from_iterable(cls: type[Any], __iterable: Iterable[Iterable[_S]]) -> chain[_S]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +class compress(Iterator[_T], Generic[_T]): + def __init__(self, data: Iterable[_T], selectors: Iterable[Any]) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +class dropwhile(Iterator[_T], Generic[_T]): + def __init__(self, __predicate: _Predicate[_T], __iterable: Iterable[_T]) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +class filterfalse(Iterator[_T], Generic[_T]): + def __init__(self, __predicate: _Predicate[_T] | None, __iterable: Iterable[_T]) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +class groupby(Iterator[tuple[_T, Iterator[_S]]], Generic[_T, _S]): + @overload + def __new__(cls, iterable: Iterable[_T1], key: None = None) -> groupby[_T1, _T1]: ... + @overload + def __new__(cls, iterable: Iterable[_T1], key: Callable[[_T1], _T2]) -> groupby[_T2, _T1]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[_T, Iterator[_S]]: ... + +class islice(Iterator[_T], Generic[_T]): + @overload + def __init__(self, __iterable: Iterable[_T], __stop: int | None) -> None: ... + @overload + def __init__(self, __iterable: Iterable[_T], __start: int | None, __stop: int | None, __step: int | None = ...) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +class starmap(Iterator[_T], Generic[_T]): + def __init__(self, __function: Callable[..., _T], __iterable: Iterable[Iterable[Any]]) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +class takewhile(Iterator[_T], Generic[_T]): + def __init__(self, __predicate: _Predicate[_T], __iterable: Iterable[_T]) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + +def tee(__iterable: Iterable[_T], __n: int = 2) -> tuple[Iterator[_T], ...]: ... + +class zip_longest(Iterator[_T_co], Generic[_T_co]): + # one iterable (fillvalue doesn't matter) + @overload + def __new__(cls, __iter1: Iterable[_T1], *, fillvalue: object = ...) -> zip_longest[tuple[_T1]]: ... + # two iterables + @overload + # In the overloads without fillvalue, all of the tuple members could theoretically be None, + # but we return Any instead to avoid false positives for code where we know one of the iterables + # is longer. + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> zip_longest[tuple[_T1 | Any, _T2 | Any]]: ... 
+ @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], *, fillvalue: _T + ) -> zip_longest[tuple[_T1 | _T, _T2 | _T]]: ... + # three iterables + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3] + ) -> zip_longest[tuple[_T1 | Any, _T2 | Any, _T3 | Any]]: ... + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], *, fillvalue: _T + ) -> zip_longest[tuple[_T1 | _T, _T2 | _T, _T3 | _T]]: ... + # four iterables + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4] + ) -> zip_longest[tuple[_T1 | Any, _T2 | Any, _T3 | Any, _T4 | Any]]: ... + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4], *, fillvalue: _T + ) -> zip_longest[tuple[_T1 | _T, _T2 | _T, _T3 | _T, _T4 | _T]]: ... + # five iterables + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + ) -> zip_longest[tuple[_T1 | Any, _T2 | Any, _T3 | Any, _T4 | Any, _T5 | Any]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + *, + fillvalue: _T, + ) -> zip_longest[tuple[_T1 | _T, _T2 | _T, _T3 | _T, _T4 | _T, _T5 | _T]]: ... + # six or more iterables + @overload + def __new__( + cls, + __iter1: Iterable[_T], + __iter2: Iterable[_T], + __iter3: Iterable[_T], + __iter4: Iterable[_T], + __iter5: Iterable[_T], + __iter6: Iterable[_T], + *iterables: Iterable[_T], + ) -> zip_longest[tuple[_T | Any, ...]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T], + __iter2: Iterable[_T], + __iter3: Iterable[_T], + __iter4: Iterable[_T], + __iter5: Iterable[_T], + __iter6: Iterable[_T], + *iterables: Iterable[_T], + fillvalue: _T, + ) -> zip_longest[tuple[_T, ...]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +class product(Iterator[_T_co], Generic[_T_co]): + @overload + def __new__(cls, __iter1: Iterable[_T1]) -> product[tuple[_T1]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> product[tuple[_T1, _T2]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> product[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4] + ) -> product[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + __iter6: Iterable[_T6], + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[Any], + __iter2: Iterable[Any], + __iter3: Iterable[Any], + __iter4: Iterable[Any], + __iter5: Iterable[Any], + __iter6: Iterable[Any], + __iter7: Iterable[Any], + *iterables: Iterable[Any], + ) -> product[tuple[Any, ...]]: ... + @overload + def __new__(cls, *iterables: Iterable[_T1], repeat: int) -> product[tuple[_T1, ...]]: ... 
+ @overload + def __new__(cls, *iterables: Iterable[Any], repeat: int = ...) -> product[tuple[Any, ...]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +class permutations(Iterator[tuple[_T, ...]], Generic[_T]): + def __init__(self, iterable: Iterable[_T], r: int | None = ...) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[_T, ...]: ... + +class combinations(Iterator[_T_co], Generic[_T_co]): + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations[tuple[_T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[3]) -> combinations[tuple[_T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[4]) -> combinations[tuple[_T, _T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations[tuple[_T, _T, _T, _T, _T]]: ... + @overload + def __new__(cls, iterable: Iterable[_T], r: int) -> combinations[tuple[_T, ...]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +class combinations_with_replacement(Iterator[tuple[_T, ...]], Generic[_T]): + def __init__(self, iterable: Iterable[_T], r: int) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[_T, ...]: ... + +if sys.version_info >= (3, 10): + class pairwise(Iterator[_T_co], Generic[_T_co]): + def __new__(cls, __iterable: Iterable[_T]) -> pairwise[tuple[_T, _T]]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T_co: ... + +if sys.version_info >= (3, 12): + class batched(Iterator[_T_co], Generic[_T_co]): + def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[_T_co, ...]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/__init__.pyi new file mode 100644 index 00000000..63e9718e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/__init__.pyi @@ -0,0 +1,61 @@ +from _typeshed import SupportsRead, SupportsWrite +from collections.abc import Callable +from typing import Any + +from .decoder import JSONDecodeError as JSONDecodeError, JSONDecoder as JSONDecoder +from .encoder import JSONEncoder as JSONEncoder + +__all__ = ["dump", "dumps", "load", "loads", "JSONDecoder", "JSONDecodeError", "JSONEncoder"] + +def dumps( + obj: Any, + *, + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + cls: type[JSONEncoder] | None = None, + indent: None | int | str = None, + separators: tuple[str, str] | None = None, + default: Callable[[Any], Any] | None = None, + sort_keys: bool = False, + **kwds: Any, +) -> str: ... +def dump( + obj: Any, + fp: SupportsWrite[str], + *, + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + cls: type[JSONEncoder] | None = None, + indent: None | int | str = None, + separators: tuple[str, str] | None = None, + default: Callable[[Any], Any] | None = None, + sort_keys: bool = False, + **kwds: Any, +) -> None: ... 
+def loads( + s: str | bytes | bytearray, + *, + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, + **kwds: Any, +) -> Any: ... +def load( + fp: SupportsRead[str | bytes], + *, + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, + **kwds: Any, +) -> Any: ... +def detect_encoding(b: bytes | bytearray) -> str: ... # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/decoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/decoder.pyi new file mode 100644 index 00000000..8debfe6c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/decoder.pyi @@ -0,0 +1,32 @@ +from collections.abc import Callable +from typing import Any + +__all__ = ["JSONDecoder", "JSONDecodeError"] + +class JSONDecodeError(ValueError): + msg: str + doc: str + pos: int + lineno: int + colno: int + def __init__(self, msg: str, doc: str, pos: int) -> None: ... + +class JSONDecoder: + object_hook: Callable[[dict[str, Any]], Any] + parse_float: Callable[[str], Any] + parse_int: Callable[[str], Any] + parse_constant: Callable[[str], Any] + strict: bool + object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] + def __init__( + self, + *, + object_hook: Callable[[dict[str, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + strict: bool = True, + object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None, + ) -> None: ... + def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: ... # _w is undocumented + def raw_decode(self, s: str, idx: int = 0) -> tuple[Any, int]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/encoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/encoder.pyi new file mode 100644 index 00000000..0c0d366e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/encoder.pyi @@ -0,0 +1,38 @@ +from collections.abc import Callable, Iterator +from re import Pattern +from typing import Any + +ESCAPE: Pattern[str] +ESCAPE_ASCII: Pattern[str] +HAS_UTF8: Pattern[bytes] +ESCAPE_DCT: dict[str, str] +INFINITY: float + +def py_encode_basestring(s: str) -> str: ... # undocumented +def py_encode_basestring_ascii(s: str) -> str: ... # undocumented + +class JSONEncoder: + item_separator: str + key_separator: str + + skipkeys: bool + ensure_ascii: bool + check_circular: bool + allow_nan: bool + sort_keys: bool + indent: int | str + def __init__( + self, + *, + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + sort_keys: bool = False, + indent: int | str | None = None, + separators: tuple[str, str] | None = None, + default: Callable[..., Any] | None = None, + ) -> None: ... 
+ def default(self, o: Any) -> Any: ... + def encode(self, o: Any) -> str: ... + def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/tool.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/tool.pyi new file mode 100644 index 00000000..7e7363e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/json/tool.pyi @@ -0,0 +1 @@ +def main() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/keyword.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/keyword.pyi new file mode 100644 index 00000000..46c38604 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/keyword.pyi @@ -0,0 +1,21 @@ +import sys +from collections.abc import Sequence +from typing_extensions import Final + +if sys.version_info >= (3, 9): + __all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] +else: + __all__ = ["iskeyword", "kwlist"] + +def iskeyword(s: str) -> bool: ... + +# a list at runtime, but you're not meant to mutate it; +# type it as a sequence +kwlist: Final[Sequence[str]] + +if sys.version_info >= (3, 9): + def issoftkeyword(s: str) -> bool: ... + + # a list at runtime, but you're not meant to mutate it; + # type it as a sequence + softkwlist: Final[Sequence[str]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/__init__.pyi new file mode 100644 index 00000000..acc1cc42 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/__init__.pyi @@ -0,0 +1,8 @@ +from collections.abc import Callable +from lib2to3.pgen2.grammar import Grammar +from lib2to3.pytree import _RawNode +from typing import Any +from typing_extensions import TypeAlias + +# This is imported in several lib2to3/pgen2 submodules +_Convert: TypeAlias = Callable[[Grammar, _RawNode], Any] # noqa: Y047 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/driver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/driver.pyi new file mode 100644 index 00000000..9f6e4d67 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/driver.pyi @@ -0,0 +1,24 @@ +from _typeshed import StrPath +from collections.abc import Iterable +from lib2to3.pgen2 import _Convert +from lib2to3.pgen2.grammar import Grammar +from lib2to3.pytree import _NL +from logging import Logger +from typing import IO, Any + +__all__ = ["Driver", "load_grammar"] + +class Driver: + grammar: Grammar + logger: Logger + convert: _Convert + def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ... + def parse_tokens(self, tokens: Iterable[Any], debug: bool = False) -> _NL: ... + def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: ... 
+ def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: ... + def parse_string(self, text: str, debug: bool = False) -> _NL: ... + +def load_grammar( + gt: str = "Grammar.txt", gp: str | None = None, save: bool = True, force: bool = False, logger: Logger | None = None +) -> Grammar: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/grammar.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/grammar.pyi new file mode 100644 index 00000000..bef0a792 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/grammar.pyi @@ -0,0 +1,24 @@ +from _typeshed import StrPath +from typing_extensions import Self, TypeAlias + +_Label: TypeAlias = tuple[int, str | None] +_DFA: TypeAlias = list[list[tuple[int, int]]] +_DFAS: TypeAlias = tuple[_DFA, dict[int, int]] + +class Grammar: + symbol2number: dict[str, int] + number2symbol: dict[int, str] + states: list[_DFA] + dfas: dict[int, _DFAS] + labels: list[_Label] + keywords: dict[str, int] + tokens: dict[int, int] + symbol2label: dict[str, int] + start: int + def dump(self, filename: StrPath) -> None: ... + def load(self, filename: StrPath) -> None: ... + def copy(self) -> Self: ... + def report(self) -> None: ... + +opmap_raw: str +opmap: dict[str, str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/literals.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/literals.pyi new file mode 100644 index 00000000..c3fabe8a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/literals.pyi @@ -0,0 +1,7 @@ +from re import Match + +simple_escapes: dict[str, str] + +def escape(m: Match[str]) -> str: ... +def evalString(s: str) -> str: ... +def test() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/parse.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/parse.pyi new file mode 100644 index 00000000..51eb671f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/parse.pyi @@ -0,0 +1,29 @@ +from collections.abc import Sequence +from lib2to3.pgen2 import _Convert +from lib2to3.pgen2.grammar import _DFAS, Grammar +from lib2to3.pytree import _NL, _RawNode +from typing import Any +from typing_extensions import TypeAlias + +_Context: TypeAlias = Sequence[Any] + +class ParseError(Exception): + msg: str + type: int + value: str | None + context: _Context + def __init__(self, msg: str, type: int, value: str | None, context: _Context) -> None: ... + +class Parser: + grammar: Grammar + convert: _Convert + stack: list[tuple[_DFAS, int, _RawNode]] + rootnode: _NL | None + used_names: set[str] + def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: ... + def setup(self, start: int | None = None) -> None: ... + def addtoken(self, type: int, value: str | None, context: _Context) -> bool: ... + def classify(self, type: int, value: str | None, context: _Context) -> int: ... + def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: ... + def push(self, type: int, newdfa: _DFAS, newstate: int, context: _Context) -> None: ... + def pop(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/pgen.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/pgen.pyi new file mode 100644 index 00000000..d346739d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/pgen.pyi @@ -0,0 +1,46 @@ +from _typeshed import StrPath +from collections.abc import Iterable, Iterator +from lib2to3.pgen2 import grammar +from lib2to3.pgen2.tokenize import _TokenInfo +from typing import IO, Any, NoReturn + +class PgenGrammar(grammar.Grammar): ... + +class ParserGenerator: + filename: StrPath + stream: IO[str] + generator: Iterator[_TokenInfo] + first: dict[str, dict[str, int]] + def __init__(self, filename: StrPath, stream: IO[str] | None = None) -> None: ... + def make_grammar(self) -> PgenGrammar: ... + def make_first(self, c: PgenGrammar, name: str) -> dict[int, int]: ... + def make_label(self, c: PgenGrammar, label: str) -> int: ... + def addfirstsets(self) -> None: ... + def calcfirst(self, name: str) -> None: ... + def parse(self) -> tuple[dict[str, list[DFAState]], str]: ... + def make_dfa(self, start: NFAState, finish: NFAState) -> list[DFAState]: ... + def dump_nfa(self, name: str, start: NFAState, finish: NFAState) -> list[DFAState]: ... + def dump_dfa(self, name: str, dfa: Iterable[DFAState]) -> None: ... + def simplify_dfa(self, dfa: list[DFAState]) -> None: ... + def parse_rhs(self) -> tuple[NFAState, NFAState]: ... + def parse_alt(self) -> tuple[NFAState, NFAState]: ... + def parse_item(self) -> tuple[NFAState, NFAState]: ... + def parse_atom(self) -> tuple[NFAState, NFAState]: ... + def expect(self, type: int, value: Any | None = None) -> str: ... + def gettoken(self) -> None: ... + def raise_error(self, msg: str, *args: Any) -> NoReturn: ... + +class NFAState: + arcs: list[tuple[str | None, NFAState]] + def addarc(self, next: NFAState, label: str | None = None) -> None: ... + +class DFAState: + nfaset: dict[NFAState, Any] + isfinal: bool + arcs: dict[str, DFAState] + def __init__(self, nfaset: dict[NFAState, Any], final: NFAState) -> None: ... + def addarc(self, next: DFAState, label: str) -> None: ... + def unifystate(self, old: DFAState, new: DFAState) -> None: ... + def __eq__(self, other: DFAState) -> bool: ... # type: ignore[override] + +def generate_grammar(filename: StrPath = "Grammar.txt") -> PgenGrammar: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/token.pyi new file mode 100644 index 00000000..debcb219 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/token.pyi @@ -0,0 +1,67 @@ +ENDMARKER: int +NAME: int +NUMBER: int +STRING: int +NEWLINE: int +INDENT: int +DEDENT: int +LPAR: int +RPAR: int +LSQB: int +RSQB: int +COLON: int +COMMA: int +SEMI: int +PLUS: int +MINUS: int +STAR: int +SLASH: int +VBAR: int +AMPER: int +LESS: int +GREATER: int +EQUAL: int +DOT: int +PERCENT: int +BACKQUOTE: int +LBRACE: int +RBRACE: int +EQEQUAL: int +NOTEQUAL: int +LESSEQUAL: int +GREATEREQUAL: int +TILDE: int +CIRCUMFLEX: int +LEFTSHIFT: int +RIGHTSHIFT: int +DOUBLESTAR: int +PLUSEQUAL: int +MINEQUAL: int +STAREQUAL: int +SLASHEQUAL: int +PERCENTEQUAL: int +AMPEREQUAL: int +VBAREQUAL: int +CIRCUMFLEXEQUAL: int +LEFTSHIFTEQUAL: int +RIGHTSHIFTEQUAL: int +DOUBLESTAREQUAL: int +DOUBLESLASH: int +DOUBLESLASHEQUAL: int +OP: int +COMMENT: int +NL: int +RARROW: int +AT: int +ATEQUAL: int +AWAIT: int +ASYNC: int +ERRORTOKEN: int +COLONEQUAL: int +N_TOKENS: int +NT_OFFSET: int +tok_name: dict[int, str] + +def ISTERMINAL(x: int) -> bool: ... +def ISNONTERMINAL(x: int) -> bool: ... +def ISEOF(x: int) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi new file mode 100644 index 00000000..2a9c3fbb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi @@ -0,0 +1,95 @@ +from collections.abc import Callable, Iterable, Iterator +from lib2to3.pgen2.token import * +from typing_extensions import TypeAlias + +__all__ = [ + "AMPER", + "AMPEREQUAL", + "ASYNC", + "AT", + "ATEQUAL", + "AWAIT", + "BACKQUOTE", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COMMA", + "COMMENT", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NL", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "VBAR", + "VBAREQUAL", + "tok_name", + "tokenize", + "generate_tokens", + "untokenize", + "COLONEQUAL", +] + +_Coord: TypeAlias = tuple[int, int] +_TokenEater: TypeAlias = Callable[[int, str, _Coord, _Coord, str], object] +_TokenInfo: TypeAlias = tuple[int, str, _Coord, _Coord, str] + +class TokenError(Exception): ... +class StopTokenizing(Exception): ... + +def tokenize(readline: Callable[[], str], tokeneater: _TokenEater = ...) -> None: ... + +class Untokenizer: + tokens: list[str] + prev_row: int + prev_col: int + def add_whitespace(self, start: _Coord) -> None: ... + def untokenize(self, iterable: Iterable[_TokenInfo]) -> str: ... + def compat(self, token: tuple[int, str], iterable: Iterable[_TokenInfo]) -> None: ... 
+ +def untokenize(iterable: Iterable[_TokenInfo]) -> str: ... +def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pygram.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pygram.pyi new file mode 100644 index 00000000..00fdbd1a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pygram.pyi @@ -0,0 +1,116 @@ +import sys +from lib2to3.pgen2.grammar import Grammar + +class Symbols: + def __init__(self, grammar: Grammar) -> None: ... + +class python_symbols(Symbols): + and_expr: int + and_test: int + annassign: int + arglist: int + argument: int + arith_expr: int + assert_stmt: int + async_funcdef: int + async_stmt: int + atom: int + augassign: int + break_stmt: int + classdef: int + comp_for: int + comp_if: int + comp_iter: int + comp_op: int + comparison: int + compound_stmt: int + continue_stmt: int + decorated: int + decorator: int + decorators: int + del_stmt: int + dictsetmaker: int + dotted_as_name: int + dotted_as_names: int + dotted_name: int + encoding_decl: int + eval_input: int + except_clause: int + exec_stmt: int + expr: int + expr_stmt: int + exprlist: int + factor: int + file_input: int + flow_stmt: int + for_stmt: int + funcdef: int + global_stmt: int + if_stmt: int + import_as_name: int + import_as_names: int + import_from: int + import_name: int + import_stmt: int + lambdef: int + listmaker: int + not_test: int + old_lambdef: int + old_test: int + or_test: int + parameters: int + pass_stmt: int + power: int + print_stmt: int + raise_stmt: int + return_stmt: int + shift_expr: int + simple_stmt: int + single_input: int + sliceop: int + small_stmt: int + star_expr: int + stmt: int + subscript: int + subscriptlist: int + suite: int + term: int + test: int + testlist: int + testlist1: int + testlist_gexp: int + testlist_safe: int + testlist_star_expr: int + tfpdef: int + tfplist: int + tname: int + trailer: int + try_stmt: int + typedargslist: int + varargslist: int + vfpdef: int + vfplist: int + vname: int + while_stmt: int + with_item: int + with_stmt: int + with_var: int + xor_expr: int + yield_arg: int + yield_expr: int + yield_stmt: int + +class pattern_symbols(Symbols): + Alternative: int + Alternatives: int + Details: int + Matcher: int + NegatedUnit: int + Repeater: int + Unit: int + +python_grammar: Grammar +python_grammar_no_print_statement: Grammar +if sys.version_info >= (3, 8): + python_grammar_no_print_and_exec_statement: Grammar +pattern_grammar: Grammar diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pytree.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pytree.pyi new file mode 100644 index 00000000..4f756c97 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/pytree.pyi @@ -0,0 +1,90 @@ +from collections.abc import Iterator +from lib2to3.pgen2.grammar import Grammar +from typing import Any +from typing_extensions import Self, TypeAlias + +_NL: TypeAlias = Node | Leaf +_Context: TypeAlias = tuple[str, int, int] +_Results: TypeAlias = dict[str, _NL] +_RawNode: TypeAlias = tuple[int, str, _Context, list[_NL] | None] + +HUGE: int + +def type_repr(type_num: int) -> str: ... 
+ +class Base: + type: int + parent: Node | None + prefix: str + children: list[_NL] + was_changed: bool + was_checked: bool + def __eq__(self, other: object) -> bool: ... + def _eq(self, other: Self) -> bool: ... + def clone(self) -> Self: ... + def post_order(self) -> Iterator[_NL]: ... + def pre_order(self) -> Iterator[_NL]: ... + def replace(self, new: _NL | list[_NL]) -> None: ... + def get_lineno(self) -> int: ... + def changed(self) -> None: ... + def remove(self) -> int | None: ... + @property + def next_sibling(self) -> _NL | None: ... + @property + def prev_sibling(self) -> _NL | None: ... + def leaves(self) -> Iterator[Leaf]: ... + def depth(self) -> int: ... + def get_suffix(self) -> str: ... + +class Node(Base): + fixers_applied: list[Any] + def __init__( + self, + type: int, + children: list[_NL], + context: Any | None = None, + prefix: str | None = None, + fixers_applied: list[Any] | None = None, + ) -> None: ... + def set_child(self, i: int, child: _NL) -> None: ... + def insert_child(self, i: int, child: _NL) -> None: ... + def append_child(self, child: _NL) -> None: ... + def __unicode__(self) -> str: ... + +class Leaf(Base): + lineno: int + column: int + value: str + fixers_applied: list[Any] + def __init__( + self, type: int, value: str, context: _Context | None = None, prefix: str | None = None, fixers_applied: list[Any] = ... + ) -> None: ... + def __unicode__(self) -> str: ... + +def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ... + +class BasePattern: + type: int + content: str | None + name: str | None + def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns + def match(self, node: _NL, results: _Results | None = None) -> bool: ... + def match_seq(self, nodes: list[_NL], results: _Results | None = None) -> bool: ... + def generate_matches(self, nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... + +class LeafPattern(BasePattern): + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... + +class NodePattern(BasePattern): + wildcards: bool + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... + +class WildcardPattern(BasePattern): + min: int + max: int + def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: ... + +class NegatedPattern(BasePattern): + def __init__(self, content: str | None = None) -> None: ... + +def generate_matches(patterns: list[BasePattern], nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/refactor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/refactor.pyi new file mode 100644 index 00000000..f1d89679 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lib2to3/refactor.pyi @@ -0,0 +1,73 @@ +from collections.abc import Container, Generator, Iterable, Mapping +from logging import Logger +from typing import Any, ClassVar, NoReturn +from typing_extensions import TypeAlias + +from .pgen2.grammar import Grammar + +_Driver: TypeAlias = Any # really lib2to3.driver.Driver +_BottomMatcher: TypeAlias = Any # really lib2to3.btm_matcher.BottomMatcher + +def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: ... +def get_fixers_from_package(pkg_name: str) -> list[str]: ... + +class FixerError(Exception): ... 
+ +class RefactoringTool: + CLASS_PREFIX: ClassVar[str] + FILE_PREFIX: ClassVar[str] + fixers: Iterable[str] + explicit: Container[str] + options: dict[str, Any] + grammar: Grammar + write_unchanged_files: bool + errors: list[Any] + logger: Logger + fixer_log: list[Any] + wrote: bool + driver: _Driver + pre_order: Any + post_order: Any + files: list[Any] + BM: _BottomMatcher + bmi_pre_order: list[Any] + bmi_post_order: list[Any] + def __init__( + self, fixer_names: Iterable[str], options: Mapping[str, Any] | None = None, explicit: Container[str] | None = None + ) -> None: ... + def get_fixers(self) -> tuple[list[Any], list[Any]]: ... + def log_error(self, msg: str, *args: Any, **kwds: Any) -> NoReturn: ... + def log_message(self, msg: str, *args: Any) -> None: ... + def log_debug(self, msg: str, *args: Any) -> None: ... + def print_output(self, old_text: str, new_text: str, filename: str, equal): ... + def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: ... + def _read_python_source(self, filename: str) -> tuple[str, str]: ... + def refactor_file(self, filename: str, write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_string(self, data: str, name: str): ... + def refactor_stdin(self, doctests_only: bool = False) -> None: ... + def refactor_tree(self, tree, name: str) -> bool: ... + def traverse_by(self, fixers, traversal) -> None: ... + def processed_file( + self, new_text: str, filename: str, old_text: str | None = None, write: bool = False, encoding: str | None = None + ) -> None: ... + def write_file(self, new_text: str, filename: str, old_text: str, encoding: str | None = None) -> None: ... + PS1: ClassVar[str] + PS2: ClassVar[str] + def refactor_docstring(self, input: str, filename: str) -> str: ... + def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: str) -> list[str]: ... + def summarize(self) -> None: ... + def parse_block(self, block: Iterable[str], lineno: int, indent: int): ... + def wrap_toks( + self, block: Iterable[str], lineno: int, indent: int + ) -> Generator[tuple[Any, Any, tuple[int, int], tuple[int, int], str], None, None]: ... + def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: ... + +class MultiprocessingUnsupported(Exception): ... + +class MultiprocessRefactoringTool(RefactoringTool): + queue: Any | None + output_lock: Any | None + def refactor( + self, items: Iterable[str], write: bool = False, doctests_only: bool = False, num_processes: int = 1 + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/linecache.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/linecache.pyi new file mode 100644 index 00000000..8e317dd3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/linecache.pyi @@ -0,0 +1,23 @@ +import sys +from typing import Any, Protocol +from typing_extensions import TypeAlias + +if sys.version_info >= (3, 9): + __all__ = ["getline", "clearcache", "checkcache", "lazycache"] +else: + __all__ = ["getline", "clearcache", "checkcache"] + +_ModuleGlobals: TypeAlias = dict[str, Any] +_ModuleMetadata: TypeAlias = tuple[int, float | None, list[str], str] + +class _SourceLoader(Protocol): + def __call__(self) -> str | None: ... 
+ +cache: dict[str, _SourceLoader | _ModuleMetadata] # undocumented + +def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> str: ... +def clearcache() -> None: ... +def getlines(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... +def checkcache(filename: str | None = None) -> None: ... +def updatecache(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... +def lazycache(filename: str, module_globals: _ModuleGlobals) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/locale.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/locale.pyi new file mode 100644 index 00000000..0b0dd945 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/locale.pyi @@ -0,0 +1,148 @@ +import sys +from _typeshed import StrPath +from collections.abc import Callable, Iterable, Mapping + +__all__ = [ + "getlocale", + "getdefaultlocale", + "getpreferredencoding", + "Error", + "setlocale", + "resetlocale", + "localeconv", + "strcoll", + "strxfrm", + "str", + "atof", + "atoi", + "format", + "format_string", + "currency", + "normalize", + "LC_CTYPE", + "LC_COLLATE", + "LC_MESSAGES", + "LC_TIME", + "LC_MONETARY", + "LC_NUMERIC", + "LC_ALL", + "CHAR_MAX", +] + +if sys.version_info >= (3, 11): + __all__ += ["getencoding"] + +# This module defines a function "str()", which is why "str" can't be used +# as a type annotation or type alias. +from builtins import str as _str +from decimal import Decimal +from typing import Any + +CODESET: int +D_T_FMT: int +D_FMT: int +T_FMT: int +T_FMT_AMPM: int +AM_STR: int +PM_STR: int + +DAY_1: int +DAY_2: int +DAY_3: int +DAY_4: int +DAY_5: int +DAY_6: int +DAY_7: int +ABDAY_1: int +ABDAY_2: int +ABDAY_3: int +ABDAY_4: int +ABDAY_5: int +ABDAY_6: int +ABDAY_7: int + +MON_1: int +MON_2: int +MON_3: int +MON_4: int +MON_5: int +MON_6: int +MON_7: int +MON_8: int +MON_9: int +MON_10: int +MON_11: int +MON_12: int +ABMON_1: int +ABMON_2: int +ABMON_3: int +ABMON_4: int +ABMON_5: int +ABMON_6: int +ABMON_7: int +ABMON_8: int +ABMON_9: int +ABMON_10: int +ABMON_11: int +ABMON_12: int + +RADIXCHAR: int +THOUSEP: int +YESEXPR: int +NOEXPR: int +CRNCYSTR: int + +ERA: int +ERA_D_T_FMT: int +ERA_D_FMT: int +ERA_T_FMT: int + +ALT_DIGITS: int + +LC_CTYPE: int +LC_COLLATE: int +LC_TIME: int +LC_MONETARY: int +LC_MESSAGES: int +LC_NUMERIC: int +LC_ALL: int + +CHAR_MAX: int + +class Error(Exception): ... + +def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ... +def localeconv() -> Mapping[_str, int | _str | list[int]]: ... +def nl_langinfo(__key: int) -> _str: ... +def getdefaultlocale(envvars: tuple[_str, ...] = ...) -> tuple[_str | None, _str | None]: ... +def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ... +def getpreferredencoding(do_setlocale: bool = True) -> _str: ... +def normalize(localename: _str) -> _str: ... +def resetlocale(category: int = ...) -> None: ... +def strcoll(__os1: _str, __os2: _str) -> int: ... +def strxfrm(__string: _str) -> _str: ... +def format(percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any) -> _str: ... +def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: ... +def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: ... 
+def delocalize(string: _str) -> _str: ... +def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... +def atoi(string: _str) -> int: ... +def str(val: float) -> _str: ... + +# native gettext functions +# https://docs.python.org/3/library/locale.html#access-to-message-catalogs +# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626 +if sys.platform == "linux" or sys.platform == "darwin": + def gettext(__msg: _str) -> _str: ... + def dgettext(__domain: _str | None, __msg: _str) -> _str: ... + def dcgettext(__domain: _str | None, __msg: _str, __category: int) -> _str: ... + def textdomain(__domain: _str | None) -> _str: ... + def bindtextdomain(__domain: _str, __dir: StrPath | None) -> _str: ... + def bind_textdomain_codeset(__domain: _str, __codeset: _str | None) -> _str | None: ... + +if sys.version_info >= (3, 11): + def getencoding() -> _str: ... + +locale_alias: dict[_str, _str] # undocumented +locale_encoding_alias: dict[_str, _str] # undocumented +windows_locale: dict[int, _str] # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/logging/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/logging/__init__.pyi new file mode 100644 index 00000000..3c547a6e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/logging/__init__.pyi @@ -0,0 +1,865 @@ +import sys +import threading +from _typeshed import StrPath, SupportsWrite +from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence +from io import TextIOWrapper +from re import Pattern +from string import Template +from time import struct_time +from types import FrameType, TracebackType +from typing import Any, ClassVar, Generic, TextIO, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +if sys.version_info >= (3, 11): + from types import GenericAlias + +__all__ = [ + "BASIC_FORMAT", + "BufferingFormatter", + "CRITICAL", + "DEBUG", + "ERROR", + "FATAL", + "FileHandler", + "Filter", + "Formatter", + "Handler", + "INFO", + "LogRecord", + "Logger", + "LoggerAdapter", + "NOTSET", + "NullHandler", + "StreamHandler", + "WARN", + "WARNING", + "addLevelName", + "basicConfig", + "captureWarnings", + "critical", + "debug", + "disable", + "error", + "exception", + "fatal", + "getLevelName", + "getLogger", + "getLoggerClass", + "info", + "log", + "makeLogRecord", + "setLoggerClass", + "shutdown", + "warn", + "warning", + "getLogRecordFactory", + "setLogRecordFactory", + "lastResort", + "raiseExceptions", +] + +if sys.version_info >= (3, 11): + __all__ += ["getLevelNamesMapping"] + +_SysExcInfoType: TypeAlias = tuple[type[BaseException], BaseException, TracebackType | None] | tuple[None, None, None] +_ExcInfoType: TypeAlias = None | bool | _SysExcInfoType | BaseException +_ArgsType: TypeAlias = tuple[object, ...] | Mapping[str, object] +_FilterType: TypeAlias = Filter | Callable[[LogRecord], bool] +_Level: TypeAlias = int | str +_FormatStyle: TypeAlias = Literal["%", "{", "$"] + +raiseExceptions: bool +logThreads: bool +logMultiprocessing: bool +logProcesses: bool +_srcfile: str | None + +def currentframe() -> FrameType: ... + +_levelToName: dict[int, str] +_nameToLevel: dict[str, int] + +class Filterer: + filters: list[Filter] + def addFilter(self, filter: _FilterType) -> None: ... + def removeFilter(self, filter: _FilterType) -> None: ... + def filter(self, record: LogRecord) -> bool: ... 
+ +class Manager: # undocumented + root: RootLogger + disable: int + emittedNoHandlerWarning: bool + loggerDict: dict[str, Logger | PlaceHolder] + loggerClass: type[Logger] | None + logRecordFactory: Callable[..., LogRecord] | None + def __init__(self, rootnode: RootLogger) -> None: ... + def getLogger(self, name: str) -> Logger: ... + def setLoggerClass(self, klass: type[Logger]) -> None: ... + def setLogRecordFactory(self, factory: Callable[..., LogRecord]) -> None: ... + +class Logger(Filterer): + name: str # undocumented + level: int # undocumented + parent: Logger | None # undocumented + propagate: bool + handlers: list[Handler] # undocumented + disabled: bool # undocumented + root: ClassVar[RootLogger] # undocumented + manager: Manager # undocumented + def __init__(self, name: str, level: _Level = 0) -> None: ... + def setLevel(self, level: _Level) -> None: ... + def isEnabledFor(self, level: int) -> bool: ... + def getEffectiveLevel(self) -> int: ... + def getChild(self, suffix: str) -> Self: ... # see python/typing#980 + if sys.version_info >= (3, 8): + def debug( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def info( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def warning( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def warn( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def error( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def exception( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def critical( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def log( + self, + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def _log( + self, + level: int, + msg: object, + args: _ArgsType, + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, + stacklevel: int = 1, + ) -> None: ... # undocumented + else: + def debug( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def info( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def warning( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... 
+ def warn( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def error( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def critical( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def log( + self, + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def exception( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def _log( + self, + level: int, + msg: object, + args: _ArgsType, + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, + ) -> None: ... # undocumented + fatal = critical + def addHandler(self, hdlr: Handler) -> None: ... + def removeHandler(self, hdlr: Handler) -> None: ... + if sys.version_info >= (3, 8): + def findCaller(self, stack_info: bool = False, stacklevel: int = 1) -> tuple[str, int, str, str | None]: ... + else: + def findCaller(self, stack_info: bool = False) -> tuple[str, int, str, str | None]: ... + + def handle(self, record: LogRecord) -> None: ... + def makeRecord( + self, + name: str, + level: int, + fn: str, + lno: int, + msg: object, + args: _ArgsType, + exc_info: _SysExcInfoType | None, + func: str | None = None, + extra: Mapping[str, object] | None = None, + sinfo: str | None = None, + ) -> LogRecord: ... + def hasHandlers(self) -> bool: ... + def callHandlers(self, record: LogRecord) -> None: ... # undocumented + +CRITICAL: int +FATAL: int +ERROR: int +WARNING: int +WARN: int +INFO: int +DEBUG: int +NOTSET: int + +class Handler(Filterer): + level: int # undocumented + formatter: Formatter | None # undocumented + lock: threading.Lock | None # undocumented + name: str | None # undocumented + def __init__(self, level: _Level = 0) -> None: ... + def get_name(self) -> str: ... # undocumented + def set_name(self, name: str) -> None: ... # undocumented + def createLock(self) -> None: ... + def acquire(self) -> None: ... + def release(self) -> None: ... + def setLevel(self, level: _Level) -> None: ... + def setFormatter(self, fmt: Formatter | None) -> None: ... + def flush(self) -> None: ... + def close(self) -> None: ... + def handle(self, record: LogRecord) -> bool: ... + def handleError(self, record: LogRecord) -> None: ... + def format(self, record: LogRecord) -> str: ... + def emit(self, record: LogRecord) -> None: ... + +class Formatter: + converter: Callable[[float | None], struct_time] + _fmt: str | None # undocumented + datefmt: str | None # undocumented + _style: PercentStyle # undocumented + default_time_format: str + if sys.version_info >= (3, 9): + default_msec_format: str | None + else: + default_msec_format: str + + if sys.version_info >= (3, 10): + def __init__( + self, + fmt: str | None = None, + datefmt: str | None = None, + style: _FormatStyle = "%", + validate: bool = True, + *, + defaults: Mapping[str, Any] | None = None, + ) -> None: ... + elif sys.version_info >= (3, 8): + def __init__( + self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%", validate: bool = True + ) -> None: ... 
+ else: + def __init__(self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%") -> None: ... + + def format(self, record: LogRecord) -> str: ... + def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: ... + def formatException(self, ei: _SysExcInfoType) -> str: ... + def formatMessage(self, record: LogRecord) -> str: ... # undocumented + def formatStack(self, stack_info: str) -> str: ... + def usesTime(self) -> bool: ... # undocumented + +class BufferingFormatter: + linefmt: Formatter + def __init__(self, linefmt: Formatter | None = None) -> None: ... + def formatHeader(self, records: Sequence[LogRecord]) -> str: ... + def formatFooter(self, records: Sequence[LogRecord]) -> str: ... + def format(self, records: Sequence[LogRecord]) -> str: ... + +class Filter: + name: str # undocumented + nlen: int # undocumented + def __init__(self, name: str = "") -> None: ... + def filter(self, record: LogRecord) -> bool: ... + +class LogRecord: + # args can be set to None by logging.handlers.QueueHandler + # (see https://bugs.python.org/issue44473) + args: _ArgsType | None + asctime: str + created: float + exc_info: _SysExcInfoType | None + exc_text: str | None + filename: str + funcName: str + levelname: str + levelno: int + lineno: int + module: str + msecs: float + # Only created when logging.Formatter.format is called. See #6132. + message: str + msg: str + name: str + pathname: str + process: int | None + processName: str | None + relativeCreated: float + stack_info: str | None + thread: int | None + threadName: str | None + def __init__( + self, + name: str, + level: int, + pathname: str, + lineno: int, + msg: object, + args: _ArgsType | None, + exc_info: _SysExcInfoType | None, + func: str | None = None, + sinfo: str | None = None, + ) -> None: ... + def getMessage(self) -> str: ... + # Allows setting contextual information on LogRecord objects as per the docs, see #7833 + def __setattr__(self, __name: str, __value: Any) -> None: ... + +_L = TypeVar("_L", bound=Logger | LoggerAdapter[Any]) + +class LoggerAdapter(Generic[_L]): + logger: _L + manager: Manager # undocumented + if sys.version_info >= (3, 10): + extra: Mapping[str, object] | None + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: ... + else: + extra: Mapping[str, object] + def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: ... + + def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> tuple[Any, MutableMapping[str, Any]]: ... + if sys.version_info >= (3, 8): + def debug( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def info( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def warning( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def warn( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... 
+ def error( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def exception( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def critical( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def log( + self, + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + else: + def debug( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def info( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def warning( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def warn( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def error( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def exception( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def critical( + self, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + def log( + self, + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + **kwargs: object, + ) -> None: ... + + def isEnabledFor(self, level: int) -> bool: ... + def getEffectiveLevel(self) -> int: ... + def setLevel(self, level: _Level) -> None: ... + def hasHandlers(self) -> bool: ... + def _log( + self, + level: int, + msg: object, + args: _ArgsType, + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, + ) -> None: ... # undocumented + @property + def name(self) -> str: ... # undocumented + if sys.version_info >= (3, 11): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def getLogger(name: str | None = None) -> Logger: ... +def getLoggerClass() -> type[Logger]: ... +def getLogRecordFactory() -> Callable[..., LogRecord]: ... + +if sys.version_info >= (3, 8): + def debug( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... 
+ def info( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def warning( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def warn( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def error( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def critical( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def exception( + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def log( + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + +else: + def debug( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def info( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def warning( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def warn( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def error( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def critical( + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def exception( + msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + def log( + level: int, + msg: object, + *args: object, + exc_info: _ExcInfoType = None, + stack_info: bool = False, + extra: Mapping[str, object] | None = None, + ) -> None: ... + +fatal = critical + +def disable(level: int = 50) -> None: ... +def addLevelName(level: int, levelName: str) -> None: ... +def getLevelName(level: _Level) -> Any: ... + +if sys.version_info >= (3, 11): + def getLevelNamesMapping() -> dict[str, int]: ... + +def makeLogRecord(dict: Mapping[str, object]) -> LogRecord: ... + +if sys.version_info >= (3, 9): + def basicConfig( + *, + filename: StrPath | None = ..., + filemode: str = ..., + format: str = ..., + datefmt: str | None = ..., + style: _FormatStyle = ..., + level: _Level | None = ..., + stream: SupportsWrite[str] | None = ..., + handlers: Iterable[Handler] | None = ..., + force: bool | None = ..., + encoding: str | None = ..., + errors: str | None = ..., + ) -> None: ... 
+ +elif sys.version_info >= (3, 8): + def basicConfig( + *, + filename: StrPath | None = ..., + filemode: str = ..., + format: str = ..., + datefmt: str | None = ..., + style: _FormatStyle = ..., + level: _Level | None = ..., + stream: SupportsWrite[str] | None = ..., + handlers: Iterable[Handler] | None = ..., + force: bool = ..., + ) -> None: ... + +else: + def basicConfig( + *, + filename: StrPath | None = ..., + filemode: str = ..., + format: str = ..., + datefmt: str | None = ..., + style: _FormatStyle = ..., + level: _Level | None = ..., + stream: SupportsWrite[str] | None = ..., + handlers: Iterable[Handler] | None = ..., + ) -> None: ... + +def shutdown(handlerList: Sequence[Any] = ...) -> None: ... # handlerList is undocumented +def setLoggerClass(klass: type[Logger]) -> None: ... +def captureWarnings(capture: bool) -> None: ... +def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: ... + +lastResort: StreamHandler[Any] | None + +_StreamT = TypeVar("_StreamT", bound=SupportsWrite[str]) + +class StreamHandler(Handler, Generic[_StreamT]): + stream: _StreamT # undocumented + terminator: str + @overload + def __init__(self: StreamHandler[TextIO], stream: None = None) -> None: ... + @overload + def __init__(self: StreamHandler[_StreamT], stream: _StreamT) -> None: ... + def setStream(self, stream: _StreamT) -> _StreamT | None: ... + if sys.version_info >= (3, 11): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class FileHandler(StreamHandler[TextIOWrapper]): + baseFilename: str # undocumented + mode: str # undocumented + encoding: str | None # undocumented + delay: bool # undocumented + if sys.version_info >= (3, 9): + errors: str | None # undocumented + def __init__( + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None + ) -> None: ... + else: + def __init__(self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False) -> None: ... + + def _open(self) -> TextIOWrapper: ... # undocumented + +class NullHandler(Handler): ... + +class PlaceHolder: # undocumented + loggerMap: dict[Logger, None] + def __init__(self, alogger: Logger) -> None: ... + def append(self, alogger: Logger) -> None: ... + +# Below aren't in module docs but still visible + +class RootLogger(Logger): + def __init__(self, level: int) -> None: ... + +root: RootLogger + +class PercentStyle: # undocumented + default_format: str + asctime_format: str + asctime_search: str + if sys.version_info >= (3, 8): + validation_pattern: Pattern[str] + _fmt: str + if sys.version_info >= (3, 10): + def __init__(self, fmt: str, *, defaults: Mapping[str, Any] | None = None) -> None: ... + else: + def __init__(self, fmt: str) -> None: ... + + def usesTime(self) -> bool: ... + if sys.version_info >= (3, 8): + def validate(self) -> None: ... + + def format(self, record: Any) -> str: ... 
+ +class StrFormatStyle(PercentStyle): # undocumented + fmt_spec: Pattern[str] + field_spec: Pattern[str] + +class StringTemplateStyle(PercentStyle): # undocumented + _tpl: Template + +_STYLES: dict[str, tuple[PercentStyle, str]] + +BASIC_FORMAT: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/logging/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/logging/config.pyi new file mode 100644 index 00000000..f76f655a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/logging/config.pyi @@ -0,0 +1,66 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Callable, Sequence +from configparser import RawConfigParser +from re import Pattern +from threading import Thread +from typing import IO, Any + +from . import _Level + +if sys.version_info >= (3, 8): + from typing import Literal, TypedDict +else: + from typing_extensions import Literal, TypedDict + +DEFAULT_LOGGING_CONFIG_PORT: int +RESET_ERROR: int # undocumented +IDENTIFIER: Pattern[str] # undocumented + +class _RootLoggerConfiguration(TypedDict, total=False): + level: _Level + filters: Sequence[str] + handlers: Sequence[str] + +class _LoggerConfiguration(_RootLoggerConfiguration, TypedDict, total=False): + propagate: bool + +class _OptionalDictConfigArgs(TypedDict, total=False): + # these two can have custom factories (key: `()`) which can have extra keys + formatters: dict[str, dict[str, Any]] + filters: dict[str, dict[str, Any]] + # type checkers would warn about extra keys if this was a TypedDict + handlers: dict[str, dict[str, Any]] + loggers: dict[str, _LoggerConfiguration] + root: _RootLoggerConfiguration | None + incremental: bool + disable_existing_loggers: bool + +class _DictConfigArgs(_OptionalDictConfigArgs, TypedDict): + version: Literal[1] + +# Accept dict[str, Any] to avoid false positives if called with a dict +# type, since dict types are not compatible with TypedDicts. +# +# Also accept a TypedDict type, to allow callers to use TypedDict +# types, and for somewhat stricter type checking of dict literals. +def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: ... + +if sys.version_info >= (3, 10): + def fileConfig( + fname: StrOrBytesPath | IO[str] | RawConfigParser, + defaults: dict[str, str] | None = None, + disable_existing_loggers: bool = True, + encoding: str | None = None, + ) -> None: ... + +else: + def fileConfig( + fname: StrOrBytesPath | IO[str] | RawConfigParser, + defaults: dict[str, str] | None = None, + disable_existing_loggers: bool = True, + ) -> None: ... + +def valid_ident(s: str) -> Literal[True]: ... # undocumented +def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: ... +def stopListening() -> None: ... 
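The _DictConfigArgs TypedDict above mirrors the schema accepted by logging.config.dictConfig; a minimal, self-contained configuration that fits that shape looks like the following sketch (handler and logger names are illustrative):

# Illustrative sketch only: configure logging from a dict matching _DictConfigArgs.
import logging
import logging.config

logging.config.dictConfig({
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {"plain": {"format": "%(levelname)s %(name)s: %(message)s"}},
    "handlers": {"console": {"class": "logging.StreamHandler", "formatter": "plain"}},
    "root": {"level": "INFO", "handlers": ["console"]},
})
logging.getLogger("demo").info("configured via dictConfig")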
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/logging/handlers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/logging/handlers.pyi new file mode 100644 index 00000000..7e0bfd70 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/logging/handlers.pyi @@ -0,0 +1,266 @@ +import datetime +import http.client +import ssl +import sys +from _typeshed import ReadableBuffer, StrPath +from collections.abc import Callable +from logging import FileHandler, Handler, LogRecord +from queue import Queue, SimpleQueue +from re import Pattern +from socket import SocketKind, socket +from typing import Any, ClassVar + +DEFAULT_TCP_LOGGING_PORT: int +DEFAULT_UDP_LOGGING_PORT: int +DEFAULT_HTTP_LOGGING_PORT: int +DEFAULT_SOAP_LOGGING_PORT: int +SYSLOG_UDP_PORT: int +SYSLOG_TCP_PORT: int + +class WatchedFileHandler(FileHandler): + dev: int # undocumented + ino: int # undocumented + if sys.version_info >= (3, 9): + def __init__( + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None + ) -> None: ... + else: + def __init__(self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False) -> None: ... + + def _statstream(self) -> None: ... # undocumented + def reopenIfNeeded(self) -> None: ... + +class BaseRotatingHandler(FileHandler): + namer: Callable[[str], str] | None + rotator: Callable[[str, str], None] | None + if sys.version_info >= (3, 9): + def __init__( + self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False, errors: str | None = None + ) -> None: ... + else: + def __init__(self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False) -> None: ... + + def rotation_filename(self, default_name: str) -> str: ... + def rotate(self, source: str, dest: str) -> None: ... + +class RotatingFileHandler(BaseRotatingHandler): + maxBytes: str # undocumented + backupCount: int # undocumented + if sys.version_info >= (3, 9): + def __init__( + self, + filename: StrPath, + mode: str = "a", + maxBytes: int = 0, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + errors: str | None = None, + ) -> None: ... + else: + def __init__( + self, + filename: StrPath, + mode: str = "a", + maxBytes: int = 0, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + ) -> None: ... + + def doRollover(self) -> None: ... + def shouldRollover(self, record: LogRecord) -> int: ... # undocumented + +class TimedRotatingFileHandler(BaseRotatingHandler): + when: str # undocumented + backupCount: int # undocumented + utc: bool # undocumented + atTime: datetime.time | None # undocumented + interval: int # undocumented + suffix: str # undocumented + dayOfWeek: int # undocumented + rolloverAt: int # undocumented + extMatch: Pattern[str] # undocumented + if sys.version_info >= (3, 9): + def __init__( + self, + filename: StrPath, + when: str = "h", + interval: int = 1, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + utc: bool = False, + atTime: datetime.time | None = None, + errors: str | None = None, + ) -> None: ... + else: + def __init__( + self, + filename: StrPath, + when: str = "h", + interval: int = 1, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + utc: bool = False, + atTime: datetime.time | None = None, + ) -> None: ... + + def doRollover(self) -> None: ... 
+ def shouldRollover(self, record: LogRecord) -> int: ... # undocumented + def computeRollover(self, currentTime: int) -> int: ... # undocumented + def getFilesToDelete(self) -> list[str]: ... # undocumented + +class SocketHandler(Handler): + host: str # undocumented + port: int | None # undocumented + address: tuple[str, int] | str # undocumented + sock: socket | None # undocumented + closeOnError: bool # undocumented + retryTime: float | None # undocumented + retryStart: float # undocumented + retryFactor: float # undocumented + retryMax: float # undocumented + def __init__(self, host: str, port: int | None) -> None: ... + def makeSocket(self, timeout: float = 1) -> socket: ... # timeout is undocumented + def makePickle(self, record: LogRecord) -> bytes: ... + def send(self, s: ReadableBuffer) -> None: ... + def createSocket(self) -> None: ... + +class DatagramHandler(SocketHandler): + def makeSocket(self) -> socket: ... # type: ignore[override] + +class SysLogHandler(Handler): + LOG_EMERG: int + LOG_ALERT: int + LOG_CRIT: int + LOG_ERR: int + LOG_WARNING: int + LOG_NOTICE: int + LOG_INFO: int + LOG_DEBUG: int + + LOG_KERN: int + LOG_USER: int + LOG_MAIL: int + LOG_DAEMON: int + LOG_AUTH: int + LOG_SYSLOG: int + LOG_LPR: int + LOG_NEWS: int + LOG_UUCP: int + LOG_CRON: int + LOG_AUTHPRIV: int + LOG_FTP: int + + if sys.version_info >= (3, 9): + LOG_NTP: int + LOG_SECURITY: int + LOG_CONSOLE: int + LOG_SOLCRON: int + + LOG_LOCAL0: int + LOG_LOCAL1: int + LOG_LOCAL2: int + LOG_LOCAL3: int + LOG_LOCAL4: int + LOG_LOCAL5: int + LOG_LOCAL6: int + LOG_LOCAL7: int + address: tuple[str, int] | str # undocumented + unixsocket: bool # undocumented + socktype: SocketKind # undocumented + ident: str # undocumented + append_nul: bool # undocumented + facility: int # undocumented + priority_names: ClassVar[dict[str, int]] # undocumented + facility_names: ClassVar[dict[str, int]] # undocumented + priority_map: ClassVar[dict[str, str]] # undocumented + def __init__(self, address: tuple[str, int] | str = ..., facility: int = 1, socktype: SocketKind | None = None) -> None: ... + if sys.version_info >= (3, 11): + def createSocket(self) -> None: ... + + def encodePriority(self, facility: int | str, priority: int | str) -> int: ... + def mapPriority(self, levelName: str) -> str: ... + +class NTEventLogHandler(Handler): + def __init__(self, appname: str, dllname: str | None = None, logtype: str = "Application") -> None: ... + def getEventCategory(self, record: LogRecord) -> int: ... + # TODO correct return value? + def getEventType(self, record: LogRecord) -> int: ... + def getMessageID(self, record: LogRecord) -> int: ... + +class SMTPHandler(Handler): + mailhost: str # undocumented + mailport: int | None # undocumented + username: str | None # undocumented + # password only exists as an attribute if passed credentials is a tuple or list + password: str # undocumented + fromaddr: str # undocumented + toaddrs: list[str] # undocumented + subject: str # undocumented + secure: tuple[()] | tuple[str] | tuple[str, str] | None # undocumented + timeout: float # undocumented + def __init__( + self, + mailhost: str | tuple[str, int], + fromaddr: str, + toaddrs: str | list[str], + subject: str, + credentials: tuple[str, str] | None = None, + secure: tuple[()] | tuple[str] | tuple[str, str] | None = None, + timeout: float = 5.0, + ) -> None: ... + def getSubject(self, record: LogRecord) -> str: ... 
+ +class BufferingHandler(Handler): + capacity: int # undocumented + buffer: list[LogRecord] # undocumented + def __init__(self, capacity: int) -> None: ... + def shouldFlush(self, record: LogRecord) -> bool: ... + +class MemoryHandler(BufferingHandler): + flushLevel: int # undocumented + target: Handler | None # undocumented + flushOnClose: bool # undocumented + def __init__(self, capacity: int, flushLevel: int = 40, target: Handler | None = None, flushOnClose: bool = True) -> None: ... + def setTarget(self, target: Handler | None) -> None: ... + +class HTTPHandler(Handler): + host: str # undocumented + url: str # undocumented + method: str # undocumented + secure: bool # undocumented + credentials: tuple[str, str] | None # undocumented + context: ssl.SSLContext | None # undocumented + def __init__( + self, + host: str, + url: str, + method: str = "GET", + secure: bool = False, + credentials: tuple[str, str] | None = None, + context: ssl.SSLContext | None = None, + ) -> None: ... + def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: ... + if sys.version_info >= (3, 9): + def getConnection(self, host: str, secure: bool) -> http.client.HTTPConnection: ... # undocumented + +class QueueHandler(Handler): + queue: SimpleQueue[Any] | Queue[Any] # undocumented + def __init__(self, queue: SimpleQueue[Any] | Queue[Any]) -> None: ... + def prepare(self, record: LogRecord) -> Any: ... + def enqueue(self, record: LogRecord) -> None: ... + +class QueueListener: + handlers: tuple[Handler, ...] # undocumented + respect_handler_level: bool # undocumented + queue: SimpleQueue[Any] | Queue[Any] # undocumented + def __init__(self, queue: SimpleQueue[Any] | Queue[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: ... + def dequeue(self, block: bool) -> LogRecord: ... + def prepare(self, record: LogRecord) -> Any: ... + def start(self) -> None: ... + def stop(self) -> None: ... + def enqueue_sentinel(self) -> None: ... + def handle(self, record: LogRecord) -> None: ... 
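QueueHandler and QueueListener at the end of this stub are meant to be used as a pair: the handler only enqueues records, and the listener drains the queue and dispatches to real handlers on its own thread. A minimal sketch (logger and message names are illustrative):

# Illustrative sketch only: decouple log emission from handling with a queue.
import logging
import queue
from logging.handlers import QueueHandler, QueueListener

q: "queue.SimpleQueue[logging.LogRecord]" = queue.SimpleQueue()
listener = QueueListener(q, logging.StreamHandler(), respect_handler_level=True)
listener.start()

log = logging.getLogger("queued")
log.setLevel(logging.INFO)
log.addHandler(QueueHandler(q))
log.info("handled on the listener's thread")

listener.stop()   # flushes remaining records and joins the listener thread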
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lzma.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lzma.pyi new file mode 100644 index 00000000..34bd6f3f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/lzma.pyi @@ -0,0 +1,197 @@ +import io +from _typeshed import ReadableBuffer, StrOrBytesPath +from collections.abc import Mapping, Sequence +from typing import IO, Any, TextIO, overload +from typing_extensions import Literal, Self, TypeAlias, final + +__all__ = [ + "CHECK_NONE", + "CHECK_CRC32", + "CHECK_CRC64", + "CHECK_SHA256", + "CHECK_ID_MAX", + "CHECK_UNKNOWN", + "FILTER_LZMA1", + "FILTER_LZMA2", + "FILTER_DELTA", + "FILTER_X86", + "FILTER_IA64", + "FILTER_ARM", + "FILTER_ARMTHUMB", + "FILTER_POWERPC", + "FILTER_SPARC", + "FORMAT_AUTO", + "FORMAT_XZ", + "FORMAT_ALONE", + "FORMAT_RAW", + "MF_HC3", + "MF_HC4", + "MF_BT2", + "MF_BT3", + "MF_BT4", + "MODE_FAST", + "MODE_NORMAL", + "PRESET_DEFAULT", + "PRESET_EXTREME", + "LZMACompressor", + "LZMADecompressor", + "LZMAFile", + "LZMAError", + "open", + "compress", + "decompress", + "is_check_supported", +] + +_OpenBinaryWritingMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] +_OpenTextWritingMode: TypeAlias = Literal["wt", "xt", "at"] + +_PathOrFile: TypeAlias = StrOrBytesPath | IO[bytes] + +_FilterChain: TypeAlias = Sequence[Mapping[str, Any]] + +FORMAT_AUTO: Literal[0] +FORMAT_XZ: Literal[1] +FORMAT_ALONE: Literal[2] +FORMAT_RAW: Literal[3] +CHECK_NONE: Literal[0] +CHECK_CRC32: Literal[1] +CHECK_CRC64: Literal[4] +CHECK_SHA256: Literal[10] +CHECK_ID_MAX: Literal[15] +CHECK_UNKNOWN: Literal[16] +FILTER_LZMA1: int # v big number +FILTER_LZMA2: Literal[33] +FILTER_DELTA: Literal[3] +FILTER_X86: Literal[4] +FILTER_IA64: Literal[6] +FILTER_ARM: Literal[7] +FILTER_ARMTHUMB: Literal[8] +FILTER_SPARC: Literal[9] +FILTER_POWERPC: Literal[5] +MF_HC3: Literal[3] +MF_HC4: Literal[4] +MF_BT2: Literal[18] +MF_BT3: Literal[19] +MF_BT4: Literal[20] +MODE_FAST: Literal[1] +MODE_NORMAL: Literal[2] +PRESET_DEFAULT: Literal[6] +PRESET_EXTREME: int # v big number + +# from _lzma.c +@final +class LZMADecompressor: + def __init__(self, format: int | None = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> None: ... + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... + @property + def check(self) -> int: ... + @property + def eof(self) -> bool: ... + @property + def unused_data(self) -> bytes: ... + @property + def needs_input(self) -> bool: ... + +# from _lzma.c +@final +class LZMACompressor: + def __init__( + self, format: int | None = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... + ) -> None: ... + def compress(self, __data: ReadableBuffer) -> bytes: ... + def flush(self) -> bytes: ... + +class LZMAError(Exception): ... + +class LZMAFile(io.BufferedIOBase, IO[bytes]): + def __init__( + self, + filename: _PathOrFile | None = None, + mode: str = "r", + *, + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + ) -> None: ... + def __enter__(self) -> Self: ... + def peek(self, size: int = -1) -> bytes: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: int | None = -1) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... + def seek(self, offset: int, whence: int = 0) -> int: ... 
+ +@overload +def open( + filename: _PathOrFile, + mode: Literal["r", "rb"] = "rb", + *, + format: int | None = None, + check: Literal[-1] = -1, + preset: None = None, + filters: _FilterChain | None = None, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> LZMAFile: ... +@overload +def open( + filename: _PathOrFile, + mode: _OpenBinaryWritingMode, + *, + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: None = None, + errors: None = None, + newline: None = None, +) -> LZMAFile: ... +@overload +def open( + filename: StrOrBytesPath, + mode: Literal["rt"], + *, + format: int | None = None, + check: Literal[-1] = -1, + preset: None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIO: ... +@overload +def open( + filename: StrOrBytesPath, + mode: _OpenTextWritingMode, + *, + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> TextIO: ... +@overload +def open( + filename: _PathOrFile, + mode: str, + *, + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, +) -> LZMAFile | TextIO: ... +def compress( + data: ReadableBuffer, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None +) -> bytes: ... +def decompress( + data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None +) -> bytes: ... +def is_check_supported(__check_id: int) -> bool: ... 
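The open() overloads above distinguish binary modes (LZMAFile) from text modes (TextIO). A short sketch of the one-shot and file-based round trips, with "example.xz" as a hypothetical output path:

# Illustrative sketch only: LZMA round trips via compress/decompress and lzma.open.
import lzma

payload = b"typeshed stats " * 1024
blob = lzma.compress(payload, preset=lzma.PRESET_DEFAULT)
assert lzma.decompress(blob) == payload

with lzma.open("example.xz", "wt", encoding="utf-8") as f:   # text mode -> TextIO wrapper
    f.write("text mode wraps the binary stream\n")
with lzma.open("example.xz", "rt", encoding="utf-8") as f:
    print(f.read(), end="")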
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/macpath.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/macpath.pyi new file mode 100644 index 00000000..37821f44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/macpath.pyi @@ -0,0 +1,104 @@ +from _typeshed import BytesPath, StrOrBytesPath, StrPath +from genericpath import ( + commonprefix as commonprefix, + exists as exists, + getatime as getatime, + getctime as getctime, + getmtime as getmtime, + getsize as getsize, + isdir as isdir, + isfile as isfile, + samefile as samefile, + sameopenfile as sameopenfile, + samestat as samestat, +) +from os import PathLike + +# Re-export common definitions from posixpath to reduce duplication +from posixpath import ( + abspath as abspath, + curdir as curdir, + defpath as defpath, + devnull as devnull, + expanduser as expanduser, + expandvars as expandvars, + extsep as extsep, + isabs as isabs, + lexists as lexists, + pardir as pardir, + pathsep as pathsep, + sep as sep, + splitdrive as splitdrive, + splitext as splitext, + supports_unicode_filenames as supports_unicode_filenames, +) +from typing import AnyStr, overload + +__all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "expanduser", + "expandvars", + "normpath", + "abspath", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", +] + +altsep: str | None + +@overload +def basename(s: PathLike[AnyStr]) -> AnyStr: ... +@overload +def basename(s: AnyStr) -> AnyStr: ... +@overload +def dirname(s: PathLike[AnyStr]) -> AnyStr: ... +@overload +def dirname(s: AnyStr) -> AnyStr: ... +@overload +def normcase(path: PathLike[AnyStr]) -> AnyStr: ... +@overload +def normcase(path: AnyStr) -> AnyStr: ... +@overload +def normpath(s: PathLike[AnyStr]) -> AnyStr: ... +@overload +def normpath(s: AnyStr) -> AnyStr: ... +@overload +def realpath(path: PathLike[AnyStr]) -> AnyStr: ... +@overload +def realpath(path: AnyStr) -> AnyStr: ... +def islink(s: StrOrBytesPath) -> bool: ... + +# Mypy complains that the signatures overlap, but things seem to behave correctly anyway. +@overload +def join(s: StrPath, *paths: StrPath) -> str: ... +@overload +def join(s: BytesPath, *paths: BytesPath) -> bytes: ... +@overload +def split(s: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +@overload +def split(s: AnyStr) -> tuple[AnyStr, AnyStr]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mailbox.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mailbox.pyi new file mode 100644 index 00000000..8053fad8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mailbox.pyi @@ -0,0 +1,256 @@ +import email.message +import io +import sys +from _typeshed import StrPath, SupportsNoArgReadline, SupportsRead +from abc import ABCMeta, abstractmethod +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from types import TracebackType +from typing import IO, Any, AnyStr, Generic, Protocol, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "Mailbox", + "Maildir", + "mbox", + "MH", + "Babyl", + "MMDF", + "Message", + "MaildirMessage", + "mboxMessage", + "MHMessage", + "BabylMessage", + "MMDFMessage", + "Error", + "NoSuchMailboxError", + "NotEmptyError", + "ExternalClashError", + "FormatError", +] + +_T = TypeVar("_T") +_MessageT = TypeVar("_MessageT", bound=Message) + +class _SupportsReadAndReadline(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... + +_MessageData: TypeAlias = email.message.Message | bytes | str | io.StringIO | _SupportsReadAndReadline + +class _HasIteritems(Protocol): + def iteritems(self) -> Iterator[tuple[str, _MessageData]]: ... + +class _HasItems(Protocol): + def items(self) -> Iterator[tuple[str, _MessageData]]: ... + +linesep: bytes + +class Mailbox(Generic[_MessageT]): + _path: str # undocumented + _factory: Callable[[IO[Any]], _MessageT] | None # undocumented + @overload + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = True) -> None: ... + @overload + def __init__(self, path: StrPath, factory: None = None, create: bool = True) -> None: ... + @abstractmethod + def add(self, message: _MessageData) -> str: ... + @abstractmethod + def remove(self, key: str) -> None: ... + def __delitem__(self, key: str) -> None: ... + def discard(self, key: str) -> None: ... + @abstractmethod + def __setitem__(self, key: str, message: _MessageData) -> None: ... + @overload + def get(self, key: str, default: None = None) -> _MessageT | None: ... + @overload + def get(self, key: str, default: _T) -> _MessageT | _T: ... + def __getitem__(self, key: str) -> _MessageT: ... + @abstractmethod + def get_message(self, key: str) -> _MessageT: ... + def get_string(self, key: str) -> str: ... + @abstractmethod + def get_bytes(self, key: str) -> bytes: ... + # As '_ProxyFile' doesn't implement the full IO spec, and BytesIO is incompatible with it, get_file return is Any here + @abstractmethod + def get_file(self, key: str) -> Any: ... + @abstractmethod + def iterkeys(self) -> Iterator[str]: ... + def keys(self) -> list[str]: ... + def itervalues(self) -> Iterator[_MessageT]: ... + def __iter__(self) -> Iterator[_MessageT]: ... + def values(self) -> list[_MessageT]: ... + def iteritems(self) -> Iterator[tuple[str, _MessageT]]: ... + def items(self) -> list[tuple[str, _MessageT]]: ... + @abstractmethod + def __contains__(self, key: str) -> bool: ... + @abstractmethod + def __len__(self) -> int: ... + def clear(self) -> None: ... + @overload + def pop(self, key: str, default: None = None) -> _MessageT | None: ... + @overload + def pop(self, key: str, default: _T) -> _MessageT | _T: ... + def popitem(self) -> tuple[str, _MessageT]: ... 
+ def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = None) -> None: ... + @abstractmethod + def flush(self) -> None: ... + @abstractmethod + def lock(self) -> None: ... + @abstractmethod + def unlock(self) -> None: ... + @abstractmethod + def close(self) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class Maildir(Mailbox[MaildirMessage]): + colon: str + def __init__( + self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = None, create: bool = True + ) -> None: ... + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def get_message(self, key: str) -> MaildirMessage: ... + def get_bytes(self, key: str) -> bytes: ... + def get_file(self, key: str) -> _ProxyFile[bytes]: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def flush(self) -> None: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def close(self) -> None: ... + def list_folders(self) -> list[str]: ... + def get_folder(self, folder: str) -> Maildir: ... + def add_folder(self, folder: str) -> Maildir: ... + def remove_folder(self, folder: str) -> None: ... + def clean(self) -> None: ... + def next(self) -> str | None: ... + +class _singlefileMailbox(Mailbox[_MessageT], metaclass=ABCMeta): + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def flush(self) -> None: ... + def close(self) -> None: ... + +class _mboxMMDF(_singlefileMailbox[_MessageT]): + def get_message(self, key: str) -> _MessageT: ... + def get_file(self, key: str, from_: bool = False) -> _PartialFile[bytes]: ... + def get_bytes(self, key: str, from_: bool = False) -> bytes: ... + def get_string(self, key: str, from_: bool = False) -> str: ... + +class mbox(_mboxMMDF[mboxMessage]): + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = None, create: bool = True) -> None: ... + +class MMDF(_mboxMMDF[MMDFMessage]): + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = None, create: bool = True) -> None: ... + +class MH(Mailbox[MHMessage]): + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = None, create: bool = True) -> None: ... + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def get_message(self, key: str) -> MHMessage: ... + def get_bytes(self, key: str) -> bytes: ... + def get_file(self, key: str) -> _ProxyFile[bytes]: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def flush(self) -> None: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def close(self) -> None: ... + def list_folders(self) -> list[str]: ... + def get_folder(self, folder: StrPath) -> MH: ... + def add_folder(self, folder: StrPath) -> MH: ... + def remove_folder(self, folder: StrPath) -> None: ... + def get_sequences(self) -> dict[str, list[int]]: ... 
+ def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: ... + def pack(self) -> None: ... + +class Babyl(_singlefileMailbox[BabylMessage]): + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = None, create: bool = True) -> None: ... + def get_message(self, key: str) -> BabylMessage: ... + def get_bytes(self, key: str) -> bytes: ... + def get_file(self, key: str) -> IO[bytes]: ... + def get_labels(self) -> list[str]: ... + +class Message(email.message.Message): + def __init__(self, message: _MessageData | None = None) -> None: ... + +class MaildirMessage(Message): + def get_subdir(self) -> str: ... + def set_subdir(self, subdir: Literal["new", "cur"]) -> None: ... + def get_flags(self) -> str: ... + def set_flags(self, flags: Iterable[str]) -> None: ... + def add_flag(self, flag: str) -> None: ... + def remove_flag(self, flag: str) -> None: ... + def get_date(self) -> int: ... + def set_date(self, date: float) -> None: ... + def get_info(self) -> str: ... + def set_info(self, info: str) -> None: ... + +class _mboxMMDFMessage(Message): + def get_from(self) -> str: ... + def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = None) -> None: ... + def get_flags(self) -> str: ... + def set_flags(self, flags: Iterable[str]) -> None: ... + def add_flag(self, flag: str) -> None: ... + def remove_flag(self, flag: str) -> None: ... + +class mboxMessage(_mboxMMDFMessage): ... + +class MHMessage(Message): + def get_sequences(self) -> list[str]: ... + def set_sequences(self, sequences: Iterable[str]) -> None: ... + def add_sequence(self, sequence: str) -> None: ... + def remove_sequence(self, sequence: str) -> None: ... + +class BabylMessage(Message): + def get_labels(self) -> list[str]: ... + def set_labels(self, labels: Iterable[str]) -> None: ... + def add_label(self, label: str) -> None: ... + def remove_label(self, label: str) -> None: ... + def get_visible(self) -> Message: ... + def set_visible(self, visible: _MessageData) -> None: ... + def update_visible(self) -> None: ... + +class MMDFMessage(_mboxMMDFMessage): ... + +class _ProxyFile(Generic[AnyStr]): + def __init__(self, f: IO[AnyStr], pos: int | None = None) -> None: ... + def read(self, size: int | None = None) -> AnyStr: ... + def read1(self, size: int | None = None) -> AnyStr: ... + def readline(self, size: int | None = None) -> AnyStr: ... + def readlines(self, sizehint: int | None = None) -> list[AnyStr]: ... + def __iter__(self) -> Iterator[AnyStr]: ... + def tell(self) -> int: ... + def seek(self, offset: int, whence: int = 0) -> None: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... + def readable(self) -> bool: ... + def writable(self) -> bool: ... + def seekable(self) -> bool: ... + def flush(self) -> None: ... + @property + def closed(self) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class _PartialFile(_ProxyFile[AnyStr]): + def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: ... + +class Error(Exception): ... +class NoSuchMailboxError(Error): ... +class NotEmptyError(Error): ... +class ExternalClashError(Error): ... +class FormatError(Error): ... 
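Mailbox above is generic over its message type, so each concrete mailbox class (mbox, Maildir, MH, Babyl, MMDF) yields its own Message subclass from __getitem__() and get_message(), while get_file() deliberately stays Any as the comment in the stub explains. A small sketch of that parametrization, assuming a throwaway example.mbox file:

import mailbox

box = mailbox.mbox("example.mbox")      # hypothetical path; created on demand (create=True)
try:
    key = box.add("From: a@example.com\n\nhello\n")
    msg = box[key]                      # typed as mailbox.mboxMessage, not plain email.message.Message
    print(msg.get_from())               # mboxMessage-specific accessor declared above
finally:
    box.close()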
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mailcap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mailcap.pyi new file mode 100644 index 00000000..5905f582 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mailcap.pyi @@ -0,0 +1,11 @@ +from collections.abc import Mapping, Sequence +from typing_extensions import TypeAlias + +_Cap: TypeAlias = dict[str, str | int] + +__all__ = ["getcaps", "findmatch"] + +def findmatch( + caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = "view", filename: str = "/dev/null", plist: Sequence[str] = ... +) -> tuple[str | None, _Cap | None]: ... +def getcaps() -> dict[str, list[_Cap]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/marshal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/marshal.pyi new file mode 100644 index 00000000..21f05c90 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/marshal.pyi @@ -0,0 +1,33 @@ +import builtins +import types +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite +from typing import Any +from typing_extensions import TypeAlias + +version: int + +_Marshallable: TypeAlias = ( + # handled in w_object() in marshal.c + None + | type[StopIteration] + | builtins.ellipsis + | bool + # handled in w_complex_object() in marshal.c + | int + | float + | complex + | bytes + | str + | tuple[_Marshallable, ...] + | list[Any] + | dict[Any, Any] + | set[Any] + | frozenset[_Marshallable] + | types.CodeType + | ReadableBuffer +) + +def dump(__value: _Marshallable, __file: SupportsWrite[bytes], __version: int = 4) -> None: ... +def load(__file: SupportsRead[bytes]) -> Any: ... +def dumps(__value: _Marshallable, __version: int = 4) -> bytes: ... +def loads(__bytes: ReadableBuffer) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/math.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/math.pyi new file mode 100644 index 00000000..231964f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/math.pyi @@ -0,0 +1,144 @@ +import sys +from collections.abc import Iterable +from typing import Protocol, SupportsFloat, TypeVar, overload +from typing_extensions import SupportsIndex, TypeAlias + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + +if sys.version_info >= (3, 8): + _SupportsFloatOrIndex: TypeAlias = SupportsFloat | SupportsIndex +else: + _SupportsFloatOrIndex: TypeAlias = SupportsFloat + +e: float +pi: float +inf: float +nan: float +tau: float + +def acos(__x: _SupportsFloatOrIndex) -> float: ... +def acosh(__x: _SupportsFloatOrIndex) -> float: ... +def asin(__x: _SupportsFloatOrIndex) -> float: ... +def asinh(__x: _SupportsFloatOrIndex) -> float: ... +def atan(__x: _SupportsFloatOrIndex) -> float: ... +def atan2(__y: _SupportsFloatOrIndex, __x: _SupportsFloatOrIndex) -> float: ... +def atanh(__x: _SupportsFloatOrIndex) -> float: ... + +if sys.version_info >= (3, 11): + def cbrt(__x: _SupportsFloatOrIndex) -> float: ... + +class _SupportsCeil(Protocol[_T_co]): + def __ceil__(self) -> _T_co: ... + +@overload +def ceil(__x: _SupportsCeil[_T]) -> _T: ... +@overload +def ceil(__x: _SupportsFloatOrIndex) -> int: ... + +if sys.version_info >= (3, 8): + def comb(__n: SupportsIndex, __k: SupportsIndex) -> int: ... 
+ +def copysign(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... +def cos(__x: _SupportsFloatOrIndex) -> float: ... +def cosh(__x: _SupportsFloatOrIndex) -> float: ... +def degrees(__x: _SupportsFloatOrIndex) -> float: ... + +if sys.version_info >= (3, 8): + def dist(__p: Iterable[_SupportsFloatOrIndex], __q: Iterable[_SupportsFloatOrIndex]) -> float: ... + +def erf(__x: _SupportsFloatOrIndex) -> float: ... +def erfc(__x: _SupportsFloatOrIndex) -> float: ... +def exp(__x: _SupportsFloatOrIndex) -> float: ... + +if sys.version_info >= (3, 11): + def exp2(__x: _SupportsFloatOrIndex) -> float: ... + +def expm1(__x: _SupportsFloatOrIndex) -> float: ... +def fabs(__x: _SupportsFloatOrIndex) -> float: ... + +if sys.version_info >= (3, 8): + def factorial(__x: SupportsIndex) -> int: ... + +else: + def factorial(__x: int) -> int: ... + +class _SupportsFloor(Protocol[_T_co]): + def __floor__(self) -> _T_co: ... + +@overload +def floor(__x: _SupportsFloor[_T]) -> _T: ... +@overload +def floor(__x: _SupportsFloatOrIndex) -> int: ... +def fmod(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... +def frexp(__x: _SupportsFloatOrIndex) -> tuple[float, int]: ... +def fsum(__seq: Iterable[_SupportsFloatOrIndex]) -> float: ... +def gamma(__x: _SupportsFloatOrIndex) -> float: ... + +if sys.version_info >= (3, 9): + def gcd(*integers: SupportsIndex) -> int: ... + +else: + def gcd(__x: SupportsIndex, __y: SupportsIndex) -> int: ... + +if sys.version_info >= (3, 8): + def hypot(*coordinates: _SupportsFloatOrIndex) -> float: ... + +else: + def hypot(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... + +def isclose( + a: _SupportsFloatOrIndex, + b: _SupportsFloatOrIndex, + *, + rel_tol: _SupportsFloatOrIndex = 1e-09, + abs_tol: _SupportsFloatOrIndex = 0.0, +) -> bool: ... +def isinf(__x: _SupportsFloatOrIndex) -> bool: ... +def isfinite(__x: _SupportsFloatOrIndex) -> bool: ... +def isnan(__x: _SupportsFloatOrIndex) -> bool: ... + +if sys.version_info >= (3, 8): + def isqrt(__n: SupportsIndex) -> int: ... + +if sys.version_info >= (3, 9): + def lcm(*integers: SupportsIndex) -> int: ... + +def ldexp(__x: _SupportsFloatOrIndex, __i: int) -> float: ... +def lgamma(__x: _SupportsFloatOrIndex) -> float: ... +def log(x: _SupportsFloatOrIndex, base: _SupportsFloatOrIndex = ...) -> float: ... +def log10(__x: _SupportsFloatOrIndex) -> float: ... +def log1p(__x: _SupportsFloatOrIndex) -> float: ... +def log2(__x: _SupportsFloatOrIndex) -> float: ... +def modf(__x: _SupportsFloatOrIndex) -> tuple[float, float]: ... + +if sys.version_info >= (3, 9): + def nextafter(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... + +if sys.version_info >= (3, 8): + def perm(__n: SupportsIndex, __k: SupportsIndex | None = None) -> int: ... + +def pow(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... + +if sys.version_info >= (3, 8): + @overload + def prod(__iterable: Iterable[SupportsIndex], *, start: SupportsIndex = 1) -> int: ... # type: ignore[misc] + @overload + def prod(__iterable: Iterable[_SupportsFloatOrIndex], *, start: _SupportsFloatOrIndex = 1) -> float: ... + +def radians(__x: _SupportsFloatOrIndex) -> float: ... +def remainder(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... +def sin(__x: _SupportsFloatOrIndex) -> float: ... +def sinh(__x: _SupportsFloatOrIndex) -> float: ... +def sqrt(__x: _SupportsFloatOrIndex) -> float: ... +def tan(__x: _SupportsFloatOrIndex) -> float: ... 
+def tanh(__x: _SupportsFloatOrIndex) -> float: ... + +# Is different from `_typeshed.SupportsTrunc`, which is not generic +class _SupportsTrunc(Protocol[_T_co]): + def __trunc__(self) -> _T_co: ... + +def trunc(__x: _SupportsTrunc[_T]) -> _T: ... + +if sys.version_info >= (3, 9): + def ulp(__x: _SupportsFloatOrIndex) -> float: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mimetypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mimetypes.pyi new file mode 100644 index 00000000..fd390868 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mimetypes.pyi @@ -0,0 +1,57 @@ +import sys +from _typeshed import StrPath +from collections.abc import Sequence +from typing import IO + +__all__ = [ + "knownfiles", + "inited", + "MimeTypes", + "guess_type", + "guess_all_extensions", + "guess_extension", + "add_type", + "init", + "read_mime_types", + "suffix_map", + "encodings_map", + "types_map", + "common_types", +] + +if sys.version_info >= (3, 8): + def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... + +else: + def guess_type(url: str, strict: bool = True) -> tuple[str | None, str | None]: ... + +def guess_all_extensions(type: str, strict: bool = True) -> list[str]: ... +def guess_extension(type: str, strict: bool = True) -> str | None: ... +def init(files: Sequence[str] | None = None) -> None: ... +def read_mime_types(file: str) -> dict[str, str] | None: ... +def add_type(type: str, ext: str, strict: bool = True) -> None: ... + +inited: bool +knownfiles: list[str] +suffix_map: dict[str, str] +encodings_map: dict[str, str] +types_map: dict[str, str] +common_types: dict[str, str] + +class MimeTypes: + suffix_map: dict[str, str] + encodings_map: dict[str, str] + types_map: tuple[dict[str, str], dict[str, str]] + types_map_inv: tuple[dict[str, str], dict[str, str]] + def __init__(self, filenames: tuple[str, ...] = ..., strict: bool = True) -> None: ... + def guess_extension(self, type: str, strict: bool = True) -> str | None: ... + if sys.version_info >= (3, 8): + def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... + else: + def guess_type(self, url: str, strict: bool = True) -> tuple[str | None, str | None]: ... + + def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ... + def read(self, filename: str, strict: bool = True) -> None: ... + def readfp(self, fp: IO[str], strict: bool = True) -> None: ... + if sys.platform == "win32": + def read_windows_registry(self, strict: bool = True) -> None: ... 
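The math stub above pairs small protocols (_SupportsCeil, _SupportsFloor, _SupportsTrunc) with overloads so that ceil(), floor(), and trunc() propagate the return type of the corresponding dunder instead of collapsing to int. A hedged illustration with a hypothetical Money class:

import math
from dataclasses import dataclass

@dataclass
class Money:
    cents: int
    def __ceil__(self) -> "Money":
        # round up to the next whole dollar while staying a Money
        return Money(((self.cents + 99) // 100) * 100)

rounded = math.ceil(Money(250))   # matches the _SupportsCeil[_T] overload, stays Money
plain = math.ceil(2.5)            # matches the _SupportsFloatOrIndex overload, returns int
print(rounded, plain)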
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mmap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mmap.pyi new file mode 100644 index 00000000..c74ad3cd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/mmap.pyi @@ -0,0 +1,113 @@ +import sys +from _typeshed import ReadableBuffer, Unused +from collections.abc import Iterable, Iterator, Sized +from typing import NoReturn, overload +from typing_extensions import Self + +ACCESS_DEFAULT: int +ACCESS_READ: int +ACCESS_WRITE: int +ACCESS_COPY: int + +ALLOCATIONGRANULARITY: int + +if sys.platform == "linux": + MAP_DENYWRITE: int + MAP_EXECUTABLE: int + if sys.version_info >= (3, 10): + MAP_POPULATE: int + +if sys.platform != "win32": + MAP_ANON: int + MAP_ANONYMOUS: int + MAP_PRIVATE: int + MAP_SHARED: int + PROT_EXEC: int + PROT_READ: int + PROT_WRITE: int + + PAGESIZE: int + +class mmap(Iterable[int], Sized): + if sys.platform == "win32": + def __init__(self, fileno: int, length: int, tagname: str | None = ..., access: int = ..., offset: int = ...) -> None: ... + else: + def __init__( + self, fileno: int, length: int, flags: int = ..., prot: int = ..., access: int = ..., offset: int = ... + ) -> None: ... + + def close(self) -> None: ... + if sys.version_info >= (3, 8): + def flush(self, offset: int = ..., size: int = ...) -> None: ... + else: + def flush(self, offset: int = ..., size: int = ...) -> int: ... + + def move(self, dest: int, src: int, count: int) -> None: ... + def read_byte(self) -> int: ... + def readline(self) -> bytes: ... + def resize(self, newsize: int) -> None: ... + def seek(self, pos: int, whence: int = ...) -> None: ... + def size(self) -> int: ... + def tell(self) -> int: ... + def write_byte(self, byte: int) -> None: ... + def __len__(self) -> int: ... + closed: bool + if sys.version_info >= (3, 8) and sys.platform != "win32": + def madvise(self, option: int, start: int = ..., length: int = ...) -> None: ... + + def find(self, sub: ReadableBuffer, start: int = ..., stop: int = ...) -> int: ... + def rfind(self, sub: ReadableBuffer, start: int = ..., stop: int = ...) -> int: ... + def read(self, n: int | None = ...) -> bytes: ... + def write(self, bytes: ReadableBuffer) -> int: ... + @overload + def __getitem__(self, __index: int) -> int: ... + @overload + def __getitem__(self, __index: slice) -> bytes: ... + def __delitem__(self, __index: int | slice) -> NoReturn: ... + @overload + def __setitem__(self, __index: int, __object: int) -> None: ... + @overload + def __setitem__(self, __index: slice, __object: ReadableBuffer) -> None: ... + # Doesn't actually exist, but the object actually supports "in" because it has __getitem__, + # so we claim that there is also a __contains__ to help type checkers. + def __contains__(self, __o: object) -> bool: ... + # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and __len__, + # so we claim that there is also an __iter__ to help type checkers. + def __iter__(self) -> Iterator[int]: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... 
+ +if sys.version_info >= (3, 8) and sys.platform != "win32": + MADV_NORMAL: int + MADV_RANDOM: int + MADV_SEQUENTIAL: int + MADV_WILLNEED: int + MADV_DONTNEED: int + MADV_FREE: int + + if sys.platform == "linux": + MADV_REMOVE: int + MADV_DONTFORK: int + MADV_DOFORK: int + MADV_HWPOISON: int + MADV_MERGEABLE: int + MADV_UNMERGEABLE: int + # Seems like this constant is not defined in glibc. + # See https://github.com/python/typeshed/pull/5360 for details + # MADV_SOFT_OFFLINE: int + MADV_HUGEPAGE: int + MADV_NOHUGEPAGE: int + MADV_DONTDUMP: int + MADV_DODUMP: int + + # This Values are defined for FreeBSD but type checkers do not support conditions for these + if sys.platform != "linux" and sys.platform != "darwin": + MADV_NOSYNC: int + MADV_AUTOSYNC: int + MADV_NOCORE: int + MADV_CORE: int + MADV_PROTECT: int + +if sys.version_info >= (3, 10) and sys.platform == "darwin": + MADV_FREE_REUSABLE: int + MADV_FREE_REUSE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/modulefinder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/modulefinder.pyi new file mode 100644 index 00000000..6f191764 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/modulefinder.pyi @@ -0,0 +1,76 @@ +import sys +from collections.abc import Container, Iterable, Iterator, Sequence +from types import CodeType +from typing import IO, Any + +if sys.version_info < (3, 11): + LOAD_CONST: int # undocumented + IMPORT_NAME: int # undocumented + STORE_NAME: int # undocumented + STORE_GLOBAL: int # undocumented + STORE_OPS: tuple[int, int] # undocumented + EXTENDED_ARG: int # undocumented + +packagePathMap: dict[str, list[str]] # undocumented + +def AddPackagePath(packagename: str, path: str) -> None: ... + +replacePackageMap: dict[str, str] # undocumented + +def ReplacePackage(oldname: str, newname: str) -> None: ... + +class Module: # undocumented + def __init__(self, name: str, file: str | None = None, path: str | None = None) -> None: ... + +class ModuleFinder: + modules: dict[str, Module] + path: list[str] # undocumented + badmodules: dict[str, dict[str, int]] # undocumented + debug: int # undocumented + indent: int # undocumented + excludes: Container[str] # undocumented + replace_paths: Sequence[tuple[str, str]] # undocumented + + if sys.version_info >= (3, 8): + def __init__( + self, + path: list[str] | None = None, + debug: int = 0, + excludes: Container[str] | None = None, + replace_paths: Sequence[tuple[str, str]] | None = None, + ) -> None: ... + else: + def __init__( + self, + path: list[str] | None = None, + debug: int = 0, + excludes: Container[str] = ..., + replace_paths: Sequence[tuple[str, str]] = ..., + ) -> None: ... + + def msg(self, level: int, str: str, *args: Any) -> None: ... # undocumented + def msgin(self, *args: Any) -> None: ... # undocumented + def msgout(self, *args: Any) -> None: ... # undocumented + def run_script(self, pathname: str) -> None: ... + def load_file(self, pathname: str) -> None: ... # undocumented + def import_hook( + self, name: str, caller: Module | None = None, fromlist: list[str] | None = None, level: int = -1 + ) -> Module | None: ... # undocumented + def determine_parent(self, caller: Module | None, level: int = -1) -> Module | None: ... # undocumented + def find_head_package(self, parent: Module, name: str) -> tuple[Module, str]: ... # undocumented + def load_tail(self, q: Module, tail: str) -> Module: ... 
# undocumented + def ensure_fromlist(self, m: Module, fromlist: Iterable[str], recursive: int = 0) -> None: ... # undocumented + def find_all_submodules(self, m: Module) -> Iterable[str]: ... # undocumented + def import_module(self, partname: str, fqname: str, parent: Module) -> Module | None: ... # undocumented + def load_module(self, fqname: str, fp: IO[str], pathname: str, file_info: tuple[str, str, str]) -> Module: ... # undocumented + def scan_opcodes(self, co: CodeType) -> Iterator[tuple[str, tuple[Any, ...]]]: ... # undocumented + def scan_code(self, co: CodeType, m: Module) -> None: ... # undocumented + def load_package(self, fqname: str, pathname: str) -> Module: ... # undocumented + def add_module(self, fqname: str) -> Module: ... # undocumented + def find_module( + self, name: str, path: str | None, parent: Module | None = None + ) -> tuple[IO[Any] | None, str | None, tuple[str, str, int]]: ... # undocumented + def report(self) -> None: ... + def any_missing(self) -> list[str]: ... # undocumented + def any_missing_maybe(self) -> tuple[list[str], list[str]]: ... # undocumented + def replace_paths_in_code(self, co: CodeType) -> CodeType: ... # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/__init__.pyi new file mode 100644 index 00000000..9f7367d1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/__init__.pyi @@ -0,0 +1,177 @@ +import sys +from collections.abc import Container, Iterable, Sequence +from types import ModuleType +from typing import Any +from typing_extensions import Literal + +if sys.platform == "win32": + from _msi import * + from _msi import _Database + + AMD64: bool + Win64: bool + + datasizemask: Literal[0x00FF] + type_valid: Literal[0x0100] + type_localizable: Literal[0x0200] + typemask: Literal[0x0C00] + type_long: Literal[0x0000] + type_short: Literal[0x0400] + type_string: Literal[0x0C00] + type_binary: Literal[0x0800] + type_nullable: Literal[0x1000] + type_key: Literal[0x2000] + knownbits: Literal[0x3FFF] + + class Table: + name: str + fields: list[tuple[int, str, int]] + def __init__(self, name: str) -> None: ... + def add_field(self, index: int, name: str, type: int) -> None: ... + def sql(self) -> str: ... + def create(self, db: _Database) -> None: ... + + class _Unspecified: ... + + def change_sequence( + seq: Sequence[tuple[str, str | None, int]], + action: str, + seqno: int | type[_Unspecified] = ..., + cond: str | type[_Unspecified] = ..., + ) -> None: ... + def add_data(db: _Database, table: str, values: Iterable[tuple[Any, ...]]) -> None: ... + def add_stream(db: _Database, name: str, path: str) -> None: ... + def init_database( + name: str, schema: ModuleType, ProductName: str, ProductCode: str, ProductVersion: str, Manufacturer: str + ) -> _Database: ... + def add_tables(db: _Database, module: ModuleType) -> None: ... + def make_id(str: str) -> str: ... + def gen_uuid() -> str: ... + + class CAB: + name: str + files: list[tuple[str, str]] + filenames: set[str] + index: int + def __init__(self, name: str) -> None: ... + def gen_id(self, file: str) -> str: ... + def append(self, full: str, file: str, logical: str) -> tuple[int, str]: ... + def commit(self, db: _Database) -> None: ... 
+ _directories: set[str] + + class Directory: + db: _Database + cab: CAB + basedir: str + physical: str + logical: str + component: str | None + short_names: set[str] + ids: set[str] + keyfiles: dict[str, str] + componentflags: int | None + absolute: str + def __init__( + self, + db: _Database, + cab: CAB, + basedir: str, + physical: str, + _logical: str, + default: str, + componentflags: int | None = None, + ) -> None: ... + def start_component( + self, + component: str | None = None, + feature: Feature | None = None, + flags: int | None = None, + keyfile: str | None = None, + uuid: str | None = None, + ) -> None: ... + def make_short(self, file: str) -> str: ... + def add_file(self, file: str, src: str | None = None, version: str | None = None, language: str | None = None) -> str: ... + def glob(self, pattern: str, exclude: Container[str] | None = None) -> list[str]: ... + def remove_pyc(self) -> None: ... + + class Binary: + name: str + def __init__(self, fname: str) -> None: ... + + class Feature: + id: str + def __init__( + self, + db: _Database, + id: str, + title: str, + desc: str, + display: int, + level: int = 1, + parent: Feature | None = None, + directory: str | None = None, + attributes: int = 0, + ) -> None: ... + def set_current(self) -> None: ... + + class Control: + dlg: Dialog + name: str + def __init__(self, dlg: Dialog, name: str) -> None: ... + def event(self, event: str, argument: str, condition: str = "1", ordering: int | None = None) -> None: ... + def mapping(self, event: str, attribute: str) -> None: ... + def condition(self, action: str, condition: str) -> None: ... + + class RadioButtonGroup(Control): + property: str + index: int + def __init__(self, dlg: Dialog, name: str, property: str) -> None: ... + def add(self, name: str, x: int, y: int, w: int, h: int, text: str, value: str | None = None) -> None: ... + + class Dialog: + db: _Database + name: str + x: int + y: int + w: int + h: int + def __init__( + self, + db: _Database, + name: str, + x: int, + y: int, + w: int, + h: int, + attr: int, + title: str, + first: str, + default: str, + cancel: str, + ) -> None: ... + def control( + self, + name: str, + type: str, + x: int, + y: int, + w: int, + h: int, + attr: int, + prop: str | None, + text: str | None, + next: str | None, + help: str | None, + ) -> Control: ... + def text(self, name: str, x: int, y: int, w: int, h: int, attr: int, text: str | None) -> Control: ... + def bitmap(self, name: str, x: int, y: int, w: int, h: int, text: str | None) -> Control: ... + def line(self, name: str, x: int, y: int, w: int, h: int) -> Control: ... + def pushbutton( + self, name: str, x: int, y: int, w: int, h: int, attr: int, text: str | None, next: str | None + ) -> Control: ... + def radiogroup( + self, name: str, x: int, y: int, w: int, h: int, attr: int, prop: str | None, text: str | None, next: str | None + ) -> RadioButtonGroup: ... + def checkbox( + self, name: str, x: int, y: int, w: int, h: int, attr: int, prop: str | None, text: str | None, next: str | None + ) -> Control: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/schema.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/schema.pyi new file mode 100644 index 00000000..4ad9a178 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/schema.pyi @@ -0,0 +1,94 @@ +import sys + +if sys.platform == "win32": + from . 
import Table + + _Validation: Table + ActionText: Table + AdminExecuteSequence: Table + Condition: Table + AdminUISequence: Table + AdvtExecuteSequence: Table + AdvtUISequence: Table + AppId: Table + AppSearch: Table + Property: Table + BBControl: Table + Billboard: Table + Feature: Table + Binary: Table + BindImage: Table + File: Table + CCPSearch: Table + CheckBox: Table + Class: Table + Component: Table + Icon: Table + ProgId: Table + ComboBox: Table + CompLocator: Table + Complus: Table + Directory: Table + Control: Table + Dialog: Table + ControlCondition: Table + ControlEvent: Table + CreateFolder: Table + CustomAction: Table + DrLocator: Table + DuplicateFile: Table + Environment: Table + Error: Table + EventMapping: Table + Extension: Table + MIME: Table + FeatureComponents: Table + FileSFPCatalog: Table + SFPCatalog: Table + Font: Table + IniFile: Table + IniLocator: Table + InstallExecuteSequence: Table + InstallUISequence: Table + IsolatedComponent: Table + LaunchCondition: Table + ListBox: Table + ListView: Table + LockPermissions: Table + Media: Table + MoveFile: Table + MsiAssembly: Table + MsiAssemblyName: Table + MsiDigitalCertificate: Table + MsiDigitalSignature: Table + MsiFileHash: Table + MsiPatchHeaders: Table + ODBCAttribute: Table + ODBCDriver: Table + ODBCDataSource: Table + ODBCSourceAttribute: Table + ODBCTranslator: Table + Patch: Table + PatchPackage: Table + PublishComponent: Table + RadioButton: Table + Registry: Table + RegLocator: Table + RemoveFile: Table + RemoveIniFile: Table + RemoveRegistry: Table + ReserveCost: Table + SelfReg: Table + ServiceControl: Table + ServiceInstall: Table + Shortcut: Table + Signature: Table + TextStyle: Table + TypeLib: Table + UIText: Table + Upgrade: Table + Verb: Table + + tables: list[Table] + + _Validation_records: list[tuple[str, str, str, int | None, int | None, str | None, int | None, str | None, str | None, str]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/sequence.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/sequence.pyi new file mode 100644 index 00000000..b8af09f4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/sequence.pyi @@ -0,0 +1,13 @@ +import sys +from typing_extensions import TypeAlias + +if sys.platform == "win32": + _SequenceType: TypeAlias = list[tuple[str, str | None, int]] + + AdminExecuteSequence: _SequenceType + AdminUISequence: _SequenceType + AdvtExecuteSequence: _SequenceType + InstallExecuteSequence: _SequenceType + InstallUISequence: _SequenceType + + tables: list[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/text.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/text.pyi new file mode 100644 index 00000000..1353cf8a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msilib/text.pyi @@ -0,0 +1,7 @@ +import sys + +if sys.platform == "win32": + ActionText: list[tuple[str, str, str | None]] + UIText: list[tuple[str, str | None]] + + tables: list[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msvcrt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msvcrt.pyi new file mode 100644 index 00000000..5849b9b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/msvcrt.pyi @@ -0,0 +1,28 @@ +import sys +from 
typing_extensions import Literal + +# This module is only available on Windows +if sys.platform == "win32": + LK_UNLCK: Literal[0] + LK_LOCK: Literal[1] + LK_NBLCK: Literal[2] + LK_RLCK: Literal[3] + LK_NBRLCK: Literal[4] + SEM_FAILCRITICALERRORS: int + SEM_NOALIGNMENTFAULTEXCEPT: int + SEM_NOGPFAULTERRORBOX: int + SEM_NOOPENFILEERRORBOX: int + def locking(__fd: int, __mode: int, __nbytes: int) -> None: ... + def setmode(__fd: int, __mode: int) -> int: ... + def open_osfhandle(__handle: int, __flags: int) -> int: ... + def get_osfhandle(__fd: int) -> int: ... + def kbhit() -> bool: ... + def getch() -> bytes: ... + def getwch() -> str: ... + def getche() -> bytes: ... + def getwche() -> str: ... + def putch(__char: bytes | bytearray) -> None: ... + def putwch(__unicode_char: str) -> None: ... + def ungetch(__char: bytes | bytearray) -> None: ... + def ungetwch(__unicode_char: str) -> None: ... + def heapmin() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/__init__.pyi new file mode 100644 index 00000000..186bd54a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/__init__.pyi @@ -0,0 +1,92 @@ +import sys +from multiprocessing import context, reduction as reducer +from multiprocessing.context import ( + AuthenticationError as AuthenticationError, + BufferTooShort as BufferTooShort, + Process as Process, + ProcessError as ProcessError, + TimeoutError as TimeoutError, +) +from multiprocessing.process import active_children as active_children, current_process as current_process + +# These are technically functions that return instances of these Queue classes. +# The stub here doesn't reflect reality exactly -- +# while e.g. `multiprocessing.queues.Queue` is a class, +# `multiprocessing.Queue` is actually a function at runtime. +# Avoid using `multiprocessing.Queue` as a type annotation; +# use imports from multiprocessing.queues instead. +# See #4266 and #8450 for discussion. 
+from multiprocessing.queues import JoinableQueue as JoinableQueue, Queue as Queue, SimpleQueue as SimpleQueue +from multiprocessing.spawn import freeze_support as freeze_support + +if sys.version_info >= (3, 8): + from multiprocessing.process import parent_process as parent_process + +__all__ = [ + "Array", + "AuthenticationError", + "Barrier", + "BoundedSemaphore", + "BufferTooShort", + "Condition", + "Event", + "JoinableQueue", + "Lock", + "Manager", + "Pipe", + "Pool", + "Process", + "ProcessError", + "Queue", + "RLock", + "RawArray", + "RawValue", + "Semaphore", + "SimpleQueue", + "TimeoutError", + "Value", + "active_children", + "allow_connection_pickling", + "cpu_count", + "current_process", + "freeze_support", + "get_all_start_methods", + "get_context", + "get_logger", + "get_start_method", + "log_to_stderr", + "reducer", + "set_executable", + "set_forkserver_preload", + "set_start_method", +] + +if sys.version_info >= (3, 8): + __all__ += ["parent_process"] + +# These functions (really bound methods) +# are all autogenerated at runtime here: https://github.com/python/cpython/blob/600c65c094b0b48704d8ec2416930648052ba715/Lib/multiprocessing/__init__.py#L23 +RawValue = context._default_context.RawValue +RawArray = context._default_context.RawArray +Value = context._default_context.Value +Array = context._default_context.Array +Barrier = context._default_context.Barrier +BoundedSemaphore = context._default_context.BoundedSemaphore +Condition = context._default_context.Condition +Event = context._default_context.Event +Lock = context._default_context.Lock +RLock = context._default_context.RLock +Semaphore = context._default_context.Semaphore +Pipe = context._default_context.Pipe +Pool = context._default_context.Pool +allow_connection_pickling = context._default_context.allow_connection_pickling +cpu_count = context._default_context.cpu_count +get_logger = context._default_context.get_logger +log_to_stderr = context._default_context.log_to_stderr +Manager = context._default_context.Manager +set_executable = context._default_context.set_executable +set_forkserver_preload = context._default_context.set_forkserver_preload +get_all_start_methods = context._default_context.get_all_start_methods +get_start_method = context._default_context.get_start_method +set_start_method = context._default_context.set_start_method +get_context = context._default_context.get_context diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/connection.pyi new file mode 100644 index 00000000..d0343737 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/connection.pyi @@ -0,0 +1,69 @@ +import socket +import sys +import types +from _typeshed import ReadableBuffer +from collections.abc import Iterable +from typing import Any +from typing_extensions import Self, SupportsIndex, TypeAlias + +__all__ = ["Client", "Listener", "Pipe", "wait"] + +# https://docs.python.org/3/library/multiprocessing.html#address-formats +_Address: TypeAlias = str | tuple[str, int] + +class _ConnectionBase: + def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... + @property + def closed(self) -> bool: ... # undocumented + @property + def readable(self) -> bool: ... # undocumented + @property + def writable(self) -> bool: ... # undocumented + def fileno(self) -> int: ... 
+ def close(self) -> None: ... + def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: ... + def send(self, obj: Any) -> None: ... + def recv_bytes(self, maxlength: int | None = None) -> bytes: ... + def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: ... + def recv(self) -> Any: ... + def poll(self, timeout: float | None = 0.0) -> bool: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + +class Connection(_ConnectionBase): ... + +if sys.platform == "win32": + class PipeConnection(_ConnectionBase): ... + +class Listener: + def __init__( + self, address: _Address | None = None, family: str | None = None, backlog: int = 1, authkey: bytes | None = None + ) -> None: ... + def accept(self) -> Connection: ... + def close(self) -> None: ... + @property + def address(self) -> _Address: ... + @property + def last_accepted(self) -> _Address | None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + +def deliver_challenge(connection: Connection, authkey: bytes) -> None: ... +def answer_challenge(connection: Connection, authkey: bytes) -> None: ... +def wait( + object_list: Iterable[Connection | socket.socket | int], timeout: float | None = None +) -> list[Connection | socket.socket | int]: ... +def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection: ... + +# N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe. +# _ConnectionBase is the common base class of Connection and PipeConnection +# and can be used in cross-platform code. +if sys.platform != "win32": + def Pipe(duplex: bool = True) -> tuple[Connection, Connection]: ... + +else: + def Pipe(duplex: bool = True) -> tuple[PipeConnection, PipeConnection]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/context.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/context.pyi new file mode 100644 index 00000000..c498649a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/context.pyi @@ -0,0 +1,194 @@ +import ctypes +import sys +from collections.abc import Callable, Iterable, Sequence +from ctypes import _CData +from logging import Logger, _Level as _LoggingLevel +from multiprocessing import popen_fork, popen_forkserver, popen_spawn_posix, popen_spawn_win32, queues, synchronize +from multiprocessing.managers import SyncManager +from multiprocessing.pool import Pool as _Pool +from multiprocessing.process import BaseProcess +from multiprocessing.sharedctypes import SynchronizedArray, SynchronizedBase +from typing import Any, ClassVar, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +if sys.platform != "win32": + from multiprocessing.connection import Connection +else: + from multiprocessing.connection import PipeConnection + +if sys.version_info >= (3, 8): + __all__ = () +else: + __all__: list[str] = [] + +_LockLike: TypeAlias = synchronize.Lock | synchronize.RLock +_CT = TypeVar("_CT", bound=_CData) + +class ProcessError(Exception): ... +class BufferTooShort(ProcessError): ... +class TimeoutError(ProcessError): ... +class AuthenticationError(ProcessError): ... 
+ +class BaseContext: + ProcessError: ClassVar[type[ProcessError]] + BufferTooShort: ClassVar[type[BufferTooShort]] + TimeoutError: ClassVar[type[TimeoutError]] + AuthenticationError: ClassVar[type[AuthenticationError]] + + # N.B. The methods below are applied at runtime to generate + # multiprocessing.*, so the signatures should be identical (modulo self). + @staticmethod + def current_process() -> BaseProcess: ... + if sys.version_info >= (3, 8): + @staticmethod + def parent_process() -> BaseProcess | None: ... + + @staticmethod + def active_children() -> list[BaseProcess]: ... + def cpu_count(self) -> int: ... + def Manager(self) -> SyncManager: ... + + # N.B. Keep this in sync with multiprocessing.connection.Pipe. + # _ConnectionBase is the common base class of Connection and PipeConnection + # and can be used in cross-platform code. + if sys.platform != "win32": + def Pipe(self, duplex: bool = True) -> tuple[Connection, Connection]: ... + else: + def Pipe(self, duplex: bool = True) -> tuple[PipeConnection, PipeConnection]: ... + + def Barrier( + self, parties: int, action: Callable[..., object] | None = None, timeout: float | None = None + ) -> synchronize.Barrier: ... + def BoundedSemaphore(self, value: int = 1) -> synchronize.BoundedSemaphore: ... + def Condition(self, lock: _LockLike | None = None) -> synchronize.Condition: ... + def Event(self) -> synchronize.Event: ... + def Lock(self) -> synchronize.Lock: ... + def RLock(self) -> synchronize.RLock: ... + def Semaphore(self, value: int = 1) -> synchronize.Semaphore: ... + def Queue(self, maxsize: int = 0) -> queues.Queue[Any]: ... + def JoinableQueue(self, maxsize: int = 0) -> queues.JoinableQueue[Any]: ... + def SimpleQueue(self) -> queues.SimpleQueue[Any]: ... + def Pool( + self, + processes: int | None = None, + initializer: Callable[..., object] | None = None, + initargs: Iterable[Any] = ..., + maxtasksperchild: int | None = None, + ) -> _Pool: ... + @overload + def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: ... + @overload + def RawValue(self, typecode_or_type: str, *args: Any) -> Any: ... + @overload + def RawArray(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... + @overload + def RawArray(self, typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... + @overload + def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[False]) -> _CT: ... + @overload + def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True) -> SynchronizedBase[_CT]: ... + @overload + def Value(self, typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = True) -> SynchronizedBase[Any]: ... + @overload + def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True) -> Any: ... + @overload + def Array(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False]) -> _CT: ... + @overload + def Array( + self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True + ) -> SynchronizedArray[_CT]: ... + @overload + def Array( + self, typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True + ) -> SynchronizedArray[Any]: ... + @overload + def Array( + self, typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = True + ) -> Any: ... + def freeze_support(self) -> None: ... 
+ def get_logger(self) -> Logger: ... + def log_to_stderr(self, level: _LoggingLevel | None = None) -> Logger: ... + def allow_connection_pickling(self) -> None: ... + def set_executable(self, executable: str) -> None: ... + def set_forkserver_preload(self, module_names: list[str]) -> None: ... + if sys.platform != "win32": + @overload + def get_context(self, method: None = None) -> DefaultContext: ... + @overload + def get_context(self, method: Literal["spawn"]) -> SpawnContext: ... + @overload + def get_context(self, method: Literal["fork"]) -> ForkContext: ... + @overload + def get_context(self, method: Literal["forkserver"]) -> ForkServerContext: ... + @overload + def get_context(self, method: str) -> BaseContext: ... + else: + @overload + def get_context(self, method: None = None) -> DefaultContext: ... + @overload + def get_context(self, method: Literal["spawn"]) -> SpawnContext: ... + @overload + def get_context(self, method: str) -> BaseContext: ... + + @overload + def get_start_method(self, allow_none: Literal[False] = False) -> str: ... + @overload + def get_start_method(self, allow_none: bool) -> str | None: ... + def set_start_method(self, method: str | None, force: bool = False) -> None: ... + @property + def reducer(self) -> str: ... + @reducer.setter + def reducer(self, reduction: str) -> None: ... + def _check_available(self) -> None: ... + +class Process(BaseProcess): + _start_method: str | None + @staticmethod + def _Popen(process_obj: BaseProcess) -> DefaultContext: ... + +class DefaultContext(BaseContext): + Process: ClassVar[type[Process]] + def __init__(self, context: BaseContext) -> None: ... + def get_start_method(self, allow_none: bool = False) -> str: ... + def get_all_start_methods(self) -> list[str]: ... + if sys.version_info < (3, 8): + __all__: ClassVar[list[str]] + +_default_context: DefaultContext + +class SpawnProcess(BaseProcess): + _start_method: str + if sys.platform != "win32": + @staticmethod + def _Popen(process_obj: BaseProcess) -> popen_spawn_posix.Popen: ... + else: + @staticmethod + def _Popen(process_obj: BaseProcess) -> popen_spawn_win32.Popen: ... + +class SpawnContext(BaseContext): + _name: str + Process: ClassVar[type[SpawnProcess]] + +if sys.platform != "win32": + class ForkProcess(BaseProcess): + _start_method: str + @staticmethod + def _Popen(process_obj: BaseProcess) -> popen_fork.Popen: ... + + class ForkServerProcess(BaseProcess): + _start_method: str + @staticmethod + def _Popen(process_obj: BaseProcess) -> popen_forkserver.Popen: ... + + class ForkContext(BaseContext): + _name: str + Process: ClassVar[type[ForkProcess]] + + class ForkServerContext(BaseContext): + _name: str + Process: ClassVar[type[ForkServerProcess]] + +def _force_start_method(method: str) -> None: ... +def get_spawning_popen() -> Any | None: ... +def set_spawning_popen(popen: Any) -> None: ... +def assert_spawning(obj: Any) -> None: ... 
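Per the note in multiprocessing/__init__.pyi above, multiprocessing.Queue is really a factory function at runtime, so it cannot be used as a type annotation; the stub advises annotating with the class from multiprocessing.queues instead. A minimal sketch of that advice, with a hypothetical consume() helper:

import multiprocessing
import multiprocessing.queues

def consume(q: "multiprocessing.queues.Queue[int]", n: int) -> list[int]:
    # annotate with the class from multiprocessing.queues,
    # not the runtime factory multiprocessing.Queue
    return [q.get() for _ in range(n)]    # get() blocks until each item arrives

if __name__ == "__main__":
    q: "multiprocessing.queues.Queue[int]" = multiprocessing.Queue()
    for n in (1, 2, 3):
        q.put(n)
    print(consume(q, 3))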
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/dummy/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/dummy/__init__.pyi new file mode 100644 index 00000000..5b2a3377 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/dummy/__init__.pyi @@ -0,0 +1,80 @@ +import array +import threading +import weakref +from collections.abc import Callable, Iterable, Mapping, Sequence +from queue import Queue as Queue +from threading import ( + Barrier as Barrier, + BoundedSemaphore as BoundedSemaphore, + Condition as Condition, + Event as Event, + Lock as Lock, + RLock as RLock, + Semaphore as Semaphore, +) +from typing import Any +from typing_extensions import Literal + +from .connection import Pipe as Pipe + +__all__ = [ + "Process", + "current_process", + "active_children", + "freeze_support", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Condition", + "Event", + "Barrier", + "Queue", + "Manager", + "Pipe", + "Pool", + "JoinableQueue", +] + +JoinableQueue = Queue + +class DummyProcess(threading.Thread): + _children: weakref.WeakKeyDictionary[Any, Any] + _parent: threading.Thread + _pid: None + _start_called: int + @property + def exitcode(self) -> Literal[0] | None: ... + def __init__( + self, + group: Any = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = ..., + kwargs: Mapping[str, Any] = ..., + ) -> None: ... + +Process = DummyProcess + +class Namespace: + def __init__(self, **kwds: Any) -> None: ... + def __getattr__(self, __name: str) -> Any: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + +class Value: + _typecode: Any + _value: Any + value: Any + def __init__(self, typecode: Any, value: Any, lock: Any = True) -> None: ... + +def Array(typecode: Any, sequence: Sequence[Any], lock: Any = True) -> array.array[Any]: ... +def Manager() -> Any: ... +def Pool( + processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ... +) -> Any: ... +def active_children() -> list[Any]: ... + +current_process = threading.current_thread + +def freeze_support() -> None: ... +def shutdown() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/dummy/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/dummy/connection.pyi new file mode 100644 index 00000000..d7e98212 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/dummy/connection.pyi @@ -0,0 +1,39 @@ +from multiprocessing.connection import _Address +from queue import Queue +from types import TracebackType +from typing import Any +from typing_extensions import Self + +__all__ = ["Client", "Listener", "Pipe"] + +families: list[None] + +class Connection: + _in: Any + _out: Any + recv: Any + recv_bytes: Any + send: Any + send_bytes: Any + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __init__(self, _in: Any, _out: Any) -> None: ... + def close(self) -> None: ... + def poll(self, timeout: float = 0.0) -> bool: ... + +class Listener: + _backlog_queue: Queue[Any] | None + @property + def address(self) -> Queue[Any] | None: ... 
+ def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __init__(self, address: _Address | None = None, family: int | None = None, backlog: int = 1) -> None: ... + def accept(self) -> Connection: ... + def close(self) -> None: ... + +def Client(address: _Address) -> Connection: ... +def Pipe(duplex: bool = True) -> tuple[Connection, Connection]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/forkserver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/forkserver.pyi new file mode 100644 index 00000000..df435f00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/forkserver.pyi @@ -0,0 +1,31 @@ +from _typeshed import FileDescriptorLike, Unused +from collections.abc import Sequence +from struct import Struct +from typing import Any + +__all__ = ["ensure_running", "get_inherited_fds", "connect_to_new_process", "set_forkserver_preload"] + +MAXFDS_TO_SEND: int +SIGNED_STRUCT: Struct + +class ForkServer: + def set_forkserver_preload(self, modules_names: list[str]) -> None: ... + def get_inherited_fds(self) -> list[int] | None: ... + def connect_to_new_process(self, fds: Sequence[int]) -> tuple[int, int]: ... + def ensure_running(self) -> None: ... + +def main( + listener_fd: int | None, + alive_r: FileDescriptorLike, + preload: Sequence[str], + main_path: str | None = None, + sys_path: Unused = None, +) -> None: ... +def read_signed(fd: int) -> Any: ... +def write_signed(fd: int, n: int) -> None: ... + +_forkserver: ForkServer = ... +ensure_running = _forkserver.ensure_running +get_inherited_fds = _forkserver.get_inherited_fds +connect_to_new_process = _forkserver.connect_to_new_process +set_forkserver_preload = _forkserver.set_forkserver_preload diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/heap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/heap.pyi new file mode 100644 index 00000000..b5e2ced5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/heap.pyi @@ -0,0 +1,36 @@ +import sys +from _typeshed import Incomplete +from collections.abc import Callable +from mmap import mmap +from typing import Protocol +from typing_extensions import TypeAlias + +__all__ = ["BufferWrapper"] + +class Arena: + size: int + buffer: mmap + if sys.platform == "win32": + name: str + def __init__(self, size: int) -> None: ... + else: + fd: int + def __init__(self, size: int, fd: int = -1) -> None: ... + +_Block: TypeAlias = tuple[Arena, int, int] + +if sys.platform != "win32": + class _SupportsDetach(Protocol): + def detach(self) -> int: ... + + def reduce_arena(a: Arena) -> tuple[Callable[[int, _SupportsDetach], Arena], tuple[int, Incomplete]]: ... + def rebuild_arena(size: int, dupfd: _SupportsDetach) -> Arena: ... + +class Heap: + def __init__(self, size: int = ...) -> None: ... + def free(self, block: _Block) -> None: ... + def malloc(self, size: int) -> _Block: ... + +class BufferWrapper: + def __init__(self, size: int) -> None: ... + def create_memoryview(self) -> memoryview: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/managers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/managers.pyi new file mode 100644 index 00000000..e035a187 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/managers.pyi @@ -0,0 +1,212 @@ +import queue +import sys +import threading +from _typeshed import SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, MutableSequence, Sequence +from types import TracebackType +from typing import Any, AnyStr, ClassVar, Generic, TypeVar, overload +from typing_extensions import Self, SupportsIndex, TypeAlias + +from .connection import Connection +from .context import BaseContext + +if sys.version_info >= (3, 8): + from .shared_memory import _SLT, ShareableList as _ShareableList, SharedMemory as _SharedMemory + + __all__ = ["BaseManager", "SyncManager", "BaseProxy", "Token", "SharedMemoryManager"] + +else: + __all__ = ["BaseManager", "SyncManager", "BaseProxy", "Token"] + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + +class Namespace: + def __init__(self, **kwds: Any) -> None: ... + def __getattr__(self, __name: str) -> Any: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + +_Namespace: TypeAlias = Namespace + +class Token: + typeid: str | bytes | None + address: tuple[str | bytes, int] + id: str | bytes | int | None + def __init__(self, typeid: bytes | str | None, address: tuple[str | bytes, int], id: str | bytes | int | None) -> None: ... + def __getstate__(self) -> tuple[str | bytes | None, tuple[str | bytes, int], str | bytes | int | None]: ... + def __setstate__(self, state: tuple[str | bytes | None, tuple[str | bytes, int], str | bytes | int | None]) -> None: ... + +class BaseProxy: + _address_to_local: dict[Any, Any] + _mutex: Any + def __init__( + self, + token: Any, + serializer: str, + manager: Any = None, + authkey: AnyStr | None = None, + exposed: Any = None, + incref: bool = True, + manager_owned: bool = False, + ) -> None: ... + def __deepcopy__(self, memo: Any | None) -> Any: ... + def _callmethod(self, methodname: str, args: tuple[Any, ...] = ..., kwds: dict[Any, Any] = ...) -> None: ... + def _getvalue(self) -> Any: ... + def __reduce__(self) -> tuple[Any, tuple[Any, Any, str, dict[Any, Any]]]: ... + +class ValueProxy(BaseProxy, Generic[_T]): + def get(self) -> _T: ... + def set(self, value: _T) -> None: ... + value: _T + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): + __builtins__: ClassVar[dict[str, Any]] + def __len__(self) -> int: ... + def __getitem__(self, __key: _KT) -> _VT: ... + def __setitem__(self, __key: _KT, __value: _VT) -> None: ... + def __delitem__(self, __key: _KT) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def copy(self) -> dict[_KT, _VT]: ... + @overload + def get(self, __key: _KT) -> _VT | None: ... + @overload + def get(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + @overload + def pop(self, __key: _KT) -> _VT: ... + @overload + def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + def keys(self) -> list[_KT]: ... # type: ignore[override] + def items(self) -> list[tuple[_KT, _VT]]: ... 
# type: ignore[override] + def values(self) -> list[_VT]: ... # type: ignore[override] + +class BaseListProxy(BaseProxy, MutableSequence[_T]): + __builtins__: ClassVar[dict[str, Any]] + def __len__(self) -> int: ... + def __add__(self, __x: list[_T]) -> list[_T]: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + @overload + def __getitem__(self, __i: SupportsIndex) -> _T: ... + @overload + def __getitem__(self, __s: slice) -> list[_T]: ... + @overload + def __setitem__(self, __i: SupportsIndex, __o: _T) -> None: ... + @overload + def __setitem__(self, __s: slice, __o: Iterable[_T]) -> None: ... + def __mul__(self, __n: SupportsIndex) -> list[_T]: ... + def __rmul__(self, __n: SupportsIndex) -> list[_T]: ... + def __reversed__(self) -> Iterator[_T]: ... + def append(self, __object: _T) -> None: ... + def extend(self, __iterable: Iterable[_T]) -> None: ... + def pop(self, __index: SupportsIndex = ...) -> _T: ... + def index(self, __value: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + def count(self, __value: _T) -> int: ... + def insert(self, __index: SupportsIndex, __object: _T) -> None: ... + def remove(self, __value: _T) -> None: ... + # Use BaseListProxy[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] + # to work around invariance + @overload + def sort(self: BaseListProxy[SupportsRichComparisonT], *, key: None = None, reverse: bool = ...) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... + +class ListProxy(BaseListProxy[_T]): + def __iadd__(self, __x: Iterable[_T]) -> Self: ... # type: ignore[override] + def __imul__(self, __n: SupportsIndex) -> Self: ... # type: ignore[override] + +# Returned by BaseManager.get_server() +class Server: + address: Any + def __init__( + self, registry: dict[str, tuple[Callable[..., Any], Any, Any, Any]], address: Any, authkey: bytes, serializer: str + ) -> None: ... + def serve_forever(self) -> None: ... + def accept_connection(self, c: Connection, name: str) -> None: ... + +class BaseManager: + if sys.version_info >= (3, 11): + def __init__( + self, + address: Any | None = None, + authkey: bytes | None = None, + serializer: str = "pickle", + ctx: BaseContext | None = None, + *, + shutdown_timeout: float = 1.0, + ) -> None: ... + else: + def __init__( + self, + address: Any | None = None, + authkey: bytes | None = None, + serializer: str = "pickle", + ctx: BaseContext | None = None, + ) -> None: ... + + def get_server(self) -> Server: ... + def connect(self) -> None: ... + def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ...) -> None: ... + def shutdown(self) -> None: ... # only available after start() was called + def join(self, timeout: float | None = None) -> None: ... # undocumented + @property + def address(self) -> Any: ... + @classmethod + def register( + cls, + typeid: str, + callable: Callable[..., object] | None = None, + proxytype: Any = None, + exposed: Sequence[str] | None = None, + method_to_typeid: Mapping[str, str] | None = None, + create_method: bool = True, + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +class SyncManager(BaseManager): + def BoundedSemaphore(self, value: Any = ...) -> threading.BoundedSemaphore: ... + def Condition(self, lock: Any = ...) -> threading.Condition: ... 
+ def Event(self) -> threading.Event: ... + def Lock(self) -> threading.Lock: ... + def Namespace(self) -> _Namespace: ... + def Queue(self, maxsize: int = ...) -> queue.Queue[Any]: ... + def RLock(self) -> threading.RLock: ... + def Semaphore(self, value: Any = ...) -> threading.Semaphore: ... + def Array(self, typecode: Any, sequence: Sequence[_T]) -> Sequence[_T]: ... + def Value(self, typecode: Any, value: _T) -> ValueProxy[_T]: ... + # Overloads are copied from builtins.dict.__init__ + @overload + def dict(self) -> DictProxy[Any, Any]: ... + @overload + def dict(self, **kwargs: _VT) -> DictProxy[str, _VT]: ... + @overload + def dict(self, __map: SupportsKeysAndGetItem[_KT, _VT]) -> DictProxy[_KT, _VT]: ... + @overload + def dict(self, __map: SupportsKeysAndGetItem[str, _VT], **kwargs: _VT) -> DictProxy[str, _VT]: ... + @overload + def dict(self, __iterable: Iterable[tuple[_KT, _VT]]) -> DictProxy[_KT, _VT]: ... + @overload + def dict(self, __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> DictProxy[str, _VT]: ... + @overload + def dict(self, __iterable: Iterable[list[str]]) -> DictProxy[str, str]: ... + @overload + def list(self, __sequence: Sequence[_T]) -> ListProxy[_T]: ... + @overload + def list(self) -> ListProxy[Any]: ... + +class RemoteError(Exception): ... + +if sys.version_info >= (3, 8): + class SharedMemoryServer(Server): ... + + class SharedMemoryManager(BaseManager): + def get_server(self) -> SharedMemoryServer: ... + def SharedMemory(self, size: int) -> _SharedMemory: ... + def ShareableList(self, sequence: Iterable[_SLT] | None) -> _ShareableList[_SLT]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/pool.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/pool.pyi new file mode 100644 index 00000000..a19dd555 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/pool.pyi @@ -0,0 +1,130 @@ +import sys +from collections.abc import Callable, Iterable, Iterator, Mapping +from types import TracebackType +from typing import Any, Generic, TypeVar +from typing_extensions import Literal, Self + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ["Pool", "ThreadPool"] + +_S = TypeVar("_S") +_T = TypeVar("_T") + +class ApplyResult(Generic[_T]): + if sys.version_info >= (3, 8): + def __init__( + self, pool: Pool, callback: Callable[[_T], object] | None, error_callback: Callable[[BaseException], object] | None + ) -> None: ... + else: + def __init__( + self, + cache: dict[int, ApplyResult[Any]], + callback: Callable[[_T], object] | None, + error_callback: Callable[[BaseException], object] | None, + ) -> None: ... + + def get(self, timeout: float | None = None) -> _T: ... + def wait(self, timeout: float | None = None) -> None: ... + def ready(self) -> bool: ... + def successful(self) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# alias created during issue #17805 +AsyncResult = ApplyResult + +class MapResult(ApplyResult[list[_T]]): + if sys.version_info >= (3, 8): + def __init__( + self, + pool: Pool, + chunksize: int, + length: int, + callback: Callable[[list[_T]], object] | None, + error_callback: Callable[[BaseException], object] | None, + ) -> None: ... 
+ else: + def __init__( + self, + cache: dict[int, ApplyResult[Any]], + chunksize: int, + length: int, + callback: Callable[[list[_T]], object] | None, + error_callback: Callable[[BaseException], object] | None, + ) -> None: ... + +class IMapIterator(Iterator[_T]): + if sys.version_info >= (3, 8): + def __init__(self, pool: Pool) -> None: ... + else: + def __init__(self, cache: dict[int, IMapIterator[Any]]) -> None: ... + + def __iter__(self) -> Self: ... + def next(self, timeout: float | None = None) -> _T: ... + def __next__(self, timeout: float | None = None) -> _T: ... + +class IMapUnorderedIterator(IMapIterator[_T]): ... + +class Pool: + def __init__( + self, + processes: int | None = None, + initializer: Callable[..., object] | None = None, + initargs: Iterable[Any] = ..., + maxtasksperchild: int | None = None, + context: Any | None = None, + ) -> None: ... + def apply(self, func: Callable[..., _T], args: Iterable[Any] = ..., kwds: Mapping[str, Any] = ...) -> _T: ... + def apply_async( + self, + func: Callable[..., _T], + args: Iterable[Any] = ..., + kwds: Mapping[str, Any] = ..., + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, + ) -> AsyncResult[_T]: ... + def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = None) -> list[_T]: ... + def map_async( + self, + func: Callable[[_S], _T], + iterable: Iterable[_S], + chunksize: int | None = None, + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, + ) -> MapResult[_T]: ... + def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = None) -> list[_T]: ... + def starmap_async( + self, + func: Callable[..., _T], + iterable: Iterable[Iterable[Any]], + chunksize: int | None = None, + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, + ) -> AsyncResult[list[_T]]: ... + def close(self) -> None: ... + def terminate(self) -> None: ... + def join(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +class ThreadPool(Pool): + def __init__( + self, processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ... + ) -> None: ... 
+ +# undocumented +if sys.version_info >= (3, 8): + INIT: Literal["INIT"] + RUN: Literal["RUN"] + CLOSE: Literal["CLOSE"] + TERMINATE: Literal["TERMINATE"] +else: + RUN: Literal[0] + CLOSE: Literal[1] + TERMINATE: Literal[2] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_fork.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_fork.pyi new file mode 100644 index 00000000..4fcbfd99 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_fork.pyi @@ -0,0 +1,23 @@ +import sys +from typing import ClassVar + +from .process import BaseProcess +from .util import Finalize + +if sys.platform != "win32": + __all__ = ["Popen"] + + class Popen: + finalizer: Finalize | None + method: ClassVar[str] + pid: int + returncode: int | None + sentinel: int # doesn't exist if os.fork in _launch returns 0 + + def __init__(self, process_obj: BaseProcess) -> None: ... + def duplicate_for_child(self, fd: int) -> int: ... + def poll(self, flag: int = 1) -> int | None: ... + def wait(self, timeout: float | None = None) -> int | None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_forkserver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_forkserver.pyi new file mode 100644 index 00000000..f7d53bbb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_forkserver.pyi @@ -0,0 +1,16 @@ +import sys +from typing import ClassVar + +from . import popen_fork +from .util import Finalize + +if sys.platform != "win32": + __all__ = ["Popen"] + + class _DupFd: + def __init__(self, ind: int) -> None: ... + def detach(self) -> int: ... + + class Popen(popen_fork.Popen): + DupFd: ClassVar[type[_DupFd]] + finalizer: Finalize diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi new file mode 100644 index 00000000..7e81d396 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi @@ -0,0 +1,20 @@ +import sys +from typing import ClassVar + +from . import popen_fork +from .util import Finalize + +if sys.platform != "win32": + __all__ = ["Popen"] + + class _DupFd: + fd: int + + def __init__(self, fd: int) -> None: ... + def detach(self) -> int: ... 
+ + class Popen(popen_fork.Popen): + DupFd: ClassVar[type[_DupFd]] + finalizer: Finalize + pid: int # may not exist if _launch raises in second try / except + sentinel: int # may not exist if _launch raises in second try / except diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi new file mode 100644 index 00000000..3dc9d5bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi @@ -0,0 +1,30 @@ +import sys +from multiprocessing.process import BaseProcess +from typing import ClassVar + +from .util import Finalize + +if sys.platform == "win32": + __all__ = ["Popen"] + + TERMINATE: int + WINEXE: bool + WINSERVICE: bool + WINENV: bool + + class Popen: + finalizer: Finalize + method: ClassVar[str] + pid: int + returncode: int | None + sentinel: int + + def __init__(self, process_obj: BaseProcess) -> None: ... + def duplicate_for_child(self, handle: int) -> int: ... + def wait(self, timeout: float | None = None) -> int | None: ... + def poll(self) -> int | None: ... + def terminate(self) -> None: ... + + kill = terminate + + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/process.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/process.pyi new file mode 100644 index 00000000..ef1b4b59 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/process.pyi @@ -0,0 +1,45 @@ +import sys +from collections.abc import Callable, Iterable, Mapping +from typing import Any + +if sys.version_info >= (3, 8): + __all__ = ["BaseProcess", "current_process", "active_children", "parent_process"] +else: + __all__ = ["BaseProcess", "current_process", "active_children"] + +class BaseProcess: + name: str + daemon: bool + authkey: bytes + _identity: tuple[int, ...] # undocumented + def __init__( + self, + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = ..., + kwargs: Mapping[str, Any] = ..., + *, + daemon: bool | None = None, + ) -> None: ... + def run(self) -> None: ... + def start(self) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + def close(self) -> None: ... + def join(self, timeout: float | None = None) -> None: ... + def is_alive(self) -> bool: ... + @property + def exitcode(self) -> int | None: ... + @property + def ident(self) -> int | None: ... + @property + def pid(self) -> int | None: ... + @property + def sentinel(self) -> int: ... + +def current_process() -> BaseProcess: ... +def active_children() -> list[BaseProcess]: ... + +if sys.version_info >= (3, 8): + def parent_process() -> BaseProcess | None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/queues.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/queues.pyi new file mode 100644 index 00000000..7ba17dcf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/queues.pyi @@ -0,0 +1,35 @@ +import queue +import sys +from typing import Any, Generic, TypeVar + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ["Queue", "SimpleQueue", "JoinableQueue"] + +_T = TypeVar("_T") + +class Queue(queue.Queue[_T]): + # FIXME: `ctx` is a circular dependency and it's not actually optional. + # It's marked as such to be able to use the generic Queue in __init__.pyi. + def __init__(self, maxsize: int = 0, *, ctx: Any = ...) -> None: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def put(self, obj: _T, block: bool = True, timeout: float | None = None) -> None: ... + def put_nowait(self, item: _T) -> None: ... + def get_nowait(self) -> _T: ... + def close(self) -> None: ... + def join_thread(self) -> None: ... + def cancel_join_thread(self) -> None: ... + +class JoinableQueue(Queue[_T]): ... + +class SimpleQueue(Generic[_T]): + def __init__(self, *, ctx: Any = ...) -> None: ... + if sys.version_info >= (3, 9): + def close(self) -> None: ... + + def empty(self) -> bool: ... + def get(self) -> _T: ... + def put(self, item: _T) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/reduction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/reduction.pyi new file mode 100644 index 00000000..e5a8cde8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/reduction.pyi @@ -0,0 +1,94 @@ +import pickle +import sys +from _typeshed import HasFileno, SupportsWrite, Unused +from abc import ABCMeta +from builtins import type as Type # alias to avoid name clash +from collections.abc import Callable +from copyreg import _DispatchTableType +from multiprocessing import connection +from pickle import _ReducedType +from socket import socket +from typing import Any +from typing_extensions import Literal + +if sys.platform == "win32": + __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupHandle", "duplicate", "steal_handle"] +else: + __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupFd", "sendfds", "recvfds"] + +HAVE_SEND_HANDLE: bool + +class ForkingPickler(pickle.Pickler): + dispatch_table: _DispatchTableType + def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... + @classmethod + def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: ... + @classmethod + def dumps(cls, obj: Any, protocol: int | None = None) -> memoryview: ... + loads = pickle.loads + +register = ForkingPickler.register + +def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> None: ... + +if sys.platform == "win32": + if sys.version_info >= (3, 8): + def duplicate( + handle: int, target_process: int | None = None, inheritable: bool = False, *, source_process: int | None = None + ) -> int: ... 
+ else: + def duplicate(handle: int, target_process: int | None = None, inheritable: bool = False) -> int: ... + + def steal_handle(source_pid: int, handle: int) -> int: ... + def send_handle(conn: connection.PipeConnection, handle: int, destination_pid: int) -> None: ... + def recv_handle(conn: connection.PipeConnection) -> int: ... + + class DupHandle: + def __init__(self, handle: int, access: int, pid: int | None = None) -> None: ... + def detach(self) -> int: ... + +else: + if sys.platform == "darwin": + ACKNOWLEDGE: Literal[True] + else: + ACKNOWLEDGE: Literal[False] + + def recvfds(sock: socket, size: int) -> list[int]: ... + def send_handle(conn: HasFileno, handle: int, destination_pid: Unused) -> None: ... + def recv_handle(conn: HasFileno) -> int: ... + def sendfds(sock: socket, fds: list[int]) -> None: ... + def DupFd(fd: int) -> Any: ... # Return type is really hard to get right + +# These aliases are to work around pyright complaints. +# Pyright doesn't like it when a class object is defined as an alias +# of a global object with the same name. +_ForkingPickler = ForkingPickler +_register = register +_dump = dump +_send_handle = send_handle +_recv_handle = recv_handle + +if sys.platform == "win32": + _steal_handle = steal_handle + _duplicate = duplicate + _DupHandle = DupHandle +else: + _sendfds = sendfds + _recvfds = recvfds + _DupFd = DupFd + +class AbstractReducer(metaclass=ABCMeta): + ForkingPickler = _ForkingPickler + register = _register + dump = _dump + send_handle = _send_handle + recv_handle = _recv_handle + if sys.platform == "win32": + steal_handle = _steal_handle + duplicate = _duplicate + DupHandle = _DupHandle + else: + sendfds = _sendfds + recvfds = _recvfds + DupFd = _DupFd + def __init__(self, *args: Unused) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/resource_sharer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/resource_sharer.pyi new file mode 100644 index 00000000..5fee7cf3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/resource_sharer.pyi @@ -0,0 +1,20 @@ +import sys +from socket import socket + +__all__ = ["stop"] + +if sys.platform == "win32": + __all__ += ["DupSocket"] + + class DupSocket: + def __init__(self, sock: socket) -> None: ... + def detach(self) -> socket: ... + +else: + __all__ += ["DupFd"] + + class DupFd: + def __init__(self, fd: int) -> None: ... + def detach(self) -> int: ... + +def stop(timeout: float | None = None) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/resource_tracker.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/resource_tracker.pyi new file mode 100644 index 00000000..e2b94079 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/resource_tracker.pyi @@ -0,0 +1,18 @@ +from _typeshed import FileDescriptorOrPath, Incomplete +from collections.abc import Sized + +__all__ = ["ensure_running", "register", "unregister"] + +class ResourceTracker: + def getfd(self) -> int | None: ... + def ensure_running(self) -> None: ... + def register(self, name: Sized, rtype: Incomplete) -> None: ... + def unregister(self, name: Sized, rtype: Incomplete) -> None: ... + +_resource_tracker: ResourceTracker = ... 
+ensure_running = _resource_tracker.ensure_running +register = _resource_tracker.register +unregister = _resource_tracker.unregister +getfd = _resource_tracker.getfd + +def main(fd: FileDescriptorOrPath) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/shared_memory.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/shared_memory.pyi new file mode 100644 index 00000000..ae6e2a0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -0,0 +1,39 @@ +import sys +from collections.abc import Iterable +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ["SharedMemory", "ShareableList"] + +_SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) + +class SharedMemory: + def __init__(self, name: str | None = None, create: bool = False, size: int = 0) -> None: ... + @property + def buf(self) -> memoryview: ... + @property + def name(self) -> str: ... + @property + def size(self) -> int: ... + def close(self) -> None: ... + def unlink(self) -> None: ... + +class ShareableList(Generic[_SLT]): + shm: SharedMemory + @overload + def __init__(self, sequence: None = None, *, name: str | None = None) -> None: ... + @overload + def __init__(self, sequence: Iterable[_SLT], *, name: str | None = None) -> None: ... + def __getitem__(self, position: int) -> _SLT: ... + def __setitem__(self, position: int, value: _SLT) -> None: ... + def __reduce__(self) -> tuple[Self, tuple[_SLT, ...]]: ... + def __len__(self) -> int: ... + @property + def format(self) -> str: ... + def count(self, value: _SLT) -> int: ... + def index(self, value: _SLT) -> int: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/sharedctypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/sharedctypes.pyi new file mode 100644 index 00000000..686a45d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/sharedctypes.pyi @@ -0,0 +1,102 @@ +import ctypes +from collections.abc import Callable, Iterable, Sequence +from ctypes import _CData, _SimpleCData, c_char +from multiprocessing.context import BaseContext +from multiprocessing.synchronize import _LockLike +from types import TracebackType +from typing import Any, Generic, Protocol, TypeVar, overload +from typing_extensions import Literal + +__all__ = ["RawValue", "RawArray", "Value", "Array", "copy", "synchronized"] + +_T = TypeVar("_T") +_CT = TypeVar("_CT", bound=_CData) + +@overload +def RawValue(typecode_or_type: type[_CT], *args: Any) -> _CT: ... +@overload +def RawValue(typecode_or_type: str, *args: Any) -> Any: ... +@overload +def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... +@overload +def RawArray(typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... +@overload +def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = None) -> _CT: ... +@overload +def Value( + typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None +) -> SynchronizedBase[_CT]: ... 
+@overload +def Value( + typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None +) -> SynchronizedBase[Any]: ... +@overload +def Value( + typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True, ctx: BaseContext | None = None +) -> Any: ... +@overload +def Array( + typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = None +) -> _CT: ... +@overload +def Array( + typecode_or_type: type[_CT], + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, +) -> SynchronizedArray[_CT]: ... +@overload +def Array( + typecode_or_type: str, + size_or_initializer: int | Sequence[Any], + *, + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, +) -> SynchronizedArray[Any]: ... +@overload +def Array( + typecode_or_type: str | type[_CData], + size_or_initializer: int | Sequence[Any], + *, + lock: bool | _LockLike = True, + ctx: BaseContext | None = None, +) -> Any: ... +def copy(obj: _CT) -> _CT: ... +@overload +def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ... +@overload +def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... +@overload +def synchronized(obj: ctypes.Array[_CT], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedArray[_CT]: ... +@overload +def synchronized(obj: _CT, lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedBase[_CT]: ... + +class _AcquireFunc(Protocol): + def __call__(self, block: bool = ..., timeout: float | None = ...) -> bool: ... + +class SynchronizedBase(Generic[_CT]): + acquire: _AcquireFunc + release: Callable[[], None] + def __init__(self, obj: Any, lock: _LockLike | None = None, ctx: Any | None = None) -> None: ... + def __reduce__(self) -> tuple[Callable[[Any, _LockLike], SynchronizedBase[Any]], tuple[Any, _LockLike]]: ... + def get_obj(self) -> _CT: ... + def get_lock(self) -> _LockLike: ... + def __enter__(self) -> bool: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None + ) -> None: ... + +class Synchronized(SynchronizedBase[_SimpleCData[_T]], Generic[_T]): + value: _T + +class SynchronizedArray(SynchronizedBase[ctypes.Array[_CT]], Generic[_CT]): + def __len__(self) -> int: ... + def __getitem__(self, i: int) -> _CT: ... + def __setitem__(self, i: int, value: _CT) -> None: ... + def __getslice__(self, start: int, stop: int) -> list[_CT]: ... + def __setslice__(self, start: int, stop: int, values: Iterable[_CT]) -> None: ... 
+ +class SynchronizedString(SynchronizedArray[c_char]): + value: bytes + raw: bytes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/spawn.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/spawn.pyi new file mode 100644 index 00000000..26ff1657 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/spawn.pyi @@ -0,0 +1,32 @@ +from collections.abc import Mapping, Sequence +from types import ModuleType +from typing import Any + +__all__ = [ + "_main", + "freeze_support", + "set_executable", + "get_executable", + "get_preparation_data", + "get_command_line", + "import_main_path", +] + +WINEXE: bool +WINSERVICE: bool + +def set_executable(exe: str) -> None: ... +def get_executable() -> str: ... +def is_forking(argv: Sequence[str]) -> bool: ... +def freeze_support() -> None: ... +def get_command_line(**kwds: Any) -> list[str]: ... +def spawn_main(pipe_handle: int, parent_pid: int | None = None, tracker_fd: int | None = None) -> None: ... + +# undocumented +def _main(fd: int) -> Any: ... +def get_preparation_data(name: str) -> dict[str, Any]: ... + +old_main_modules: list[ModuleType] + +def prepare(data: Mapping[str, Any]) -> None: ... +def import_main_path(main_path: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/synchronize.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/synchronize.pyi new file mode 100644 index 00000000..70437590 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/synchronize.pyi @@ -0,0 +1,54 @@ +import threading +from collections.abc import Callable +from contextlib import AbstractContextManager +from multiprocessing.context import BaseContext +from types import TracebackType +from typing_extensions import TypeAlias + +__all__ = ["Lock", "RLock", "Semaphore", "BoundedSemaphore", "Condition", "Event"] + +_LockLike: TypeAlias = Lock | RLock + +class Barrier(threading.Barrier): + def __init__( + self, parties: int, action: Callable[[], object] | None = None, timeout: float | None = None, *ctx: BaseContext + ) -> None: ... + +class BoundedSemaphore(Semaphore): + def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... + +class Condition(AbstractContextManager[bool]): + def __init__(self, lock: _LockLike | None = None, *, ctx: BaseContext) -> None: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], bool], timeout: float | None = None) -> bool: ... + def acquire(self, block: bool = ..., timeout: float | None = ...) -> bool: ... + def release(self) -> None: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None + ) -> None: ... + +class Event: + def __init__(self, lock: _LockLike | None = ..., *, ctx: BaseContext) -> None: ... + def is_set(self) -> bool: ... + def set(self) -> None: ... + def clear(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + +class Lock(SemLock): + def __init__(self, *, ctx: BaseContext) -> None: ... + +class RLock(SemLock): + def __init__(self, *, ctx: BaseContext) -> None: ... 
+ +class Semaphore(SemLock): + def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... + +# Not part of public API +class SemLock(AbstractContextManager[bool]): + def acquire(self, block: bool = ..., timeout: float | None = ...) -> bool: ... + def release(self) -> None: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/util.pyi new file mode 100644 index 00000000..006ec3a9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/multiprocessing/util.pyi @@ -0,0 +1,85 @@ +import threading +from _typeshed import Incomplete, ReadableBuffer, SupportsTrunc, Unused +from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence +from logging import Logger, _Level as _LoggingLevel +from typing import Any, SupportsInt +from typing_extensions import SupportsIndex + +__all__ = [ + "sub_debug", + "debug", + "info", + "sub_warning", + "get_logger", + "log_to_stderr", + "get_temp_dir", + "register_after_fork", + "is_exiting", + "Finalize", + "ForkAwareThreadLock", + "ForkAwareLocal", + "close_all_fds_except", + "SUBDEBUG", + "SUBWARNING", +] + +NOTSET: int +SUBDEBUG: int +DEBUG: int +INFO: int +SUBWARNING: int + +LOGGER_NAME: str +DEFAULT_LOGGING_FORMAT: str + +def sub_debug(msg: object, *args: object) -> None: ... +def debug(msg: object, *args: object) -> None: ... +def info(msg: object, *args: object) -> None: ... +def sub_warning(msg: object, *args: object) -> None: ... +def get_logger() -> Logger: ... +def log_to_stderr(level: _LoggingLevel | None = None) -> Logger: ... +def is_abstract_socket_namespace(address: str | bytes | None) -> bool: ... + +abstract_sockets_supported: bool + +def get_temp_dir() -> str: ... +def register_after_fork(obj: Incomplete, func: Callable[[Incomplete], object]) -> None: ... + +class Finalize: + def __init__( + self, + obj: Incomplete | None, + callback: Callable[..., Incomplete], + args: Sequence[Any] = ..., + kwargs: Mapping[str, Any] | None = None, + exitpriority: int | None = None, + ) -> None: ... + def __call__( + self, + wr: Unused = None, + _finalizer_registry: MutableMapping[Incomplete, Incomplete] = ..., + sub_debug: Callable[..., object] = ..., + getpid: Callable[[], int] = ..., + ) -> Incomplete: ... + def cancel(self) -> None: ... + def still_active(self) -> bool: ... + +def is_exiting() -> bool: ... + +class ForkAwareThreadLock: + acquire: Callable[[bool, float], bool] + release: Callable[[], None] + def __enter__(self) -> bool: ... + def __exit__(self, *args: Unused) -> None: ... + +class ForkAwareLocal(threading.local): ... + +MAXFD: int + +def close_all_fds_except(fds: Iterable[int]) -> None: ... +def spawnv_passfds( + path: bytes, + # args is anything that can be passed to the int constructor + args: Sequence[str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc], + passfds: Sequence[int], +) -> int: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/netrc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/netrc.pyi new file mode 100644 index 00000000..480f55a4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/netrc.pyi @@ -0,0 +1,23 @@ +import sys +from _typeshed import StrOrBytesPath +from typing_extensions import TypeAlias + +__all__ = ["netrc", "NetrcParseError"] + +class NetrcParseError(Exception): + filename: str | None + lineno: int | None + msg: str + def __init__(self, msg: str, filename: StrOrBytesPath | None = None, lineno: int | None = None) -> None: ... + +# (login, account, password) tuple +if sys.version_info >= (3, 11): + _NetrcTuple: TypeAlias = tuple[str, str, str] +else: + _NetrcTuple: TypeAlias = tuple[str, str | None, str | None] + +class netrc: + hosts: dict[str, _NetrcTuple] + macros: dict[str, list[str]] + def __init__(self, file: StrOrBytesPath | None = None) -> None: ... + def authenticators(self, host: str) -> _NetrcTuple | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/nis.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/nis.pyi new file mode 100644 index 00000000..10eef233 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/nis.pyi @@ -0,0 +1,9 @@ +import sys + +if sys.platform != "win32": + def cat(map: str, domain: str = ...) -> dict[str, str]: ... + def get_default_domain() -> str: ... + def maps(domain: str = ...) -> list[str]: ... + def match(key: str, map: str, domain: str = ...) -> str: ... + + class error(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/nntplib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/nntplib.pyi new file mode 100644 index 00000000..f948c143 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/nntplib.pyi @@ -0,0 +1,125 @@ +import datetime +import socket +import ssl +import sys +from _typeshed import Unused +from builtins import list as _list # conflicts with a method named "list" +from collections.abc import Iterable +from typing import IO, Any, NamedTuple +from typing_extensions import Literal, Self, TypeAlias + +__all__ = [ + "NNTP", + "NNTPError", + "NNTPReplyError", + "NNTPTemporaryError", + "NNTPPermanentError", + "NNTPProtocolError", + "NNTPDataError", + "decode_header", + "NNTP_SSL", +] + +_File: TypeAlias = IO[bytes] | bytes | str | None + +class NNTPError(Exception): + response: str + +class NNTPReplyError(NNTPError): ... +class NNTPTemporaryError(NNTPError): ... +class NNTPPermanentError(NNTPError): ... +class NNTPProtocolError(NNTPError): ... +class NNTPDataError(NNTPError): ... + +NNTP_PORT: Literal[119] +NNTP_SSL_PORT: Literal[563] + +class GroupInfo(NamedTuple): + group: str + last: str + first: str + flag: str + +class ArticleInfo(NamedTuple): + number: int + message_id: str + lines: list[bytes] + +def decode_header(header_str: str) -> str: ... 
+ +class NNTP: + encoding: str + errors: str + + host: str + port: int + sock: socket.socket + file: IO[bytes] + debugging: int + welcome: str + readermode_afterauth: bool + tls_on: bool + authenticated: bool + nntp_implementation: str + nntp_version: int + def __init__( + self, + host: str, + port: int = 119, + user: str | None = None, + password: str | None = None, + readermode: bool | None = None, + usenetrc: bool = False, + timeout: float = ..., + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def getwelcome(self) -> str: ... + def getcapabilities(self) -> dict[str, _list[str]]: ... + def set_debuglevel(self, level: int) -> None: ... + def debug(self, level: int) -> None: ... + def capabilities(self) -> tuple[str, dict[str, _list[str]]]: ... + def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: ... + def description(self, group: str) -> str: ... + def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: ... + def group(self, name: str) -> tuple[str, int, int, int, str]: ... + def help(self, *, file: _File = None) -> tuple[str, _list[str]]: ... + def stat(self, message_spec: Any = None) -> tuple[str, int, str]: ... + def next(self) -> tuple[str, int, str]: ... + def last(self) -> tuple[str, int, str]: ... + def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def slave(self) -> str: ... + def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: ... + def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... + def over( + self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = None + ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... + if sys.version_info < (3, 9): + def xgtitle(self, group: str, *, file: _File = None) -> tuple[str, _list[tuple[str, str]]]: ... + def xpath(self, id: Any) -> tuple[str, str]: ... + + def date(self) -> tuple[str, datetime.datetime]: ... + def post(self, data: bytes | Iterable[bytes]) -> str: ... + def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: ... + def quit(self) -> str: ... + def login(self, user: str | None = None, password: str | None = None, usenetrc: bool = True) -> None: ... + def starttls(self, context: ssl.SSLContext | None = None) -> None: ... + +class NNTP_SSL(NNTP): + ssl_context: ssl.SSLContext | None + sock: ssl.SSLSocket + def __init__( + self, + host: str, + port: int = 563, + user: str | None = None, + password: str | None = None, + ssl_context: ssl.SSLContext | None = None, + readermode: bool | None = None, + usenetrc: bool = False, + timeout: float = ..., + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ntpath.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ntpath.pyi new file mode 100644 index 00000000..f1fa137c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ntpath.pyi @@ -0,0 +1,114 @@ +import sys +from _typeshed import BytesPath, StrPath +from genericpath import ( + commonprefix as commonprefix, + exists as exists, + getatime as getatime, + getctime as getctime, + getmtime as getmtime, + getsize as getsize, + isdir as isdir, + isfile as isfile, + samefile as samefile, + sameopenfile as sameopenfile, + samestat as samestat, +) +from os import PathLike + +# Re-export common definitions from posixpath to reduce duplication +from posixpath import ( + abspath as abspath, + basename as basename, + commonpath as commonpath, + curdir as curdir, + defpath as defpath, + devnull as devnull, + dirname as dirname, + expanduser as expanduser, + expandvars as expandvars, + extsep as extsep, + isabs as isabs, + islink as islink, + ismount as ismount, + lexists as lexists, + normcase as normcase, + normpath as normpath, + pardir as pardir, + pathsep as pathsep, + relpath as relpath, + sep as sep, + split as split, + splitdrive as splitdrive, + splitext as splitext, + supports_unicode_filenames as supports_unicode_filenames, +) +from typing import AnyStr, overload +from typing_extensions import LiteralString + +__all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "ismount", + "expanduser", + "expandvars", + "normpath", + "abspath", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", + "relpath", + "samefile", + "sameopenfile", + "samestat", + "commonpath", +] + +altsep: LiteralString + +# First parameter is not actually pos-only, +# but must be defined as pos-only in the stub or cross-platform code doesn't type-check, +# as the parameter name is different in posixpath.join() +@overload +def join(__path: LiteralString, *paths: LiteralString) -> LiteralString: ... +@overload +def join(__path: StrPath, *paths: StrPath) -> str: ... +@overload +def join(__path: BytesPath, *paths: BytesPath) -> bytes: ... + +if sys.platform == "win32": + if sys.version_info >= (3, 10): + @overload + def realpath(path: PathLike[AnyStr], *, strict: bool = False) -> AnyStr: ... + @overload + def realpath(path: AnyStr, *, strict: bool = False) -> AnyStr: ... + else: + @overload + def realpath(path: PathLike[AnyStr]) -> AnyStr: ... + @overload + def realpath(path: AnyStr) -> AnyStr: ... + +else: + realpath = abspath diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/nturl2path.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/nturl2path.pyi new file mode 100644 index 00000000..b8ad8d68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/nturl2path.pyi @@ -0,0 +1,2 @@ +def url2pathname(url: str) -> str: ... +def pathname2url(p: str) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/numbers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/numbers.pyi new file mode 100644 index 00000000..55f21041 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/numbers.pyi @@ -0,0 +1,129 @@ +# Note: these stubs are incomplete. The more complex type +# signatures are currently omitted. + +from abc import ABCMeta, abstractmethod +from typing import Any, SupportsFloat, overload + +__all__ = ["Number", "Complex", "Real", "Rational", "Integral"] + +class Number(metaclass=ABCMeta): + @abstractmethod + def __hash__(self) -> int: ... + +class Complex(Number): + @abstractmethod + def __complex__(self) -> complex: ... + def __bool__(self) -> bool: ... + @property + @abstractmethod + def real(self) -> Any: ... + @property + @abstractmethod + def imag(self) -> Any: ... + @abstractmethod + def __add__(self, other: Any) -> Any: ... + @abstractmethod + def __radd__(self, other: Any) -> Any: ... + @abstractmethod + def __neg__(self) -> Any: ... + @abstractmethod + def __pos__(self) -> Any: ... + def __sub__(self, other: Any) -> Any: ... + def __rsub__(self, other: Any) -> Any: ... + @abstractmethod + def __mul__(self, other: Any) -> Any: ... + @abstractmethod + def __rmul__(self, other: Any) -> Any: ... + @abstractmethod + def __truediv__(self, other: Any) -> Any: ... + @abstractmethod + def __rtruediv__(self, other: Any) -> Any: ... + @abstractmethod + def __pow__(self, exponent: Any) -> Any: ... + @abstractmethod + def __rpow__(self, base: Any) -> Any: ... + @abstractmethod + def __abs__(self) -> Real: ... + @abstractmethod + def conjugate(self) -> Any: ... + @abstractmethod + def __eq__(self, other: object) -> bool: ... + +class Real(Complex, SupportsFloat): + @abstractmethod + def __float__(self) -> float: ... + @abstractmethod + def __trunc__(self) -> int: ... + @abstractmethod + def __floor__(self) -> int: ... + @abstractmethod + def __ceil__(self) -> int: ... + @abstractmethod + @overload + def __round__(self, ndigits: None = None) -> int: ... + @abstractmethod + @overload + def __round__(self, ndigits: int) -> Any: ... + def __divmod__(self, other: Any) -> Any: ... + def __rdivmod__(self, other: Any) -> Any: ... + @abstractmethod + def __floordiv__(self, other: Any) -> int: ... + @abstractmethod + def __rfloordiv__(self, other: Any) -> int: ... + @abstractmethod + def __mod__(self, other: Any) -> Any: ... + @abstractmethod + def __rmod__(self, other: Any) -> Any: ... + @abstractmethod + def __lt__(self, other: Any) -> bool: ... + @abstractmethod + def __le__(self, other: Any) -> bool: ... + def __complex__(self) -> complex: ... + @property + def real(self) -> Any: ... + @property + def imag(self) -> Any: ... + def conjugate(self) -> Any: ... + +class Rational(Real): + @property + @abstractmethod + def numerator(self) -> int: ... + @property + @abstractmethod + def denominator(self) -> int: ... + def __float__(self) -> float: ... + +class Integral(Rational): + @abstractmethod + def __int__(self) -> int: ... + def __index__(self) -> int: ... + @abstractmethod + def __pow__(self, exponent: Any, modulus: Any | None = None) -> Any: ... + @abstractmethod + def __lshift__(self, other: Any) -> Any: ... + @abstractmethod + def __rlshift__(self, other: Any) -> Any: ... + @abstractmethod + def __rshift__(self, other: Any) -> Any: ... + @abstractmethod + def __rrshift__(self, other: Any) -> Any: ... 
+ @abstractmethod + def __and__(self, other: Any) -> Any: ... + @abstractmethod + def __rand__(self, other: Any) -> Any: ... + @abstractmethod + def __xor__(self, other: Any) -> Any: ... + @abstractmethod + def __rxor__(self, other: Any) -> Any: ... + @abstractmethod + def __or__(self, other: Any) -> Any: ... + @abstractmethod + def __ror__(self, other: Any) -> Any: ... + @abstractmethod + def __invert__(self) -> Any: ... + def __float__(self) -> float: ... + @property + def numerator(self) -> int: ... + @property + def denominator(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/opcode.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/opcode.pyi new file mode 100644 index 00000000..1232454e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/opcode.pyi @@ -0,0 +1,57 @@ +import sys +from typing_extensions import Literal + +__all__ = [ + "cmp_op", + "hasconst", + "hasname", + "hasjrel", + "hasjabs", + "haslocal", + "hascompare", + "hasfree", + "opname", + "opmap", + "HAVE_ARGUMENT", + "EXTENDED_ARG", + "hasnargs", + "stack_effect", +] + +if sys.version_info >= (3, 9): + cmp_op: tuple[Literal["<"], Literal["<="], Literal["=="], Literal["!="], Literal[">"], Literal[">="]] +else: + cmp_op: tuple[ + Literal["<"], + Literal["<="], + Literal["=="], + Literal["!="], + Literal[">"], + Literal[">="], + Literal["in"], + Literal["not in"], + Literal["is"], + Literal["is not"], + Literal["exception match"], + Literal["BAD"], + ] +hasconst: list[int] +hasname: list[int] +hasjrel: list[int] +hasjabs: list[int] +haslocal: list[int] +hascompare: list[int] +hasfree: list[int] +opname: list[str] + +opmap: dict[str, int] +HAVE_ARGUMENT: Literal[90] +EXTENDED_ARG: Literal[144] + +if sys.version_info >= (3, 8): + def stack_effect(__opcode: int, __oparg: int | None = None, *, jump: bool | None = None) -> int: ... + +else: + def stack_effect(__opcode: int, __oparg: int | None = None) -> int: ... 
+ +hasnargs: list[int] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/operator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/operator.pyi new file mode 100644 index 00000000..a0e5df79 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/operator.pyi @@ -0,0 +1,110 @@ +import sys +from _operator import * + +__all__ = [ + "abs", + "add", + "and_", + "attrgetter", + "concat", + "contains", + "countOf", + "delitem", + "eq", + "floordiv", + "ge", + "getitem", + "gt", + "iadd", + "iand", + "iconcat", + "ifloordiv", + "ilshift", + "imatmul", + "imod", + "imul", + "index", + "indexOf", + "inv", + "invert", + "ior", + "ipow", + "irshift", + "is_", + "is_not", + "isub", + "itemgetter", + "itruediv", + "ixor", + "le", + "length_hint", + "lshift", + "lt", + "matmul", + "methodcaller", + "mod", + "mul", + "ne", + "neg", + "not_", + "or_", + "pos", + "pow", + "rshift", + "setitem", + "sub", + "truediv", + "truth", + "xor", +] + +if sys.version_info >= (3, 11): + __all__ += ["call"] + +__lt__ = lt +__le__ = le +__eq__ = eq +__ne__ = ne +__ge__ = ge +__gt__ = gt +__not__ = not_ +__abs__ = abs +__add__ = add +__and__ = and_ +__floordiv__ = floordiv +__index__ = index +__inv__ = inv +__invert__ = invert +__lshift__ = lshift +__mod__ = mod +__mul__ = mul +__matmul__ = matmul +__neg__ = neg +__or__ = or_ +__pos__ = pos +__pow__ = pow +__rshift__ = rshift +__sub__ = sub +__truediv__ = truediv +__xor__ = xor +__concat__ = concat +__contains__ = contains +__delitem__ = delitem +__getitem__ = getitem +__setitem__ = setitem +__iadd__ = iadd +__iand__ = iand +__iconcat__ = iconcat +__ifloordiv__ = ifloordiv +__ilshift__ = ilshift +__imod__ = imod +__imul__ = imul +__imatmul__ = imatmul +__ior__ = ior +__ipow__ = ipow +__irshift__ = irshift +__isub__ = isub +__itruediv__ = itruediv +__ixor__ = ixor +if sys.version_info >= (3, 11): + __call__ = call diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/optparse.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/optparse.pyi new file mode 100644 index 00000000..a8c1c4cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/optparse.pyi @@ -0,0 +1,252 @@ +from abc import abstractmethod +from collections.abc import Callable, Iterable, Mapping, Sequence +from typing import IO, Any, AnyStr, overload + +__all__ = [ + "Option", + "make_option", + "SUPPRESS_HELP", + "SUPPRESS_USAGE", + "Values", + "OptionContainer", + "OptionGroup", + "OptionParser", + "HelpFormatter", + "IndentedHelpFormatter", + "TitledHelpFormatter", + "OptParseError", + "OptionError", + "OptionConflictError", + "OptionValueError", + "BadOptionError", + "check_choice", +] + +NO_DEFAULT: tuple[str, ...] +SUPPRESS_HELP: str +SUPPRESS_USAGE: str + +def check_builtin(option: Option, opt: Any, value: str) -> Any: ... +def check_choice(option: Option, opt: Any, value: str) -> str: ... + +class OptParseError(Exception): + msg: str + def __init__(self, msg: str) -> None: ... + +class BadOptionError(OptParseError): + opt_str: str + def __init__(self, opt_str: str) -> None: ... + +class AmbiguousOptionError(BadOptionError): + possibilities: Iterable[str] + def __init__(self, opt_str: str, possibilities: Sequence[str]) -> None: ... + +class OptionError(OptParseError): + option_id: str + def __init__(self, msg: str, option: Option) -> None: ... + +class OptionConflictError(OptionError): ... 
+class OptionValueError(OptParseError): ... + +class HelpFormatter: + NO_DEFAULT_VALUE: str + _long_opt_fmt: str + _short_opt_fmt: str + current_indent: int + default_tag: str + help_position: Any + help_width: Any + indent_increment: int + level: int + max_help_position: int + option_strings: dict[Option, str] + parser: OptionParser + short_first: Any + width: int + def __init__(self, indent_increment: int, max_help_position: int, width: int | None, short_first: int) -> None: ... + def dedent(self) -> None: ... + def expand_default(self, option: Option) -> str: ... + def format_description(self, description: str) -> str: ... + def format_epilog(self, epilog: str) -> str: ... + @abstractmethod + def format_heading(self, heading: Any) -> str: ... + def format_option(self, option: Option) -> str: ... + def format_option_strings(self, option: Option) -> str: ... + @abstractmethod + def format_usage(self, usage: Any) -> str: ... + def indent(self) -> None: ... + def set_long_opt_delimiter(self, delim: str) -> None: ... + def set_parser(self, parser: OptionParser) -> None: ... + def set_short_opt_delimiter(self, delim: str) -> None: ... + def store_option_strings(self, parser: OptionParser) -> None: ... + +class IndentedHelpFormatter(HelpFormatter): + def __init__( + self, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None, short_first: int = 1 + ) -> None: ... + def format_heading(self, heading: str) -> str: ... + def format_usage(self, usage: str) -> str: ... + +class TitledHelpFormatter(HelpFormatter): + def __init__( + self, indent_increment: int = 0, max_help_position: int = 24, width: int | None = None, short_first: int = 0 + ) -> None: ... + def format_heading(self, heading: str) -> str: ... + def format_usage(self, usage: str) -> str: ... + +class Option: + ACTIONS: tuple[str, ...] + ALWAYS_TYPED_ACTIONS: tuple[str, ...] + ATTRS: list[str] + CHECK_METHODS: list[Callable[..., Any]] | None + CONST_ACTIONS: tuple[str, ...] + STORE_ACTIONS: tuple[str, ...] + TYPED_ACTIONS: tuple[str, ...] + TYPES: tuple[str, ...] + TYPE_CHECKER: dict[str, Callable[..., Any]] + _long_opts: list[str] + _short_opts: list[str] + action: str + dest: str | None + default: Any + nargs: int + type: Any + callback: Callable[..., Any] | None + callback_args: tuple[Any, ...] | None + callback_kwargs: dict[str, Any] | None + help: str | None + metavar: str | None + def __init__(self, *opts: str | None, **attrs: Any) -> None: ... + def _check_action(self) -> None: ... + def _check_callback(self) -> None: ... + def _check_choice(self) -> None: ... + def _check_const(self) -> None: ... + def _check_dest(self) -> None: ... + def _check_nargs(self) -> None: ... + def _check_opt_strings(self, opts: Iterable[str | None]) -> list[str]: ... + def _check_type(self) -> None: ... + def _set_attrs(self, attrs: dict[str, Any]) -> None: ... + def _set_opt_strings(self, opts: Iterable[str]) -> None: ... + def check_value(self, opt: str, value: Any) -> Any: ... + def convert_value(self, opt: str, value: Any) -> Any: ... + def get_opt_string(self) -> str: ... + def process(self, opt: Any, value: Any, values: Any, parser: OptionParser) -> int: ... + def take_action(self, action: str, dest: str, opt: Any, value: Any, values: Any, parser: OptionParser) -> int: ... + def takes_value(self) -> bool: ... 
+ +make_option = Option + +class OptionContainer: + _long_opt: dict[str, Option] + _short_opt: dict[str, Option] + conflict_handler: str + defaults: dict[str, Any] + description: Any + option_class: type[Option] + def __init__(self, option_class: type[Option], conflict_handler: Any, description: Any) -> None: ... + def _check_conflict(self, option: Any) -> None: ... + def _create_option_mappings(self) -> None: ... + def _share_option_mappings(self, parser: OptionParser) -> None: ... + @overload + def add_option(self, opt: Option) -> Option: ... + @overload + def add_option(self, *args: str | None, **kwargs: Any) -> Any: ... + def add_options(self, option_list: Iterable[Option]) -> None: ... + def destroy(self) -> None: ... + def format_description(self, formatter: HelpFormatter | None) -> Any: ... + def format_help(self, formatter: HelpFormatter | None) -> str: ... + def format_option_help(self, formatter: HelpFormatter | None) -> str: ... + def get_description(self) -> Any: ... + def get_option(self, opt_str: str) -> Option | None: ... + def has_option(self, opt_str: str) -> bool: ... + def remove_option(self, opt_str: str) -> None: ... + def set_conflict_handler(self, handler: Any) -> None: ... + def set_description(self, description: Any) -> None: ... + +class OptionGroup(OptionContainer): + option_list: list[Option] + parser: OptionParser + title: str + def __init__(self, parser: OptionParser, title: str, description: str | None = None) -> None: ... + def _create_option_list(self) -> None: ... + def set_title(self, title: str) -> None: ... + +class Values: + def __init__(self, defaults: Mapping[str, Any] | None = None) -> None: ... + def _update(self, dict: Mapping[str, Any], mode: Any) -> None: ... + def _update_careful(self, dict: Mapping[str, Any]) -> None: ... + def _update_loose(self, dict: Mapping[str, Any]) -> None: ... + def ensure_value(self, attr: str, value: Any) -> Any: ... + def read_file(self, filename: str, mode: str = "careful") -> None: ... + def read_module(self, modname: str, mode: str = "careful") -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __eq__(self, other: object) -> bool: ... + +class OptionParser(OptionContainer): + allow_interspersed_args: bool + epilog: str | None + formatter: HelpFormatter + largs: list[str] | None + option_groups: list[OptionGroup] + option_list: list[Option] + process_default_values: Any + prog: str | None + rargs: list[Any] | None + standard_option_list: list[Option] + usage: str | None + values: Values | None + version: str + def __init__( + self, + usage: str | None = None, + option_list: Iterable[Option] | None = None, + option_class: type[Option] = ..., + version: str | None = None, + conflict_handler: str = "error", + description: str | None = None, + formatter: HelpFormatter | None = None, + add_help_option: bool = True, + prog: str | None = None, + epilog: str | None = None, + ) -> None: ... + def _add_help_option(self) -> None: ... + def _add_version_option(self) -> None: ... + def _create_option_list(self) -> None: ... + def _get_all_options(self) -> list[Option]: ... + def _get_args(self, args: Iterable[Any]) -> list[Any]: ... + def _init_parsing_state(self) -> None: ... + def _match_long_opt(self, opt: str) -> str: ... + def _populate_option_list(self, option_list: Iterable[Option], add_help: bool = True) -> None: ... + def _process_args(self, largs: list[Any], rargs: list[Any], values: Values) -> None: ... 
+ def _process_long_opt(self, rargs: list[Any], values: Any) -> None: ... + def _process_short_opts(self, rargs: list[Any], values: Any) -> None: ... + @overload + def add_option_group(self, __opt_group: OptionGroup) -> OptionGroup: ... + @overload + def add_option_group(self, *args: Any, **kwargs: Any) -> OptionGroup: ... + def check_values(self, values: Values, args: list[str]) -> tuple[Values, list[str]]: ... + def disable_interspersed_args(self) -> None: ... + def enable_interspersed_args(self) -> None: ... + def error(self, msg: str) -> None: ... + def exit(self, status: int = 0, msg: str | None = None) -> None: ... + def expand_prog_name(self, s: str | None) -> Any: ... + def format_epilog(self, formatter: HelpFormatter) -> Any: ... + def format_help(self, formatter: HelpFormatter | None = None) -> str: ... + def format_option_help(self, formatter: HelpFormatter | None = None) -> str: ... + def get_default_values(self) -> Values: ... + def get_option_group(self, opt_str: str) -> Any: ... + def get_prog_name(self) -> str: ... + def get_usage(self) -> str: ... + def get_version(self) -> str: ... + @overload + def parse_args(self, args: None = None, values: Values | None = None) -> tuple[Values, list[str]]: ... + @overload + def parse_args(self, args: Sequence[AnyStr], values: Values | None = None) -> tuple[Values, list[AnyStr]]: ... + def print_usage(self, file: IO[str] | None = None) -> None: ... + def print_help(self, file: IO[str] | None = None) -> None: ... + def print_version(self, file: IO[str] | None = None) -> None: ... + def set_default(self, dest: Any, value: Any) -> None: ... + def set_defaults(self, **kwargs: Any) -> None: ... + def set_process_default_values(self, process: Any) -> None: ... + def set_usage(self, usage: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/os/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/os/__init__.pyi new file mode 100644 index 00000000..595b7878 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/os/__init__.pyi @@ -0,0 +1,1034 @@ +import sys +from _typeshed import ( + AnyStr_co, + BytesPath, + FileDescriptorLike, + FileDescriptorOrPath, + GenericPath, + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, + StrOrBytesPath, + StrPath, + SupportsLenAndGetItem, + Unused, + WriteableBuffer, + structseq, +) +from abc import abstractmethod +from builtins import OSError +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, Sequence +from contextlib import AbstractContextManager +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper as _TextIOWrapper +from subprocess import Popen +from typing import IO, Any, AnyStr, BinaryIO, Generic, NoReturn, Protocol, TypeVar, overload, runtime_checkable +from typing_extensions import Final, Literal, Self, TypeAlias, final + +from . 
import path as _path + +if sys.version_info >= (3, 9): + from types import GenericAlias + +# This unnecessary alias is to work around various errors +path = _path + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + +# ----- os variables ----- + +error = OSError + +supports_bytes_environ: bool + +supports_dir_fd: set[Callable[..., Any]] +supports_fd: set[Callable[..., Any]] +supports_effective_ids: set[Callable[..., Any]] +supports_follow_symlinks: set[Callable[..., Any]] + +if sys.platform != "win32": + # Unix only + PRIO_PROCESS: int + PRIO_PGRP: int + PRIO_USER: int + + F_LOCK: int + F_TLOCK: int + F_ULOCK: int + F_TEST: int + + if sys.platform != "darwin": + POSIX_FADV_NORMAL: int + POSIX_FADV_SEQUENTIAL: int + POSIX_FADV_RANDOM: int + POSIX_FADV_NOREUSE: int + POSIX_FADV_WILLNEED: int + POSIX_FADV_DONTNEED: int + + SF_NODISKIO: int + SF_MNOWAIT: int + SF_SYNC: int + + if sys.platform == "linux": + XATTR_SIZE_MAX: int + XATTR_CREATE: int + XATTR_REPLACE: int + + P_PID: int + P_PGID: int + P_ALL: int + + if sys.platform == "linux" and sys.version_info >= (3, 9): + P_PIDFD: int + + WEXITED: int + WSTOPPED: int + WNOWAIT: int + + CLD_EXITED: int + CLD_DUMPED: int + CLD_TRAPPED: int + CLD_CONTINUED: int + + if sys.version_info >= (3, 9): + CLD_KILLED: int + CLD_STOPPED: int + + # TODO: SCHED_RESET_ON_FORK not available on darwin? + # TODO: SCHED_BATCH and SCHED_IDLE are linux only? + SCHED_OTHER: int # some flavors of Unix + SCHED_BATCH: int # some flavors of Unix + SCHED_IDLE: int # some flavors of Unix + SCHED_SPORADIC: int # some flavors of Unix + SCHED_FIFO: int # some flavors of Unix + SCHED_RR: int # some flavors of Unix + SCHED_RESET_ON_FORK: int # some flavors of Unix + +if sys.platform != "win32": + RTLD_LAZY: int + RTLD_NOW: int + RTLD_GLOBAL: int + RTLD_LOCAL: int + RTLD_NODELETE: int + RTLD_NOLOAD: int + +if sys.platform == "linux": + RTLD_DEEPBIND: int + GRND_NONBLOCK: int + GRND_RANDOM: int + +SEEK_SET: int +SEEK_CUR: int +SEEK_END: int +if sys.platform != "win32": + SEEK_DATA: int # some flavors of Unix + SEEK_HOLE: int # some flavors of Unix + +O_RDONLY: int +O_WRONLY: int +O_RDWR: int +O_APPEND: int +O_CREAT: int +O_EXCL: int +O_TRUNC: int +# We don't use sys.platform for O_* flags to denote platform-dependent APIs because some codes, +# including tests for mypy, use a more finer way than sys.platform before using these APIs +# See https://github.com/python/typeshed/pull/2286 for discussions +O_DSYNC: int # Unix only +O_RSYNC: int # Unix only +O_SYNC: int # Unix only +O_NDELAY: int # Unix only +O_NONBLOCK: int # Unix only +O_NOCTTY: int # Unix only +O_CLOEXEC: int # Unix only +O_SHLOCK: int # Unix only +O_EXLOCK: int # Unix only +O_BINARY: int # Windows only +O_NOINHERIT: int # Windows only +O_SHORT_LIVED: int # Windows only +O_TEMPORARY: int # Windows only +O_RANDOM: int # Windows only +O_SEQUENTIAL: int # Windows only +O_TEXT: int # Windows only +O_ASYNC: int # Gnu extension if in C library +O_DIRECT: int # Gnu extension if in C library +O_DIRECTORY: int # Gnu extension if in C library +O_NOFOLLOW: int # Gnu extension if in C library +O_NOATIME: int # Gnu extension if in C library +O_PATH: int # Gnu extension if in C library +O_TMPFILE: int # Gnu extension if in C library +O_LARGEFILE: int # Gnu extension if in C library +O_ACCMODE: int # TODO: when does this exist? 
+ +if sys.platform != "win32" and sys.platform != "darwin": + # posix, but apparently missing on macos + ST_APPEND: int + ST_MANDLOCK: int + ST_NOATIME: int + ST_NODEV: int + ST_NODIRATIME: int + ST_NOEXEC: int + ST_RELATIME: int + ST_SYNCHRONOUS: int + ST_WRITE: int + +if sys.platform != "win32": + NGROUPS_MAX: int + ST_NOSUID: int + ST_RDONLY: int + +curdir: str +pardir: str +sep: str +if sys.platform == "win32": + altsep: str +else: + altsep: str | None +extsep: str +pathsep: str +defpath: str +linesep: str +devnull: str +name: str + +F_OK: int +R_OK: int +W_OK: int +X_OK: int + +_EnvironCodeFunc: TypeAlias = Callable[[AnyStr], AnyStr] + +class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): + encodekey: _EnvironCodeFunc[AnyStr] + decodekey: _EnvironCodeFunc[AnyStr] + encodevalue: _EnvironCodeFunc[AnyStr] + decodevalue: _EnvironCodeFunc[AnyStr] + if sys.version_info >= (3, 9): + def __init__( + self, + data: MutableMapping[AnyStr, AnyStr], + encodekey: _EnvironCodeFunc[AnyStr], + decodekey: _EnvironCodeFunc[AnyStr], + encodevalue: _EnvironCodeFunc[AnyStr], + decodevalue: _EnvironCodeFunc[AnyStr], + ) -> None: ... + else: + putenv: Callable[[AnyStr, AnyStr], object] + unsetenv: Callable[[AnyStr, AnyStr], object] + def __init__( + self, + data: MutableMapping[AnyStr, AnyStr], + encodekey: _EnvironCodeFunc[AnyStr], + decodekey: _EnvironCodeFunc[AnyStr], + encodevalue: _EnvironCodeFunc[AnyStr], + decodevalue: _EnvironCodeFunc[AnyStr], + putenv: Callable[[AnyStr, AnyStr], object], + unsetenv: Callable[[AnyStr, AnyStr], object], + ) -> None: ... + + def setdefault(self, key: AnyStr, value: AnyStr) -> AnyStr: ... # type: ignore[override] + def copy(self) -> dict[AnyStr, AnyStr]: ... + def __delitem__(self, key: AnyStr) -> None: ... + def __getitem__(self, key: AnyStr) -> AnyStr: ... + def __setitem__(self, key: AnyStr, value: AnyStr) -> None: ... + def __iter__(self) -> Iterator[AnyStr]: ... + def __len__(self) -> int: ... + if sys.version_info >= (3, 9): + def __or__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + # We use @overload instead of a Union for reasons similar to those given for + # overloading MutableMapping.update in stdlib/typing.pyi + # The type: ignore is needed due to incompatible __or__/__ior__ signatures + @overload # type: ignore[misc] + def __ior__(self, other: Mapping[AnyStr, AnyStr]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... 
+ +environ: _Environ[str] +if sys.platform != "win32": + environb: _Environ[bytes] + +if sys.platform != "win32": + confstr_names: dict[str, int] + pathconf_names: dict[str, int] + sysconf_names: dict[str, int] + + EX_OK: int + EX_USAGE: int + EX_DATAERR: int + EX_NOINPUT: int + EX_NOUSER: int + EX_NOHOST: int + EX_UNAVAILABLE: int + EX_SOFTWARE: int + EX_OSERR: int + EX_OSFILE: int + EX_CANTCREAT: int + EX_IOERR: int + EX_TEMPFAIL: int + EX_PROTOCOL: int + EX_NOPERM: int + EX_CONFIG: int + EX_NOTFOUND: int + +P_NOWAIT: int +P_NOWAITO: int +P_WAIT: int +if sys.platform == "win32": + P_DETACH: int + P_OVERLAY: int + +# wait()/waitpid() options +if sys.platform != "win32": + WNOHANG: int # Unix only + WCONTINUED: int # some Unix systems + WUNTRACED: int # Unix only + +TMP_MAX: int # Undocumented, but used by tempfile + +# ----- os classes (structures) ----- +@final +class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, float, float, float]): + # The constructor of this class takes an iterable of variable length (though it must be at least 10). + # + # However, this class behaves like a tuple of 10 elements, + # no matter how long the iterable supplied to the constructor is. + # https://github.com/python/typeshed/pull/6560#discussion_r767162532 + # + # The 10 elements always present are st_mode, st_ino, st_dev, st_nlink, + # st_uid, st_gid, st_size, st_atime, st_mtime, st_ctime. + # + # More items may be added at the end by some implementations. + if sys.version_info >= (3, 10): + __match_args__: Final = ("st_mode", "st_ino", "st_dev", "st_nlink", "st_uid", "st_gid", "st_size") + @property + def st_mode(self) -> int: ... # protection bits, + @property + def st_ino(self) -> int: ... # inode number, + @property + def st_dev(self) -> int: ... # device, + @property + def st_nlink(self) -> int: ... # number of hard links, + @property + def st_uid(self) -> int: ... # user id of owner, + @property + def st_gid(self) -> int: ... # group id of owner, + @property + def st_size(self) -> int: ... # size of file, in bytes, + @property + def st_atime(self) -> float: ... # time of most recent access, + @property + def st_mtime(self) -> float: ... # time of most recent content modification, + # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) + @property + def st_ctime(self) -> float: ... + @property + def st_atime_ns(self) -> int: ... # time of most recent access, in nanoseconds + @property + def st_mtime_ns(self) -> int: ... # time of most recent content modification in nanoseconds + # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds + @property + def st_ctime_ns(self) -> int: ... + if sys.platform == "win32": + @property + def st_file_attributes(self) -> int: ... + if sys.version_info >= (3, 8): + @property + def st_reparse_tag(self) -> int: ... + else: + @property + def st_blocks(self) -> int: ... # number of blocks allocated for file + @property + def st_blksize(self) -> int: ... # filesystem blocksize + @property + def st_rdev(self) -> int: ... # type of device if an inode device + if sys.platform != "linux": + # These properties are available on MacOS, but not on Windows or Ubuntu. + # On other Unix systems (such as FreeBSD), the following attributes may be + # available (but may be only filled out if root tries to use them): + @property + def st_gen(self) -> int: ... # file generation number + @property + def st_birthtime(self) -> int: ... 
# time of file creation + if sys.platform == "darwin": + @property + def st_flags(self) -> int: ... # user defined flags for file + # Attributes documented as sometimes appearing, but deliberately omitted from the stub: `st_creator`, `st_rsize`, `st_type`. + # See https://github.com/python/typeshed/pull/6560#issuecomment-991253327 + +@runtime_checkable +class PathLike(Protocol[AnyStr_co]): + @abstractmethod + def __fspath__(self) -> AnyStr_co: ... + +@overload +def listdir(path: StrPath | None = None) -> list[str]: ... +@overload +def listdir(path: BytesPath) -> list[bytes]: ... +@overload +def listdir(path: int) -> list[str]: ... +@final +class DirEntry(Generic[AnyStr]): + # This is what the scandir iterator yields + # The constructor is hidden + + @property + def name(self) -> AnyStr: ... + @property + def path(self) -> AnyStr: ... + def inode(self) -> int: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... + def is_symlink(self) -> bool: ... + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... + def __fspath__(self) -> AnyStr: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +@final +class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ( + "f_bsize", + "f_frsize", + "f_blocks", + "f_bfree", + "f_bavail", + "f_files", + "f_ffree", + "f_favail", + "f_flag", + "f_namemax", + ) + @property + def f_bsize(self) -> int: ... + @property + def f_frsize(self) -> int: ... + @property + def f_blocks(self) -> int: ... + @property + def f_bfree(self) -> int: ... + @property + def f_bavail(self) -> int: ... + @property + def f_files(self) -> int: ... + @property + def f_ffree(self) -> int: ... + @property + def f_favail(self) -> int: ... + @property + def f_flag(self) -> int: ... + @property + def f_namemax(self) -> int: ... + @property + def f_fsid(self) -> int: ... + +# ----- os function stubs ----- +def fsencode(filename: StrOrBytesPath) -> bytes: ... +def fsdecode(filename: StrOrBytesPath) -> str: ... +@overload +def fspath(path: str) -> str: ... +@overload +def fspath(path: bytes) -> bytes: ... +@overload +def fspath(path: PathLike[AnyStr]) -> AnyStr: ... +def get_exec_path(env: Mapping[str, str] | None = None) -> list[str]: ... +def getlogin() -> str: ... +def getpid() -> int: ... +def getppid() -> int: ... +def strerror(__code: int) -> str: ... +def umask(__mask: int) -> int: ... +@final +class uname_result(structseq[str], tuple[str, str, str, str, str]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("sysname", "nodename", "release", "version", "machine") + @property + def sysname(self) -> str: ... + @property + def nodename(self) -> str: ... + @property + def release(self) -> str: ... + @property + def version(self) -> str: ... + @property + def machine(self) -> str: ... + +if sys.platform != "win32": + def ctermid() -> str: ... + def getegid() -> int: ... + def geteuid() -> int: ... + def getgid() -> int: ... + def getgrouplist(__user: str, __group: int) -> list[int]: ... + def getgroups() -> list[int]: ... # Unix only, behaves differently on Mac + def initgroups(__username: str, __gid: int) -> None: ... + def getpgid(pid: int) -> int: ... + def getpgrp() -> int: ... + def getpriority(which: int, who: int) -> int: ... + def setpriority(which: int, who: int, priority: int) -> None: ... 
+ if sys.platform != "darwin": + def getresuid() -> tuple[int, int, int]: ... + def getresgid() -> tuple[int, int, int]: ... + + def getuid() -> int: ... + def setegid(__egid: int) -> None: ... + def seteuid(__euid: int) -> None: ... + def setgid(__gid: int) -> None: ... + def setgroups(__groups: Sequence[int]) -> None: ... + def setpgrp() -> None: ... + def setpgid(__pid: int, __pgrp: int) -> None: ... + def setregid(__rgid: int, __egid: int) -> None: ... + if sys.platform != "darwin": + def setresgid(rgid: int, egid: int, sgid: int) -> None: ... + def setresuid(ruid: int, euid: int, suid: int) -> None: ... + + def setreuid(__ruid: int, __euid: int) -> None: ... + def getsid(__pid: int) -> int: ... + def setsid() -> None: ... + def setuid(__uid: int) -> None: ... + def uname() -> uname_result: ... + +@overload +def getenv(key: str) -> str | None: ... +@overload +def getenv(key: str, default: _T) -> str | _T: ... + +if sys.platform != "win32": + @overload + def getenvb(key: bytes) -> bytes | None: ... + @overload + def getenvb(key: bytes, default: _T) -> bytes | _T: ... + def putenv(__name: StrOrBytesPath, __value: StrOrBytesPath) -> None: ... + def unsetenv(__name: StrOrBytesPath) -> None: ... + +else: + def putenv(__name: str, __value: str) -> None: ... + + if sys.version_info >= (3, 9): + def unsetenv(__name: str) -> None: ... + +_Opener: TypeAlias = Callable[[str, int], int] + +@overload +def fdopen( + fd: int, + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> _TextIOWrapper: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> FileIO: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedRandom: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedWriter: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedReader: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryMode, + buffering: int = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BinaryIO: ... +@overload +def fdopen( + fd: int, + mode: str, + buffering: int = -1, + encoding: str | None = None, + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> IO[Any]: ... +def close(fd: int) -> None: ... +def closerange(__fd_low: int, __fd_high: int) -> None: ... +def device_encoding(fd: int) -> str | None: ... +def dup(__fd: int) -> int: ... +def dup2(fd: int, fd2: int, inheritable: bool = True) -> int: ... +def fstat(fd: int) -> stat_result: ... +def ftruncate(__fd: int, __length: int) -> None: ... +def fsync(fd: FileDescriptorLike) -> None: ... +def isatty(__fd: int) -> bool: ... 
+ +if sys.platform != "win32" and sys.version_info >= (3, 11): + def login_tty(__fd: int) -> None: ... + +def lseek(__fd: int, __position: int, __how: int) -> int: ... +def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: ... +def pipe() -> tuple[int, int]: ... +def read(__fd: int, __length: int) -> bytes: ... + +if sys.platform != "win32": + def fchmod(fd: int, mode: int) -> None: ... + def fchown(fd: int, uid: int, gid: int) -> None: ... + def fpathconf(__fd: int, __name: str | int) -> int: ... + def fstatvfs(__fd: int) -> statvfs_result: ... + def get_blocking(__fd: int) -> bool: ... + def set_blocking(__fd: int, __blocking: bool) -> None: ... + def lockf(__fd: int, __command: int, __length: int) -> None: ... + def openpty() -> tuple[int, int]: ... # some flavors of Unix + if sys.platform != "darwin": + def fdatasync(fd: FileDescriptorLike) -> None: ... + def pipe2(__flags: int) -> tuple[int, int]: ... # some flavors of Unix + def posix_fallocate(__fd: int, __offset: int, __length: int) -> None: ... + def posix_fadvise(__fd: int, __offset: int, __length: int, __advice: int) -> None: ... + + def pread(__fd: int, __length: int, __offset: int) -> bytes: ... + def pwrite(__fd: int, __buffer: ReadableBuffer, __offset: int) -> int: ... + # In CI, stubtest sometimes reports that these are available on MacOS, sometimes not + def preadv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer], __offset: int, __flags: int = 0) -> int: ... + def pwritev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer], __offset: int, __flags: int = 0) -> int: ... + if sys.platform != "darwin": + if sys.version_info >= (3, 10): + RWF_APPEND: int # docs say available on 3.7+, stubtest says otherwise + RWF_DSYNC: int + RWF_SYNC: int + RWF_HIPRI: int + RWF_NOWAIT: int + @overload + def sendfile(out_fd: int, in_fd: int, offset: int | None, count: int) -> int: ... + @overload + def sendfile( + out_fd: int, + in_fd: int, + offset: int, + count: int, + headers: Sequence[ReadableBuffer] = ..., + trailers: Sequence[ReadableBuffer] = ..., + flags: int = 0, + ) -> int: ... # FreeBSD and Mac OS X only + def readv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer]) -> int: ... + def writev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer]) -> int: ... + +@final +class terminal_size(structseq[int], tuple[int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("columns", "lines") + @property + def columns(self) -> int: ... + @property + def lines(self) -> int: ... + +def get_terminal_size(__fd: int = ...) -> terminal_size: ... +def get_inheritable(__fd: int) -> bool: ... +def set_inheritable(__fd: int, __inheritable: bool) -> None: ... + +if sys.platform == "win32": + def get_handle_inheritable(__handle: int) -> bool: ... + def set_handle_inheritable(__handle: int, __inheritable: bool) -> None: ... + +if sys.platform != "win32": + # Unix only + def tcgetpgrp(__fd: int) -> int: ... + def tcsetpgrp(__fd: int, __pgid: int) -> None: ... + def ttyname(__fd: int) -> str: ... + +def write(__fd: int, __data: ReadableBuffer) -> int: ... +def access( + path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True +) -> bool: ... +def chdir(path: FileDescriptorOrPath) -> None: ... + +if sys.platform != "win32": + def fchdir(fd: FileDescriptorLike) -> None: ... + +def getcwd() -> str: ... +def getcwdb() -> bytes: ... 
+def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: ... + +if sys.platform != "win32" and sys.platform != "linux": + def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: ... # some flavors of Unix + def lchflags(path: StrOrBytesPath, flags: int) -> None: ... + def lchmod(path: StrOrBytesPath, mode: int) -> None: ... + +if sys.platform != "win32": + def chroot(path: StrOrBytesPath) -> None: ... + def chown( + path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = None, follow_symlinks: bool = True + ) -> None: ... + def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... + +def link( + src: StrOrBytesPath, + dst: StrOrBytesPath, + *, + src_dir_fd: int | None = None, + dst_dir_fd: int | None = None, + follow_symlinks: bool = True, +) -> None: ... +def lstat(path: StrOrBytesPath, *, dir_fd: int | None = None) -> stat_result: ... +def mkdir(path: StrOrBytesPath, mode: int = 0o777, *, dir_fd: int | None = None) -> None: ... + +if sys.platform != "win32": + def mkfifo(path: StrOrBytesPath, mode: int = 0o666, *, dir_fd: int | None = None) -> None: ... # Unix only + +def makedirs(name: StrOrBytesPath, mode: int = 0o777, exist_ok: bool = False) -> None: ... + +if sys.platform != "win32": + def mknod(path: StrOrBytesPath, mode: int = 0o600, device: int = 0, *, dir_fd: int | None = None) -> None: ... + def major(__device: int) -> int: ... + def minor(__device: int) -> int: ... + def makedev(__major: int, __minor: int) -> int: ... + def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: ... # Unix only + +def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = None) -> AnyStr: ... +def remove(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... +def removedirs(name: StrOrBytesPath) -> None: ... +def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: ... +def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: ... +def replace( + src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None +) -> None: ... +def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... + +class _ScandirIterator(Iterator[DirEntry[AnyStr]], AbstractContextManager[_ScandirIterator[AnyStr]]): + def __next__(self) -> DirEntry[AnyStr]: ... + def __exit__(self, *args: Unused) -> None: ... + def close(self) -> None: ... + +@overload +def scandir(path: None = None) -> _ScandirIterator[str]: ... +@overload +def scandir(path: int) -> _ScandirIterator[str]: ... +@overload +def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... +def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> stat_result: ... + +if sys.platform != "win32": + def statvfs(path: FileDescriptorOrPath) -> statvfs_result: ... # Unix only + +def symlink( + src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = False, *, dir_fd: int | None = None +) -> None: ... + +if sys.platform != "win32": + def sync() -> None: ... # Unix only + +def truncate(path: FileDescriptorOrPath, length: int) -> None: ... # Unix only up to version 3.4 +def unlink(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... 
+def utime( + path: FileDescriptorOrPath, + times: tuple[int, int] | tuple[float, float] | None = None, + *, + ns: tuple[int, int] = ..., + dir_fd: int | None = None, + follow_symlinks: bool = True, +) -> None: ... + +_OnError: TypeAlias = Callable[[OSError], object] + +def walk( + top: GenericPath[AnyStr], topdown: bool = True, onerror: _OnError | None = None, followlinks: bool = False +) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... + +if sys.platform != "win32": + @overload + def fwalk( + top: StrPath = ".", + topdown: bool = True, + onerror: _OnError | None = None, + *, + follow_symlinks: bool = False, + dir_fd: int | None = None, + ) -> Iterator[tuple[str, list[str], list[str], int]]: ... + @overload + def fwalk( + top: BytesPath, + topdown: bool = True, + onerror: _OnError | None = None, + *, + follow_symlinks: bool = False, + dir_fd: int | None = None, + ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... + if sys.platform == "linux": + def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> bytes: ... + def listxattr(path: FileDescriptorOrPath | None = None, *, follow_symlinks: bool = True) -> list[str]: ... + def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... + def setxattr( + path: FileDescriptorOrPath, + attribute: StrOrBytesPath, + value: ReadableBuffer, + flags: int = 0, + *, + follow_symlinks: bool = True, + ) -> None: ... + +def abort() -> NoReturn: ... + +# These are defined as execl(file, *args) but the first *arg is mandatory. +def execl(file: StrOrBytesPath, __arg0: StrOrBytesPath, *args: StrOrBytesPath) -> NoReturn: ... +def execlp(file: StrOrBytesPath, __arg0: StrOrBytesPath, *args: StrOrBytesPath) -> NoReturn: ... + +# These are: execle(file, *args, env) but env is pulled from the last element of the args. +def execle(file: StrOrBytesPath, __arg0: StrOrBytesPath, *args: Any) -> NoReturn: ... +def execlpe(file: StrOrBytesPath, __arg0: StrOrBytesPath, *args: Any) -> NoReturn: ... + +# The docs say `args: tuple or list of strings` +# The implementation enforces tuple or list so we can't use Sequence. +# Not separating out PathLike[str] and PathLike[bytes] here because it doesn't make much difference +# in practice, and doing so would explode the number of combinations in this already long union. +# All these combinations are necessary due to list being invariant. +_ExecVArgs: TypeAlias = ( + tuple[StrOrBytesPath, ...] + | list[bytes] + | list[str] + | list[PathLike[Any]] + | list[bytes | str] + | list[bytes | PathLike[Any]] + | list[str | PathLike[Any]] + | list[bytes | str | PathLike[Any]] +) +# Depending on the OS, the keys and values are passed either to +# PyUnicode_FSDecoder (which accepts str | ReadableBuffer) or to +# PyUnicode_FSConverter (which accepts StrOrBytesPath). For simplicity, +# we limit to str | bytes. +_ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] + +def execv(__path: StrOrBytesPath, __argv: _ExecVArgs) -> NoReturn: ... +def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: ... +def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def _exit(status: int) -> NoReturn: ... +def kill(__pid: int, __signal: int) -> None: ... + +if sys.platform != "win32": + # Unix only + def fork() -> int: ... + def forkpty() -> tuple[int, int]: ... 
# some flavors of Unix + def killpg(__pgid: int, __signal: int) -> None: ... + def nice(__increment: int) -> int: ... + if sys.platform != "darwin": + def plock(__op: int) -> None: ... # ???op is int? + +class _wrap_close(_TextIOWrapper): + def __init__(self, stream: _TextIOWrapper, proc: Popen[str]) -> None: ... + def close(self) -> int | None: ... # type: ignore[override] + +def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... +def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... +def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise sig + +if sys.platform != "win32": + def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + +else: + def spawnv(__mode: int, __path: StrOrBytesPath, __argv: _ExecVArgs) -> int: ... + def spawnve(__mode: int, __path: StrOrBytesPath, __argv: _ExecVArgs, __env: _ExecEnv) -> int: ... + +def system(command: StrOrBytesPath) -> int: ... +@final +class times_result(structseq[float], tuple[float, float, float, float, float]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("user", "system", "children_user", "children_system", "elapsed") + @property + def user(self) -> float: ... + @property + def system(self) -> float: ... + @property + def children_user(self) -> float: ... + @property + def children_system(self) -> float: ... + @property + def elapsed(self) -> float: ... + +def times() -> times_result: ... +def waitpid(__pid: int, __options: int) -> tuple[int, int]: ... + +if sys.platform == "win32": + def startfile(path: StrOrBytesPath, operation: str | None = None) -> None: ... + +else: + def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... + def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise signature + def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + def wait() -> tuple[int, int]: ... # Unix only + if sys.platform != "darwin": + @final + class waitid_result(structseq[int], tuple[int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("si_pid", "si_uid", "si_signo", "si_status", "si_code") + @property + def si_pid(self) -> int: ... + @property + def si_uid(self) -> int: ... + @property + def si_signo(self) -> int: ... + @property + def si_status(self) -> int: ... + @property + def si_code(self) -> int: ... + + def waitid(__idtype: int, __ident: int, __options: int) -> waitid_result: ... + + def wait3(options: int) -> tuple[int, int, Any]: ... + def wait4(pid: int, options: int) -> tuple[int, int, Any]: ... + def WCOREDUMP(__status: int) -> bool: ... + def WIFCONTINUED(status: int) -> bool: ... + def WIFSTOPPED(status: int) -> bool: ... + def WIFSIGNALED(status: int) -> bool: ... + def WIFEXITED(status: int) -> bool: ... + def WEXITSTATUS(status: int) -> int: ... + def WSTOPSIG(status: int) -> int: ... + def WTERMSIG(status: int) -> int: ... 
+ if sys.version_info >= (3, 8): + def posix_spawn( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + def posix_spawnp( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + POSIX_SPAWN_OPEN: int + POSIX_SPAWN_CLOSE: int + POSIX_SPAWN_DUP2: int + +if sys.platform != "win32": + @final + class sched_param(structseq[int], tuple[int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("sched_priority",) + def __new__(cls, sched_priority: int) -> Self: ... + @property + def sched_priority(self) -> int: ... + + def sched_get_priority_min(policy: int) -> int: ... # some flavors of Unix + def sched_get_priority_max(policy: int) -> int: ... # some flavors of Unix + def sched_yield() -> None: ... # some flavors of Unix + if sys.platform != "darwin": + def sched_setscheduler(__pid: int, __policy: int, __param: sched_param) -> None: ... # some flavors of Unix + def sched_getscheduler(__pid: int) -> int: ... # some flavors of Unix + def sched_rr_get_interval(__pid: int) -> float: ... # some flavors of Unix + def sched_setparam(__pid: int, __param: sched_param) -> None: ... # some flavors of Unix + def sched_getparam(__pid: int) -> sched_param: ... # some flavors of Unix + def sched_setaffinity(__pid: int, __mask: Iterable[int]) -> None: ... # some flavors of Unix + def sched_getaffinity(__pid: int) -> set[int]: ... # some flavors of Unix + +def cpu_count() -> int | None: ... + +if sys.platform != "win32": + # Unix only + def confstr(__name: str | int) -> str | None: ... + def getloadavg() -> tuple[float, float, float]: ... + def sysconf(__name: str | int) -> int: ... + +if sys.platform == "linux": + def getrandom(size: int, flags: int = 0) -> bytes: ... + +def urandom(__size: int) -> bytes: ... + +if sys.platform != "win32": + def register_at_fork( + *, + before: Callable[..., Any] | None = ..., + after_in_parent: Callable[..., Any] | None = ..., + after_in_child: Callable[..., Any] | None = ..., + ) -> None: ... + +if sys.version_info >= (3, 8): + if sys.platform == "win32": + class _AddedDllDirectory: + path: str | None + def __init__(self, path: str | None, cookie: _T, remove_dll_directory: Callable[[_T], object]) -> None: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + + def add_dll_directory(path: str) -> _AddedDllDirectory: ... + if sys.platform == "linux": + MFD_CLOEXEC: int + MFD_ALLOW_SEALING: int + MFD_HUGETLB: int + MFD_HUGE_SHIFT: int + MFD_HUGE_MASK: int + MFD_HUGE_64KB: int + MFD_HUGE_512KB: int + MFD_HUGE_1MB: int + MFD_HUGE_2MB: int + MFD_HUGE_8MB: int + MFD_HUGE_16MB: int + MFD_HUGE_32MB: int + MFD_HUGE_256MB: int + MFD_HUGE_512MB: int + MFD_HUGE_1GB: int + MFD_HUGE_2GB: int + MFD_HUGE_16GB: int + def memfd_create(name: str, flags: int = ...) -> int: ... + def copy_file_range( + src: int, dst: int, count: int, offset_src: int | None = ..., offset_dst: int | None = ... + ) -> int: ... 
+ +if sys.version_info >= (3, 9): + def waitstatus_to_exitcode(status: int) -> int: ... + + if sys.platform == "linux": + def pidfd_open(pid: int, flags: int = ...) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/os/path.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/os/path.pyi new file mode 100644 index 00000000..dc688a9f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/os/path.pyi @@ -0,0 +1,8 @@ +import sys + +if sys.platform == "win32": + from ntpath import * + from ntpath import __all__ as __all__ +else: + from posixpath import * + from posixpath import __all__ as __all__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ossaudiodev.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ossaudiodev.pyi new file mode 100644 index 00000000..d956a897 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ossaudiodev.pyi @@ -0,0 +1,132 @@ +import sys +from typing import Any, overload +from typing_extensions import Literal + +if sys.platform != "win32" and sys.platform != "darwin": + AFMT_AC3: int + AFMT_A_LAW: int + AFMT_IMA_ADPCM: int + AFMT_MPEG: int + AFMT_MU_LAW: int + AFMT_QUERY: int + AFMT_S16_BE: int + AFMT_S16_LE: int + AFMT_S16_NE: int + AFMT_S8: int + AFMT_U16_BE: int + AFMT_U16_LE: int + AFMT_U8: int + SNDCTL_COPR_HALT: int + SNDCTL_COPR_LOAD: int + SNDCTL_COPR_RCODE: int + SNDCTL_COPR_RCVMSG: int + SNDCTL_COPR_RDATA: int + SNDCTL_COPR_RESET: int + SNDCTL_COPR_RUN: int + SNDCTL_COPR_SENDMSG: int + SNDCTL_COPR_WCODE: int + SNDCTL_COPR_WDATA: int + SNDCTL_DSP_BIND_CHANNEL: int + SNDCTL_DSP_CHANNELS: int + SNDCTL_DSP_GETBLKSIZE: int + SNDCTL_DSP_GETCAPS: int + SNDCTL_DSP_GETCHANNELMASK: int + SNDCTL_DSP_GETFMTS: int + SNDCTL_DSP_GETIPTR: int + SNDCTL_DSP_GETISPACE: int + SNDCTL_DSP_GETODELAY: int + SNDCTL_DSP_GETOPTR: int + SNDCTL_DSP_GETOSPACE: int + SNDCTL_DSP_GETSPDIF: int + SNDCTL_DSP_GETTRIGGER: int + SNDCTL_DSP_MAPINBUF: int + SNDCTL_DSP_MAPOUTBUF: int + SNDCTL_DSP_NONBLOCK: int + SNDCTL_DSP_POST: int + SNDCTL_DSP_PROFILE: int + SNDCTL_DSP_RESET: int + SNDCTL_DSP_SAMPLESIZE: int + SNDCTL_DSP_SETDUPLEX: int + SNDCTL_DSP_SETFMT: int + SNDCTL_DSP_SETFRAGMENT: int + SNDCTL_DSP_SETSPDIF: int + SNDCTL_DSP_SETSYNCRO: int + SNDCTL_DSP_SETTRIGGER: int + SNDCTL_DSP_SPEED: int + SNDCTL_DSP_STEREO: int + SNDCTL_DSP_SUBDIVIDE: int + SNDCTL_DSP_SYNC: int + SNDCTL_FM_4OP_ENABLE: int + SNDCTL_FM_LOAD_INSTR: int + SNDCTL_MIDI_INFO: int + SNDCTL_MIDI_MPUCMD: int + SNDCTL_MIDI_MPUMODE: int + SNDCTL_MIDI_PRETIME: int + SNDCTL_SEQ_CTRLRATE: int + SNDCTL_SEQ_GETINCOUNT: int + SNDCTL_SEQ_GETOUTCOUNT: int + SNDCTL_SEQ_GETTIME: int + SNDCTL_SEQ_NRMIDIS: int + SNDCTL_SEQ_NRSYNTHS: int + SNDCTL_SEQ_OUTOFBAND: int + SNDCTL_SEQ_PANIC: int + SNDCTL_SEQ_PERCMODE: int + SNDCTL_SEQ_RESET: int + SNDCTL_SEQ_RESETSAMPLES: int + SNDCTL_SEQ_SYNC: int + SNDCTL_SEQ_TESTMIDI: int + SNDCTL_SEQ_THRESHOLD: int + SNDCTL_SYNTH_CONTROL: int + SNDCTL_SYNTH_ID: int + SNDCTL_SYNTH_INFO: int + SNDCTL_SYNTH_MEMAVL: int + SNDCTL_SYNTH_REMOVESAMPLE: int + SNDCTL_TMR_CONTINUE: int + SNDCTL_TMR_METRONOME: int + SNDCTL_TMR_SELECT: int + SNDCTL_TMR_SOURCE: int + SNDCTL_TMR_START: int + SNDCTL_TMR_STOP: int + SNDCTL_TMR_TEMPO: int + SNDCTL_TMR_TIMEBASE: int + SOUND_MIXER_ALTPCM: int + SOUND_MIXER_BASS: int + SOUND_MIXER_CD: int + SOUND_MIXER_DIGITAL1: int + SOUND_MIXER_DIGITAL2: int + 
SOUND_MIXER_DIGITAL3: int + SOUND_MIXER_IGAIN: int + SOUND_MIXER_IMIX: int + SOUND_MIXER_LINE: int + SOUND_MIXER_LINE1: int + SOUND_MIXER_LINE2: int + SOUND_MIXER_LINE3: int + SOUND_MIXER_MIC: int + SOUND_MIXER_MONITOR: int + SOUND_MIXER_NRDEVICES: int + SOUND_MIXER_OGAIN: int + SOUND_MIXER_PCM: int + SOUND_MIXER_PHONEIN: int + SOUND_MIXER_PHONEOUT: int + SOUND_MIXER_RADIO: int + SOUND_MIXER_RECLEV: int + SOUND_MIXER_SPEAKER: int + SOUND_MIXER_SYNTH: int + SOUND_MIXER_TREBLE: int + SOUND_MIXER_VIDEO: int + SOUND_MIXER_VOLUME: int + + control_labels: list[str] + control_names: list[str] + + # TODO: oss_audio_device return type + @overload + def open(mode: Literal["r", "w", "rw"]) -> Any: ... + @overload + def open(device: str, mode: Literal["r", "w", "rw"]) -> Any: ... + + # TODO: oss_mixer_device return type + def openmixer(device: str = ...) -> Any: ... + + class OSSAudioError(Exception): ... + error = OSSAudioError diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/parser.pyi new file mode 100644 index 00000000..cce8594e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/parser.pyi @@ -0,0 +1,25 @@ +from _typeshed import StrOrBytesPath +from collections.abc import Sequence +from types import CodeType +from typing import Any +from typing_extensions import final + +def expr(source: str) -> STType: ... +def suite(source: str) -> STType: ... +def sequence2st(sequence: Sequence[Any]) -> STType: ... +def tuple2st(sequence: Sequence[Any]) -> STType: ... +def st2list(st: STType, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... +def st2tuple(st: STType, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... +def compilest(st: STType, filename: StrOrBytesPath = ...) -> CodeType: ... +def isexpr(st: STType) -> bool: ... +def issuite(st: STType) -> bool: ... + +class ParserError(Exception): ... + +@final +class STType: + def compile(self, filename: StrOrBytesPath = ...) -> CodeType: ... + def isexpr(self) -> bool: ... + def issuite(self) -> bool: ... + def tolist(self, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... + def totuple(self, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pathlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pathlib.pyi new file mode 100644 index 00000000..114678ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pathlib.pyi @@ -0,0 +1,208 @@ +import sys +from _typeshed import ( + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, + StrOrBytesPath, + StrPath, +) +from collections.abc import Callable, Generator, Iterator, Sequence +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper +from os import PathLike, stat_result +from types import TracebackType +from typing import IO, Any, BinaryIO, overload +from typing_extensions import Literal, Self + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ["PurePath", "PurePosixPath", "PureWindowsPath", "Path", "PosixPath", "WindowsPath"] + +class PurePath(PathLike[str]): + @property + def parts(self) -> tuple[str, ...]: ... + @property + def drive(self) -> str: ... + @property + def root(self) -> str: ... 
+ @property + def anchor(self) -> str: ... + @property + def name(self) -> str: ... + @property + def suffix(self) -> str: ... + @property + def suffixes(self) -> list[str]: ... + @property + def stem(self) -> str: ... + def __new__(cls, *args: StrPath) -> Self: ... + def __eq__(self, other: object) -> bool: ... + def __fspath__(self) -> str: ... + def __lt__(self, other: PurePath) -> bool: ... + def __le__(self, other: PurePath) -> bool: ... + def __gt__(self, other: PurePath) -> bool: ... + def __ge__(self, other: PurePath) -> bool: ... + def __truediv__(self, key: StrPath) -> Self: ... + def __rtruediv__(self, key: StrPath) -> Self: ... + def __bytes__(self) -> bytes: ... + def as_posix(self) -> str: ... + def as_uri(self) -> str: ... + def is_absolute(self) -> bool: ... + def is_reserved(self) -> bool: ... + if sys.version_info >= (3, 9): + def is_relative_to(self, *other: StrPath) -> bool: ... + + def match(self, path_pattern: str) -> bool: ... + def relative_to(self, *other: StrPath) -> Self: ... + def with_name(self, name: str) -> Self: ... + if sys.version_info >= (3, 9): + def with_stem(self, stem: str) -> Self: ... + + def with_suffix(self, suffix: str) -> Self: ... + def joinpath(self, *other: StrPath) -> Self: ... + @property + def parents(self) -> Sequence[Self]: ... + @property + def parent(self) -> Self: ... + if sys.version_info >= (3, 9) and sys.version_info < (3, 11): + def __class_getitem__(cls, type: Any) -> GenericAlias: ... + +class PurePosixPath(PurePath): ... +class PureWindowsPath(PurePath): ... + +class Path(PurePath): + def __new__(cls, *args: StrPath, **kwargs: Any) -> Self: ... + def __enter__(self) -> Self: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + @classmethod + def cwd(cls) -> Self: ... + if sys.version_info >= (3, 10): + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... + def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: ... + else: + def stat(self) -> stat_result: ... + def chmod(self, mode: int) -> None: ... + + def exists(self) -> bool: ... + def glob(self, pattern: str) -> Generator[Self, None, None]: ... + def is_dir(self) -> bool: ... + def is_file(self) -> bool: ... + def is_symlink(self) -> bool: ... + def is_socket(self) -> bool: ... + def is_fifo(self) -> bool: ... + def is_block_device(self) -> bool: ... + def is_char_device(self) -> bool: ... + def iterdir(self) -> Generator[Self, None, None]: ... + def lchmod(self, mode: int) -> None: ... + def lstat(self) -> stat_result: ... + def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ... + # Adapted from builtins.open + # Text mode: always returns a TextIOWrapper + # The Traversable .open in stdlib/importlib/abc.pyi should be kept in sync with this. + @overload + def open( + self, + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> TextIOWrapper: ... + # Unbuffered binary mode: returns a FileIO + @overload + def open( + self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = None, errors: None = None, newline: None = None + ) -> FileIO: ... + # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter + @overload + def open( + self, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> BufferedRandom: ... 
+ @overload + def open( + self, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> BufferedWriter: ... + @overload + def open( + self, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> BufferedReader: ... + # Buffering cannot be determined: fall back to BinaryIO + @overload + def open( + self, mode: OpenBinaryMode, buffering: int = -1, encoding: None = None, errors: None = None, newline: None = None + ) -> BinaryIO: ... + # Fallback if mode is not specified + @overload + def open( + self, mode: str, buffering: int = -1, encoding: str | None = None, errors: str | None = None, newline: str | None = None + ) -> IO[Any]: ... + if sys.platform != "win32": + # These methods do "exist" on Windows, but they always raise NotImplementedError, + # so it's safer to pretend they don't exist + def owner(self) -> str: ... + def group(self) -> str: ... + def is_mount(self) -> bool: ... + + if sys.version_info >= (3, 9): + def readlink(self) -> Self: ... + if sys.version_info >= (3, 8): + def rename(self, target: str | PurePath) -> Self: ... + def replace(self, target: str | PurePath) -> Self: ... + else: + def rename(self, target: str | PurePath) -> None: ... + def replace(self, target: str | PurePath) -> None: ... + + def resolve(self, strict: bool = False) -> Self: ... + def rglob(self, pattern: str) -> Generator[Self, None, None]: ... + def rmdir(self) -> None: ... + def symlink_to(self, target: str | Path, target_is_directory: bool = False) -> None: ... + if sys.version_info >= (3, 10): + def hardlink_to(self, target: str | Path) -> None: ... + + def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: ... + if sys.version_info >= (3, 8): + def unlink(self, missing_ok: bool = False) -> None: ... + else: + def unlink(self) -> None: ... + + @classmethod + def home(cls) -> Self: ... + def absolute(self) -> Self: ... + def expanduser(self) -> Self: ... + def read_bytes(self) -> bytes: ... + def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... + def samefile(self, other_path: StrPath) -> bool: ... + def write_bytes(self, data: ReadableBuffer) -> int: ... + if sys.version_info >= (3, 10): + def write_text( + self, data: str, encoding: str | None = None, errors: str | None = None, newline: str | None = None + ) -> int: ... + else: + def write_text(self, data: str, encoding: str | None = None, errors: str | None = None) -> int: ... + if sys.version_info >= (3, 8) and sys.version_info < (3, 12): + def link_to(self, target: StrOrBytesPath) -> None: ... + if sys.version_info >= (3, 12): + def walk( + self, top_down: bool = ..., on_error: Callable[[OSError], object] | None = ..., follow_symlinks: bool = ... + ) -> Iterator[tuple[Self, list[str], list[str]]]: ... + +class PosixPath(Path, PurePosixPath): ... +class WindowsPath(Path, PureWindowsPath): ... 
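Editor's note, not part of the vendored typeshed data: the Path.open() overloads above mirror the builtins.open stubs, so text modes promise a TextIOWrapper, unbuffered binary a FileIO, and buffered binary one of the Buffered* classes. A minimal sketch of what that means at runtime, assuming only the standard library (the temporary file is created purely for illustration):

import tempfile
from io import BufferedReader, TextIOWrapper
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    p = Path(tmp) / "example.txt"      # illustrative file name
    p.write_text("hello")              # write_text() returns the character count

    with p.open() as f:                # default mode "r": a TextIOWrapper
        assert isinstance(f, TextIOWrapper)

    with p.open("rb") as f:            # buffered binary read: a BufferedReader
        assert isinstance(f, BufferedReader)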
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pdb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pdb.pyi new file mode 100644 index 00000000..e2871bb5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pdb.pyi @@ -0,0 +1,175 @@ +import signal +import sys +from bdb import Bdb +from cmd import Cmd +from collections.abc import Callable, Iterable, Mapping, Sequence +from inspect import _SourceObjectType +from types import CodeType, FrameType, TracebackType +from typing import IO, Any, ClassVar, TypeVar +from typing_extensions import ParamSpec, Self + +__all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"] + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +line_prefix: str # undocumented + +class Restart(Exception): ... + +def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... +def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: ... +def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ... +def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... +def set_trace(*, header: str | None = None) -> None: ... +def post_mortem(t: TracebackType | None = None) -> None: ... +def pm() -> None: ... + +class Pdb(Bdb, Cmd): + # Everything here is undocumented, except for __init__ + + commands_resuming: ClassVar[list[str]] + + aliases: dict[str, str] + mainpyfile: str + _wait_for_mainpyfile: bool + rcLines: list[str] + commands: dict[int, list[str]] + commands_doprompt: dict[int, bool] + commands_silent: dict[int, bool] + commands_defining: bool + commands_bnum: int | None + lineno: int | None + stack: list[tuple[FrameType, int]] + curindex: int + curframe: FrameType | None + curframe_locals: Mapping[str, Any] + def __init__( + self, + completekey: str = "tab", + stdin: IO[str] | None = None, + stdout: IO[str] | None = None, + skip: Iterable[str] | None = None, + nosigint: bool = False, + readrc: bool = True, + ) -> None: ... + def forget(self) -> None: ... + def setup(self, f: FrameType | None, tb: TracebackType | None) -> None: ... + def execRcLines(self) -> None: ... + def bp_commands(self, frame: FrameType) -> bool: ... + def interaction(self, frame: FrameType | None, traceback: TracebackType | None) -> None: ... + def displayhook(self, obj: object) -> None: ... + def handle_command_def(self, line: str) -> bool: ... + def defaultFile(self) -> str: ... + def lineinfo(self, identifier: str) -> tuple[None, None, None] | tuple[str, str, int]: ... + def checkline(self, filename: str, lineno: int) -> int: ... + def _getval(self, arg: str) -> object: ... + def print_stack_trace(self) -> None: ... + def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = "\n-> ") -> None: ... + def lookupmodule(self, filename: str) -> str | None: ... + if sys.version_info < (3, 11): + def _runscript(self, filename: str) -> None: ... + + def do_commands(self, arg: str) -> bool | None: ... + def do_break(self, arg: str, temporary: bool = ...) -> bool | None: ... + def do_tbreak(self, arg: str) -> bool | None: ... + def do_enable(self, arg: str) -> bool | None: ... + def do_disable(self, arg: str) -> bool | None: ... + def do_condition(self, arg: str) -> bool | None: ... + def do_ignore(self, arg: str) -> bool | None: ... 
+ def do_clear(self, arg: str) -> bool | None: ... + def do_where(self, arg: str) -> bool | None: ... + def do_up(self, arg: str) -> bool | None: ... + def do_down(self, arg: str) -> bool | None: ... + def do_until(self, arg: str) -> bool | None: ... + def do_step(self, arg: str) -> bool | None: ... + def do_next(self, arg: str) -> bool | None: ... + def do_run(self, arg: str) -> bool | None: ... + def do_return(self, arg: str) -> bool | None: ... + def do_continue(self, arg: str) -> bool | None: ... + def do_jump(self, arg: str) -> bool | None: ... + def do_debug(self, arg: str) -> bool | None: ... + def do_quit(self, arg: str) -> bool | None: ... + def do_EOF(self, arg: str) -> bool | None: ... + def do_args(self, arg: str) -> bool | None: ... + def do_retval(self, arg: str) -> bool | None: ... + def do_p(self, arg: str) -> bool | None: ... + def do_pp(self, arg: str) -> bool | None: ... + def do_list(self, arg: str) -> bool | None: ... + def do_whatis(self, arg: str) -> bool | None: ... + def do_alias(self, arg: str) -> bool | None: ... + def do_unalias(self, arg: str) -> bool | None: ... + def do_help(self, arg: str) -> bool | None: ... + do_b = do_break + do_cl = do_clear + do_w = do_where + do_bt = do_where + do_u = do_up + do_d = do_down + do_unt = do_until + do_s = do_step + do_n = do_next + do_restart = do_run + do_r = do_return + do_c = do_continue + do_cont = do_continue + do_j = do_jump + do_q = do_quit + do_exit = do_quit + do_a = do_args + do_rv = do_retval + do_l = do_list + do_h = do_help + def help_exec(self) -> None: ... + def help_pdb(self) -> None: ... + def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ... + def message(self, msg: str) -> None: ... + def error(self, msg: str) -> None: ... + def _select_frame(self, number: int) -> None: ... + def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ... + def _print_lines( + self, lines: Sequence[str], start: int, breaks: Sequence[int] = ..., frame: FrameType | None = None + ) -> None: ... + def _cmdloop(self) -> None: ... + def do_display(self, arg: str) -> bool | None: ... + def do_interact(self, arg: str) -> bool | None: ... + def do_longlist(self, arg: str) -> bool | None: ... + def do_source(self, arg: str) -> bool | None: ... + def do_undisplay(self, arg: str) -> bool | None: ... + do_ll = do_longlist + def _complete_location(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + def _complete_bpnumber(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + def _complete_expression(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + def complete_undisplay(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... + def complete_unalias(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ... 
+ complete_commands = _complete_bpnumber + complete_break = _complete_location + complete_b = _complete_location + complete_tbreak = _complete_location + complete_enable = _complete_bpnumber + complete_disable = _complete_bpnumber + complete_condition = _complete_bpnumber + complete_ignore = _complete_bpnumber + complete_clear = _complete_location + complete_cl = _complete_location + complete_debug = _complete_expression + complete_print = _complete_expression + complete_p = _complete_expression + complete_pp = _complete_expression + complete_source = _complete_expression + complete_whatis = _complete_expression + complete_display = _complete_expression + + if sys.version_info < (3, 11): + def _runmodule(self, module_name: str) -> None: ... + +# undocumented + +def find_function(funcname: str, filename: str) -> tuple[str, str, int] | None: ... +def main() -> None: ... +def help() -> None: ... +def getsourcelines(obj: _SourceObjectType) -> tuple[list[str], int]: ... +def lasti2lineno(code: CodeType, lasti: int) -> int: ... + +class _rstr(str): + def __repr__(self) -> Self: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pickle.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pickle.pyi new file mode 100644 index 00000000..57c4cb03 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pickle.pyi @@ -0,0 +1,284 @@ +import sys +from _typeshed import ReadableBuffer, SupportsWrite +from collections.abc import Callable, Iterable, Iterator, Mapping +from typing import Any, ClassVar, Protocol, SupportsBytes +from typing_extensions import SupportsIndex, TypeAlias, final + +__all__ = [ + "PickleError", + "PicklingError", + "UnpicklingError", + "Pickler", + "Unpickler", + "dump", + "dumps", + "load", + "loads", + "ADDITEMS", + "APPEND", + "APPENDS", + "BINBYTES", + "BINBYTES8", + "BINFLOAT", + "BINGET", + "BININT", + "BININT1", + "BININT2", + "BINPERSID", + "BINPUT", + "BINSTRING", + "BINUNICODE", + "BINUNICODE8", + "BUILD", + "DEFAULT_PROTOCOL", + "DICT", + "DUP", + "EMPTY_DICT", + "EMPTY_LIST", + "EMPTY_SET", + "EMPTY_TUPLE", + "EXT1", + "EXT2", + "EXT4", + "FALSE", + "FLOAT", + "FRAME", + "FROZENSET", + "GET", + "GLOBAL", + "HIGHEST_PROTOCOL", + "INST", + "INT", + "LIST", + "LONG", + "LONG1", + "LONG4", + "LONG_BINGET", + "LONG_BINPUT", + "MARK", + "MEMOIZE", + "NEWFALSE", + "NEWOBJ", + "NEWOBJ_EX", + "NEWTRUE", + "NONE", + "OBJ", + "PERSID", + "POP", + "POP_MARK", + "PROTO", + "PUT", + "REDUCE", + "SETITEM", + "SETITEMS", + "SHORT_BINBYTES", + "SHORT_BINSTRING", + "SHORT_BINUNICODE", + "STACK_GLOBAL", + "STOP", + "STRING", + "TRUE", + "TUPLE", + "TUPLE1", + "TUPLE2", + "TUPLE3", + "UNICODE", +] + +if sys.version_info >= (3, 8): + __all__ += ["BYTEARRAY8", "NEXT_BUFFER", "PickleBuffer", "READONLY_BUFFER"] + +HIGHEST_PROTOCOL: int +DEFAULT_PROTOCOL: int + +bytes_types: tuple[type[Any], ...] # undocumented + +class _ReadableFileobj(Protocol): + def read(self, __n: int) -> bytes: ... + def readline(self) -> bytes: ... + +if sys.version_info >= (3, 8): + @final + class PickleBuffer: + def __init__(self, buffer: ReadableBuffer) -> None: ... + def raw(self) -> memoryview: ... + def release(self) -> None: ... + _BufferCallback: TypeAlias = Callable[[PickleBuffer], Any] | None + def dump( + obj: Any, + file: SupportsWrite[bytes], + protocol: int | None = None, + *, + fix_imports: bool = True, + buffer_callback: _BufferCallback = None, + ) -> None: ... 
+ def dumps( + obj: Any, protocol: int | None = None, *, fix_imports: bool = True, buffer_callback: _BufferCallback = None + ) -> bytes: ... + def load( + file: _ReadableFileobj, + *, + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", + buffers: Iterable[Any] | None = ..., + ) -> Any: ... + def loads( + __data: ReadableBuffer, + *, + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", + buffers: Iterable[Any] | None = ..., + ) -> Any: ... + +else: + def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None, *, fix_imports: bool = True) -> None: ... + def dumps(obj: Any, protocol: int | None = None, *, fix_imports: bool = True) -> bytes: ... + def load(file: _ReadableFileobj, *, fix_imports: bool = True, encoding: str = "ASCII", errors: str = "strict") -> Any: ... + def loads(data: ReadableBuffer, *, fix_imports: bool = True, encoding: str = "ASCII", errors: str = "strict") -> Any: ... + +class PickleError(Exception): ... +class PicklingError(PickleError): ... +class UnpicklingError(PickleError): ... + +_ReducedType: TypeAlias = ( + str + | tuple[Callable[..., Any], tuple[Any, ...]] + | tuple[Callable[..., Any], tuple[Any, ...], Any] + | tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None] + | tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None, Iterator[Any] | None] +) + +class Pickler: + fast: bool + dispatch_table: Mapping[type, Callable[[Any], _ReducedType]] + bin: bool # undocumented + dispatch: ClassVar[dict[type, Callable[[Unpickler, Any], None]]] # undocumented, _Pickler only + + if sys.version_info >= (3, 8): + def __init__( + self, + file: SupportsWrite[bytes], + protocol: int | None = ..., + *, + fix_imports: bool = ..., + buffer_callback: _BufferCallback = ..., + ) -> None: ... + def reducer_override(self, obj: Any) -> Any: ... + else: + def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... + + def dump(self, __obj: Any) -> None: ... + def clear_memo(self) -> None: ... + def persistent_id(self, obj: Any) -> Any: ... + +class Unpickler: + dispatch: ClassVar[dict[int, Callable[[Unpickler], None]]] # undocumented, _Unpickler only + + if sys.version_info >= (3, 8): + def __init__( + self, + file: _ReadableFileobj, + *, + fix_imports: bool = ..., + encoding: str = ..., + errors: str = ..., + buffers: Iterable[Any] | None = ..., + ) -> None: ... + else: + def __init__( + self, file: _ReadableFileobj, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ... + ) -> None: ... + + def load(self) -> Any: ... + def find_class(self, __module_name: str, __global_name: str) -> Any: ... + def persistent_load(self, pid: Any) -> Any: ... 
+ +MARK: bytes +STOP: bytes +POP: bytes +POP_MARK: bytes +DUP: bytes +FLOAT: bytes +INT: bytes +BININT: bytes +BININT1: bytes +LONG: bytes +BININT2: bytes +NONE: bytes +PERSID: bytes +BINPERSID: bytes +REDUCE: bytes +STRING: bytes +BINSTRING: bytes +SHORT_BINSTRING: bytes +UNICODE: bytes +BINUNICODE: bytes +APPEND: bytes +BUILD: bytes +GLOBAL: bytes +DICT: bytes +EMPTY_DICT: bytes +APPENDS: bytes +GET: bytes +BINGET: bytes +INST: bytes +LONG_BINGET: bytes +LIST: bytes +EMPTY_LIST: bytes +OBJ: bytes +PUT: bytes +BINPUT: bytes +LONG_BINPUT: bytes +SETITEM: bytes +TUPLE: bytes +EMPTY_TUPLE: bytes +SETITEMS: bytes +BINFLOAT: bytes + +TRUE: bytes +FALSE: bytes + +# protocol 2 +PROTO: bytes +NEWOBJ: bytes +EXT1: bytes +EXT2: bytes +EXT4: bytes +TUPLE1: bytes +TUPLE2: bytes +TUPLE3: bytes +NEWTRUE: bytes +NEWFALSE: bytes +LONG1: bytes +LONG4: bytes + +# protocol 3 +BINBYTES: bytes +SHORT_BINBYTES: bytes + +# protocol 4 +SHORT_BINUNICODE: bytes +BINUNICODE8: bytes +BINBYTES8: bytes +EMPTY_SET: bytes +ADDITEMS: bytes +FROZENSET: bytes +NEWOBJ_EX: bytes +STACK_GLOBAL: bytes +MEMOIZE: bytes +FRAME: bytes + +if sys.version_info >= (3, 8): + # Protocol 5 + BYTEARRAY8: bytes + NEXT_BUFFER: bytes + READONLY_BUFFER: bytes + +def encode_long(x: int) -> bytes: ... # undocumented +def decode_long(data: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer) -> int: ... # undocumented + +# pure-Python implementations +_Pickler = Pickler # undocumented +_Unpickler = Unpickler # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pickletools.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pickletools.pyi new file mode 100644 index 00000000..54217281 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pickletools.pyi @@ -0,0 +1,167 @@ +from collections.abc import Callable, Iterator, MutableMapping +from typing import IO, Any +from typing_extensions import TypeAlias + +__all__ = ["dis", "genops", "optimize"] + +_Reader: TypeAlias = Callable[[IO[bytes]], Any] +bytes_types: tuple[type[Any], ...] + +UP_TO_NEWLINE: int +TAKEN_FROM_ARGUMENT1: int +TAKEN_FROM_ARGUMENT4: int +TAKEN_FROM_ARGUMENT4U: int +TAKEN_FROM_ARGUMENT8U: int + +class ArgumentDescriptor: + name: str + n: int + reader: _Reader + doc: str + def __init__(self, name: str, n: int, reader: _Reader, doc: str) -> None: ... + +def read_uint1(f: IO[bytes]) -> int: ... + +uint1: ArgumentDescriptor + +def read_uint2(f: IO[bytes]) -> int: ... + +uint2: ArgumentDescriptor + +def read_int4(f: IO[bytes]) -> int: ... + +int4: ArgumentDescriptor + +def read_uint4(f: IO[bytes]) -> int: ... + +uint4: ArgumentDescriptor + +def read_uint8(f: IO[bytes]) -> int: ... + +uint8: ArgumentDescriptor + +def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True) -> bytes | str: ... + +stringnl: ArgumentDescriptor + +def read_stringnl_noescape(f: IO[bytes]) -> str: ... + +stringnl_noescape: ArgumentDescriptor + +def read_stringnl_noescape_pair(f: IO[bytes]) -> str: ... + +stringnl_noescape_pair: ArgumentDescriptor + +def read_string1(f: IO[bytes]) -> str: ... + +string1: ArgumentDescriptor + +def read_string4(f: IO[bytes]) -> str: ... + +string4: ArgumentDescriptor + +def read_bytes1(f: IO[bytes]) -> bytes: ... + +bytes1: ArgumentDescriptor + +def read_bytes4(f: IO[bytes]) -> bytes: ... + +bytes4: ArgumentDescriptor + +def read_bytes8(f: IO[bytes]) -> bytes: ... 
+ +bytes8: ArgumentDescriptor + +def read_unicodestringnl(f: IO[bytes]) -> str: ... + +unicodestringnl: ArgumentDescriptor + +def read_unicodestring1(f: IO[bytes]) -> str: ... + +unicodestring1: ArgumentDescriptor + +def read_unicodestring4(f: IO[bytes]) -> str: ... + +unicodestring4: ArgumentDescriptor + +def read_unicodestring8(f: IO[bytes]) -> str: ... + +unicodestring8: ArgumentDescriptor + +def read_decimalnl_short(f: IO[bytes]) -> int: ... +def read_decimalnl_long(f: IO[bytes]) -> int: ... + +decimalnl_short: ArgumentDescriptor +decimalnl_long: ArgumentDescriptor + +def read_floatnl(f: IO[bytes]) -> float: ... + +floatnl: ArgumentDescriptor + +def read_float8(f: IO[bytes]) -> float: ... + +float8: ArgumentDescriptor + +def read_long1(f: IO[bytes]) -> int: ... + +long1: ArgumentDescriptor + +def read_long4(f: IO[bytes]) -> int: ... + +long4: ArgumentDescriptor + +class StackObject: + name: str + obtype: type[Any] | tuple[type[Any], ...] + doc: str + def __init__(self, name: str, obtype: type[Any] | tuple[type[Any], ...], doc: str) -> None: ... + +pyint: StackObject +pylong: StackObject +pyinteger_or_bool: StackObject +pybool: StackObject +pyfloat: StackObject +pybytes_or_str: StackObject +pystring: StackObject +pybytes: StackObject +pyunicode: StackObject +pynone: StackObject +pytuple: StackObject +pylist: StackObject +pydict: StackObject +pyset: StackObject +pyfrozenset: StackObject +anyobject: StackObject +markobject: StackObject +stackslice: StackObject + +class OpcodeInfo: + name: str + code: str + arg: ArgumentDescriptor | None + stack_before: list[StackObject] + stack_after: list[StackObject] + proto: int + doc: str + def __init__( + self, + name: str, + code: str, + arg: ArgumentDescriptor | None, + stack_before: list[StackObject], + stack_after: list[StackObject], + proto: int, + doc: str, + ) -> None: ... + +opcodes: list[OpcodeInfo] + +def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... +def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: ... +def dis( + pickle: bytes | bytearray | IO[bytes], + out: IO[str] | None = None, + memo: MutableMapping[int, Any] | None = None, + indentlevel: int = 4, + annotate: int = 0, +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pipes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pipes.pyi new file mode 100644 index 00000000..fe680bfd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pipes.pyi @@ -0,0 +1,16 @@ +import os + +__all__ = ["Template"] + +class Template: + def reset(self) -> None: ... + def clone(self) -> Template: ... + def debug(self, flag: bool) -> None: ... + def append(self, cmd: str, kind: str) -> None: ... + def prepend(self, cmd: str, kind: str) -> None: ... + def open(self, file: str, rw: str) -> os._wrap_close: ... + def copy(self, infile: str, outfile: str) -> int: ... + +# Not documented, but widely used. +# Documented as shlex.quote since 3.3. +def quote(s: str) -> str: ... 
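Editor's note, not part of the vendored typeshed data: pickletools, stubbed above, operates on the same opcode stream that pickle emits. A short sketch of the usual round trip through dumps(), optimize() and genops(), assuming nothing beyond the standard library:

import pickle
import pickletools

blob = pickle.dumps({"answer": 42}, protocol=pickle.HIGHEST_PROTOCOL)

# optimize() strips unused PUT opcodes and returns a new, usually smaller, pickle
small = pickletools.optimize(blob)
assert pickle.loads(small) == {"answer": 42}

# genops() yields (OpcodeInfo, argument, byte offset) triples
names = [info.name for info, arg, pos in pickletools.genops(small)]
assert names[0] == "PROTO" and names[-1] == "STOP"

# dis() pretty-prints the same opcode stream (to stdout, or to the "out" stream)
pickletools.dis(small)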
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pkgutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pkgutil.pyi new file mode 100644 index 00000000..f9808c9e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pkgutil.pyi @@ -0,0 +1,49 @@ +import sys +from _typeshed import SupportsRead +from collections.abc import Callable, Iterable, Iterator +from importlib.abc import Loader, MetaPathFinder, PathEntryFinder +from typing import IO, Any, NamedTuple, TypeVar + +__all__ = [ + "get_importer", + "iter_importers", + "get_loader", + "find_loader", + "walk_packages", + "iter_modules", + "get_data", + "ImpImporter", + "ImpLoader", + "read_code", + "extend_path", + "ModuleInfo", +] + +_PathT = TypeVar("_PathT", bound=Iterable[str]) + +class ModuleInfo(NamedTuple): + module_finder: MetaPathFinder | PathEntryFinder + name: str + ispkg: bool + +def extend_path(path: _PathT, name: str) -> _PathT: ... + +class ImpImporter: + def __init__(self, path: str | None = None) -> None: ... + +class ImpLoader: + def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ... + +def find_loader(fullname: str) -> Loader | None: ... +def get_importer(path_item: str) -> PathEntryFinder | None: ... +def get_loader(module_or_name: str) -> Loader | None: ... +def iter_importers(fullname: str = "") -> Iterator[MetaPathFinder | PathEntryFinder]: ... +def iter_modules(path: Iterable[str] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ... +def read_code(stream: SupportsRead[bytes]) -> Any: ... # undocumented +def walk_packages( + path: Iterable[str] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None +) -> Iterator[ModuleInfo]: ... +def get_data(package: str, resource: str) -> bytes | None: ... + +if sys.version_info >= (3, 9): + def resolve_name(name: str) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/platform.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/platform.pyi new file mode 100644 index 00000000..291f302b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/platform.pyi @@ -0,0 +1,69 @@ +import sys + +if sys.version_info < (3, 8): + import os + + DEV_NULL = os.devnull +from typing import NamedTuple + +if sys.version_info >= (3, 8): + def libc_ver(executable: str | None = None, lib: str = "", version: str = "", chunksize: int = 16384) -> tuple[str, str]: ... + +else: + def libc_ver( + executable: str = sys.executable, lib: str = "", version: str = "", chunksize: int = 16384 + ) -> tuple[str, str]: ... + +if sys.version_info < (3, 8): + def linux_distribution( + distname: str = "", + version: str = "", + id: str = "", + supported_dists: tuple[str, ...] = ..., + full_distribution_name: bool = ..., + ) -> tuple[str, str, str]: ... + def dist( + distname: str = "", version: str = "", id: str = "", supported_dists: tuple[str, ...] = ... + ) -> tuple[str, str, str]: ... + +def win32_ver(release: str = "", version: str = "", csd: str = "", ptype: str = "") -> tuple[str, str, str, str]: ... + +if sys.version_info >= (3, 8): + def win32_edition() -> str: ... + def win32_is_iot() -> bool: ... + +def mac_ver( + release: str = "", versioninfo: tuple[str, str, str] = ..., machine: str = "" +) -> tuple[str, tuple[str, str, str], str]: ... 
+def java_ver( + release: str = "", vendor: str = "", vminfo: tuple[str, str, str] = ..., osinfo: tuple[str, str, str] = ... +) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... +def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: ... +def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: ... + +class uname_result(NamedTuple): + system: str + node: str + release: str + version: str + machine: str + processor: str + +def uname() -> uname_result: ... +def system() -> str: ... +def node() -> str: ... +def release() -> str: ... +def version() -> str: ... +def machine() -> str: ... +def processor() -> str: ... +def python_implementation() -> str: ... +def python_version() -> str: ... +def python_version_tuple() -> tuple[str, str, str]: ... +def python_branch() -> str: ... +def python_revision() -> str: ... +def python_build() -> tuple[str, str]: ... +def python_compiler() -> str: ... +def platform(aliased: bool = ..., terse: bool = ...) -> str: ... + +if sys.version_info >= (3, 10): + def freedesktop_os_release() -> dict[str, str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/plistlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/plistlib.pyi new file mode 100644 index 00000000..5b76c935 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/plistlib.pyi @@ -0,0 +1,108 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Mapping, MutableMapping +from datetime import datetime +from enum import Enum +from typing import IO, Any +from typing_extensions import Self + +if sys.version_info >= (3, 9): + __all__ = ["InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"] +elif sys.version_info >= (3, 8): + __all__ = [ + "readPlist", + "writePlist", + "readPlistFromBytes", + "writePlistToBytes", + "Data", + "InvalidFileException", + "FMT_XML", + "FMT_BINARY", + "load", + "dump", + "loads", + "dumps", + "UID", + ] +else: + __all__ = [ + "readPlist", + "writePlist", + "readPlistFromBytes", + "writePlistToBytes", + "Data", + "InvalidFileException", + "FMT_XML", + "FMT_BINARY", + "load", + "dump", + "loads", + "dumps", + ] + +class PlistFormat(Enum): + FMT_XML: int + FMT_BINARY: int + +FMT_XML = PlistFormat.FMT_XML +FMT_BINARY = PlistFormat.FMT_BINARY + +if sys.version_info >= (3, 9): + def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def loads( + value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... + ) -> Any: ... + +else: + def load( + fp: IO[bytes], + *, + fmt: PlistFormat | None = None, + use_builtin_types: bool = True, + dict_type: type[MutableMapping[str, Any]] = ..., + ) -> Any: ... + def loads( + value: ReadableBuffer, + *, + fmt: PlistFormat | None = None, + use_builtin_types: bool = True, + dict_type: type[MutableMapping[str, Any]] = ..., + ) -> Any: ... + +def dump( + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, + fp: IO[bytes], + *, + fmt: PlistFormat = ..., + sort_keys: bool = True, + skipkeys: bool = False, +) -> None: ... +def dumps( + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] 
| str | bool | float | bytes | bytearray | datetime, + *, + fmt: PlistFormat = ..., + skipkeys: bool = False, + sort_keys: bool = True, +) -> bytes: ... + +if sys.version_info < (3, 9): + def readPlist(pathOrFile: str | IO[bytes]) -> Any: ... + def writePlist(value: Mapping[str, Any], pathOrFile: str | IO[bytes]) -> None: ... + def readPlistFromBytes(data: ReadableBuffer) -> Any: ... + def writePlistToBytes(value: Mapping[str, Any]) -> bytes: ... + +if sys.version_info < (3, 9): + class Data: + data: bytes + def __init__(self, data: bytes) -> None: ... + +if sys.version_info >= (3, 8): + class UID: + data: int + def __init__(self, data: int) -> None: ... + def __index__(self) -> int: ... + def __reduce__(self) -> tuple[type[Self], tuple[int]]: ... + def __eq__(self, other: object) -> bool: ... + +class InvalidFileException(ValueError): + def __init__(self, message: str = "Invalid file") -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/poplib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/poplib.pyi new file mode 100644 index 00000000..c64e47e8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/poplib.pyi @@ -0,0 +1,64 @@ +import socket +import ssl +from builtins import list as _list # conflicts with a method named "list" +from re import Pattern +from typing import Any, BinaryIO, NoReturn, overload +from typing_extensions import Literal, TypeAlias + +__all__ = ["POP3", "error_proto", "POP3_SSL"] + +_LongResp: TypeAlias = tuple[bytes, list[bytes], int] + +class error_proto(Exception): ... + +POP3_PORT: Literal[110] +POP3_SSL_PORT: Literal[995] +CR: Literal[b"\r"] +LF: Literal[b"\n"] +CRLF: Literal[b"\r\n"] +HAVE_SSL: bool + +class POP3: + encoding: str + host: str + port: int + sock: socket.socket + file: BinaryIO + welcome: bytes + def __init__(self, host: str, port: int = 110, timeout: float = ...) -> None: ... + def getwelcome(self) -> bytes: ... + def set_debuglevel(self, level: int) -> None: ... + def user(self, user: str) -> bytes: ... + def pass_(self, pswd: str) -> bytes: ... + def stat(self) -> tuple[int, int]: ... + def list(self, which: Any | None = None) -> _LongResp: ... + def retr(self, which: Any) -> _LongResp: ... + def dele(self, which: Any) -> bytes: ... + def noop(self) -> bytes: ... + def rset(self) -> bytes: ... + def quit(self) -> bytes: ... + def close(self) -> None: ... + def rpop(self, user: str) -> bytes: ... + timestamp: Pattern[str] + def apop(self, user: str, password: str) -> bytes: ... + def top(self, which: Any, howmuch: int) -> _LongResp: ... + @overload + def uidl(self) -> _LongResp: ... + @overload + def uidl(self, which: Any) -> bytes: ... + def utf8(self) -> bytes: ... + def capa(self) -> dict[str, _list[str]]: ... + def stls(self, context: ssl.SSLContext | None = None) -> bytes: ... + +class POP3_SSL(POP3): + def __init__( + self, + host: str, + port: int = 995, + keyfile: str | None = None, + certfile: str | None = None, + timeout: float = ..., + context: ssl.SSLContext | None = None, + ) -> None: ... + # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored + def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ... 
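Editor's note, not part of the vendored typeshed data: a quick sketch of the plistlib round trip that the dump/dumps signatures above describe (the dictionary contents are made up for illustration):

import plistlib

payload = {"name": "example", "enabled": True, "runs": 3}   # illustrative values

data = plistlib.dumps(payload, fmt=plistlib.FMT_XML, sort_keys=True)
assert data.startswith(b"<?xml")
assert plistlib.loads(data) == payload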
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/posix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/posix.pyi new file mode 100644 index 00000000..ffd96757 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/posix.pyi @@ -0,0 +1,319 @@ +import sys + +if sys.platform != "win32": + # Actually defined here, but defining in os allows sharing code with windows + from os import ( + CLD_CONTINUED as CLD_CONTINUED, + CLD_DUMPED as CLD_DUMPED, + CLD_EXITED as CLD_EXITED, + CLD_TRAPPED as CLD_TRAPPED, + EX_CANTCREAT as EX_CANTCREAT, + EX_CONFIG as EX_CONFIG, + EX_DATAERR as EX_DATAERR, + EX_IOERR as EX_IOERR, + EX_NOHOST as EX_NOHOST, + EX_NOINPUT as EX_NOINPUT, + EX_NOPERM as EX_NOPERM, + EX_NOTFOUND as EX_NOTFOUND, + EX_NOUSER as EX_NOUSER, + EX_OK as EX_OK, + EX_OSERR as EX_OSERR, + EX_OSFILE as EX_OSFILE, + EX_PROTOCOL as EX_PROTOCOL, + EX_SOFTWARE as EX_SOFTWARE, + EX_TEMPFAIL as EX_TEMPFAIL, + EX_UNAVAILABLE as EX_UNAVAILABLE, + EX_USAGE as EX_USAGE, + F_LOCK as F_LOCK, + F_OK as F_OK, + F_TEST as F_TEST, + F_TLOCK as F_TLOCK, + F_ULOCK as F_ULOCK, + O_APPEND as O_APPEND, + O_ASYNC as O_ASYNC, + O_CREAT as O_CREAT, + O_DIRECT as O_DIRECT, + O_DIRECTORY as O_DIRECTORY, + O_DSYNC as O_DSYNC, + O_EXCL as O_EXCL, + O_LARGEFILE as O_LARGEFILE, + O_NDELAY as O_NDELAY, + O_NOATIME as O_NOATIME, + O_NOCTTY as O_NOCTTY, + O_NOFOLLOW as O_NOFOLLOW, + O_NONBLOCK as O_NONBLOCK, + O_RDONLY as O_RDONLY, + O_RDWR as O_RDWR, + O_RSYNC as O_RSYNC, + O_SYNC as O_SYNC, + O_TRUNC as O_TRUNC, + O_WRONLY as O_WRONLY, + P_ALL as P_ALL, + P_PGID as P_PGID, + P_PID as P_PID, + PRIO_PGRP as PRIO_PGRP, + PRIO_PROCESS as PRIO_PROCESS, + PRIO_USER as PRIO_USER, + R_OK as R_OK, + RTLD_GLOBAL as RTLD_GLOBAL, + RTLD_LAZY as RTLD_LAZY, + RTLD_LOCAL as RTLD_LOCAL, + RTLD_NODELETE as RTLD_NODELETE, + RTLD_NOLOAD as RTLD_NOLOAD, + RTLD_NOW as RTLD_NOW, + SCHED_BATCH as SCHED_BATCH, + SCHED_FIFO as SCHED_FIFO, + SCHED_IDLE as SCHED_IDLE, + SCHED_OTHER as SCHED_OTHER, + SCHED_RESET_ON_FORK as SCHED_RESET_ON_FORK, + SCHED_RR as SCHED_RR, + SCHED_SPORADIC as SCHED_SPORADIC, + SEEK_DATA as SEEK_DATA, + SEEK_HOLE as SEEK_HOLE, + ST_NOSUID as ST_NOSUID, + ST_RDONLY as ST_RDONLY, + TMP_MAX as TMP_MAX, + W_OK as W_OK, + WCONTINUED as WCONTINUED, + WCOREDUMP as WCOREDUMP, + WEXITED as WEXITED, + WEXITSTATUS as WEXITSTATUS, + WIFCONTINUED as WIFCONTINUED, + WIFEXITED as WIFEXITED, + WIFSIGNALED as WIFSIGNALED, + WIFSTOPPED as WIFSTOPPED, + WNOHANG as WNOHANG, + WNOWAIT as WNOWAIT, + WSTOPPED as WSTOPPED, + WSTOPSIG as WSTOPSIG, + WTERMSIG as WTERMSIG, + WUNTRACED as WUNTRACED, + X_OK as X_OK, + DirEntry as DirEntry, + _exit as _exit, + abort as abort, + access as access, + chdir as chdir, + chmod as chmod, + chown as chown, + chroot as chroot, + close as close, + closerange as closerange, + confstr as confstr, + confstr_names as confstr_names, + cpu_count as cpu_count, + ctermid as ctermid, + device_encoding as device_encoding, + dup as dup, + dup2 as dup2, + error as error, + execv as execv, + execve as execve, + fchdir as fchdir, + fchmod as fchmod, + fchown as fchown, + fork as fork, + forkpty as forkpty, + fpathconf as fpathconf, + fspath as fspath, + fstat as fstat, + fstatvfs as fstatvfs, + fsync as fsync, + ftruncate as ftruncate, + get_blocking as get_blocking, + get_inheritable as get_inheritable, + get_terminal_size as get_terminal_size, + getcwd as getcwd, + getcwdb as getcwdb, + getegid as getegid, + geteuid as 
geteuid, + getgid as getgid, + getgrouplist as getgrouplist, + getgroups as getgroups, + getloadavg as getloadavg, + getlogin as getlogin, + getpgid as getpgid, + getpgrp as getpgrp, + getpid as getpid, + getppid as getppid, + getpriority as getpriority, + getsid as getsid, + getuid as getuid, + initgroups as initgroups, + isatty as isatty, + kill as kill, + killpg as killpg, + lchown as lchown, + link as link, + listdir as listdir, + lockf as lockf, + lseek as lseek, + lstat as lstat, + major as major, + makedev as makedev, + minor as minor, + mkdir as mkdir, + mkfifo as mkfifo, + mknod as mknod, + nice as nice, + open as open, + openpty as openpty, + pathconf as pathconf, + pathconf_names as pathconf_names, + pipe as pipe, + pread as pread, + putenv as putenv, + pwrite as pwrite, + read as read, + readlink as readlink, + readv as readv, + remove as remove, + rename as rename, + replace as replace, + rmdir as rmdir, + scandir as scandir, + sched_get_priority_max as sched_get_priority_max, + sched_get_priority_min as sched_get_priority_min, + sched_param as sched_param, + sched_yield as sched_yield, + sendfile as sendfile, + set_blocking as set_blocking, + set_inheritable as set_inheritable, + setegid as setegid, + seteuid as seteuid, + setgid as setgid, + setgroups as setgroups, + setpgid as setpgid, + setpgrp as setpgrp, + setpriority as setpriority, + setregid as setregid, + setreuid as setreuid, + setsid as setsid, + setuid as setuid, + stat as stat, + stat_result as stat_result, + statvfs as statvfs, + statvfs_result as statvfs_result, + strerror as strerror, + symlink as symlink, + sync as sync, + sysconf as sysconf, + sysconf_names as sysconf_names, + system as system, + tcgetpgrp as tcgetpgrp, + tcsetpgrp as tcsetpgrp, + terminal_size as terminal_size, + times as times, + times_result as times_result, + truncate as truncate, + ttyname as ttyname, + umask as umask, + uname as uname, + uname_result as uname_result, + unlink as unlink, + unsetenv as unsetenv, + urandom as urandom, + utime as utime, + wait as wait, + wait3 as wait3, + wait4 as wait4, + waitpid as waitpid, + write as write, + writev as writev, + ) + + if sys.platform == "linux": + from os import ( + GRND_NONBLOCK as GRND_NONBLOCK, + GRND_RANDOM as GRND_RANDOM, + RTLD_DEEPBIND as RTLD_DEEPBIND, + XATTR_CREATE as XATTR_CREATE, + XATTR_REPLACE as XATTR_REPLACE, + XATTR_SIZE_MAX as XATTR_SIZE_MAX, + getrandom as getrandom, + getxattr as getxattr, + listxattr as listxattr, + removexattr as removexattr, + setxattr as setxattr, + ) + else: + from os import chflags as chflags, lchflags as lchflags, lchmod as lchmod + + if sys.platform != "darwin": + from os import ( + POSIX_FADV_DONTNEED as POSIX_FADV_DONTNEED, + POSIX_FADV_NOREUSE as POSIX_FADV_NOREUSE, + POSIX_FADV_NORMAL as POSIX_FADV_NORMAL, + POSIX_FADV_RANDOM as POSIX_FADV_RANDOM, + POSIX_FADV_SEQUENTIAL as POSIX_FADV_SEQUENTIAL, + POSIX_FADV_WILLNEED as POSIX_FADV_WILLNEED, + fdatasync as fdatasync, + getresgid as getresgid, + getresuid as getresuid, + pipe2 as pipe2, + posix_fadvise as posix_fadvise, + posix_fallocate as posix_fallocate, + sched_getaffinity as sched_getaffinity, + sched_getparam as sched_getparam, + sched_getscheduler as sched_getscheduler, + sched_rr_get_interval as sched_rr_get_interval, + sched_setaffinity as sched_setaffinity, + sched_setparam as sched_setparam, + sched_setscheduler as sched_setscheduler, + setresgid as setresgid, + setresuid as setresuid, + waitid as waitid, + waitid_result as waitid_result, + ) + + if sys.version_info >= (3, 10): 
+ from os import RWF_APPEND as RWF_APPEND + + if sys.version_info >= (3, 11): + from os import login_tty as login_tty + + if sys.version_info >= (3, 9): + from os import CLD_KILLED as CLD_KILLED, CLD_STOPPED as CLD_STOPPED, waitstatus_to_exitcode as waitstatus_to_exitcode + + if sys.platform == "linux": + from os import P_PIDFD as P_PIDFD, pidfd_open as pidfd_open + + if sys.version_info >= (3, 8): + from os import ( + POSIX_SPAWN_CLOSE as POSIX_SPAWN_CLOSE, + POSIX_SPAWN_DUP2 as POSIX_SPAWN_DUP2, + POSIX_SPAWN_OPEN as POSIX_SPAWN_OPEN, + posix_spawn as posix_spawn, + posix_spawnp as posix_spawnp, + ) + + if sys.platform == "linux": + from os import ( + MFD_ALLOW_SEALING as MFD_ALLOW_SEALING, + MFD_CLOEXEC as MFD_CLOEXEC, + MFD_HUGE_1GB as MFD_HUGE_1GB, + MFD_HUGE_1MB as MFD_HUGE_1MB, + MFD_HUGE_2GB as MFD_HUGE_2GB, + MFD_HUGE_2MB as MFD_HUGE_2MB, + MFD_HUGE_8MB as MFD_HUGE_8MB, + MFD_HUGE_16GB as MFD_HUGE_16GB, + MFD_HUGE_16MB as MFD_HUGE_16MB, + MFD_HUGE_32MB as MFD_HUGE_32MB, + MFD_HUGE_64KB as MFD_HUGE_64KB, + MFD_HUGE_256MB as MFD_HUGE_256MB, + MFD_HUGE_512KB as MFD_HUGE_512KB, + MFD_HUGE_512MB as MFD_HUGE_512MB, + MFD_HUGE_MASK as MFD_HUGE_MASK, + MFD_HUGE_SHIFT as MFD_HUGE_SHIFT, + MFD_HUGETLB as MFD_HUGETLB, + copy_file_range as copy_file_range, + memfd_create as memfd_create, + ) + from os import preadv as preadv, pwritev as pwritev, register_at_fork as register_at_fork + + if sys.platform != "darwin": + from os import RWF_DSYNC as RWF_DSYNC, RWF_HIPRI as RWF_HIPRI, RWF_NOWAIT as RWF_NOWAIT, RWF_SYNC as RWF_SYNC + + # Not same as os.environ or os.environb + # Because of this variable, we can't do "from posix import *" in os/__init__.pyi + environ: dict[bytes, bytes] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/posixpath.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/posixpath.pyi new file mode 100644 index 00000000..1945190b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/posixpath.pyi @@ -0,0 +1,152 @@ +import sys +from _typeshed import AnyOrLiteralStr, BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath +from collections.abc import Sequence +from genericpath import ( + commonprefix as commonprefix, + exists as exists, + getatime as getatime, + getctime as getctime, + getmtime as getmtime, + getsize as getsize, + isdir as isdir, + isfile as isfile, + samefile as samefile, + sameopenfile as sameopenfile, + samestat as samestat, +) +from os import PathLike +from typing import AnyStr, overload +from typing_extensions import LiteralString + +__all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "ismount", + "expanduser", + "expandvars", + "normpath", + "abspath", + "samefile", + "sameopenfile", + "samestat", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", + "relpath", + "commonpath", +] + +supports_unicode_filenames: bool +# aliases (also in os) +curdir: LiteralString +pardir: LiteralString +sep: LiteralString +altsep: LiteralString | None +extsep: LiteralString +pathsep: LiteralString +defpath: LiteralString +devnull: LiteralString + +# Overloads are necessary to work around python/mypy#3644. +@overload +def abspath(path: PathLike[AnyStr]) -> AnyStr: ... 
+@overload +def abspath(path: AnyStr) -> AnyStr: ... +@overload +def basename(p: PathLike[AnyStr]) -> AnyStr: ... +@overload +def basename(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... +@overload +def dirname(p: PathLike[AnyStr]) -> AnyStr: ... +@overload +def dirname(p: AnyOrLiteralStr) -> AnyOrLiteralStr: ... +@overload +def expanduser(path: PathLike[AnyStr]) -> AnyStr: ... +@overload +def expanduser(path: AnyStr) -> AnyStr: ... +@overload +def expandvars(path: PathLike[AnyStr]) -> AnyStr: ... +@overload +def expandvars(path: AnyStr) -> AnyStr: ... +@overload +def normcase(s: PathLike[AnyStr]) -> AnyStr: ... +@overload +def normcase(s: AnyOrLiteralStr) -> AnyOrLiteralStr: ... +@overload +def normpath(path: PathLike[AnyStr]) -> AnyStr: ... +@overload +def normpath(path: AnyOrLiteralStr) -> AnyOrLiteralStr: ... +@overload +def commonpath(paths: Sequence[LiteralString]) -> LiteralString: ... +@overload +def commonpath(paths: Sequence[StrPath]) -> str: ... +@overload +def commonpath(paths: Sequence[BytesPath]) -> bytes: ... + +# First parameter is not actually pos-only, +# but must be defined as pos-only in the stub or cross-platform code doesn't type-check, +# as the parameter name is different in ntpath.join() +@overload +def join(__a: LiteralString, *paths: LiteralString) -> LiteralString: ... +@overload +def join(__a: StrPath, *paths: StrPath) -> str: ... +@overload +def join(__a: BytesPath, *paths: BytesPath) -> bytes: ... + +if sys.version_info >= (3, 10): + @overload + def realpath(filename: PathLike[AnyStr], *, strict: bool = False) -> AnyStr: ... + @overload + def realpath(filename: AnyStr, *, strict: bool = False) -> AnyStr: ... + +else: + @overload + def realpath(filename: PathLike[AnyStr]) -> AnyStr: ... + @overload + def realpath(filename: AnyStr) -> AnyStr: ... + +@overload +def relpath(path: LiteralString, start: LiteralString | None = None) -> LiteralString: ... +@overload +def relpath(path: BytesPath, start: BytesPath | None = None) -> bytes: ... +@overload +def relpath(path: StrPath, start: StrPath | None = None) -> str: ... +@overload +def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +@overload +def split(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... +@overload +def splitdrive(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +@overload +def splitdrive(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... +@overload +def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... +@overload +def splitext(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... +def isabs(s: StrOrBytesPath) -> bool: ... +def islink(path: FileDescriptorOrPath) -> bool: ... +def ismount(path: FileDescriptorOrPath) -> bool: ... +def lexists(path: FileDescriptorOrPath) -> bool: ... 
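Editor's note, not part of the vendored typeshed data: posixpath, stubbed above, is the pure-Python POSIX flavour behind os.path, so it behaves the same on every platform; a few illustrative calls:

import posixpath

assert posixpath.join("/usr", "lib", "python3") == "/usr/lib/python3"
assert posixpath.splitext("archive.tar.gz") == ("archive.tar", ".gz")
assert posixpath.commonpath(["/usr/lib", "/usr/local/lib"]) == "/usr"
assert posixpath.normpath("/usr//lib/../bin") == "/usr/bin"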
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pprint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pprint.pyi new file mode 100644 index 00000000..5a909c69 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pprint.pyi @@ -0,0 +1,139 @@ +import sys +from typing import IO + +if sys.version_info >= (3, 8): + __all__ = ["pprint", "pformat", "isreadable", "isrecursive", "saferepr", "PrettyPrinter", "pp"] +else: + __all__ = ["pprint", "pformat", "isreadable", "isrecursive", "saferepr", "PrettyPrinter"] + +if sys.version_info >= (3, 10): + def pformat( + object: object, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, + ) -> str: ... + +elif sys.version_info >= (3, 8): + def pformat( + object: object, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + ) -> str: ... + +else: + def pformat(object: object, indent: int = 1, width: int = 80, depth: int | None = None, *, compact: bool = False) -> str: ... + +if sys.version_info >= (3, 10): + def pp( + object: object, + stream: IO[str] | None = ..., + indent: int = ..., + width: int = ..., + depth: int | None = ..., + *, + compact: bool = ..., + sort_dicts: bool = False, + underscore_numbers: bool = ..., + ) -> None: ... + +elif sys.version_info >= (3, 8): + def pp( + object: object, + stream: IO[str] | None = ..., + indent: int = ..., + width: int = ..., + depth: int | None = ..., + *, + compact: bool = ..., + sort_dicts: bool = False, + ) -> None: ... + +if sys.version_info >= (3, 10): + def pprint( + object: object, + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, + ) -> None: ... + +elif sys.version_info >= (3, 8): + def pprint( + object: object, + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + ) -> None: ... + +else: + def pprint( + object: object, + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, + *, + compact: bool = False, + ) -> None: ... + +def isreadable(object: object) -> bool: ... +def isrecursive(object: object) -> bool: ... +def saferepr(object: object) -> str: ... + +class PrettyPrinter: + if sys.version_info >= (3, 10): + def __init__( + self, + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 8): + def __init__( + self, + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, + *, + compact: bool = False, + sort_dicts: bool = True, + ) -> None: ... + else: + def __init__( + self, + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, + *, + compact: bool = False, + ) -> None: ... + + def pformat(self, object: object) -> str: ... + def pprint(self, object: object) -> None: ... + def isreadable(self, object: object) -> bool: ... + def isrecursive(self, object: object) -> bool: ... 
+ def format(self, object: object, context: dict[int, int], maxlevels: int, level: int) -> tuple[str, bool, bool]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/profile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/profile.pyi new file mode 100644 index 00000000..6ae37500 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/profile.pyi @@ -0,0 +1,31 @@ +from _typeshed import StrOrBytesPath +from collections.abc import Callable +from typing import Any, TypeVar +from typing_extensions import ParamSpec, Self, TypeAlias + +__all__ = ["run", "runctx", "Profile"] + +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... +def runctx( + statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = None, sort: str | int = -1 +) -> None: ... + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_Label: TypeAlias = tuple[str, int, str] + +class Profile: + bias: int + stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented + def __init__(self, timer: Callable[[], float] | None = None, bias: int | None = None) -> None: ... + def set_cmd(self, cmd: str) -> None: ... + def simulate_call(self, name: str) -> None: ... + def simulate_cmd_complete(self) -> None: ... + def print_stats(self, sort: str | int = -1) -> None: ... + def dump_stats(self, file: StrOrBytesPath) -> None: ... + def create_stats(self) -> None: ... + def snapshot_stats(self) -> None: ... + def run(self, cmd: str) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... + def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... + def calibrate(self, m: int, verbose: int = 0) -> float: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pstats.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pstats.pyi new file mode 100644 index 00000000..5d25d1bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pstats.pyi @@ -0,0 +1,79 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Iterable +from cProfile import Profile as _cProfile +from enum import Enum +from profile import Profile +from typing import IO, Any, overload +from typing_extensions import Literal, Self, TypeAlias + +if sys.version_info >= (3, 9): + __all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"] +else: + __all__ = ["Stats", "SortKey"] + +_Selector: TypeAlias = str | float | int + +class SortKey(str, Enum): + CALLS: str + CUMULATIVE: str + FILENAME: str + LINE: str + NAME: str + NFL: str + PCALLS: str + STDNAME: str + TIME: str + +if sys.version_info >= (3, 9): + from dataclasses import dataclass + + @dataclass(unsafe_hash=True) + class FunctionProfile: + ncalls: str + tottime: float + percall_tottime: float + cumtime: float + percall_cumtime: float + file_name: str + line_number: int + @dataclass(unsafe_hash=True) + class StatsProfile: + total_tt: float + func_profiles: dict[str, FunctionProfile] + +_SortArgDict: TypeAlias = dict[str, tuple[tuple[tuple[int, int], ...], str]] + +class Stats: + sort_arg_dict_default: _SortArgDict + def __init__( + self, + __arg: None | str | Profile | _cProfile = ..., + *args: None | str | Profile | _cProfile | Self, + stream: IO[Any] | None = None, + ) -> None: ... 
+ def init(self, arg: None | str | Profile | _cProfile) -> None: ... + def load_stats(self, arg: None | str | Profile | _cProfile) -> None: ... + def get_top_level_stats(self) -> None: ... + def add(self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... + def dump_stats(self, filename: StrOrBytesPath) -> None: ... + def get_sort_arg_defs(self) -> _SortArgDict: ... + @overload + def sort_stats(self, field: Literal[-1, 0, 1, 2]) -> Self: ... + @overload + def sort_stats(self, *field: str) -> Self: ... + def reverse_order(self) -> Self: ... + def strip_dirs(self) -> Self: ... + def calc_callees(self) -> None: ... + def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> tuple[list[str], str]: ... + if sys.version_info >= (3, 9): + def get_stats_profile(self) -> StatsProfile: ... + + def get_print_list(self, sel_list: Iterable[_Selector]) -> tuple[int, list[str]]: ... + def print_stats(self, *amount: _Selector) -> Self: ... + def print_callees(self, *amount: _Selector) -> Self: ... + def print_callers(self, *amount: _Selector) -> Self: ... + def print_call_heading(self, name_size: int, column_title: str) -> None: ... + def print_call_line(self, name_size: int, source: str, call_dict: dict[str, Any], arrow: str = "->") -> None: ... + def print_title(self) -> None: ... + def print_line(self, func: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pty.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pty.pyi new file mode 100644 index 00000000..a6a2d8fa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pty.pyi @@ -0,0 +1,18 @@ +import sys +from collections.abc import Callable, Iterable +from typing_extensions import Literal, TypeAlias + +if sys.platform != "win32": + __all__ = ["openpty", "fork", "spawn"] + _Reader: TypeAlias = Callable[[int], bytes] + + STDIN_FILENO: Literal[0] + STDOUT_FILENO: Literal[1] + STDERR_FILENO: Literal[2] + + CHILD: Literal[0] + def openpty() -> tuple[int, int]: ... + def master_open() -> tuple[int, str]: ... # deprecated, use openpty() + def slave_open(tty_name: str) -> int: ... # deprecated, use openpty() + def fork() -> tuple[int, int]: ... + def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pwd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pwd.pyi new file mode 100644 index 00000000..80813479 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pwd.pyi @@ -0,0 +1,28 @@ +import sys +from _typeshed import structseq +from typing import Any +from typing_extensions import Final, final + +if sys.platform != "win32": + @final + class struct_passwd(structseq[Any], tuple[str, str, int, int, str, str, str]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("pw_name", "pw_passwd", "pw_uid", "pw_gid", "pw_gecos", "pw_dir", "pw_shell") + @property + def pw_name(self) -> str: ... + @property + def pw_passwd(self) -> str: ... + @property + def pw_uid(self) -> int: ... + @property + def pw_gid(self) -> int: ... + @property + def pw_gecos(self) -> str: ... + @property + def pw_dir(self) -> str: ... + @property + def pw_shell(self) -> str: ... + + def getpwall() -> list[struct_passwd]: ... + def getpwuid(__uid: int) -> struct_passwd: ... + def getpwnam(__name: str) -> struct_passwd: ... 
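Editor's note, not part of the vendored typeshed data: the Stats methods above (strip_dirs, sort_stats, print_stats) return Self, so they chain. A minimal profiling sketch, where busy_work() is an invented workload used only for illustration:

import cProfile
import io
import pstats
from pstats import SortKey

def busy_work(n: int) -> int:              # hypothetical workload
    return sum(i * i for i in range(n))

profiler = cProfile.Profile()
profiler.enable()
busy_work(100_000)
profiler.disable()

out = io.StringIO()
# Stats accepts a Profile instance and an optional output stream
pstats.Stats(profiler, stream=out).strip_dirs().sort_stats(SortKey.CUMULATIVE).print_stats(5)
print(out.getvalue())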
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/py_compile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/py_compile.pyi new file mode 100644 index 00000000..48f1d7dc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/py_compile.pyi @@ -0,0 +1,46 @@ +import enum +import sys +from typing import AnyStr + +__all__ = ["compile", "main", "PyCompileError", "PycInvalidationMode"] + +class PyCompileError(Exception): + exc_type_name: str + exc_value: BaseException + file: str + msg: str + def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = "") -> None: ... + +class PycInvalidationMode(enum.Enum): + TIMESTAMP: int + CHECKED_HASH: int + UNCHECKED_HASH: int + +def _get_default_invalidation_mode() -> PycInvalidationMode: ... + +if sys.version_info >= (3, 8): + def compile( + file: AnyStr, + cfile: AnyStr | None = None, + dfile: AnyStr | None = None, + doraise: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + quiet: int = 0, + ) -> AnyStr | None: ... + +else: + def compile( + file: AnyStr, + cfile: AnyStr | None = None, + dfile: AnyStr | None = None, + doraise: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + ) -> AnyStr | None: ... + +if sys.version_info >= (3, 10): + def main() -> None: ... + +else: + def main(args: list[str] | None = None) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyclbr.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyclbr.pyi new file mode 100644 index 00000000..38658a03 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyclbr.pyi @@ -0,0 +1,66 @@ +import sys +from collections.abc import Sequence + +__all__ = ["readmodule", "readmodule_ex", "Class", "Function"] + +class Class: + module: str + name: str + super: list[Class | str] | None + methods: dict[str, int] + file: int + lineno: int + + if sys.version_info >= (3, 10): + end_lineno: int | None + + parent: Class | None + children: dict[str, Class | Function] + + if sys.version_info >= (3, 10): + def __init__( + self, + module: str, + name: str, + super_: list[Class | str] | None, + file: str, + lineno: int, + parent: Class | None = None, + *, + end_lineno: int | None = None, + ) -> None: ... + else: + def __init__( + self, module: str, name: str, super: list[Class | str] | None, file: str, lineno: int, parent: Class | None = None + ) -> None: ... + +class Function: + module: str + name: str + file: int + lineno: int + + if sys.version_info >= (3, 10): + end_lineno: int | None + is_async: bool + + parent: Function | Class | None + children: dict[str, Class | Function] + + if sys.version_info >= (3, 10): + def __init__( + self, + module: str, + name: str, + file: str, + lineno: int, + parent: Function | Class | None = None, + is_async: bool = False, + *, + end_lineno: int | None = None, + ) -> None: ... + else: + def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = None) -> None: ... + +def readmodule(module: str, path: Sequence[str] | None = None) -> dict[str, Class]: ... +def readmodule_ex(module: str, path: Sequence[str] | None = None) -> dict[str, Class | Function | list[str]]: ... 
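Editor's note, not part of the vendored typeshed data: pyclbr, stubbed above, builds a coarse outline of a module's classes and functions without importing it. A small sketch run against a stdlib module (the choice of pickletools as the target is arbitrary):

import pyclbr

tree = pyclbr.readmodule_ex("pickletools")   # name -> Class | Function entries

for name, obj in sorted(tree.items()):
    if isinstance(obj, pyclbr.Class):
        print(f"class {name} (line {obj.lineno}) with {len(obj.methods)} methods")
    elif isinstance(obj, pyclbr.Function):
        print(f"def {name} (line {obj.lineno})")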
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pydoc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pydoc.pyi new file mode 100644 index 00000000..c6893d50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pydoc.pyi @@ -0,0 +1,243 @@ +import sys +from _typeshed import OptExcInfo, SupportsWrite +from abc import abstractmethod +from builtins import list as _list # "list" conflicts with method name +from collections.abc import Callable, Container, Mapping, MutableMapping +from reprlib import Repr +from types import MethodType, ModuleType, TracebackType +from typing import IO, Any, AnyStr, NoReturn, TypeVar +from typing_extensions import Final, TypeGuard + +__all__ = ["help"] + +_T = TypeVar("_T") + +__author__: Final[str] +__date__: Final[str] +__version__: Final[str] +__credits__: Final[str] + +def pathdirs() -> list[str]: ... +def getdoc(object: object) -> str: ... +def splitdoc(doc: AnyStr) -> tuple[AnyStr, AnyStr]: ... +def classname(object: object, modname: str) -> str: ... +def isdata(object: object) -> bool: ... +def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ... +def cram(text: str, maxlen: int) -> str: ... +def stripid(text: str) -> str: ... +def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... +def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: ... +def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: ... +def ispackage(path: str) -> bool: ... +def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: ... +def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = ...) -> str | None: ... + +class ErrorDuringImport(Exception): + filename: str + exc: type[BaseException] | None + value: BaseException | None + tb: TracebackType | None + def __init__(self, filename: str, exc_info: OptExcInfo) -> None: ... + +def importfile(path: str) -> ModuleType: ... +def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = ...) -> ModuleType: ... + +class Doc: + PYTHONDOCS: str + def document(self, object: object, name: str | None = None, *args: Any) -> str: ... + def fail(self, object: object, name: str | None = None, *args: Any) -> NoReturn: ... + @abstractmethod + def docmodule(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docclass(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docroutine(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docother(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docproperty(self, object: object, name: str | None = None, *args: Any) -> str: ... + @abstractmethod + def docdata(self, object: object, name: str | None = None, *args: Any) -> str: ... + def getdocloc(self, object: object, basedir: str = ...) -> str | None: ... + +class HTMLRepr(Repr): + def escape(self, text: str) -> str: ... + def repr(self, object: object) -> str: ... + def repr1(self, x: object, level: complex) -> str: ... + def repr_string(self, x: str, level: complex) -> str: ... + def repr_str(self, x: str, level: complex) -> str: ... + def repr_instance(self, x: object, level: complex) -> str: ... + def repr_unicode(self, x: AnyStr, level: complex) -> str: ... + +class HTMLDoc(Doc): + _repr_instance: HTMLRepr = ... 
+ repr = _repr_instance.repr + escape = _repr_instance.escape + def page(self, title: str, contents: str) -> str: ... + if sys.version_info >= (3, 11): + def heading(self, title: str, extras: str = "") -> str: ... + def section( + self, + title: str, + cls: str, + contents: str, + width: int = 6, + prelude: str = "", + marginalia: str | None = None, + gap: str = " ", + ) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: ... + else: + def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: ... + def section( + self, + title: str, + fgcol: str, + bgcol: str, + contents: str, + width: int = 6, + prelude: str = "", + marginalia: str | None = None, + gap: str = " ", + ) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: ... + + def bigsection(self, title: str, *args: Any) -> str: ... + def preformat(self, text: str) -> str: ... + def grey(self, text: str) -> str: ... + def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: ... + def classlink(self, object: object, modname: str) -> str: ... + def modulelink(self, object: object) -> str: ... + def modpkglink(self, modpkginfo: tuple[str, str, bool, bool]) -> str: ... + def markup( + self, + text: str, + escape: Callable[[str], str] | None = None, + funcs: Mapping[str, str] = ..., + classes: Mapping[str, str] = ..., + methods: Mapping[str, str] = ..., + ) -> str: ... + def formattree( + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None + ) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Any) -> str: ... + def docclass( + self, + object: object, + name: str | None = None, + mod: str | None = None, + funcs: Mapping[str, str] = ..., + classes: Mapping[str, str] = ..., + *ignored: Any, + ) -> str: ... + def formatvalue(self, object: object) -> str: ... + def docroutine( # type: ignore[override] + self, + object: object, + name: str | None = None, + mod: str | None = None, + funcs: Mapping[str, str] = ..., + classes: Mapping[str, str] = ..., + methods: Mapping[str, str] = ..., + cl: type | None = None, + ) -> str: ... + def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docother(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Any) -> str: ... + def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = None) -> str: ... + def filelink(self, url: str, path: str) -> str: ... + +class TextRepr(Repr): + def repr1(self, x: object, level: complex) -> str: ... + def repr_string(self, x: str, level: complex) -> str: ... + def repr_str(self, x: str, level: complex) -> str: ... + def repr_instance(self, x: object, level: complex) -> str: ... + +class TextDoc(Doc): + _repr_instance: TextRepr = ... + repr = _repr_instance.repr + def bold(self, text: str) -> str: ... + def indent(self, text: str, prefix: str = " ") -> str: ... + def section(self, title: str, contents: str) -> str: ... + def formattree( + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None, prefix: str = "" + ) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: ... 
# type: ignore[override] + def docclass(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Any) -> str: ... + def formatvalue(self, object: object) -> str: ... + def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docother( # type: ignore[override] + self, + object: object, + name: str | None = None, + mod: str | None = None, + parent: str | None = None, + maxlen: int | None = None, + doc: Any | None = None, + ) -> str: ... + +def pager(text: str) -> None: ... +def getpager() -> Callable[[str], None]: ... +def plain(text: str) -> str: ... +def pipepager(text: str, cmd: str) -> None: ... +def tempfilepager(text: str, cmd: str) -> None: ... +def ttypager(text: str) -> None: ... +def plainpager(text: str) -> None: ... +def describe(thing: Any) -> str: ... +def locate(path: str, forceload: bool = ...) -> object: ... + +text: TextDoc +html: HTMLDoc + +def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: ... +def render_doc( + thing: str | object, title: str = "Python Library Documentation: %s", forceload: bool = ..., renderer: Doc | None = None +) -> str: ... +def doc( + thing: str | object, + title: str = "Python Library Documentation: %s", + forceload: bool = ..., + output: SupportsWrite[str] | None = None, +) -> None: ... +def writedoc(thing: str | object, forceload: bool = ...) -> None: ... +def writedocs(dir: str, pkgpath: str = "", done: Any | None = None) -> None: ... + +class Helper: + keywords: dict[str, str | tuple[str, str]] + symbols: dict[str, str] + topics: dict[str, str | tuple[str, ...]] + def __init__(self, input: IO[str] | None = None, output: IO[str] | None = None) -> None: ... + @property + def input(self) -> IO[str]: ... + @property + def output(self) -> IO[str]: ... + def __call__(self, request: str | Helper | object = ...) -> None: ... + def interact(self) -> None: ... + def getline(self, prompt: str) -> str: ... + def help(self, request: Any) -> None: ... + def intro(self) -> None: ... + def list(self, items: _list[str], columns: int = 4, width: int = 80) -> None: ... + def listkeywords(self) -> None: ... + def listsymbols(self) -> None: ... + def listtopics(self) -> None: ... + def showtopic(self, topic: str, more_xrefs: str = "") -> None: ... + def showsymbol(self, symbol: str) -> None: ... + def listmodules(self, key: str = "") -> None: ... + +help: Helper + +class ModuleScanner: + quit: bool + def run( + self, + callback: Callable[[str | None, str, str], object], + key: str | None = None, + completer: Callable[[], object] | None = None, + onerror: Callable[[str], object] | None = None, + ) -> None: ... + +def apropos(key: str) -> None: ... +def ispath(x: object) -> TypeGuard[str]: ... +def cli() -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pydoc_data/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pydoc_data/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pydoc_data/topics.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pydoc_data/topics.pyi new file mode 100644 index 00000000..091d3430 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pydoc_data/topics.pyi @@ -0,0 +1 @@ +topics: dict[str, str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyexpat/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyexpat/__init__.pyi new file mode 100644 index 00000000..9e1eea08 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyexpat/__init__.pyi @@ -0,0 +1,80 @@ +from _typeshed import ReadableBuffer, SupportsRead +from collections.abc import Callable +from pyexpat import errors as errors, model as model +from typing import Any +from typing_extensions import TypeAlias, final + +EXPAT_VERSION: str # undocumented +version_info: tuple[int, int, int] # undocumented +native_encoding: str # undocumented +features: list[tuple[str, int]] # undocumented + +class ExpatError(Exception): + code: int + lineno: int + offset: int + +error = ExpatError + +XML_PARAM_ENTITY_PARSING_NEVER: int +XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE: int +XML_PARAM_ENTITY_PARSING_ALWAYS: int + +_Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] + +@final +class XMLParserType: + def Parse(self, __data: str | ReadableBuffer, __isfinal: bool = False) -> int: ... + def ParseFile(self, __file: SupportsRead[bytes]) -> int: ... + def SetBase(self, __base: str) -> None: ... + def GetBase(self) -> str | None: ... + def GetInputContext(self) -> bytes | None: ... + def ExternalEntityParserCreate(self, __context: str | None, __encoding: str = ...) -> XMLParserType: ... + def SetParamEntityParsing(self, __flag: int) -> int: ... + def UseForeignDTD(self, __flag: bool = True) -> None: ... + @property + def intern(self) -> dict[str, str]: ... 
+ buffer_size: int + buffer_text: bool + buffer_used: int + namespace_prefixes: bool # undocumented + ordered_attributes: bool + specified_attributes: bool + ErrorByteIndex: int + ErrorCode: int + ErrorColumnNumber: int + ErrorLineNumber: int + CurrentByteIndex: int + CurrentColumnNumber: int + CurrentLineNumber: int + XmlDeclHandler: Callable[[str, str | None, int], Any] | None + StartDoctypeDeclHandler: Callable[[str, str | None, str | None, bool], Any] | None + EndDoctypeDeclHandler: Callable[[], Any] | None + ElementDeclHandler: Callable[[str, _Model], Any] | None + AttlistDeclHandler: Callable[[str, str, str, str | None, bool], Any] | None + StartElementHandler: Callable[[str, dict[str, str]], Any] | Callable[[str, list[str]], Any] | Callable[ + [str, dict[str, str], list[str]], Any + ] | None + EndElementHandler: Callable[[str], Any] | None + ProcessingInstructionHandler: Callable[[str, str], Any] | None + CharacterDataHandler: Callable[[str], Any] | None + UnparsedEntityDeclHandler: Callable[[str, str | None, str, str | None, str], Any] | None + EntityDeclHandler: Callable[[str, bool, str | None, str | None, str, str | None, str | None], Any] | None + NotationDeclHandler: Callable[[str, str | None, str, str | None], Any] | None + StartNamespaceDeclHandler: Callable[[str, str], Any] | None + EndNamespaceDeclHandler: Callable[[str], Any] | None + CommentHandler: Callable[[str], Any] | None + StartCdataSectionHandler: Callable[[], Any] | None + EndCdataSectionHandler: Callable[[], Any] | None + DefaultHandler: Callable[[str], Any] | None + DefaultHandlerExpand: Callable[[str], Any] | None + NotStandaloneHandler: Callable[[], int] | None + ExternalEntityRefHandler: Callable[[str, str | None, str | None, str | None], int] | None + SkippedEntityHandler: Callable[[str, bool], Any] | None + +def ErrorString(__code: int) -> str: ... + +# intern is undocumented +def ParserCreate( + encoding: str | None = None, namespace_separator: str | None = None, intern: dict[str, Any] | None = None +) -> XMLParserType: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyexpat/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyexpat/errors.pyi new file mode 100644 index 00000000..2e512eb1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyexpat/errors.pyi @@ -0,0 +1,49 @@ +import sys + +codes: dict[str, int] +messages: dict[int, str] + +XML_ERROR_ABORTED: str +XML_ERROR_ASYNC_ENTITY: str +XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: str +XML_ERROR_BAD_CHAR_REF: str +XML_ERROR_BINARY_ENTITY_REF: str +XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: str +XML_ERROR_DUPLICATE_ATTRIBUTE: str +XML_ERROR_ENTITY_DECLARED_IN_PE: str +XML_ERROR_EXTERNAL_ENTITY_HANDLING: str +XML_ERROR_FEATURE_REQUIRES_XML_DTD: str +XML_ERROR_FINISHED: str +XML_ERROR_INCOMPLETE_PE: str +XML_ERROR_INCORRECT_ENCODING: str +XML_ERROR_INVALID_TOKEN: str +XML_ERROR_JUNK_AFTER_DOC_ELEMENT: str +XML_ERROR_MISPLACED_XML_PI: str +XML_ERROR_NOT_STANDALONE: str +XML_ERROR_NOT_SUSPENDED: str +XML_ERROR_NO_ELEMENTS: str +XML_ERROR_NO_MEMORY: str +XML_ERROR_PARAM_ENTITY_REF: str +XML_ERROR_PARTIAL_CHAR: str +XML_ERROR_PUBLICID: str +XML_ERROR_RECURSIVE_ENTITY_REF: str +XML_ERROR_SUSPENDED: str +XML_ERROR_SUSPEND_PE: str +XML_ERROR_SYNTAX: str +XML_ERROR_TAG_MISMATCH: str +XML_ERROR_TEXT_DECL: str +XML_ERROR_UNBOUND_PREFIX: str +XML_ERROR_UNCLOSED_CDATA_SECTION: str +XML_ERROR_UNCLOSED_TOKEN: str +XML_ERROR_UNDECLARING_PREFIX: str +XML_ERROR_UNDEFINED_ENTITY: str +XML_ERROR_UNEXPECTED_STATE: str +XML_ERROR_UNKNOWN_ENCODING: str +XML_ERROR_XML_DECL: str +if sys.version_info >= (3, 11): + XML_ERROR_RESERVED_PREFIX_XML: str + XML_ERROR_RESERVED_PREFIX_XMLNS: str + XML_ERROR_RESERVED_NAMESPACE_URI: str + XML_ERROR_INVALID_ARGUMENT: str + XML_ERROR_NO_BUFFER: str + XML_ERROR_AMPLIFICATION_LIMIT_BREACH: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyexpat/model.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyexpat/model.pyi new file mode 100644 index 00000000..f357cf65 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/pyexpat/model.pyi @@ -0,0 +1,11 @@ +XML_CTYPE_ANY: int +XML_CTYPE_CHOICE: int +XML_CTYPE_EMPTY: int +XML_CTYPE_MIXED: int +XML_CTYPE_NAME: int +XML_CTYPE_SEQ: int + +XML_CQUANT_NONE: int +XML_CQUANT_OPT: int +XML_CQUANT_PLUS: int +XML_CQUANT_REP: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/queue.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/queue.pyi new file mode 100644 index 00000000..3537e445 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/queue.pyi @@ -0,0 +1,58 @@ +import sys +from threading import Condition, Lock +from typing import Any, Generic, TypeVar + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ["Empty", "Full", "Queue", "PriorityQueue", "LifoQueue", "SimpleQueue"] + +_T = TypeVar("_T") + +class Empty(Exception): ... +class Full(Exception): ... + +class Queue(Generic[_T]): + maxsize: int + + mutex: Lock # undocumented + not_empty: Condition # undocumented + not_full: Condition # undocumented + all_tasks_done: Condition # undocumented + unfinished_tasks: int # undocumented + # Despite the fact that `queue` has `deque` type, + # we treat it as `Any` to allow different implementations in subtypes. 
+ queue: Any # undocumented + def __init__(self, maxsize: int = 0) -> None: ... + def _init(self, maxsize: int) -> None: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def get_nowait(self) -> _T: ... + def _get(self) -> _T: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... + def put_nowait(self, item: _T) -> None: ... + def _put(self, item: _T) -> None: ... + def join(self) -> None: ... + def qsize(self) -> int: ... + def _qsize(self) -> int: ... + def task_done(self) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class PriorityQueue(Queue[_T]): + queue: list[_T] + +class LifoQueue(Queue[_T]): + queue: list[_T] + +class SimpleQueue(Generic[_T]): + def __init__(self) -> None: ... + def empty(self) -> bool: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def get_nowait(self) -> _T: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... + def put_nowait(self, item: _T) -> None: ... + def qsize(self) -> int: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/quopri.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/quopri.pyi new file mode 100644 index 00000000..b652e139 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/quopri.pyi @@ -0,0 +1,11 @@ +from _typeshed import ReadableBuffer, SupportsNoArgReadline, SupportsRead, SupportsWrite +from typing import Protocol + +__all__ = ["encode", "decode", "encodestring", "decodestring"] + +class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... + +def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: bool = False) -> None: ... +def encodestring(s: ReadableBuffer, quotetabs: bool = False, header: bool = False) -> bytes: ... +def decode(input: _Input, output: SupportsWrite[bytes], header: bool = False) -> None: ... +def decodestring(s: str | ReadableBuffer, header: bool = False) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/random.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/random.pyi new file mode 100644 index 00000000..48498786 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/random.pyi @@ -0,0 +1,129 @@ +import _random +import sys +from _typeshed import SupportsLenAndGetItem +from collections.abc import Callable, Iterable, MutableSequence, Sequence, Set as AbstractSet +from fractions import Fraction +from typing import Any, ClassVar, NoReturn, TypeVar + +__all__ = [ + "Random", + "seed", + "random", + "uniform", + "randint", + "choice", + "sample", + "randrange", + "shuffle", + "normalvariate", + "lognormvariate", + "expovariate", + "vonmisesvariate", + "gammavariate", + "triangular", + "gauss", + "betavariate", + "paretovariate", + "weibullvariate", + "getstate", + "setstate", + "getrandbits", + "choices", + "SystemRandom", +] + +if sys.version_info >= (3, 9): + __all__ += ["randbytes"] + +_T = TypeVar("_T") + +class Random(_random.Random): + VERSION: ClassVar[int] + def __init__(self, x: Any = None) -> None: ... 
+ # Using other `seed` types is deprecated since 3.9 and removed in 3.11 + # Ignore Y041, since random.seed doesn't treat int like a float subtype. Having an explicit + # int better documents conventional usage of random.seed. + if sys.version_info >= (3, 9): + def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: ... # type: ignore[override] # noqa: Y041 + else: + def seed(self, a: Any = None, version: int = 2) -> None: ... + + def getstate(self) -> tuple[Any, ...]: ... + def setstate(self, state: tuple[Any, ...]) -> None: ... + def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: ... + def randint(self, a: int, b: int) -> int: ... + if sys.version_info >= (3, 9): + def randbytes(self, n: int) -> bytes: ... + + def choice(self, seq: SupportsLenAndGetItem[_T]) -> _T: ... + def choices( + self, + population: SupportsLenAndGetItem[_T], + weights: Sequence[float | Fraction] | None = None, + *, + cum_weights: Sequence[float | Fraction] | None = None, + k: int = 1, + ) -> list[_T]: ... + if sys.version_info >= (3, 11): + def shuffle(self, x: MutableSequence[Any]) -> None: ... + else: + def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: ... + if sys.version_info >= (3, 11): + def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: ... + elif sys.version_info >= (3, 9): + def sample( + self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None + ) -> list[_T]: ... + else: + def sample(self, population: Sequence[_T] | AbstractSet[_T], k: int) -> list[_T]: ... + + def uniform(self, a: float, b: float) -> float: ... + def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: ... + def betavariate(self, alpha: float, beta: float) -> float: ... + def expovariate(self, lambd: float) -> float: ... + def gammavariate(self, alpha: float, beta: float) -> float: ... + if sys.version_info >= (3, 11): + def gauss(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... + def normalvariate(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... + else: + def gauss(self, mu: float, sigma: float) -> float: ... + def normalvariate(self, mu: float, sigma: float) -> float: ... + + def lognormvariate(self, mu: float, sigma: float) -> float: ... + def vonmisesvariate(self, mu: float, kappa: float) -> float: ... + def paretovariate(self, alpha: float) -> float: ... + def weibullvariate(self, alpha: float, beta: float) -> float: ... + +# SystemRandom is not implemented for all OS's; good on Windows & Linux +class SystemRandom(Random): + def getrandbits(self, k: int) -> int: ... # k can be passed by keyword + def getstate(self, *args: Any, **kwds: Any) -> NoReturn: ... + def setstate(self, *args: Any, **kwds: Any) -> NoReturn: ... + +# ----- random function stubs ----- + +_inst: Random = ... 
+seed = _inst.seed +random = _inst.random +uniform = _inst.uniform +triangular = _inst.triangular +randint = _inst.randint +choice = _inst.choice +randrange = _inst.randrange +sample = _inst.sample +shuffle = _inst.shuffle +choices = _inst.choices +normalvariate = _inst.normalvariate +lognormvariate = _inst.lognormvariate +expovariate = _inst.expovariate +vonmisesvariate = _inst.vonmisesvariate +gammavariate = _inst.gammavariate +gauss = _inst.gauss +betavariate = _inst.betavariate +paretovariate = _inst.paretovariate +weibullvariate = _inst.weibullvariate +getstate = _inst.getstate +setstate = _inst.setstate +getrandbits = _inst.getrandbits +if sys.version_info >= (3, 9): + randbytes = _inst.randbytes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/re.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/re.pyi new file mode 100644 index 00000000..4e53141a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/re.pyi @@ -0,0 +1,288 @@ +import enum +import sre_compile +import sys +from _typeshed import ReadableBuffer +from collections.abc import Callable, Iterator, Mapping +from sre_constants import error as error +from typing import Any, AnyStr, Generic, TypeVar, overload +from typing_extensions import Literal, TypeAlias, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "match", + "fullmatch", + "search", + "sub", + "subn", + "split", + "findall", + "finditer", + "compile", + "purge", + "template", + "escape", + "error", + "A", + "I", + "L", + "M", + "S", + "X", + "U", + "ASCII", + "IGNORECASE", + "LOCALE", + "MULTILINE", + "DOTALL", + "VERBOSE", + "UNICODE", + "Match", + "Pattern", +] + +if sys.version_info >= (3, 11): + __all__ += ["NOFLAG", "RegexFlag"] + +_T = TypeVar("_T") + +@final +class Match(Generic[AnyStr]): + @property + def pos(self) -> int: ... + @property + def endpos(self) -> int: ... + @property + def lastindex(self) -> int | None: ... + @property + def lastgroup(self) -> str | None: ... + @property + def string(self) -> AnyStr: ... + + # The regular expression object whose match() or search() method produced + # this match instance. + @property + def re(self) -> Pattern[AnyStr]: ... + @overload + def expand(self: Match[str], template: str) -> str: ... + @overload + def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... # type: ignore[misc] + @overload + def expand(self, template: AnyStr) -> AnyStr: ... + # group() returns "AnyStr" or "AnyStr | None", depending on the pattern. + @overload + def group(self, __group: Literal[0] = 0) -> AnyStr: ... + @overload + def group(self, __group: str | int) -> AnyStr | Any: ... + @overload + def group(self, __group1: str | int, __group2: str | int, *groups: str | int) -> tuple[AnyStr | Any, ...]: ... + # Each item of groups()'s return tuple is either "AnyStr" or + # "AnyStr | None", depending on the pattern. + @overload + def groups(self) -> tuple[AnyStr | Any, ...]: ... + @overload + def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ... + # Each value in groupdict()'s return dict is either "AnyStr" or + # "AnyStr | None", depending on the pattern. + @overload + def groupdict(self) -> dict[str, AnyStr | Any]: ... + @overload + def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... + def start(self, __group: int | str = 0) -> int: ... + def end(self, __group: int | str = 0) -> int: ... + def span(self, __group: int | str = 0) -> tuple[int, int]: ... 
+ @property + def regs(self) -> tuple[tuple[int, int], ...]: ... # undocumented + # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern. + @overload + def __getitem__(self, __key: Literal[0]) -> AnyStr: ... + @overload + def __getitem__(self, __key: int | str) -> AnyStr | Any: ... + def __copy__(self) -> Match[AnyStr]: ... + def __deepcopy__(self, __memo: Any) -> Match[AnyStr]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +@final +class Pattern(Generic[AnyStr]): + @property + def flags(self) -> int: ... + @property + def groupindex(self) -> Mapping[str, int]: ... + @property + def groups(self) -> int: ... + @property + def pattern(self) -> AnyStr: ... + @overload + def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + @overload + def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... # type: ignore[misc] + @overload + def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... + @overload + def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + @overload + def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... # type: ignore[misc] + @overload + def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... + @overload + def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + @overload + def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... # type: ignore[misc] + @overload + def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... + @overload + def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | Any]: ... + @overload + def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0) -> list[bytes | Any]: ... + @overload + def split(self, string: AnyStr, maxsplit: int = 0) -> list[AnyStr | Any]: ... + # return type depends on the number of groups in the pattern + @overload + def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... + @overload + def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... + @overload + def findall(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> list[AnyStr]: ... + @overload + def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ... + @overload + def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[bytes]]: ... # type: ignore[misc] + @overload + def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ... + @overload + def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ... + @overload + def sub( # type: ignore[misc] + self: Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = 0, + ) -> bytes: ... + @overload + def sub(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> AnyStr: ... 
+ @overload + def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ... + @overload + def subn( # type: ignore[misc] + self: Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = 0, + ) -> tuple[bytes, int]: ... + @overload + def subn(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> tuple[AnyStr, int]: ... + def __copy__(self) -> Pattern[AnyStr]: ... + def __deepcopy__(self, __memo: Any) -> Pattern[AnyStr]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# ----- re variables and constants ----- + +class RegexFlag(enum.IntFlag): + A = sre_compile.SRE_FLAG_ASCII + ASCII = A + DEBUG = sre_compile.SRE_FLAG_DEBUG + I = sre_compile.SRE_FLAG_IGNORECASE + IGNORECASE = I + L = sre_compile.SRE_FLAG_LOCALE + LOCALE = L + M = sre_compile.SRE_FLAG_MULTILINE + MULTILINE = M + S = sre_compile.SRE_FLAG_DOTALL + DOTALL = S + X = sre_compile.SRE_FLAG_VERBOSE + VERBOSE = X + U = sre_compile.SRE_FLAG_UNICODE + UNICODE = U + T = sre_compile.SRE_FLAG_TEMPLATE + TEMPLATE = T + if sys.version_info >= (3, 11): + NOFLAG: int + +A = RegexFlag.A +ASCII = RegexFlag.ASCII +DEBUG = RegexFlag.DEBUG +I = RegexFlag.I +IGNORECASE = RegexFlag.IGNORECASE +L = RegexFlag.L +LOCALE = RegexFlag.LOCALE +M = RegexFlag.M +MULTILINE = RegexFlag.MULTILINE +S = RegexFlag.S +DOTALL = RegexFlag.DOTALL +X = RegexFlag.X +VERBOSE = RegexFlag.VERBOSE +U = RegexFlag.U +UNICODE = RegexFlag.UNICODE +T = RegexFlag.T +TEMPLATE = RegexFlag.TEMPLATE +if sys.version_info >= (3, 11): + NOFLAG = RegexFlag.NOFLAG +_FlagsType: TypeAlias = int | RegexFlag + +# Type-wise the compile() overloads are unnecessary, they could also be modeled using +# unions in the parameter types. However mypy has a bug regarding TypeVar +# constraints (https://github.com/python/mypy/issues/11880), +# which limits us here because AnyStr is a constrained TypeVar. + +# pattern arguments do *not* accept arbitrary buffers such as bytearray, +# because the pattern must be hashable. +@overload +def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: ... +@overload +def compile(pattern: Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... +@overload +def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... +@overload +def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... +@overload +def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... +@overload +def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... +@overload +def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... +@overload +def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... +@overload +def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | Any]: ... +@overload +def split( + pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0, flags: _FlagsType = 0 +) -> list[bytes | Any]: ... +@overload +def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: ... 
+@overload +def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> list[Any]: ... +@overload +def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: ... +@overload +def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Iterator[Match[bytes]]: ... +@overload +def sub( + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 +) -> str: ... +@overload +def sub( + pattern: bytes | Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = 0, + flags: _FlagsType = 0, +) -> bytes: ... +@overload +def subn( + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 +) -> tuple[str, int]: ... +@overload +def subn( + pattern: bytes | Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = 0, + flags: _FlagsType = 0, +) -> tuple[bytes, int]: ... +def escape(pattern: AnyStr) -> AnyStr: ... +def purge() -> None: ... +def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/readline.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/readline.pyi new file mode 100644 index 00000000..14c01a98 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/readline.pyi @@ -0,0 +1,36 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Callable, Sequence +from typing_extensions import TypeAlias + +if sys.platform != "win32": + _Completer: TypeAlias = Callable[[str, int], str | None] + _CompDisp: TypeAlias = Callable[[str, Sequence[str], int], None] + + def parse_and_bind(__string: str) -> None: ... + def read_init_file(__filename: StrOrBytesPath | None = None) -> None: ... + def get_line_buffer() -> str: ... + def insert_text(__string: str) -> None: ... + def redisplay() -> None: ... + def read_history_file(__filename: StrOrBytesPath | None = None) -> None: ... + def write_history_file(__filename: StrOrBytesPath | None = None) -> None: ... + def append_history_file(__nelements: int, __filename: StrOrBytesPath | None = None) -> None: ... + def get_history_length() -> int: ... + def set_history_length(__length: int) -> None: ... + def clear_history() -> None: ... + def get_current_history_length() -> int: ... + def get_history_item(__index: int) -> str: ... + def remove_history_item(__pos: int) -> None: ... + def replace_history_item(__pos: int, __line: str) -> None: ... + def add_history(__string: str) -> None: ... + def set_auto_history(__enabled: bool) -> None: ... + def set_startup_hook(__function: Callable[[], object] | None = None) -> None: ... + def set_pre_input_hook(__function: Callable[[], object] | None = None) -> None: ... + def set_completer(__function: _Completer | None = None) -> None: ... + def get_completer() -> _Completer | None: ... + def get_completion_type() -> int: ... + def get_begidx() -> int: ... + def get_endidx() -> int: ... + def set_completer_delims(__string: str) -> None: ... + def get_completer_delims() -> str: ... + def set_completion_display_matches_hook(__function: _CompDisp | None = None) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/reprlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/reprlib.pyi new file mode 100644 index 00000000..21c8a5cd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/reprlib.pyi @@ -0,0 +1,40 @@ +from array import array +from collections import deque +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +__all__ = ["Repr", "repr", "recursive_repr"] + +_ReprFunc: TypeAlias = Callable[[Any], str] + +def recursive_repr(fillvalue: str = "...") -> Callable[[_ReprFunc], _ReprFunc]: ... + +class Repr: + maxlevel: int + maxdict: int + maxlist: int + maxtuple: int + maxset: int + maxfrozenset: int + maxdeque: int + maxarray: int + maxlong: int + maxstring: int + maxother: int + def repr(self, x: Any) -> str: ... + def repr1(self, x: Any, level: int) -> str: ... + def repr_tuple(self, x: tuple[Any, ...], level: int) -> str: ... + def repr_list(self, x: list[Any], level: int) -> str: ... + def repr_array(self, x: array[Any], level: int) -> str: ... + def repr_set(self, x: set[Any], level: int) -> str: ... + def repr_frozenset(self, x: frozenset[Any], level: int) -> str: ... + def repr_deque(self, x: deque[Any], level: int) -> str: ... + def repr_dict(self, x: dict[Any, Any], level: int) -> str: ... + def repr_str(self, x: str, level: int) -> str: ... + def repr_int(self, x: int, level: int) -> str: ... + def repr_instance(self, x: Any, level: int) -> str: ... + +aRepr: Repr + +def repr(x: object) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/resource.pyi new file mode 100644 index 00000000..f2e979ff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/resource.pyi @@ -0,0 +1,93 @@ +import sys +from _typeshed import structseq +from typing import overload +from typing_extensions import Final, final + +if sys.platform != "win32": + RLIMIT_AS: int + RLIMIT_CORE: int + RLIMIT_CPU: int + RLIMIT_DATA: int + RLIMIT_FSIZE: int + RLIMIT_MEMLOCK: int + RLIMIT_NOFILE: int + RLIMIT_NPROC: int + RLIMIT_RSS: int + RLIMIT_STACK: int + RLIM_INFINITY: int + RUSAGE_CHILDREN: int + RUSAGE_SELF: int + if sys.platform == "linux": + RLIMIT_MSGQUEUE: int + RLIMIT_NICE: int + RLIMIT_OFILE: int + RLIMIT_RTPRIO: int + RLIMIT_RTTIME: int + RLIMIT_SIGPENDING: int + RUSAGE_THREAD: int + @final + class struct_rusage( + structseq[float], tuple[float, float, int, int, int, int, int, int, int, int, int, int, int, int, int, int] + ): + if sys.version_info >= (3, 10): + __match_args__: Final = ( + "ru_utime", + "ru_stime", + "ru_maxrss", + "ru_ixrss", + "ru_idrss", + "ru_isrss", + "ru_minflt", + "ru_majflt", + "ru_nswap", + "ru_inblock", + "ru_oublock", + "ru_msgsnd", + "ru_msgrcv", + "ru_nsignals", + "ru_nvcsw", + "ru_nivcsw", + ) + @property + def ru_utime(self) -> float: ... + @property + def ru_stime(self) -> float: ... + @property + def ru_maxrss(self) -> int: ... + @property + def ru_ixrss(self) -> int: ... + @property + def ru_idrss(self) -> int: ... + @property + def ru_isrss(self) -> int: ... + @property + def ru_minflt(self) -> int: ... + @property + def ru_majflt(self) -> int: ... + @property + def ru_nswap(self) -> int: ... + @property + def ru_inblock(self) -> int: ... + @property + def ru_oublock(self) -> int: ... 
+ @property + def ru_msgsnd(self) -> int: ... + @property + def ru_msgrcv(self) -> int: ... + @property + def ru_nsignals(self) -> int: ... + @property + def ru_nvcsw(self) -> int: ... + @property + def ru_nivcsw(self) -> int: ... + + def getpagesize() -> int: ... + def getrlimit(__resource: int) -> tuple[int, int]: ... + def getrusage(__who: int) -> struct_rusage: ... + def setrlimit(__resource: int, __limits: tuple[int, int]) -> None: ... + if sys.platform == "linux": + @overload + def prlimit(pid: int, resource: int, limits: tuple[int, int]) -> tuple[int, int]: ... + @overload + def prlimit(pid: int, resource: int) -> tuple[int, int]: ... + error = OSError diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/rlcompleter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/rlcompleter.pyi new file mode 100644 index 00000000..8d9477e3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/rlcompleter.pyi @@ -0,0 +1,9 @@ +from typing import Any + +__all__ = ["Completer"] + +class Completer: + def __init__(self, namespace: dict[str, Any] | None = None) -> None: ... + def complete(self, text: str, state: int) -> str | None: ... + def attr_matches(self, text: str) -> list[str]: ... + def global_matches(self, text: str) -> list[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/runpy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/runpy.pyi new file mode 100644 index 00000000..d4406ea4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/runpy.pyi @@ -0,0 +1,24 @@ +from _typeshed import Unused +from types import ModuleType +from typing import Any +from typing_extensions import Self + +__all__ = ["run_module", "run_path"] + +class _TempModule: + mod_name: str + module: ModuleType + def __init__(self, mod_name: str) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + +class _ModifiedArgv0: + value: Any + def __init__(self, value: Any) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *args: Unused) -> None: ... + +def run_module( + mod_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None, alter_sys: bool = False +) -> dict[str, Any]: ... +def run_path(path_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sched.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sched.pyi new file mode 100644 index 00000000..a8ec78d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sched.pyi @@ -0,0 +1,42 @@ +import sys +from collections.abc import Callable +from typing import Any, NamedTuple +from typing_extensions import TypeAlias + +__all__ = ["scheduler"] + +_ActionCallback: TypeAlias = Callable[..., Any] + +if sys.version_info >= (3, 10): + class Event(NamedTuple): + time: float + priority: Any + sequence: int + action: _ActionCallback + argument: tuple[Any, ...] + kwargs: dict[str, Any] + +else: + class Event(NamedTuple): + time: float + priority: Any + action: _ActionCallback + argument: tuple[Any, ...] 
+ kwargs: dict[str, Any] + +class scheduler: + timefunc: Callable[[], float] + delayfunc: Callable[[float], object] + + def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], object] = ...) -> None: ... + def enterabs( + self, time: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = ..., kwargs: dict[str, Any] = ... + ) -> Event: ... + def enter( + self, delay: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = ..., kwargs: dict[str, Any] = ... + ) -> Event: ... + def run(self, blocking: bool = True) -> float | None: ... + def cancel(self, event: Event) -> None: ... + def empty(self) -> bool: ... + @property + def queue(self) -> list[Event]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/secrets.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/secrets.pyi new file mode 100644 index 00000000..4861b6f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/secrets.pyi @@ -0,0 +1,15 @@ +from _typeshed import SupportsLenAndGetItem +from hmac import compare_digest as compare_digest +from random import SystemRandom as SystemRandom +from typing import TypeVar + +__all__ = ["choice", "randbelow", "randbits", "SystemRandom", "token_bytes", "token_hex", "token_urlsafe", "compare_digest"] + +_T = TypeVar("_T") + +def randbelow(exclusive_upper_bound: int) -> int: ... +def randbits(k: int) -> int: ... +def choice(seq: SupportsLenAndGetItem[_T]) -> _T: ... +def token_bytes(nbytes: int | None = None) -> bytes: ... +def token_hex(nbytes: int | None = None) -> str: ... +def token_urlsafe(nbytes: int | None = None) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/select.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/select.pyi new file mode 100644 index 00000000..c86d20c3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/select.pyi @@ -0,0 +1,151 @@ +import sys +from _typeshed import FileDescriptorLike +from collections.abc import Iterable +from types import TracebackType +from typing import Any +from typing_extensions import Self, final + +if sys.platform != "win32": + PIPE_BUF: int + POLLERR: int + POLLHUP: int + POLLIN: int + POLLMSG: int + POLLNVAL: int + POLLOUT: int + POLLPRI: int + POLLRDBAND: int + POLLRDHUP: int + POLLRDNORM: int + POLLWRBAND: int + POLLWRNORM: int + +class poll: + def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... + def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... + def unregister(self, fd: FileDescriptorLike) -> None: ... + def poll(self, timeout: float | None = ...) -> list[tuple[int, int]]: ... + +def select( + __rlist: Iterable[Any], __wlist: Iterable[Any], __xlist: Iterable[Any], __timeout: float | None = None +) -> tuple[list[Any], list[Any], list[Any]]: ... + +error = OSError + +if sys.platform != "linux" and sys.platform != "win32": + # BSD only + @final + class kevent: + data: Any + fflags: int + filter: int + flags: int + ident: int + udata: Any + def __init__( + self, + ident: FileDescriptorLike, + filter: int = ..., + flags: int = ..., + fflags: int = ..., + data: Any = ..., + udata: Any = ..., + ) -> None: ... + # BSD only + @final + class kqueue: + closed: bool + def __init__(self) -> None: ... + def close(self) -> None: ... 
+ def control( + self, __changelist: Iterable[kevent] | None, __maxevents: int, __timeout: float | None = None + ) -> list[kevent]: ... + def fileno(self) -> int: ... + @classmethod + def fromfd(cls, __fd: FileDescriptorLike) -> kqueue: ... + KQ_EV_ADD: int + KQ_EV_CLEAR: int + KQ_EV_DELETE: int + KQ_EV_DISABLE: int + KQ_EV_ENABLE: int + KQ_EV_EOF: int + KQ_EV_ERROR: int + KQ_EV_FLAG1: int + KQ_EV_ONESHOT: int + KQ_EV_SYSFLAGS: int + KQ_FILTER_AIO: int + KQ_FILTER_NETDEV: int + KQ_FILTER_PROC: int + KQ_FILTER_READ: int + KQ_FILTER_SIGNAL: int + KQ_FILTER_TIMER: int + KQ_FILTER_VNODE: int + KQ_FILTER_WRITE: int + KQ_NOTE_ATTRIB: int + KQ_NOTE_CHILD: int + KQ_NOTE_DELETE: int + KQ_NOTE_EXEC: int + KQ_NOTE_EXIT: int + KQ_NOTE_EXTEND: int + KQ_NOTE_FORK: int + KQ_NOTE_LINK: int + if sys.platform != "darwin": + KQ_NOTE_LINKDOWN: int + KQ_NOTE_LINKINV: int + KQ_NOTE_LINKUP: int + KQ_NOTE_LOWAT: int + KQ_NOTE_PCTRLMASK: int + KQ_NOTE_PDATAMASK: int + KQ_NOTE_RENAME: int + KQ_NOTE_REVOKE: int + KQ_NOTE_TRACK: int + KQ_NOTE_TRACKERR: int + KQ_NOTE_WRITE: int + +if sys.platform == "linux": + @final + class epoll: + def __init__(self, sizehint: int = ..., flags: int = ...) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, + __exc_type: type[BaseException] | None = None, + __exc_value: BaseException | None = ..., + __exc_tb: TracebackType | None = None, + ) -> None: ... + def close(self) -> None: ... + closed: bool + def fileno(self) -> int: ... + def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... + def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... + def unregister(self, fd: FileDescriptorLike) -> None: ... + def poll(self, timeout: float | None = None, maxevents: int = -1) -> list[tuple[int, int]]: ... + @classmethod + def fromfd(cls, __fd: FileDescriptorLike) -> epoll: ... + EPOLLERR: int + EPOLLEXCLUSIVE: int + EPOLLET: int + EPOLLHUP: int + EPOLLIN: int + EPOLLMSG: int + EPOLLONESHOT: int + EPOLLOUT: int + EPOLLPRI: int + EPOLLRDBAND: int + EPOLLRDHUP: int + EPOLLRDNORM: int + EPOLLWRBAND: int + EPOLLWRNORM: int + EPOLL_RDHUP: int + EPOLL_CLOEXEC: int + +if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32": + # Solaris only + class devpoll: + def close(self) -> None: ... + closed: bool + def fileno(self) -> int: ... + def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... + def modify(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... + def unregister(self, fd: FileDescriptorLike) -> None: ... + def poll(self, timeout: float | None = ...) -> list[tuple[int, int]]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/selectors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/selectors.pyi new file mode 100644 index 00000000..90a923f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/selectors.pyi @@ -0,0 +1,73 @@ +import sys +from _typeshed import FileDescriptor, FileDescriptorLike, Unused +from abc import ABCMeta, abstractmethod +from collections.abc import Mapping +from typing import Any, NamedTuple +from typing_extensions import Self, TypeAlias + +_EventMask: TypeAlias = int + +EVENT_READ: _EventMask +EVENT_WRITE: _EventMask + +class SelectorKey(NamedTuple): + fileobj: FileDescriptorLike + fd: FileDescriptor + events: _EventMask + data: Any + +class BaseSelector(metaclass=ABCMeta): + @abstractmethod + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + @abstractmethod + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + @abstractmethod + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def close(self) -> None: ... + def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + @abstractmethod + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + +class SelectSelector(BaseSelector): + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +if sys.platform != "win32": + class PollSelector(BaseSelector): + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +if sys.platform == "linux": + class EpollSelector(BaseSelector): + def fileno(self) -> int: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +class DevpollSelector(BaseSelector): + def fileno(self) -> int: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +class KqueueSelector(BaseSelector): + def fileno(self) -> int: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... 
+ def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +class DefaultSelector(BaseSelector): + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/shelve.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/shelve.pyi new file mode 100644 index 00000000..82d0b03f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/shelve.pyi @@ -0,0 +1,43 @@ +from collections.abc import Iterator, MutableMapping +from dbm import _TFlags +from types import TracebackType +from typing import Any, TypeVar, overload +from typing_extensions import Self + +__all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"] + +_T = TypeVar("_T") +_VT = TypeVar("_VT") + +class Shelf(MutableMapping[str, _VT]): + def __init__( + self, dict: MutableMapping[bytes, bytes], protocol: int | None = None, writeback: bool = False, keyencoding: str = "utf-8" + ) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + @overload + def get(self, key: str) -> _VT | None: ... + @overload + def get(self, key: str, default: _T) -> _VT | _T: ... + def __getitem__(self, key: str) -> _VT: ... + def __setitem__(self, key: str, value: _VT) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __contains__(self, key: str) -> bool: ... # type: ignore[override] + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def close(self) -> None: ... + def sync(self) -> None: ... + +class BsdDbShelf(Shelf[_VT]): + def set_location(self, key: str) -> tuple[str, _VT]: ... + def next(self) -> tuple[str, _VT]: ... + def previous(self) -> tuple[str, _VT]: ... + def first(self) -> tuple[str, _VT]: ... + def last(self) -> tuple[str, _VT]: ... + +class DbfilenameShelf(Shelf[_VT]): + def __init__(self, filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> None: ... + +def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/shlex.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/shlex.pyi new file mode 100644 index 00000000..fa04932d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/shlex.pyi @@ -0,0 +1,50 @@ +import sys +from collections.abc import Iterable +from typing import TextIO +from typing_extensions import Self + +if sys.version_info >= (3, 8): + __all__ = ["shlex", "split", "quote", "join"] +else: + __all__ = ["shlex", "split", "quote"] + +def split(s: str, comments: bool = False, posix: bool = True) -> list[str]: ... + +if sys.version_info >= (3, 8): + def join(split_command: Iterable[str]) -> str: ... + +def quote(s: str) -> str: ... 
+ +class shlex(Iterable[str]): + commenters: str + wordchars: str + whitespace: str + escape: str + quotes: str + escapedquotes: str + whitespace_split: bool + infile: str | None + instream: TextIO + source: str + debug: int + lineno: int + token: str + eof: str + @property + def punctuation_chars(self) -> str: ... + def __init__( + self, + instream: str | TextIO | None = None, + infile: str | None = None, + posix: bool = False, + punctuation_chars: bool | str = False, + ) -> None: ... + def get_token(self) -> str: ... + def push_token(self, tok: str) -> None: ... + def read_token(self) -> str: ... + def sourcehook(self, newfile: str) -> tuple[str, TextIO]: ... + def push_source(self, newstream: str | TextIO, newfile: str | None = None) -> None: ... + def pop_source(self) -> None: ... + def error_leader(self, infile: str | None = None, lineno: int | None = None) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/shutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/shutil.pyi new file mode 100644 index 00000000..0e4f521e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/shutil.pyi @@ -0,0 +1,185 @@ +import os +import sys +from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite +from collections.abc import Callable, Iterable, Sequence +from typing import Any, AnyStr, NamedTuple, Protocol, TypeVar, overload +from typing_extensions import TypeAlias + +__all__ = [ + "copyfileobj", + "copyfile", + "copymode", + "copystat", + "copy", + "copy2", + "copytree", + "move", + "rmtree", + "Error", + "SpecialFileError", + "ExecError", + "make_archive", + "get_archive_formats", + "register_archive_format", + "unregister_archive_format", + "get_unpack_formats", + "register_unpack_format", + "unregister_unpack_format", + "unpack_archive", + "ignore_patterns", + "chown", + "which", + "get_terminal_size", + "SameFileError", + "disk_usage", +] + +_StrOrBytesPathT = TypeVar("_StrOrBytesPathT", bound=StrOrBytesPath) +_StrPathT = TypeVar("_StrPathT", bound=StrPath) +# Return value of some functions that may either return a path-like object that was passed in or +# a string +_PathReturn: TypeAlias = Any + +class Error(OSError): ... +class SameFileError(Error): ... +class SpecialFileError(OSError): ... +class ExecError(OSError): ... +class ReadError(OSError): ... +class RegistryError(Exception): ... + +if sys.version_info >= (3, 8): + def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 0) -> None: ... + +else: + def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 16384) -> None: ... + +def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = True) -> _StrOrBytesPathT: ... +def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... +def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... +@overload +def copy(src: StrPath, dst: StrPath, *, follow_symlinks: bool = True) -> _PathReturn: ... +@overload +def copy(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = True) -> _PathReturn: ... +@overload +def copy2(src: StrPath, dst: StrPath, *, follow_symlinks: bool = True) -> _PathReturn: ... 
+@overload +def copy2(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = True) -> _PathReturn: ... +def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: ... + +if sys.version_info >= (3, 8): + def copytree( + src: StrPath, + dst: StrPath, + symlinks: bool = False, + ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = None, + copy_function: Callable[[str, str], object] = ..., + ignore_dangling_symlinks: bool = False, + dirs_exist_ok: bool = False, + ) -> _PathReturn: ... + +else: + def copytree( + src: StrPath, + dst: StrPath, + symlinks: bool = False, + ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = None, + copy_function: Callable[[str, str], object] = ..., + ignore_dangling_symlinks: bool = False, + ) -> _PathReturn: ... + +_OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], Any, Any], object] + +class _RmtreeType(Protocol): + avoids_symlink_attacks: bool + if sys.version_info >= (3, 11): + def __call__( + self, + path: StrOrBytesPath, + ignore_errors: bool = ..., + onerror: _OnErrorCallback | None = ..., + *, + dir_fd: int | None = ..., + ) -> None: ... + + else: + def __call__(self, path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ...) -> None: ... + +rmtree: _RmtreeType + +_CopyFn: TypeAlias = Callable[[str, str], object] | Callable[[StrPath, StrPath], object] + +# N.B. shutil.move appears to take bytes arguments, however, +# this does not work when dst is (or is within) an existing directory. +# (#6832) +if sys.version_info >= (3, 9): + def move(src: StrPath, dst: StrPath, copy_function: _CopyFn = ...) -> _PathReturn: ... + +else: + # See https://bugs.python.org/issue32689 + def move(src: str, dst: StrPath, copy_function: _CopyFn = ...) -> _PathReturn: ... + +class _ntuple_diskusage(NamedTuple): + total: int + used: int + free: int + +def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: ... + +# While chown can be imported on Windows, it doesn't actually work; +# see https://bugs.python.org/issue33140. We keep it here because it's +# in __all__. +@overload +def chown(path: StrOrBytesPath, user: str | int, group: None = None) -> None: ... +@overload +def chown(path: StrOrBytesPath, user: None = None, *, group: str | int) -> None: ... +@overload +def chown(path: StrOrBytesPath, user: None, group: str | int) -> None: ... +@overload +def chown(path: StrOrBytesPath, user: str | int, group: str | int) -> None: ... + +if sys.version_info >= (3, 8): + @overload + def which(cmd: _StrPathT, mode: int = 1, path: StrPath | None = None) -> str | _StrPathT | None: ... + @overload + def which(cmd: bytes, mode: int = 1, path: StrPath | None = None) -> bytes | None: ... + +else: + def which(cmd: _StrPathT, mode: int = 1, path: StrPath | None = None) -> str | _StrPathT | None: ... + +def make_archive( + base_name: str, + format: str, + root_dir: StrPath | None = None, + base_dir: StrPath | None = None, + verbose: bool = ..., + dry_run: bool = ..., + owner: str | None = None, + group: str | None = None, + logger: Any | None = None, +) -> str: ... +def get_archive_formats() -> list[tuple[str, str]]: ... +@overload +def register_archive_format( + name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = "" +) -> None: ... 
+@overload +def register_archive_format( + name: str, function: Callable[[str, str], object], extra_args: None = None, description: str = "" +) -> None: ... +def unregister_archive_format(name: str) -> None: ... +def unpack_archive(filename: StrPath, extract_dir: StrPath | None = None, format: str | None = None) -> None: ... +@overload +def register_unpack_format( + name: str, + extensions: list[str], + function: Callable[..., object], + extra_args: Sequence[tuple[str, Any]], + description: str = "", +) -> None: ... +@overload +def register_unpack_format( + name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = None, description: str = "" +) -> None: ... +def unregister_unpack_format(name: str) -> None: ... +def get_unpack_formats() -> list[tuple[str, list[str], str]]: ... +def get_terminal_size(fallback: tuple[int, int] = ...) -> os.terminal_size: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/signal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/signal.pyi new file mode 100644 index 00000000..e411d470 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/signal.pyi @@ -0,0 +1,181 @@ +import sys +from _typeshed import structseq +from collections.abc import Callable, Iterable +from enum import IntEnum +from types import FrameType +from typing import Any +from typing_extensions import Final, Never, TypeAlias, final + +NSIG: int + +class Signals(IntEnum): + SIGABRT: int + SIGEMT: int + SIGFPE: int + SIGILL: int + SIGINFO: int + SIGINT: int + SIGSEGV: int + SIGTERM: int + + if sys.platform == "win32": + SIGBREAK: int + CTRL_C_EVENT: int + CTRL_BREAK_EVENT: int + else: + SIGALRM: int + SIGBUS: int + SIGCHLD: int + SIGCONT: int + SIGHUP: int + SIGIO: int + SIGIOT: int + SIGKILL: int + SIGPIPE: int + SIGPROF: int + SIGQUIT: int + SIGSTOP: int + SIGSYS: int + SIGTRAP: int + SIGTSTP: int + SIGTTIN: int + SIGTTOU: int + SIGURG: int + SIGUSR1: int + SIGUSR2: int + SIGVTALRM: int + SIGWINCH: int + SIGXCPU: int + SIGXFSZ: int + if sys.platform != "darwin": + SIGCLD: int + SIGPOLL: int + SIGPWR: int + SIGRTMAX: int + SIGRTMIN: int + +class Handlers(IntEnum): + SIG_DFL: int + SIG_IGN: int + +SIG_DFL: Handlers +SIG_IGN: Handlers + +_SIGNUM: TypeAlias = int | Signals +_HANDLER: TypeAlias = Callable[[int, FrameType | None], Any] | int | Handlers | None + +def default_int_handler(__signalnum: int, __frame: FrameType | None) -> Never: ... + +if sys.version_info >= (3, 10): # arguments changed in 3.10.2 + def getsignal(signalnum: _SIGNUM) -> _HANDLER: ... + def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: ... + +else: + def getsignal(__signalnum: _SIGNUM) -> _HANDLER: ... + def signal(__signalnum: _SIGNUM, __handler: _HANDLER) -> _HANDLER: ... 
+ +SIGABRT: Signals +SIGEMT: Signals +SIGFPE: Signals +SIGILL: Signals +SIGINFO: Signals +SIGINT: Signals +SIGSEGV: Signals +SIGTERM: Signals + +if sys.platform == "win32": + SIGBREAK: Signals + CTRL_C_EVENT: Signals + CTRL_BREAK_EVENT: Signals +else: + SIGALRM: Signals + SIGBUS: Signals + SIGCHLD: Signals + SIGCONT: Signals + SIGHUP: Signals + SIGIO: Signals + SIGIOT: Signals + SIGKILL: Signals + SIGPIPE: Signals + SIGPROF: Signals + SIGQUIT: Signals + SIGSTOP: Signals + SIGSYS: Signals + SIGTRAP: Signals + SIGTSTP: Signals + SIGTTIN: Signals + SIGTTOU: Signals + SIGURG: Signals + SIGUSR1: Signals + SIGUSR2: Signals + SIGVTALRM: Signals + SIGWINCH: Signals + SIGXCPU: Signals + SIGXFSZ: Signals + + class ItimerError(OSError): ... + ITIMER_PROF: int + ITIMER_REAL: int + ITIMER_VIRTUAL: int + + class Sigmasks(IntEnum): + SIG_BLOCK: int + SIG_UNBLOCK: int + SIG_SETMASK: int + SIG_BLOCK = Sigmasks.SIG_BLOCK + SIG_UNBLOCK = Sigmasks.SIG_UNBLOCK + SIG_SETMASK = Sigmasks.SIG_SETMASK + def alarm(__seconds: int) -> int: ... + def getitimer(__which: int) -> tuple[float, float]: ... + def pause() -> None: ... + def pthread_kill(__thread_id: int, __signalnum: int) -> None: ... + if sys.version_info >= (3, 10): # arguments changed in 3.10.2 + def pthread_sigmask(how: int, mask: Iterable[int]) -> set[_SIGNUM]: ... + else: + def pthread_sigmask(__how: int, __mask: Iterable[int]) -> set[_SIGNUM]: ... + + def setitimer(__which: int, __seconds: float, __interval: float = 0.0) -> tuple[float, float]: ... + def siginterrupt(__signalnum: int, __flag: bool) -> None: ... + def sigpending() -> Any: ... + if sys.version_info >= (3, 10): # argument changed in 3.10.2 + def sigwait(sigset: Iterable[int]) -> _SIGNUM: ... + else: + def sigwait(__sigset: Iterable[int]) -> _SIGNUM: ... + if sys.platform != "darwin": + SIGCLD: Signals + SIGPOLL: Signals + SIGPWR: Signals + SIGRTMAX: Signals + SIGRTMIN: Signals + @final + class struct_siginfo(structseq[int], tuple[int, int, int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("si_signo", "si_code", "si_errno", "si_pid", "si_uid", "si_status", "si_band") + @property + def si_signo(self) -> int: ... + @property + def si_code(self) -> int: ... + @property + def si_errno(self) -> int: ... + @property + def si_pid(self) -> int: ... + @property + def si_uid(self) -> int: ... + @property + def si_status(self) -> int: ... + @property + def si_band(self) -> int: ... + + def sigtimedwait(sigset: Iterable[int], timeout: float) -> struct_siginfo | None: ... + def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo: ... + +if sys.version_info >= (3, 8): + def strsignal(__signalnum: _SIGNUM) -> str | None: ... + def valid_signals() -> set[Signals]: ... + def raise_signal(__signalnum: _SIGNUM) -> None: ... + +def set_wakeup_fd(fd: int, *, warn_on_full_buffer: bool = ...) -> int: ... + +if sys.version_info >= (3, 9): + if sys.platform == "linux": + def pidfd_send_signal(__pidfd: int, __sig: int, __siginfo: None = None, __flags: int = ...) -> None: ... 
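Note on the signal stub above (editorial illustration only, not part of the vendored typeshed data): the `_HANDLER` alias deliberately admits plain callables, `Handlers` values, raw ints, and `None`, because that is what `signal.getsignal()` can return and what `signal.signal()` accepts at runtime. A minimal sketch of the shapes the stub is meant to cover, using only standard-library calls:

    import signal
    from types import FrameType

    def on_interrupt(signum: int, frame: FrameType | None) -> None:
        # A plain callable handler: matches Callable[[int, FrameType | None], Any].
        print(f"caught signal {signum}")

    previous = signal.signal(signal.SIGINT, on_interrupt)   # returns the old handler (_HANDLER)
    signal.signal(signal.SIGINT, signal.SIG_IGN)             # a Handlers value is also accepted
    if previous is not None:                                 # getsignal()/signal() may report None
        signal.signal(signal.SIGINT, previous)               # restore whatever was installed before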
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/site.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/site.pyi new file mode 100644 index 00000000..a8c6bcb4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/site.pyi @@ -0,0 +1,27 @@ +from _typeshed import StrPath +from collections.abc import Iterable + +PREFIXES: list[str] +ENABLE_USER_SITE: bool | None +USER_SITE: str | None +USER_BASE: str | None + +def main() -> None: ... +def abs_paths() -> None: ... # undocumented +def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: ... # undocumented +def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: ... +def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: ... # undocumented +def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: ... # undocumented +def check_enableusersite() -> bool | None: ... # undocumented +def enablerlcompleter() -> None: ... # undocumented +def execsitecustomize() -> None: ... # undocumented +def execusercustomize() -> None: ... # undocumented +def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: ... +def getuserbase() -> str: ... +def getusersitepackages() -> str: ... +def makepath(*paths: StrPath) -> tuple[str, str]: ... # undocumented +def removeduppaths() -> set[str]: ... # undocumented +def setcopyright() -> None: ... # undocumented +def sethelper() -> None: ... # undocumented +def setquit() -> None: ... # undocumented +def venv(known_paths: set[str] | None) -> set[str] | None: ... # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/smtpd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/smtpd.pyi new file mode 100644 index 00000000..7392bd51 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/smtpd.pyi @@ -0,0 +1,91 @@ +import asynchat +import asyncore +import socket +import sys +from collections import defaultdict +from typing import Any +from typing_extensions import TypeAlias + +if sys.version_info >= (3, 11): + __all__ = ["SMTPChannel", "SMTPServer", "DebuggingServer", "PureProxy"] +else: + __all__ = ["SMTPChannel", "SMTPServer", "DebuggingServer", "PureProxy", "MailmanProxy"] + +_Address: TypeAlias = tuple[str, int] # (host, port) + +class SMTPChannel(asynchat.async_chat): + COMMAND: int + DATA: int + + command_size_limits: defaultdict[str, int] + smtp_server: SMTPServer + conn: socket.socket + addr: Any + received_lines: list[str] + smtp_state: int + seen_greeting: str + mailfrom: str + rcpttos: list[str] + received_data: str + fqdn: str + peer: str + + command_size_limit: int + data_size_limit: int + + enable_SMTPUTF8: bool + @property + def max_command_size_limit(self) -> int: ... + def __init__( + self, + server: SMTPServer, + conn: socket.socket, + addr: Any, + data_size_limit: int = 33554432, + map: asyncore._MapType | None = None, + enable_SMTPUTF8: bool = False, + decode_data: bool = False, + ) -> None: ... + # base asynchat.async_chat.push() accepts bytes + def push(self, msg: str) -> None: ... # type: ignore[override] + def collect_incoming_data(self, data: bytes) -> None: ... + def found_terminator(self) -> None: ... + def smtp_HELO(self, arg: str) -> None: ... + def smtp_NOOP(self, arg: str) -> None: ... + def smtp_QUIT(self, arg: str) -> None: ... 
+ def smtp_MAIL(self, arg: str) -> None: ... + def smtp_RCPT(self, arg: str) -> None: ... + def smtp_RSET(self, arg: str) -> None: ... + def smtp_DATA(self, arg: str) -> None: ... + def smtp_EHLO(self, arg: str) -> None: ... + def smtp_HELP(self, arg: str) -> None: ... + def smtp_VRFY(self, arg: str) -> None: ... + def smtp_EXPN(self, arg: str) -> None: ... + +class SMTPServer(asyncore.dispatcher): + channel_class: type[SMTPChannel] + + data_size_limit: int + enable_SMTPUTF8: bool + def __init__( + self, + localaddr: _Address, + remoteaddr: _Address, + data_size_limit: int = 33554432, + map: asyncore._MapType | None = None, + enable_SMTPUTF8: bool = False, + decode_data: bool = False, + ) -> None: ... + def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... + def process_message( + self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str, **kwargs: Any + ) -> str | None: ... + +class DebuggingServer(SMTPServer): ... + +class PureProxy(SMTPServer): + def process_message(self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str) -> str | None: ... # type: ignore[override] + +if sys.version_info < (3, 11): + class MailmanProxy(PureProxy): + def process_message(self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str) -> str | None: ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/smtplib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/smtplib.pyi new file mode 100644 index 00000000..0d7595fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/smtplib.pyi @@ -0,0 +1,186 @@ +import sys +from _socket import _Address as _SourceAddress +from _typeshed import ReadableBuffer, _BufferWithLen +from collections.abc import Sequence +from email.message import Message as _Message +from re import Pattern +from socket import socket +from ssl import SSLContext +from types import TracebackType +from typing import Any, Protocol, overload +from typing_extensions import Self, TypeAlias + +__all__ = [ + "SMTPException", + "SMTPServerDisconnected", + "SMTPResponseException", + "SMTPSenderRefused", + "SMTPRecipientsRefused", + "SMTPDataError", + "SMTPConnectError", + "SMTPHeloError", + "SMTPAuthenticationError", + "quoteaddr", + "quotedata", + "SMTP", + "SMTP_SSL", + "SMTPNotSupportedError", +] + +_Reply: TypeAlias = tuple[int, bytes] +_SendErrs: TypeAlias = dict[str, _Reply] + +SMTP_PORT: int +SMTP_SSL_PORT: int +CRLF: str +bCRLF: bytes + +OLDSTYLE_AUTH: Pattern[str] + +class SMTPException(OSError): ... +class SMTPNotSupportedError(SMTPException): ... +class SMTPServerDisconnected(SMTPException): ... + +class SMTPResponseException(SMTPException): + smtp_code: int + smtp_error: bytes | str + args: tuple[int, bytes | str] | tuple[int, bytes, str] + def __init__(self, code: int, msg: bytes | str) -> None: ... + +class SMTPSenderRefused(SMTPResponseException): + smtp_error: bytes + sender: str + args: tuple[int, bytes, str] + def __init__(self, code: int, msg: bytes, sender: str) -> None: ... + +class SMTPRecipientsRefused(SMTPException): + recipients: _SendErrs + args: tuple[_SendErrs] + def __init__(self, recipients: _SendErrs) -> None: ... + +class SMTPDataError(SMTPResponseException): ... +class SMTPConnectError(SMTPResponseException): ... +class SMTPHeloError(SMTPResponseException): ... +class SMTPAuthenticationError(SMTPResponseException): ... + +def quoteaddr(addrstring: str) -> str: ... 
+def quotedata(data: str) -> str: ... + +class _AuthObject(Protocol): + @overload + def __call__(self, challenge: None = None) -> str | None: ... + @overload + def __call__(self, challenge: bytes) -> str: ... + +class SMTP: + debuglevel: int + sock: socket | None + # Type of file should match what socket.makefile() returns + file: Any | None + helo_resp: bytes | None + ehlo_msg: str + ehlo_resp: bytes | None + does_esmtp: bool + default_port: int + timeout: float + esmtp_features: dict[str, str] + command_encoding: str + source_address: _SourceAddress | None + local_hostname: str + def __init__( + self, + host: str = "", + port: int = 0, + local_hostname: str | None = None, + timeout: float = ..., + source_address: _SourceAddress | None = None, + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None + ) -> None: ... + def set_debuglevel(self, debuglevel: int) -> None: ... + def connect(self, host: str = "localhost", port: int = 0, source_address: _SourceAddress | None = None) -> _Reply: ... + def send(self, s: ReadableBuffer | str) -> None: ... + def putcmd(self, cmd: str, args: str = "") -> None: ... + def getreply(self) -> _Reply: ... + def docmd(self, cmd: str, args: str = "") -> _Reply: ... + def helo(self, name: str = "") -> _Reply: ... + def ehlo(self, name: str = "") -> _Reply: ... + def has_extn(self, opt: str) -> bool: ... + def help(self, args: str = "") -> bytes: ... + def rset(self) -> _Reply: ... + def noop(self) -> _Reply: ... + def mail(self, sender: str, options: Sequence[str] = ...) -> _Reply: ... + def rcpt(self, recip: str, options: Sequence[str] = ...) -> _Reply: ... + def data(self, msg: ReadableBuffer | str) -> _Reply: ... + def verify(self, address: str) -> _Reply: ... + vrfy = verify + def expn(self, address: str) -> _Reply: ... + def ehlo_or_helo_if_needed(self) -> None: ... + user: str + password: str + def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = True) -> _Reply: ... + @overload + def auth_cram_md5(self, challenge: None = None) -> None: ... + @overload + def auth_cram_md5(self, challenge: ReadableBuffer) -> str: ... + def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: ... + def auth_login(self, challenge: ReadableBuffer | None = None) -> str: ... + def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: ... + def starttls(self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None) -> _Reply: ... + def sendmail( + self, + from_addr: str, + to_addrs: str | Sequence[str], + msg: _BufferWithLen | str, + mail_options: Sequence[str] = ..., + rcpt_options: Sequence[str] = ..., + ) -> _SendErrs: ... + def send_message( + self, + msg: _Message, + from_addr: str | None = None, + to_addrs: str | Sequence[str] | None = None, + mail_options: Sequence[str] = ..., + rcpt_options: Sequence[str] = ..., + ) -> _SendErrs: ... + def close(self) -> None: ... + def quit(self) -> _Reply: ... + +class SMTP_SSL(SMTP): + keyfile: str | None + certfile: str | None + context: SSLContext + def __init__( + self, + host: str = "", + port: int = 0, + local_hostname: str | None = None, + keyfile: str | None = None, + certfile: str | None = None, + timeout: float = ..., + source_address: _SourceAddress | None = None, + context: SSLContext | None = None, + ) -> None: ... 
+ +LMTP_PORT: int + +class LMTP(SMTP): + if sys.version_info >= (3, 9): + def __init__( + self, + host: str = "", + port: int = 2003, + local_hostname: str | None = None, + source_address: _SourceAddress | None = None, + timeout: float = ..., + ) -> None: ... + else: + def __init__( + self, + host: str = "", + port: int = 2003, + local_hostname: str | None = None, + source_address: _SourceAddress | None = None, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sndhdr.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sndhdr.pyi new file mode 100644 index 00000000..f4d48760 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sndhdr.pyi @@ -0,0 +1,14 @@ +from _typeshed import StrOrBytesPath +from typing import NamedTuple + +__all__ = ["what", "whathdr"] + +class SndHeaders(NamedTuple): + filetype: str + framerate: int + nchannels: int + nframes: int + sampwidth: int | str + +def what(filename: StrOrBytesPath) -> SndHeaders | None: ... +def whathdr(filename: StrOrBytesPath) -> SndHeaders | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/socket.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/socket.pyi new file mode 100644 index 00000000..dbc1d46e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/socket.pyi @@ -0,0 +1,793 @@ +# Ideally, we'd just do "from _socket import *". Unfortunately, socket +# overrides some definitions from _socket incompatibly. mypy incorrectly +# prefers the definitions from _socket over those defined here. +import _socket +import sys +from _socket import ( + _FD, + EAI_AGAIN as EAI_AGAIN, + EAI_BADFLAGS as EAI_BADFLAGS, + EAI_FAIL as EAI_FAIL, + EAI_FAMILY as EAI_FAMILY, + EAI_MEMORY as EAI_MEMORY, + EAI_NODATA as EAI_NODATA, + EAI_NONAME as EAI_NONAME, + EAI_SERVICE as EAI_SERVICE, + EAI_SOCKTYPE as EAI_SOCKTYPE, + INADDR_ALLHOSTS_GROUP as INADDR_ALLHOSTS_GROUP, + INADDR_ANY as INADDR_ANY, + INADDR_BROADCAST as INADDR_BROADCAST, + INADDR_LOOPBACK as INADDR_LOOPBACK, + INADDR_MAX_LOCAL_GROUP as INADDR_MAX_LOCAL_GROUP, + INADDR_NONE as INADDR_NONE, + INADDR_UNSPEC_GROUP as INADDR_UNSPEC_GROUP, + IP_ADD_MEMBERSHIP as IP_ADD_MEMBERSHIP, + IP_DROP_MEMBERSHIP as IP_DROP_MEMBERSHIP, + IP_HDRINCL as IP_HDRINCL, + IP_MULTICAST_IF as IP_MULTICAST_IF, + IP_MULTICAST_LOOP as IP_MULTICAST_LOOP, + IP_MULTICAST_TTL as IP_MULTICAST_TTL, + IP_OPTIONS as IP_OPTIONS, + IP_RECVDSTADDR as IP_RECVDSTADDR, + IP_TOS as IP_TOS, + IP_TTL as IP_TTL, + IPPORT_RESERVED as IPPORT_RESERVED, + IPPORT_USERRESERVED as IPPORT_USERRESERVED, + IPPROTO_ICMP as IPPROTO_ICMP, + IPPROTO_IP as IPPROTO_IP, + IPPROTO_RAW as IPPROTO_RAW, + IPPROTO_TCP as IPPROTO_TCP, + IPPROTO_UDP as IPPROTO_UDP, + IPV6_CHECKSUM as IPV6_CHECKSUM, + IPV6_JOIN_GROUP as IPV6_JOIN_GROUP, + IPV6_LEAVE_GROUP as IPV6_LEAVE_GROUP, + IPV6_MULTICAST_HOPS as IPV6_MULTICAST_HOPS, + IPV6_MULTICAST_IF as IPV6_MULTICAST_IF, + IPV6_MULTICAST_LOOP as IPV6_MULTICAST_LOOP, + IPV6_RECVTCLASS as IPV6_RECVTCLASS, + IPV6_TCLASS as IPV6_TCLASS, + IPV6_UNICAST_HOPS as IPV6_UNICAST_HOPS, + IPV6_V6ONLY as IPV6_V6ONLY, + NI_DGRAM as NI_DGRAM, + NI_MAXHOST as NI_MAXHOST, + NI_MAXSERV as NI_MAXSERV, + NI_NAMEREQD as NI_NAMEREQD, + NI_NOFQDN as NI_NOFQDN, + NI_NUMERICHOST as NI_NUMERICHOST, + NI_NUMERICSERV as NI_NUMERICSERV, + SHUT_RD as SHUT_RD, + SHUT_RDWR as SHUT_RDWR, + SHUT_WR as SHUT_WR, + 
SO_ACCEPTCONN as SO_ACCEPTCONN, + SO_BROADCAST as SO_BROADCAST, + SO_DEBUG as SO_DEBUG, + SO_DONTROUTE as SO_DONTROUTE, + SO_ERROR as SO_ERROR, + SO_KEEPALIVE as SO_KEEPALIVE, + SO_LINGER as SO_LINGER, + SO_OOBINLINE as SO_OOBINLINE, + SO_RCVBUF as SO_RCVBUF, + SO_RCVLOWAT as SO_RCVLOWAT, + SO_RCVTIMEO as SO_RCVTIMEO, + SO_REUSEADDR as SO_REUSEADDR, + SO_SNDBUF as SO_SNDBUF, + SO_SNDLOWAT as SO_SNDLOWAT, + SO_SNDTIMEO as SO_SNDTIMEO, + SO_TYPE as SO_TYPE, + SO_USELOOPBACK as SO_USELOOPBACK, + SOL_IP as SOL_IP, + SOL_SOCKET as SOL_SOCKET, + SOL_TCP as SOL_TCP, + SOL_UDP as SOL_UDP, + SOMAXCONN as SOMAXCONN, + TCP_FASTOPEN as TCP_FASTOPEN, + TCP_KEEPCNT as TCP_KEEPCNT, + TCP_MAXSEG as TCP_MAXSEG, + TCP_NODELAY as TCP_NODELAY, + SocketType as SocketType, + _Address as _Address, + _RetAddress as _RetAddress, + dup as dup, + error as error, + gaierror as gaierror, + getdefaulttimeout as getdefaulttimeout, + gethostbyaddr as gethostbyaddr, + gethostbyname as gethostbyname, + gethostbyname_ex as gethostbyname_ex, + gethostname as gethostname, + getnameinfo as getnameinfo, + getprotobyname as getprotobyname, + getservbyname as getservbyname, + getservbyport as getservbyport, + has_ipv6 as has_ipv6, + herror as herror, + htonl as htonl, + htons as htons, + inet_aton as inet_aton, + inet_ntoa as inet_ntoa, + inet_ntop as inet_ntop, + inet_pton as inet_pton, + ntohl as ntohl, + ntohs as ntohs, + setdefaulttimeout as setdefaulttimeout, + timeout as timeout, +) +from _typeshed import ReadableBuffer, Unused, WriteableBuffer +from collections.abc import Iterable +from enum import IntEnum, IntFlag +from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper +from typing import Any, Protocol, overload +from typing_extensions import Literal, Self + +if sys.platform != "darwin" or sys.version_info >= (3, 9): + from _socket import ( + IPV6_DONTFRAG as IPV6_DONTFRAG, + IPV6_HOPLIMIT as IPV6_HOPLIMIT, + IPV6_HOPOPTS as IPV6_HOPOPTS, + IPV6_PKTINFO as IPV6_PKTINFO, + IPV6_RECVRTHDR as IPV6_RECVRTHDR, + IPV6_RTHDR as IPV6_RTHDR, + ) + +if sys.platform != "darwin": + from _socket import SO_EXCLUSIVEADDRUSE as SO_EXCLUSIVEADDRUSE + +if sys.version_info >= (3, 10): + from _socket import IP_RECVTOS as IP_RECVTOS +elif sys.platform != "darwin" and sys.platform != "win32": + from _socket import IP_RECVTOS as IP_RECVTOS + +from _socket import TCP_KEEPINTVL as TCP_KEEPINTVL, close as close + +if sys.platform != "darwin": + from _socket import TCP_KEEPIDLE as TCP_KEEPIDLE + +if sys.platform != "win32" or sys.version_info >= (3, 8): + from _socket import ( + IPPROTO_AH as IPPROTO_AH, + IPPROTO_DSTOPTS as IPPROTO_DSTOPTS, + IPPROTO_EGP as IPPROTO_EGP, + IPPROTO_ESP as IPPROTO_ESP, + IPPROTO_FRAGMENT as IPPROTO_FRAGMENT, + IPPROTO_GGP as IPPROTO_GGP, + IPPROTO_HOPOPTS as IPPROTO_HOPOPTS, + IPPROTO_ICMPV6 as IPPROTO_ICMPV6, + IPPROTO_IDP as IPPROTO_IDP, + IPPROTO_IGMP as IPPROTO_IGMP, + IPPROTO_IPV4 as IPPROTO_IPV4, + IPPROTO_IPV6 as IPPROTO_IPV6, + IPPROTO_MAX as IPPROTO_MAX, + IPPROTO_ND as IPPROTO_ND, + IPPROTO_NONE as IPPROTO_NONE, + IPPROTO_PIM as IPPROTO_PIM, + IPPROTO_PUP as IPPROTO_PUP, + IPPROTO_ROUTING as IPPROTO_ROUTING, + IPPROTO_SCTP as IPPROTO_SCTP, + ) + + if sys.platform != "darwin": + from _socket import ( + IPPROTO_CBT as IPPROTO_CBT, + IPPROTO_ICLFXBM as IPPROTO_ICLFXBM, + IPPROTO_IGP as IPPROTO_IGP, + IPPROTO_L2TP as IPPROTO_L2TP, + IPPROTO_PGM as IPPROTO_PGM, + IPPROTO_RDP as IPPROTO_RDP, + IPPROTO_ST as IPPROTO_ST, + ) +if sys.platform != "win32" and sys.platform != 
"darwin": + from _socket import ( + IP_TRANSPARENT as IP_TRANSPARENT, + IPPROTO_BIP as IPPROTO_BIP, + IPPROTO_MOBILE as IPPROTO_MOBILE, + IPPROTO_VRRP as IPPROTO_VRRP, + IPX_TYPE as IPX_TYPE, + SCM_CREDENTIALS as SCM_CREDENTIALS, + SO_BINDTODEVICE as SO_BINDTODEVICE, + SO_MARK as SO_MARK, + SO_PASSCRED as SO_PASSCRED, + SO_PEERCRED as SO_PEERCRED, + SO_PRIORITY as SO_PRIORITY, + SO_SETFIB as SO_SETFIB, + SOL_ATALK as SOL_ATALK, + SOL_AX25 as SOL_AX25, + SOL_HCI as SOL_HCI, + SOL_IPX as SOL_IPX, + SOL_NETROM as SOL_NETROM, + SOL_ROSE as SOL_ROSE, + TCP_CORK as TCP_CORK, + TCP_DEFER_ACCEPT as TCP_DEFER_ACCEPT, + TCP_INFO as TCP_INFO, + TCP_LINGER2 as TCP_LINGER2, + TCP_QUICKACK as TCP_QUICKACK, + TCP_SYNCNT as TCP_SYNCNT, + TCP_WINDOW_CLAMP as TCP_WINDOW_CLAMP, + ) +if sys.platform != "win32": + from _socket import ( + CMSG_LEN as CMSG_LEN, + CMSG_SPACE as CMSG_SPACE, + EAI_ADDRFAMILY as EAI_ADDRFAMILY, + EAI_BADHINTS as EAI_BADHINTS, + EAI_MAX as EAI_MAX, + EAI_OVERFLOW as EAI_OVERFLOW, + EAI_PROTOCOL as EAI_PROTOCOL, + EAI_SYSTEM as EAI_SYSTEM, + IP_DEFAULT_MULTICAST_LOOP as IP_DEFAULT_MULTICAST_LOOP, + IP_DEFAULT_MULTICAST_TTL as IP_DEFAULT_MULTICAST_TTL, + IP_MAX_MEMBERSHIPS as IP_MAX_MEMBERSHIPS, + IP_RECVOPTS as IP_RECVOPTS, + IP_RECVRETOPTS as IP_RECVRETOPTS, + IP_RETOPTS as IP_RETOPTS, + IPPROTO_EON as IPPROTO_EON, + IPPROTO_GRE as IPPROTO_GRE, + IPPROTO_HELLO as IPPROTO_HELLO, + IPPROTO_IPCOMP as IPPROTO_IPCOMP, + IPPROTO_IPIP as IPPROTO_IPIP, + IPPROTO_RSVP as IPPROTO_RSVP, + IPPROTO_TP as IPPROTO_TP, + IPPROTO_XTP as IPPROTO_XTP, + IPV6_RTHDR_TYPE_0 as IPV6_RTHDR_TYPE_0, + LOCAL_PEERCRED as LOCAL_PEERCRED, + SCM_CREDS as SCM_CREDS, + SCM_RIGHTS as SCM_RIGHTS, + SO_REUSEPORT as SO_REUSEPORT, + sethostname as sethostname, + ) + + if sys.platform != "darwin" or sys.version_info >= (3, 9): + from _socket import ( + IPV6_DSTOPTS as IPV6_DSTOPTS, + IPV6_NEXTHOP as IPV6_NEXTHOP, + IPV6_PATHMTU as IPV6_PATHMTU, + IPV6_RECVDSTOPTS as IPV6_RECVDSTOPTS, + IPV6_RECVHOPLIMIT as IPV6_RECVHOPLIMIT, + IPV6_RECVHOPOPTS as IPV6_RECVHOPOPTS, + IPV6_RECVPATHMTU as IPV6_RECVPATHMTU, + IPV6_RECVPKTINFO as IPV6_RECVPKTINFO, + IPV6_RTHDRDSTOPTS as IPV6_RTHDRDSTOPTS, + IPV6_USE_MIN_MTU as IPV6_USE_MIN_MTU, + ) + +if sys.platform != "win32" or sys.version_info >= (3, 8): + from _socket import if_indextoname as if_indextoname, if_nameindex as if_nameindex, if_nametoindex as if_nametoindex + +if sys.platform != "darwin": + if sys.platform != "win32" or sys.version_info >= (3, 9): + from _socket import BDADDR_ANY as BDADDR_ANY, BDADDR_LOCAL as BDADDR_LOCAL, BTPROTO_RFCOMM as BTPROTO_RFCOMM + +if sys.platform == "darwin" and sys.version_info >= (3, 10): + from _socket import TCP_KEEPALIVE as TCP_KEEPALIVE + +if sys.platform == "linux": + from _socket import ( + ALG_OP_DECRYPT as ALG_OP_DECRYPT, + ALG_OP_ENCRYPT as ALG_OP_ENCRYPT, + ALG_OP_SIGN as ALG_OP_SIGN, + ALG_OP_VERIFY as ALG_OP_VERIFY, + ALG_SET_AEAD_ASSOCLEN as ALG_SET_AEAD_ASSOCLEN, + ALG_SET_AEAD_AUTHSIZE as ALG_SET_AEAD_AUTHSIZE, + ALG_SET_IV as ALG_SET_IV, + ALG_SET_KEY as ALG_SET_KEY, + ALG_SET_OP as ALG_SET_OP, + ALG_SET_PUBKEY as ALG_SET_PUBKEY, + CAN_BCM as CAN_BCM, + CAN_BCM_RX_CHANGED as CAN_BCM_RX_CHANGED, + CAN_BCM_RX_DELETE as CAN_BCM_RX_DELETE, + CAN_BCM_RX_READ as CAN_BCM_RX_READ, + CAN_BCM_RX_SETUP as CAN_BCM_RX_SETUP, + CAN_BCM_RX_STATUS as CAN_BCM_RX_STATUS, + CAN_BCM_RX_TIMEOUT as CAN_BCM_RX_TIMEOUT, + CAN_BCM_TX_DELETE as CAN_BCM_TX_DELETE, + CAN_BCM_TX_EXPIRED as CAN_BCM_TX_EXPIRED, + CAN_BCM_TX_READ as CAN_BCM_TX_READ, + 
CAN_BCM_TX_SEND as CAN_BCM_TX_SEND, + CAN_BCM_TX_SETUP as CAN_BCM_TX_SETUP, + CAN_BCM_TX_STATUS as CAN_BCM_TX_STATUS, + CAN_EFF_FLAG as CAN_EFF_FLAG, + CAN_EFF_MASK as CAN_EFF_MASK, + CAN_ERR_FLAG as CAN_ERR_FLAG, + CAN_ERR_MASK as CAN_ERR_MASK, + CAN_RAW as CAN_RAW, + CAN_RAW_ERR_FILTER as CAN_RAW_ERR_FILTER, + CAN_RAW_FD_FRAMES as CAN_RAW_FD_FRAMES, + CAN_RAW_FILTER as CAN_RAW_FILTER, + CAN_RAW_LOOPBACK as CAN_RAW_LOOPBACK, + CAN_RAW_RECV_OWN_MSGS as CAN_RAW_RECV_OWN_MSGS, + CAN_RTR_FLAG as CAN_RTR_FLAG, + CAN_SFF_MASK as CAN_SFF_MASK, + NETLINK_ARPD as NETLINK_ARPD, + NETLINK_CRYPTO as NETLINK_CRYPTO, + NETLINK_DNRTMSG as NETLINK_DNRTMSG, + NETLINK_FIREWALL as NETLINK_FIREWALL, + NETLINK_IP6_FW as NETLINK_IP6_FW, + NETLINK_NFLOG as NETLINK_NFLOG, + NETLINK_ROUTE as NETLINK_ROUTE, + NETLINK_ROUTE6 as NETLINK_ROUTE6, + NETLINK_SKIP as NETLINK_SKIP, + NETLINK_TAPBASE as NETLINK_TAPBASE, + NETLINK_TCPDIAG as NETLINK_TCPDIAG, + NETLINK_USERSOCK as NETLINK_USERSOCK, + NETLINK_W1 as NETLINK_W1, + NETLINK_XFRM as NETLINK_XFRM, + PACKET_BROADCAST as PACKET_BROADCAST, + PACKET_FASTROUTE as PACKET_FASTROUTE, + PACKET_HOST as PACKET_HOST, + PACKET_LOOPBACK as PACKET_LOOPBACK, + PACKET_MULTICAST as PACKET_MULTICAST, + PACKET_OTHERHOST as PACKET_OTHERHOST, + PACKET_OUTGOING as PACKET_OUTGOING, + PF_CAN as PF_CAN, + PF_PACKET as PF_PACKET, + PF_RDS as PF_RDS, + RDS_CANCEL_SENT_TO as RDS_CANCEL_SENT_TO, + RDS_CMSG_RDMA_ARGS as RDS_CMSG_RDMA_ARGS, + RDS_CMSG_RDMA_DEST as RDS_CMSG_RDMA_DEST, + RDS_CMSG_RDMA_MAP as RDS_CMSG_RDMA_MAP, + RDS_CMSG_RDMA_STATUS as RDS_CMSG_RDMA_STATUS, + RDS_CMSG_RDMA_UPDATE as RDS_CMSG_RDMA_UPDATE, + RDS_CONG_MONITOR as RDS_CONG_MONITOR, + RDS_FREE_MR as RDS_FREE_MR, + RDS_GET_MR as RDS_GET_MR, + RDS_GET_MR_FOR_DEST as RDS_GET_MR_FOR_DEST, + RDS_RDMA_DONTWAIT as RDS_RDMA_DONTWAIT, + RDS_RDMA_FENCE as RDS_RDMA_FENCE, + RDS_RDMA_INVALIDATE as RDS_RDMA_INVALIDATE, + RDS_RDMA_NOTIFY_ME as RDS_RDMA_NOTIFY_ME, + RDS_RDMA_READWRITE as RDS_RDMA_READWRITE, + RDS_RDMA_SILENT as RDS_RDMA_SILENT, + RDS_RDMA_USE_ONCE as RDS_RDMA_USE_ONCE, + RDS_RECVERR as RDS_RECVERR, + SOL_ALG as SOL_ALG, + SOL_CAN_BASE as SOL_CAN_BASE, + SOL_CAN_RAW as SOL_CAN_RAW, + SOL_RDS as SOL_RDS, + SOL_TIPC as SOL_TIPC, + TIPC_ADDR_ID as TIPC_ADDR_ID, + TIPC_ADDR_NAME as TIPC_ADDR_NAME, + TIPC_ADDR_NAMESEQ as TIPC_ADDR_NAMESEQ, + TIPC_CFG_SRV as TIPC_CFG_SRV, + TIPC_CLUSTER_SCOPE as TIPC_CLUSTER_SCOPE, + TIPC_CONN_TIMEOUT as TIPC_CONN_TIMEOUT, + TIPC_CRITICAL_IMPORTANCE as TIPC_CRITICAL_IMPORTANCE, + TIPC_DEST_DROPPABLE as TIPC_DEST_DROPPABLE, + TIPC_HIGH_IMPORTANCE as TIPC_HIGH_IMPORTANCE, + TIPC_IMPORTANCE as TIPC_IMPORTANCE, + TIPC_LOW_IMPORTANCE as TIPC_LOW_IMPORTANCE, + TIPC_MEDIUM_IMPORTANCE as TIPC_MEDIUM_IMPORTANCE, + TIPC_NODE_SCOPE as TIPC_NODE_SCOPE, + TIPC_PUBLISHED as TIPC_PUBLISHED, + TIPC_SRC_DROPPABLE as TIPC_SRC_DROPPABLE, + TIPC_SUB_CANCEL as TIPC_SUB_CANCEL, + TIPC_SUB_PORTS as TIPC_SUB_PORTS, + TIPC_SUB_SERVICE as TIPC_SUB_SERVICE, + TIPC_SUBSCR_TIMEOUT as TIPC_SUBSCR_TIMEOUT, + TIPC_TOP_SRV as TIPC_TOP_SRV, + TIPC_WAIT_FOREVER as TIPC_WAIT_FOREVER, + TIPC_WITHDRAWN as TIPC_WITHDRAWN, + TIPC_ZONE_SCOPE as TIPC_ZONE_SCOPE, + ) +if sys.platform == "linux": + from _socket import ( + CAN_ISOTP as CAN_ISOTP, + IOCTL_VM_SOCKETS_GET_LOCAL_CID as IOCTL_VM_SOCKETS_GET_LOCAL_CID, + SO_VM_SOCKETS_BUFFER_MAX_SIZE as SO_VM_SOCKETS_BUFFER_MAX_SIZE, + SO_VM_SOCKETS_BUFFER_MIN_SIZE as SO_VM_SOCKETS_BUFFER_MIN_SIZE, + SO_VM_SOCKETS_BUFFER_SIZE as SO_VM_SOCKETS_BUFFER_SIZE, + VM_SOCKETS_INVALID_VERSION as 
VM_SOCKETS_INVALID_VERSION, + VMADDR_CID_ANY as VMADDR_CID_ANY, + VMADDR_CID_HOST as VMADDR_CID_HOST, + VMADDR_PORT_ANY as VMADDR_PORT_ANY, + ) +if sys.platform != "win32": + from _socket import TCP_NOTSENT_LOWAT as TCP_NOTSENT_LOWAT +if sys.platform == "linux" and sys.version_info >= (3, 8): + from _socket import ( + CAN_BCM_CAN_FD_FRAME as CAN_BCM_CAN_FD_FRAME, + CAN_BCM_RX_ANNOUNCE_RESUME as CAN_BCM_RX_ANNOUNCE_RESUME, + CAN_BCM_RX_CHECK_DLC as CAN_BCM_RX_CHECK_DLC, + CAN_BCM_RX_FILTER_ID as CAN_BCM_RX_FILTER_ID, + CAN_BCM_RX_NO_AUTOTIMER as CAN_BCM_RX_NO_AUTOTIMER, + CAN_BCM_RX_RTR_FRAME as CAN_BCM_RX_RTR_FRAME, + CAN_BCM_SETTIMER as CAN_BCM_SETTIMER, + CAN_BCM_STARTTIMER as CAN_BCM_STARTTIMER, + CAN_BCM_TX_ANNOUNCE as CAN_BCM_TX_ANNOUNCE, + CAN_BCM_TX_COUNTEVT as CAN_BCM_TX_COUNTEVT, + CAN_BCM_TX_CP_CAN_ID as CAN_BCM_TX_CP_CAN_ID, + CAN_BCM_TX_RESET_MULTI_IDX as CAN_BCM_TX_RESET_MULTI_IDX, + ) +if sys.platform == "linux" and sys.version_info >= (3, 9): + from _socket import ( + CAN_J1939 as CAN_J1939, + CAN_RAW_JOIN_FILTERS as CAN_RAW_JOIN_FILTERS, + J1939_EE_INFO_NONE as J1939_EE_INFO_NONE, + J1939_EE_INFO_TX_ABORT as J1939_EE_INFO_TX_ABORT, + J1939_FILTER_MAX as J1939_FILTER_MAX, + J1939_IDLE_ADDR as J1939_IDLE_ADDR, + J1939_MAX_UNICAST_ADDR as J1939_MAX_UNICAST_ADDR, + J1939_NLA_BYTES_ACKED as J1939_NLA_BYTES_ACKED, + J1939_NLA_PAD as J1939_NLA_PAD, + J1939_NO_ADDR as J1939_NO_ADDR, + J1939_NO_NAME as J1939_NO_NAME, + J1939_NO_PGN as J1939_NO_PGN, + J1939_PGN_ADDRESS_CLAIMED as J1939_PGN_ADDRESS_CLAIMED, + J1939_PGN_ADDRESS_COMMANDED as J1939_PGN_ADDRESS_COMMANDED, + J1939_PGN_MAX as J1939_PGN_MAX, + J1939_PGN_PDU1_MAX as J1939_PGN_PDU1_MAX, + J1939_PGN_REQUEST as J1939_PGN_REQUEST, + SCM_J1939_DEST_ADDR as SCM_J1939_DEST_ADDR, + SCM_J1939_DEST_NAME as SCM_J1939_DEST_NAME, + SCM_J1939_ERRQUEUE as SCM_J1939_ERRQUEUE, + SCM_J1939_PRIO as SCM_J1939_PRIO, + SO_J1939_ERRQUEUE as SO_J1939_ERRQUEUE, + SO_J1939_FILTER as SO_J1939_FILTER, + SO_J1939_PROMISC as SO_J1939_PROMISC, + SO_J1939_SEND_PRIO as SO_J1939_SEND_PRIO, + ) +if sys.platform == "linux" and sys.version_info >= (3, 10): + from _socket import IPPROTO_MPTCP as IPPROTO_MPTCP +if sys.platform == "linux" and sys.version_info >= (3, 11): + from _socket import SO_INCOMING_CPU as SO_INCOMING_CPU +if sys.platform == "win32": + from _socket import ( + RCVALL_MAX as RCVALL_MAX, + RCVALL_OFF as RCVALL_OFF, + RCVALL_ON as RCVALL_ON, + RCVALL_SOCKETLEVELONLY as RCVALL_SOCKETLEVELONLY, + SIO_KEEPALIVE_VALS as SIO_KEEPALIVE_VALS, + SIO_LOOPBACK_FAST_PATH as SIO_LOOPBACK_FAST_PATH, + SIO_RCVALL as SIO_RCVALL, + ) + +# Re-exported from errno +EBADF: int +EAGAIN: int +EWOULDBLOCK: int + +class AddressFamily(IntEnum): + AF_INET: int + AF_INET6: int + AF_APPLETALK: int + AF_DECnet: int + AF_IPX: int + AF_SNA: int + AF_UNSPEC: int + if sys.platform != "darwin": + AF_IRDA: int + if sys.platform != "win32": + AF_ROUTE: int + AF_SYSTEM: int + AF_UNIX: int + if sys.platform != "darwin" and sys.platform != "win32": + AF_AAL5: int + AF_ASH: int + AF_ATMPVC: int + AF_ATMSVC: int + AF_AX25: int + AF_BRIDGE: int + AF_ECONET: int + AF_KEY: int + AF_LLC: int + AF_NETBEUI: int + AF_NETROM: int + AF_PPPOX: int + AF_ROSE: int + AF_SECURITY: int + AF_WANPIPE: int + AF_X25: int + if sys.platform == "linux": + AF_CAN: int + AF_PACKET: int + AF_RDS: int + AF_TIPC: int + AF_ALG: int + AF_NETLINK: int + AF_VSOCK: int + if sys.version_info >= (3, 8): + AF_QIPCRTR: int + if sys.platform != "win32" or sys.version_info >= (3, 9): + AF_LINK: int + if sys.platform != 
"darwin": + AF_BLUETOOTH: int + +AF_INET = AddressFamily.AF_INET +AF_INET6 = AddressFamily.AF_INET6 +AF_APPLETALK = AddressFamily.AF_APPLETALK +AF_DECnet = AddressFamily.AF_DECnet +AF_IPX = AddressFamily.AF_IPX +AF_SNA = AddressFamily.AF_SNA +AF_UNSPEC = AddressFamily.AF_UNSPEC + +if sys.platform != "darwin": + AF_IRDA = AddressFamily.AF_IRDA + +if sys.platform != "win32": + AF_ROUTE = AddressFamily.AF_ROUTE + AF_SYSTEM = AddressFamily.AF_SYSTEM + AF_UNIX = AddressFamily.AF_UNIX + +if sys.platform != "win32" and sys.platform != "darwin": + AF_AAL5 = AddressFamily.AF_AAL5 + AF_ASH = AddressFamily.AF_ASH + AF_ATMPVC = AddressFamily.AF_ATMPVC + AF_ATMSVC = AddressFamily.AF_ATMSVC + AF_AX25 = AddressFamily.AF_AX25 + AF_BRIDGE = AddressFamily.AF_BRIDGE + AF_ECONET = AddressFamily.AF_ECONET + AF_KEY = AddressFamily.AF_KEY + AF_LLC = AddressFamily.AF_LLC + AF_NETBEUI = AddressFamily.AF_NETBEUI + AF_NETROM = AddressFamily.AF_NETROM + AF_PPPOX = AddressFamily.AF_PPPOX + AF_ROSE = AddressFamily.AF_ROSE + AF_SECURITY = AddressFamily.AF_SECURITY + AF_WANPIPE = AddressFamily.AF_WANPIPE + AF_X25 = AddressFamily.AF_X25 + +if sys.platform == "linux": + AF_CAN = AddressFamily.AF_CAN + AF_PACKET = AddressFamily.AF_PACKET + AF_RDS = AddressFamily.AF_RDS + AF_TIPC = AddressFamily.AF_TIPC + AF_ALG = AddressFamily.AF_ALG + AF_NETLINK = AddressFamily.AF_NETLINK + AF_VSOCK = AddressFamily.AF_VSOCK + if sys.version_info >= (3, 8): + AF_QIPCRTR = AddressFamily.AF_QIPCRTR + +if sys.platform != "win32" or sys.version_info >= (3, 9): + AF_LINK = AddressFamily.AF_LINK + if sys.platform != "darwin": + AF_BLUETOOTH = AddressFamily.AF_BLUETOOTH + +class SocketKind(IntEnum): + SOCK_STREAM: int + SOCK_DGRAM: int + SOCK_RAW: int + SOCK_RDM: int + SOCK_SEQPACKET: int + if sys.platform == "linux": + SOCK_CLOEXEC: int + SOCK_NONBLOCK: int + +SOCK_STREAM = SocketKind.SOCK_STREAM +SOCK_DGRAM = SocketKind.SOCK_DGRAM +SOCK_RAW = SocketKind.SOCK_RAW +SOCK_RDM = SocketKind.SOCK_RDM +SOCK_SEQPACKET = SocketKind.SOCK_SEQPACKET +if sys.platform == "linux": + SOCK_CLOEXEC = SocketKind.SOCK_CLOEXEC + SOCK_NONBLOCK = SocketKind.SOCK_NONBLOCK + +class MsgFlag(IntFlag): + MSG_CTRUNC: int + MSG_DONTROUTE: int + MSG_OOB: int + MSG_PEEK: int + MSG_TRUNC: int + MSG_WAITALL: int + + if sys.platform != "darwin": + MSG_BCAST: int + MSG_MCAST: int + MSG_ERRQUEUE: int + + if sys.platform != "win32" and sys.platform != "darwin": + MSG_BTAG: int + MSG_CMSG_CLOEXEC: int + MSG_CONFIRM: int + MSG_ETAG: int + MSG_FASTOPEN: int + MSG_MORE: int + MSG_NOTIFICATION: int + + if sys.platform != "win32": + MSG_DONTWAIT: int + MSG_EOF: int + MSG_EOR: int + MSG_NOSIGNAL: int # sometimes this exists on darwin, sometimes not + +MSG_CTRUNC = MsgFlag.MSG_CTRUNC +MSG_DONTROUTE = MsgFlag.MSG_DONTROUTE +MSG_OOB = MsgFlag.MSG_OOB +MSG_PEEK = MsgFlag.MSG_PEEK +MSG_TRUNC = MsgFlag.MSG_TRUNC +MSG_WAITALL = MsgFlag.MSG_WAITALL + +if sys.platform != "darwin": + MSG_BCAST = MsgFlag.MSG_BCAST + MSG_MCAST = MsgFlag.MSG_MCAST + MSG_ERRQUEUE = MsgFlag.MSG_ERRQUEUE + +if sys.platform != "win32": + MSG_DONTWAIT = MsgFlag.MSG_DONTWAIT + MSG_EOF = MsgFlag.MSG_EOF + MSG_EOR = MsgFlag.MSG_EOR + MSG_NOSIGNAL = MsgFlag.MSG_NOSIGNAL # Sometimes this exists on darwin, sometimes not + +if sys.platform != "win32" and sys.platform != "darwin": + MSG_BTAG = MsgFlag.MSG_BTAG + MSG_CMSG_CLOEXEC = MsgFlag.MSG_CMSG_CLOEXEC + MSG_CONFIRM = MsgFlag.MSG_CONFIRM + MSG_ETAG = MsgFlag.MSG_ETAG + MSG_FASTOPEN = MsgFlag.MSG_FASTOPEN + MSG_MORE = MsgFlag.MSG_MORE + MSG_NOTIFICATION = MsgFlag.MSG_NOTIFICATION 
+ +class AddressInfo(IntFlag): + AI_ADDRCONFIG: int + AI_ALL: int + AI_CANONNAME: int + AI_NUMERICHOST: int + AI_NUMERICSERV: int + AI_PASSIVE: int + AI_V4MAPPED: int + if sys.platform != "win32": + AI_DEFAULT: int + AI_MASK: int + AI_V4MAPPED_CFG: int + +AI_ADDRCONFIG = AddressInfo.AI_ADDRCONFIG +AI_ALL = AddressInfo.AI_ALL +AI_CANONNAME = AddressInfo.AI_CANONNAME +AI_NUMERICHOST = AddressInfo.AI_NUMERICHOST +AI_NUMERICSERV = AddressInfo.AI_NUMERICSERV +AI_PASSIVE = AddressInfo.AI_PASSIVE +AI_V4MAPPED = AddressInfo.AI_V4MAPPED + +if sys.platform != "win32": + AI_DEFAULT = AddressInfo.AI_DEFAULT + AI_MASK = AddressInfo.AI_MASK + AI_V4MAPPED_CFG = AddressInfo.AI_V4MAPPED_CFG + +if sys.platform == "win32": + errorTab: dict[int, str] # undocumented + +class _SendableFile(Protocol): + def read(self, __size: int) -> bytes: ... + def seek(self, __offset: int) -> object: ... + + # optional fields: + # + # @property + # def mode(self) -> str: ... + # def fileno(self) -> int: ... + +class socket(_socket.socket): + def __init__( + self, family: AddressFamily | int = -1, type: SocketKind | int = -1, proto: int = -1, fileno: int | None = None + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def dup(self) -> Self: ... # noqa: F811 + def accept(self) -> tuple[socket, _RetAddress]: ... + # Note that the makefile's documented windows-specific behavior is not represented + # mode strings with duplicates are intentionally excluded + @overload + def makefile( # type: ignore[misc] + self, + mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: Literal[0], + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> SocketIO: ... + @overload + def makefile( + self, + mode: Literal["rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: Literal[-1, 1] | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> BufferedRWPair: ... + @overload + def makefile( + self, + mode: Literal["rb", "br"], + buffering: Literal[-1, 1] | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> BufferedReader: ... + @overload + def makefile( + self, + mode: Literal["wb", "bw"], + buffering: Literal[-1, 1] | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> BufferedWriter: ... + @overload + def makefile( + self, + mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: int, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> IOBase: ... + @overload + def makefile( + self, + mode: Literal["r", "w", "rw", "wr", ""] = "r", + buffering: int | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> TextIOWrapper: ... + def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: ... + @property + def family(self) -> AddressFamily: ... # type: ignore[override] + @property + def type(self) -> SocketKind: ... # type: ignore[override] + def get_inheritable(self) -> bool: ... + def set_inheritable(self, inheritable: bool) -> None: ... + +def fromfd(fd: _FD, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: ... 
+ +if sys.platform != "win32": + if sys.version_info >= (3, 9): + def send_fds( + sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: Unused = 0, address: Unused = None + ) -> int: ... + def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: ... + +if sys.platform == "win32": + def fromshare(info: bytes) -> socket: ... + +if sys.platform == "win32": + def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: ... + +else: + def socketpair( + family: int | AddressFamily | None = None, type: SocketType | int = ..., proto: int = 0 + ) -> tuple[socket, socket]: ... + +class SocketIO(RawIOBase): + def __init__(self, sock: socket, mode: Literal["r", "w", "rw", "rb", "wb", "rwb"]) -> None: ... + def readinto(self, b: WriteableBuffer) -> int | None: ... + def write(self, b: ReadableBuffer) -> int | None: ... + @property + def name(self) -> int: ... # return value is really "int" + @property + def mode(self) -> Literal["rb", "wb", "rwb"]: ... + +def getfqdn(name: str = "") -> str: ... + +if sys.version_info >= (3, 11): + def create_connection( + address: tuple[str | None, int], + timeout: float | None = ..., # noqa: F811 + source_address: _Address | None = None, + *, + all_errors: bool = False, + ) -> socket: ... + +else: + def create_connection( + address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = None # noqa: F811 + ) -> socket: ... + +if sys.version_info >= (3, 8): + def has_dualstack_ipv6() -> bool: ... + def create_server( + address: _Address, + *, + family: int = ..., + backlog: int | None = None, + reuse_port: bool = False, + dualstack_ipv6: bool = False, + ) -> socket: ... + +# the 5th tuple item is an address +def getaddrinfo( + host: bytes | str | None, port: bytes | str | int | None, family: int = 0, type: int = 0, proto: int = 0, flags: int = 0 +) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... 
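Note on the socket stub above (editorial illustration only, not part of the vendored typeshed data): the `makefile()` overloads map the mode string and buffering argument onto distinct return types, so type checkers see `SocketIO` for unbuffered binary modes, `BufferedReader`/`BufferedWriter` for buffered binary modes, and `TextIOWrapper` for text modes. A small, network-free sketch of how those overloads resolve in ordinary use:

    import socket

    a, b = socket.socketpair()
    reader = a.makefile("rb")                 # buffered binary read -> BufferedReader
    raw = a.makefile("rb", buffering=0)       # unbuffered binary -> SocketIO
    writer = b.makefile("w", encoding="utf-8")  # text mode -> TextIOWrapper

    writer.write("ping\n")
    writer.flush()
    print(reader.readline())                  # b"ping\n"

    for f in (reader, raw, writer):
        f.close()
    a.close()
    b.close()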
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/socketserver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/socketserver.pyi new file mode 100644 index 00000000..3d7e77f4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/socketserver.pyi @@ -0,0 +1,163 @@ +import sys +import types +from _socket import _Address, _RetAddress +from _typeshed import ReadableBuffer +from collections.abc import Callable +from socket import socket as _socket +from typing import Any, BinaryIO, ClassVar +from typing_extensions import Self, TypeAlias + +__all__ = [ + "BaseServer", + "TCPServer", + "UDPServer", + "ThreadingUDPServer", + "ThreadingTCPServer", + "BaseRequestHandler", + "StreamRequestHandler", + "DatagramRequestHandler", + "ThreadingMixIn", +] +if sys.platform != "win32": + __all__ += [ + "ForkingMixIn", + "ForkingTCPServer", + "ForkingUDPServer", + "ThreadingUnixDatagramServer", + "ThreadingUnixStreamServer", + "UnixDatagramServer", + "UnixStreamServer", + ] + +_RequestType: TypeAlias = _socket | tuple[bytes, _socket] +_AfUnixAddress: TypeAlias = str | ReadableBuffer # adddress acceptable for an AF_UNIX socket +_AfInetAddress: TypeAlias = tuple[str | bytes | bytearray, int] # address acceptable for an AF_INET socket + +# This can possibly be generic at some point: +class BaseServer: + address_family: int + server_address: _Address + socket: _socket + allow_reuse_address: bool + request_queue_size: int + socket_type: int + timeout: float | None + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] + def __init__( + self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] + ) -> None: ... + def fileno(self) -> int: ... + def handle_request(self) -> None: ... + def serve_forever(self, poll_interval: float = 0.5) -> None: ... + def shutdown(self) -> None: ... + def server_close(self) -> None: ... + def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def get_request(self) -> tuple[Any, Any]: ... + def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def handle_timeout(self) -> None: ... + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def server_activate(self) -> None: ... + def server_bind(self) -> None: ... + def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def service_actions(self) -> None: ... + def shutdown_request(self, request: _RequestType) -> None: ... # undocumented + def close_request(self, request: _RequestType) -> None: ... # undocumented + +class TCPServer(BaseServer): + if sys.version_info >= (3, 11): + allow_reuse_port: bool + server_address: _AfInetAddress # type: ignore[assignment] + def __init__( + self, + server_address: _AfInetAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], + bind_and_activate: bool = True, + ) -> None: ... + def get_request(self) -> tuple[_socket, _RetAddress]: ... + +class UDPServer(TCPServer): + max_packet_size: ClassVar[int] + def get_request(self) -> tuple[tuple[bytes, _socket], _RetAddress]: ... 
# type: ignore[override] + +if sys.platform != "win32": + class UnixStreamServer(BaseServer): + server_address: _AfUnixAddress # type: ignore[assignment] + def __init__( + self, + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], + bind_and_activate: bool = True, + ) -> None: ... + + class UnixDatagramServer(BaseServer): + server_address: _AfUnixAddress # type: ignore[assignment] + def __init__( + self, + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], + bind_and_activate: bool = True, + ) -> None: ... + +if sys.platform != "win32": + class ForkingMixIn: + timeout: float | None # undocumented + active_children: set[int] | None # undocumented + max_children: int # undocumented + block_on_close: bool + def collect_children(self, *, blocking: bool = False) -> None: ... # undocumented + def handle_timeout(self) -> None: ... # undocumented + def service_actions(self) -> None: ... # undocumented + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def server_close(self) -> None: ... + +class ThreadingMixIn: + daemon_threads: bool + block_on_close: bool + def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: ... # undocumented + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... + def server_close(self) -> None: ... + +if sys.platform != "win32": + class ForkingTCPServer(ForkingMixIn, TCPServer): ... + class ForkingUDPServer(ForkingMixIn, UDPServer): ... + +class ThreadingTCPServer(ThreadingMixIn, TCPServer): ... +class ThreadingUDPServer(ThreadingMixIn, UDPServer): ... + +if sys.platform != "win32": + class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): ... + class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... + +class BaseRequestHandler: + # `request` is technically of type _RequestType, + # but there are some concerns that having a union here would cause + # too much inconvenience to people using it (see + # https://github.com/python/typeshed/pull/384#issuecomment-234649696) + # + # Note also that _RetAddress is also just an alias for `Any` + request: Any + client_address: _RetAddress + server: BaseServer + def __init__(self, request: _RequestType, client_address: _RetAddress, server: BaseServer) -> None: ... + def setup(self) -> None: ... + def handle(self) -> None: ... + def finish(self) -> None: ... 
+ +class StreamRequestHandler(BaseRequestHandler): + rbufsize: ClassVar[int] # undocumented + wbufsize: ClassVar[int] # undocumented + timeout: ClassVar[float | None] # undocumented + disable_nagle_algorithm: ClassVar[bool] # undocumented + connection: Any # undocumented + rfile: BinaryIO + wfile: BinaryIO + +class DatagramRequestHandler(BaseRequestHandler): + packet: _socket # undocumented + socket: _socket # undocumented + rfile: BinaryIO + wfile: BinaryIO diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/spwd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/spwd.pyi new file mode 100644 index 00000000..27b1061e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/spwd.pyi @@ -0,0 +1,41 @@ +import sys +from _typeshed import structseq +from typing import Any +from typing_extensions import Final, final + +if sys.platform != "win32": + @final + class struct_spwd(structseq[Any], tuple[str, str, int, int, int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ( + "sp_namp", + "sp_pwdp", + "sp_lstchg", + "sp_min", + "sp_max", + "sp_warn", + "sp_inact", + "sp_expire", + "sp_flag", + ) + @property + def sp_namp(self) -> str: ... + @property + def sp_pwdp(self) -> str: ... + @property + def sp_lstchg(self) -> int: ... + @property + def sp_min(self) -> int: ... + @property + def sp_max(self) -> int: ... + @property + def sp_warn(self) -> int: ... + @property + def sp_inact(self) -> int: ... + @property + def sp_expire(self) -> int: ... + @property + def sp_flag(self) -> int: ... + + def getspall() -> list[struct_spwd]: ... + def getspnam(__arg: str) -> struct_spwd: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sqlite3/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sqlite3/__init__.pyi new file mode 100644 index 00000000..d747be90 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sqlite3/__init__.pyi @@ -0,0 +1 @@ +from sqlite3.dbapi2 import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sqlite3/dbapi2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sqlite3/dbapi2.pyi new file mode 100644 index 00000000..26188445 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -0,0 +1,458 @@ +import sqlite3 +import sys +from _typeshed import Incomplete, ReadableBuffer, StrOrBytesPath, SupportsLenAndGetItem, Unused +from collections.abc import Callable, Generator, Iterable, Iterator, Mapping +from datetime import date, datetime, time +from types import TracebackType +from typing import Any, Protocol, TypeVar, overload +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, final + +_T = TypeVar("_T") +_CursorT = TypeVar("_CursorT", bound=Cursor) +_SqliteData: TypeAlias = str | ReadableBuffer | int | float | None +# Data that is passed through adapters can be of any type accepted by an adapter. +_AdaptedInputData: TypeAlias = _SqliteData | Any +# The Mapping must really be a dict, but making it invariant is too annoying. 
+_Parameters: TypeAlias = SupportsLenAndGetItem[_AdaptedInputData] | Mapping[str, _AdaptedInputData] +_Adapter: TypeAlias = Callable[[_T], _SqliteData] +_Converter: TypeAlias = Callable[[bytes], Any] + +paramstyle: str +threadsafety: int +apilevel: str +Date = date +Time = time +Timestamp = datetime + +def DateFromTicks(ticks: float) -> Date: ... +def TimeFromTicks(ticks: float) -> Time: ... +def TimestampFromTicks(ticks: float) -> Timestamp: ... + +version_info: tuple[int, int, int] +sqlite_version_info: tuple[int, int, int] +Binary = memoryview + +# The remaining definitions are imported from _sqlite3. + +PARSE_COLNAMES: int +PARSE_DECLTYPES: int +SQLITE_ALTER_TABLE: int +SQLITE_ANALYZE: int +SQLITE_ATTACH: int +SQLITE_CREATE_INDEX: int +SQLITE_CREATE_TABLE: int +SQLITE_CREATE_TEMP_INDEX: int +SQLITE_CREATE_TEMP_TABLE: int +SQLITE_CREATE_TEMP_TRIGGER: int +SQLITE_CREATE_TEMP_VIEW: int +SQLITE_CREATE_TRIGGER: int +SQLITE_CREATE_VIEW: int +SQLITE_CREATE_VTABLE: int +SQLITE_DELETE: int +SQLITE_DENY: int +SQLITE_DETACH: int +SQLITE_DONE: int +SQLITE_DROP_INDEX: int +SQLITE_DROP_TABLE: int +SQLITE_DROP_TEMP_INDEX: int +SQLITE_DROP_TEMP_TABLE: int +SQLITE_DROP_TEMP_TRIGGER: int +SQLITE_DROP_TEMP_VIEW: int +SQLITE_DROP_TRIGGER: int +SQLITE_DROP_VIEW: int +SQLITE_DROP_VTABLE: int +SQLITE_FUNCTION: int +SQLITE_IGNORE: int +SQLITE_INSERT: int +SQLITE_OK: int +if sys.version_info >= (3, 11): + SQLITE_LIMIT_LENGTH: int + SQLITE_LIMIT_SQL_LENGTH: int + SQLITE_LIMIT_COLUMN: int + SQLITE_LIMIT_EXPR_DEPTH: int + SQLITE_LIMIT_COMPOUND_SELECT: int + SQLITE_LIMIT_VDBE_OP: int + SQLITE_LIMIT_FUNCTION_ARG: int + SQLITE_LIMIT_ATTACHED: int + SQLITE_LIMIT_LIKE_PATTERN_LENGTH: int + SQLITE_LIMIT_VARIABLE_NUMBER: int + SQLITE_LIMIT_TRIGGER_DEPTH: int + SQLITE_LIMIT_WORKER_THREADS: int +SQLITE_PRAGMA: int +SQLITE_READ: int +SQLITE_REINDEX: int +SQLITE_RECURSIVE: int +SQLITE_SAVEPOINT: int +SQLITE_SELECT: int +SQLITE_TRANSACTION: int +SQLITE_UPDATE: int +adapters: dict[tuple[type[Any], type[Any]], _Adapter[Any]] +converters: dict[str, _Converter] +sqlite_version: str +version: str + +if sys.version_info >= (3, 11): + SQLITE_ABORT: int + SQLITE_ABORT_ROLLBACK: int + SQLITE_AUTH: int + SQLITE_AUTH_USER: int + SQLITE_BUSY: int + SQLITE_BUSY_RECOVERY: int + SQLITE_BUSY_SNAPSHOT: int + SQLITE_BUSY_TIMEOUT: int + SQLITE_CANTOPEN: int + SQLITE_CANTOPEN_CONVPATH: int + SQLITE_CANTOPEN_DIRTYWAL: int + SQLITE_CANTOPEN_FULLPATH: int + SQLITE_CANTOPEN_ISDIR: int + SQLITE_CANTOPEN_NOTEMPDIR: int + SQLITE_CANTOPEN_SYMLINK: int + SQLITE_CONSTRAINT: int + SQLITE_CONSTRAINT_CHECK: int + SQLITE_CONSTRAINT_COMMITHOOK: int + SQLITE_CONSTRAINT_FOREIGNKEY: int + SQLITE_CONSTRAINT_FUNCTION: int + SQLITE_CONSTRAINT_NOTNULL: int + SQLITE_CONSTRAINT_PINNED: int + SQLITE_CONSTRAINT_PRIMARYKEY: int + SQLITE_CONSTRAINT_ROWID: int + SQLITE_CONSTRAINT_TRIGGER: int + SQLITE_CONSTRAINT_UNIQUE: int + SQLITE_CONSTRAINT_VTAB: int + SQLITE_CORRUPT: int + SQLITE_CORRUPT_INDEX: int + SQLITE_CORRUPT_SEQUENCE: int + SQLITE_CORRUPT_VTAB: int + SQLITE_EMPTY: int + SQLITE_ERROR: int + SQLITE_ERROR_MISSING_COLLSEQ: int + SQLITE_ERROR_RETRY: int + SQLITE_ERROR_SNAPSHOT: int + SQLITE_FORMAT: int + SQLITE_FULL: int + SQLITE_INTERNAL: int + SQLITE_INTERRUPT: int + SQLITE_IOERR: int + SQLITE_IOERR_ACCESS: int + SQLITE_IOERR_AUTH: int + SQLITE_IOERR_BEGIN_ATOMIC: int + SQLITE_IOERR_BLOCKED: int + SQLITE_IOERR_CHECKRESERVEDLOCK: int + SQLITE_IOERR_CLOSE: int + SQLITE_IOERR_COMMIT_ATOMIC: int + SQLITE_IOERR_CONVPATH: int + SQLITE_IOERR_CORRUPTFS: int + 
SQLITE_IOERR_DATA: int + SQLITE_IOERR_DELETE: int + SQLITE_IOERR_DELETE_NOENT: int + SQLITE_IOERR_DIR_CLOSE: int + SQLITE_IOERR_DIR_FSYNC: int + SQLITE_IOERR_FSTAT: int + SQLITE_IOERR_FSYNC: int + SQLITE_IOERR_GETTEMPPATH: int + SQLITE_IOERR_LOCK: int + SQLITE_IOERR_MMAP: int + SQLITE_IOERR_NOMEM: int + SQLITE_IOERR_RDLOCK: int + SQLITE_IOERR_READ: int + SQLITE_IOERR_ROLLBACK_ATOMIC: int + SQLITE_IOERR_SEEK: int + SQLITE_IOERR_SHMLOCK: int + SQLITE_IOERR_SHMMAP: int + SQLITE_IOERR_SHMOPEN: int + SQLITE_IOERR_SHMSIZE: int + SQLITE_IOERR_SHORT_READ: int + SQLITE_IOERR_TRUNCATE: int + SQLITE_IOERR_UNLOCK: int + SQLITE_IOERR_VNODE: int + SQLITE_IOERR_WRITE: int + SQLITE_LOCKED: int + SQLITE_LOCKED_SHAREDCACHE: int + SQLITE_LOCKED_VTAB: int + SQLITE_MISMATCH: int + SQLITE_MISUSE: int + SQLITE_NOLFS: int + SQLITE_NOMEM: int + SQLITE_NOTADB: int + SQLITE_NOTFOUND: int + SQLITE_NOTICE: int + SQLITE_NOTICE_RECOVER_ROLLBACK: int + SQLITE_NOTICE_RECOVER_WAL: int + SQLITE_OK_LOAD_PERMANENTLY: int + SQLITE_OK_SYMLINK: int + SQLITE_PERM: int + SQLITE_PROTOCOL: int + SQLITE_RANGE: int + SQLITE_READONLY: int + SQLITE_READONLY_CANTINIT: int + SQLITE_READONLY_CANTLOCK: int + SQLITE_READONLY_DBMOVED: int + SQLITE_READONLY_DIRECTORY: int + SQLITE_READONLY_RECOVERY: int + SQLITE_READONLY_ROLLBACK: int + SQLITE_ROW: int + SQLITE_SCHEMA: int + SQLITE_TOOBIG: int + SQLITE_WARNING: int + SQLITE_WARNING_AUTOINDEX: int + +# Can take or return anything depending on what's in the registry. +@overload +def adapt(__obj: Any, __proto: Any) -> Any: ... +@overload +def adapt(__obj: Any, __proto: Any, __alt: _T) -> Any | _T: ... +def complete_statement(statement: str) -> bool: ... +def connect( + database: StrOrBytesPath, + timeout: float = ..., + detect_types: int = ..., + isolation_level: str | None = ..., + check_same_thread: bool = ..., + factory: type[Connection] | None = ..., + cached_statements: int = ..., + uri: bool = ..., +) -> Connection: ... +def enable_callback_tracebacks(__enable: bool) -> None: ... + +# takes a pos-or-keyword argument because there is a C wrapper +def enable_shared_cache(enable: int) -> None: ... + +if sys.version_info >= (3, 10): + def register_adapter(__type: type[_T], __adapter: _Adapter[_T]) -> None: ... + def register_converter(__typename: str, __converter: _Converter) -> None: ... + +else: + def register_adapter(__type: type[_T], __caster: _Adapter[_T]) -> None: ... + def register_converter(__name: str, __converter: _Converter) -> None: ... + +if sys.version_info < (3, 8): + class Cache: + def __init__(self, *args: Incomplete, **kwargs: Unused) -> None: ... + def display(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... + def get(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... + +class _AggregateProtocol(Protocol): + def step(self, __value: int) -> object: ... + def finalize(self) -> int: ... + +class _SingleParamWindowAggregateClass(Protocol): + def step(self, __param: Any) -> object: ... + def inverse(self, __param: Any) -> object: ... + def value(self) -> _SqliteData: ... + def finalize(self) -> _SqliteData: ... + +class _AnyParamWindowAggregateClass(Protocol): + def step(self, *args: Any) -> object: ... + def inverse(self, *args: Any) -> object: ... + def value(self) -> _SqliteData: ... + def finalize(self) -> _SqliteData: ... + +class _WindowAggregateClass(Protocol): + step: Callable[..., object] + inverse: Callable[..., object] + def value(self) -> _SqliteData: ... + def finalize(self) -> _SqliteData: ... 
+ +class Connection: + @property + def DataError(self) -> type[sqlite3.DataError]: ... + @property + def DatabaseError(self) -> type[sqlite3.DatabaseError]: ... + @property + def Error(self) -> type[sqlite3.Error]: ... + @property + def IntegrityError(self) -> type[sqlite3.IntegrityError]: ... + @property + def InterfaceError(self) -> type[sqlite3.InterfaceError]: ... + @property + def InternalError(self) -> type[sqlite3.InternalError]: ... + @property + def NotSupportedError(self) -> type[sqlite3.NotSupportedError]: ... + @property + def OperationalError(self) -> type[sqlite3.OperationalError]: ... + @property + def ProgrammingError(self) -> type[sqlite3.ProgrammingError]: ... + @property + def Warning(self) -> type[sqlite3.Warning]: ... + @property + def in_transaction(self) -> bool: ... + isolation_level: str | None # one of '', 'DEFERRED', 'IMMEDIATE' or 'EXCLUSIVE' + @property + def total_changes(self) -> int: ... + row_factory: Any + text_factory: Any + def __init__( + self, + database: StrOrBytesPath, + timeout: float = ..., + detect_types: int = ..., + isolation_level: str | None = ..., + check_same_thread: bool = ..., + factory: type[Connection] | None = ..., + cached_statements: int = ..., + uri: bool = ..., + ) -> None: ... + def close(self) -> None: ... + if sys.version_info >= (3, 11): + def blobopen(self, __table: str, __column: str, __row: int, *, readonly: bool = False, name: str = "main") -> Blob: ... + + def commit(self) -> None: ... + def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: ... + if sys.version_info >= (3, 11): + # num_params determines how many params will be passed to the aggregate class. We provide an overload + # for the case where num_params = 1, which is expected to be the common case. + @overload + def create_window_function( + self, __name: str, __num_params: Literal[1], __aggregate_class: Callable[[], _SingleParamWindowAggregateClass] | None + ) -> None: ... + # And for num_params = -1, which means the aggregate must accept any number of parameters. + @overload + def create_window_function( + self, __name: str, __num_params: Literal[-1], __aggregate_class: Callable[[], _AnyParamWindowAggregateClass] | None + ) -> None: ... + @overload + def create_window_function( + self, __name: str, __num_params: int, __aggregate_class: Callable[[], _WindowAggregateClass] | None + ) -> None: ... + + def create_collation(self, __name: str, __callback: Callable[[str, str], int | SupportsIndex] | None) -> None: ... + if sys.version_info >= (3, 8): + def create_function( + self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = False + ) -> None: ... + else: + def create_function(self, name: str, num_params: int, func: Callable[..., _SqliteData] | None) -> None: ... + + @overload + def cursor(self, cursorClass: None = None) -> Cursor: ... + @overload + def cursor(self, cursorClass: Callable[[], _CursorT]) -> _CursorT: ... + def execute(self, sql: str, parameters: _Parameters = ...) -> Cursor: ... + def executemany(self, __sql: str, __parameters: Iterable[_Parameters]) -> Cursor: ... + def executescript(self, __sql_script: str) -> Cursor: ... + def interrupt(self) -> None: ... + def iterdump(self) -> Generator[str, None, None]: ... + def rollback(self) -> None: ... + def set_authorizer( + self, authorizer_callback: Callable[[int, str | None, str | None, str | None, str | None], int] | None + ) -> None: ... 
+ def set_progress_handler(self, progress_handler: Callable[[], int | None] | None, n: int) -> None: ... + def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: ... + # enable_load_extension and load_extension is not available on python distributions compiled + # without sqlite3 loadable extension support. see footnotes https://docs.python.org/3/library/sqlite3.html#f1 + def enable_load_extension(self, __enabled: bool) -> None: ... + def load_extension(self, __name: str) -> None: ... + def backup( + self, + target: Connection, + *, + pages: int = -1, + progress: Callable[[int, int, int], object] | None = None, + name: str = "main", + sleep: float = 0.25, + ) -> None: ... + if sys.version_info >= (3, 11): + def setlimit(self, __category: int, __limit: int) -> int: ... + def getlimit(self, __category: int) -> int: ... + def serialize(self, *, name: str = "main") -> bytes: ... + def deserialize(self, __data: ReadableBuffer, *, name: str = "main") -> None: ... + + def __call__(self, __sql: str) -> _Statement: ... + def __enter__(self) -> Self: ... + def __exit__( + self, __type: type[BaseException] | None, __value: BaseException | None, __traceback: TracebackType | None + ) -> Literal[False]: ... + +class Cursor(Iterator[Any]): + arraysize: int + @property + def connection(self) -> Connection: ... + # May be None, but using | Any instead to avoid slightly annoying false positives. + @property + def description(self) -> tuple[tuple[str, None, None, None, None, None, None], ...] | Any: ... + @property + def lastrowid(self) -> int | None: ... + row_factory: Callable[[Cursor, Row], object] | None + @property + def rowcount(self) -> int: ... + def __init__(self, __cursor: Connection) -> None: ... + def close(self) -> None: ... + def execute(self, __sql: str, __parameters: _Parameters = ...) -> Self: ... + def executemany(self, __sql: str, __seq_of_parameters: Iterable[_Parameters]) -> Self: ... + def executescript(self, __sql_script: str) -> Cursor: ... + def fetchall(self) -> list[Any]: ... + def fetchmany(self, size: int | None = 1) -> list[Any]: ... + # Returns either a row (as created by the row_factory) or None, but + # putting None in the return annotation causes annoying false positives. + def fetchone(self) -> Any: ... + def setinputsizes(self, __sizes: Unused) -> None: ... # does nothing + def setoutputsize(self, __size: Unused, __column: Unused = None) -> None: ... # does nothing + def __iter__(self) -> Self: ... + def __next__(self) -> Any: ... + +class DataError(DatabaseError): ... +class DatabaseError(Error): ... + +class Error(Exception): + if sys.version_info >= (3, 11): + sqlite_errorcode: int + sqlite_errorname: str + +class IntegrityError(DatabaseError): ... +class InterfaceError(Error): ... +class InternalError(DatabaseError): ... +class NotSupportedError(DatabaseError): ... +class OperationalError(DatabaseError): ... + +if sys.version_info < (3, 10): + OptimizedUnicode = str + +@final +class PrepareProtocol: + def __init__(self, *args: object, **kwargs: object) -> None: ... + +class ProgrammingError(DatabaseError): ... + +class Row: + def __init__(self, __cursor: Cursor, __data: tuple[Any, ...]) -> None: ... + def keys(self) -> list[str]: ... + @overload + def __getitem__(self, __index: int | str) -> Any: ... + @overload + def __getitem__(self, __index: slice) -> tuple[Any, ...]: ... + def __iter__(self) -> Iterator[Any]: ... + def __len__(self) -> int: ... + # These return NotImplemented for anything that is not a Row. 
+ def __eq__(self, __other: object) -> bool: ... + def __ge__(self, __other: object) -> bool: ... + def __gt__(self, __other: object) -> bool: ... + def __le__(self, __other: object) -> bool: ... + def __lt__(self, __other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... + +if sys.version_info >= (3, 8): + @final + class _Statement: ... + +else: + @final + class Statement: + def __init__(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... + _Statement: TypeAlias = Statement + +class Warning(Exception): ... + +if sys.version_info >= (3, 11): + @final + class Blob: + def close(self) -> None: ... + def read(self, __length: int = -1) -> bytes: ... + def write(self, __data: ReadableBuffer) -> None: ... + def tell(self) -> int: ... + # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END + def seek(self, __offset: int, __origin: int = 0) -> None: ... + def __len__(self) -> int: ... + def __enter__(self) -> Self: ... + def __exit__(self, __typ: object, __val: object, __tb: object) -> Literal[False]: ... + def __getitem__(self, __item: SupportsIndex | slice) -> int: ... + def __setitem__(self, __item: SupportsIndex | slice, __value: int) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sre_compile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sre_compile.pyi new file mode 100644 index 00000000..2d04a886 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sre_compile.pyi @@ -0,0 +1,11 @@ +from re import Pattern +from sre_constants import * +from sre_constants import _NamedIntConstant +from sre_parse import SubPattern +from typing import Any + +MAXCODE: int + +def dis(code: list[_NamedIntConstant]) -> None: ... +def isstring(obj: Any) -> bool: ... +def compile(p: str | bytes | SubPattern, flags: int = 0) -> Pattern[Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sre_constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sre_constants.pyi new file mode 100644 index 00000000..d522372c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sre_constants.pyi @@ -0,0 +1,130 @@ +import sys +from typing import Any +from typing_extensions import Self + +MAXGROUPS: int + +MAGIC: int + +class error(Exception): + msg: str + pattern: str | bytes | None + pos: int | None + lineno: int + colno: int + def __init__(self, msg: str, pattern: str | bytes | None = None, pos: int | None = None) -> None: ... + +class _NamedIntConstant(int): + name: Any + def __new__(cls, value: int, name: str) -> Self: ... 
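# Annotation: a minimal usage sketch for the sqlite3.dbapi2 stubs above (not part
# of the vendored stub itself). _Parameters accepts either a sequence (qmark
# style) or a mapping (named style), and Connection.__enter__/__exit__ wrap a
# transaction rather than closing the connection. The table and column names are
# made up for the example.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row          # rows become index- and name-addressable
conn.execute("CREATE TABLE points (x INTEGER, y INTEGER)")
with conn:                              # commits on success, rolls back on error
    conn.executemany("INSERT INTO points VALUES (?, ?)", [(1, 2), (3, 4)])
for row in conn.execute("SELECT x, y FROM points WHERE x > :minimum", {"minimum": 0}):
    print(row["x"], row["y"])
conn.close()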
+ +MAXREPEAT: _NamedIntConstant +OPCODES: list[_NamedIntConstant] +ATCODES: list[_NamedIntConstant] +CHCODES: list[_NamedIntConstant] +OP_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +OP_LOCALE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +OP_UNICODE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +AT_MULTILINE: dict[_NamedIntConstant, _NamedIntConstant] +AT_LOCALE: dict[_NamedIntConstant, _NamedIntConstant] +AT_UNICODE: dict[_NamedIntConstant, _NamedIntConstant] +CH_LOCALE: dict[_NamedIntConstant, _NamedIntConstant] +CH_UNICODE: dict[_NamedIntConstant, _NamedIntConstant] +SRE_FLAG_TEMPLATE: int +SRE_FLAG_IGNORECASE: int +SRE_FLAG_LOCALE: int +SRE_FLAG_MULTILINE: int +SRE_FLAG_DOTALL: int +SRE_FLAG_UNICODE: int +SRE_FLAG_VERBOSE: int +SRE_FLAG_DEBUG: int +SRE_FLAG_ASCII: int +SRE_INFO_PREFIX: int +SRE_INFO_LITERAL: int +SRE_INFO_CHARSET: int + +# Stubgen above; manually defined constants below (dynamic at runtime) + +# from OPCODES +FAILURE: _NamedIntConstant +SUCCESS: _NamedIntConstant +ANY: _NamedIntConstant +ANY_ALL: _NamedIntConstant +ASSERT: _NamedIntConstant +ASSERT_NOT: _NamedIntConstant +AT: _NamedIntConstant +BRANCH: _NamedIntConstant +if sys.version_info < (3, 11): + CALL: _NamedIntConstant +CATEGORY: _NamedIntConstant +CHARSET: _NamedIntConstant +BIGCHARSET: _NamedIntConstant +GROUPREF: _NamedIntConstant +GROUPREF_EXISTS: _NamedIntConstant +GROUPREF_IGNORE: _NamedIntConstant +IN: _NamedIntConstant +IN_IGNORE: _NamedIntConstant +INFO: _NamedIntConstant +JUMP: _NamedIntConstant +LITERAL: _NamedIntConstant +LITERAL_IGNORE: _NamedIntConstant +MARK: _NamedIntConstant +MAX_UNTIL: _NamedIntConstant +MIN_UNTIL: _NamedIntConstant +NOT_LITERAL: _NamedIntConstant +NOT_LITERAL_IGNORE: _NamedIntConstant +NEGATE: _NamedIntConstant +RANGE: _NamedIntConstant +REPEAT: _NamedIntConstant +REPEAT_ONE: _NamedIntConstant +SUBPATTERN: _NamedIntConstant +MIN_REPEAT_ONE: _NamedIntConstant +if sys.version_info >= (3, 11): + ATOMIC_GROUP: _NamedIntConstant + POSSESSIVE_REPEAT: _NamedIntConstant + POSSESSIVE_REPEAT_ONE: _NamedIntConstant +RANGE_UNI_IGNORE: _NamedIntConstant +GROUPREF_LOC_IGNORE: _NamedIntConstant +GROUPREF_UNI_IGNORE: _NamedIntConstant +IN_LOC_IGNORE: _NamedIntConstant +IN_UNI_IGNORE: _NamedIntConstant +LITERAL_LOC_IGNORE: _NamedIntConstant +LITERAL_UNI_IGNORE: _NamedIntConstant +NOT_LITERAL_LOC_IGNORE: _NamedIntConstant +NOT_LITERAL_UNI_IGNORE: _NamedIntConstant +MIN_REPEAT: _NamedIntConstant +MAX_REPEAT: _NamedIntConstant + +# from ATCODES +AT_BEGINNING: _NamedIntConstant +AT_BEGINNING_LINE: _NamedIntConstant +AT_BEGINNING_STRING: _NamedIntConstant +AT_BOUNDARY: _NamedIntConstant +AT_NON_BOUNDARY: _NamedIntConstant +AT_END: _NamedIntConstant +AT_END_LINE: _NamedIntConstant +AT_END_STRING: _NamedIntConstant +AT_LOC_BOUNDARY: _NamedIntConstant +AT_LOC_NON_BOUNDARY: _NamedIntConstant +AT_UNI_BOUNDARY: _NamedIntConstant +AT_UNI_NON_BOUNDARY: _NamedIntConstant + +# from CHCODES +CATEGORY_DIGIT: _NamedIntConstant +CATEGORY_NOT_DIGIT: _NamedIntConstant +CATEGORY_SPACE: _NamedIntConstant +CATEGORY_NOT_SPACE: _NamedIntConstant +CATEGORY_WORD: _NamedIntConstant +CATEGORY_NOT_WORD: _NamedIntConstant +CATEGORY_LINEBREAK: _NamedIntConstant +CATEGORY_NOT_LINEBREAK: _NamedIntConstant +CATEGORY_LOC_WORD: _NamedIntConstant +CATEGORY_LOC_NOT_WORD: _NamedIntConstant +CATEGORY_UNI_DIGIT: _NamedIntConstant +CATEGORY_UNI_NOT_DIGIT: _NamedIntConstant +CATEGORY_UNI_SPACE: _NamedIntConstant +CATEGORY_UNI_NOT_SPACE: _NamedIntConstant +CATEGORY_UNI_WORD: _NamedIntConstant +CATEGORY_UNI_NOT_WORD: 
_NamedIntConstant +CATEGORY_UNI_LINEBREAK: _NamedIntConstant +CATEGORY_UNI_NOT_LINEBREAK: _NamedIntConstant diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sre_parse.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sre_parse.pyi new file mode 100644 index 00000000..56f10bb4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sre_parse.pyi @@ -0,0 +1,111 @@ +import sys +from collections.abc import Iterable +from re import Match, Pattern as _Pattern +from sre_constants import * +from sre_constants import _NamedIntConstant as _NIC, error as _Error +from typing import Any, overload +from typing_extensions import TypeAlias + +SPECIAL_CHARS: str +REPEAT_CHARS: str +DIGITS: frozenset[str] +OCTDIGITS: frozenset[str] +HEXDIGITS: frozenset[str] +ASCIILETTERS: frozenset[str] +WHITESPACE: frozenset[str] +ESCAPES: dict[str, tuple[_NIC, int]] +CATEGORIES: dict[str, tuple[_NIC, _NIC] | tuple[_NIC, list[tuple[_NIC, _NIC]]]] +FLAGS: dict[str, int] +TYPE_FLAGS: int +GLOBAL_FLAGS: int + +if sys.version_info < (3, 11): + class Verbose(Exception): ... + +class _State: + flags: int + groupdict: dict[str, int] + groupwidths: list[int | None] + lookbehindgroups: int | None + @property + def groups(self) -> int: ... + def opengroup(self, name: str | None = ...) -> int: ... + def closegroup(self, gid: int, p: SubPattern) -> None: ... + def checkgroup(self, gid: int) -> bool: ... + def checklookbehindgroup(self, gid: int, source: Tokenizer) -> None: ... + +if sys.version_info >= (3, 8): + State: TypeAlias = _State +else: + Pattern: TypeAlias = _State + +_OpSubpatternType: TypeAlias = tuple[int | None, int, int, SubPattern] +_OpGroupRefExistsType: TypeAlias = tuple[int, SubPattern, SubPattern] +_OpInType: TypeAlias = list[tuple[_NIC, int]] +_OpBranchType: TypeAlias = tuple[None, list[SubPattern]] +_AvType: TypeAlias = _OpInType | _OpBranchType | Iterable[SubPattern] | _OpGroupRefExistsType | _OpSubpatternType +_CodeType: TypeAlias = tuple[_NIC, _AvType] + +class SubPattern: + data: list[_CodeType] + width: int | None + + if sys.version_info >= (3, 8): + state: State + def __init__(self, state: State, data: list[_CodeType] | None = None) -> None: ... + else: + pattern: Pattern + def __init__(self, pattern: Pattern, data: list[_CodeType] | None = None) -> None: ... + + def dump(self, level: int = 0) -> None: ... + def __len__(self) -> int: ... + def __delitem__(self, index: int | slice) -> None: ... + def __getitem__(self, index: int | slice) -> SubPattern | _CodeType: ... + def __setitem__(self, index: int | slice, code: _CodeType) -> None: ... + def insert(self, index: int, code: _CodeType) -> None: ... + def append(self, code: _CodeType) -> None: ... + def getwidth(self) -> tuple[int, int]: ... + +class Tokenizer: + istext: bool + string: Any + decoded_string: str + index: int + next: str | None + def __init__(self, string: Any) -> None: ... + def match(self, char: str) -> bool: ... + def get(self) -> str | None: ... + def getwhile(self, n: int, charset: Iterable[str]) -> str: ... + if sys.version_info >= (3, 8): + def getuntil(self, terminator: str, name: str) -> str: ... + else: + def getuntil(self, terminator: str) -> str: ... + + @property + def pos(self) -> int: ... + def tell(self) -> int: ... + def seek(self, index: int) -> None: ... + def error(self, msg: str, offset: int = 0) -> _Error: ... 
+ + if sys.version_info >= (3, 11): + def checkgroupname(self, name: str, offset: int, nested: int) -> None: ... + +def fix_flags(src: str | bytes, flags: int) -> int: ... + +_TemplateType: TypeAlias = tuple[list[tuple[int, int]], list[str | None]] +_TemplateByteType: TypeAlias = tuple[list[tuple[int, int]], list[bytes | None]] +if sys.version_info >= (3, 8): + def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ... + @overload + def parse_template(source: str, state: _Pattern[Any]) -> _TemplateType: ... + @overload + def parse_template(source: bytes, state: _Pattern[Any]) -> _TemplateByteType: ... + +else: + def parse(str: str, flags: int = 0, pattern: Pattern | None = None) -> SubPattern: ... + @overload + def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ... + @overload + def parse_template(source: bytes, pattern: _Pattern[Any]) -> _TemplateByteType: ... + +def expand_template(template: _TemplateType, match: Match[Any]) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ssl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ssl.pyi new file mode 100644 index 00000000..b73573dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/ssl.pyi @@ -0,0 +1,518 @@ +import enum +import socket +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer +from collections.abc import Callable, Iterable +from typing import Any, NamedTuple, overload +from typing_extensions import Literal, Self, TypeAlias, TypedDict, final + +_PCTRTT: TypeAlias = tuple[tuple[str, str], ...] +_PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] +_PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT] +_PeerCertRetType: TypeAlias = _PeerCertRetDictType | bytes | None +_EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] +_PasswordType: TypeAlias = Callable[[], str | bytes | bytearray] | str | bytes | bytearray + +_SrvnmeCbType: TypeAlias = Callable[[SSLSocket | SSLObject, str | None, SSLSocket], int | None] + +class _Cipher(TypedDict): + aead: bool + alg_bits: int + auth: str + description: str + digest: str | None + id: int + kea: str + name: str + protocol: str + strength_bits: int + symmetric: str + +class SSLError(OSError): + library: str + reason: str + +class SSLZeroReturnError(SSLError): ... +class SSLWantReadError(SSLError): ... +class SSLWantWriteError(SSLError): ... +class SSLSyscallError(SSLError): ... +class SSLEOFError(SSLError): ... + +class SSLCertVerificationError(SSLError, ValueError): + verify_code: int + verify_message: str + +CertificateError = SSLCertVerificationError + +def wrap_socket( + sock: socket.socket, + keyfile: StrOrBytesPath | None = None, + certfile: StrOrBytesPath | None = None, + server_side: bool = False, + cert_reqs: int = ..., + ssl_version: int = ..., + ca_certs: str | None = None, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + ciphers: str | None = None, +) -> SSLSocket: ... +def create_default_context( + purpose: Purpose = ..., + *, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, +) -> SSLContext: ... 
+ +if sys.version_info >= (3, 10): + def _create_unverified_context( + protocol: int | None = None, + *, + cert_reqs: int = ..., + check_hostname: bool = False, + purpose: Purpose = ..., + certfile: StrOrBytesPath | None = None, + keyfile: StrOrBytesPath | None = None, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> SSLContext: ... + +else: + def _create_unverified_context( + protocol: int = ..., + *, + cert_reqs: int = ..., + check_hostname: bool = False, + purpose: Purpose = ..., + certfile: StrOrBytesPath | None = None, + keyfile: StrOrBytesPath | None = None, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> SSLContext: ... + +_create_default_https_context: Callable[..., SSLContext] + +def RAND_bytes(__num: int) -> bytes: ... +def RAND_pseudo_bytes(__num: int) -> tuple[bytes, bool]: ... +def RAND_status() -> bool: ... +def RAND_egd(path: str) -> None: ... +def RAND_add(__string: str | ReadableBuffer, __entropy: float) -> None: ... + +if sys.version_info < (3, 12): + def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ... + +def cert_time_to_seconds(cert_time: str) -> int: ... + +if sys.version_info >= (3, 10): + def get_server_certificate( + addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None, timeout: float = ... + ) -> str: ... + +else: + def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: ... + +def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: ... +def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... + +class DefaultVerifyPaths(NamedTuple): + cafile: str + capath: str + openssl_cafile_env: str + openssl_cafile: str + openssl_capath_env: str + openssl_capath: str + +def get_default_verify_paths() -> DefaultVerifyPaths: ... + +if sys.platform == "win32": + def enum_certificates(store_name: str) -> _EnumRetType: ... + def enum_crls(store_name: str) -> _EnumRetType: ... 
+ +class VerifyMode(enum.IntEnum): + CERT_NONE: int + CERT_OPTIONAL: int + CERT_REQUIRED: int + +CERT_NONE: VerifyMode +CERT_OPTIONAL: VerifyMode +CERT_REQUIRED: VerifyMode + +class VerifyFlags(enum.IntFlag): + VERIFY_DEFAULT: int + VERIFY_CRL_CHECK_LEAF: int + VERIFY_CRL_CHECK_CHAIN: int + VERIFY_X509_STRICT: int + VERIFY_X509_TRUSTED_FIRST: int + if sys.version_info >= (3, 10): + VERIFY_ALLOW_PROXY_CERTS: int + VERIFY_X509_PARTIAL_CHAIN: int + +VERIFY_DEFAULT: VerifyFlags +VERIFY_CRL_CHECK_LEAF: VerifyFlags +VERIFY_CRL_CHECK_CHAIN: VerifyFlags +VERIFY_X509_STRICT: VerifyFlags +VERIFY_X509_TRUSTED_FIRST: VerifyFlags + +if sys.version_info >= (3, 10): + VERIFY_ALLOW_PROXY_CERTS: VerifyFlags + VERIFY_X509_PARTIAL_CHAIN: VerifyFlags + +class _SSLMethod(enum.IntEnum): + PROTOCOL_SSLv23: int + PROTOCOL_SSLv2: int + PROTOCOL_SSLv3: int + PROTOCOL_TLSv1: int + PROTOCOL_TLSv1_1: int + PROTOCOL_TLSv1_2: int + PROTOCOL_TLS: int + PROTOCOL_TLS_CLIENT: int + PROTOCOL_TLS_SERVER: int + +PROTOCOL_SSLv23: _SSLMethod +PROTOCOL_SSLv2: _SSLMethod +PROTOCOL_SSLv3: _SSLMethod +PROTOCOL_TLSv1: _SSLMethod +PROTOCOL_TLSv1_1: _SSLMethod +PROTOCOL_TLSv1_2: _SSLMethod +PROTOCOL_TLS: _SSLMethod +PROTOCOL_TLS_CLIENT: _SSLMethod +PROTOCOL_TLS_SERVER: _SSLMethod + +class Options(enum.IntFlag): + OP_ALL: int + OP_NO_SSLv2: int + OP_NO_SSLv3: int + OP_NO_TLSv1: int + OP_NO_TLSv1_1: int + OP_NO_TLSv1_2: int + OP_NO_TLSv1_3: int + OP_CIPHER_SERVER_PREFERENCE: int + OP_SINGLE_DH_USE: int + OP_SINGLE_ECDH_USE: int + OP_NO_COMPRESSION: int + OP_NO_TICKET: int + OP_NO_RENEGOTIATION: int + if sys.version_info >= (3, 8): + OP_ENABLE_MIDDLEBOX_COMPAT: int + if sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: int + +OP_ALL: Options +OP_NO_SSLv2: Options +OP_NO_SSLv3: Options +OP_NO_TLSv1: Options +OP_NO_TLSv1_1: Options +OP_NO_TLSv1_2: Options +OP_NO_TLSv1_3: Options +OP_CIPHER_SERVER_PREFERENCE: Options +OP_SINGLE_DH_USE: Options +OP_SINGLE_ECDH_USE: Options +OP_NO_COMPRESSION: Options +OP_NO_TICKET: Options +OP_NO_RENEGOTIATION: Options +if sys.version_info >= (3, 8): + OP_ENABLE_MIDDLEBOX_COMPAT: Options + if sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: Options + +HAS_NEVER_CHECK_COMMON_NAME: bool +HAS_SSLv2: bool +HAS_SSLv3: bool +HAS_TLSv1: bool +HAS_TLSv1_1: bool +HAS_TLSv1_2: bool +HAS_TLSv1_3: bool +HAS_ALPN: bool +HAS_ECDH: bool +HAS_SNI: bool +HAS_NPN: bool +CHANNEL_BINDING_TYPES: list[str] + +OPENSSL_VERSION: str +OPENSSL_VERSION_INFO: tuple[int, int, int, int, int] +OPENSSL_VERSION_NUMBER: int + +class AlertDescription(enum.IntEnum): + ALERT_DESCRIPTION_ACCESS_DENIED: int + ALERT_DESCRIPTION_BAD_CERTIFICATE: int + ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE: int + ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE: int + ALERT_DESCRIPTION_BAD_RECORD_MAC: int + ALERT_DESCRIPTION_CERTIFICATE_EXPIRED: int + ALERT_DESCRIPTION_CERTIFICATE_REVOKED: int + ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN: int + ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE: int + ALERT_DESCRIPTION_CLOSE_NOTIFY: int + ALERT_DESCRIPTION_DECODE_ERROR: int + ALERT_DESCRIPTION_DECOMPRESSION_FAILURE: int + ALERT_DESCRIPTION_DECRYPT_ERROR: int + ALERT_DESCRIPTION_HANDSHAKE_FAILURE: int + ALERT_DESCRIPTION_ILLEGAL_PARAMETER: int + ALERT_DESCRIPTION_INSUFFICIENT_SECURITY: int + ALERT_DESCRIPTION_INTERNAL_ERROR: int + ALERT_DESCRIPTION_NO_RENEGOTIATION: int + ALERT_DESCRIPTION_PROTOCOL_VERSION: int + ALERT_DESCRIPTION_RECORD_OVERFLOW: int + ALERT_DESCRIPTION_UNEXPECTED_MESSAGE: int + ALERT_DESCRIPTION_UNKNOWN_CA: int + 
ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY: int + ALERT_DESCRIPTION_UNRECOGNIZED_NAME: int + ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE: int + ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION: int + ALERT_DESCRIPTION_USER_CANCELLED: int + +ALERT_DESCRIPTION_HANDSHAKE_FAILURE: AlertDescription +ALERT_DESCRIPTION_INTERNAL_ERROR: AlertDescription +ALERT_DESCRIPTION_ACCESS_DENIED: AlertDescription +ALERT_DESCRIPTION_BAD_CERTIFICATE: AlertDescription +ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE: AlertDescription +ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE: AlertDescription +ALERT_DESCRIPTION_BAD_RECORD_MAC: AlertDescription +ALERT_DESCRIPTION_CERTIFICATE_EXPIRED: AlertDescription +ALERT_DESCRIPTION_CERTIFICATE_REVOKED: AlertDescription +ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN: AlertDescription +ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE: AlertDescription +ALERT_DESCRIPTION_CLOSE_NOTIFY: AlertDescription +ALERT_DESCRIPTION_DECODE_ERROR: AlertDescription +ALERT_DESCRIPTION_DECOMPRESSION_FAILURE: AlertDescription +ALERT_DESCRIPTION_DECRYPT_ERROR: AlertDescription +ALERT_DESCRIPTION_ILLEGAL_PARAMETER: AlertDescription +ALERT_DESCRIPTION_INSUFFICIENT_SECURITY: AlertDescription +ALERT_DESCRIPTION_NO_RENEGOTIATION: AlertDescription +ALERT_DESCRIPTION_PROTOCOL_VERSION: AlertDescription +ALERT_DESCRIPTION_RECORD_OVERFLOW: AlertDescription +ALERT_DESCRIPTION_UNEXPECTED_MESSAGE: AlertDescription +ALERT_DESCRIPTION_UNKNOWN_CA: AlertDescription +ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY: AlertDescription +ALERT_DESCRIPTION_UNRECOGNIZED_NAME: AlertDescription +ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE: AlertDescription +ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION: AlertDescription +ALERT_DESCRIPTION_USER_CANCELLED: AlertDescription + +class _ASN1ObjectBase(NamedTuple): + nid: int + shortname: str + longname: str + oid: str + +class _ASN1Object(_ASN1ObjectBase): + def __new__(cls, oid: str) -> Self: ... + @classmethod + def fromnid(cls, nid: int) -> Self: ... + @classmethod + def fromname(cls, name: str) -> Self: ... + +class Purpose(_ASN1Object, enum.Enum): + SERVER_AUTH: _ASN1Object + CLIENT_AUTH: _ASN1Object + +class SSLSocket(socket.socket): + context: SSLContext + server_side: bool + server_hostname: str | None + session: SSLSession | None + @property + def session_reused(self) -> bool | None: ... + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def connect(self, addr: socket._Address) -> None: ... + def connect_ex(self, addr: socket._Address) -> int: ... + def recv(self, buflen: int = 1024, flags: int = 0) -> bytes: ... + def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0) -> int: ... + def recvfrom(self, buflen: int = 1024, flags: int = 0) -> tuple[bytes, socket._RetAddress]: ... + def recvfrom_into( + self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0 + ) -> tuple[int, socket._RetAddress]: ... + def send(self, data: ReadableBuffer, flags: int = 0) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = 0) -> None: ... + @overload + def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address, addr: None = None) -> int: ... + @overload + def sendto(self, data: ReadableBuffer, flags_or_addr: int, addr: socket._Address) -> int: ... + def shutdown(self, how: int) -> None: ... + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... + def do_handshake(self, block: bool = False) -> None: ... 
# block is undocumented + @overload + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... + @overload + def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... + @overload + def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... + def cipher(self) -> tuple[str, str, int] | None: ... + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... + def compression(self) -> str | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... + def selected_alpn_protocol(self) -> str | None: ... + def selected_npn_protocol(self) -> str | None: ... + def accept(self) -> tuple[SSLSocket, socket._RetAddress]: ... + def unwrap(self) -> socket.socket: ... + def version(self) -> str | None: ... + def pending(self) -> int: ... + if sys.version_info >= (3, 8): + def verify_client_post_handshake(self) -> None: ... + +class TLSVersion(enum.IntEnum): + MINIMUM_SUPPORTED: int + MAXIMUM_SUPPORTED: int + SSLv3: int + TLSv1: int + TLSv1_1: int + TLSv1_2: int + TLSv1_3: int + +class SSLContext: + check_hostname: bool + options: Options + verify_flags: VerifyFlags + verify_mode: VerifyMode + @property + def protocol(self) -> _SSLMethod: ... + hostname_checks_common_name: bool + maximum_version: TLSVersion + minimum_version: TLSVersion + sni_callback: Callable[[SSLObject, str, SSLContext], None | int] | None + # The following two attributes have class-level defaults. + # However, the docs explicitly state that it's OK to override these attributes on instances, + # so making these ClassVars wouldn't be appropriate + sslobject_class: type[SSLObject] + sslsocket_class: type[SSLSocket] + if sys.version_info >= (3, 8): + keylog_filename: str + post_handshake_auth: bool + if sys.version_info >= (3, 10): + security_level: int + if sys.version_info >= (3, 10): + # Using the default (None) for the `protocol` parameter is deprecated, + # but there isn't a good way of marking that in the stub unless/until PEP 702 is accepted + def __new__(cls, protocol: int | None = None, *args: Any, **kwargs: Any) -> Self: ... + else: + def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... + + def cert_store_stats(self) -> dict[str, int]: ... + def load_cert_chain( + self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = None, password: _PasswordType | None = None + ) -> None: ... + def load_default_certs(self, purpose: Purpose = ...) -> None: ... + def load_verify_locations( + self, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> None: ... + @overload + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ... + @overload + def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... + @overload + def get_ca_certs(self, binary_form: bool = False) -> Any: ... + def get_ciphers(self) -> list[_Cipher]: ... + def set_default_verify_paths(self) -> None: ... + def set_ciphers(self, __cipherlist: str) -> None: ... + def set_alpn_protocols(self, alpn_protocols: Iterable[str]) -> None: ... + def set_npn_protocols(self, npn_protocols: Iterable[str]) -> None: ... + def set_servername_callback(self, server_name_callback: _SrvnmeCbType | None) -> None: ... + def load_dh_params(self, __path: str) -> None: ... + def set_ecdh_curve(self, __name: str) -> None: ... 
+ def wrap_socket( + self, + sock: socket.socket, + server_side: bool = False, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + server_hostname: str | None = None, + session: SSLSession | None = None, + ) -> SSLSocket: ... + def wrap_bio( + self, + incoming: MemoryBIO, + outgoing: MemoryBIO, + server_side: bool = False, + server_hostname: str | None = None, + session: SSLSession | None = None, + ) -> SSLObject: ... + def session_stats(self) -> dict[str, int]: ... + +class SSLObject: + context: SSLContext + @property + def server_side(self) -> bool: ... + @property + def server_hostname(self) -> str | None: ... + session: SSLSession | None + @property + def session_reused(self) -> bool: ... + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... + @overload + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... + @overload + def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... + @overload + def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... + def selected_alpn_protocol(self) -> str | None: ... + def selected_npn_protocol(self) -> str | None: ... + def cipher(self) -> tuple[str, str, int] | None: ... + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... + def compression(self) -> str | None: ... + def pending(self) -> int: ... + def do_handshake(self) -> None: ... + def unwrap(self) -> None: ... + def version(self) -> str | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... + if sys.version_info >= (3, 8): + def verify_client_post_handshake(self) -> None: ... + +@final +class MemoryBIO: + pending: int + eof: bool + def read(self, __size: int = -1) -> bytes: ... + def write(self, __buf: ReadableBuffer) -> int: ... + def write_eof(self) -> None: ... + +@final +class SSLSession: + @property + def has_ticket(self) -> bool: ... + @property + def id(self) -> bytes: ... + @property + def ticket_lifetime_hint(self) -> int: ... + @property + def time(self) -> int: ... + @property + def timeout(self) -> int: ... + +class SSLErrorNumber(enum.IntEnum): + SSL_ERROR_EOF: int + SSL_ERROR_INVALID_ERROR_CODE: int + SSL_ERROR_SSL: int + SSL_ERROR_SYSCALL: int + SSL_ERROR_WANT_CONNECT: int + SSL_ERROR_WANT_READ: int + SSL_ERROR_WANT_WRITE: int + SSL_ERROR_WANT_X509_LOOKUP: int + SSL_ERROR_ZERO_RETURN: int + +SSL_ERROR_EOF: SSLErrorNumber # undocumented +SSL_ERROR_INVALID_ERROR_CODE: SSLErrorNumber # undocumented +SSL_ERROR_SSL: SSLErrorNumber # undocumented +SSL_ERROR_SYSCALL: SSLErrorNumber # undocumented +SSL_ERROR_WANT_CONNECT: SSLErrorNumber # undocumented +SSL_ERROR_WANT_READ: SSLErrorNumber # undocumented +SSL_ERROR_WANT_WRITE: SSLErrorNumber # undocumented +SSL_ERROR_WANT_X509_LOOKUP: SSLErrorNumber # undocumented +SSL_ERROR_ZERO_RETURN: SSLErrorNumber # undocumented + +def get_protocol_name(protocol_code: int) -> str: ... 
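# Annotation: a minimal usage sketch for the ssl stubs above (not part of the
# vendored stub itself). create_default_context() returns an SSLContext, and
# SSLContext.wrap_socket() returns an SSLSocket. The host name is an arbitrary
# example value.
import socket
import ssl

context = ssl.create_default_context()
with socket.create_connection(("example.org", 443)) as raw_sock:
    with context.wrap_socket(raw_sock, server_hostname="example.org") as tls_sock:
        print(tls_sock.version())                 # e.g. "TLSv1.3"
        print(tls_sock.getpeercert() is not None)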
+ +if sys.version_info < (3, 9): + AF_INET: int +PEM_FOOTER: str +PEM_HEADER: str +SOCK_STREAM: int +SOL_SOCKET: int +SO_TYPE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/stat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/stat.pyi new file mode 100644 index 00000000..4518acb5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/stat.pyi @@ -0,0 +1 @@ +from _stat import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/statistics.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/statistics.pyi new file mode 100644 index 00000000..1358b1f9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/statistics.pyi @@ -0,0 +1,131 @@ +import sys +from _typeshed import SupportsRichComparisonT +from collections.abc import Hashable, Iterable, Sequence +from decimal import Decimal +from fractions import Fraction +from typing import Any, NamedTuple, SupportsFloat, TypeVar +from typing_extensions import Literal, Self, TypeAlias + +__all__ = [ + "StatisticsError", + "pstdev", + "pvariance", + "stdev", + "variance", + "median", + "median_low", + "median_high", + "median_grouped", + "mean", + "mode", + "harmonic_mean", +] + +if sys.version_info >= (3, 8): + __all__ += ["geometric_mean", "multimode", "NormalDist", "fmean", "quantiles"] + +if sys.version_info >= (3, 10): + __all__ += ["covariance", "correlation", "linear_regression"] + +# Most functions in this module accept homogeneous collections of one of these types +_Number: TypeAlias = float | Decimal | Fraction +_NumberT = TypeVar("_NumberT", float, Decimal, Fraction) + +# Used in mode, multimode +_HashableT = TypeVar("_HashableT", bound=Hashable) + +class StatisticsError(ValueError): ... + +if sys.version_info >= (3, 11): + def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = None) -> float: ... + +elif sys.version_info >= (3, 8): + def fmean(data: Iterable[SupportsFloat]) -> float: ... + +if sys.version_info >= (3, 8): + def geometric_mean(data: Iterable[SupportsFloat]) -> float: ... + +def mean(data: Iterable[_NumberT]) -> _NumberT: ... + +if sys.version_info >= (3, 10): + def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = None) -> _NumberT: ... + +else: + def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: ... + +def median(data: Iterable[_NumberT]) -> _NumberT: ... +def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... +def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... + +if sys.version_info >= (3, 11): + def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = 1.0) -> float: ... + +else: + def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: ... + +def mode(data: Iterable[_HashableT]) -> _HashableT: ... + +if sys.version_info >= (3, 8): + def multimode(data: Iterable[_HashableT]) -> list[_HashableT]: ... + +def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... +def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... + +if sys.version_info >= (3, 8): + def quantiles( + data: Iterable[_NumberT], *, n: int = 4, method: Literal["inclusive", "exclusive"] = "exclusive" + ) -> list[_NumberT]: ... 
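# Annotation: a minimal usage sketch for the statistics stubs above (not part of
# the vendored stub itself). mean()/median()/quantiles() are annotated to
# preserve the element type (float, Decimal or Fraction), while fmean() always
# returns float. The sample data is arbitrary.
import statistics

data = [2.5, 3.25, 5.75, 1.0, 4.5]
print(statistics.mean(data))             # arithmetic mean
print(statistics.median(data))           # middle value
print(statistics.fmean(data))            # fast float mean (3.8+)
print(statistics.quantiles(data, n=4))   # quartile cut points (3.8+)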
+ +def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... +def variance(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... + +if sys.version_info >= (3, 8): + class NormalDist: + def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None: ... + @property + def mean(self) -> float: ... + @property + def median(self) -> float: ... + @property + def mode(self) -> float: ... + @property + def stdev(self) -> float: ... + @property + def variance(self) -> float: ... + @classmethod + def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: ... + def samples(self, n: int, *, seed: Any | None = None) -> list[float]: ... + def pdf(self, x: float) -> float: ... + def cdf(self, x: float) -> float: ... + def inv_cdf(self, p: float) -> float: ... + def overlap(self, other: NormalDist) -> float: ... + def quantiles(self, n: int = 4) -> list[float]: ... + if sys.version_info >= (3, 9): + def zscore(self, x: float) -> float: ... + + def __eq__(self, x2: object) -> bool: ... + def __add__(self, x2: float | NormalDist) -> NormalDist: ... + def __sub__(self, x2: float | NormalDist) -> NormalDist: ... + def __mul__(self, x2: float) -> NormalDist: ... + def __truediv__(self, x2: float) -> NormalDist: ... + def __pos__(self) -> NormalDist: ... + def __neg__(self) -> NormalDist: ... + __radd__ = __add__ + def __rsub__(self, x2: float | NormalDist) -> NormalDist: ... + __rmul__ = __mul__ + +if sys.version_info >= (3, 10): + def correlation(__x: Sequence[_Number], __y: Sequence[_Number]) -> float: ... + def covariance(__x: Sequence[_Number], __y: Sequence[_Number]) -> float: ... + + class LinearRegression(NamedTuple): + slope: float + intercept: float + +if sys.version_info >= (3, 11): + def linear_regression( + __regressor: Sequence[_Number], __dependent_variable: Sequence[_Number], *, proportional: bool = False + ) -> LinearRegression: ... + +elif sys.version_info >= (3, 10): + def linear_regression(__regressor: Sequence[_Number], __dependent_variable: Sequence[_Number]) -> LinearRegression: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/string.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/string.pyi new file mode 100644 index 00000000..dc9a449e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/string.pyi @@ -0,0 +1,83 @@ +import sys +from _typeshed import StrOrLiteralStr +from collections.abc import Iterable, Mapping, Sequence +from re import Pattern, RegexFlag +from typing import Any, ClassVar, overload +from typing_extensions import LiteralString, TypeAlias + +__all__ = [ + "ascii_letters", + "ascii_lowercase", + "ascii_uppercase", + "capwords", + "digits", + "hexdigits", + "octdigits", + "printable", + "punctuation", + "whitespace", + "Formatter", + "Template", +] + +ascii_letters: LiteralString +ascii_lowercase: LiteralString +ascii_uppercase: LiteralString +digits: LiteralString +hexdigits: LiteralString +octdigits: LiteralString +punctuation: LiteralString +printable: LiteralString +whitespace: LiteralString + +def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: ... + +if sys.version_info >= (3, 9): + _TemplateMetaclass: TypeAlias = type +else: + class _TemplateMetaclass(type): + pattern: ClassVar[str] + def __init__(cls, name: str, bases: tuple[type, ...], dct: dict[str, Any]) -> None: ... 
+ +class Template(metaclass=_TemplateMetaclass): + template: str + delimiter: ClassVar[str] + idpattern: ClassVar[str] + braceidpattern: ClassVar[str | None] + flags: ClassVar[RegexFlag] + pattern: ClassVar[Pattern[str]] + def __init__(self, template: str) -> None: ... + def substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... + def safe_substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... + if sys.version_info >= (3, 11): + def get_identifiers(self) -> list[str]: ... + def is_valid(self) -> bool: ... + +class Formatter: + @overload + def format(self, __format_string: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + @overload + def format(self, __format_string: str, *args: Any, **kwargs: Any) -> str: ... + @overload + def vformat( + self, format_string: LiteralString, args: Sequence[LiteralString], kwargs: Mapping[LiteralString, LiteralString] + ) -> LiteralString: ... + @overload + def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... + def _vformat( # undocumented + self, + format_string: str, + args: Sequence[Any], + kwargs: Mapping[str, Any], + used_args: set[int | str], + recursion_depth: int, + auto_arg_index: int = 0, + ) -> tuple[str, int]: ... + def parse( + self, format_string: StrOrLiteralStr + ) -> Iterable[tuple[StrOrLiteralStr, StrOrLiteralStr | None, StrOrLiteralStr | None, StrOrLiteralStr | None]]: ... + def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... + def get_value(self, key: int | str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... + def check_unused_args(self, used_args: set[int | str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... + def format_field(self, value: Any, format_spec: str) -> Any: ... + def convert_field(self, value: Any, conversion: str) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/stringprep.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/stringprep.pyi new file mode 100644 index 00000000..fc28c027 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/stringprep.pyi @@ -0,0 +1,27 @@ +b1_set: set[int] +b3_exceptions: dict[int, str] +c22_specials: set[int] +c6_set: set[int] +c7_set: set[int] +c8_set: set[int] +c9_set: set[int] + +def in_table_a1(code: str) -> bool: ... +def in_table_b1(code: str) -> bool: ... +def map_table_b3(code: str) -> str: ... +def map_table_b2(a: str) -> str: ... +def in_table_c11(code: str) -> bool: ... +def in_table_c12(code: str) -> bool: ... +def in_table_c11_c12(code: str) -> bool: ... +def in_table_c21(code: str) -> bool: ... +def in_table_c22(code: str) -> bool: ... +def in_table_c21_c22(code: str) -> bool: ... +def in_table_c3(code: str) -> bool: ... +def in_table_c4(code: str) -> bool: ... +def in_table_c5(code: str) -> bool: ... +def in_table_c6(code: str) -> bool: ... +def in_table_c7(code: str) -> bool: ... +def in_table_c8(code: str) -> bool: ... +def in_table_c9(code: str) -> bool: ... +def in_table_d1(code: str) -> bool: ... +def in_table_d2(code: str) -> bool: ... 
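# Annotation: a minimal usage sketch for the string stubs above (not part of the
# vendored stub itself). Template does $-based substitution, and
# Formatter.parse() yields (literal_text, field_name, format_spec, conversion)
# tuples. The template text and values are arbitrary example data.
from string import Formatter, Template

t = Template("Hello, $name! You have $count new messages.")
print(t.substitute(name="Ada", count=3))
print(t.safe_substitute(name="Ada"))     # leaves $count in place instead of raising

for literal, field, spec, conv in Formatter().parse("{x:>8} items"):
    print(repr(literal), field, spec, conv)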
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/struct.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/struct.pyi new file mode 100644 index 00000000..4220cd82 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/struct.pyi @@ -0,0 +1,26 @@ +from _typeshed import ReadableBuffer, WriteableBuffer +from collections.abc import Iterator +from typing import Any + +__all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpack", "Struct", "error"] + +class error(Exception): ... + +def pack(__fmt: str | bytes, *v: Any) -> bytes: ... +def pack_into(__fmt: str | bytes, __buffer: WriteableBuffer, __offset: int, *v: Any) -> None: ... +def unpack(__format: str | bytes, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... +def unpack_from(__format: str | bytes, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... +def iter_unpack(__format: str | bytes, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... +def calcsize(__format: str | bytes) -> int: ... + +class Struct: + @property + def format(self) -> str: ... + @property + def size(self) -> int: ... + def __init__(self, format: str | bytes) -> None: ... + def pack(self, *v: Any) -> bytes: ... + def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... + def unpack(self, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... + def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... + def iter_unpack(self, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/subprocess.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/subprocess.pyi new file mode 100644 index 00000000..3c804181 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/subprocess.pyi @@ -0,0 +1,2622 @@ +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath +from collections.abc import Callable, Collection, Iterable, Mapping, Sequence +from types import TracebackType +from typing import IO, Any, AnyStr, Generic, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "Popen", + "PIPE", + "STDOUT", + "call", + "check_call", + "getstatusoutput", + "getoutput", + "check_output", + "run", + "CalledProcessError", + "DEVNULL", + "SubprocessError", + "TimeoutExpired", + "CompletedProcess", +] + +if sys.platform == "win32": + __all__ += [ + "CREATE_NEW_CONSOLE", + "CREATE_NEW_PROCESS_GROUP", + "STARTF_USESHOWWINDOW", + "STARTF_USESTDHANDLES", + "STARTUPINFO", + "STD_ERROR_HANDLE", + "STD_INPUT_HANDLE", + "STD_OUTPUT_HANDLE", + "SW_HIDE", + "ABOVE_NORMAL_PRIORITY_CLASS", + "BELOW_NORMAL_PRIORITY_CLASS", + "CREATE_BREAKAWAY_FROM_JOB", + "CREATE_DEFAULT_ERROR_MODE", + "CREATE_NO_WINDOW", + "DETACHED_PROCESS", + "HIGH_PRIORITY_CLASS", + "IDLE_PRIORITY_CLASS", + "NORMAL_PRIORITY_CLASS", + "REALTIME_PRIORITY_CLASS", + ] + +# We prefer to annotate inputs to methods (eg subprocess.check_call) with these +# union types. +# For outputs we use laborious literal based overloads to try to determine +# which specific return types to use, and prefer to fall back to Any when +# this does not work, so the caller does not have to use an assertion to confirm +# which type. 
+# +# For example: +# +# try: +# x = subprocess.check_output(["ls", "-l"]) +# reveal_type(x) # bytes, based on the overloads +# except TimeoutError as e: +# reveal_type(e.cmd) # Any, but morally is _CMD +_FILE: TypeAlias = None | int | IO[Any] +_InputString: TypeAlias = ReadableBuffer | str +if sys.version_info >= (3, 8): + _CMD: TypeAlias = StrOrBytesPath | Sequence[StrOrBytesPath] +else: + # Python 3.7 doesn't support _CMD being a single PathLike. + # See: https://bugs.python.org/issue31961 + _CMD: TypeAlias = str | bytes | Sequence[StrOrBytesPath] +if sys.platform == "win32": + _ENV: TypeAlias = Mapping[str, str] +else: + _ENV: TypeAlias = Mapping[bytes, StrOrBytesPath] | Mapping[str, StrOrBytesPath] + +_T = TypeVar("_T") + +# These two are private but documented +if sys.version_info >= (3, 11): + _USE_VFORK: bool +if sys.version_info >= (3, 8): + _USE_POSIX_SPAWN: bool + +class CompletedProcess(Generic[_T]): + # morally: _CMD + args: Any + returncode: int + # These can both be None, but requiring checks for None would be tedious + # and writing all the overloads would be horrific. + stdout: _T + stderr: _T + def __init__(self, args: _CMD, returncode: int, stdout: _T | None = None, stderr: _T | None = None) -> None: ... + def check_returncode(self) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +if sys.version_info >= (3, 11): + # 3.11 adds "process_group" argument + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: Literal[True], + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[str]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the *real* keyword only args start + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, + text: Literal[None, False] = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[bytes]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: _InputString | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> CompletedProcess[Any]: ... + +elif sys.version_info >= (3, 10): + # 3.10 adds "pipesize" argument + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: Literal[True], + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[str]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the *real* keyword only args start + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, + text: Literal[None, False] = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[bytes]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: _InputString | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> CompletedProcess[Any]: ... + +elif sys.version_info >= (3, 9): + # 3.9 adds arguments "user", "group", "extra_groups" and "umask" + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: Literal[True], + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[str]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the *real* keyword only args start + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, + text: Literal[None, False] = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[bytes]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: _InputString | None = None, + text: bool | None = None, + timeout: float | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> CompletedProcess[Any]: ... + +else: + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: Literal[True], + timeout: float | None = None, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + ) -> CompletedProcess[str]: ... 
+ @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the *real* keyword only args start + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: str | None = None, + text: bool | None = None, + timeout: float | None = None, + ) -> CompletedProcess[str]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, + text: Literal[None, False] = None, + timeout: float | None = None, + ) -> CompletedProcess[bytes]: ... + @overload + def run( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + capture_output: bool = False, + check: bool = False, + encoding: str | None = None, + errors: str | None = None, + input: _InputString | None = None, + text: bool | None = None, + timeout: float | None = None, + ) -> CompletedProcess[Any]: ... + +# Same args as Popen.__init__ +if sys.version_info >= (3, 11): + # 3.11 adds "process_group" argument + def call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> int: ... 
+ +elif sys.version_info >= (3, 10): + # 3.10 adds "pipesize" argument + def call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> int: ... + +elif sys.version_info >= (3, 9): + # 3.9 adds arguments "user", "group", "extra_groups" and "umask" + def call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> int: ... + +else: + def call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + text: bool | None = None, + ) -> int: ... + +# Same args as Popen.__init__ +if sys.version_info >= (3, 11): + # 3.11 adds "process_group" argument + def check_call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + timeout: float | None = ..., + *, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> int: ... 
+ +elif sys.version_info >= (3, 10): + # 3.10 adds "pipesize" argument + def check_call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + timeout: float | None = ..., + *, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> int: ... + +elif sys.version_info >= (3, 9): + # 3.9 adds arguments "user", "group", "extra_groups" and "umask" + def check_call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + timeout: float | None = ..., + *, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> int: ... + +else: + def check_call( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stdout: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + timeout: float | None = ..., + *, + text: bool | None = None, + ) -> int: ... + +if sys.version_info >= (3, 11): + # 3.11 adds "process_group" argument + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: Literal[True], + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> str: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the real keyword only ones start + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> str: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: None = None, + errors: None = None, + text: Literal[None, False] = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> bytes: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> Any: ... # morally: -> str | bytes + +elif sys.version_info >= (3, 10): + # 3.10 adds "pipesize" argument + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: Literal[True], + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> str: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the real keyword only ones start + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: None = None, + errors: None = None, + text: Literal[None, False] = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> bytes: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> Any: ... # morally: -> str | bytes + +elif sys.version_info >= (3, 9): + # 3.9 adds arguments "user", "group", "extra_groups" and "umask" + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: Literal[True], + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> str: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the real keyword only ones start + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: None = None, + errors: None = None, + text: Literal[None, False] = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> bytes: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> Any: ... # morally: -> str | bytes + +else: + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: Literal[True], + ) -> str: ... 
+ @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str, + errors: str | None = None, + text: bool | None = None, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str, + text: bool | None = None, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the real keyword only ones start + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + ) -> str: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: None = None, + errors: None = None, + text: Literal[None, False] = None, + ) -> bytes: ... + @overload + def check_output( + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE = None, + stderr: _FILE = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + timeout: float | None = None, + input: _InputString | None = ..., + encoding: str | None = None, + errors: str | None = None, + text: bool | None = None, + ) -> Any: ... 
# morally: -> str | bytes + +PIPE: int +STDOUT: int +DEVNULL: int + +class SubprocessError(Exception): ... + +class TimeoutExpired(SubprocessError): + def __init__( + self, cmd: _CMD, timeout: float, output: str | bytes | None = None, stderr: str | bytes | None = None + ) -> None: ... + # morally: _CMD + cmd: Any + timeout: float + # morally: str | bytes | None + output: Any + stdout: bytes | None + stderr: bytes | None + +class CalledProcessError(SubprocessError): + returncode: int + # morally: _CMD + cmd: Any + # morally: str | bytes | None + output: Any + + # morally: str | bytes | None + stdout: Any + stderr: Any + def __init__( + self, returncode: int, cmd: _CMD, output: str | bytes | None = None, stderr: str | bytes | None = None + ) -> None: ... + +class Popen(Generic[AnyStr]): + args: _CMD + stdin: IO[AnyStr] | None + stdout: IO[AnyStr] | None + stderr: IO[AnyStr] | None + pid: int + returncode: int | Any + universal_newlines: bool + + if sys.version_info >= (3, 11): + # process_group is added in 3.11 + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str | None = None, + errors: str, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the *real* keyword only args start + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: Literal[True], + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... + @overload + def __init__( + self: Popen[bytes], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[Any], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, + ) -> None: ... + elif sys.version_info >= (3, 10): + # pipesize is added in 3.10 + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str | None = None, + errors: str, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the *real* keyword only args start + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: Literal[True], + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[bytes], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[Any], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + ) -> None: ... + elif sys.version_info >= (3, 9): + # user, group, extra_groups, umask were added in 3.9 + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str | None = None, + errors: str, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the *real* keyword only args start + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: Literal[True], + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... + @overload + def __init__( + self: Popen[bytes], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[Any], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + ) -> None: ... + else: + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str | None = None, + errors: str, + ) -> None: ... + @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + *, + universal_newlines: Literal[True], + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + # where the *real* keyword only args start + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Popen[str], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: Literal[True], + encoding: str | None = None, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: Popen[bytes], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + ) -> None: ... + @overload + def __init__( + self: Popen[Any], + args: _CMD, + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., + *, + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + ) -> None: ... + + def poll(self) -> int | None: ... + def wait(self, timeout: float | None = None) -> int: ... + # morally the members of the returned tuple should be optional + # TODO this should allow ReadableBuffer for Popen[bytes], but adding + # overloads for that runs into a mypy bug (python/mypy#14070). + def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: ... + def send_signal(self, sig: int) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# The result really is always a str. +if sys.version_info >= (3, 11): + def getstatusoutput(cmd: str | bytes, *, encoding: str | None = None, errors: str | None = None) -> tuple[int, str]: ... + def getoutput(cmd: str | bytes, *, encoding: str | None = None, errors: str | None = None) -> str: ... + +else: + def getstatusoutput(cmd: str | bytes) -> tuple[int, str]: ... + def getoutput(cmd: str | bytes) -> str: ... + +if sys.version_info >= (3, 8): + def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... # undocumented + +else: + def list2cmdline(seq: Iterable[str]) -> str: ... 
# undocumented + +if sys.platform == "win32": + class STARTUPINFO: + def __init__( + self, + *, + dwFlags: int = 0, + hStdInput: Any | None = None, + hStdOutput: Any | None = None, + hStdError: Any | None = None, + wShowWindow: int = 0, + lpAttributeList: Mapping[str, Any] | None = None, + ) -> None: ... + dwFlags: int + hStdInput: Any | None + hStdOutput: Any | None + hStdError: Any | None + wShowWindow: int + lpAttributeList: Mapping[str, Any] + from _winapi import ( + ABOVE_NORMAL_PRIORITY_CLASS as ABOVE_NORMAL_PRIORITY_CLASS, + BELOW_NORMAL_PRIORITY_CLASS as BELOW_NORMAL_PRIORITY_CLASS, + CREATE_BREAKAWAY_FROM_JOB as CREATE_BREAKAWAY_FROM_JOB, + CREATE_DEFAULT_ERROR_MODE as CREATE_DEFAULT_ERROR_MODE, + CREATE_NEW_CONSOLE as CREATE_NEW_CONSOLE, + CREATE_NEW_PROCESS_GROUP as CREATE_NEW_PROCESS_GROUP, + CREATE_NO_WINDOW as CREATE_NO_WINDOW, + DETACHED_PROCESS as DETACHED_PROCESS, + HIGH_PRIORITY_CLASS as HIGH_PRIORITY_CLASS, + IDLE_PRIORITY_CLASS as IDLE_PRIORITY_CLASS, + NORMAL_PRIORITY_CLASS as NORMAL_PRIORITY_CLASS, + REALTIME_PRIORITY_CLASS as REALTIME_PRIORITY_CLASS, + STARTF_USESHOWWINDOW as STARTF_USESHOWWINDOW, + STARTF_USESTDHANDLES as STARTF_USESTDHANDLES, + STD_ERROR_HANDLE as STD_ERROR_HANDLE, + STD_INPUT_HANDLE as STD_INPUT_HANDLE, + STD_OUTPUT_HANDLE as STD_OUTPUT_HANDLE, + SW_HIDE as SW_HIDE, + ) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sunau.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sunau.pyi new file mode 100644 index 00000000..6109b368 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sunau.pyi @@ -0,0 +1,84 @@ +import sys +from _typeshed import Unused +from typing import IO, Any, NamedTuple, NoReturn, overload +from typing_extensions import Literal, Self, TypeAlias + +_File: TypeAlias = str | IO[bytes] + +class Error(Exception): ... + +AUDIO_FILE_MAGIC: int +AUDIO_FILE_ENCODING_MULAW_8: int +AUDIO_FILE_ENCODING_LINEAR_8: int +AUDIO_FILE_ENCODING_LINEAR_16: int +AUDIO_FILE_ENCODING_LINEAR_24: int +AUDIO_FILE_ENCODING_LINEAR_32: int +AUDIO_FILE_ENCODING_FLOAT: int +AUDIO_FILE_ENCODING_DOUBLE: int +AUDIO_FILE_ENCODING_ADPCM_G721: int +AUDIO_FILE_ENCODING_ADPCM_G722: int +AUDIO_FILE_ENCODING_ADPCM_G723_3: int +AUDIO_FILE_ENCODING_ADPCM_G723_5: int +AUDIO_FILE_ENCODING_ALAW_8: int +AUDIO_UNKNOWN_SIZE: int + +class _sunau_params(NamedTuple): + nchannels: int + sampwidth: int + framerate: int + nframes: int + comptype: str + compname: str + +class Au_read: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def getfp(self) -> IO[bytes] | None: ... + def rewind(self) -> None: ... + def close(self) -> None: ... + def tell(self) -> int: ... + def getnchannels(self) -> int: ... + def getnframes(self) -> int: ... + def getsampwidth(self) -> int: ... + def getframerate(self) -> int: ... + def getcomptype(self) -> str: ... + def getcompname(self) -> str: ... + def getparams(self) -> _sunau_params: ... + def getmarkers(self) -> None: ... + def getmark(self, id: Any) -> NoReturn: ... + def setpos(self, pos: int) -> None: ... + def readframes(self, nframes: int) -> bytes | None: ... + +class Au_write: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def setnchannels(self, nchannels: int) -> None: ... + def getnchannels(self) -> int: ... + def setsampwidth(self, sampwidth: int) -> None: ... 
+ def getsampwidth(self) -> int: ... + def setframerate(self, framerate: float) -> None: ... + def getframerate(self) -> int: ... + def setnframes(self, nframes: int) -> None: ... + def getnframes(self) -> int: ... + def setcomptype(self, type: str, name: str) -> None: ... + def getcomptype(self) -> str: ... + def getcompname(self) -> str: ... + def setparams(self, params: _sunau_params) -> None: ... + def getparams(self) -> _sunau_params: ... + def tell(self) -> int: ... + # should be any bytes-like object after 3.4, but we don't have a type for that + def writeframesraw(self, data: bytes) -> None: ... + def writeframes(self, data: bytes) -> None: ... + def close(self) -> None: ... + +@overload +def open(f: _File, mode: Literal["r", "rb"]) -> Au_read: ... +@overload +def open(f: _File, mode: Literal["w", "wb"]) -> Au_write: ... +@overload +def open(f: _File, mode: str | None = None) -> Any: ... + +if sys.version_info < (3, 9): + openfp = open diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/symbol.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/symbol.pyi new file mode 100644 index 00000000..bb666037 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/symbol.pyi @@ -0,0 +1,97 @@ +import sys + +single_input: int +file_input: int +eval_input: int +decorator: int +decorators: int +decorated: int +async_funcdef: int +funcdef: int +parameters: int +typedargslist: int +tfpdef: int +varargslist: int +vfpdef: int +stmt: int +simple_stmt: int +small_stmt: int +expr_stmt: int +annassign: int +testlist_star_expr: int +augassign: int +del_stmt: int +pass_stmt: int +flow_stmt: int +break_stmt: int +continue_stmt: int +return_stmt: int +yield_stmt: int +raise_stmt: int +import_stmt: int +import_name: int +import_from: int +import_as_name: int +dotted_as_name: int +import_as_names: int +dotted_as_names: int +dotted_name: int +global_stmt: int +nonlocal_stmt: int +assert_stmt: int +compound_stmt: int +async_stmt: int +if_stmt: int +while_stmt: int +for_stmt: int +try_stmt: int +with_stmt: int +with_item: int +except_clause: int +suite: int +test: int +test_nocond: int +lambdef: int +lambdef_nocond: int +or_test: int +and_test: int +not_test: int +comparison: int +comp_op: int +star_expr: int +expr: int +xor_expr: int +and_expr: int +shift_expr: int +arith_expr: int +term: int +factor: int +power: int +atom_expr: int +atom: int +testlist_comp: int +trailer: int +subscriptlist: int +subscript: int +sliceop: int +exprlist: int +testlist: int +dictorsetmaker: int +classdef: int +arglist: int +argument: int +comp_iter: int +comp_for: int +comp_if: int +encoding_decl: int +yield_expr: int +yield_arg: int +sync_comp_for: int +if sys.version_info >= (3, 8): + func_body_suite: int + func_type: int + func_type_input: int + namedexpr_test: int + typelist: int + +sym_name: dict[int, str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/symtable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/symtable.pyi new file mode 100644 index 00000000..304ae8bf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/symtable.pyi @@ -0,0 +1,63 @@ +import sys +from _collections_abc import dict_keys +from collections.abc import Sequence +from typing import Any + +__all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"] + +def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: 
... + +class SymbolTable: + def __init__(self, raw_table: Any, filename: str) -> None: ... + def get_type(self) -> str: ... + def get_id(self) -> int: ... + def get_name(self) -> str: ... + def get_lineno(self) -> int: ... + def is_optimized(self) -> bool: ... + def is_nested(self) -> bool: ... + def has_children(self) -> bool: ... + if sys.version_info < (3, 9): + def has_exec(self) -> bool: ... + + def get_identifiers(self) -> dict_keys[str, int]: ... + def lookup(self, name: str) -> Symbol: ... + def get_symbols(self) -> list[Symbol]: ... + def get_children(self) -> list[SymbolTable]: ... + +class Function(SymbolTable): + def get_parameters(self) -> tuple[str, ...]: ... + def get_locals(self) -> tuple[str, ...]: ... + def get_globals(self) -> tuple[str, ...]: ... + def get_frees(self) -> tuple[str, ...]: ... + if sys.version_info >= (3, 8): + def get_nonlocals(self) -> tuple[str, ...]: ... + +class Class(SymbolTable): + def get_methods(self) -> tuple[str, ...]: ... + +class Symbol: + if sys.version_info >= (3, 8): + def __init__( + self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None, *, module_scope: bool = False + ) -> None: ... + def is_nonlocal(self) -> bool: ... + else: + def __init__(self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None) -> None: ... + + def get_name(self) -> str: ... + def is_referenced(self) -> bool: ... + def is_parameter(self) -> bool: ... + def is_global(self) -> bool: ... + def is_declared_global(self) -> bool: ... + def is_local(self) -> bool: ... + def is_annotated(self) -> bool: ... + def is_free(self) -> bool: ... + def is_imported(self) -> bool: ... + def is_assigned(self) -> bool: ... + def is_namespace(self) -> bool: ... + def get_namespaces(self) -> Sequence[SymbolTable]: ... + def get_namespace(self) -> SymbolTable: ... + +class SymbolTableFactory: + def new(self, table: Any, filename: str) -> SymbolTable: ... + def __call__(self, table: Any, filename: str) -> SymbolTable: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sys.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sys.pyi new file mode 100644 index 00000000..e1288159 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sys.pyi @@ -0,0 +1,339 @@ +import sys +from _typeshed import OptExcInfo, ProfileFunction, TraceFunction, structseq +from builtins import object as _object +from collections.abc import AsyncGenerator, Callable, Coroutine, Sequence +from importlib.abc import PathEntryFinder +from importlib.machinery import ModuleSpec +from io import TextIOWrapper +from types import FrameType, ModuleType, TracebackType +from typing import Any, NoReturn, Protocol, TextIO, TypeVar, overload +from typing_extensions import Final, Literal, TypeAlias, final + +_T = TypeVar("_T") + +# see https://github.com/python/typeshed/issues/8513#issue-1333671093 for the rationale behind this alias +_ExitCode: TypeAlias = str | int | None +_OptExcInfo: TypeAlias = OptExcInfo # noqa: Y047 # TODO: obsolete, remove fall 2022 or later + +# Intentionally omits one deprecated and one optional method of `importlib.abc.MetaPathFinder` +class _MetaPathFinder(Protocol): + def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = ...) -> ModuleSpec | None: ... 
+ +# ----- sys variables ----- +if sys.platform != "win32": + abiflags: str +argv: list[str] +base_exec_prefix: str +base_prefix: str +byteorder: Literal["little", "big"] +builtin_module_names: Sequence[str] # actually a tuple of strings +copyright: str +if sys.platform == "win32": + dllhandle: int +dont_write_bytecode: bool +displayhook: Callable[[object], Any] +excepthook: Callable[[type[BaseException], BaseException, TracebackType | None], Any] +exec_prefix: str +executable: str +float_repr_style: Literal["short", "legacy"] +hexversion: int +last_type: type[BaseException] | None +last_value: BaseException | None +last_traceback: TracebackType | None +maxsize: int +maxunicode: int +meta_path: list[_MetaPathFinder] +modules: dict[str, ModuleType] +if sys.version_info >= (3, 10): + orig_argv: list[str] +path: list[str] +path_hooks: list[Callable[[str], PathEntryFinder]] +path_importer_cache: dict[str, PathEntryFinder | None] +platform: str +if sys.version_info >= (3, 9): + platlibdir: str +prefix: str +if sys.version_info >= (3, 8): + pycache_prefix: str | None +ps1: object +ps2: object +stdin: TextIO +stdout: TextIO +stderr: TextIO +if sys.version_info >= (3, 10): + stdlib_module_names: frozenset[str] + +__stdin__: Final[TextIOWrapper] # Contains the original value of stdin +__stdout__: Final[TextIOWrapper] # Contains the original value of stdout +__stderr__: Final[TextIOWrapper] # Contains the original value of stderr +tracebacklimit: int +version: str +api_version: int +warnoptions: Any +# Each entry is a tuple of the form (action, message, category, module, +# lineno) +if sys.platform == "win32": + winver: str +_xoptions: dict[Any, Any] + +# Type alias used as a mixin for structseq classes that cannot be instantiated at runtime +# This can't be represented in the type system, so we just use `structseq[Any]` +_UninstantiableStructseq: TypeAlias = structseq[Any] + +flags: _flags + +if sys.version_info >= (3, 10): + _FlagTuple: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, bool, int, int] +else: + _FlagTuple: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, bool, int] + +@final +class _flags(_UninstantiableStructseq, _FlagTuple): + @property + def debug(self) -> int: ... + @property + def inspect(self) -> int: ... + @property + def interactive(self) -> int: ... + @property + def optimize(self) -> int: ... + @property + def dont_write_bytecode(self) -> int: ... + @property + def no_user_site(self) -> int: ... + @property + def no_site(self) -> int: ... + @property + def ignore_environment(self) -> int: ... + @property + def verbose(self) -> int: ... + @property + def bytes_warning(self) -> int: ... + @property + def quiet(self) -> int: ... + @property + def hash_randomization(self) -> int: ... + @property + def isolated(self) -> int: ... + @property + def dev_mode(self) -> bool: ... + @property + def utf8_mode(self) -> int: ... + if sys.version_info >= (3, 10): + @property + def warn_default_encoding(self) -> int: ... # undocumented + if sys.version_info >= (3, 11): + @property + def safe_path(self) -> bool: ... + +float_info: _float_info + +@final +class _float_info(structseq[float], tuple[float, int, int, float, int, int, int, int, float, int, int]): + @property + def max(self) -> float: ... # DBL_MAX + @property + def max_exp(self) -> int: ... # DBL_MAX_EXP + @property + def max_10_exp(self) -> int: ... # DBL_MAX_10_EXP + @property + def min(self) -> float: ... # DBL_MIN + @property + def min_exp(self) -> int: ... 
# DBL_MIN_EXP + @property + def min_10_exp(self) -> int: ... # DBL_MIN_10_EXP + @property + def dig(self) -> int: ... # DBL_DIG + @property + def mant_dig(self) -> int: ... # DBL_MANT_DIG + @property + def epsilon(self) -> float: ... # DBL_EPSILON + @property + def radix(self) -> int: ... # FLT_RADIX + @property + def rounds(self) -> int: ... # FLT_ROUNDS + +hash_info: _hash_info + +@final +class _hash_info(structseq[Any | int], tuple[int, int, int, int, int, str, int, int, int]): + @property + def width(self) -> int: ... + @property + def modulus(self) -> int: ... + @property + def inf(self) -> int: ... + @property + def nan(self) -> int: ... + @property + def imag(self) -> int: ... + @property + def algorithm(self) -> str: ... + @property + def hash_bits(self) -> int: ... + @property + def seed_bits(self) -> int: ... + @property + def cutoff(self) -> int: ... # undocumented + +implementation: _implementation + +class _implementation: + name: str + version: _version_info + hexversion: int + cache_tag: str + # Define __getattr__, as the documentation states: + # > sys.implementation may contain additional attributes specific to the Python implementation. + # > These non-standard attributes must start with an underscore, and are not described here. + def __getattr__(self, name: str) -> Any: ... + +int_info: _int_info + +@final +class _int_info(structseq[int], tuple[int, int, int, int]): + @property + def bits_per_digit(self) -> int: ... + @property + def sizeof_digit(self) -> int: ... + @property + def default_max_str_digits(self) -> int: ... + @property + def str_digits_check_threshold(self) -> int: ... + +@final +class _version_info(_UninstantiableStructseq, tuple[int, int, int, str, int]): + @property + def major(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... + @property + def serial(self) -> int: ... + +version_info: _version_info + +def call_tracing(__func: Callable[..., _T], __args: Any) -> _T: ... +def _clear_type_cache() -> None: ... +def _current_frames() -> dict[int, FrameType]: ... +def _getframe(__depth: int = 0) -> FrameType: ... +def _debugmallocstats() -> None: ... +def __displayhook__(__value: object) -> None: ... +def __excepthook__(__exctype: type[BaseException], __value: BaseException, __traceback: TracebackType | None) -> None: ... +def exc_info() -> OptExcInfo: ... + +if sys.version_info >= (3, 11): + def exception() -> BaseException | None: ... + +def exit(__status: _ExitCode = None) -> NoReturn: ... +def getallocatedblocks() -> int: ... +def getdefaultencoding() -> str: ... + +if sys.platform != "win32": + def getdlopenflags() -> int: ... + +def getfilesystemencoding() -> str: ... +def getfilesystemencodeerrors() -> str: ... +def getrefcount(__object: Any) -> int: ... +def getrecursionlimit() -> int: ... +@overload +def getsizeof(obj: object) -> int: ... +@overload +def getsizeof(obj: object, default: int) -> int: ... +def getswitchinterval() -> float: ... +def getprofile() -> ProfileFunction | None: ... +def setprofile(profilefunc: ProfileFunction | None) -> None: ... +def gettrace() -> TraceFunction | None: ... +def settrace(tracefunc: TraceFunction | None) -> None: ... + +if sys.platform == "win32": + # A tuple of length 5, even though it has more than 5 attributes. + @final + class _WinVersion(_UninstantiableStructseq, tuple[int, int, int, int, str]): + @property + def major(self) -> int: ... + @property + def minor(self) -> int: ... 
+ @property + def build(self) -> int: ... + @property + def platform(self) -> int: ... + @property + def service_pack(self) -> str: ... + @property + def service_pack_minor(self) -> int: ... + @property + def service_pack_major(self) -> int: ... + @property + def suite_mask(self) -> int: ... + @property + def product_type(self) -> int: ... + @property + def platform_version(self) -> tuple[int, int, int]: ... + + def getwindowsversion() -> _WinVersion: ... + +def intern(__string: str) -> str: ... +def is_finalizing() -> bool: ... +def breakpointhook(*args: Any, **kwargs: Any) -> Any: ... + +__breakpointhook__ = breakpointhook # Contains the original value of breakpointhook + +if sys.platform != "win32": + def setdlopenflags(__flags: int) -> None: ... + +def setrecursionlimit(__limit: int) -> None: ... +def setswitchinterval(__interval: float) -> None: ... +def gettotalrefcount() -> int: ... # Debug builds only + +if sys.version_info < (3, 9): + def getcheckinterval() -> int: ... # deprecated + def setcheckinterval(__n: int) -> None: ... # deprecated + +if sys.version_info < (3, 9): + # An 11-tuple or None + def callstats() -> tuple[int, int, int, int, int, int, int, int, int, int, int] | None: ... + +if sys.version_info >= (3, 8): + # Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily. + class UnraisableHookArgs: + exc_type: type[BaseException] + exc_value: BaseException | None + exc_traceback: TracebackType | None + err_msg: str | None + object: _object + unraisablehook: Callable[[UnraisableHookArgs], Any] + def __unraisablehook__(__unraisable: UnraisableHookArgs) -> Any: ... + def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: ... + def audit(__event: str, *args: Any) -> None: ... + +_AsyncgenHook: TypeAlias = Callable[[AsyncGenerator[Any, Any]], None] | None + +@final +class _asyncgen_hooks(structseq[_AsyncgenHook], tuple[_AsyncgenHook, _AsyncgenHook]): + @property + def firstiter(self) -> _AsyncgenHook: ... + @property + def finalizer(self) -> _AsyncgenHook: ... + +def get_asyncgen_hooks() -> _asyncgen_hooks: ... +def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: ... + +if sys.platform == "win32": + def _enablelegacywindowsfsencoding() -> None: ... + +def get_coroutine_origin_tracking_depth() -> int: ... +def set_coroutine_origin_tracking_depth(depth: int) -> None: ... + +if sys.version_info < (3, 8): + _CoroWrapper: TypeAlias = Callable[[Coroutine[Any, Any, Any]], Any] + def set_coroutine_wrapper(__wrapper: _CoroWrapper) -> None: ... + def get_coroutine_wrapper() -> _CoroWrapper: ... + +# The following two functions were added in 3.11.0, 3.10.7, 3.9.14, 3.8.14, & 3.7.14, +# as part of the response to CVE-2020-10735 +def set_int_max_str_digits(maxdigits: int) -> None: ... +def get_int_max_str_digits() -> int: ... 
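As an illustrative aside (not part of the vendored stub above), the sys stub gates newer attributes behind sys.version_info checks, and a consumer that wants to stay type-clean against these stubs mirrors the same guards at runtime. A minimal sketch, with the hasattr guard standing in for the backported int/str-digit-limit APIs:

import sys

def describe_interpreter() -> str:
    parts = [f"python {sys.version_info.major}.{sys.version_info.minor}"]
    if sys.version_info >= (3, 10):
        # sys.orig_argv only exists (and is only declared in the stub) on 3.10+
        parts.append(f"orig_argv={sys.orig_argv!r}")
    # get_int_max_str_digits() is only present on interpreters that shipped
    # the CVE-2020-10735 mitigation, so probe for it before calling.
    if hasattr(sys, "get_int_max_str_digits"):
        parts.append(f"max_str_digits={sys.get_int_max_str_digits()}")
    return ", ".join(parts)

print(describe_interpreter())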
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sysconfig.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sysconfig.pyi new file mode 100644 index 00000000..7e29cf13 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/sysconfig.pyi @@ -0,0 +1,44 @@ +import sys +from typing import IO, Any, overload +from typing_extensions import Literal + +__all__ = [ + "get_config_h_filename", + "get_config_var", + "get_config_vars", + "get_makefile_filename", + "get_path", + "get_path_names", + "get_paths", + "get_platform", + "get_python_version", + "get_scheme_names", + "parse_config_h", +] + +def get_config_var(name: str) -> Any: ... +@overload +def get_config_vars() -> dict[str, Any]: ... +@overload +def get_config_vars(arg: str, *args: str) -> list[Any]: ... +def get_scheme_names() -> tuple[str, ...]: ... + +if sys.version_info >= (3, 10): + def get_default_scheme() -> str: ... + def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> str: ... + +def get_path_names() -> tuple[str, ...]: ... +def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: ... +def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> dict[str, str]: ... +def get_python_version() -> str: ... +def get_platform() -> str: ... + +if sys.version_info >= (3, 11): + def is_python_build(check_home: object = None) -> bool: ... + +else: + def is_python_build(check_home: bool = False) -> bool: ... + +def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = None) -> dict[str, Any]: ... +def get_config_h_filename() -> str: ... +def get_makefile_filename() -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/syslog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/syslog.pyi new file mode 100644 index 00000000..cfa8df88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/syslog.pyi @@ -0,0 +1,47 @@ +import sys +from typing import overload +from typing_extensions import Literal + +if sys.platform != "win32": + LOG_ALERT: Literal[1] + LOG_AUTH: Literal[32] + LOG_AUTHPRIV: Literal[80] + LOG_CONS: Literal[2] + LOG_CRIT: Literal[2] + LOG_CRON: Literal[72] + LOG_DAEMON: Literal[24] + LOG_DEBUG: Literal[7] + LOG_EMERG: Literal[0] + LOG_ERR: Literal[3] + LOG_INFO: Literal[6] + LOG_KERN: Literal[0] + LOG_LOCAL0: Literal[128] + LOG_LOCAL1: Literal[136] + LOG_LOCAL2: Literal[144] + LOG_LOCAL3: Literal[152] + LOG_LOCAL4: Literal[160] + LOG_LOCAL5: Literal[168] + LOG_LOCAL6: Literal[176] + LOG_LOCAL7: Literal[184] + LOG_LPR: Literal[48] + LOG_MAIL: Literal[16] + LOG_NDELAY: Literal[8] + LOG_NEWS: Literal[56] + LOG_NOTICE: Literal[5] + LOG_NOWAIT: Literal[16] + LOG_ODELAY: Literal[4] + LOG_PERROR: Literal[32] + LOG_PID: Literal[1] + LOG_SYSLOG: Literal[40] + LOG_USER: Literal[8] + LOG_UUCP: Literal[64] + LOG_WARNING: Literal[4] + def LOG_MASK(a: int) -> int: ... + def LOG_UPTO(a: int) -> int: ... + def closelog() -> None: ... + def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... + def setlogmask(x: int) -> int: ... + @overload + def syslog(priority: int, message: str) -> None: ... + @overload + def syslog(message: str) -> None: ... 
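For illustration only (not part of the patch), the sysconfig stub above corresponds to calls like the following, which is roughly how installation paths and build-time configuration are queried:

import sysconfig

# get_paths() maps path names ("stdlib", "purelib", ...) to concrete
# directories for the default installation scheme.
paths = sysconfig.get_paths()
print(paths["purelib"])

# get_config_var() returns a single build-time variable, or None if unset.
print(sysconfig.get_config_var("EXT_SUFFIX"))

# get_platform() is the usual "linux-x86_64"-style platform tag.
print(sysconfig.get_platform())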
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tabnanny.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tabnanny.pyi new file mode 100644 index 00000000..8a8592f4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tabnanny.pyi @@ -0,0 +1,16 @@ +from _typeshed import StrOrBytesPath +from collections.abc import Iterable + +__all__ = ["check", "NannyNag", "process_tokens"] + +verbose: int +filename_only: int + +class NannyNag(Exception): + def __init__(self, lineno: int, msg: str, line: str) -> None: ... + def get_lineno(self) -> int: ... + def get_msg(self) -> str: ... + def get_line(self) -> str: ... + +def check(file: StrOrBytesPath) -> None: ... +def process_tokens(tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tarfile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tarfile.pyi new file mode 100644 index 00000000..5cf1d55c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tarfile.pyi @@ -0,0 +1,380 @@ +import bz2 +import io +import sys +from _typeshed import StrOrBytesPath, StrPath +from builtins import list as _list # aliases to avoid name clashes with fields named "type" or "list" +from collections.abc import Callable, Iterable, Iterator, Mapping +from gzip import _ReadableFileobj as _GzipReadableFileobj, _WritableFileobj as _GzipWritableFileobj +from types import TracebackType +from typing import IO, ClassVar, Protocol, overload +from typing_extensions import Literal, Self + +__all__ = [ + "TarFile", + "TarInfo", + "is_tarfile", + "TarError", + "ReadError", + "CompressionError", + "StreamError", + "ExtractError", + "HeaderError", + "ENCODING", + "USTAR_FORMAT", + "GNU_FORMAT", + "PAX_FORMAT", + "DEFAULT_FORMAT", + "open", +] + +class _Fileobj(Protocol): + def read(self, __size: int) -> bytes: ... + def write(self, __b: bytes) -> object: ... + def tell(self) -> int: ... + def seek(self, __pos: int) -> object: ... + def close(self) -> object: ... + # Optional fields: + # name: str | bytes + # mode: Literal["rb", "r+b", "wb", "xb"] + +class _Bz2ReadableFileobj(bz2._ReadableFileobj): + def close(self) -> object: ... + +class _Bz2WritableFileobj(bz2._WritableFileobj): + def close(self) -> object: ... + +# tar constants +NUL: bytes +BLOCKSIZE: int +RECORDSIZE: int +GNU_MAGIC: bytes +POSIX_MAGIC: bytes + +LENGTH_NAME: int +LENGTH_LINK: int +LENGTH_PREFIX: int + +REGTYPE: bytes +AREGTYPE: bytes +LNKTYPE: bytes +SYMTYPE: bytes +CONTTYPE: bytes +BLKTYPE: bytes +DIRTYPE: bytes +FIFOTYPE: bytes +CHRTYPE: bytes + +GNUTYPE_LONGNAME: bytes +GNUTYPE_LONGLINK: bytes +GNUTYPE_SPARSE: bytes + +XHDTYPE: bytes +XGLTYPE: bytes +SOLARIS_XHDTYPE: bytes + +USTAR_FORMAT: int +GNU_FORMAT: int +PAX_FORMAT: int +DEFAULT_FORMAT: int + +# tarfile constants + +SUPPORTED_TYPES: tuple[bytes, ...] +REGULAR_TYPES: tuple[bytes, ...] +GNU_TYPES: tuple[bytes, ...] +PAX_FIELDS: tuple[str, ...] 
+PAX_NUMBER_FIELDS: dict[str, type] +PAX_NAME_FIELDS: set[str] + +ENCODING: str + +def open( + name: StrOrBytesPath | None = None, + mode: str = "r", + fileobj: IO[bytes] | None = None, # depends on mode + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + compresslevel: int | None = ..., +) -> TarFile: ... + +class ExFileObject(io.BufferedReader): + def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ... + +class TarFile: + OPEN_METH: ClassVar[Mapping[str, str]] + name: StrOrBytesPath | None + mode: Literal["r", "a", "w", "x"] + fileobj: _Fileobj | None + format: int | None + tarinfo: type[TarInfo] + dereference: bool | None + ignore_zeros: bool | None + encoding: str | None + errors: str + fileobject: type[ExFileObject] + pax_headers: Mapping[str, str] | None + debug: int | None + errorlevel: int | None + offset: int # undocumented + def __init__( + self, + name: StrOrBytesPath | None = None, + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, + format: int | None = None, + tarinfo: type[TarInfo] | None = None, + dereference: bool | None = None, + ignore_zeros: bool | None = None, + encoding: str | None = None, + errors: str = "surrogateescape", + pax_headers: Mapping[str, str] | None = None, + debug: int | None = None, + errorlevel: int | None = None, + copybufsize: int | None = None, # undocumented + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __iter__(self) -> Iterator[TarInfo]: ... + @classmethod + def open( + cls, + name: StrOrBytesPath | None = None, + mode: str = "r", + fileobj: IO[bytes] | None = None, # depends on mode + bufsize: int = 10240, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + errors: str = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @classmethod + def taropen( + cls, + name: StrOrBytesPath | None, + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, + *, + compresslevel: int = ..., + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def gzopen( + cls, + name: StrOrBytesPath | None, + mode: Literal["r"] = "r", + fileobj: _GzipReadableFileobj | None = None, + compresslevel: int = 9, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... 
+ @overload + @classmethod + def gzopen( + cls, + name: StrOrBytesPath | None, + mode: Literal["w", "x"], + fileobj: _GzipWritableFileobj | None = None, + compresslevel: int = 9, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def bz2open( + cls, + name: StrOrBytesPath | None, + mode: Literal["w", "x"], + fileobj: _Bz2WritableFileobj | None = None, + compresslevel: int = 9, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @overload + @classmethod + def bz2open( + cls, + name: StrOrBytesPath | None, + mode: Literal["r"] = "r", + fileobj: _Bz2ReadableFileobj | None = None, + compresslevel: int = 9, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + @classmethod + def xzopen( + cls, + name: StrOrBytesPath | None, + mode: Literal["r", "w", "x"] = "r", + fileobj: IO[bytes] | None = None, + preset: int | None = None, + *, + format: int | None = ..., + tarinfo: type[TarInfo] | None = ..., + dereference: bool | None = ..., + ignore_zeros: bool | None = ..., + encoding: str | None = ..., + pax_headers: Mapping[str, str] | None = ..., + debug: int | None = ..., + errorlevel: int | None = ..., + ) -> Self: ... + def getmember(self, name: str) -> TarInfo: ... + def getmembers(self) -> _list[TarInfo]: ... + def getnames(self) -> _list[str]: ... + def list(self, verbose: bool = True, *, members: _list[TarInfo] | None = None) -> None: ... + def next(self) -> TarInfo | None: ... + def extractall( + self, path: StrOrBytesPath = ".", members: Iterable[TarInfo] | None = None, *, numeric_owner: bool = False + ) -> None: ... + def extract( + self, member: str | TarInfo, path: StrOrBytesPath = "", set_attrs: bool = True, *, numeric_owner: bool = False + ) -> None: ... + def _extract_member( + self, tarinfo: TarInfo, targetpath: str, set_attrs: bool = True, numeric_owner: bool = False + ) -> None: ... # undocumented + def extractfile(self, member: str | TarInfo) -> IO[bytes] | None: ... + def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makefile(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makeunknown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makefifo(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makedev(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def makelink(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def chown(self, tarinfo: TarInfo, targetpath: StrOrBytesPath, numeric_owner: bool) -> None: ... # undocumented + def chmod(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... # undocumented + def utime(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... 
# undocumented + def add( + self, + name: StrPath, + arcname: StrPath | None = None, + recursive: bool = True, + *, + filter: Callable[[TarInfo], TarInfo | None] | None = None, + ) -> None: ... + def addfile(self, tarinfo: TarInfo, fileobj: IO[bytes] | None = None) -> None: ... + def gettarinfo( + self, name: StrOrBytesPath | None = None, arcname: str | None = None, fileobj: IO[bytes] | None = None + ) -> TarInfo: ... + def close(self) -> None: ... + +if sys.version_info >= (3, 9): + def is_tarfile(name: StrOrBytesPath | IO[bytes]) -> bool: ... + +else: + def is_tarfile(name: StrOrBytesPath) -> bool: ... + +if sys.version_info < (3, 8): + def filemode(mode: int) -> str: ... # undocumented + +class TarError(Exception): ... +class ReadError(TarError): ... +class CompressionError(TarError): ... +class StreamError(TarError): ... +class ExtractError(TarError): ... +class HeaderError(TarError): ... + +class TarInfo: + name: str + path: str + size: int + mtime: int + chksum: int + devmajor: int + devminor: int + offset: int + offset_data: int + sparse: bytes | None + tarfile: TarFile | None + mode: int + type: bytes + linkname: str + uid: int + gid: int + uname: str + gname: str + pax_headers: Mapping[str, str] + def __init__(self, name: str = "") -> None: ... + @classmethod + def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... + @classmethod + def fromtarfile(cls, tarfile: TarFile) -> Self: ... + @property + def linkpath(self) -> str: ... + @linkpath.setter + def linkpath(self, linkname: str) -> None: ... + def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: ... + if sys.version_info >= (3, 8): + def tobuf(self, format: int | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... + else: + def tobuf(self, format: int | None = 1, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... + + def create_ustar_header( + self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str + ) -> bytes: ... + def create_gnu_header( + self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str + ) -> bytes: ... + def create_pax_header(self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str) -> bytes: ... + @classmethod + def create_pax_global_header(cls, pax_headers: Mapping[str, str]) -> bytes: ... + def isfile(self) -> bool: ... + def isreg(self) -> bool: ... + def issparse(self) -> bool: ... + def isdir(self) -> bool: ... + def issym(self) -> bool: ... + def islnk(self) -> bool: ... + def ischr(self) -> bool: ... + def isblk(self) -> bool: ... + def isfifo(self) -> bool: ... + def isdev(self) -> bool: ... 
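A small usage sketch (illustrative only, not part of the vendored file) for the tarfile API described by the stub above; the mode string passed to tarfile.open selects between the taropen/gzopen/bz2open/xzopen class methods, and "some_directory" below is a placeholder path:

import tarfile

# Create a gzip-compressed archive, then list its members and extract it.
with tarfile.open("example.tar.gz", mode="w:gz") as tar:
    tar.add("some_directory", arcname="some_directory")

with tarfile.open("example.tar.gz", mode="r:gz") as tar:
    for member in tar.getmembers():
        print(member.name, member.size)
    tar.extractall(path="extracted")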
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/telnetlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/telnetlib.pyi new file mode 100644 index 00000000..10f6e493 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/telnetlib.pyi @@ -0,0 +1,121 @@ +import socket +from collections.abc import Callable, Sequence +from re import Match, Pattern +from types import TracebackType +from typing import Any +from typing_extensions import Self + +__all__ = ["Telnet"] + +DEBUGLEVEL: int +TELNET_PORT: int + +IAC: bytes +DONT: bytes +DO: bytes +WONT: bytes +WILL: bytes +theNULL: bytes + +SE: bytes +NOP: bytes +DM: bytes +BRK: bytes +IP: bytes +AO: bytes +AYT: bytes +EC: bytes +EL: bytes +GA: bytes +SB: bytes + +BINARY: bytes +ECHO: bytes +RCP: bytes +SGA: bytes +NAMS: bytes +STATUS: bytes +TM: bytes +RCTE: bytes +NAOL: bytes +NAOP: bytes +NAOCRD: bytes +NAOHTS: bytes +NAOHTD: bytes +NAOFFD: bytes +NAOVTS: bytes +NAOVTD: bytes +NAOLFD: bytes +XASCII: bytes +LOGOUT: bytes +BM: bytes +DET: bytes +SUPDUP: bytes +SUPDUPOUTPUT: bytes +SNDLOC: bytes +TTYPE: bytes +EOR: bytes +TUID: bytes +OUTMRK: bytes +TTYLOC: bytes +VT3270REGIME: bytes +X3PAD: bytes +NAWS: bytes +TSPEED: bytes +LFLOW: bytes +LINEMODE: bytes +XDISPLOC: bytes +OLD_ENVIRON: bytes +AUTHENTICATION: bytes +ENCRYPT: bytes +NEW_ENVIRON: bytes + +TN3270E: bytes +XAUTH: bytes +CHARSET: bytes +RSP: bytes +COM_PORT_OPTION: bytes +SUPPRESS_LOCAL_ECHO: bytes +TLS: bytes +KERMIT: bytes +SEND_URL: bytes +FORWARD_X: bytes +PRAGMA_LOGON: bytes +SSPI_LOGON: bytes +PRAGMA_HEARTBEAT: bytes +EXOPL: bytes +NOOPT: bytes + +class Telnet: + host: str | None # undocumented + def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ... + def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ... + def msg(self, msg: str, *args: Any) -> None: ... + def set_debuglevel(self, debuglevel: int) -> None: ... + def close(self) -> None: ... + def get_socket(self) -> socket.socket: ... + def fileno(self) -> int: ... + def write(self, buffer: bytes) -> None: ... + def read_until(self, match: bytes, timeout: float | None = None) -> bytes: ... + def read_all(self) -> bytes: ... + def read_some(self) -> bytes: ... + def read_very_eager(self) -> bytes: ... + def read_eager(self) -> bytes: ... + def read_lazy(self) -> bytes: ... + def read_very_lazy(self) -> bytes: ... + def read_sb_data(self) -> bytes: ... + def set_option_negotiation_callback(self, callback: Callable[[socket.socket, bytes, bytes], object] | None) -> None: ... + def process_rawq(self) -> None: ... + def rawq_getchar(self) -> bytes: ... + def fill_rawq(self) -> None: ... + def sock_avail(self) -> bool: ... + def interact(self) -> None: ... + def mt_interact(self) -> None: ... + def listener(self) -> None: ... + def expect( + self, list: Sequence[Pattern[bytes] | bytes], timeout: float | None = None + ) -> tuple[int, Match[bytes] | None, bytes]: ... + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... 
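For illustration only (telnetlib was deprecated by PEP 594 and later removed from the stdlib, but the vendored stub still covers it), the Telnet class above is typically used as a context manager; the host, prompt, and credentials below are placeholders:

import telnetlib

# Connect, wait for a login prompt, send a line, and read whatever is pending.
with telnetlib.Telnet("example.com", 23, timeout=10) as tn:
    tn.read_until(b"login: ", timeout=5)
    tn.write(b"guest\n")
    print(tn.read_very_eager().decode("ascii", errors="replace"))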
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tempfile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tempfile.pyi new file mode 100644 index 00000000..dbff6d63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tempfile.pyi @@ -0,0 +1,463 @@ +import io +import sys +from _typeshed import BytesPath, GenericPath, StrPath, WriteableBuffer +from collections.abc import Iterable, Iterator +from types import TracebackType +from typing import IO, Any, AnyStr, Generic, overload +from typing_extensions import Literal, Self, TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "NamedTemporaryFile", + "TemporaryFile", + "SpooledTemporaryFile", + "TemporaryDirectory", + "mkstemp", + "mkdtemp", + "mktemp", + "TMP_MAX", + "gettempprefix", + "tempdir", + "gettempdir", + "gettempprefixb", + "gettempdirb", +] + +# global variables +TMP_MAX: int +tempdir: str | None +template: str + +_StrMode: TypeAlias = Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+", "rt", "wt", "at", "xt", "r+t", "w+t", "a+t", "x+t"] +_BytesMode: TypeAlias = Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"] + +if sys.version_info >= (3, 8): + @overload + def NamedTemporaryFile( + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + ) -> _TemporaryFileWrapper[str]: ... + @overload + def NamedTemporaryFile( + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + ) -> _TemporaryFileWrapper[bytes]: ... + @overload + def NamedTemporaryFile( + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + *, + errors: str | None = None, + ) -> _TemporaryFileWrapper[Any]: ... + +else: + @overload + def NamedTemporaryFile( + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + ) -> _TemporaryFileWrapper[str]: ... + @overload + def NamedTemporaryFile( + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + ) -> _TemporaryFileWrapper[bytes]: ... + @overload + def NamedTemporaryFile( + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, + ) -> _TemporaryFileWrapper[Any]: ... 
+ +if sys.platform == "win32": + TemporaryFile = NamedTemporaryFile +else: + if sys.version_info >= (3, 8): + @overload + def TemporaryFile( + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> IO[str]: ... + @overload + def TemporaryFile( + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> IO[bytes]: ... + @overload + def TemporaryFile( + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + *, + errors: str | None = None, + ) -> IO[Any]: ... + else: + @overload + def TemporaryFile( + mode: _StrMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: GenericPath[AnyStr] | None = ..., + ) -> IO[str]: ... + @overload + def TemporaryFile( + mode: _BytesMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: GenericPath[AnyStr] | None = ..., + ) -> IO[bytes]: ... + @overload + def TemporaryFile( + mode: str = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: GenericPath[AnyStr] | None = ..., + ) -> IO[Any]: ... + +class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): + file: IO[AnyStr] # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter + name: str + delete: bool + def __init__(self, file: IO[AnyStr], name: str, delete: bool = True) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def close(self) -> None: ... + # These methods don't exist directly on this object, but + # are delegated to the underlying IO object through __getattr__. + # We need to add them here so that this class is concrete. + def __iter__(self) -> Iterator[AnyStr]: ... + # FIXME: __next__ doesn't actually exist on this class and should be removed: + # see also https://github.com/python/typeshed/pull/5456#discussion_r633068648 + # >>> import tempfile + # >>> ntf=tempfile.NamedTemporaryFile() + # >>> next(ntf) + # Traceback (most recent call last): + # File "", line 1, in + # TypeError: '_TemporaryFileWrapper' object is not an iterator + def __next__(self) -> AnyStr: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def read(self, n: int = ...) -> AnyStr: ... + def readable(self) -> bool: ... + def readline(self, limit: int = ...) -> AnyStr: ... + def readlines(self, hint: int = ...) -> list[AnyStr]: ... + def seek(self, offset: int, whence: int = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def truncate(self, size: int | None = ...) -> int: ... + def writable(self) -> bool: ... + def write(self, s: AnyStr) -> int: ... + def writelines(self, lines: Iterable[AnyStr]) -> None: ... 
+ +if sys.version_info >= (3, 11): + _SpooledTemporaryFileBase = io.IOBase +else: + _SpooledTemporaryFileBase = object + +# It does not actually derive from IO[AnyStr], but it does mostly behave +# like one. +class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): + @property + def encoding(self) -> str: ... # undocumented + @property + def newlines(self) -> str | tuple[str, ...] | None: ... # undocumented + # bytes needs to go first, as default mode is to open as bytes + if sys.version_info >= (3, 8): + @overload + def __init__( + self: SpooledTemporaryFile[bytes], + max_size: int = 0, + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = 0, + *, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_size: int, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_size: int = 0, + *, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + errors: str | None = None, + ) -> None: ... + @property + def errors(self) -> str | None: ... + else: + @overload + def __init__( + self: SpooledTemporaryFile[bytes], + max_size: int = 0, + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + ) -> None: ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + ) -> None: ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = 0, + *, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_size: int, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_size: int = 0, + *, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + ) -> None: ... + + def rollover(self) -> None: ... 
+ def __enter__(self) -> Self: ... + def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... + # These methods are copied from the abstract methods of IO, because + # SpooledTemporaryFile implements IO. + # See also https://github.com/python/typeshed/pull/2452#issuecomment-420657918. + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + if sys.version_info >= (3, 11): + # These three work only if the SpooledTemporaryFile is opened in binary mode, + # because the underlying object in text mode does not have these methods. + def read1(self, __size: int = ...) -> AnyStr: ... + def readinto(self, b: WriteableBuffer) -> int: ... + def readinto1(self, b: WriteableBuffer) -> int: ... + def detach(self) -> io.RawIOBase: ... + + def read(self, __n: int = ...) -> AnyStr: ... + def readline(self, __limit: int | None = ...) -> AnyStr: ... # type: ignore[override] + def readlines(self, __hint: int = ...) -> list[AnyStr]: ... # type: ignore[override] + def seek(self, offset: int, whence: int = ...) -> int: ... + def tell(self) -> int: ... + def truncate(self, size: int | None = None) -> None: ... # type: ignore[override] + def write(self, s: AnyStr) -> int: ... + def writelines(self, iterable: Iterable[AnyStr]) -> None: ... # type: ignore[override] + def __iter__(self) -> Iterator[AnyStr]: ... # type: ignore[override] + # These exist at runtime only on 3.11+. + def readable(self) -> bool: ... + def seekable(self) -> bool: ... + def writable(self) -> bool: ... + def __next__(self) -> AnyStr: ... # type: ignore[override] + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class TemporaryDirectory(Generic[AnyStr]): + name: AnyStr + if sys.version_info >= (3, 10): + @overload + def __init__( + self: TemporaryDirectory[str], + suffix: str | None = None, + prefix: str | None = None, + dir: StrPath | None = None, + ignore_cleanup_errors: bool = False, + ) -> None: ... + @overload + def __init__( + self: TemporaryDirectory[bytes], + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, + ignore_cleanup_errors: bool = False, + ) -> None: ... + else: + @overload + def __init__( + self: TemporaryDirectory[str], suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None + ) -> None: ... + @overload + def __init__( + self: TemporaryDirectory[bytes], + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, + ) -> None: ... + + def cleanup(self) -> None: ... + def __enter__(self) -> AnyStr: ... + def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# The overloads overlap, but they should still work fine. +@overload +def mkstemp( # type: ignore[misc] + suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False +) -> tuple[int, str]: ... +@overload +def mkstemp( + suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None, text: bool = False +) -> tuple[int, bytes]: ... + +# The overloads overlap, but they should still work fine. +@overload +def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ... 
# type: ignore[misc] +@overload +def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ... +def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: ... +def gettempdirb() -> bytes: ... +def gettempprefixb() -> bytes: ... +def gettempdir() -> str: ... +def gettempprefix() -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/termios.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/termios.pyi new file mode 100644 index 00000000..bf8d7bee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/termios.pyi @@ -0,0 +1,265 @@ +import sys +from _typeshed import FileDescriptorLike +from typing import Any +from typing_extensions import TypeAlias + +if sys.platform != "win32": + # Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints. + _Attr: TypeAlias = list[int | list[bytes | int]] + + B0: int + B1000000: int + B110: int + B115200: int + B1152000: int + B1200: int + B134: int + B150: int + B1500000: int + B1800: int + B19200: int + B200: int + B2000000: int + B230400: int + B2400: int + B2500000: int + B300: int + B3000000: int + B3500000: int + B38400: int + B4000000: int + B460800: int + B4800: int + B50: int + B500000: int + B57600: int + B576000: int + B600: int + B75: int + B921600: int + B9600: int + BRKINT: int + BS0: int + BS1: int + BSDLY: int + CBAUD: int + CBAUDEX: int + CDEL: int + CDSUSP: int + CEOF: int + CEOL: int + CEOL2: int + CEOT: int + CERASE: int + CESC: int + CFLUSH: int + CIBAUD: int + CINTR: int + CKILL: int + CLNEXT: int + CLOCAL: int + CNUL: int + COMMON: int + CQUIT: int + CR0: int + CR1: int + CR2: int + CR3: int + CRDLY: int + CREAD: int + CRPRNT: int + CRTSCTS: int + CS5: int + CS6: int + CS7: int + CS8: int + CSIZE: int + CSTART: int + CSTOP: int + CSTOPB: int + CSUSP: int + CSWTCH: int + CWERASE: int + ECHO: int + ECHOCTL: int + ECHOE: int + ECHOK: int + ECHOKE: int + ECHONL: int + ECHOPRT: int + EXTA: int + EXTB: int + FF0: int + FF1: int + FFDLY: int + FIOASYNC: int + FIOCLEX: int + FIONBIO: int + FIONCLEX: int + FIONREAD: int + FLUSHO: int + HUPCL: int + IBSHIFT: int + ICANON: int + ICRNL: int + IEXTEN: int + IGNBRK: int + IGNCR: int + IGNPAR: int + IMAXBEL: int + INIT_C_CC: int + INLCR: int + INPCK: int + IOCSIZE_MASK: int + IOCSIZE_SHIFT: int + ISIG: int + ISTRIP: int + IUCLC: int + IXANY: int + IXOFF: int + IXON: int + N_MOUSE: int + N_PPP: int + N_SLIP: int + N_STRIP: int + N_TTY: int + NCC: int + NCCS: int + NL0: int + NL1: int + NLDLY: int + NOFLSH: int + NSWTCH: int + OCRNL: int + OFDEL: int + OFILL: int + OLCUC: int + ONLCR: int + ONLRET: int + ONOCR: int + OPOST: int + PARENB: int + PARMRK: int + PARODD: int + PENDIN: int + TAB0: int + TAB1: int + TAB2: int + TAB3: int + TABDLY: int + TCFLSH: int + TCGETA: int + TCGETS: int + TCIFLUSH: int + TCIOFF: int + TCIOFLUSH: int + TCION: int + TCOFLUSH: int + TCOOFF: int + TCOON: int + TCSADRAIN: int + TCSAFLUSH: int + TCSANOW: int + TCSASOFT: int + TCSBRK: int + TCSBRKP: int + TCSETA: int + TCSETAF: int + TCSETAW: int + TCSETS: int + TCSETSF: int + TCSETSW: int + TCXONC: int + TIOCCONS: int + TIOCEXCL: int + TIOCGETD: int + TIOCGICOUNT: int + TIOCGLCKTRMIOS: int + TIOCGPGRP: int + TIOCGSERIAL: int + TIOCGSIZE: int + TIOCGSOFTCAR: int + TIOCGWINSZ: int + TIOCINQ: int + TIOCLINUX: int + TIOCM_CAR: int + TIOCM_CD: int + TIOCM_CTS: int + TIOCM_DSR: int + TIOCM_DTR: int + 
TIOCM_LE: int + TIOCM_RI: int + TIOCM_RNG: int + TIOCM_RTS: int + TIOCM_SR: int + TIOCM_ST: int + TIOCMBIC: int + TIOCMBIS: int + TIOCMGET: int + TIOCMIWAIT: int + TIOCMSET: int + TIOCNOTTY: int + TIOCNXCL: int + TIOCOUTQ: int + TIOCPKT_DATA: int + TIOCPKT_DOSTOP: int + TIOCPKT_FLUSHREAD: int + TIOCPKT_FLUSHWRITE: int + TIOCPKT_NOSTOP: int + TIOCPKT_START: int + TIOCPKT_STOP: int + TIOCPKT: int + TIOCSCTTY: int + TIOCSER_TEMT: int + TIOCSERCONFIG: int + TIOCSERGETLSR: int + TIOCSERGETMULTI: int + TIOCSERGSTRUCT: int + TIOCSERGWILD: int + TIOCSERSETMULTI: int + TIOCSERSWILD: int + TIOCSETD: int + TIOCSLCKTRMIOS: int + TIOCSPGRP: int + TIOCSSERIAL: int + TIOCSSIZE: int + TIOCSSOFTCAR: int + TIOCSTI: int + TIOCSWINSZ: int + TIOCTTYGSTRUCT: int + TOSTOP: int + VDISCARD: int + VEOF: int + VEOL: int + VEOL2: int + VERASE: int + VINTR: int + VKILL: int + VLNEXT: int + VMIN: int + VQUIT: int + VREPRINT: int + VSTART: int + VSTOP: int + VSUSP: int + VSWTC: int + VSWTCH: int + VT0: int + VT1: int + VTDLY: int + VTIME: int + VWERASE: int + XCASE: int + XTABS: int + + def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... # Returns _Attr; we use Any to avoid a union in the return type + def tcsetattr(__fd: FileDescriptorLike, __when: int, __attributes: _Attr) -> None: ... + def tcsendbreak(__fd: FileDescriptorLike, __duration: int) -> None: ... + def tcdrain(__fd: FileDescriptorLike) -> None: ... + def tcflush(__fd: FileDescriptorLike, __queue: int) -> None: ... + def tcflow(__fd: FileDescriptorLike, __action: int) -> None: ... + if sys.version_info >= (3, 11): + def tcgetwinsize(__fd: FileDescriptorLike) -> tuple[int, int]: ... + def tcsetwinsize(__fd: FileDescriptorLike, __winsize: tuple[int, int]) -> None: ... + + class error(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/textwrap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/textwrap.pyi new file mode 100644 index 00000000..c00cce3c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/textwrap.pyi @@ -0,0 +1,103 @@ +from collections.abc import Callable +from re import Pattern + +__all__ = ["TextWrapper", "wrap", "fill", "dedent", "indent", "shorten"] + +class TextWrapper: + width: int + initial_indent: str + subsequent_indent: str + expand_tabs: bool + replace_whitespace: bool + fix_sentence_endings: bool + drop_whitespace: bool + break_long_words: bool + break_on_hyphens: bool + tabsize: int + max_lines: int | None + placeholder: str + + # Attributes not present in documentation + sentence_end_re: Pattern[str] + wordsep_re: Pattern[str] + wordsep_simple_re: Pattern[str] + whitespace_trans: str + unicode_whitespace_trans: dict[int, int] + uspace: int + x: str # leaked loop variable + def __init__( + self, + width: int = 70, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + drop_whitespace: bool = True, + break_on_hyphens: bool = True, + tabsize: int = 8, + *, + max_lines: int | None = None, + placeholder: str = " [...]", + ) -> None: ... + # Private methods *are* part of the documented API for subclasses. + def _munge_whitespace(self, text: str) -> str: ... + def _split(self, text: str) -> list[str]: ... + def _fix_sentence_endings(self, chunks: list[str]) -> None: ... 
+ def _handle_long_word(self, reversed_chunks: list[str], cur_line: list[str], cur_len: int, width: int) -> None: ... + def _wrap_chunks(self, chunks: list[str]) -> list[str]: ... + def _split_chunks(self, text: str) -> list[str]: ... + def wrap(self, text: str) -> list[str]: ... + def fill(self, text: str) -> str: ... + +def wrap( + text: str, + width: int = 70, + *, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + tabsize: int = 8, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + break_on_hyphens: bool = True, + drop_whitespace: bool = True, + max_lines: int | None = None, + placeholder: str = " [...]", +) -> list[str]: ... +def fill( + text: str, + width: int = 70, + *, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + tabsize: int = 8, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + break_on_hyphens: bool = True, + drop_whitespace: bool = True, + max_lines: int | None = None, + placeholder: str = " [...]", +) -> str: ... +def shorten( + text: str, + width: int, + *, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + tabsize: int = 8, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + break_on_hyphens: bool = True, + drop_whitespace: bool = True, + # Omit `max_lines: int = None`, it is forced to 1 here. + placeholder: str = " [...]", +) -> str: ... +def dedent(text: str) -> str: ... +def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = None) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/this.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/this.pyi new file mode 100644 index 00000000..8de996b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/this.pyi @@ -0,0 +1,2 @@ +s: str +d: dict[str, str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/threading.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/threading.pyi new file mode 100644 index 00000000..c0179788 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/threading.pyi @@ -0,0 +1,192 @@ +import sys +from _typeshed import ProfileFunction, TraceFunction +from collections.abc import Callable, Iterable, Mapping +from types import TracebackType +from typing import Any, TypeVar +from typing_extensions import final + +_T = TypeVar("_T") + +__all__ = [ + "get_ident", + "active_count", + "Condition", + "current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "setprofile", + "settrace", + "local", + "stack_size", +] + +if sys.version_info >= (3, 8): + __all__ += ["ExceptHookArgs", "excepthook", "get_native_id"] + +if sys.version_info >= (3, 10): + __all__ += ["getprofile", "gettrace"] + +_profile_hook: ProfileFunction | None + +def active_count() -> int: ... +def activeCount() -> int: ... # deprecated alias for active_count() +def current_thread() -> Thread: ... +def currentThread() -> Thread: ... # deprecated alias for current_thread() +def get_ident() -> int: ... +def enumerate() -> list[Thread]: ... +def main_thread() -> Thread: ... 
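A short sketch of the module-level introspection helpers declared just above (current_thread, main_thread, enumerate, active_count), assuming the runtime threading module matches these signatures; the worker function is purely illustrative.

import threading
import time

def worker() -> None:
    # Sleep briefly so the thread is still alive while the main thread inspects it.
    time.sleep(0.1)

t = threading.Thread(target=worker, name="demo-worker", daemon=True)
t.start()

assert threading.current_thread() is threading.main_thread()
assert threading.active_count() >= 2                 # main thread plus demo-worker
assert any(th.name == "demo-worker" for th in threading.enumerate())

t.join()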
+ +if sys.version_info >= (3, 8): + from _thread import get_native_id as get_native_id + +def settrace(func: TraceFunction) -> None: ... +def setprofile(func: ProfileFunction | None) -> None: ... + +if sys.version_info >= (3, 10): + def gettrace() -> TraceFunction | None: ... + def getprofile() -> ProfileFunction | None: ... + +def stack_size(size: int = ...) -> int: ... + +TIMEOUT_MAX: float + +class ThreadError(Exception): ... + +class local: + def __getattribute__(self, __name: str) -> Any: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __delattr__(self, __name: str) -> None: ... + +class Thread: + name: str + @property + def ident(self) -> int | None: ... + daemon: bool + def __init__( + self, + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, + args: Iterable[Any] = ..., + kwargs: Mapping[str, Any] | None = None, + *, + daemon: bool | None = None, + ) -> None: ... + def start(self) -> None: ... + def run(self) -> None: ... + def join(self, timeout: float | None = None) -> None: ... + if sys.version_info >= (3, 8): + @property + def native_id(self) -> int | None: ... # only available on some platforms + + def is_alive(self) -> bool: ... + if sys.version_info < (3, 9): + def isAlive(self) -> bool: ... + # the following methods are all deprecated + def getName(self) -> str: ... + def setName(self, name: str) -> None: ... + def isDaemon(self) -> bool: ... + def setDaemon(self, daemonic: bool) -> None: ... + +class _DummyThread(Thread): + def __init__(self) -> None: ... + +@final +class Lock: + def __enter__(self) -> bool: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... + def release(self) -> None: ... + def locked(self) -> bool: ... + +@final +class _RLock: + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... + def release(self) -> None: ... + __enter__ = acquire + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + +RLock = _RLock + +class Condition: + def __init__(self, lock: Lock | _RLock | None = None) -> None: ... + def __enter__(self) -> bool: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... + def release(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... + def notifyAll(self) -> None: ... # deprecated alias for notify_all() + +class Semaphore: + _value: int + def __init__(self, value: int = 1) -> None: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + if sys.version_info >= (3, 9): + def release(self, n: int = 1) -> None: ... + else: + def release(self) -> None: ... + +class BoundedSemaphore(Semaphore): ... + +class Event: + def is_set(self) -> bool: ... + def isSet(self) -> bool: ... # deprecated alias for is_set() + def set(self) -> None: ... 
+ def clear(self) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + +if sys.version_info >= (3, 8): + from _thread import _excepthook, _ExceptHookArgs + + excepthook = _excepthook + ExceptHookArgs = _ExceptHookArgs + +class Timer(Thread): + args: Iterable[Any] # undocumented + finished: Event # undocumented + function: Callable[..., Any] # undocumented + interval: float # undocumented + kwargs: Mapping[str, Any] # undocumented + + def __init__( + self, + interval: float, + function: Callable[..., object], + args: Iterable[Any] | None = None, + kwargs: Mapping[str, Any] | None = None, + ) -> None: ... + def cancel(self) -> None: ... + +class Barrier: + @property + def parties(self) -> int: ... + @property + def n_waiting(self) -> int: ... + @property + def broken(self) -> bool: ... + def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ... + def wait(self, timeout: float | None = None) -> int: ... + def reset(self) -> None: ... + def abort(self) -> None: ... + +class BrokenBarrierError(RuntimeError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/time.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/time.pyi new file mode 100644 index 00000000..035d7893 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/time.pyi @@ -0,0 +1,111 @@ +import sys +from _typeshed import structseq +from typing import Any, Protocol +from typing_extensions import Final, Literal, TypeAlias, final + +_TimeTuple: TypeAlias = tuple[int, int, int, int, int, int, int, int, int] + +altzone: int +daylight: int +timezone: int +tzname: tuple[str, str] + +if sys.platform == "linux": + CLOCK_BOOTTIME: int +if sys.platform != "linux" and sys.platform != "win32" and sys.platform != "darwin": + CLOCK_PROF: int # FreeBSD, NetBSD, OpenBSD + CLOCK_UPTIME: int # FreeBSD, OpenBSD + +if sys.platform != "win32": + CLOCK_MONOTONIC: int + CLOCK_MONOTONIC_RAW: int + CLOCK_PROCESS_CPUTIME_ID: int + CLOCK_REALTIME: int + CLOCK_THREAD_CPUTIME_ID: int + if sys.platform != "linux" and sys.platform != "darwin": + CLOCK_HIGHRES: int # Solaris only + +if sys.version_info >= (3, 8) and sys.platform == "darwin": + CLOCK_UPTIME_RAW: int + +if sys.version_info >= (3, 9) and sys.platform == "linux": + CLOCK_TAI: int + +# Constructor takes an iterable of any type, of length between 9 and 11 elements. +# However, it always *behaves* like a tuple of 9 elements, +# even if an iterable with length >9 is passed. +# https://github.com/python/typeshed/pull/6560#discussion_r767162532 +@final +class struct_time(structseq[Any | int], _TimeTuple): + if sys.version_info >= (3, 10): + __match_args__: Final = ("tm_year", "tm_mon", "tm_mday", "tm_hour", "tm_min", "tm_sec", "tm_wday", "tm_yday", "tm_isdst") + @property + def tm_year(self) -> int: ... + @property + def tm_mon(self) -> int: ... + @property + def tm_mday(self) -> int: ... + @property + def tm_hour(self) -> int: ... + @property + def tm_min(self) -> int: ... + @property + def tm_sec(self) -> int: ... + @property + def tm_wday(self) -> int: ... + @property + def tm_yday(self) -> int: ... + @property + def tm_isdst(self) -> int: ... + # These final two properties only exist if a 10- or 11-item sequence was passed to the constructor. + @property + def tm_zone(self) -> str: ... + @property + def tm_gmtoff(self) -> int: ... + +def asctime(t: _TimeTuple | struct_time = ...) -> str: ... 
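To make the struct_time shape above concrete, a small sketch (assuming CPython's time module) showing how the named fields line up with the plain 9-tuple view:

import time

now = time.localtime()          # returns struct_time
assert now.tm_year == now[0]    # named fields mirror the 9-tuple positions
assert now.tm_isdst == now[8]
assert len(now) == 9            # always behaves like a 9-element tuple
# tm_zone and tm_gmtoff are attribute-only extras, not part of the tuple view.
print(time.asctime(now))        # asctime also accepts a plain 9-tuple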
+ +if sys.version_info < (3, 8): + def clock() -> float: ... + +def ctime(secs: float | None = ...) -> str: ... +def gmtime(secs: float | None = ...) -> struct_time: ... +def localtime(secs: float | None = ...) -> struct_time: ... +def mktime(t: _TimeTuple | struct_time) -> float: ... +def sleep(secs: float) -> None: ... +def strftime(format: str, t: _TimeTuple | struct_time = ...) -> str: ... +def strptime(string: str, format: str = ...) -> struct_time: ... +def time() -> float: ... + +if sys.platform != "win32": + def tzset() -> None: ... # Unix only + +class _ClockInfo(Protocol): + adjustable: bool + implementation: str + monotonic: bool + resolution: float + +def get_clock_info(name: Literal["monotonic", "perf_counter", "process_time", "time", "thread_time"]) -> _ClockInfo: ... +def monotonic() -> float: ... +def perf_counter() -> float: ... +def process_time() -> float: ... + +if sys.platform != "win32": + def clock_getres(clk_id: int) -> float: ... # Unix only + def clock_gettime(clk_id: int) -> float: ... # Unix only + def clock_settime(clk_id: int, time: float) -> None: ... # Unix only + +if sys.platform != "win32": + def clock_gettime_ns(clock_id: int) -> int: ... + def clock_settime_ns(clock_id: int, time: int) -> int: ... + +if sys.platform == "linux": + def pthread_getcpuclockid(thread_id: int) -> int: ... + +def monotonic_ns() -> int: ... +def perf_counter_ns() -> int: ... +def process_time_ns() -> int: ... +def time_ns() -> int: ... +def thread_time() -> float: ... +def thread_time_ns() -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/timeit.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/timeit.pyi new file mode 100644 index 00000000..a5da943c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/timeit.pyi @@ -0,0 +1,32 @@ +from collections.abc import Callable, Sequence +from typing import IO, Any +from typing_extensions import TypeAlias + +__all__ = ["Timer", "timeit", "repeat", "default_timer"] + +_Timer: TypeAlias = Callable[[], float] +_Stmt: TypeAlias = str | Callable[[], object] + +default_timer: _Timer + +class Timer: + def __init__( + self, stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., globals: dict[str, Any] | None = None + ) -> None: ... + def print_exc(self, file: IO[str] | None = None) -> None: ... + def timeit(self, number: int = 1000000) -> float: ... + def repeat(self, repeat: int = 5, number: int = 1000000) -> list[float]: ... + def autorange(self, callback: Callable[[int, float], object] | None = None) -> tuple[int, float]: ... + +def timeit( + stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., number: int = 1000000, globals: dict[str, Any] | None = None +) -> float: ... +def repeat( + stmt: _Stmt = "pass", + setup: _Stmt = "pass", + timer: _Timer = ..., + repeat: int = 5, + number: int = 1000000, + globals: dict[str, Any] | None = None, +) -> list[float]: ... +def main(args: Sequence[str] | None = None, *, _wrap_timer: Callable[[_Timer], _Timer] | None = None) -> None: ... 
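Before the much larger tkinter stub that follows, a quick sketch of the timeit API these signatures describe, assuming the runtime module matches; the statements being timed are arbitrary.

import timeit

# _Stmt above allows callables as well as strings of code.
elapsed = timeit.timeit(lambda: sum(range(100)), number=10_000)
print(f"10k iterations took {elapsed:.4f}s")

timer = timeit.Timer(stmt="sorted(data)", setup="data = list(range(1000))[::-1]")
loops, total = timer.autorange()    # picks a loop count large enough to be measurable
print(f"{loops} loops, {total:.4f}s total")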
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/__init__.pyi new file mode 100644 index 00000000..9dc13c80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/__init__.pyi @@ -0,0 +1,3589 @@ +import _tkinter +import sys +from _typeshed import Incomplete, StrOrBytesPath +from collections.abc import Callable, Mapping, Sequence +from enum import Enum +from tkinter.constants import * +from tkinter.font import _FontDescription +from types import TracebackType +from typing import Any, Generic, NamedTuple, Protocol, TypeVar, overload, type_check_only +from typing_extensions import Literal, TypeAlias, TypedDict + +if sys.version_info >= (3, 9): + __all__ = [ + "TclError", + "NO", + "FALSE", + "OFF", + "YES", + "TRUE", + "ON", + "N", + "S", + "W", + "E", + "NW", + "SW", + "NE", + "SE", + "NS", + "EW", + "NSEW", + "CENTER", + "NONE", + "X", + "Y", + "BOTH", + "LEFT", + "TOP", + "RIGHT", + "BOTTOM", + "RAISED", + "SUNKEN", + "FLAT", + "RIDGE", + "GROOVE", + "SOLID", + "HORIZONTAL", + "VERTICAL", + "NUMERIC", + "CHAR", + "WORD", + "BASELINE", + "INSIDE", + "OUTSIDE", + "SEL", + "SEL_FIRST", + "SEL_LAST", + "END", + "INSERT", + "CURRENT", + "ANCHOR", + "ALL", + "NORMAL", + "DISABLED", + "ACTIVE", + "HIDDEN", + "CASCADE", + "CHECKBUTTON", + "COMMAND", + "RADIOBUTTON", + "SEPARATOR", + "SINGLE", + "BROWSE", + "MULTIPLE", + "EXTENDED", + "DOTBOX", + "UNDERLINE", + "PIESLICE", + "CHORD", + "ARC", + "FIRST", + "LAST", + "BUTT", + "PROJECTING", + "ROUND", + "BEVEL", + "MITER", + "MOVETO", + "SCROLL", + "UNITS", + "PAGES", + "TkVersion", + "TclVersion", + "READABLE", + "WRITABLE", + "EXCEPTION", + "EventType", + "Event", + "NoDefaultRoot", + "Variable", + "StringVar", + "IntVar", + "DoubleVar", + "BooleanVar", + "mainloop", + "getint", + "getdouble", + "getboolean", + "Misc", + "CallWrapper", + "XView", + "YView", + "Wm", + "Tk", + "Tcl", + "Pack", + "Place", + "Grid", + "BaseWidget", + "Widget", + "Toplevel", + "Button", + "Canvas", + "Checkbutton", + "Entry", + "Frame", + "Label", + "Listbox", + "Menu", + "Menubutton", + "Message", + "Radiobutton", + "Scale", + "Scrollbar", + "Text", + "OptionMenu", + "Image", + "PhotoImage", + "BitmapImage", + "image_names", + "image_types", + "Spinbox", + "LabelFrame", + "PanedWindow", + ] + +# Using anything from tkinter.font in this file means that 'import tkinter' +# seems to also load tkinter.font. That's not how it actually works, but +# unfortunately not much can be done about it. https://github.com/python/typeshed/pull/4346 + +TclError = _tkinter.TclError +wantobjects: int +TkVersion: float +TclVersion: float +READABLE = _tkinter.READABLE +WRITABLE = _tkinter.WRITABLE +EXCEPTION = _tkinter.EXCEPTION + +# Quick guide for figuring out which widget class to choose: +# - Misc: any widget (don't use BaseWidget because Tk doesn't inherit from BaseWidget) +# - Widget: anything that is meant to be put into another widget with e.g. pack or grid +# +# Don't trust tkinter's docstrings, because they have been created by copy/pasting from +# Tk's manual pages more than 10 years ago. 
Use the latest manual pages instead: +# +# $ sudo apt install tk-doc tcl-doc +# $ man 3tk label # tkinter.Label +# $ man 3tk ttk_label # tkinter.ttk.Label +# $ man 3tcl after # tkinter.Misc.after +# +# You can also read the manual pages online: https://www.tcl.tk/doc/ + +# Some widgets have an option named -compound that accepts different values +# than the _Compound defined here. Many other options have similar things. +_Anchor: TypeAlias = Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] # manual page: Tk_GetAnchor +_Bitmap: TypeAlias = str # manual page: Tk_GetBitmap +_ButtonCommand: TypeAlias = str | Callable[[], Any] # accepts string of tcl code, return value is returned from Button.invoke() +_CanvasItemId: TypeAlias = int +_Color: TypeAlias = str # typically '#rrggbb', '#rgb' or color names. +_Compound: TypeAlias = Literal["top", "left", "center", "right", "bottom", "none"] # -compound in manual page named 'options' +# manual page: Tk_GetCursor +_Cursor: TypeAlias = str | tuple[str] | tuple[str, str] | tuple[str, str, str] | tuple[str, str, str, str] +# example when it's sequence: entry['invalidcommand'] = [entry.register(print), '%P'] +_EntryValidateCommand: TypeAlias = str | list[str] | tuple[str, ...] | Callable[[], bool] +_GridIndex: TypeAlias = int | str +_ImageSpec: TypeAlias = _Image | str # str can be from e.g. tkinter.image_names() +_Relief: TypeAlias = Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] # manual page: Tk_GetRelief +_ScreenUnits: TypeAlias = str | float # Often the right type instead of int. Manual page: Tk_GetPixels +# -xscrollcommand and -yscrollcommand in 'options' manual page +_XYScrollCommand: TypeAlias = str | Callable[[float, float], object] +_TakeFocusValue: TypeAlias = int | Literal[""] | Callable[[str], bool | None] # -takefocus in manual page named 'options' + +if sys.version_info >= (3, 11): + class _VersionInfoType(NamedTuple): + major: int + minor: int + micro: int + releaselevel: str + serial: int + +class EventType(str, Enum): + Activate: str + ButtonPress: str + Button = ButtonPress + ButtonRelease: str + Circulate: str + CirculateRequest: str + ClientMessage: str + Colormap: str + Configure: str + ConfigureRequest: str + Create: str + Deactivate: str + Destroy: str + Enter: str + Expose: str + FocusIn: str + FocusOut: str + GraphicsExpose: str + Gravity: str + KeyPress: str + Key = KeyPress + KeyRelease: str + Keymap: str + Leave: str + Map: str + MapRequest: str + Mapping: str + Motion: str + MouseWheel: str + NoExpose: str + Property: str + Reparent: str + ResizeRequest: str + Selection: str + SelectionClear: str + SelectionRequest: str + Unmap: str + VirtualEvent: str + Visibility: str + +_W = TypeVar("_W", bound=Misc) +# Events considered covariant because you should never assign to event.widget. +_W_co = TypeVar("_W_co", covariant=True, bound=Misc) + +class Event(Generic[_W_co]): + serial: int + num: int + focus: bool + height: int + width: int + keycode: int + state: int | str + time: int + x: int + y: int + x_root: int + y_root: int + char: str + send_event: bool + keysym: str + keysym_num: int + type: EventType + widget: _W_co + delta: int + +def NoDefaultRoot() -> None: ... + +_TraceMode: TypeAlias = Literal["array", "read", "write", "unset"] + +class Variable: + def __init__(self, master: Misc | None = None, value: Incomplete | None = None, name: str | None = None) -> None: ... + def set(self, value) -> None: ... + initialize = set + def get(self): ... 
+ def trace_add(self, mode: _TraceMode, callback: Callable[[str, str, str], object]) -> str: ... + def trace_remove(self, mode: _TraceMode, cbname: str) -> None: ... + def trace_info(self) -> list[tuple[tuple[_TraceMode, ...], str]]: ... + def trace_variable(self, mode, callback): ... # deprecated + def trace_vdelete(self, mode, cbname) -> None: ... # deprecated + def trace_vinfo(self): ... # deprecated + trace = trace_variable # deprecated + def __eq__(self, other: object) -> bool: ... + +class StringVar(Variable): + def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: ... + def set(self, value: str) -> None: ... + initialize = set + def get(self) -> str: ... + +class IntVar(Variable): + def __init__(self, master: Misc | None = None, value: int | None = None, name: str | None = None) -> None: ... + def set(self, value: int) -> None: ... + initialize = set + def get(self) -> int: ... + +class DoubleVar(Variable): + def __init__(self, master: Misc | None = None, value: float | None = None, name: str | None = None) -> None: ... + def set(self, value: float) -> None: ... + initialize = set + def get(self) -> float: ... + +class BooleanVar(Variable): + def __init__(self, master: Misc | None = None, value: bool | None = None, name: str | None = None) -> None: ... + def set(self, value: bool) -> None: ... + initialize = set + def get(self) -> bool: ... + +def mainloop(n: int = 0) -> None: ... + +getint: Incomplete +getdouble: Incomplete + +def getboolean(s): ... + +class _GridIndexInfo(TypedDict, total=False): + minsize: _ScreenUnits + pad: _ScreenUnits + uniform: str | None + weight: int + +class Misc: + master: Misc | None + tk: _tkinter.TkappType + children: dict[str, Widget] + def destroy(self) -> None: ... + def deletecommand(self, name: str) -> None: ... + def tk_strictMotif(self, boolean: Incomplete | None = None): ... + def tk_bisque(self) -> None: ... + def tk_setPalette(self, *args, **kw) -> None: ... + def wait_variable(self, name: str | Variable = "PY_VAR") -> None: ... + waitvar = wait_variable + def wait_window(self, window: Misc | None = None) -> None: ... + def wait_visibility(self, window: Misc | None = None) -> None: ... + def setvar(self, name: str = "PY_VAR", value: str = "1") -> None: ... + def getvar(self, name: str = "PY_VAR"): ... + def getint(self, s): ... + def getdouble(self, s): ... + def getboolean(self, s): ... + def focus_set(self) -> None: ... + focus = focus_set + def focus_force(self) -> None: ... + def focus_get(self) -> Misc | None: ... + def focus_displayof(self) -> Misc | None: ... + def focus_lastfor(self) -> Misc | None: ... + def tk_focusFollowsMouse(self) -> None: ... + def tk_focusNext(self) -> Misc | None: ... + def tk_focusPrev(self) -> Misc | None: ... + @overload + def after(self, ms: int, func: None = None) -> None: ... + @overload + def after(self, ms: int | Literal["idle"], func: Callable[..., object], *args: Any) -> str: ... + # after_idle is essentially partialmethod(after, "idle") + def after_idle(self, func: Callable[..., object], *args: Any) -> str: ... + def after_cancel(self, id: str) -> None: ... + def bell(self, displayof: Literal[0] | Misc | None = 0) -> None: ... + def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: ... + def clipboard_clear(self, *, displayof: Misc = ...) -> None: ... + def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: ... + def grab_current(self): ... 
+ def grab_release(self) -> None: ... + def grab_set(self) -> None: ... + def grab_set_global(self) -> None: ... + def grab_status(self) -> Literal["local", "global"] | None: ... + def option_add( + self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = None + ) -> None: ... + def option_clear(self) -> None: ... + def option_get(self, name, className): ... + def option_readfile(self, fileName, priority: Incomplete | None = None) -> None: ... + def selection_clear(self, **kw) -> None: ... + def selection_get(self, **kw): ... + def selection_handle(self, command, **kw) -> None: ... + def selection_own(self, **kw) -> None: ... + def selection_own_get(self, **kw): ... + def send(self, interp, cmd, *args): ... + def lower(self, belowThis: Incomplete | None = None) -> None: ... + def tkraise(self, aboveThis: Incomplete | None = None) -> None: ... + lift = tkraise + if sys.version_info >= (3, 11): + def info_patchlevel(self) -> _VersionInfoType: ... + + def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = 0) -> int: ... + def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = 0) -> str: ... + def winfo_cells(self) -> int: ... + def winfo_children(self) -> list[Widget]: ... # Widget because it can't be Toplevel or Tk + def winfo_class(self) -> str: ... + def winfo_colormapfull(self) -> bool: ... + def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = 0) -> Misc | None: ... + def winfo_depth(self) -> int: ... + def winfo_exists(self) -> bool: ... + def winfo_fpixels(self, number: _ScreenUnits) -> float: ... + def winfo_geometry(self) -> str: ... + def winfo_height(self) -> int: ... + def winfo_id(self) -> int: ... + def winfo_interps(self, displayof: Literal[0] | Misc | None = 0) -> tuple[str, ...]: ... + def winfo_ismapped(self) -> bool: ... + def winfo_manager(self) -> str: ... + def winfo_name(self) -> str: ... + def winfo_parent(self) -> str: ... # return value needs nametowidget() + def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = 0): ... + def winfo_pixels(self, number: _ScreenUnits) -> int: ... + def winfo_pointerx(self) -> int: ... + def winfo_pointerxy(self) -> tuple[int, int]: ... + def winfo_pointery(self) -> int: ... + def winfo_reqheight(self) -> int: ... + def winfo_reqwidth(self) -> int: ... + def winfo_rgb(self, color: _Color) -> tuple[int, int, int]: ... + def winfo_rootx(self) -> int: ... + def winfo_rooty(self) -> int: ... + def winfo_screen(self) -> str: ... + def winfo_screencells(self) -> int: ... + def winfo_screendepth(self) -> int: ... + def winfo_screenheight(self) -> int: ... + def winfo_screenmmheight(self) -> int: ... + def winfo_screenmmwidth(self) -> int: ... + def winfo_screenvisual(self) -> str: ... + def winfo_screenwidth(self) -> int: ... + def winfo_server(self) -> str: ... + def winfo_toplevel(self) -> Tk | Toplevel: ... + def winfo_viewable(self) -> bool: ... + def winfo_visual(self) -> str: ... + def winfo_visualid(self) -> str: ... + def winfo_visualsavailable(self, includeids: bool = False) -> list[tuple[str, int]]: ... + def winfo_vrootheight(self) -> int: ... + def winfo_vrootwidth(self) -> int: ... + def winfo_vrootx(self) -> int: ... + def winfo_vrooty(self) -> int: ... + def winfo_width(self) -> int: ... + def winfo_x(self) -> int: ... + def winfo_y(self) -> int: ... + def update(self) -> None: ... + def update_idletasks(self) -> None: ... 
+ @overload + def bindtags(self, tagList: None = None) -> tuple[str, ...]: ... + @overload + def bindtags(self, tagList: list[str] | tuple[str, ...]) -> None: ... + # bind with isinstance(func, str) doesn't return anything, but all other + # binds do. The default value of func is not str. + @overload + def bind( + self, + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + # There's no way to know what type of widget bind_all and bind_class + # callbacks will get, so those are Misc. + @overload + def bind_all( + self, + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind_all(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind_class( + self, + className: str, + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind_class(self, className: str, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def unbind(self, sequence: str, funcid: str | None = None) -> None: ... + def unbind_all(self, sequence: str) -> None: ... + def unbind_class(self, className: str, sequence: str) -> None: ... + def mainloop(self, n: int = 0) -> None: ... + def quit(self) -> None: ... + @property + def _windowingsystem(self) -> Literal["win32", "aqua", "x11"]: ... + def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: ... + def register( + self, func: Callable[..., object], subst: Callable[..., Sequence[Any]] | None = None, needcleanup: int = 1 + ) -> str: ... + def keys(self) -> list[str]: ... + @overload + def pack_propagate(self, flag: bool) -> bool | None: ... + @overload + def pack_propagate(self) -> None: ... + propagate = pack_propagate + def grid_anchor(self, anchor: _Anchor | None = None) -> None: ... + anchor = grid_anchor + @overload + def grid_bbox( + self, column: None = None, row: None = None, col2: None = None, row2: None = None + ) -> tuple[int, int, int, int] | None: ... + @overload + def grid_bbox(self, column: int, row: int, col2: None = None, row2: None = None) -> tuple[int, int, int, int] | None: ... + @overload + def grid_bbox(self, column: int, row: int, col2: int, row2: int) -> tuple[int, int, int, int] | None: ... + bbox = grid_bbox + def grid_columnconfigure( + self, + index: _GridIndex, + cnf: _GridIndexInfo = ..., + *, + minsize: _ScreenUnits = ..., + pad: _ScreenUnits = ..., + uniform: str = ..., + weight: int = ..., + ) -> _GridIndexInfo | Any: ... # can be None but annoying to check + def grid_rowconfigure( + self, + index: _GridIndex, + cnf: _GridIndexInfo = ..., + *, + minsize: _ScreenUnits = ..., + pad: _ScreenUnits = ..., + uniform: str = ..., + weight: int = ..., + ) -> _GridIndexInfo | Any: ... 
# can be None but annoying to check + columnconfigure = grid_columnconfigure + rowconfigure = grid_rowconfigure + def grid_location(self, x: _ScreenUnits, y: _ScreenUnits) -> tuple[int, int]: ... + @overload + def grid_propagate(self, flag: bool) -> None: ... + @overload + def grid_propagate(self) -> bool: ... + def grid_size(self) -> tuple[int, int]: ... + size = grid_size + # Widget because Toplevel or Tk is never a slave + def pack_slaves(self) -> list[Widget]: ... + def grid_slaves(self, row: int | None = None, column: int | None = None) -> list[Widget]: ... + def place_slaves(self) -> list[Widget]: ... + slaves = pack_slaves + def event_add(self, virtual: str, *sequences: str) -> None: ... + def event_delete(self, virtual: str, *sequences: str) -> None: ... + def event_generate( + self, + sequence: str, + *, + above: Misc | int = ..., + borderwidth: _ScreenUnits = ..., + button: int = ..., + count: int = ..., + data: Any = ..., # anything with usable str() value + delta: int = ..., + detail: str = ..., + focus: bool = ..., + height: _ScreenUnits = ..., + keycode: int = ..., + keysym: str = ..., + mode: str = ..., + override: bool = ..., + place: Literal["PlaceOnTop", "PlaceOnBottom"] = ..., + root: Misc | int = ..., + rootx: _ScreenUnits = ..., + rooty: _ScreenUnits = ..., + sendevent: bool = ..., + serial: int = ..., + state: int | str = ..., + subwindow: Misc | int = ..., + time: int = ..., + warp: bool = ..., + width: _ScreenUnits = ..., + when: Literal["now", "tail", "head", "mark"] = ..., + x: _ScreenUnits = ..., + y: _ScreenUnits = ..., + ) -> None: ... + def event_info(self, virtual: str | None = None) -> tuple[str, ...]: ... + def image_names(self) -> tuple[str, ...]: ... + def image_types(self) -> tuple[str, ...]: ... + # See #4363 and #4891 + def __setitem__(self, key: str, value: Any) -> None: ... + def __getitem__(self, key: str) -> Any: ... + def cget(self, key: str) -> Any: ... + def configure(self, cnf: Any = None) -> Any: ... + # TODO: config is an alias of configure, but adding that here creates lots of mypy errors + +class CallWrapper: + func: Incomplete + subst: Incomplete + widget: Incomplete + def __init__(self, func, subst, widget) -> None: ... + def __call__(self, *args): ... + +class XView: + @overload + def xview(self) -> tuple[float, float]: ... + @overload + def xview(self, *args): ... + def xview_moveto(self, fraction: float) -> None: ... + @overload + def xview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: ... + @overload + def xview_scroll(self, number: _ScreenUnits, what: Literal["pixels"]) -> None: ... + +class YView: + @overload + def yview(self) -> tuple[float, float]: ... + @overload + def yview(self, *args): ... + def yview_moveto(self, fraction: float) -> None: ... + @overload + def yview_scroll(self, number: int, what: Literal["units", "pages"]) -> None: ... + @overload + def yview_scroll(self, number: _ScreenUnits, what: Literal["pixels"]) -> None: ... + +class Wm: + @overload + def wm_aspect(self, minNumer: int, minDenom: int, maxNumer: int, maxDenom: int) -> None: ... + @overload + def wm_aspect( + self, minNumer: None = None, minDenom: None = None, maxNumer: None = None, maxDenom: None = None + ) -> tuple[int, int, int, int] | None: ... + aspect = wm_aspect + @overload + def wm_attributes(self) -> tuple[Any, ...]: ... + @overload + def wm_attributes(self, __option: str): ... + @overload + def wm_attributes(self, __option: str, __value, *__other_option_value_pairs: Any) -> None: ... 
+ attributes = wm_attributes + def wm_client(self, name: str | None = None) -> str: ... + client = wm_client + @overload + def wm_colormapwindows(self) -> list[Misc]: ... + @overload + def wm_colormapwindows(self, __wlist: list[Misc] | tuple[Misc, ...]) -> None: ... + @overload + def wm_colormapwindows(self, __first_wlist_item: Misc, *other_wlist_items: Misc) -> None: ... + colormapwindows = wm_colormapwindows + def wm_command(self, value: str | None = None) -> str: ... + command = wm_command + # Some of these always return empty string, but return type is set to None to prevent accidentally using it + def wm_deiconify(self) -> None: ... + deiconify = wm_deiconify + def wm_focusmodel(self, model: Literal["active", "passive"] | None = None) -> Literal["active", "passive", ""]: ... + focusmodel = wm_focusmodel + def wm_forget(self, window: Wm) -> None: ... + forget = wm_forget + def wm_frame(self) -> str: ... + frame = wm_frame + @overload + def wm_geometry(self, newGeometry: None = None) -> str: ... + @overload + def wm_geometry(self, newGeometry: str) -> None: ... + geometry = wm_geometry + def wm_grid( + self, + baseWidth: Incomplete | None = None, + baseHeight: Incomplete | None = None, + widthInc: Incomplete | None = None, + heightInc: Incomplete | None = None, + ): ... + grid = wm_grid + def wm_group(self, pathName: Incomplete | None = None): ... + group = wm_group + def wm_iconbitmap(self, bitmap: Incomplete | None = None, default: Incomplete | None = None): ... + iconbitmap = wm_iconbitmap + def wm_iconify(self) -> None: ... + iconify = wm_iconify + def wm_iconmask(self, bitmap: Incomplete | None = None): ... + iconmask = wm_iconmask + def wm_iconname(self, newName: Incomplete | None = None) -> str: ... + iconname = wm_iconname + def wm_iconphoto(self, default: bool, __image1: _PhotoImageLike | str, *args: _PhotoImageLike | str) -> None: ... + iconphoto = wm_iconphoto + def wm_iconposition(self, x: int | None = None, y: int | None = None) -> tuple[int, int] | None: ... + iconposition = wm_iconposition + def wm_iconwindow(self, pathName: Incomplete | None = None): ... + iconwindow = wm_iconwindow + def wm_manage(self, widget) -> None: ... + manage = wm_manage + @overload + def wm_maxsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... + @overload + def wm_maxsize(self, width: int, height: int) -> None: ... + maxsize = wm_maxsize + @overload + def wm_minsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... + @overload + def wm_minsize(self, width: int, height: int) -> None: ... + minsize = wm_minsize + @overload + def wm_overrideredirect(self, boolean: None = None) -> bool | None: ... # returns True or None + @overload + def wm_overrideredirect(self, boolean: bool) -> None: ... + overrideredirect = wm_overrideredirect + def wm_positionfrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... + positionfrom = wm_positionfrom + @overload + def wm_protocol(self, name: str, func: Callable[[], object] | str) -> None: ... + @overload + def wm_protocol(self, name: str, func: None = None) -> str: ... + @overload + def wm_protocol(self, name: None = None, func: None = None) -> tuple[str, ...]: ... + protocol = wm_protocol + @overload + def wm_resizable(self, width: None = None, height: None = None) -> tuple[bool, bool]: ... + @overload + def wm_resizable(self, width: bool, height: bool) -> None: ... 
+ resizable = wm_resizable + def wm_sizefrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... + sizefrom = wm_sizefrom + @overload + def wm_state(self, newstate: None = None) -> str: ... + @overload + def wm_state(self, newstate: str) -> None: ... + state = wm_state + @overload + def wm_title(self, string: None = None) -> str: ... + @overload + def wm_title(self, string: str) -> None: ... + title = wm_title + @overload + def wm_transient(self, master: None = None) -> _tkinter.Tcl_Obj: ... + @overload + def wm_transient(self, master: Wm | _tkinter.Tcl_Obj) -> None: ... + transient = wm_transient + def wm_withdraw(self) -> None: ... + withdraw = wm_withdraw + +class _ExceptionReportingCallback(Protocol): + def __call__(self, __exc: type[BaseException], __val: BaseException, __tb: TracebackType | None) -> object: ... + +class Tk(Misc, Wm): + master: None + def __init__( + # Make sure to keep in sync with other functions that use the same + # args. + # use `git grep screenName` to find them + self, + screenName: str | None = None, + baseName: str | None = None, + className: str = "Tk", + useTk: bool = True, + sync: bool = False, + use: str | None = None, + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + menu: Menu = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + takefocus: _TakeFocusValue = ..., + width: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def destroy(self) -> None: ... + def readprofile(self, baseName: str, className: str) -> None: ... + report_callback_exception: _ExceptionReportingCallback + # Tk has __getattr__ so that tk_instance.foo falls back to tk_instance.tk.foo + # Please keep in sync with _tkinter.TkappType. + # Some methods are intentionally missing because they are inherited from Misc instead. + def adderrorinfo(self, __msg): ... + def call(self, __command: Any, *args: Any) -> Any: ... + def createcommand(self, __name, __func): ... + if sys.platform != "win32": + def createfilehandler(self, __file, __mask, __func): ... + def deletefilehandler(self, __file): ... + + def createtimerhandler(self, __milliseconds, __func): ... + def dooneevent(self, __flags: int = ...): ... + def eval(self, __script: str) -> str: ... + def evalfile(self, __fileName): ... + def exprboolean(self, __s): ... + def exprdouble(self, __s): ... + def exprlong(self, __s): ... + def exprstring(self, __s): ... + def globalgetvar(self, *args, **kwargs): ... + def globalsetvar(self, *args, **kwargs): ... + def globalunsetvar(self, *args, **kwargs): ... + def interpaddr(self): ... + def loadtk(self) -> None: ... + def record(self, __script): ... + if sys.version_info < (3, 11): + def split(self, __arg): ... + + def splitlist(self, __arg): ... + def unsetvar(self, *args, **kwargs): ... + def wantobjects(self, *args, **kwargs): ... + def willdispatch(self): ... + +def Tcl(screenName: str | None = None, baseName: str | None = None, className: str = "Tk", useTk: bool = False) -> Tk: ... 
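A minimal sketch tying together a few of the names declared above (Tk, StringVar, the typed Event and bind overloads); it is only an illustration, it needs a graphical display to run, and the binding and timings chosen here are arbitrary.

import tkinter

root = tkinter.Tk()                            # constructing Tk requires a display
message = tkinter.StringVar(master=root, value="no clicks yet")

def on_click(event: "tkinter.Event[tkinter.Misc]") -> None:
    # Event is generic over the widget type; Misc.bind callbacks get Event[Misc].
    message.set(f"clicked at ({event.x}, {event.y})")
    print(message.get())

root.bind("<Button-1>", on_click)              # the callable overload returns a funcid string
root.after(2000, root.destroy)                 # schedule shutdown so the sketch terminates
root.mainloop()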
+ +_InMiscTotal = TypedDict("_InMiscTotal", {"in": Misc}) +_InMiscNonTotal = TypedDict("_InMiscNonTotal", {"in": Misc}, total=False) + +class _PackInfo(_InMiscTotal): + # 'before' and 'after' never appear in _PackInfo + anchor: _Anchor + expand: bool + fill: Literal["none", "x", "y", "both"] + side: Literal["left", "right", "top", "bottom"] + # Paddings come out as int or tuple of int, even though any _ScreenUnits + # can be specified in pack(). + ipadx: int + ipady: int + padx: int | tuple[int, int] + pady: int | tuple[int, int] + +class Pack: + # _PackInfo is not the valid type for cnf because pad stuff accepts any + # _ScreenUnits instead of int only. I didn't bother to create another + # TypedDict for cnf because it appears to be a legacy thing that was + # replaced by **kwargs. + def pack_configure( + self, + cnf: Mapping[str, Any] | None = ..., + *, + after: Misc = ..., + anchor: _Anchor = ..., + before: Misc = ..., + expand: int = ..., + fill: Literal["none", "x", "y", "both"] = ..., + side: Literal["left", "right", "top", "bottom"] = ..., + ipadx: _ScreenUnits = ..., + ipady: _ScreenUnits = ..., + padx: _ScreenUnits | tuple[_ScreenUnits, _ScreenUnits] = ..., + pady: _ScreenUnits | tuple[_ScreenUnits, _ScreenUnits] = ..., + in_: Misc = ..., + **kw: Any, # allow keyword argument named 'in', see #4836 + ) -> None: ... + def pack_forget(self) -> None: ... + def pack_info(self) -> _PackInfo: ... # errors if widget hasn't been packed + pack = pack_configure + forget = pack_forget + propagate = Misc.pack_propagate + +class _PlaceInfo(_InMiscNonTotal): # empty dict if widget hasn't been placed + anchor: _Anchor + bordermode: Literal["inside", "outside", "ignore"] + width: str # can be int()ed (even after e.g. widget.place(height='2.3c') or similar) + height: str # can be int()ed + x: str # can be int()ed + y: str # can be int()ed + relheight: str # can be float()ed if not empty string + relwidth: str # can be float()ed if not empty string + relx: str # can be float()ed if not empty string + rely: str # can be float()ed if not empty string + +class Place: + def place_configure( + self, + cnf: Mapping[str, Any] | None = ..., + *, + anchor: _Anchor = ..., + bordermode: Literal["inside", "outside", "ignore"] = ..., + width: _ScreenUnits = ..., + height: _ScreenUnits = ..., + x: _ScreenUnits = ..., + y: _ScreenUnits = ..., + # str allowed for compatibility with place_info() + relheight: str | float = ..., + relwidth: str | float = ..., + relx: str | float = ..., + rely: str | float = ..., + in_: Misc = ..., + **kw: Any, # allow keyword argument named 'in', see #4836 + ) -> None: ... + def place_forget(self) -> None: ... + def place_info(self) -> _PlaceInfo: ... 
+ place = place_configure + info = place_info + +class _GridInfo(_InMiscNonTotal): # empty dict if widget hasn't been gridded + column: int + columnspan: int + row: int + rowspan: int + ipadx: int + ipady: int + padx: int | tuple[int, int] + pady: int | tuple[int, int] + sticky: str # consists of letters 'n', 's', 'w', 'e', no repeats, may be empty + +class Grid: + def grid_configure( + self, + cnf: Mapping[str, Any] | None = ..., + *, + column: int = ..., + columnspan: int = ..., + row: int = ..., + rowspan: int = ..., + ipadx: _ScreenUnits = ..., + ipady: _ScreenUnits = ..., + padx: _ScreenUnits | tuple[_ScreenUnits, _ScreenUnits] = ..., + pady: _ScreenUnits | tuple[_ScreenUnits, _ScreenUnits] = ..., + sticky: str = ..., # consists of letters 'n', 's', 'w', 'e', may contain repeats, may be empty + in_: Misc = ..., + **kw: Any, # allow keyword argument named 'in', see #4836 + ) -> None: ... + def grid_forget(self) -> None: ... + def grid_remove(self) -> None: ... + def grid_info(self) -> _GridInfo: ... + grid = grid_configure + location = Misc.grid_location + size = Misc.grid_size + +class BaseWidget(Misc): + master: Misc + widgetName: Incomplete + def __init__(self, master, widgetName, cnf=..., kw=..., extra=...) -> None: ... + def destroy(self) -> None: ... + +# This class represents any widget except Toplevel or Tk. +class Widget(BaseWidget, Pack, Place, Grid): + # Allow bind callbacks to take e.g. Event[Label] instead of Event[Misc]. + # Tk and Toplevel get notified for their child widgets' events, but other + # widgets don't. + @overload + def bind( + self: _W, + sequence: str | None = None, + func: Callable[[Event[_W]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + @overload + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + +class Toplevel(BaseWidget, Wm): + # Toplevel and Tk have the same options because they correspond to the same + # Tcl/Tk toplevel widget. For some reason, config and configure must be + # copy/pasted here instead of aliasing as 'config = Tk.config'. + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + class_: str = ..., + colormap: Literal["new", ""] | Misc = ..., + container: bool = ..., + cursor: _Cursor = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + menu: Menu = ..., + name: str = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + screen: str = ..., # can't be changed after creating widget + takefocus: _TakeFocusValue = ..., + use: int = ..., + visual: str | tuple[str, int] = ..., + width: _ScreenUnits = ..., + ) -> None: ... 
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + menu: Menu = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + takefocus: _TakeFocusValue = ..., + width: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Button(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., # same as borderwidth + bg: _Color = ..., # same as background + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., # same as borderwidth + borderwidth: _ScreenUnits = ..., + command: _ButtonCommand = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + default: Literal["normal", "active", "disabled"] = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., # same as foreground + font: _FontDescription = ..., + foreground: _Color = ..., + # width and height must be int for buttons containing just text, but + # ints are also valid _ScreenUnits + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + overrelief: _Relief = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + # We allow the textvariable to be any Variable, not necessarily + # StringVar. This is useful for e.g. a button that displays the value + # of an IntVar. + textvariable: Variable = ..., + underline: int = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> None: ... 
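+    # As noted for textvariable above, any Variable works, e.g. an IntVar
+    # whose value the button displays. A minimal sketch:
+    #
+    # >>> import tkinter
+    # >>> root = tkinter.Tk()
+    # >>> var = tkinter.IntVar(value=0)
+    # >>> button = tkinter.Button(root, textvariable=var)
+    # >>> var.set(42)  # the button now displays "42"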
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + command: _ButtonCommand = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + default: Literal["normal", "active", "disabled"] = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + justify: Literal["left", "center", "right"] = ..., + overrelief: _Relief = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + underline: int = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def flash(self) -> None: ... + def invoke(self) -> Any: ... + +class Canvas(Widget, XView, YView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + closeenough: float = ..., + confine: bool = ..., + cursor: _Cursor = ..., + # canvas manual page has a section named COORDINATES, and the first + # part of it describes _ScreenUnits. + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + insertbackground: _Color = ..., + insertborderwidth: _ScreenUnits = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertwidth: _ScreenUnits = ..., + name: str = ..., + offset=..., # undocumented + relief: _Relief = ..., + # Setting scrollregion to None doesn't reset it back to empty, + # but setting it to () does. + scrollregion: tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits] | tuple[()] = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + # man page says that state can be 'hidden', but it can't + state: Literal["normal", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + width: _ScreenUnits = ..., + xscrollcommand: _XYScrollCommand = ..., + xscrollincrement: _ScreenUnits = ..., + yscrollcommand: _XYScrollCommand = ..., + yscrollincrement: _ScreenUnits = ..., + ) -> None: ... 
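+    # The scrollregion behaviour noted above, roughly:
+    #
+    # >>> import tkinter
+    # >>> canvas = tkinter.Canvas(tkinter.Tk(), scrollregion=(0, 0, 100, 100))
+    # >>> canvas.configure(scrollregion=None)  # no effect, region stays set
+    # >>> canvas.configure(scrollregion=())  # resets the region back to empty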
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + closeenough: float = ..., + confine: bool = ..., + cursor: _Cursor = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + insertbackground: _Color = ..., + insertborderwidth: _ScreenUnits = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertwidth: _ScreenUnits = ..., + offset=..., # undocumented + relief: _Relief = ..., + scrollregion: tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits] | tuple[()] = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + state: Literal["normal", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + width: _ScreenUnits = ..., + xscrollcommand: _XYScrollCommand = ..., + xscrollincrement: _ScreenUnits = ..., + yscrollcommand: _XYScrollCommand = ..., + yscrollincrement: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def addtag(self, *args): ... # internal method + def addtag_above(self, newtag: str, tagOrId: str | _CanvasItemId) -> None: ... + def addtag_all(self, newtag: str) -> None: ... + def addtag_below(self, newtag: str, tagOrId: str | _CanvasItemId) -> None: ... + def addtag_closest( + self, + newtag: str, + x: _ScreenUnits, + y: _ScreenUnits, + halo: _ScreenUnits | None = None, + start: str | _CanvasItemId | None = None, + ) -> None: ... + def addtag_enclosed(self, newtag: str, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> None: ... + def addtag_overlapping(self, newtag: str, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> None: ... + def addtag_withtag(self, newtag: str, tagOrId: str | _CanvasItemId) -> None: ... + def find(self, *args): ... # internal method + def find_above(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... + def find_all(self) -> tuple[_CanvasItemId, ...]: ... + def find_below(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... + def find_closest( + self, x: _ScreenUnits, y: _ScreenUnits, halo: _ScreenUnits | None = None, start: str | _CanvasItemId | None = None + ) -> tuple[_CanvasItemId, ...]: ... + def find_enclosed( + self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits + ) -> tuple[_CanvasItemId, ...]: ... + def find_overlapping(self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: float) -> tuple[_CanvasItemId, ...]: ... + def find_withtag(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... + # Incompatible with Misc.bbox(), tkinter violates LSP + def bbox(self, *args: str | _CanvasItemId) -> tuple[int, int, int, int]: ... # type: ignore[override] + @overload + def tag_bind( + self, + tagOrId: str | _CanvasItemId, + sequence: str | None = None, + func: Callable[[Event[Canvas]], object] | None = None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def tag_bind( + self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None + ) -> None: ... + @overload + def tag_bind(self, tagOrId: str | _CanvasItemId, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... 
+ def tag_unbind(self, tagOrId: str | _CanvasItemId, sequence: str, funcid: str | None = None) -> None: ... + def canvasx(self, screenx, gridspacing: Incomplete | None = None): ... + def canvasy(self, screeny, gridspacing: Incomplete | None = None): ... + @overload + def coords(self, __tagOrId: str | _CanvasItemId) -> list[float]: ... + @overload + def coords(self, __tagOrId: str | _CanvasItemId, __args: list[int] | list[float] | tuple[float, ...]) -> None: ... + @overload + def coords(self, __tagOrId: str | _CanvasItemId, __x1: float, __y1: float, *args: float) -> None: ... + # create_foo() methods accept coords as a list or tuple, or as separate arguments. + # Lists and tuples can be flat as in [1, 2, 3, 4], or nested as in [(1, 2), (3, 4)]. + # Keyword arguments should be the same in all overloads of each method. + def create_arc(self, *args, **kw) -> _CanvasItemId: ... + def create_bitmap(self, *args, **kw) -> _CanvasItemId: ... + def create_image(self, *args, **kw) -> _CanvasItemId: ... + @overload + def create_line( + self, + __x0: float, + __y0: float, + __x1: float, + __y1: float, + *, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + arrow: Literal["first", "last", "both"] = ..., + arrowshape: tuple[float, float, float] = ..., + capstyle: Literal["round", "projecting", "butt"] = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., + disabledfill: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: _ScreenUnits = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_line( + self, + __xy_pair_0: tuple[float, float], + __xy_pair_1: tuple[float, float], + *, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + arrow: Literal["first", "last", "both"] = ..., + arrowshape: tuple[float, float, float] = ..., + capstyle: Literal["round", "projecting", "butt"] = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., + disabledfill: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: _ScreenUnits = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_line( + self, + __coords: ( + tuple[float, float, float, float] + | tuple[tuple[float, float], tuple[float, float]] + | list[int] + | list[float] + | list[tuple[int, int]] + | list[tuple[float, float]] + ), + *, + activedash: str | list[int] | tuple[int, ...] 
= ..., + activefill: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + arrow: Literal["first", "last", "both"] = ..., + arrowshape: tuple[float, float, float] = ..., + capstyle: Literal["round", "projecting", "butt"] = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., + disabledfill: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: _ScreenUnits = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_oval( + self, + __x0: float, + __y0: float, + __x1: float, + __y1: float, + *, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activeoutline: _Color = ..., + activeoutlinestipple: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., + disabledfill: _Color = ..., + disabledoutline: _Color = ..., + disabledoutlinestipple: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + offset: _ScreenUnits = ..., + outline: _Color = ..., + outlineoffset: _ScreenUnits = ..., + outlinestipple: _Bitmap = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_oval( + self, + __xy_pair_0: tuple[float, float], + __xy_pair_1: tuple[float, float], + *, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activeoutline: _Color = ..., + activeoutlinestipple: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., + disabledfill: _Color = ..., + disabledoutline: _Color = ..., + disabledoutlinestipple: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + offset: _ScreenUnits = ..., + outline: _Color = ..., + outlineoffset: _ScreenUnits = ..., + outlinestipple: _Bitmap = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_oval( + self, + __coords: ( + tuple[float, float, float, float] + | tuple[tuple[float, float], tuple[float, float]] + | list[int] + | list[float] + | list[tuple[int, int]] + | list[tuple[float, float]] + ), + *, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activeoutline: _Color = ..., + activeoutlinestipple: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] 
= ..., + disabledfill: _Color = ..., + disabledoutline: _Color = ..., + disabledoutlinestipple: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + offset: _ScreenUnits = ..., + outline: _Color = ..., + outlineoffset: _ScreenUnits = ..., + outlinestipple: _Bitmap = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_polygon( + self, + __x0: float, + __y0: float, + __x1: float, + __y1: float, + *xy_pairs: float, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activeoutline: _Color = ..., + activeoutlinestipple: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., + disabledfill: _Color = ..., + disabledoutline: _Color = ..., + disabledoutlinestipple: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: _ScreenUnits = ..., + outline: _Color = ..., + outlineoffset: _ScreenUnits = ..., + outlinestipple: _Bitmap = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_polygon( + self, + __xy_pair_0: tuple[float, float], + __xy_pair_1: tuple[float, float], + *xy_pairs: tuple[float, float], + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activeoutline: _Color = ..., + activeoutlinestipple: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., + disabledfill: _Color = ..., + disabledoutline: _Color = ..., + disabledoutlinestipple: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: _ScreenUnits = ..., + outline: _Color = ..., + outlineoffset: _ScreenUnits = ..., + outlinestipple: _Bitmap = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_polygon( + self, + __coords: ( + tuple[float, ...] + | tuple[tuple[float, float], ...] + | list[int] + | list[float] + | list[tuple[int, int]] + | list[tuple[float, float]] + ), + *, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activeoutline: _Color = ..., + activeoutlinestipple: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] 
= ..., + disabledfill: _Color = ..., + disabledoutline: _Color = ..., + disabledoutlinestipple: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + joinstyle: Literal["round", "bevel", "miter"] = ..., + offset: _ScreenUnits = ..., + outline: _Color = ..., + outlineoffset: _ScreenUnits = ..., + outlinestipple: _Bitmap = ..., + smooth: bool = ..., + splinesteps: float = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_rectangle( + self, + __x0: float, + __y0: float, + __x1: float, + __y1: float, + *, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activeoutline: _Color = ..., + activeoutlinestipple: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., + disabledfill: _Color = ..., + disabledoutline: _Color = ..., + disabledoutlinestipple: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + offset: _ScreenUnits = ..., + outline: _Color = ..., + outlineoffset: _ScreenUnits = ..., + outlinestipple: _Bitmap = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_rectangle( + self, + __xy_pair_0: tuple[float, float], + __xy_pair_1: tuple[float, float], + *, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activeoutline: _Color = ..., + activeoutlinestipple: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., + disabledfill: _Color = ..., + disabledoutline: _Color = ..., + disabledoutlinestipple: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + offset: _ScreenUnits = ..., + outline: _Color = ..., + outlineoffset: _ScreenUnits = ..., + outlinestipple: _Bitmap = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_rectangle( + self, + __coords: ( + tuple[float, float, float, float] + | tuple[tuple[float, float], tuple[float, float]] + | list[int] + | list[float] + | list[tuple[int, int]] + | list[tuple[float, float]] + ), + *, + activedash: str | list[int] | tuple[int, ...] = ..., + activefill: _Color = ..., + activeoutline: _Color = ..., + activeoutlinestipple: _Color = ..., + activestipple: str = ..., + activewidth: _ScreenUnits = ..., + dash: str | list[int] | tuple[int, ...] = ..., + dashoffset: _ScreenUnits = ..., + disableddash: str | list[int] | tuple[int, ...] 
= ..., + disabledfill: _Color = ..., + disabledoutline: _Color = ..., + disabledoutlinestipple: _Color = ..., + disabledstipple: _Bitmap = ..., + disabledwidth: _ScreenUnits = ..., + fill: _Color = ..., + offset: _ScreenUnits = ..., + outline: _Color = ..., + outlineoffset: _ScreenUnits = ..., + outlinestipple: _Bitmap = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_text( + self, + __x: float, + __y: float, + *, + activefill: _Color = ..., + activestipple: str = ..., + anchor: _Anchor = ..., + disabledfill: _Color = ..., + disabledstipple: _Bitmap = ..., + fill: _Color = ..., + font: _FontDescription = ..., + justify: Literal["left", "center", "right"] = ..., + offset: _ScreenUnits = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + text: float | str = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_text( + self, + __coords: tuple[float, float] | list[int] | list[float], + *, + activefill: _Color = ..., + activestipple: str = ..., + anchor: _Anchor = ..., + disabledfill: _Color = ..., + disabledstipple: _Bitmap = ..., + fill: _Color = ..., + font: _FontDescription = ..., + justify: Literal["left", "center", "right"] = ..., + offset: _ScreenUnits = ..., + state: Literal["normal", "active", "disabled"] = ..., + stipple: _Bitmap = ..., + tags: str | list[str] | tuple[str, ...] = ..., + text: float | str = ..., + width: _ScreenUnits = ..., + ) -> _CanvasItemId: ... + @overload + def create_window( + self, + __x: float, + __y: float, + *, + anchor: _Anchor = ..., + height: _ScreenUnits = ..., + state: Literal["normal", "active", "disabled"] = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + window: Widget = ..., + ) -> _CanvasItemId: ... + @overload + def create_window( + self, + __coords: tuple[float, float] | list[int] | list[float], + *, + anchor: _Anchor = ..., + height: _ScreenUnits = ..., + state: Literal["normal", "active", "disabled"] = ..., + tags: str | list[str] | tuple[str, ...] = ..., + width: _ScreenUnits = ..., + window: Widget = ..., + ) -> _CanvasItemId: ... + def dchars(self, *args) -> None: ... + def delete(self, *tagsOrCanvasIds: str | _CanvasItemId) -> None: ... + @overload + def dtag(self, __tag: str, __tag_to_delete: str | None = ...) -> None: ... + @overload + def dtag(self, __id: _CanvasItemId, __tag_to_delete: str) -> None: ... + def focus(self, *args): ... + def gettags(self, __tagOrId: str | _CanvasItemId) -> tuple[str, ...]: ... + def icursor(self, *args) -> None: ... + def index(self, *args): ... + def insert(self, *args) -> None: ... + def itemcget(self, tagOrId, option): ... + # itemconfigure kwargs depend on item type, which is not known when type checking + def itemconfigure( + self, tagOrId: str | _CanvasItemId, cnf: dict[str, Any] | None = None, **kw: Any + ) -> dict[str, tuple[str, str, str, str, str]] | None: ... + itemconfig = itemconfigure + def move(self, *args) -> None: ... + if sys.version_info >= (3, 8): + def moveto(self, tagOrId: str | _CanvasItemId, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: ... + + def postscript(self, cnf=..., **kw): ... 
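+    # The create_*() coordinate forms described above (flat or nested, list
+    # or tuple, or separate arguments) are interchangeable at runtime; a
+    # rough sketch, with illustrative item ids:
+    #
+    # >>> import tkinter
+    # >>> canvas = tkinter.Canvas(tkinter.Tk())
+    # >>> canvas.create_line(10, 10, 50, 50)
+    # 1
+    # >>> canvas.create_line([(10, 10), (50, 50)])
+    # 2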
+ # tkinter does: + # lower = tag_lower + # lift = tkraise = tag_raise + # + # But mypy doesn't like aliasing here (maybe because Misc defines the same names) + def tag_lower(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... + def lower(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] + def tag_raise(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... + def tkraise(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] + def lift(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] + def scale(self, *args) -> None: ... + def scan_mark(self, x, y) -> None: ... + def scan_dragto(self, x, y, gain: int = 10) -> None: ... + def select_adjust(self, tagOrId, index) -> None: ... + def select_clear(self) -> None: ... + def select_from(self, tagOrId, index) -> None: ... + def select_item(self): ... + def select_to(self, tagOrId, index) -> None: ... + def type(self, tagOrId): ... + +class Checkbutton(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + command: _ButtonCommand = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + offrelief: _Relief = ..., + # The checkbutton puts a value to its variable when it's checked or + # unchecked. We don't restrict the type of that value here, so + # Any-typing is fine. + # + # I think Checkbutton shouldn't be generic, because then specifying + # "any checkbutton regardless of what variable it uses" would be + # difficult, and we might run into issues just like how list[float] + # and list[int] are incompatible. Also, we would need a way to + # specify "Checkbutton not associated with any variable", which is + # done by setting variable to empty string (the default). + offvalue: Any = ..., + onvalue: Any = ..., + overrelief: _Relief = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + selectcolor: _Color = ..., + selectimage: _ImageSpec = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + tristateimage: _ImageSpec = ..., + tristatevalue: Any = ..., + underline: int = ..., + variable: Variable | Literal[""] = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> None: ... 
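+    # As the onvalue/offvalue comment above notes, the associated variable
+    # can hold values of any type; a minimal sketch:
+    #
+    # >>> import tkinter
+    # >>> root = tkinter.Tk()
+    # >>> var = tkinter.StringVar()
+    # >>> cb = tkinter.Checkbutton(root, variable=var, onvalue="yes", offvalue="no")
+    # >>> cb.select()
+    # >>> var.get()
+    # 'yes'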
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + command: _ButtonCommand = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + offrelief: _Relief = ..., + offvalue: Any = ..., + onvalue: Any = ..., + overrelief: _Relief = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + selectcolor: _Color = ..., + selectimage: _ImageSpec = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + tristateimage: _ImageSpec = ..., + tristatevalue: Any = ..., + underline: int = ..., + variable: Variable | Literal[""] = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def deselect(self) -> None: ... + def flash(self) -> None: ... + def invoke(self) -> Any: ... + def select(self) -> None: ... + def toggle(self) -> None: ... + +_EntryIndex: TypeAlias = str | int # "INDICES" in manual page + +class Entry(Widget, XView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + disabledbackground: _Color = ..., + disabledforeground: _Color = ..., + exportselection: bool = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + insertbackground: _Color = ..., + insertborderwidth: _ScreenUnits = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertwidth: _ScreenUnits = ..., + invalidcommand: _EntryValidateCommand = ..., + invcmd: _EntryValidateCommand = ..., # same as invalidcommand + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + readonlybackground: _Color = ..., + relief: _Relief = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + show: str = ..., + state: Literal["normal", "disabled", "readonly"] = ..., + takefocus: _TakeFocusValue = ..., + textvariable: Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: _EntryValidateCommand = ..., + vcmd: _EntryValidateCommand = ..., # same as validatecommand + width: int = ..., + xscrollcommand: _XYScrollCommand = ..., + ) -> None: ... 
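+    # _EntryIndex values can be plain ints or the index strings from the
+    # manual page ("end", "insert", "anchor", ...); roughly:
+    #
+    # >>> import tkinter
+    # >>> entry = tkinter.Entry(tkinter.Tk())
+    # >>> entry.insert(0, "hello")
+    # >>> entry.insert("end", " world")
+    # >>> entry.get()
+    # 'hello world'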
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + disabledbackground: _Color = ..., + disabledforeground: _Color = ..., + exportselection: bool = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + insertbackground: _Color = ..., + insertborderwidth: _ScreenUnits = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertwidth: _ScreenUnits = ..., + invalidcommand: _EntryValidateCommand = ..., + invcmd: _EntryValidateCommand = ..., + justify: Literal["left", "center", "right"] = ..., + readonlybackground: _Color = ..., + relief: _Relief = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + show: str = ..., + state: Literal["normal", "disabled", "readonly"] = ..., + takefocus: _TakeFocusValue = ..., + textvariable: Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: _EntryValidateCommand = ..., + vcmd: _EntryValidateCommand = ..., + width: int = ..., + xscrollcommand: _XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def delete(self, first: _EntryIndex, last: _EntryIndex | None = None) -> None: ... + def get(self) -> str: ... + def icursor(self, index: _EntryIndex) -> None: ... + def index(self, index: _EntryIndex) -> int: ... + def insert(self, index: _EntryIndex, string: str) -> None: ... + def scan_mark(self, x) -> None: ... + def scan_dragto(self, x) -> None: ... + def selection_adjust(self, index: _EntryIndex) -> None: ... + def selection_clear(self) -> None: ... # type: ignore[override] + def selection_from(self, index: _EntryIndex) -> None: ... + def selection_present(self) -> bool: ... + def selection_range(self, start: _EntryIndex, end: _EntryIndex) -> None: ... + def selection_to(self, index: _EntryIndex) -> None: ... + select_adjust = selection_adjust + select_clear = selection_clear + select_from = selection_from + select_present = selection_present + select_range = selection_range + select_to = selection_to + +class Frame(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + class_: str = ..., # can't be changed with configure() + colormap: Literal["new", ""] | Misc = ..., # can't be changed with configure() + container: bool = ..., # can't be changed with configure() + cursor: _Cursor = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + name: str = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + takefocus: _TakeFocusValue = ..., + visual: str | tuple[str, int] = ..., # can't be changed with configure() + width: _ScreenUnits = ..., + ) -> None: ... 
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + takefocus: _TakeFocusValue = ..., + width: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Label(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + underline: int = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + justify: Literal["left", "center", "right"] = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + underline: int = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
+ config = configure + +class Listbox(Widget, XView, YView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activestyle: Literal["dotbox", "none", "underline"] = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + exportselection: int = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: int = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + justify: Literal["left", "center", "right"] = ..., + # There's no tkinter.ListVar, but seems like bare tkinter.Variable + # actually works for this: + # + # >>> import tkinter + # >>> lb = tkinter.Listbox() + # >>> var = lb['listvariable'] = tkinter.Variable() + # >>> var.set(['foo', 'bar', 'baz']) + # >>> lb.get(0, 'end') + # ('foo', 'bar', 'baz') + listvariable: Variable = ..., + name: str = ..., + relief: _Relief = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + # from listbox man page: "The value of the [selectmode] option may be + # arbitrary, but the default bindings expect it to be ..." + # + # I have never seen anyone setting this to something else than what + # "the default bindings expect", but let's support it anyway. + selectmode: str = ..., + setgrid: bool = ..., + state: Literal["normal", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + width: int = ..., + xscrollcommand: _XYScrollCommand = ..., + yscrollcommand: _XYScrollCommand = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activestyle: Literal["dotbox", "none", "underline"] = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + exportselection: bool = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: int = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + justify: Literal["left", "center", "right"] = ..., + listvariable: Variable = ..., + relief: _Relief = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + selectmode: str = ..., + setgrid: bool = ..., + state: Literal["normal", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + width: int = ..., + xscrollcommand: _XYScrollCommand = ..., + yscrollcommand: _XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def activate(self, index: str | int) -> None: ... + def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def curselection(self): ... + def delete(self, first: str | int, last: str | int | None = None) -> None: ... + def get(self, first: str | int, last: str | int | None = None): ... + def index(self, index: str | int) -> int: ... + def insert(self, index: str | int, *elements: str | float) -> None: ... + def nearest(self, y): ... + def scan_mark(self, x, y) -> None: ... + def scan_dragto(self, x, y) -> None: ... + def see(self, index: str | int) -> None: ... 
+ def selection_anchor(self, index: str | int) -> None: ... + select_anchor = selection_anchor + def selection_clear(self, first: str | int, last: str | int | None = None) -> None: ... # type: ignore[override] + select_clear = selection_clear + def selection_includes(self, index: str | int): ... + select_includes = selection_includes + def selection_set(self, first: str | int, last: str | int | None = None) -> None: ... + select_set = selection_set + def size(self) -> int: ... # type: ignore[override] + def itemcget(self, index: str | int, option): ... + def itemconfigure(self, index: str | int, cnf: Incomplete | None = None, **kw): ... + itemconfig = itemconfigure + +class Menu(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activebackground: _Color = ..., + activeborderwidth: _ScreenUnits = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + name: str = ..., + postcommand: Callable[[], object] | str = ..., + relief: _Relief = ..., + selectcolor: _Color = ..., + takefocus: _TakeFocusValue = ..., + tearoff: int = ..., + # I guess tearoffcommand arguments are supposed to be widget objects, + # but they are widget name strings. Use nametowidget() to handle the + # arguments of tearoffcommand. + tearoffcommand: Callable[[str, str], object] | str = ..., + title: str = ..., + type: Literal["menubar", "tearoff", "normal"] = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: _Color = ..., + activeborderwidth: _ScreenUnits = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + postcommand: Callable[[], object] | str = ..., + relief: _Relief = ..., + selectcolor: _Color = ..., + takefocus: _TakeFocusValue = ..., + tearoff: bool = ..., + tearoffcommand: Callable[[str, str], object] | str = ..., + title: str = ..., + type: Literal["menubar", "tearoff", "normal"] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: ... + def activate(self, index: str | int) -> None: ... + def add(self, itemType, cnf=..., **kw): ... # docstring says "Internal function." + def insert(self, index, itemType, cnf=..., **kw): ... # docstring says "Internal function." + def add_cascade( + self, + cnf: dict[str, Any] | None = ..., + *, + accelerator: str = ..., + activebackground: _Color = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bitmap: _Bitmap = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: _Compound = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + hidemargin: bool = ..., + image: _ImageSpec = ..., + label: str = ..., + menu: Menu = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + ) -> None: ... 
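+    # As the tearoffcommand comment above says, the callback receives widget
+    # name strings; nametowidget() maps them back to widget objects. A rough
+    # sketch (the callback name is illustrative):
+    #
+    # >>> import tkinter
+    # >>> root = tkinter.Tk()
+    # >>> def on_tearoff(menu_name: str, tearoff_name: str) -> None:
+    # ...     menu = root.nametowidget(menu_name)
+    # ...     print("torn off:", menu)
+    # >>> menubar = tkinter.Menu(root, tearoffcommand=on_tearoff)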
+ def add_checkbutton( + self, + cnf: dict[str, Any] | None = ..., + *, + accelerator: str = ..., + activebackground: _Color = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bitmap: _Bitmap = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: _Compound = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + hidemargin: bool = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + label: str = ..., + offvalue: Any = ..., + onvalue: Any = ..., + selectcolor: _Color = ..., + selectimage: _ImageSpec = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + variable: Variable = ..., + ) -> None: ... + def add_command( + self, + cnf: dict[str, Any] | None = ..., + *, + accelerator: str = ..., + activebackground: _Color = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bitmap: _Bitmap = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: _Compound = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + hidemargin: bool = ..., + image: _ImageSpec = ..., + label: str = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + ) -> None: ... + def add_radiobutton( + self, + cnf: dict[str, Any] | None = ..., + *, + accelerator: str = ..., + activebackground: _Color = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bitmap: _Bitmap = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: _Compound = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + hidemargin: bool = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + label: str = ..., + selectcolor: _Color = ..., + selectimage: _ImageSpec = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + value: Any = ..., + variable: Variable = ..., + ) -> None: ... + def add_separator(self, cnf: dict[str, Any] | None = ..., *, background: _Color = ...) -> None: ... + def insert_cascade( + self, + index: str | int, + cnf: dict[str, Any] | None = ..., + *, + accelerator: str = ..., + activebackground: _Color = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bitmap: _Bitmap = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: _Compound = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + hidemargin: bool = ..., + image: _ImageSpec = ..., + label: str = ..., + menu: Menu = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + ) -> None: ... + def insert_checkbutton( + self, + index: str | int, + cnf: dict[str, Any] | None = ..., + *, + accelerator: str = ..., + activebackground: _Color = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bitmap: _Bitmap = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: _Compound = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + hidemargin: bool = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + label: str = ..., + offvalue: Any = ..., + onvalue: Any = ..., + selectcolor: _Color = ..., + selectimage: _ImageSpec = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + variable: Variable = ..., + ) -> None: ... 
+ def insert_command( + self, + index: str | int, + cnf: dict[str, Any] | None = ..., + *, + accelerator: str = ..., + activebackground: _Color = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bitmap: _Bitmap = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: _Compound = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + hidemargin: bool = ..., + image: _ImageSpec = ..., + label: str = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + ) -> None: ... + def insert_radiobutton( + self, + index: str | int, + cnf: dict[str, Any] | None = ..., + *, + accelerator: str = ..., + activebackground: _Color = ..., + activeforeground: _Color = ..., + background: _Color = ..., + bitmap: _Bitmap = ..., + columnbreak: int = ..., + command: Callable[[], object] | str = ..., + compound: _Compound = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + hidemargin: bool = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + label: str = ..., + selectcolor: _Color = ..., + selectimage: _ImageSpec = ..., + state: Literal["normal", "active", "disabled"] = ..., + underline: int = ..., + value: Any = ..., + variable: Variable = ..., + ) -> None: ... + def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = ..., *, background: _Color = ...) -> None: ... + def delete(self, index1: str | int, index2: str | int | None = None) -> None: ... + def entrycget(self, index: str | int, option: str) -> Any: ... + def entryconfigure( + self, index: str | int, cnf: dict[str, Any] | None = None, **kw: Any + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + entryconfig = entryconfigure + def index(self, index: str | int) -> int | None: ... + def invoke(self, index: str | int) -> Any: ... + def post(self, x: int, y: int) -> None: ... + def type(self, index: str | int) -> Literal["cascade", "checkbutton", "command", "radiobutton", "separator"]: ... + def unpost(self) -> None: ... + def xposition(self, index: str | int) -> int: ... + def yposition(self, index: str | int) -> int: ... + +class Menubutton(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + direction: Literal["above", "below", "left", "right", "flush"] = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + menu: Menu = ..., + name: str = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + underline: int = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> None: ... 
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + direction: Literal["above", "below", "left", "right", "flush"] = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + menu: Menu = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + underline: int = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Message(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + anchor: _Anchor = ..., + aspect: int = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + # there's width but no height + width: _ScreenUnits = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + anchor: _Anchor = ..., + aspect: int = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + justify: Literal["left", "center", "right"] = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + width: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
+ config = configure + +class Radiobutton(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + command: _ButtonCommand = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + offrelief: _Relief = ..., + overrelief: _Relief = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + selectcolor: _Color = ..., + selectimage: _ImageSpec = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + tristateimage: _ImageSpec = ..., + tristatevalue: Any = ..., + underline: int = ..., + value: Any = ..., + variable: Variable | Literal[""] = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: _Color = ..., + activeforeground: _Color = ..., + anchor: _Anchor = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bitmap: _Bitmap = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + command: _ButtonCommand = ..., + compound: _Compound = ..., + cursor: _Cursor = ..., + disabledforeground: _Color = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + image: _ImageSpec = ..., + indicatoron: bool = ..., + justify: Literal["left", "center", "right"] = ..., + offrelief: _Relief = ..., + overrelief: _Relief = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + selectcolor: _Color = ..., + selectimage: _ImageSpec = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + textvariable: Variable = ..., + tristateimage: _ImageSpec = ..., + tristatevalue: Any = ..., + underline: int = ..., + value: Any = ..., + variable: Variable | Literal[""] = ..., + width: _ScreenUnits = ..., + wraplength: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def deselect(self) -> None: ... + def flash(self) -> None: ... + def invoke(self) -> Any: ... + def select(self) -> None: ... 
+ +class Scale(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activebackground: _Color = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bigincrement: float = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + # don't know why the callback gets string instead of float + command: str | Callable[[str], object] = ..., + cursor: _Cursor = ..., + digits: int = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + from_: float = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + label: str = ..., + length: _ScreenUnits = ..., + name: str = ..., + orient: Literal["horizontal", "vertical"] = ..., + relief: _Relief = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + resolution: float = ..., + showvalue: bool = ..., + sliderlength: _ScreenUnits = ..., + sliderrelief: _Relief = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + tickinterval: float = ..., + to: float = ..., + troughcolor: _Color = ..., + variable: IntVar | DoubleVar = ..., + width: _ScreenUnits = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: _Color = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + bigincrement: float = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + command: str | Callable[[str], object] = ..., + cursor: _Cursor = ..., + digits: int = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + from_: float = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + label: str = ..., + length: _ScreenUnits = ..., + orient: Literal["horizontal", "vertical"] = ..., + relief: _Relief = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + resolution: float = ..., + showvalue: bool = ..., + sliderlength: _ScreenUnits = ..., + sliderrelief: _Relief = ..., + state: Literal["normal", "active", "disabled"] = ..., + takefocus: _TakeFocusValue = ..., + tickinterval: float = ..., + to: float = ..., + troughcolor: _Color = ..., + variable: IntVar | DoubleVar = ..., + width: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def get(self) -> float: ... + def set(self, value) -> None: ... + def coords(self, value: float | None = None) -> tuple[int, int]: ... + def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: ... + +class Scrollbar(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activebackground: _Color = ..., + activerelief: _Relief = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + # There are many ways how the command may get called. Search for + # 'SCROLLING COMMANDS' in scrollbar man page. There doesn't seem to + # be any way to specify an overloaded callback function, so we say + # that it can take any args while it can't in reality. 
+ command: Callable[..., tuple[float, float] | None] | str = ..., + cursor: _Cursor = ..., + elementborderwidth: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + jump: bool = ..., + name: str = ..., + orient: Literal["horizontal", "vertical"] = ..., + relief: _Relief = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + takefocus: _TakeFocusValue = ..., + troughcolor: _Color = ..., + width: _ScreenUnits = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: _Color = ..., + activerelief: _Relief = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + command: Callable[..., tuple[float, float] | None] | str = ..., + cursor: _Cursor = ..., + elementborderwidth: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + jump: bool = ..., + orient: Literal["horizontal", "vertical"] = ..., + relief: _Relief = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + takefocus: _TakeFocusValue = ..., + troughcolor: _Color = ..., + width: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def activate(self, index: Incomplete | None = None): ... + def delta(self, deltax: int, deltay: int) -> float: ... + def fraction(self, x: int, y: int) -> float: ... + def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: ... + def get(self) -> tuple[float, float, float, float] | tuple[float, float]: ... + def set(self, first: float, last: float) -> None: ... + +_TextIndex: TypeAlias = _tkinter.Tcl_Obj | str | float | Misc + +class Text(Widget, XView, YView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + autoseparators: bool = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + blockcursor: bool = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + endline: int | Literal[""] = ..., + exportselection: bool = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + # width is always int, but height is allowed to be ScreenUnits. + # This doesn't make any sense to me, and this isn't documented. + # The docs seem to say that both should be integers. 
+ height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + inactiveselectbackground: _Color = ..., + insertbackground: _Color = ..., + insertborderwidth: _ScreenUnits = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertunfocussed: Literal["none", "hollow", "solid"] = ..., + insertwidth: _ScreenUnits = ..., + maxundo: int = ..., + name: str = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + setgrid: bool = ..., + spacing1: _ScreenUnits = ..., + spacing2: _ScreenUnits = ..., + spacing3: _ScreenUnits = ..., + startline: int | Literal[""] = ..., + state: Literal["normal", "disabled"] = ..., + # Literal inside Tuple doesn't actually work + tabs: _ScreenUnits | str | tuple[_ScreenUnits | str, ...] = ..., + tabstyle: Literal["tabular", "wordprocessor"] = ..., + takefocus: _TakeFocusValue = ..., + undo: bool = ..., + width: int = ..., + wrap: Literal["none", "char", "word"] = ..., + xscrollcommand: _XYScrollCommand = ..., + yscrollcommand: _XYScrollCommand = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + autoseparators: bool = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + blockcursor: bool = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + endline: int | Literal[""] = ..., + exportselection: bool = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + inactiveselectbackground: _Color = ..., + insertbackground: _Color = ..., + insertborderwidth: _ScreenUnits = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertunfocussed: Literal["none", "hollow", "solid"] = ..., + insertwidth: _ScreenUnits = ..., + maxundo: int = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + setgrid: bool = ..., + spacing1: _ScreenUnits = ..., + spacing2: _ScreenUnits = ..., + spacing3: _ScreenUnits = ..., + startline: int | Literal[""] = ..., + state: Literal["normal", "disabled"] = ..., + tabs: _ScreenUnits | str | tuple[_ScreenUnits | str, ...] = ..., + tabstyle: Literal["tabular", "wordprocessor"] = ..., + takefocus: _TakeFocusValue = ..., + undo: bool = ..., + width: int = ..., + wrap: Literal["none", "char", "word"] = ..., + xscrollcommand: _XYScrollCommand = ..., + yscrollcommand: _XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def bbox(self, index: _TextIndex) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def compare(self, index1: _TextIndex, op: Literal["<", "<=", "==", ">=", ">", "!="], index2: _TextIndex) -> bool: ... + def count(self, index1, index2, *args): ... # TODO + @overload + def debug(self, boolean: None = None) -> bool: ... + @overload + def debug(self, boolean: bool) -> None: ... + def delete(self, index1: _TextIndex, index2: _TextIndex | None = None) -> None: ... 
+ def dlineinfo(self, index: _TextIndex) -> tuple[int, int, int, int, int] | None: ... + @overload + def dump( + self, + index1: _TextIndex, + index2: _TextIndex | None = None, + command: None = None, + *, + all: bool = ..., + image: bool = ..., + mark: bool = ..., + tag: bool = ..., + text: bool = ..., + window: bool = ..., + ) -> list[tuple[str, str, str]]: ... + @overload + def dump( + self, + index1: _TextIndex, + index2: _TextIndex | None, + command: Callable[[str, str, str], object] | str, + *, + all: bool = ..., + image: bool = ..., + mark: bool = ..., + tag: bool = ..., + text: bool = ..., + window: bool = ..., + ) -> None: ... + @overload + def dump( + self, + index1: _TextIndex, + index2: _TextIndex | None = None, + *, + command: Callable[[str, str, str], object] | str, + all: bool = ..., + image: bool = ..., + mark: bool = ..., + tag: bool = ..., + text: bool = ..., + window: bool = ..., + ) -> None: ... + def edit(self, *args): ... # docstring says "Internal method" + @overload + def edit_modified(self, arg: None = None) -> bool: ... # actually returns Literal[0, 1] + @overload + def edit_modified(self, arg: bool) -> None: ... # actually returns empty string + def edit_redo(self) -> None: ... # actually returns empty string + def edit_reset(self) -> None: ... # actually returns empty string + def edit_separator(self) -> None: ... # actually returns empty string + def edit_undo(self) -> None: ... # actually returns empty string + def get(self, index1: _TextIndex, index2: _TextIndex | None = None) -> str: ... + # TODO: image_* methods + def image_cget(self, index, option): ... + def image_configure(self, index, cnf: Incomplete | None = None, **kw): ... + def image_create(self, index, cnf=..., **kw): ... + def image_names(self): ... + def index(self, index: _TextIndex) -> str: ... + def insert(self, index: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: ... + @overload + def mark_gravity(self, markName: str, direction: None = None) -> Literal["left", "right"]: ... + @overload + def mark_gravity(self, markName: str, direction: Literal["left", "right"]) -> None: ... # actually returns empty string + def mark_names(self) -> tuple[str, ...]: ... + def mark_set(self, markName: str, index: _TextIndex) -> None: ... + def mark_unset(self, *markNames: str) -> None: ... + def mark_next(self, index: _TextIndex) -> str | None: ... + def mark_previous(self, index: _TextIndex) -> str | None: ... + # **kw of peer_create is same as the kwargs of Text.__init__ + def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = ..., **kw) -> None: ... + def peer_names(self) -> tuple[_tkinter.Tcl_Obj, ...]: ... + def replace(self, index1: _TextIndex, index2: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: ... + def scan_mark(self, x: int, y: int) -> None: ... + def scan_dragto(self, x: int, y: int) -> None: ... + def search( + self, + pattern: str, + index: _TextIndex, + stopindex: _TextIndex | None = None, + forwards: bool | None = None, + backwards: bool | None = None, + exact: bool | None = None, + regexp: bool | None = None, + nocase: bool | None = None, + count: Variable | None = None, + elide: bool | None = None, + ) -> str: ... # returns empty string for not found + def see(self, index: _TextIndex) -> None: ... + def tag_add(self, tagName: str, index1: _TextIndex, *args: _TextIndex) -> None: ... 
+ # tag_bind stuff is very similar to Canvas + @overload + def tag_bind( + self, + tagName: str, + sequence: str | None, + func: Callable[[Event[Text]], object] | None, + add: Literal["", "+"] | bool | None = None, + ) -> str: ... + @overload + def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = None) -> None: ... + # allowing any string for cget instead of just Literals because there's no other way to look up tag options + def tag_cget(self, tagName: str, option: str): ... + @overload + def tag_configure( + self, + tagName: str, + cnf: dict[str, Any] | None = None, + *, + background: _Color = ..., + bgstipple: _Bitmap = ..., + borderwidth: _ScreenUnits = ..., + border: _ScreenUnits = ..., # alias for borderwidth + elide: bool = ..., + fgstipple: _Bitmap = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + justify: Literal["left", "right", "center"] = ..., + lmargin1: _ScreenUnits = ..., + lmargin2: _ScreenUnits = ..., + lmargincolor: _Color = ..., + offset: _ScreenUnits = ..., + overstrike: bool = ..., + overstrikefg: _Color = ..., + relief: _Relief = ..., + rmargin: _ScreenUnits = ..., + rmargincolor: _Color = ..., + selectbackground: _Color = ..., + selectforeground: _Color = ..., + spacing1: _ScreenUnits = ..., + spacing2: _ScreenUnits = ..., + spacing3: _ScreenUnits = ..., + tabs: Any = ..., # the exact type is kind of complicated, see manual page + tabstyle: Literal["tabular", "wordprocessor"] = ..., + underline: bool = ..., + underlinefg: _Color = ..., + wrap: Literal["none", "char", "word"] = ..., # be careful with "none" vs None + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def tag_configure(self, tagName: str, cnf: str) -> tuple[str, str, str, Any, Any]: ... + tag_config = tag_configure + def tag_delete(self, __first_tag_name: str, *tagNames: str) -> None: ... # error if no tag names given + def tag_lower(self, tagName: str, belowThis: str | None = None) -> None: ... + def tag_names(self, index: _TextIndex | None = None) -> tuple[str, ...]: ... + def tag_nextrange( + self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None + ) -> tuple[str, str] | tuple[()]: ... + def tag_prevrange( + self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None + ) -> tuple[str, str] | tuple[()]: ... + def tag_raise(self, tagName: str, aboveThis: str | None = None) -> None: ... + def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: ... + # tag_remove and tag_delete are different + def tag_remove(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None) -> None: ... + # TODO: window_* methods + def window_cget(self, index, option): ... + def window_configure(self, index, cnf: Incomplete | None = None, **kw): ... + window_config = window_configure + def window_create(self, index, cnf=..., **kw) -> None: ... + def window_names(self): ... + def yview_pickplace(self, *what): ... # deprecated + +class _setit: + def __init__(self, var, value, callback: Incomplete | None = None) -> None: ... + def __call__(self, *args) -> None: ... 
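+
+# A hedged sketch, not part of the stubs themselves, of what _setit does at
+# runtime (CPython's name-mangled attributes simplified); it is how OptionMenu
+# below wires each menu entry to its variable:
+#
+#     def __call__(self, *args):
+#         self.var.set(self.value)
+#         if self.callback:
+#             self.callback(self.value, *args)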
+
+# manual page: tk_optionMenu
+class OptionMenu(Menubutton):
+    widgetName: Incomplete
+    menuname: Incomplete
+    def __init__(
+        # differs from other widgets
+        self,
+        master: Misc | None,
+        variable: StringVar,
+        value: str,
+        *values: str,
+        # kwarg only from now on
+        command: Callable[[StringVar], object] | None = ...,
+    ) -> None: ...
+    # configure, config, cget are inherited from Menubutton
+    # destroy and __getitem__ are overridden, signature does not change
+
+# Marker to indicate that it is a valid bitmap/photo image. PIL implements compatible versions
+# which don't share a class hierarchy. The actual API is a __str__() which returns a valid name,
+# not something that type checkers can detect.
+@type_check_only
+class _Image: ...
+
+@type_check_only
+class _BitmapImageLike(_Image): ...
+
+@type_check_only
+class _PhotoImageLike(_Image): ...
+
+class Image(_Image):
+    name: Incomplete
+    tk: _tkinter.TkappType
+    def __init__(
+        self, imgtype, name: Incomplete | None = None, cnf=..., master: Misc | _tkinter.TkappType | None = None, **kw
+    ) -> None: ...
+    def __del__(self) -> None: ...
+    def __setitem__(self, key, value) -> None: ...
+    def __getitem__(self, key): ...
+    configure: Incomplete
+    config: Incomplete
+    def height(self) -> int: ...
+    def type(self): ...
+    def width(self) -> int: ...
+
+class PhotoImage(Image, _PhotoImageLike):
+    # This should be kept in sync with PIL.ImageTK.PhotoImage.__init__()
+    def __init__(
+        self,
+        name: str | None = None,
+        cnf: dict[str, Any] = ...,
+        master: Misc | _tkinter.TkappType | None = None,
+        *,
+        data: str | bytes = ...,  # not same as data argument of put()
+        format: str = ...,
+        file: StrOrBytesPath = ...,
+        gamma: float = ...,
+        height: int = ...,
+        palette: int | str = ...,
+        width: int = ...,
+    ) -> None: ...
+    def configure(
+        self,
+        *,
+        data: str | bytes = ...,
+        format: str = ...,
+        file: StrOrBytesPath = ...,
+        gamma: float = ...,
+        height: int = ...,
+        palette: int | str = ...,
+        width: int = ...,
+    ) -> None: ...
+    config = configure
+    def blank(self) -> None: ...
+    def cget(self, option: str) -> str: ...
+    def __getitem__(self, key: str) -> str: ...  # always string: image['height'] can be '0'
+    def copy(self) -> PhotoImage: ...
+    def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ...
+    def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ...
+    def get(self, x: int, y: int) -> tuple[int, int, int]: ...
+    def put(
+        self,
+        data: (
+            str
+            | list[str]
+            | list[list[_Color]]
+            | list[tuple[_Color, ...]]
+            | tuple[str, ...]
+            | tuple[list[_Color], ...]
+            | tuple[tuple[_Color, ...], ...]
+        ),
+        to: tuple[int, int] | None = None,
+    ) -> None: ...
+    def write(self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None) -> None: ...
+    if sys.version_info >= (3, 8):
+        def transparency_get(self, x: int, y: int) -> bool: ...
+        def transparency_set(self, x: int, y: int, boolean: bool) -> None: ...
+
+class BitmapImage(Image, _BitmapImageLike):
+    # This should be kept in sync with PIL.ImageTK.BitmapImage.__init__()
+    def __init__(
+        self,
+        name: Incomplete | None = None,
+        cnf: dict[str, Any] = ...,
+        master: Misc | _tkinter.TkappType | None = None,
+        *,
+        background: _Color = ...,
+        data: str | bytes = ...,
+        file: StrOrBytesPath = ...,
+        foreground: _Color = ...,
+        maskdata: str = ...,
+        maskfile: StrOrBytesPath = ...,
+    ) -> None: ...
+
+def image_names() -> tuple[str, ...]: ...
+def image_types() -> tuple[str, ...]: ...
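+
+# A hedged usage note, not part of the stubs themselves: Image.__del__ above
+# deletes the underlying Tk image, so callers should keep a Python reference
+# alive for as long as a widget displays it; the names are hypothetical:
+#
+#     logo = PhotoImage(file="logo.png")
+#     label = Label(root, image=logo)
+#     label.image = logo  # keep a reference so the image is not garbage collected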
+ +class Spinbox(Widget, XView): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + activebackground: _Color = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + buttonbackground: _Color = ..., + buttoncursor: _Cursor = ..., + buttondownrelief: _Relief = ..., + buttonuprelief: _Relief = ..., + # percent substitutions don't seem to be supported, it's similar to Entry's validation stuff + command: Callable[[], object] | str | list[str] | tuple[str, ...] = ..., + cursor: _Cursor = ..., + disabledbackground: _Color = ..., + disabledforeground: _Color = ..., + exportselection: bool = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + format: str = ..., + from_: float = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + increment: float = ..., + insertbackground: _Color = ..., + insertborderwidth: _ScreenUnits = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertwidth: _ScreenUnits = ..., + invalidcommand: _EntryValidateCommand = ..., + invcmd: _EntryValidateCommand = ..., + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + readonlybackground: _Color = ..., + relief: _Relief = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + state: Literal["normal", "disabled", "readonly"] = ..., + takefocus: _TakeFocusValue = ..., + textvariable: Variable = ..., + to: float = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: _EntryValidateCommand = ..., + vcmd: _EntryValidateCommand = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + wrap: bool = ..., + xscrollcommand: _XYScrollCommand = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + activebackground: _Color = ..., + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + buttonbackground: _Color = ..., + buttoncursor: _Cursor = ..., + buttondownrelief: _Relief = ..., + buttonuprelief: _Relief = ..., + command: Callable[[], object] | str | list[str] | tuple[str, ...] 
= ..., + cursor: _Cursor = ..., + disabledbackground: _Color = ..., + disabledforeground: _Color = ..., + exportselection: bool = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + format: str = ..., + from_: float = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + increment: float = ..., + insertbackground: _Color = ..., + insertborderwidth: _ScreenUnits = ..., + insertofftime: int = ..., + insertontime: int = ..., + insertwidth: _ScreenUnits = ..., + invalidcommand: _EntryValidateCommand = ..., + invcmd: _EntryValidateCommand = ..., + justify: Literal["left", "center", "right"] = ..., + readonlybackground: _Color = ..., + relief: _Relief = ..., + repeatdelay: int = ..., + repeatinterval: int = ..., + selectbackground: _Color = ..., + selectborderwidth: _ScreenUnits = ..., + selectforeground: _Color = ..., + state: Literal["normal", "disabled", "readonly"] = ..., + takefocus: _TakeFocusValue = ..., + textvariable: Variable = ..., + to: float = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: _EntryValidateCommand = ..., + vcmd: _EntryValidateCommand = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + wrap: bool = ..., + xscrollcommand: _XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def bbox(self, index) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def delete(self, first, last: Incomplete | None = None) -> Literal[""]: ... + def get(self) -> str: ... + def icursor(self, index): ... + def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: ... + def index(self, index: _EntryIndex) -> int: ... + def insert(self, index: _EntryIndex, s: str) -> Literal[""]: ... + # spinbox.invoke("asdf") gives error mentioning .invoke("none"), but it's not documented + def invoke(self, element: Literal["none", "buttonup", "buttondown"]) -> Literal[""]: ... + def scan(self, *args): ... + def scan_mark(self, x): ... + def scan_dragto(self, x): ... + def selection(self, *args) -> tuple[int, ...]: ... + def selection_adjust(self, index): ... + def selection_clear(self): ... + def selection_element(self, element: Incomplete | None = None): ... + if sys.version_info >= (3, 8): + def selection_from(self, index: int) -> None: ... + def selection_present(self) -> None: ... + def selection_range(self, start: int, end: int) -> None: ... + def selection_to(self, index: int) -> None: ... + +class LabelFrame(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + class_: str = ..., # can't be changed with configure() + colormap: Literal["new", ""] | Misc = ..., # can't be changed with configure() + container: bool = ..., # undocumented, can't be changed with configure() + cursor: _Cursor = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + # 'ne' and 'en' are valid labelanchors, but only 'ne' is a valid _Anchor. 
+ labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = ..., + labelwidget: Misc = ..., + name: str = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + visual: str | tuple[str, int] = ..., # can't be changed with configure() + width: _ScreenUnits = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + fg: _Color = ..., + font: _FontDescription = ..., + foreground: _Color = ..., + height: _ScreenUnits = ..., + highlightbackground: _Color = ..., + highlightcolor: _Color = ..., + highlightthickness: _ScreenUnits = ..., + labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = ..., + labelwidget: Misc = ..., + padx: _ScreenUnits = ..., + pady: _ScreenUnits = ..., + relief: _Relief = ..., + takefocus: _TakeFocusValue = ..., + text: float | str = ..., + width: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class PanedWindow(Widget): + def __init__( + self, + master: Misc | None = None, + cnf: dict[str, Any] | None = ..., + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + handlepad: _ScreenUnits = ..., + handlesize: _ScreenUnits = ..., + height: _ScreenUnits = ..., + name: str = ..., + opaqueresize: bool = ..., + orient: Literal["horizontal", "vertical"] = ..., + proxybackground: _Color = ..., + proxyborderwidth: _ScreenUnits = ..., + proxyrelief: _Relief = ..., + relief: _Relief = ..., + sashcursor: _Cursor = ..., + sashpad: _ScreenUnits = ..., + sashrelief: _Relief = ..., + sashwidth: _ScreenUnits = ..., + showhandle: bool = ..., + width: _ScreenUnits = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: _Color = ..., + bd: _ScreenUnits = ..., + bg: _Color = ..., + border: _ScreenUnits = ..., + borderwidth: _ScreenUnits = ..., + cursor: _Cursor = ..., + handlepad: _ScreenUnits = ..., + handlesize: _ScreenUnits = ..., + height: _ScreenUnits = ..., + opaqueresize: bool = ..., + orient: Literal["horizontal", "vertical"] = ..., + proxybackground: _Color = ..., + proxyborderwidth: _ScreenUnits = ..., + proxyrelief: _Relief = ..., + relief: _Relief = ..., + sashcursor: _Cursor = ..., + sashpad: _ScreenUnits = ..., + sashrelief: _Relief = ..., + sashwidth: _ScreenUnits = ..., + showhandle: bool = ..., + width: _ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def add(self, child: Widget, **kw) -> None: ... + def remove(self, child) -> None: ... + forget: Incomplete + def identify(self, x: int, y: int): ... + def proxy(self, *args): ... + def proxy_coord(self): ... + def proxy_forget(self): ... + def proxy_place(self, x, y): ... + def sash(self, *args): ... + def sash_coord(self, index): ... + def sash_mark(self, index): ... + def sash_place(self, index, x, y): ... + def panecget(self, child, option): ... 
+ def paneconfigure(self, tagOrId, cnf: Incomplete | None = None, **kw): ... + paneconfig: Incomplete + def panes(self): ... + +def _test() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/colorchooser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/colorchooser.pyi new file mode 100644 index 00000000..4300d94f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/colorchooser.pyi @@ -0,0 +1,20 @@ +import sys +from tkinter import Misc, _Color +from tkinter.commondialog import Dialog +from typing import ClassVar + +if sys.version_info >= (3, 9): + __all__ = ["Chooser", "askcolor"] + +class Chooser(Dialog): + command: ClassVar[str] + +if sys.version_info >= (3, 9): + def askcolor( + color: str | bytes | None = None, *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... + ) -> tuple[None, None] | tuple[tuple[int, int, int], str]: ... + +else: + def askcolor( + color: str | bytes | None = None, *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... + ) -> tuple[None, None] | tuple[tuple[float, float, float], str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/commondialog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/commondialog.pyi new file mode 100644 index 00000000..eba3ab5b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/commondialog.pyi @@ -0,0 +1,14 @@ +import sys +from _typeshed import Incomplete +from collections.abc import Mapping +from typing import ClassVar + +if sys.version_info >= (3, 9): + __all__ = ["Dialog"] + +class Dialog: + command: ClassVar[str | None] + master: Incomplete | None + options: Mapping[str, Incomplete] + def __init__(self, master: Incomplete | None = None, **options: Incomplete) -> None: ... + def show(self, **options: Incomplete) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/constants.pyi new file mode 100644 index 00000000..1383b0f9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/constants.pyi @@ -0,0 +1,80 @@ +from typing_extensions import Literal + +# These are not actually bools. 
See #4669 +NO: bool +YES: bool +TRUE: bool +FALSE: bool +ON: bool +OFF: bool +N: Literal["n"] +S: Literal["s"] +W: Literal["w"] +E: Literal["e"] +NW: Literal["nw"] +SW: Literal["sw"] +NE: Literal["ne"] +SE: Literal["se"] +NS: Literal["ns"] +EW: Literal["ew"] +NSEW: Literal["nsew"] +CENTER: Literal["center"] +NONE: Literal["none"] +X: Literal["x"] +Y: Literal["y"] +BOTH: Literal["both"] +LEFT: Literal["left"] +TOP: Literal["top"] +RIGHT: Literal["right"] +BOTTOM: Literal["bottom"] +RAISED: Literal["raised"] +SUNKEN: Literal["sunken"] +FLAT: Literal["flat"] +RIDGE: Literal["ridge"] +GROOVE: Literal["groove"] +SOLID: Literal["solid"] +HORIZONTAL: Literal["horizontal"] +VERTICAL: Literal["vertical"] +NUMERIC: Literal["numeric"] +CHAR: Literal["char"] +WORD: Literal["word"] +BASELINE: Literal["baseline"] +INSIDE: Literal["inside"] +OUTSIDE: Literal["outside"] +SEL: Literal["sel"] +SEL_FIRST: Literal["sel.first"] +SEL_LAST: Literal["sel.last"] +END: Literal["end"] +INSERT: Literal["insert"] +CURRENT: Literal["current"] +ANCHOR: Literal["anchor"] +ALL: Literal["all"] +NORMAL: Literal["normal"] +DISABLED: Literal["disabled"] +ACTIVE: Literal["active"] +HIDDEN: Literal["hidden"] +CASCADE: Literal["cascade"] +CHECKBUTTON: Literal["checkbutton"] +COMMAND: Literal["command"] +RADIOBUTTON: Literal["radiobutton"] +SEPARATOR: Literal["separator"] +SINGLE: Literal["single"] +BROWSE: Literal["browse"] +MULTIPLE: Literal["multiple"] +EXTENDED: Literal["extended"] +DOTBOX: Literal["dotbox"] +UNDERLINE: Literal["underline"] +PIESLICE: Literal["pieslice"] +CHORD: Literal["chord"] +ARC: Literal["arc"] +FIRST: Literal["first"] +LAST: Literal["last"] +BUTT: Literal["butt"] +PROJECTING: Literal["projecting"] +ROUND: Literal["round"] +BEVEL: Literal["bevel"] +MITER: Literal["miter"] +MOVETO: Literal["moveto"] +SCROLL: Literal["scroll"] +UNITS: Literal["units"] +PAGES: Literal["pages"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/dialog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/dialog.pyi new file mode 100644 index 00000000..8825188c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/dialog.pyi @@ -0,0 +1,16 @@ +import sys +from _typeshed import Incomplete +from collections.abc import Mapping +from tkinter import Widget +from typing import Any + +if sys.version_info >= (3, 9): + __all__ = ["Dialog"] + +DIALOG_ICON: str + +class Dialog(Widget): + widgetName: str + num: int + def __init__(self, master: Incomplete | None = None, cnf: Mapping[str, Any] = ..., **kw: Incomplete) -> None: ... + def destroy(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/dnd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/dnd.pyi new file mode 100644 index 00000000..4a6ab42b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/dnd.pyi @@ -0,0 +1,19 @@ +import sys +from tkinter import Event, Misc, Tk, Widget +from typing import ClassVar, Protocol + +if sys.version_info >= (3, 9): + __all__ = ["dnd_start", "DndHandler"] + +class _DndSource(Protocol): + def dnd_end(self, target: Widget | None, event: Event[Misc] | None) -> None: ... + +class DndHandler: + root: ClassVar[Tk | None] + def __init__(self, source: _DndSource, event: Event[Misc]) -> None: ... + def cancel(self, event: Event[Misc] | None = None) -> None: ... 
+    def finish(self, event: Event[Misc] | None, commit: int = 0) -> None: ...
+    def on_motion(self, event: Event[Misc]) -> None: ...
+    def on_release(self, event: Event[Misc]) -> None: ...
+
+def dnd_start(source: _DndSource, event: Event[Misc]) -> DndHandler | None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/filedialog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/filedialog.pyi
new file mode 100644
index 00000000..10b36e4d
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/filedialog.pyi
@@ -0,0 +1,152 @@
+import sys
+from _typeshed import Incomplete, StrOrBytesPath
+from collections.abc import Iterable
+from tkinter import Button, Entry, Frame, Listbox, Misc, Scrollbar, StringVar, Toplevel, commondialog
+from typing import IO, ClassVar
+from typing_extensions import Literal
+
+if sys.version_info >= (3, 9):
+    __all__ = [
+        "FileDialog",
+        "LoadFileDialog",
+        "SaveFileDialog",
+        "Open",
+        "SaveAs",
+        "Directory",
+        "askopenfilename",
+        "asksaveasfilename",
+        "askopenfilenames",
+        "askopenfile",
+        "askopenfiles",
+        "asksaveasfile",
+        "askdirectory",
+    ]
+
+dialogstates: dict[Incomplete, tuple[Incomplete, Incomplete]]
+
+class FileDialog:
+    title: str
+    master: Incomplete
+    directory: Incomplete | None
+    top: Toplevel
+    botframe: Frame
+    selection: Entry
+    filter: Entry
+    midframe: Entry
+    filesbar: Scrollbar
+    files: Listbox
+    dirsbar: Scrollbar
+    dirs: Listbox
+    ok_button: Button
+    filter_button: Button
+    cancel_button: Button
+    def __init__(
+        self, master, title: Incomplete | None = None
+    ) -> None: ...  # title is usually a str or None, but e.g. int doesn't raise an exception either
+    how: Incomplete | None
+    def go(self, dir_or_file=".", pattern: str = "*", default: str = "", key: Incomplete | None = None): ...
+    def quit(self, how: Incomplete | None = None) -> None: ...
+    def dirs_double_event(self, event) -> None: ...
+    def dirs_select_event(self, event) -> None: ...
+    def files_double_event(self, event) -> None: ...
+    def files_select_event(self, event) -> None: ...
+    def ok_event(self, event) -> None: ...
+    def ok_command(self) -> None: ...
+    def filter_command(self, event: Incomplete | None = None) -> None: ...
+    def get_filter(self): ...
+    def get_selection(self): ...
+    def cancel_command(self, event: Incomplete | None = None) -> None: ...
+    def set_filter(self, dir, pat) -> None: ...
+    def set_selection(self, file) -> None: ...
+
+class LoadFileDialog(FileDialog):
+    title: str
+    def ok_command(self) -> None: ...
+
+class SaveFileDialog(FileDialog):
+    title: str
+    def ok_command(self) -> None: ...
+
+class _Dialog(commondialog.Dialog): ...
+
+class Open(_Dialog):
+    command: ClassVar[str]
+
+class SaveAs(_Dialog):
+    command: ClassVar[str]
+
+class Directory(commondialog.Dialog):
+    command: ClassVar[str]
+
+# TODO: command kwarg available on macos
+def asksaveasfilename(
+    *,
+    confirmoverwrite: bool | None = ...,
+    defaultextension: str | None = ...,
+    filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ...,
+    initialdir: StrOrBytesPath | None = ...,
+    initialfile: StrOrBytesPath | None = ...,
+    parent: Misc | None = ...,
+    title: str | None = ...,
+    typevariable: StringVar | str | None = ...,
+) -> str: ...
# can be empty string +def askopenfilename( + *, + defaultextension: str | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> str: ... # can be empty string +def askopenfilenames( + *, + defaultextension: str | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> Literal[""] | tuple[str, ...]: ... +def askdirectory( + *, initialdir: StrOrBytesPath | None = ..., mustexist: bool | None = ..., parent: Misc | None = ..., title: str | None = ... +) -> str: ... # can be empty string + +# TODO: If someone actually uses these, overload to have the actual return type of open(..., mode) +def asksaveasfile( + mode: str = "w", + *, + confirmoverwrite: bool | None = ..., + defaultextension: str | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> IO[Incomplete] | None: ... +def askopenfile( + mode: str = "r", + *, + defaultextension: str | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> IO[Incomplete] | None: ... +def askopenfiles( + mode: str = "r", + *, + defaultextension: str | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., + initialdir: StrOrBytesPath | None = ..., + initialfile: StrOrBytesPath | None = ..., + parent: Misc | None = ..., + title: str | None = ..., + typevariable: StringVar | str | None = ..., +) -> tuple[IO[Incomplete], ...]: ... # can be empty tuple +def test() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/font.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/font.pyi new file mode 100644 index 00000000..0a557e92 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/font.pyi @@ -0,0 +1,112 @@ +import _tkinter +import sys +import tkinter +from typing import Any, overload +from typing_extensions import Literal, TypeAlias, TypedDict + +if sys.version_info >= (3, 9): + __all__ = ["NORMAL", "ROMAN", "BOLD", "ITALIC", "nametofont", "Font", "families", "names"] + +NORMAL: Literal["normal"] +ROMAN: Literal["roman"] +BOLD: Literal["bold"] +ITALIC: Literal["italic"] + +_FontDescription: TypeAlias = ( + str # "Helvetica 12" + | Font # A font object constructed in Python + | list[Any] # ("Helvetica", 12, BOLD) + | tuple[Any, ...] 
+ | _tkinter.Tcl_Obj # A font object constructed in Tcl +) + +class _FontDict(TypedDict): + family: str + size: int + weight: Literal["normal", "bold"] + slant: Literal["roman", "italic"] + underline: bool + overstrike: bool + +class _MetricsDict(TypedDict): + ascent: int + descent: int + linespace: int + fixed: bool + +class Font: + name: str + delete_font: bool + def __init__( + self, + # In tkinter, 'root' refers to tkinter.Tk by convention, but the code + # actually works with any tkinter widget so we use tkinter.Misc. + root: tkinter.Misc | None = None, + font: _FontDescription | None = None, + name: str | None = None, + exists: bool = False, + *, + family: str = ..., + size: int = ..., + weight: Literal["normal", "bold"] = ..., + slant: Literal["roman", "italic"] = ..., + underline: bool = ..., + overstrike: bool = ..., + ) -> None: ... + def __setitem__(self, key: str, value: Any) -> None: ... + @overload + def cget(self, option: Literal["family"]) -> str: ... + @overload + def cget(self, option: Literal["size"]) -> int: ... + @overload + def cget(self, option: Literal["weight"]) -> Literal["normal", "bold"]: ... + @overload + def cget(self, option: Literal["slant"]) -> Literal["roman", "italic"]: ... + @overload + def cget(self, option: Literal["underline", "overstrike"]) -> bool: ... + @overload + def cget(self, option: str) -> Any: ... + __getitem__ = cget + @overload + def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = None) -> str: ... + @overload + def actual(self, option: Literal["size"], displayof: tkinter.Misc | None = None) -> int: ... + @overload + def actual(self, option: Literal["weight"], displayof: tkinter.Misc | None = None) -> Literal["normal", "bold"]: ... + @overload + def actual(self, option: Literal["slant"], displayof: tkinter.Misc | None = None) -> Literal["roman", "italic"]: ... + @overload + def actual(self, option: Literal["underline", "overstrike"], displayof: tkinter.Misc | None = None) -> bool: ... + @overload + def actual(self, option: None, displayof: tkinter.Misc | None = None) -> _FontDict: ... + @overload + def actual(self, *, displayof: tkinter.Misc | None = None) -> _FontDict: ... + def config( + self, + *, + family: str = ..., + size: int = ..., + weight: Literal["normal", "bold"] = ..., + slant: Literal["roman", "italic"] = ..., + underline: bool = ..., + overstrike: bool = ..., + ) -> _FontDict | None: ... + configure = config + def copy(self) -> Font: ... + @overload + def metrics(self, __option: Literal["ascent", "descent", "linespace"], *, displayof: tkinter.Misc | None = ...) -> int: ... + @overload + def metrics(self, __option: Literal["fixed"], *, displayof: tkinter.Misc | None = ...) -> bool: ... + @overload + def metrics(self, *, displayof: tkinter.Misc | None = ...) -> _MetricsDict: ... + def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: ... + def __eq__(self, other: object) -> bool: ... + +def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: ... +def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: ... + +if sys.version_info >= (3, 10): + def nametofont(name: str, root: tkinter.Misc | None = None) -> Font: ... + +else: + def nametofont(name: str) -> Font: ... 
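+
+# A hedged usage sketch, not part of the stubs themselves, of the overloads
+# above; the root widget and return values are hypothetical:
+#
+#     f = Font(root=root, family="Helvetica", size=12, weight=BOLD)
+#     f.cget("size")      # -> 12 (int overload)
+#     f.actual()          # -> a _FontDict with all six options
+#     f.measure("hello")  # -> rendered width in pixels, as an int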
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/messagebox.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/messagebox.pyi new file mode 100644 index 00000000..5a04b66d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/messagebox.pyi @@ -0,0 +1,44 @@ +import sys +from tkinter.commondialog import Dialog +from typing import ClassVar + +if sys.version_info >= (3, 9): + __all__ = [ + "showinfo", + "showwarning", + "showerror", + "askquestion", + "askokcancel", + "askyesno", + "askyesnocancel", + "askretrycancel", + ] + +ERROR: str +INFO: str +QUESTION: str +WARNING: str +ABORTRETRYIGNORE: str +OK: str +OKCANCEL: str +RETRYCANCEL: str +YESNO: str +YESNOCANCEL: str +ABORT: str +RETRY: str +IGNORE: str +CANCEL: str +YES: str +NO: str + +class Message(Dialog): + command: ClassVar[str] + +def showinfo(title: str | None = None, message: str | None = None, **options) -> str: ... +def showwarning(title: str | None = None, message: str | None = None, **options) -> str: ... +def showerror(title: str | None = None, message: str | None = None, **options) -> str: ... +def askquestion(title: str | None = None, message: str | None = None, **options) -> str: ... +def askokcancel(title: str | None = None, message: str | None = None, **options) -> bool: ... +def askyesno(title: str | None = None, message: str | None = None, **options) -> bool: ... +def askyesnocancel(title: str | None = None, message: str | None = None, **options) -> bool | None: ... +def askretrycancel(title: str | None = None, message: str | None = None, **options) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/scrolledtext.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/scrolledtext.pyi new file mode 100644 index 00000000..114f8c3d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/scrolledtext.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete +from tkinter import Frame, Misc, Scrollbar, Text + +__all__ = ["ScrolledText"] + +# The methods from Pack, Place, and Grid are dynamically added over the parent's impls +class ScrolledText(Text): + frame: Frame + vbar: Scrollbar + def __init__(self, master: Misc | None = None, **kwargs: Incomplete) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/simpledialog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/simpledialog.pyi new file mode 100644 index 00000000..2c57cce7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/simpledialog.pyi @@ -0,0 +1,54 @@ +from tkinter import Event, Frame, Misc, Toplevel + +class Dialog(Toplevel): + def __init__(self, parent: Misc | None, title: str | None = None) -> None: ... + def body(self, master: Frame) -> Misc | None: ... + def buttonbox(self) -> None: ... + def ok(self, event: Event[Misc] | None = None) -> None: ... + def cancel(self, event: Event[Misc] | None = None) -> None: ... + def validate(self) -> bool: ... + def apply(self) -> None: ... + +class SimpleDialog: + def __init__( + self, + master: Misc | None, + text: str = "", + buttons: list[str] = ..., + default: int | None = None, + cancel: int | None = None, + title: str | None = None, + class_: str | None = None, + ) -> None: ... 
+ def go(self) -> int | None: ... + def return_event(self, event: Event[Misc]) -> None: ... + def wm_delete_window(self) -> None: ... + def done(self, num: int) -> None: ... + +def askfloat( + title: str | None, + prompt: str, + *, + initialvalue: float | None = ..., + minvalue: float | None = ..., + maxvalue: float | None = ..., + parent: Misc | None = ..., +) -> float | None: ... +def askinteger( + title: str | None, + prompt: str, + *, + initialvalue: int | None = ..., + minvalue: int | None = ..., + maxvalue: int | None = ..., + parent: Misc | None = ..., +) -> int | None: ... +def askstring( + title: str | None, + prompt: str, + *, + initialvalue: str | None = ..., + show: str | None = ..., + # minvalue/maxvalue is accepted but not useful. + parent: Misc | None = ..., +) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/tix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/tix.pyi new file mode 100644 index 00000000..5dd6f040 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/tix.pyi @@ -0,0 +1,300 @@ +import tkinter +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Literal + +WINDOW: Literal["window"] +TEXT: Literal["text"] +STATUS: Literal["status"] +IMMEDIATE: Literal["immediate"] +IMAGE: Literal["image"] +IMAGETEXT: Literal["imagetext"] +BALLOON: Literal["balloon"] +AUTO: Literal["auto"] +ACROSSTOP: Literal["acrosstop"] + +ASCII: Literal["ascii"] +CELL: Literal["cell"] +COLUMN: Literal["column"] +DECREASING: Literal["decreasing"] +INCREASING: Literal["increasing"] +INTEGER: Literal["integer"] +MAIN: Literal["main"] +MAX: Literal["max"] +REAL: Literal["real"] +ROW: Literal["row"] +S_REGION: Literal["s-region"] +X_REGION: Literal["x-region"] +Y_REGION: Literal["y-region"] + +# These should be kept in sync with _tkinter constants, except TCL_ALL_EVENTS which doesn't match ALL_EVENTS +TCL_DONT_WAIT: Literal[2] +TCL_WINDOW_EVENTS: Literal[4] +TCL_FILE_EVENTS: Literal[8] +TCL_TIMER_EVENTS: Literal[16] +TCL_IDLE_EVENTS: Literal[32] +TCL_ALL_EVENTS: Literal[0] + +class tixCommand: + def tix_addbitmapdir(self, directory: str) -> None: ... + def tix_cget(self, option: str) -> Any: ... + def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: ... + def tix_filedialog(self, dlgclass: str | None = None) -> str: ... + def tix_getbitmap(self, name: str) -> str: ... + def tix_getimage(self, name: str) -> str: ... + def tix_option_get(self, name: str) -> Any: ... + def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: ... + +class Tk(tkinter.Tk, tixCommand): + def __init__(self, screenName: str | None = None, baseName: str | None = None, className: str = "Tix") -> None: ... + +class TixWidget(tkinter.Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + widgetName: str | None = None, + static_options: list[str] | None = None, + cnf: dict[str, Any] = ..., + kw: dict[str, Any] = ..., + ) -> None: ... + def __getattr__(self, name: str): ... + def set_silent(self, value: str) -> None: ... + def subwidget(self, name: str) -> tkinter.Widget: ... + def subwidgets_all(self) -> list[tkinter.Widget]: ... + def config_all(self, option: Any, value: Any) -> None: ... + def image_create(self, imgtype: str, cnf: dict[str, Any] = ..., master: tkinter.Widget | None = None, **kw) -> None: ... 
+ def image_delete(self, imgname: str) -> None: ... + +class TixSubWidget(TixWidget): + def __init__(self, master: tkinter.Widget, name: str, destroy_physically: int = 1, check_intermediate: int = 1) -> None: ... + +class DisplayStyle: + def __init__(self, itemtype: str, cnf: dict[str, Any] = ..., *, master: tkinter.Widget | None = None, **kw) -> None: ... + def __getitem__(self, key: str): ... + def __setitem__(self, key: str, value: Any) -> None: ... + def delete(self) -> None: ... + def config(self, cnf: dict[str, Any] = ..., **kw): ... + +class Balloon(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = ..., **kw) -> None: ... + def unbind_widget(self, widget: tkinter.Widget) -> None: ... + +class ButtonBox(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... + def invoke(self, name: str) -> None: ... + +class ComboBox(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def add_history(self, str: str) -> None: ... + def append_history(self, str: str) -> None: ... + def insert(self, index: int, str: str) -> None: ... + def pick(self, index: int) -> None: ... + +class Control(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def decrement(self) -> None: ... + def increment(self) -> None: ... + def invoke(self) -> None: ... + +class LabelEntry(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + +class LabelFrame(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + +class Meter(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + +class OptionMenu(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def add_command(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def add_separator(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def delete(self, name: str) -> None: ... + def disable(self, name: str) -> None: ... + def enable(self, name: str) -> None: ... + +class PopupMenu(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def bind_widget(self, widget: tkinter.Widget) -> None: ... + def unbind_widget(self, widget: tkinter.Widget) -> None: ... + def post_widget(self, widget: tkinter.Widget, x: int, y: int) -> None: ... + +class Select(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... + def invoke(self, name: str) -> None: ... + +class StdButtonBox(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def invoke(self, name: str) -> None: ... + +class DirList(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def chdir(self, dir: str) -> None: ... + +class DirTree(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... 
+ def chdir(self, dir: str) -> None: ... + +class DirSelectDialog(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def popup(self) -> None: ... + def popdown(self) -> None: ... + +class DirSelectBox(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + +class ExFileSelectBox(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def filter(self) -> None: ... + def invoke(self) -> None: ... + +class FileSelectBox(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def apply_filter(self) -> None: ... + def invoke(self) -> None: ... + +class FileEntry(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def invoke(self) -> None: ... + def file_dialog(self) -> None: ... + +class HList(TixWidget, tkinter.XView, tkinter.YView): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def add(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... + def add_child(self, parent: str | None = None, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... + def anchor_set(self, entry: str) -> None: ... + def anchor_clear(self) -> None: ... + # FIXME: Overload, certain combos return, others don't + def column_width(self, col: int = 0, width: int | None = None, chars: int | None = None) -> int | None: ... + def delete_all(self) -> None: ... + def delete_entry(self, entry: str) -> None: ... + def delete_offsprings(self, entry: str) -> None: ... + def delete_siblings(self, entry: str) -> None: ... + def dragsite_set(self, index: int) -> None: ... + def dragsite_clear(self) -> None: ... + def dropsite_set(self, index: int) -> None: ... + def dropsite_clear(self) -> None: ... + def header_create(self, col: int, cnf: dict[str, Any] = ..., **kw) -> None: ... + def header_configure(self, col: int, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... + def header_cget(self, col: int, opt): ... + def header_exists(self, col: int) -> bool: ... + def header_exist(self, col: int) -> bool: ... + def header_delete(self, col: int) -> None: ... + def header_size(self, col: int) -> int: ... + def hide_entry(self, entry: str) -> None: ... + def indicator_create(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def indicator_configure(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... + def indicator_cget(self, entry: str, opt): ... + def indicator_exists(self, entry: str) -> bool: ... + def indicator_delete(self, entry: str) -> None: ... + def indicator_size(self, entry: str) -> int: ... + def info_anchor(self) -> str: ... + def info_bbox(self, entry: str) -> tuple[int, int, int, int]: ... + def info_children(self, entry: str | None = None) -> tuple[str, ...]: ... + def info_data(self, entry: str) -> Any: ... + def info_dragsite(self) -> str: ... + def info_dropsite(self) -> str: ... + def info_exists(self, entry: str) -> bool: ... + def info_hidden(self, entry: str) -> bool: ... + def info_next(self, entry: str) -> str: ... + def info_parent(self, entry: str) -> str: ... + def info_prev(self, entry: str) -> str: ... + def info_selection(self) -> tuple[str, ...]: ... + def item_cget(self, entry: str, col: int, opt): ... 
+ def item_configure(self, entry: str, col: int, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... + def item_create(self, entry: str, col: int, cnf: dict[str, Any] = ..., **kw) -> None: ... + def item_exists(self, entry: str, col: int) -> bool: ... + def item_delete(self, entry: str, col: int) -> None: ... + def entrycget(self, entry: str, opt): ... + def entryconfigure(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... + def nearest(self, y: int) -> str: ... + def see(self, entry: str) -> None: ... + def selection_clear(self, cnf: dict[str, Any] = ..., **kw) -> None: ... + def selection_includes(self, entry: str) -> bool: ... + def selection_set(self, first: str, last: str | None = None) -> None: ... + def show_entry(self, entry: str) -> None: ... + +class CheckList(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def autosetmode(self) -> None: ... + def close(self, entrypath: str) -> None: ... + def getmode(self, entrypath: str) -> str: ... + def open(self, entrypath: str) -> None: ... + def getselection(self, mode: str = "on") -> tuple[str, ...]: ... + def getstatus(self, entrypath: str) -> str: ... + def setstatus(self, entrypath: str, mode: str = "on") -> None: ... + +class Tree(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def autosetmode(self) -> None: ... + def close(self, entrypath: str) -> None: ... + def getmode(self, entrypath: str) -> str: ... + def open(self, entrypath: str) -> None: ... + def setmode(self, entrypath: str, mode: str = "none") -> None: ... + +class TList(TixWidget, tkinter.XView, tkinter.YView): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def active_set(self, index: int) -> None: ... + def active_clear(self) -> None: ... + def anchor_set(self, index: int) -> None: ... + def anchor_clear(self) -> None: ... + def delete(self, from_: int, to: int | None = None) -> None: ... + def dragsite_set(self, index: int) -> None: ... + def dragsite_clear(self) -> None: ... + def dropsite_set(self, index: int) -> None: ... + def dropsite_clear(self) -> None: ... + def insert(self, index: int, cnf: dict[str, Any] = ..., **kw) -> None: ... + def info_active(self) -> int: ... + def info_anchor(self) -> int: ... + def info_down(self, index: int) -> int: ... + def info_left(self, index: int) -> int: ... + def info_right(self, index: int) -> int: ... + def info_selection(self) -> tuple[int, ...]: ... + def info_size(self) -> int: ... + def info_up(self, index: int) -> int: ... + def nearest(self, x: int, y: int) -> int: ... + def see(self, index: int) -> None: ... + def selection_clear(self, cnf: dict[str, Any] = ..., **kw) -> None: ... + def selection_includes(self, index: int) -> bool: ... + def selection_set(self, first: int, last: int | None = None) -> None: ... + +class PanedWindow(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def delete(self, name: str) -> None: ... + def forget(self, name: str) -> None: ... # type: ignore[override] + def panecget(self, entry: str, opt): ... + def paneconfigure(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... + def panes(self) -> list[tkinter.Widget]: ... 
+ +class ListNoteBook(TixWidget): + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def page(self, name: str) -> tkinter.Widget: ... + def pages(self) -> list[tkinter.Widget]: ... + def raise_page(self, name: str) -> None: ... + +class NoteBook(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def delete(self, name: str) -> None: ... + def page(self, name: str) -> tkinter.Widget: ... + def pages(self) -> list[tkinter.Widget]: ... + def raise_page(self, name: str) -> None: ... + def raised(self) -> bool: ... + +class InputOnly(TixWidget): + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + +class Form: + def __setitem__(self, key: str, value: Any) -> None: ... + def config(self, cnf: dict[str, Any] = ..., **kw) -> None: ... + def form(self, cnf: dict[str, Any] = ..., **kw) -> None: ... + def check(self) -> bool: ... + def forget(self) -> None: ... + def grid(self, xsize: int = 0, ysize: int = 0) -> tuple[int, int] | None: ... + def info(self, option: str | None = None): ... + def slaves(self) -> list[tkinter.Widget]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/ttk.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/ttk.pyi new file mode 100644 index 00000000..61ebc0e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tkinter/ttk.pyi @@ -0,0 +1,1186 @@ +import _tkinter +import sys +import tkinter +from _typeshed import Incomplete +from collections.abc import Callable +from tkinter.font import _FontDescription +from typing import Any, overload +from typing_extensions import Literal, TypeAlias, TypedDict + +__all__ = [ + "Button", + "Checkbutton", + "Combobox", + "Entry", + "Frame", + "Label", + "Labelframe", + "LabelFrame", + "Menubutton", + "Notebook", + "Panedwindow", + "PanedWindow", + "Progressbar", + "Radiobutton", + "Scale", + "Scrollbar", + "Separator", + "Sizegrip", + "Style", + "Treeview", + "LabeledScale", + "OptionMenu", + "tclobjs_to_py", + "setup_master", + "Spinbox", +] + +def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: ... +def setup_master(master: Incomplete | None = None): ... + +_Padding: TypeAlias = ( + tkinter._ScreenUnits + | tuple[tkinter._ScreenUnits] + | tuple[tkinter._ScreenUnits, tkinter._ScreenUnits] + | tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits] + | tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits] +) + +# from ttk_widget (aka ttk::widget) manual page, differs from tkinter._Compound +_TtkCompound: TypeAlias = Literal["text", "image", tkinter._Compound] + +class Style: + master: Incomplete + tk: _tkinter.TkappType + def __init__(self, master: tkinter.Misc | None = None) -> None: ... + def configure(self, style, query_opt: Incomplete | None = None, **kw): ... + def map(self, style, query_opt: Incomplete | None = None, **kw): ... + def lookup(self, style, option, state: Incomplete | None = None, default: Incomplete | None = None): ... + def layout(self, style, layoutspec: Incomplete | None = None): ... + def element_create(self, elementname, etype, *args, **kw) -> None: ... + def element_names(self): ... 
+ def element_options(self, elementname): ... + def theme_create(self, themename, parent: Incomplete | None = None, settings: Incomplete | None = None) -> None: ... + def theme_settings(self, themename, settings) -> None: ... + def theme_names(self) -> tuple[str, ...]: ... + @overload + def theme_use(self, themename: str) -> None: ... + @overload + def theme_use(self, themename: None = None) -> str: ... + +class Widget(tkinter.Widget): + def __init__(self, master: tkinter.Misc | None, widgetname, kw: Incomplete | None = None) -> None: ... + def identify(self, x: int, y: int) -> str: ... + def instate(self, statespec, callback: Incomplete | None = None, *args, **kw): ... + def state(self, statespec: Incomplete | None = None): ... + +class Button(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + command: tkinter._ButtonCommand = ..., + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + default: Literal["normal", "active", "disabled"] = ..., + image: tkinter._ImageSpec = ..., + name: str = ..., + padding=..., # undocumented + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + width: int | Literal[""] = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: tkinter._ButtonCommand = ..., + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + default: Literal["normal", "active", "disabled"] = ..., + image: tkinter._ImageSpec = ..., + padding=..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + width: int | Literal[""] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def invoke(self) -> Any: ... + +class Checkbutton(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + command: tkinter._ButtonCommand = ..., + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + image: tkinter._ImageSpec = ..., + name: str = ..., + offvalue: Any = ..., + onvalue: Any = ..., + padding=..., # undocumented + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + # Seems like variable can be empty string, but actually setting it to + # empty string segfaults before Tcl 8.6.9. Search for ttk::checkbutton + # here: https://sourceforge.net/projects/tcl/files/Tcl/8.6.9/tcltk-release-notes-8.6.9.txt/view + variable: tkinter.Variable = ..., + width: int | Literal[""] = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: tkinter._ButtonCommand = ..., + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + image: tkinter._ImageSpec = ..., + offvalue: Any = ..., + onvalue: Any = ..., + padding=..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + variable: tkinter.Variable = ..., + width: int | Literal[""] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
+ config = configure + def invoke(self) -> Any: ... + +class Entry(Widget, tkinter.Entry): + def __init__( + self, + master: tkinter.Misc | None = None, + widget: str | None = None, + *, + background: tkinter._Color = ..., # undocumented + class_: str = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: tkinter._Color = ..., + invalidcommand: tkinter._EntryValidateCommand = ..., + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + show: str = ..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: tkinter._EntryValidateCommand = ..., + width: int = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: tkinter._Color = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: tkinter._Color = ..., + invalidcommand: tkinter._EntryValidateCommand = ..., + justify: Literal["left", "center", "right"] = ..., + show: str = ..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: tkinter._EntryValidateCommand = ..., + width: int = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Entry().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + background: tkinter._Color = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: tkinter._Color = ..., + invalidcommand: tkinter._EntryValidateCommand = ..., + justify: Literal["left", "center", "right"] = ..., + show: str = ..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: tkinter._EntryValidateCommand = ..., + width: int = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + def bbox(self, index) -> tuple[int, int, int, int]: ... # type: ignore[override] + def identify(self, x: int, y: int) -> str: ... + def validate(self): ... 
+ +class Combobox(Entry): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + background: tkinter._Color = ..., # undocumented + class_: str = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., # undocumented + foreground: tkinter._Color = ..., # undocumented + height: int = ..., + invalidcommand: tkinter._EntryValidateCommand = ..., # undocumented + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + postcommand: Callable[[], object] | str = ..., + show=..., # undocumented + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., # undocumented + validatecommand: tkinter._EntryValidateCommand = ..., # undocumented + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., # undocumented + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: tkinter._Color = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: tkinter._Color = ..., + height: int = ..., + invalidcommand: tkinter._EntryValidateCommand = ..., + justify: Literal["left", "center", "right"] = ..., + postcommand: Callable[[], object] | str = ..., + show=..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: tkinter._EntryValidateCommand = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Combobox().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + background: tkinter._Color = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: tkinter._Color = ..., + height: int = ..., + invalidcommand: tkinter._EntryValidateCommand = ..., + justify: Literal["left", "center", "right"] = ..., + postcommand: Callable[[], object] | str = ..., + show=..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + textvariable: tkinter.Variable = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: tkinter._EntryValidateCommand = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + def current(self, newindex: int | None = None) -> int: ... + def set(self, value: Any) -> None: ... 
+ +class Frame(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + border: tkinter._ScreenUnits = ..., + borderwidth: tkinter._ScreenUnits = ..., + class_: str = ..., + cursor: tkinter._Cursor = ..., + height: tkinter._ScreenUnits = ..., + name: str = ..., + padding: _Padding = ..., + relief: tkinter._Relief = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + width: tkinter._ScreenUnits = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + border: tkinter._ScreenUnits = ..., + borderwidth: tkinter._ScreenUnits = ..., + cursor: tkinter._Cursor = ..., + height: tkinter._ScreenUnits = ..., + padding: _Padding = ..., + relief: tkinter._Relief = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + width: tkinter._ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Label(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + anchor: tkinter._Anchor = ..., + background: tkinter._Color = ..., + border: tkinter._ScreenUnits = ..., # alias for borderwidth + borderwidth: tkinter._ScreenUnits = ..., # undocumented + class_: str = ..., + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + font: _FontDescription = ..., + foreground: tkinter._Color = ..., + image: tkinter._ImageSpec = ..., + justify: Literal["left", "center", "right"] = ..., + name: str = ..., + padding: _Padding = ..., + relief: tkinter._Relief = ..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + width: int | Literal[""] = ..., + wraplength: tkinter._ScreenUnits = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + anchor: tkinter._Anchor = ..., + background: tkinter._Color = ..., + border: tkinter._ScreenUnits = ..., + borderwidth: tkinter._ScreenUnits = ..., + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + font: _FontDescription = ..., + foreground: tkinter._Color = ..., + image: tkinter._ImageSpec = ..., + justify: Literal["left", "center", "right"] = ..., + padding: _Padding = ..., + relief: tkinter._Relief = ..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + width: int | Literal[""] = ..., + wraplength: tkinter._ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Labelframe(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + border: tkinter._ScreenUnits = ..., + borderwidth: tkinter._ScreenUnits = ..., # undocumented + class_: str = ..., + cursor: tkinter._Cursor = ..., + height: tkinter._ScreenUnits = ..., + labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = ..., + labelwidget: tkinter.Misc = ..., + name: str = ..., + padding: _Padding = ..., + relief: tkinter._Relief = ..., # undocumented + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + underline: int = ..., + width: tkinter._ScreenUnits = ..., + ) -> None: ... 
+ @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + border: tkinter._ScreenUnits = ..., + borderwidth: tkinter._ScreenUnits = ..., + cursor: tkinter._Cursor = ..., + height: tkinter._ScreenUnits = ..., + labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = ..., + labelwidget: tkinter.Misc = ..., + padding: _Padding = ..., + relief: tkinter._Relief = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + underline: int = ..., + width: tkinter._ScreenUnits = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +LabelFrame = Labelframe + +class Menubutton(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + direction: Literal["above", "below", "left", "right", "flush"] = ..., + image: tkinter._ImageSpec = ..., + menu: tkinter.Menu = ..., + name: str = ..., + padding=..., # undocumented + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + width: int | Literal[""] = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + direction: Literal["above", "below", "left", "right", "flush"] = ..., + image: tkinter._ImageSpec = ..., + menu: tkinter.Menu = ..., + padding=..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + width: int | Literal[""] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Notebook(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + cursor: tkinter._Cursor = ..., + height: int = ..., + name: str = ..., + padding: _Padding = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + width: int = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + height: int = ..., + padding: _Padding = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + width: int = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def add( + self, + child: tkinter.Widget, + *, + state: Literal["normal", "disabled", "hidden"] = ..., + sticky: str = ..., # consists of letters 'n', 's', 'w', 'e', no repeats, may be empty + padding: _Padding = ..., + text: str = ..., + image=..., # Sequence of an image name, followed by zero or more (sequences of one or more state names followed by an image name) + compound: tkinter._Compound = ..., + underline: int = ..., + ) -> None: ... + def forget(self, tab_id) -> None: ... + def hide(self, tab_id) -> None: ... + def identify(self, x: int, y: int) -> str: ... + def index(self, tab_id): ... + def insert(self, pos, child, **kw) -> None: ... + def select(self, tab_id: Incomplete | None = None): ... + def tab(self, tab_id, option: Incomplete | None = None, **kw): ... 
+ def tabs(self): ... + def enable_traversal(self) -> None: ... + +class Panedwindow(Widget, tkinter.PanedWindow): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + cursor: tkinter._Cursor = ..., + # width and height for tkinter.ttk.Panedwindow are int but for tkinter.PanedWindow they are screen units + height: int = ..., + name: str = ..., + orient: Literal["vertical", "horizontal"] = ..., # can't be changed with configure() + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + width: int = ..., + ) -> None: ... + def add(self, child: tkinter.Widget, *, weight: int = ..., **kw) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + height: int = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + width: int = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Panedwindow().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + height: int = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + width: int = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + forget: Incomplete + def insert(self, pos, child, **kw) -> None: ... + def pane(self, pane, option: Incomplete | None = None, **kw): ... + def sashpos(self, index, newpos: Incomplete | None = None): ... + +PanedWindow = Panedwindow + +class Progressbar(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + cursor: tkinter._Cursor = ..., + length: tkinter._ScreenUnits = ..., + maximum: float = ..., + mode: Literal["determinate", "indeterminate"] = ..., + name: str = ..., + orient: Literal["horizontal", "vertical"] = ..., + phase: int = ..., # docs say read-only but assigning int to this works + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + value: float = ..., + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + length: tkinter._ScreenUnits = ..., + maximum: float = ..., + mode: Literal["determinate", "indeterminate"] = ..., + orient: Literal["horizontal", "vertical"] = ..., + phase: int = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + value: float = ..., + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def start(self, interval: Literal["idle"] | int | None = None) -> None: ... + def step(self, amount: float | None = None) -> None: ... + def stop(self) -> None: ... 
+ +class Radiobutton(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + command: tkinter._ButtonCommand = ..., + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + image: tkinter._ImageSpec = ..., + name: str = ..., + padding=..., # undocumented + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + value: Any = ..., + variable: tkinter.Variable | Literal[""] = ..., + width: int | Literal[""] = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: tkinter._ButtonCommand = ..., + compound: _TtkCompound = ..., + cursor: tkinter._Cursor = ..., + image: tkinter._ImageSpec = ..., + padding=..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + text: float | str = ..., + textvariable: tkinter.Variable = ..., + underline: int = ..., + value: Any = ..., + variable: tkinter.Variable | Literal[""] = ..., + width: int | Literal[""] = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def invoke(self) -> Any: ... + +# type ignore, because identify() methods of Widget and tkinter.Scale are incompatible +class Scale(Widget, tkinter.Scale): # type: ignore[misc] + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + command: str | Callable[[str], object] = ..., + cursor: tkinter._Cursor = ..., + from_: float = ..., + length: tkinter._ScreenUnits = ..., + name: str = ..., + orient: Literal["horizontal", "vertical"] = ..., + state: str = ..., # undocumented + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + to: float = ..., + value: float = ..., + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: str | Callable[[str], object] = ..., + cursor: tkinter._Cursor = ..., + from_: float = ..., + length: tkinter._ScreenUnits = ..., + orient: Literal["horizontal", "vertical"] = ..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + to: float = ..., + value: float = ..., + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Scale().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + command: str | Callable[[str], object] = ..., + cursor: tkinter._Cursor = ..., + from_: float = ..., + length: tkinter._ScreenUnits = ..., + orient: Literal["horizontal", "vertical"] = ..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + to: float = ..., + value: float = ..., + variable: tkinter.IntVar | tkinter.DoubleVar = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + def get(self, x: int | None = None, y: int | None = None) -> float: ... 
+ +# type ignore, because identify() methods of Widget and tkinter.Scale are incompatible +class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + command: Callable[..., tuple[float, float] | None] | str = ..., + cursor: tkinter._Cursor = ..., + name: str = ..., + orient: Literal["horizontal", "vertical"] = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + command: Callable[..., tuple[float, float] | None] | str = ..., + cursor: tkinter._Cursor = ..., + orient: Literal["horizontal", "vertical"] = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + # config must be copy/pasted, otherwise ttk.Scrollbar().config is mypy error (don't know why) + @overload # type: ignore[override] + def config( + self, + cnf: dict[str, Any] | None = None, + *, + command: Callable[..., tuple[float, float] | None] | str = ..., + cursor: tkinter._Cursor = ..., + orient: Literal["horizontal", "vertical"] = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + +class Separator(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + cursor: tkinter._Cursor = ..., + name: str = ..., + orient: Literal["horizontal", "vertical"] = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + orient: Literal["horizontal", "vertical"] = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Sizegrip(Widget): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + cursor: tkinter._Cursor = ..., + name: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + cursor: tkinter._Cursor = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + +class Spinbox(Entry): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + background: tkinter._Color = ..., # undocumented + class_: str = ..., + command: Callable[[], object] | str | list[str] | tuple[str, ...] 
= ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., # undocumented + font: _FontDescription = ..., # undocumented + foreground: tkinter._Color = ..., # undocumented + format: str = ..., + from_: float = ..., + increment: float = ..., + invalidcommand: tkinter._EntryValidateCommand = ..., # undocumented + justify: Literal["left", "center", "right"] = ..., # undocumented + name: str = ..., + show=..., # undocumented + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + textvariable: tkinter.Variable = ..., # undocumented + to: float = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: tkinter._EntryValidateCommand = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., # undocumented + wrap: bool = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., + ) -> None: ... + @overload # type: ignore[override] + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + background: tkinter._Color = ..., + command: Callable[[], object] | str | list[str] | tuple[str, ...] = ..., + cursor: tkinter._Cursor = ..., + exportselection: bool = ..., + font: _FontDescription = ..., + foreground: tkinter._Color = ..., + format: str = ..., + from_: float = ..., + increment: float = ..., + invalidcommand: tkinter._EntryValidateCommand = ..., + justify: Literal["left", "center", "right"] = ..., + show=..., + state: str = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + textvariable: tkinter.Variable = ..., + to: float = ..., + validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., + validatecommand: tkinter._EntryValidateCommand = ..., + values: list[str] | tuple[str, ...] = ..., + width: int = ..., + wrap: bool = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure # type: ignore[assignment] + def set(self, value: Any) -> None: ... + +class _TreeviewItemDict(TypedDict): + text: str + image: list[str] | Literal[""] # no idea why it's wrapped in list + values: list[Any] | Literal[""] + open: bool # actually 0 or 1 + tags: list[str] | Literal[""] + +class _TreeviewTagDict(TypedDict): + # There is also 'text' and 'anchor', but they don't seem to do anything, using them is likely a bug + foreground: tkinter._Color + background: tkinter._Color + font: _FontDescription + image: str # not wrapped in list :D + +class _TreeviewHeaderDict(TypedDict): + text: str + image: list[str] | Literal[""] + anchor: tkinter._Anchor + command: str + state: str # Doesn't seem to appear anywhere else than in these dicts + +class _TreeviewColumnDict(TypedDict): + width: int + minwidth: int + stretch: bool # actually 0 or 1 + anchor: tkinter._Anchor + id: str + +_TreeviewColumnId: TypeAlias = int | str # manual page: "COLUMN IDENTIFIERS" + +class Treeview(Widget, tkinter.XView, tkinter.YView): + def __init__( + self, + master: tkinter.Misc | None = None, + *, + class_: str = ..., + columns: str | list[str] | tuple[str, ...] = ..., + cursor: tkinter._Cursor = ..., + displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] 
= ..., + height: int = ..., + name: str = ..., + padding: _Padding = ..., + selectmode: Literal["extended", "browse", "none"] = ..., + # list/tuple of Literal don't actually work in mypy + # + # 'tree headings' is same as ['tree', 'headings'], and I wouldn't be + # surprised if someone is using it. + show: Literal["tree", "headings", "tree headings", ""] | list[str] | tuple[str, ...] = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., + yscrollcommand: tkinter._XYScrollCommand = ..., + ) -> None: ... + @overload + def configure( + self, + cnf: dict[str, Any] | None = None, + *, + columns: str | list[str] | tuple[str, ...] = ..., + cursor: tkinter._Cursor = ..., + displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ..., + height: int = ..., + padding: _Padding = ..., + selectmode: Literal["extended", "browse", "none"] = ..., + show: Literal["tree", "headings", "tree headings", ""] | list[str] | tuple[str, ...] = ..., + style: str = ..., + takefocus: tkinter._TakeFocusValue = ..., + xscrollcommand: tkinter._XYScrollCommand = ..., + yscrollcommand: tkinter._XYScrollCommand = ..., + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... + @overload + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure + def bbox(self, item, column: _TreeviewColumnId | None = None) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] + def get_children(self, item: str | None = None) -> tuple[str, ...]: ... + def set_children(self, item: str, *newchildren: str) -> None: ... + @overload + def column(self, column: _TreeviewColumnId, option: Literal["width", "minwidth"]) -> int: ... + @overload + def column(self, column: _TreeviewColumnId, option: Literal["stretch"]) -> bool: ... # actually 0 or 1 + @overload + def column(self, column: _TreeviewColumnId, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ... + @overload + def column(self, column: _TreeviewColumnId, option: Literal["id"]) -> str: ... + @overload + def column(self, column: _TreeviewColumnId, option: str) -> Any: ... + @overload + def column( + self, + column: _TreeviewColumnId, + option: None = None, + *, + width: int = ..., + minwidth: int = ..., + stretch: bool = ..., + anchor: tkinter._Anchor = ..., + # id is read-only + ) -> _TreeviewColumnDict | None: ... + def delete(self, *items: str) -> None: ... + def detach(self, *items: str) -> None: ... + def exists(self, item: str) -> bool: ... + @overload # type: ignore[override] + def focus(self, item: None = None) -> str: ... # can return empty string + @overload + def focus(self, item: str) -> Literal[""]: ... + @overload + def heading(self, column: _TreeviewColumnId, option: Literal["text"]) -> str: ... + @overload + def heading(self, column: _TreeviewColumnId, option: Literal["image"]) -> tuple[str] | str: ... + @overload + def heading(self, column: _TreeviewColumnId, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ... + @overload + def heading(self, column: _TreeviewColumnId, option: Literal["command"]) -> str: ... + @overload + def heading(self, column: _TreeviewColumnId, option: str) -> Any: ... + @overload + def heading(self, column: _TreeviewColumnId, option: None = None) -> _TreeviewHeaderDict: ... 
# type: ignore[misc] + @overload + def heading( + self, + column: _TreeviewColumnId, + option: None = None, + *, + text: str = ..., + image: tkinter._ImageSpec = ..., + anchor: tkinter._Anchor = ..., + command: str | Callable[[], object] = ..., + ) -> None: ... + def identify(self, component, x, y): ... # Internal Method. Leave untyped + def identify_row(self, y: int) -> str: ... + def identify_column(self, x: int) -> str: ... + def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: ... + def identify_element(self, x: int, y: int) -> str: ... # don't know what possible return values are + def index(self, item: str) -> int: ... + def insert( + self, + parent: str, + index: int | Literal["end"], + iid: str | None = None, + *, + id: str = ..., # same as iid + text: str = ..., + image: tkinter._ImageSpec = ..., + values: list[Any] | tuple[Any, ...] = ..., + open: bool = ..., + tags: str | list[str] | tuple[str, ...] = ..., + ) -> str: ... + @overload + def item(self, item: str, option: Literal["text"]) -> str: ... + @overload + def item(self, item: str, option: Literal["image"]) -> tuple[str] | Literal[""]: ... + @overload + def item(self, item: str, option: Literal["values"]) -> tuple[Any, ...] | Literal[""]: ... + @overload + def item(self, item: str, option: Literal["open"]) -> bool: ... # actually 0 or 1 + @overload + def item(self, item: str, option: Literal["tags"]) -> tuple[str, ...] | Literal[""]: ... + @overload + def item(self, item: str, option: str) -> Any: ... + @overload + def item(self, item: str, option: None = None) -> _TreeviewItemDict: ... # type: ignore[misc] + @overload + def item( + self, + item: str, + option: None = None, + *, + text: str = ..., + image: tkinter._ImageSpec = ..., + values: list[Any] | tuple[Any, ...] | Literal[""] = ..., + open: bool = ..., + tags: str | list[str] | tuple[str, ...] = ..., + ) -> None: ... + def move(self, item: str, parent: str, index: int) -> None: ... + reattach = move + def next(self, item: str) -> str: ... # returning empty string means last item + def parent(self, item: str) -> str: ... + def prev(self, item: str) -> str: ... # returning empty string means first item + def see(self, item: str) -> None: ... + if sys.version_info >= (3, 8): + def selection(self) -> tuple[str, ...]: ... + else: + def selection(self, selop: Incomplete | None = ..., items: Incomplete | None = None) -> tuple[str, ...]: ... + + def selection_set(self, items: str | list[str] | tuple[str, ...]) -> None: ... + def selection_add(self, items: str | list[str] | tuple[str, ...]) -> None: ... + def selection_remove(self, items: str | list[str] | tuple[str, ...]) -> None: ... + def selection_toggle(self, items: str | list[str] | tuple[str, ...]) -> None: ... + @overload + def set(self, item: str, column: None = None, value: None = None) -> dict[str, Any]: ... + @overload + def set(self, item: str, column: _TreeviewColumnId, value: None = None) -> Any: ... + @overload + def set(self, item: str, column: _TreeviewColumnId, value: Any) -> Literal[""]: ... + # There's no tag_unbind() or 'add' argument for whatever reason. + # Also, it's 'callback' instead of 'func' here. + @overload + def tag_bind( + self, tagname: str, sequence: str | None = None, callback: Callable[[tkinter.Event[Treeview]], object] | None = None + ) -> str: ... + @overload + def tag_bind(self, tagname: str, sequence: str | None, callback: str) -> None: ... + @overload + def tag_bind(self, tagname: str, *, callback: str) -> None: ... 
+ @overload + def tag_configure(self, tagname: str, option: Literal["foreground", "background"]) -> tkinter._Color: ... + @overload + def tag_configure(self, tagname: str, option: Literal["font"]) -> _FontDescription: ... + @overload + def tag_configure(self, tagname: str, option: Literal["image"]) -> str: ... + @overload + def tag_configure( + self, + tagname: str, + option: None = None, + *, + # There is also 'text' and 'anchor', but they don't seem to do anything, using them is likely a bug + foreground: tkinter._Color = ..., + background: tkinter._Color = ..., + font: _FontDescription = ..., + image: tkinter._ImageSpec = ..., + ) -> _TreeviewTagDict | Any: ... # can be None but annoying to check + @overload + def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: ... + @overload + def tag_has(self, tagname: str, item: str) -> bool: ... + +class LabeledScale(Frame): + label: Incomplete + scale: Incomplete + # TODO: don't any-type **kw. That goes to Frame.__init__. + def __init__( + self, + master: tkinter.Misc | None = None, + variable: tkinter.IntVar | tkinter.DoubleVar | None = None, + from_: float = 0, + to: float = 10, + *, + compound: Literal["top", "bottom"] = ..., + **kw, + ) -> None: ... + # destroy is overridden, signature does not change + value: Any + +class OptionMenu(Menubutton): + def __init__( + self, + master, + variable, + default: str | None = None, + *values: str, + # rest of these are keyword-only because *args syntax used above + style: str = ..., + direction: Literal["above", "below", "left", "right", "flush"] = ..., + command: Callable[[tkinter.StringVar], object] | None = ..., + ) -> None: ... + # configure, config, cget, destroy are inherited from Menubutton + # destroy and __setitem__ are overridden, signature does not change + def set_menu(self, default: Incomplete | None = None, *values) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/token.pyi new file mode 100644 index 00000000..fcd6ef87 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/token.pyi @@ -0,0 +1,150 @@ +import sys + +__all__ = [ + "AMPER", + "AMPEREQUAL", + "AT", + "ATEQUAL", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COMMA", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ELLIPSIS", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "VBAR", + "VBAREQUAL", + "tok_name", + "ENCODING", + "NL", + "COMMENT", +] + +if sys.version_info >= (3, 8): + __all__ += ["ASYNC", "AWAIT", "COLONEQUAL", "TYPE_COMMENT", "TYPE_IGNORE"] + +if sys.version_info >= (3, 10): + __all__ += ["SOFT_KEYWORD"] + +ENDMARKER: int +NAME: int +NUMBER: int +STRING: int +NEWLINE: int +INDENT: int +DEDENT: int +LPAR: int +RPAR: int +LSQB: int +RSQB: int +COLON: int +COMMA: int +SEMI: int +PLUS: int +MINUS: int +STAR: int +SLASH: int +VBAR: int +AMPER: int +LESS: int +GREATER: int +EQUAL: int +DOT: int +PERCENT: int +LBRACE: int +RBRACE: int +EQEQUAL: int +NOTEQUAL: int +LESSEQUAL: int +GREATEREQUAL: int +TILDE: int +CIRCUMFLEX: int +LEFTSHIFT: int +RIGHTSHIFT: int +DOUBLESTAR: int +PLUSEQUAL: int +MINEQUAL: int +STAREQUAL: int +SLASHEQUAL: int +PERCENTEQUAL: int +AMPEREQUAL: int +VBAREQUAL: int +CIRCUMFLEXEQUAL: int +LEFTSHIFTEQUAL: int +RIGHTSHIFTEQUAL: int +DOUBLESTAREQUAL: int +DOUBLESLASH: int +DOUBLESLASHEQUAL: int +AT: int +RARROW: int +ELLIPSIS: int +ATEQUAL: int +if sys.version_info >= (3, 8): + AWAIT: int + ASYNC: int +OP: int +ERRORTOKEN: int +N_TOKENS: int +NT_OFFSET: int +tok_name: dict[int, str] +COMMENT: int +NL: int +ENCODING: int +if sys.version_info >= (3, 8): + TYPE_COMMENT: int + TYPE_IGNORE: int + COLONEQUAL: int + EXACT_TOKEN_TYPES: dict[str, int] +if sys.version_info >= (3, 10): + SOFT_KEYWORD: int + +def ISTERMINAL(x: int) -> bool: ... +def ISNONTERMINAL(x: int) -> bool: ... +def ISEOF(x: int) -> bool: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tokenize.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tokenize.pyi new file mode 100644 index 00000000..ba57402f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tokenize.pyi @@ -0,0 +1,175 @@ +import sys +from _typeshed import FileDescriptorOrPath +from collections.abc import Callable, Generator, Iterable, Sequence +from re import Pattern +from token import * +from typing import Any, NamedTuple, TextIO +from typing_extensions import TypeAlias + +__all__ = [ + "AMPER", + "AMPEREQUAL", + "AT", + "ATEQUAL", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COMMA", + "COMMENT", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ELLIPSIS", + "ENCODING", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NL", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "TokenInfo", + "VBAR", + "VBAREQUAL", + "detect_encoding", + "tok_name", + "tokenize", + "untokenize", +] + +if sys.version_info >= (3, 8): + __all__ += ["ASYNC", "AWAIT", "COLONEQUAL", "generate_tokens", "TYPE_COMMENT", "TYPE_IGNORE"] + +if sys.version_info >= (3, 10): + __all__ += ["SOFT_KEYWORD"] + +if sys.version_info >= (3, 8): + from token import EXACT_TOKEN_TYPES as EXACT_TOKEN_TYPES +else: + EXACT_TOKEN_TYPES: dict[str, int] + +cookie_re: Pattern[str] +blank_re: Pattern[bytes] + +_Position: TypeAlias = tuple[int, int] + +class _TokenInfo(NamedTuple): + type: int + string: str + start: _Position + end: _Position + line: str + +class TokenInfo(_TokenInfo): + @property + def exact_type(self) -> int: ... + +# Backwards compatible tokens can be sequences of a shorter length too +_Token: TypeAlias = TokenInfo | Sequence[int | str | _Position] + +class TokenError(Exception): ... +class StopTokenizing(Exception): ... # undocumented + +class Untokenizer: + tokens: list[str] + prev_row: int + prev_col: int + encoding: str | None + def add_whitespace(self, start: _Position) -> None: ... + def untokenize(self, iterable: Iterable[_Token]) -> str: ... + def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ... + +# the docstring says "returns bytes" but is incorrect -- +# if the ENCODING token is missing, it skips the encode +def untokenize(iterable: Iterable[_Token]) -> Any: ... +def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ... +def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ... +def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... # undocumented +def open(filename: FileDescriptorOrPath) -> TextIO: ... +def group(*choices: str) -> str: ... # undocumented +def any(*choices: str) -> str: ... # undocumented +def maybe(*choices: str) -> str: ... 
# undocumented + +Whitespace: str # undocumented +Comment: str # undocumented +Ignore: str # undocumented +Name: str # undocumented + +Hexnumber: str # undocumented +Binnumber: str # undocumented +Octnumber: str # undocumented +Decnumber: str # undocumented +Intnumber: str # undocumented +Exponent: str # undocumented +Pointfloat: str # undocumented +Expfloat: str # undocumented +Floatnumber: str # undocumented +Imagnumber: str # undocumented +Number: str # undocumented + +def _all_string_prefixes() -> set[str]: ... # undocumented + +StringPrefix: str # undocumented + +Single: str # undocumented +Double: str # undocumented +Single3: str # undocumented +Double3: str # undocumented +Triple: str # undocumented +String: str # undocumented + +Special: str # undocumented +Funny: str # undocumented + +PlainToken: str # undocumented +Token: str # undocumented + +ContStr: str # undocumented +PseudoExtras: str # undocumented +PseudoToken: str # undocumented + +endpats: dict[str, str] # undocumented +single_quoted: set[str] # undocumented +triple_quoted: set[str] # undocumented + +tabsize: int # undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tomllib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tomllib.pyi new file mode 100644 index 00000000..3a6ce93f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tomllib.pyi @@ -0,0 +1,10 @@ +from _typeshed import SupportsRead +from collections.abc import Callable +from typing import Any + +__all__ = ("loads", "load", "TOMLDecodeError") + +class TOMLDecodeError(ValueError): ... + +def load(__fp: SupportsRead[bytes], *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... +def loads(__s: str, *, parse_float: Callable[[str], Any] = ...) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/trace.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/trace.pyi new file mode 100644 index 00000000..f79b38f1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/trace.pyi @@ -0,0 +1,59 @@ +import sys +import types +from _typeshed import StrPath, TraceFunction +from collections.abc import Callable, Mapping, Sequence +from typing import Any, TypeVar +from typing_extensions import ParamSpec, TypeAlias + +__all__ = ["Trace", "CoverageResults"] + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_FileModuleFunction: TypeAlias = tuple[str, str | None, str] + +class CoverageResults: + def __init__( + self, + counts: dict[tuple[str, int], int] | None = None, + calledfuncs: dict[_FileModuleFunction, int] | None = None, + infile: StrPath | None = None, + callers: dict[tuple[_FileModuleFunction, _FileModuleFunction], int] | None = None, + outfile: StrPath | None = None, + ) -> None: ... # undocumented + def update(self, other: CoverageResults) -> None: ... + def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: ... + def write_results_file( + self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = None + ) -> tuple[int, int]: ... + def is_ignored_filename(self, filename: str) -> bool: ... 
# undocumented + +class Trace: + def __init__( + self, + count: int = 1, + trace: int = 1, + countfuncs: int = 0, + countcallers: int = 0, + ignoremods: Sequence[str] = ..., + ignoredirs: Sequence[str] = ..., + infile: StrPath | None = None, + outfile: StrPath | None = None, + timing: bool = False, + ) -> None: ... + def run(self, cmd: str | types.CodeType) -> None: ... + def runctx( + self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None + ) -> None: ... + if sys.version_info >= (3, 9): + def runfunc(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... + else: + def runfunc(self, func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... + + def file_module_function_of(self, frame: types.FrameType) -> _FileModuleFunction: ... + def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: ... + def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: ... + def globaltrace_lt(self, frame: types.FrameType, why: str, arg: Any) -> None: ... + def localtrace_trace_and_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... + def localtrace_trace(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... + def localtrace_count(self, frame: types.FrameType, why: str, arg: Any) -> TraceFunction: ... + def results(self) -> CoverageResults: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/traceback.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/traceback.pyi new file mode 100644 index 00000000..a6d6d3e1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/traceback.pyi @@ -0,0 +1,258 @@ +import sys +from _typeshed import SupportsWrite +from collections.abc import Generator, Iterable, Iterator, Mapping +from types import FrameType, TracebackType +from typing import Any, overload +from typing_extensions import Literal, Self, TypeAlias + +__all__ = [ + "extract_stack", + "extract_tb", + "format_exception", + "format_exception_only", + "format_list", + "format_stack", + "format_tb", + "print_exc", + "format_exc", + "print_exception", + "print_last", + "print_stack", + "print_tb", + "clear_frames", + "FrameSummary", + "StackSummary", + "TracebackException", + "walk_stack", + "walk_tb", +] + +_PT: TypeAlias = tuple[str, int, str, str | None] + +def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... + +if sys.version_info >= (3, 10): + @overload + def print_exception( + __exc: type[BaseException] | None, + value: BaseException | None = ..., + tb: TracebackType | None = ..., + limit: int | None = None, + file: SupportsWrite[str] | None = None, + chain: bool = True, + ) -> None: ... + @overload + def print_exception( + __exc: BaseException, *, limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True + ) -> None: ... + @overload + def format_exception( + __exc: type[BaseException] | None, + value: BaseException | None = ..., + tb: TracebackType | None = ..., + limit: int | None = None, + chain: bool = True, + ) -> list[str]: ... + @overload + def format_exception(__exc: BaseException, *, limit: int | None = None, chain: bool = True) -> list[str]: ... 
+ +else: + def print_exception( + etype: type[BaseException] | None, + value: BaseException | None, + tb: TracebackType | None, + limit: int | None = None, + file: SupportsWrite[str] | None = None, + chain: bool = True, + ) -> None: ... + def format_exception( + etype: type[BaseException] | None, + value: BaseException | None, + tb: TracebackType | None, + limit: int | None = None, + chain: bool = True, + ) -> list[str]: ... + +def print_exc(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... +def print_last(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... +def print_stack(f: FrameType | None = None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... +def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: ... +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: ... +def format_list(extracted_list: list[FrameSummary]) -> list[str]: ... + +# undocumented +def print_list(extracted_list: list[FrameSummary], file: SupportsWrite[str] | None = None) -> None: ... + +if sys.version_info >= (3, 10): + def format_exception_only(__exc: type[BaseException] | None, value: BaseException | None = ...) -> list[str]: ... + +else: + def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ... + +def format_exc(limit: int | None = None, chain: bool = True) -> str: ... +def format_tb(tb: TracebackType | None, limit: int | None = None) -> list[str]: ... +def format_stack(f: FrameType | None = None, limit: int | None = None) -> list[str]: ... +def clear_frames(tb: TracebackType | None) -> None: ... +def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: ... +def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: ... + +if sys.version_info >= (3, 11): + class _ExceptionPrintContext: + def indent(self) -> str: ... + def emit(self, text_gen: str | Iterable[str], margin_char: str | None = None) -> Generator[str, None, None]: ... + +class TracebackException: + __cause__: TracebackException + __context__: TracebackException + __suppress_context__: bool + stack: StackSummary + exc_type: type[BaseException] + filename: str + lineno: int + text: str + offset: int + msg: str + if sys.version_info >= (3, 11): + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + max_group_width: int = 15, + max_group_depth: int = 10, + _seen: set[int] | None = None, + ) -> None: ... + @classmethod + def from_exception( + cls, + exc: BaseException, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + max_group_width: int = 15, + max_group_depth: int = 10, + ) -> Self: ... + elif sys.version_info >= (3, 10): + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + _seen: set[int] | None = None, + ) -> None: ... + @classmethod + def from_exception( + cls, + exc: BaseException, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + ) -> Self: ... 
+ else: + def __init__( + self, + exc_type: type[BaseException], + exc_value: BaseException, + exc_traceback: TracebackType | None, + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + _seen: set[int] | None = None, + ) -> None: ... + @classmethod + def from_exception( + cls, exc: BaseException, *, limit: int | None = None, lookup_lines: bool = True, capture_locals: bool = False + ) -> Self: ... + + def __eq__(self, other: object) -> bool: ... + if sys.version_info >= (3, 11): + def format(self, *, chain: bool = True, _ctx: _ExceptionPrintContext | None = None) -> Generator[str, None, None]: ... + else: + def format(self, *, chain: bool = True) -> Generator[str, None, None]: ... + + def format_exception_only(self) -> Generator[str, None, None]: ... + + if sys.version_info >= (3, 11): + def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... + +class FrameSummary(Iterable[Any]): + if sys.version_info >= (3, 11): + def __init__( + self, + filename: str, + lineno: int | None, + name: str, + *, + lookup_line: bool = True, + locals: Mapping[str, str] | None = None, + line: str | None = None, + end_lineno: int | None = None, + colno: int | None = None, + end_colno: int | None = None, + ) -> None: ... + end_lineno: int | None + colno: int | None + end_colno: int | None + else: + def __init__( + self, + filename: str, + lineno: int | None, + name: str, + *, + lookup_line: bool = True, + locals: Mapping[str, str] | None = None, + line: str | None = None, + ) -> None: ... + filename: str + lineno: int | None + name: str + locals: dict[str, str] | None + @property + def line(self) -> str | None: ... + @overload + def __getitem__(self, pos: Literal[0]) -> str: ... + @overload + def __getitem__(self, pos: Literal[1]) -> int: ... + @overload + def __getitem__(self, pos: Literal[2]) -> str: ... + @overload + def __getitem__(self, pos: Literal[3]) -> str | None: ... + @overload + def __getitem__(self, pos: int) -> Any: ... + def __iter__(self) -> Iterator[Any]: ... + def __eq__(self, other: object) -> bool: ... + if sys.version_info >= (3, 8): + def __len__(self) -> Literal[4]: ... + +class StackSummary(list[FrameSummary]): + @classmethod + def extract( + cls, + frame_gen: Iterable[tuple[FrameType, int]], + *, + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + ) -> StackSummary: ... + @classmethod + def from_list(cls, a_list: Iterable[FrameSummary | _PT]) -> StackSummary: ... + if sys.version_info >= (3, 11): + def format_frame_summary(self, frame_summary: FrameSummary) -> str: ... + + def format(self) -> list[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tracemalloc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tracemalloc.pyi new file mode 100644 index 00000000..3dc8b860 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tracemalloc.pyi @@ -0,0 +1,119 @@ +import sys +from _tracemalloc import * +from collections.abc import Sequence +from typing import Any, overload +from typing_extensions import SupportsIndex, TypeAlias + +def get_object_traceback(obj: object) -> Traceback | None: ... +def take_snapshot() -> Snapshot: ... + +class BaseFilter: + inclusive: bool + def __init__(self, inclusive: bool) -> None: ... + +class DomainFilter(BaseFilter): + @property + def domain(self) -> int: ... + def __init__(self, inclusive: bool, domain: int) -> None: ... 
+ +class Filter(BaseFilter): + domain: int | None + lineno: int | None + @property + def filename_pattern(self) -> str: ... + all_frames: bool + def __init__( + self, + inclusive: bool, + filename_pattern: str, + lineno: int | None = None, + all_frames: bool = False, + domain: int | None = None, + ) -> None: ... + +class Statistic: + count: int + size: int + traceback: Traceback + def __init__(self, traceback: Traceback, size: int, count: int) -> None: ... + def __eq__(self, other: object) -> bool: ... + +class StatisticDiff: + count: int + count_diff: int + size: int + size_diff: int + traceback: Traceback + def __init__(self, traceback: Traceback, size: int, size_diff: int, count: int, count_diff: int) -> None: ... + def __eq__(self, other: object) -> bool: ... + +_FrameTuple: TypeAlias = tuple[str, int] + +class Frame: + @property + def filename(self) -> str: ... + @property + def lineno(self) -> int: ... + def __init__(self, frame: _FrameTuple) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Frame) -> bool: ... + if sys.version_info >= (3, 11): + def __gt__(self, other: Frame) -> bool: ... + def __ge__(self, other: Frame) -> bool: ... + def __le__(self, other: Frame) -> bool: ... + else: + def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + +if sys.version_info >= (3, 9): + _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] +else: + _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple]] + +class Trace: + @property + def domain(self) -> int: ... + @property + def size(self) -> int: ... + @property + def traceback(self) -> Traceback: ... + def __init__(self, trace: _TraceTuple) -> None: ... + def __eq__(self, other: object) -> bool: ... + +class Traceback(Sequence[Frame]): + if sys.version_info >= (3, 9): + @property + def total_nframe(self) -> int | None: ... + def __init__(self, frames: Sequence[_FrameTuple], total_nframe: int | None = None) -> None: ... + else: + def __init__(self, frames: Sequence[_FrameTuple]) -> None: ... + + def format(self, limit: int | None = None, most_recent_first: bool = False) -> list[str]: ... + @overload + def __getitem__(self, index: SupportsIndex) -> Frame: ... + @overload + def __getitem__(self, index: slice) -> Sequence[Frame]: ... + def __contains__(self, frame: Frame) -> bool: ... # type: ignore[override] + def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Traceback) -> bool: ... + if sys.version_info >= (3, 11): + def __gt__(self, other: Traceback) -> bool: ... + def __ge__(self, other: Traceback) -> bool: ... + def __le__(self, other: Traceback) -> bool: ... + else: + def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + +class Snapshot: + def __init__(self, traces: Sequence[_TraceTuple], traceback_limit: int) -> None: ... + def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = False) -> list[StatisticDiff]: ... + def dump(self, filename: str) -> None: ... + def filter_traces(self, filters: Sequence[DomainFilter | Filter]) -> Snapshot: ... + @staticmethod + def load(filename: str) -> Snapshot: ... 
+ def statistics(self, key_type: str, cumulative: bool = False) -> list[Statistic]: ... + traceback_limit: int + traces: Sequence[Trace] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tty.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tty.pyi new file mode 100644 index 00000000..43f2e1cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/tty.pyi @@ -0,0 +1,19 @@ +import sys +from typing import IO +from typing_extensions import TypeAlias + +if sys.platform != "win32": + __all__ = ["setraw", "setcbreak"] + + _FD: TypeAlias = int | IO[str] + + # XXX: Undocumented integer constants + IFLAG: int + OFLAG: int + CFLAG: int + LFLAG: int + ISPEED: int + OSPEED: int + CC: int + def setraw(fd: _FD, when: int = 2) -> None: ... + def setcbreak(fd: _FD, when: int = 2) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/turtle.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/turtle.pyi new file mode 100644 index 00000000..8017c829 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/turtle.pyi @@ -0,0 +1,698 @@ +from collections.abc import Callable, Sequence +from tkinter import Canvas, Frame, Misc, PhotoImage, Scrollbar +from typing import Any, ClassVar, overload +from typing_extensions import Self, TypeAlias + +__all__ = [ + "ScrolledCanvas", + "TurtleScreen", + "Screen", + "RawTurtle", + "Turtle", + "RawPen", + "Pen", + "Shape", + "Vec2D", + "addshape", + "bgcolor", + "bgpic", + "bye", + "clearscreen", + "colormode", + "delay", + "exitonclick", + "getcanvas", + "getshapes", + "listen", + "mainloop", + "mode", + "numinput", + "onkey", + "onkeypress", + "onkeyrelease", + "onscreenclick", + "ontimer", + "register_shape", + "resetscreen", + "screensize", + "setup", + "setworldcoordinates", + "textinput", + "title", + "tracer", + "turtles", + "update", + "window_height", + "window_width", + "back", + "backward", + "begin_fill", + "begin_poly", + "bk", + "circle", + "clear", + "clearstamp", + "clearstamps", + "clone", + "color", + "degrees", + "distance", + "dot", + "down", + "end_fill", + "end_poly", + "fd", + "fillcolor", + "filling", + "forward", + "get_poly", + "getpen", + "getscreen", + "get_shapepoly", + "getturtle", + "goto", + "heading", + "hideturtle", + "home", + "ht", + "isdown", + "isvisible", + "left", + "lt", + "onclick", + "ondrag", + "onrelease", + "pd", + "pen", + "pencolor", + "pendown", + "pensize", + "penup", + "pos", + "position", + "pu", + "radians", + "right", + "reset", + "resizemode", + "rt", + "seth", + "setheading", + "setpos", + "setposition", + "settiltangle", + "setundobuffer", + "setx", + "sety", + "shape", + "shapesize", + "shapetransform", + "shearfactor", + "showturtle", + "speed", + "st", + "stamp", + "tilt", + "tiltangle", + "towards", + "turtlesize", + "undo", + "undobufferentries", + "up", + "width", + "write", + "xcor", + "ycor", + "write_docstringdict", + "done", + "Terminator", +] + +# Note: '_Color' is the alias we use for arguments and _AnyColor is the +# alias we use for return types. Really, these two aliases should be the +# same, but as per the "no union returns" typeshed policy, we'll return +# Any instead. +_Color: TypeAlias = str | tuple[float, float, float] +_AnyColor: TypeAlias = Any + +# TODO: Replace this with a TypedDict once it becomes standardized. 
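+# Illustrative sketch only (not from upstream typeshed): if the TODO above were
+# carried out, the TypedDict could mirror the keyword arguments accepted by
+# `pen()` later in this stub, roughly:
+#
+#     class _PenState(TypedDict, total=False):
+#         shown: bool
+#         pendown: bool
+#         pencolor: _Color
+#         fillcolor: _Color
+#         pensize: int
+#         speed: int
+#         resizemode: str
+#         stretchfactor: tuple[float, float]
+#         outline: int
+#         tilt: float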
+_PenState: TypeAlias = dict[str, Any] + +_Speed: TypeAlias = str | float +_PolygonCoords: TypeAlias = Sequence[tuple[float, float]] + +class Vec2D(tuple[float, float]): + def __new__(cls, x: float, y: float) -> Self: ... + def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] + @overload # type: ignore[override] + def __mul__(self, other: Vec2D) -> float: ... + @overload + def __mul__(self, other: float) -> Vec2D: ... + def __rmul__(self, other: float) -> Vec2D: ... # type: ignore[override] + def __sub__(self, other: tuple[float, float]) -> Vec2D: ... + def __neg__(self) -> Vec2D: ... + def __abs__(self) -> float: ... + def rotate(self, angle: float) -> Vec2D: ... + +# Does not actually inherit from Canvas, but dynamically gets all methods of Canvas +class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] + bg: str + hscroll: Scrollbar + vscroll: Scrollbar + def __init__( + self, master: Misc | None, width: int = 500, height: int = 350, canvwidth: int = 600, canvheight: int = 500 + ) -> None: ... + canvwidth: int + canvheight: int + def reset(self, canvwidth: int | None = None, canvheight: int | None = None, bg: str | None = None) -> None: ... + +class TurtleScreenBase: + cv: Canvas + canvwidth: int + canvheight: int + xscale: float + yscale: float + def __init__(self, cv: Canvas) -> None: ... + def mainloop(self) -> None: ... + def textinput(self, title: str, prompt: str) -> str | None: ... + def numinput( + self, title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None + ) -> float | None: ... + +class Terminator(Exception): ... +class TurtleGraphicsError(Exception): ... + +class Shape: + def __init__(self, type_: str, data: _PolygonCoords | PhotoImage | None = None) -> None: ... + def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = None) -> None: ... + +class TurtleScreen(TurtleScreenBase): + def __init__(self, cv: Canvas, mode: str = "standard", colormode: float = 1.0, delay: int = 10) -> None: ... + def clear(self) -> None: ... + @overload + def mode(self, mode: None = None) -> str: ... + @overload + def mode(self, mode: str) -> None: ... + def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: ... + def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... + @overload + def colormode(self, cmode: None = None) -> float: ... + @overload + def colormode(self, cmode: float) -> None: ... + def reset(self) -> None: ... + def turtles(self) -> list[Turtle]: ... + @overload + def bgcolor(self) -> _AnyColor: ... + @overload + def bgcolor(self, color: _Color) -> None: ... + @overload + def bgcolor(self, r: float, g: float, b: float) -> None: ... + @overload + def tracer(self, n: None = None) -> int: ... + @overload + def tracer(self, n: int, delay: int | None = None) -> None: ... + @overload + def delay(self, delay: None = None) -> int: ... + @overload + def delay(self, delay: int) -> None: ... + def update(self) -> None: ... + def window_width(self) -> int: ... + def window_height(self) -> int: ... + def getcanvas(self) -> Canvas: ... + def getshapes(self) -> list[str]: ... + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... + def onkey(self, fun: Callable[[], object], key: str) -> None: ... + def listen(self, xdummy: float | None = None, ydummy: float | None = None) -> None: ... + def ontimer(self, fun: Callable[[], object], t: int = 0) -> None: ... 
+ @overload + def bgpic(self, picname: None = None) -> str: ... + @overload + def bgpic(self, picname: str) -> None: ... + @overload + def screensize(self, canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... + # Looks like if self.cv is not a ScrolledCanvas, this could return a tuple as well + @overload + def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... + onscreenclick = onclick + resetscreen = reset + clearscreen = clear + addshape = register_shape + def onkeypress(self, fun: Callable[[], object], key: str | None = None) -> None: ... + onkeyrelease = onkey + +class TNavigator: + START_ORIENTATION: dict[str, Vec2D] + DEFAULT_MODE: str + DEFAULT_ANGLEOFFSET: int + DEFAULT_ANGLEORIENT: int + def __init__(self, mode: str = "standard") -> None: ... + def reset(self) -> None: ... + def degrees(self, fullcircle: float = 360.0) -> None: ... + def radians(self) -> None: ... + def forward(self, distance: float) -> None: ... + def back(self, distance: float) -> None: ... + def right(self, angle: float) -> None: ... + def left(self, angle: float) -> None: ... + def pos(self) -> Vec2D: ... + def xcor(self) -> float: ... + def ycor(self) -> float: ... + @overload + def goto(self, x: tuple[float, float], y: None = None) -> None: ... + @overload + def goto(self, x: float, y: float) -> None: ... + def home(self) -> None: ... + def setx(self, x: float) -> None: ... + def sety(self, y: float) -> None: ... + @overload + def distance(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... + @overload + def distance(self, x: float, y: float) -> float: ... + @overload + def towards(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... + @overload + def towards(self, x: float, y: float) -> float: ... + def heading(self) -> float: ... + def setheading(self, to_angle: float) -> None: ... + def circle(self, radius: float, extent: float | None = None, steps: int | None = None) -> None: ... + fd = forward + bk = back + backward = back + rt = right + lt = left + position = pos + setpos = goto + setposition = goto + seth = setheading + +class TPen: + def __init__(self, resizemode: str = "noresize") -> None: ... + @overload + def resizemode(self, rmode: None = None) -> str: ... + @overload + def resizemode(self, rmode: str) -> None: ... + @overload + def pensize(self, width: None = None) -> int: ... + @overload + def pensize(self, width: int) -> None: ... + def penup(self) -> None: ... + def pendown(self) -> None: ... + def isdown(self) -> bool: ... + @overload + def speed(self, speed: None = None) -> int: ... + @overload + def speed(self, speed: _Speed) -> None: ... + @overload + def pencolor(self) -> _AnyColor: ... + @overload + def pencolor(self, color: _Color) -> None: ... + @overload + def pencolor(self, r: float, g: float, b: float) -> None: ... + @overload + def fillcolor(self) -> _AnyColor: ... + @overload + def fillcolor(self, color: _Color) -> None: ... + @overload + def fillcolor(self, r: float, g: float, b: float) -> None: ... + @overload + def color(self) -> tuple[_AnyColor, _AnyColor]: ... + @overload + def color(self, color: _Color) -> None: ... + @overload + def color(self, r: float, g: float, b: float) -> None: ... + @overload + def color(self, color1: _Color, color2: _Color) -> None: ... + def showturtle(self) -> None: ... + def hideturtle(self) -> None: ... + def isvisible(self) -> bool: ... 
+ # Note: signatures 1 and 2 overlap unsafely when no arguments are provided + @overload + def pen(self) -> _PenState: ... # type: ignore[misc] + @overload + def pen( + self, + pen: _PenState | None = None, + *, + shown: bool = ..., + pendown: bool = ..., + pencolor: _Color = ..., + fillcolor: _Color = ..., + pensize: int = ..., + speed: int = ..., + resizemode: str = ..., + stretchfactor: tuple[float, float] = ..., + outline: int = ..., + tilt: float = ..., + ) -> None: ... + width = pensize + up = penup + pu = penup + pd = pendown + down = pendown + st = showturtle + ht = hideturtle + +class RawTurtle(TPen, TNavigator): + screen: TurtleScreen + screens: ClassVar[list[TurtleScreen]] + def __init__( + self, + canvas: Canvas | TurtleScreen | None = None, + shape: str = "classic", + undobuffersize: int = 1000, + visible: bool = True, + ) -> None: ... + def reset(self) -> None: ... + def setundobuffer(self, size: int | None) -> None: ... + def undobufferentries(self) -> int: ... + def clear(self) -> None: ... + def clone(self) -> Self: ... + @overload + def shape(self, name: None = None) -> str: ... + @overload + def shape(self, name: str) -> None: ... + # Unsafely overlaps when no arguments are provided + @overload + def shapesize(self) -> tuple[float, float, float]: ... # type: ignore[misc] + @overload + def shapesize( + self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None + ) -> None: ... + @overload + def shearfactor(self, shear: None = None) -> float: ... + @overload + def shearfactor(self, shear: float) -> None: ... + # Unsafely overlaps when no arguments are provided + @overload + def shapetransform(self) -> tuple[float, float, float, float]: ... # type: ignore[misc] + @overload + def shapetransform( + self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None + ) -> None: ... + def get_shapepoly(self) -> _PolygonCoords | None: ... + def settiltangle(self, angle: float) -> None: ... + @overload + def tiltangle(self, angle: None = None) -> float: ... + @overload + def tiltangle(self, angle: float) -> None: ... + def tilt(self, angle: float) -> None: ... + # Can return either 'int' or Tuple[int, ...] based on if the stamp is + # a compound stamp or not. So, as per the "no Union return" policy, + # we return Any. + def stamp(self) -> Any: ... + def clearstamp(self, stampid: int | tuple[int, ...]) -> None: ... + def clearstamps(self, n: int | None = None) -> None: ... + def filling(self) -> bool: ... + def begin_fill(self) -> None: ... + def end_fill(self) -> None: ... + def dot(self, size: int | None = None, *color: _Color) -> None: ... + def write(self, arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ...) -> None: ... + def begin_poly(self) -> None: ... + def end_poly(self) -> None: ... + def get_poly(self) -> _PolygonCoords | None: ... + def getscreen(self) -> TurtleScreen: ... + def getturtle(self) -> Self: ... + getpen = getturtle + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def ondrag(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def undo(self) -> None: ... + turtlesize = shapesize + +class _Screen(TurtleScreen): + def __init__(self) -> None: ... 
+ # Note int and float are interpreted differently, hence the Union instead of just float + def setup( + self, + width: int | float = 0.5, # noqa: Y041 + height: int | float = 0.75, # noqa: Y041 + startx: int | None = None, + starty: int | None = None, + ) -> None: ... + def title(self, titlestring: str) -> None: ... + def bye(self) -> None: ... + def exitonclick(self) -> None: ... + +class Turtle(RawTurtle): + def __init__(self, shape: str = "classic", undobuffersize: int = 1000, visible: bool = True) -> None: ... + +RawPen = RawTurtle +Pen = Turtle + +def write_docstringdict(filename: str = "turtle_docstringdict") -> None: ... + +# Note: it's somewhat unfortunate that we have to copy the function signatures. +# It would be nice if we could partially reduce the redundancy by doing something +# like the following: +# +# _screen: Screen +# clear = _screen.clear +# +# However, it seems pytype does not support this type of syntax in pyi files. + +# Functions copied from TurtleScreenBase: + +# Note: mainloop() was always present in the global scope, but was added to +# TurtleScreenBase in Python 3.0 +def mainloop() -> None: ... +def textinput(title: str, prompt: str) -> str | None: ... +def numinput( + title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None +) -> float | None: ... + +# Functions copied from TurtleScreen: + +def clear() -> None: ... +@overload +def mode(mode: None = None) -> str: ... +@overload +def mode(mode: str) -> None: ... +def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: ... +def register_shape(name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... +@overload +def colormode(cmode: None = None) -> float: ... +@overload +def colormode(cmode: float) -> None: ... +def reset() -> None: ... +def turtles() -> list[Turtle]: ... +@overload +def bgcolor() -> _AnyColor: ... +@overload +def bgcolor(color: _Color) -> None: ... +@overload +def bgcolor(r: float, g: float, b: float) -> None: ... +@overload +def tracer(n: None = None) -> int: ... +@overload +def tracer(n: int, delay: int | None = None) -> None: ... +@overload +def delay(delay: None = None) -> int: ... +@overload +def delay(delay: int) -> None: ... +def update() -> None: ... +def window_width() -> int: ... +def window_height() -> int: ... +def getcanvas() -> Canvas: ... +def getshapes() -> list[str]: ... +def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... +def onkey(fun: Callable[[], object], key: str) -> None: ... +def listen(xdummy: float | None = None, ydummy: float | None = None) -> None: ... +def ontimer(fun: Callable[[], object], t: int = 0) -> None: ... +@overload +def bgpic(picname: None = None) -> str: ... +@overload +def bgpic(picname: str) -> None: ... +@overload +def screensize(canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... +@overload +def screensize(canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... + +onscreenclick = onclick +resetscreen = reset +clearscreen = clear +addshape = register_shape + +def onkeypress(fun: Callable[[], object], key: str | None = None) -> None: ... + +onkeyrelease = onkey + +# Functions copied from _Screen: + +def setup(width: float = 0.5, height: float = 0.75, startx: int | None = None, starty: int | None = None) -> None: ... +def title(titlestring: str) -> None: ... +def bye() -> None: ... +def exitonclick() -> None: ... +def Screen() -> _Screen: ... 
+ +# Functions copied from TNavigator: + +def degrees(fullcircle: float = 360.0) -> None: ... +def radians() -> None: ... +def forward(distance: float) -> None: ... +def back(distance: float) -> None: ... +def right(angle: float) -> None: ... +def left(angle: float) -> None: ... +def pos() -> Vec2D: ... +def xcor() -> float: ... +def ycor() -> float: ... +@overload +def goto(x: tuple[float, float], y: None = None) -> None: ... +@overload +def goto(x: float, y: float) -> None: ... +def home() -> None: ... +def setx(x: float) -> None: ... +def sety(y: float) -> None: ... +@overload +def distance(x: TNavigator | tuple[float, float], y: None = None) -> float: ... +@overload +def distance(x: float, y: float) -> float: ... +@overload +def towards(x: TNavigator | tuple[float, float], y: None = None) -> float: ... +@overload +def towards(x: float, y: float) -> float: ... +def heading() -> float: ... +def setheading(to_angle: float) -> None: ... +def circle(radius: float, extent: float | None = None, steps: int | None = None) -> None: ... + +fd = forward +bk = back +backward = back +rt = right +lt = left +position = pos +setpos = goto +setposition = goto +seth = setheading + +# Functions copied from TPen: +@overload +def resizemode(rmode: None = None) -> str: ... +@overload +def resizemode(rmode: str) -> None: ... +@overload +def pensize(width: None = None) -> int: ... +@overload +def pensize(width: int) -> None: ... +def penup() -> None: ... +def pendown() -> None: ... +def isdown() -> bool: ... +@overload +def speed(speed: None = None) -> int: ... +@overload +def speed(speed: _Speed) -> None: ... +@overload +def pencolor() -> _AnyColor: ... +@overload +def pencolor(color: _Color) -> None: ... +@overload +def pencolor(r: float, g: float, b: float) -> None: ... +@overload +def fillcolor() -> _AnyColor: ... +@overload +def fillcolor(color: _Color) -> None: ... +@overload +def fillcolor(r: float, g: float, b: float) -> None: ... +@overload +def color() -> tuple[_AnyColor, _AnyColor]: ... +@overload +def color(color: _Color) -> None: ... +@overload +def color(r: float, g: float, b: float) -> None: ... +@overload +def color(color1: _Color, color2: _Color) -> None: ... +def showturtle() -> None: ... +def hideturtle() -> None: ... +def isvisible() -> bool: ... + +# Note: signatures 1 and 2 overlap unsafely when no arguments are provided +@overload +def pen() -> _PenState: ... # type: ignore[misc] +@overload +def pen( + pen: _PenState | None = None, + *, + shown: bool = ..., + pendown: bool = ..., + pencolor: _Color = ..., + fillcolor: _Color = ..., + pensize: int = ..., + speed: int = ..., + resizemode: str = ..., + stretchfactor: tuple[float, float] = ..., + outline: int = ..., + tilt: float = ..., +) -> None: ... + +width = pensize +up = penup +pu = penup +pd = pendown +down = pendown +st = showturtle +ht = hideturtle + +# Functions copied from RawTurtle: + +def setundobuffer(size: int | None) -> None: ... +def undobufferentries() -> int: ... +@overload +def shape(name: None = None) -> str: ... +@overload +def shape(name: str) -> None: ... + +# Unsafely overlaps when no arguments are provided +@overload +def shapesize() -> tuple[float, float, float]: ... # type: ignore[misc] +@overload +def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ... +@overload +def shearfactor(shear: None = None) -> float: ... +@overload +def shearfactor(shear: float) -> None: ... 
+ +# Unsafely overlaps when no arguments are provided +@overload +def shapetransform() -> tuple[float, float, float, float]: ... # type: ignore[misc] +@overload +def shapetransform( + t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None +) -> None: ... +def get_shapepoly() -> _PolygonCoords | None: ... +def settiltangle(angle: float) -> None: ... +@overload +def tiltangle(angle: None = None) -> float: ... +@overload +def tiltangle(angle: float) -> None: ... +def tilt(angle: float) -> None: ... + +# Can return either 'int' or Tuple[int, ...] based on if the stamp is +# a compound stamp or not. So, as per the "no Union return" policy, +# we return Any. +def stamp() -> Any: ... +def clearstamp(stampid: int | tuple[int, ...]) -> None: ... +def clearstamps(n: int | None = None) -> None: ... +def filling() -> bool: ... +def begin_fill() -> None: ... +def end_fill() -> None: ... +def dot(size: int | None = None, *color: _Color) -> None: ... +def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ...) -> None: ... +def begin_poly() -> None: ... +def end_poly() -> None: ... +def get_poly() -> _PolygonCoords | None: ... +def getscreen() -> TurtleScreen: ... +def getturtle() -> Turtle: ... + +getpen = getturtle + +def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... +def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... +def undo() -> None: ... + +turtlesize = shapesize + +# Functions copied from RawTurtle with a few tweaks: + +def clone() -> Turtle: ... + +# Extra functions present only in the global scope: + +done = mainloop diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/types.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/types.pyi new file mode 100644 index 00000000..7db11830 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/types.pyi @@ -0,0 +1,615 @@ +import sys +from _typeshed import SupportsKeysAndGetItem +from collections.abc import ( + AsyncGenerator, + Awaitable, + Callable, + Coroutine, + Generator, + ItemsView, + Iterable, + Iterator, + KeysView, + MutableSequence, + ValuesView, +) +from importlib.machinery import ModuleSpec + +# pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping +from typing import Any, ClassVar, Generic, Mapping, Protocol, TypeVar, overload # noqa: Y022 +from typing_extensions import Literal, ParamSpec, final + +__all__ = [ + "FunctionType", + "LambdaType", + "CodeType", + "MappingProxyType", + "SimpleNamespace", + "GeneratorType", + "CoroutineType", + "AsyncGeneratorType", + "MethodType", + "BuiltinFunctionType", + "ModuleType", + "TracebackType", + "FrameType", + "GetSetDescriptorType", + "MemberDescriptorType", + "new_class", + "prepare_class", + "DynamicClassAttribute", + "coroutine", + "BuiltinMethodType", + "ClassMethodDescriptorType", + "MethodDescriptorType", + "MethodWrapperType", + "WrapperDescriptorType", + "resolve_bases", +] + +if sys.version_info >= (3, 8): + __all__ += ["CellType"] + +if sys.version_info >= (3, 9): + __all__ += ["GenericAlias"] + +if sys.version_info >= (3, 10): + __all__ += ["EllipsisType", "NoneType", "NotImplementedType", "UnionType"] + +# Note, all classes "defined" here require special handling. 
+ +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) +_KT = TypeVar("_KT") +_VT_co = TypeVar("_VT_co", covariant=True) +_V_co = TypeVar("_V_co", covariant=True) + +@final +class _Cell: + if sys.version_info >= (3, 8): + def __init__(self, __contents: object = ...) -> None: ... + + __hash__: ClassVar[None] # type: ignore[assignment] + cell_contents: Any + +# Make sure this class definition stays roughly in line with `builtins.function` +@final +class FunctionType: + @property + def __closure__(self) -> tuple[_Cell, ...] | None: ... + __code__: CodeType + __defaults__: tuple[Any, ...] | None + __dict__: dict[str, Any] + @property + def __globals__(self) -> dict[str, Any]: ... + __name__: str + __qualname__: str + __annotations__: dict[str, Any] + __kwdefaults__: dict[str, Any] + if sys.version_info >= (3, 10): + @property + def __builtins__(self) -> dict[str, Any]: ... + + __module__: str + def __init__( + self, + code: CodeType, + globals: dict[str, Any], + name: str | None = ..., + argdefs: tuple[object, ...] | None = ..., + closure: tuple[_Cell, ...] | None = ..., + ) -> None: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + @overload + def __get__(self, __instance: None, __owner: type) -> FunctionType: ... + @overload + def __get__(self, __instance: object, __owner: type | None = None) -> MethodType: ... + +LambdaType = FunctionType + +@final +class CodeType: + @property + def co_argcount(self) -> int: ... + if sys.version_info >= (3, 8): + @property + def co_posonlyargcount(self) -> int: ... + + @property + def co_kwonlyargcount(self) -> int: ... + @property + def co_nlocals(self) -> int: ... + @property + def co_stacksize(self) -> int: ... + @property + def co_flags(self) -> int: ... + @property + def co_code(self) -> bytes: ... + @property + def co_consts(self) -> tuple[Any, ...]: ... + @property + def co_names(self) -> tuple[str, ...]: ... + @property + def co_varnames(self) -> tuple[str, ...]: ... + @property + def co_filename(self) -> str: ... + @property + def co_name(self) -> str: ... + @property + def co_firstlineno(self) -> int: ... + @property + def co_lnotab(self) -> bytes: ... + @property + def co_freevars(self) -> tuple[str, ...]: ... + @property + def co_cellvars(self) -> tuple[str, ...]: ... + if sys.version_info >= (3, 10): + @property + def co_linetable(self) -> bytes: ... + def co_lines(self) -> Iterator[tuple[int, int, int | None]]: ... + if sys.version_info >= (3, 11): + @property + def co_exceptiontable(self) -> bytes: ... + @property + def co_qualname(self) -> str: ... + def co_positions(self) -> Iterable[tuple[int | None, int | None, int | None, int | None]]: ... + + if sys.version_info >= (3, 11): + def __init__( + self, + __argcount: int, + __posonlyargcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __qualname: str, + __firstlineno: int, + __linetable: bytes, + __exceptiontable: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... 
+ elif sys.version_info >= (3, 10): + def __init__( + self, + __argcount: int, + __posonlyargcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __firstlineno: int, + __linetable: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... + elif sys.version_info >= (3, 8): + def __init__( + self, + __argcount: int, + __posonlyargcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __firstlineno: int, + __lnotab: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... + else: + def __init__( + self, + __argcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __firstlineno: int, + __lnotab: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... + if sys.version_info >= (3, 11): + def replace( + self, + *, + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_qualname: str = ..., + co_linetable: bytes = ..., + co_exceptiontable: bytes = ..., + ) -> CodeType: ... + elif sys.version_info >= (3, 10): + def replace( + self, + *, + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_linetable: bytes = ..., + ) -> CodeType: ... + elif sys.version_info >= (3, 8): + def replace( + self, + *, + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_lnotab: bytes = ..., + ) -> CodeType: ... + +@final +class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): + __hash__: ClassVar[None] # type: ignore[assignment] + def __init__(self, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> None: ... + def __getitem__(self, __key: _KT) -> _VT_co: ... + def __iter__(self) -> Iterator[_KT]: ... + def __len__(self) -> int: ... + def __eq__(self, __value: object) -> bool: ... + def copy(self) -> dict[_KT, _VT_co]: ... 
+ def keys(self) -> KeysView[_KT]: ... + def values(self) -> ValuesView[_VT_co]: ... + def items(self) -> ItemsView[_KT, _VT_co]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + def __reversed__(self) -> Iterator[_KT]: ... + def __or__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT_co | _T2]: ... + def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT_co | _T2]: ... + +class SimpleNamespace: + __hash__: ClassVar[None] # type: ignore[assignment] + def __init__(self, **kwargs: Any) -> None: ... + def __getattribute__(self, __name: str) -> Any: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __delattr__(self, __name: str) -> None: ... + +class _LoaderProtocol(Protocol): + def load_module(self, fullname: str) -> ModuleType: ... + +class ModuleType: + __name__: str + __file__: str | None + @property + def __dict__(self) -> dict[str, Any]: ... # type: ignore[override] + __loader__: _LoaderProtocol | None + __package__: str | None + __path__: MutableSequence[str] + __spec__: ModuleSpec | None + def __init__(self, name: str, doc: str | None = ...) -> None: ... + # __getattr__ doesn't exist at runtime, + # but having it here in typeshed makes dynamic imports + # using `builtins.__import__` or `importlib.import_module` less painful + def __getattr__(self, name: str) -> Any: ... + +@final +class GeneratorType(Generator[_T_co, _T_contra, _V_co]): + @property + def gi_yieldfrom(self) -> GeneratorType[_T_co, _T_contra, Any] | None: ... + if sys.version_info >= (3, 11): + @property + def gi_suspended(self) -> bool: ... + __name__: str + __qualname__: str + def __iter__(self) -> GeneratorType[_T_co, _T_contra, _V_co]: ... + def __next__(self) -> _T_co: ... + def send(self, __arg: _T_contra) -> _T_co: ... + @overload + def throw( + self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... + @overload + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = ...) -> _T_co: ... + +@final +class AsyncGeneratorType(AsyncGenerator[_T_co, _T_contra]): + @property + def ag_await(self) -> Awaitable[Any] | None: ... + __name__: str + __qualname__: str + def __aiter__(self) -> AsyncGeneratorType[_T_co, _T_contra]: ... + def __anext__(self) -> Coroutine[Any, Any, _T_co]: ... + def asend(self, __val: _T_contra) -> Coroutine[Any, Any, _T_co]: ... + @overload + async def athrow( + self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... + @overload + async def athrow(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = ...) -> _T_co: ... + def aclose(self) -> Coroutine[Any, Any, None]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +@final +class CoroutineType(Coroutine[_T_co, _T_contra, _V_co]): + __name__: str + __qualname__: str + @property + def cr_origin(self) -> tuple[tuple[str, int, str], ...] | None: ... + if sys.version_info >= (3, 11): + @property + def cr_suspended(self) -> bool: ... + + def close(self) -> None: ... + def __await__(self) -> Generator[Any, None, _V_co]: ... + def send(self, __arg: _T_contra) -> _T_co: ... + @overload + def throw( + self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... + @overload + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = ...) 
-> _T_co: ... + +class _StaticFunctionType: + # Fictional type to correct the type of MethodType.__func__. + # FunctionType is a descriptor, so mypy follows the descriptor protocol and + # converts MethodType.__func__ back to MethodType (the return type of + # FunctionType.__get__). But this is actually a special case; MethodType is + # implemented in C and its attribute access doesn't go through + # __getattribute__. + # By wrapping FunctionType in _StaticFunctionType, we get the right result; + # similar to wrapping a function in staticmethod() at runtime to prevent it + # being bound as a method. + def __get__(self, obj: object, type: type | None) -> FunctionType: ... + +@final +class MethodType: + @property + def __closure__(self) -> tuple[_Cell, ...] | None: ... # inherited from the added function + @property + def __defaults__(self) -> tuple[Any, ...] | None: ... # inherited from the added function + @property + def __func__(self) -> _StaticFunctionType: ... + @property + def __self__(self) -> object: ... + @property + def __name__(self) -> str: ... # inherited from the added function + @property + def __qualname__(self) -> str: ... # inherited from the added function + def __init__(self, __func: Callable[..., Any], __obj: object) -> None: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +@final +class BuiltinFunctionType: + @property + def __self__(self) -> object | ModuleType: ... + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +BuiltinMethodType = BuiltinFunctionType + +@final +class WrapperDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, __instance: Any, __owner: type | None = None) -> Any: ... + +@final +class MethodWrapperType: + @property + def __self__(self) -> object: ... + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __eq__(self, __other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... + +@final +class MethodDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, __instance: Any, __owner: type | None = None) -> Any: ... + +@final +class ClassMethodDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, __instance: Any, __owner: type | None = None) -> Any: ... + +@final +class TracebackType: + def __init__(self, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int) -> None: ... + tb_next: TracebackType | None + # the rest are read-only even in 3.7 + @property + def tb_frame(self) -> FrameType: ... + @property + def tb_lasti(self) -> int: ... + @property + def tb_lineno(self) -> int: ... + +@final +class FrameType: + @property + def f_back(self) -> FrameType | None: ... + @property + def f_builtins(self) -> dict[str, Any]: ... + @property + def f_code(self) -> CodeType: ... 
+ @property + def f_globals(self) -> dict[str, Any]: ... + @property + def f_lasti(self) -> int: ... + # see discussion in #6769: f_lineno *can* sometimes be None, + # but you should probably file a bug report with CPython if you encounter it being None in the wild. + # An `int | None` annotation here causes too many false-positive errors. + @property + def f_lineno(self) -> int | Any: ... + @property + def f_locals(self) -> dict[str, Any]: ... + f_trace: Callable[[FrameType, str, Any], Any] | None + f_trace_lines: bool + f_trace_opcodes: bool + def clear(self) -> None: ... + +@final +class GetSetDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __get__(self, __instance: Any, __owner: type | None = None) -> Any: ... + def __set__(self, __instance: Any, __value: Any) -> None: ... + def __delete__(self, __obj: Any) -> None: ... + +@final +class MemberDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __get__(self, __instance: Any, __owner: type | None = None) -> Any: ... + def __set__(self, __instance: Any, __value: Any) -> None: ... + def __delete__(self, __obj: Any) -> None: ... + +def new_class( + name: str, + bases: Iterable[object] = ..., + kwds: dict[str, Any] | None = None, + exec_body: Callable[[dict[str, Any]], object] | None = None, +) -> type: ... +def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ... +def prepare_class( + name: str, bases: tuple[type, ...] = ..., kwds: dict[str, Any] | None = None +) -> tuple[type, dict[str, Any], dict[str, Any]]: ... + +# Actually a different type, but `property` is special and we want that too. +DynamicClassAttribute = property + +_Fn = TypeVar("_Fn", bound=Callable[..., object]) +_R = TypeVar("_R") +_P = ParamSpec("_P") + +# it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable +# The type: ignore is due to overlapping overloads, not the use of ParamSpec +@overload +def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[misc] +@overload +def coroutine(func: _Fn) -> _Fn: ... + +if sys.version_info >= (3, 8): + CellType = _Cell + +if sys.version_info >= (3, 9): + class GenericAlias: + @property + def __origin__(self) -> type: ... + @property + def __args__(self) -> tuple[Any, ...]: ... + @property + def __parameters__(self) -> tuple[Any, ...]: ... + def __init__(self, origin: type, args: Any) -> None: ... + def __getitem__(self, __typeargs: Any) -> GenericAlias: ... + if sys.version_info >= (3, 11): + @property + def __unpacked__(self) -> bool: ... + @property + def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... + + # GenericAlias delegates attr access to `__origin__` + def __getattr__(self, name: str) -> Any: ... + +if sys.version_info >= (3, 10): + @final + class NoneType: + def __bool__(self) -> Literal[False]: ... + EllipsisType = ellipsis # noqa: F821 from builtins + from builtins import _NotImplementedType + + NotImplementedType = _NotImplementedType + @final + class UnionType: + @property + def __args__(self) -> tuple[Any, ...]: ... + def __or__(self, __obj: Any) -> UnionType: ... + def __ror__(self, __obj: Any) -> UnionType: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/typing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/typing.pyi new file mode 100644 index 00000000..2cbb7c81 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/typing.pyi @@ -0,0 +1,850 @@ +import collections # Needed by aliases like DefaultDict, see mypy issue 2986 +import sys +import typing_extensions +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import IdentityFunction, Incomplete, SupportsKeysAndGetItem +from abc import ABCMeta, abstractmethod +from contextlib import AbstractAsyncContextManager, AbstractContextManager +from re import Match as Match, Pattern as Pattern +from types import ( + BuiltinFunctionType, + CodeType, + FrameType, + FunctionType, + MethodDescriptorType, + MethodType, + MethodWrapperType, + ModuleType, + TracebackType, + WrapperDescriptorType, +) +from typing_extensions import Never as _Never, ParamSpec as _ParamSpec, final as _final + +__all__ = [ + "AbstractSet", + "Any", + "AnyStr", + "AsyncContextManager", + "AsyncGenerator", + "AsyncIterable", + "AsyncIterator", + "Awaitable", + "ByteString", + "Callable", + "ChainMap", + "ClassVar", + "Collection", + "Container", + "ContextManager", + "Coroutine", + "Counter", + "DefaultDict", + "Deque", + "Dict", + "FrozenSet", + "Generator", + "Generic", + "Hashable", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "List", + "Mapping", + "MappingView", + "MutableMapping", + "MutableSequence", + "MutableSet", + "NamedTuple", + "NewType", + "Optional", + "Reversible", + "Sequence", + "Set", + "Sized", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsInt", + "SupportsRound", + "Text", + "Tuple", + "Type", + "TypeVar", + "Union", + "ValuesView", + "TYPE_CHECKING", + "cast", + "get_type_hints", + "no_type_check", + "no_type_check_decorator", + "overload", + "ForwardRef", + "NoReturn", + "OrderedDict", +] + +if sys.version_info >= (3, 8): + __all__ += [ + "Final", + "Literal", + "Protocol", + "SupportsIndex", + "TypedDict", + "final", + "get_args", + "get_origin", + "runtime_checkable", + ] + +if sys.version_info >= (3, 9): + __all__ += ["Annotated", "BinaryIO", "IO", "Match", "Pattern", "TextIO"] + +if sys.version_info >= (3, 10): + __all__ += ["Concatenate", "ParamSpec", "ParamSpecArgs", "ParamSpecKwargs", "TypeAlias", "TypeGuard", "is_typeddict"] + +if sys.version_info >= (3, 11): + __all__ += [ + "LiteralString", + "Never", + "NotRequired", + "Required", + "Self", + "TypeVarTuple", + "Unpack", + "assert_never", + "assert_type", + "clear_overloads", + "dataclass_transform", + "get_overloads", + "reveal_type", + ] + +ContextManager = AbstractContextManager +AsyncContextManager = AbstractAsyncContextManager + +# This itself is only available during type checking +def type_check_only(func_or_cls: _F) -> _F: ... + +Any = object() + +@_final +class TypeVar: + __name__: str + __bound__: Any | None + __constraints__: tuple[Any, ...] + __covariant__: bool + __contravariant__: bool + def __init__( + self, name: str, *constraints: Any, bound: Any | None = None, covariant: bool = False, contravariant: bool = False + ) -> None: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Incomplete) -> Incomplete: ... 
+ +# Used for an undocumented mypy feature. Does not exist at runtime. +_promote = object() + +# N.B. Keep this definition in sync with typing_extensions._SpecialForm +@_final +class _SpecialForm: + def __getitem__(self, parameters: Any) -> object: ... + if sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + +_F = TypeVar("_F", bound=Callable[..., Any]) +_P = _ParamSpec("_P") +_T = TypeVar("_T") + +def overload(func: _F) -> _F: ... + +# Unlike the vast majority module-level objects in stub files, +# these `_SpecialForm` objects in typing need the default value `= ...`, +# due to the fact that they are used elswhere in the same file. +# Otherwise, flake8 erroneously flags them as undefined. +# `_SpecialForm` objects in typing.py that are not used elswhere in the same file +# do not need the default value assignment. +Union: _SpecialForm = ... +Generic: _SpecialForm = ... +# Protocol is only present in 3.8 and later, but mypy needs it unconditionally +Protocol: _SpecialForm = ... +Callable: _SpecialForm = ... +Type: _SpecialForm = ... +NoReturn: _SpecialForm = ... +ClassVar: _SpecialForm = ... + +Optional: _SpecialForm +Tuple: _SpecialForm +if sys.version_info >= (3, 8): + Final: _SpecialForm + def final(f: _T) -> _T: ... + Literal: _SpecialForm + # TypedDict is a (non-subscriptable) special form. + TypedDict: object + +if sys.version_info >= (3, 11): + Self: _SpecialForm + Never: _SpecialForm = ... + Unpack: _SpecialForm + Required: _SpecialForm + NotRequired: _SpecialForm + LiteralString: _SpecialForm + + class TypeVarTuple: + __name__: str + def __init__(self, name: str) -> None: ... + def __iter__(self) -> Any: ... + def __typing_subst__(self, arg: Never) -> Never: ... + def __typing_prepare_subst__(self, alias: Incomplete, args: Incomplete) -> Incomplete: ... + +if sys.version_info >= (3, 10): + class ParamSpecArgs: + __origin__: ParamSpec + def __init__(self, origin: ParamSpec) -> None: ... + + class ParamSpecKwargs: + __origin__: ParamSpec + def __init__(self, origin: ParamSpec) -> None: ... + + class ParamSpec: + __name__: str + __bound__: Any | None + __covariant__: bool + __contravariant__: bool + def __init__( + self, name: str, *, bound: Any | None = None, contravariant: bool = False, covariant: bool = False + ) -> None: ... + @property + def args(self) -> ParamSpecArgs: ... + @property + def kwargs(self) -> ParamSpecKwargs: ... + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Incomplete) -> Incomplete: ... + def __typing_prepare_subst__(self, alias: Incomplete, args: Incomplete) -> Incomplete: ... + + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + Concatenate: _SpecialForm + TypeAlias: _SpecialForm + TypeGuard: _SpecialForm + + class NewType: + def __init__(self, name: str, tp: Any) -> None: ... + def __call__(self, x: _T) -> _T: ... + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + __supertype__: type + +else: + def NewType(name: str, tp: Any) -> Any: ... + +# These type variables are used by the container types. +_S = TypeVar("_S") +_KT = TypeVar("_KT") # Key type. +_VT = TypeVar("_VT") # Value type. +_T_co = TypeVar("_T_co", covariant=True) # Any type covariant containers. +_V_co = TypeVar("_V_co", covariant=True) # Any type covariant containers. +_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. 
+_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. +_T_contra = TypeVar("_T_contra", contravariant=True) # Ditto contravariant. +_TC = TypeVar("_TC", bound=Type[object]) + +def no_type_check(arg: _F) -> _F: ... +def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... # type: ignore[misc] + +# Type aliases and type constructors + +class _Alias: + # Class for defining generic aliases for library types. + def __getitem__(self, typeargs: Any) -> Any: ... + +List = _Alias() +Dict = _Alias() +DefaultDict = _Alias() +Set = _Alias() +FrozenSet = _Alias() +Counter = _Alias() +Deque = _Alias() +ChainMap = _Alias() + +OrderedDict = _Alias() + +if sys.version_info >= (3, 9): + Annotated: _SpecialForm + +# Predefined type variables. +AnyStr = TypeVar("AnyStr", str, bytes) # noqa: Y001 + +# Technically in 3.7 this inherited from GenericMeta. But let's not reflect that, since +# type checkers tend to assume that Protocols all have the ABCMeta metaclass. +class _ProtocolMeta(ABCMeta): ... + +# Abstract base classes. + +def runtime_checkable(cls: _TC) -> _TC: ... +@runtime_checkable +class SupportsInt(Protocol, metaclass=ABCMeta): + @abstractmethod + def __int__(self) -> int: ... + +@runtime_checkable +class SupportsFloat(Protocol, metaclass=ABCMeta): + @abstractmethod + def __float__(self) -> float: ... + +@runtime_checkable +class SupportsComplex(Protocol, metaclass=ABCMeta): + @abstractmethod + def __complex__(self) -> complex: ... + +@runtime_checkable +class SupportsBytes(Protocol, metaclass=ABCMeta): + @abstractmethod + def __bytes__(self) -> bytes: ... + +if sys.version_info >= (3, 8): + @runtime_checkable + class SupportsIndex(Protocol, metaclass=ABCMeta): + @abstractmethod + def __index__(self) -> int: ... + +@runtime_checkable +class SupportsAbs(Protocol[_T_co]): + @abstractmethod + def __abs__(self) -> _T_co: ... + +@runtime_checkable +class SupportsRound(Protocol[_T_co]): + @overload + @abstractmethod + def __round__(self) -> int: ... + @overload + @abstractmethod + def __round__(self, __ndigits: int) -> _T_co: ... + +@runtime_checkable +class Sized(Protocol, metaclass=ABCMeta): + @abstractmethod + def __len__(self) -> int: ... + +@runtime_checkable +class Hashable(Protocol, metaclass=ABCMeta): + # TODO: This is special, in that a subclass of a hashable class may not be hashable + # (for example, list vs. object). It's not obvious how to represent this. This class + # is currently mostly useless for static checking. + @abstractmethod + def __hash__(self) -> int: ... + +@runtime_checkable +class Iterable(Protocol[_T_co]): + @abstractmethod + def __iter__(self) -> Iterator[_T_co]: ... + +@runtime_checkable +class Iterator(Iterable[_T_co], Protocol[_T_co]): + @abstractmethod + def __next__(self) -> _T_co: ... + def __iter__(self) -> Iterator[_T_co]: ... + +@runtime_checkable +class Reversible(Iterable[_T_co], Protocol[_T_co]): + @abstractmethod + def __reversed__(self) -> Iterator[_T_co]: ... + +class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]): + def __next__(self) -> _T_co: ... + @abstractmethod + def send(self, __value: _T_contra) -> _T_co: ... + @overload + @abstractmethod + def throw( + self, __typ: Type[BaseException], __val: BaseException | object = None, __tb: TracebackType | None = None + ) -> _T_co: ... + @overload + @abstractmethod + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = None) -> _T_co: ... + def close(self) -> None: ... 
+ def __iter__(self) -> Generator[_T_co, _T_contra, _V_co]: ... + @property + def gi_code(self) -> CodeType: ... + @property + def gi_frame(self) -> FrameType: ... + @property + def gi_running(self) -> bool: ... + @property + def gi_yieldfrom(self) -> Generator[Any, Any, Any] | None: ... + +@runtime_checkable +class Awaitable(Protocol[_T_co]): + @abstractmethod + def __await__(self) -> Generator[Any, None, _T_co]: ... + +class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]): + __name__: str + __qualname__: str + @property + def cr_await(self) -> Any | None: ... + @property + def cr_code(self) -> CodeType: ... + @property + def cr_frame(self) -> FrameType: ... + @property + def cr_running(self) -> bool: ... + @abstractmethod + def send(self, __value: _T_contra) -> _T_co: ... + @overload + @abstractmethod + def throw( + self, __typ: Type[BaseException], __val: BaseException | object = None, __tb: TracebackType | None = None + ) -> _T_co: ... + @overload + @abstractmethod + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = None) -> _T_co: ... + @abstractmethod + def close(self) -> None: ... + +# NOTE: This type does not exist in typing.py or PEP 484 but mypy needs it to exist. +# The parameters correspond to Generator, but the 4th is the original type. +@type_check_only +class AwaitableGenerator( + Awaitable[_V_co], Generator[_T_co, _T_contra, _V_co], Generic[_T_co, _T_contra, _V_co, _S], metaclass=ABCMeta +): ... + +@runtime_checkable +class AsyncIterable(Protocol[_T_co]): + @abstractmethod + def __aiter__(self) -> AsyncIterator[_T_co]: ... + +@runtime_checkable +class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): + @abstractmethod + def __anext__(self) -> Awaitable[_T_co]: ... + def __aiter__(self) -> AsyncIterator[_T_co]: ... + +class AsyncGenerator(AsyncIterator[_T_co], Generic[_T_co, _T_contra]): + def __anext__(self) -> Awaitable[_T_co]: ... + @abstractmethod + def asend(self, __value: _T_contra) -> Awaitable[_T_co]: ... + @overload + @abstractmethod + def athrow( + self, __typ: Type[BaseException], __val: BaseException | object = None, __tb: TracebackType | None = None + ) -> Awaitable[_T_co]: ... + @overload + @abstractmethod + def athrow(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = None) -> Awaitable[_T_co]: ... + def aclose(self) -> Awaitable[None]: ... + @property + def ag_await(self) -> Any: ... + @property + def ag_code(self) -> CodeType: ... + @property + def ag_frame(self) -> FrameType: ... + @property + def ag_running(self) -> bool: ... + +@runtime_checkable +class Container(Protocol[_T_co]): + # This is generic more on vibes than anything else + @abstractmethod + def __contains__(self, __x: object) -> bool: ... + +@runtime_checkable +class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): + # Implement Sized (but don't have it as a base class). + @abstractmethod + def __len__(self) -> int: ... + +class Sequence(Collection[_T_co], Reversible[_T_co], Generic[_T_co]): + @overload + @abstractmethod + def __getitem__(self, index: int) -> _T_co: ... + @overload + @abstractmethod + def __getitem__(self, index: slice) -> Sequence[_T_co]: ... + # Mixin methods + def index(self, value: Any, start: int = 0, stop: int = ...) -> int: ... + def count(self, value: Any) -> int: ... + def __contains__(self, value: object) -> bool: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __reversed__(self) -> Iterator[_T_co]: ... 
+ +class MutableSequence(Sequence[_T], Generic[_T]): + @abstractmethod + def insert(self, index: int, value: _T) -> None: ... + @overload + @abstractmethod + def __getitem__(self, index: int) -> _T: ... + @overload + @abstractmethod + def __getitem__(self, index: slice) -> MutableSequence[_T]: ... + @overload + @abstractmethod + def __setitem__(self, index: int, value: _T) -> None: ... + @overload + @abstractmethod + def __setitem__(self, index: slice, value: Iterable[_T]) -> None: ... + @overload + @abstractmethod + def __delitem__(self, index: int) -> None: ... + @overload + @abstractmethod + def __delitem__(self, index: slice) -> None: ... + # Mixin methods + def append(self, value: _T) -> None: ... + def clear(self) -> None: ... + def extend(self, values: Iterable[_T]) -> None: ... + def reverse(self) -> None: ... + def pop(self, index: int = -1) -> _T: ... + def remove(self, value: _T) -> None: ... + def __iadd__(self, values: Iterable[_T]) -> typing_extensions.Self: ... + +class AbstractSet(Collection[_T_co], Generic[_T_co]): + @abstractmethod + def __contains__(self, x: object) -> bool: ... + def _hash(self) -> int: ... + # Mixin methods + def __le__(self, other: AbstractSet[Any]) -> bool: ... + def __lt__(self, other: AbstractSet[Any]) -> bool: ... + def __gt__(self, other: AbstractSet[Any]) -> bool: ... + def __ge__(self, other: AbstractSet[Any]) -> bool: ... + def __and__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ... + def __or__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... + def __sub__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ... + def __xor__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... + def isdisjoint(self, other: Iterable[Any]) -> bool: ... + +class MutableSet(AbstractSet[_T], Generic[_T]): + @abstractmethod + def add(self, value: _T) -> None: ... + @abstractmethod + def discard(self, value: _T) -> None: ... + # Mixin methods + def clear(self) -> None: ... + def pop(self) -> _T: ... + def remove(self, value: _T) -> None: ... + def __ior__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] + def __iand__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... + def __ixor__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] + def __isub__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... + +class MappingView(Sized): + def __init__(self, mapping: Mapping[Any, Any]) -> None: ... # undocumented + def __len__(self) -> int: ... + +class ItemsView(MappingView, AbstractSet[tuple[_KT_co, _VT_co]], Generic[_KT_co, _VT_co]): + def __init__(self, mapping: Mapping[_KT_co, _VT_co]) -> None: ... # undocumented + def __and__(self, other: Iterable[Any]) -> set[tuple[_KT_co, _VT_co]]: ... + def __rand__(self, other: Iterable[_T]) -> set[_T]: ... + def __contains__(self, item: object) -> bool: ... + def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + if sys.version_info >= (3, 8): + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + + def __or__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __ror__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __sub__(self, other: Iterable[Any]) -> set[tuple[_KT_co, _VT_co]]: ... + def __rsub__(self, other: Iterable[_T]) -> set[_T]: ... + def __xor__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __rxor__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... 
+ +class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]): + def __init__(self, mapping: Mapping[_KT_co, Any]) -> None: ... # undocumented + def __and__(self, other: Iterable[Any]) -> set[_KT_co]: ... + def __rand__(self, other: Iterable[_T]) -> set[_T]: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[_KT_co]: ... + if sys.version_info >= (3, 8): + def __reversed__(self) -> Iterator[_KT_co]: ... + + def __or__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + def __ror__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + def __sub__(self, other: Iterable[Any]) -> set[_KT_co]: ... + def __rsub__(self, other: Iterable[_T]) -> set[_T]: ... + def __xor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + def __rxor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + +class ValuesView(MappingView, Collection[_VT_co], Generic[_VT_co]): + def __init__(self, mapping: Mapping[Any, _VT_co]) -> None: ... # undocumented + def __contains__(self, value: object) -> bool: ... + def __iter__(self) -> Iterator[_VT_co]: ... + if sys.version_info >= (3, 8): + def __reversed__(self) -> Iterator[_VT_co]: ... + +class Mapping(Collection[_KT], Generic[_KT, _VT_co]): + # TODO: We wish the key type could also be covariant, but that doesn't work, + # see discussion in https://github.com/python/typing/pull/273. + @abstractmethod + def __getitem__(self, __key: _KT) -> _VT_co: ... + # Mixin methods + @overload + def get(self, __key: _KT) -> _VT_co | None: ... + @overload + def get(self, __key: _KT, default: _VT_co | _T) -> _VT_co | _T: ... + def items(self) -> ItemsView[_KT, _VT_co]: ... + def keys(self) -> KeysView[_KT]: ... + def values(self) -> ValuesView[_VT_co]: ... + def __contains__(self, __o: object) -> bool: ... + +class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): + @abstractmethod + def __setitem__(self, __key: _KT, __value: _VT) -> None: ... + @abstractmethod + def __delitem__(self, __key: _KT) -> None: ... + def clear(self) -> None: ... + @overload + def pop(self, __key: _KT) -> _VT: ... + @overload + def pop(self, __key: _KT, default: _VT | _T) -> _VT | _T: ... + def popitem(self) -> tuple[_KT, _VT]: ... + # This overload should be allowed only if the value type is compatible with None. + # + # Keep the following methods in line with MutableMapping.setdefault, modulo positional-only differences: + # -- collections.OrderedDict.setdefault + # -- collections.ChainMap.setdefault + # -- weakref.WeakKeyDictionary.setdefault + @overload + def setdefault(self: MutableMapping[_KT, _T | None], __key: _KT, __default: None = None) -> _T | None: ... + @overload + def setdefault(self, __key: _KT, __default: _VT) -> _VT: ... + # 'update' used to take a Union, but using overloading is better. + # The second overloaded type here is a bit too general, because + # Mapping[tuple[_KT, _VT], W] is a subclass of Iterable[tuple[_KT, _VT]], + # but will always have the behavior of the first overloaded type + # at runtime, leading to keys of a mix of types _KT and tuple[_KT, _VT]. + # We don't currently have any way of forcing all Mappings to use + # the first overload, but by using overloading rather than a Union, + # mypy will commit to using the first overload when the argument is + # known to be a Mapping with unknown type parameters, which is closer + # to the behavior we want. See mypy issue #1430. 
+ # + # Various mapping classes have __ior__ methods that should be kept roughly in line with .update(): + # -- dict.__ior__ + # -- os._Environ.__ior__ + # -- collections.UserDict.__ior__ + # -- collections.ChainMap.__ior__ + # -- peewee.attrdict.__add__ + # -- peewee.attrdict.__iadd__ + # -- weakref.WeakValueDictionary.__ior__ + # -- weakref.WeakKeyDictionary.__ior__ + @overload + def update(self, __m: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> None: ... + @overload + def update(self, __m: Iterable[tuple[_KT, _VT]], **kwargs: _VT) -> None: ... + @overload + def update(self, **kwargs: _VT) -> None: ... + +Text = str + +TYPE_CHECKING: bool + +# In stubs, the arguments of the IO class are marked as positional-only. +# This differs from runtime, but better reflects the fact that in reality +# classes deriving from IO use different names for the arguments. +class IO(Iterator[AnyStr], Generic[AnyStr]): + # At runtime these are all abstract properties, + # but making them abstract in the stub is hugely disruptive, for not much gain. + # See #8726 + @property + def mode(self) -> str: ... + @property + def name(self) -> str: ... + @abstractmethod + def close(self) -> None: ... + @property + def closed(self) -> bool: ... + @abstractmethod + def fileno(self) -> int: ... + @abstractmethod + def flush(self) -> None: ... + @abstractmethod + def isatty(self) -> bool: ... + @abstractmethod + def read(self, __n: int = -1) -> AnyStr: ... + @abstractmethod + def readable(self) -> bool: ... + @abstractmethod + def readline(self, __limit: int = -1) -> AnyStr: ... + @abstractmethod + def readlines(self, __hint: int = -1) -> list[AnyStr]: ... + @abstractmethod + def seek(self, __offset: int, __whence: int = 0) -> int: ... + @abstractmethod + def seekable(self) -> bool: ... + @abstractmethod + def tell(self) -> int: ... + @abstractmethod + def truncate(self, __size: int | None = None) -> int: ... + @abstractmethod + def writable(self) -> bool: ... + @abstractmethod + def write(self, __s: AnyStr) -> int: ... + @abstractmethod + def writelines(self, __lines: Iterable[AnyStr]) -> None: ... + @abstractmethod + def __next__(self) -> AnyStr: ... + @abstractmethod + def __iter__(self) -> Iterator[AnyStr]: ... + @abstractmethod + def __enter__(self) -> IO[AnyStr]: ... + @abstractmethod + def __exit__( + self, __t: Type[BaseException] | None, __value: BaseException | None, __traceback: TracebackType | None + ) -> None: ... + +class BinaryIO(IO[bytes]): + @abstractmethod + def __enter__(self) -> BinaryIO: ... + +class TextIO(IO[str]): + # See comment regarding the @properties in the `IO` class + @property + def buffer(self) -> BinaryIO: ... + @property + def encoding(self) -> str: ... + @property + def errors(self) -> str | None: ... + @property + def line_buffering(self) -> int: ... # int on PyPy, bool on CPython + @property + def newlines(self) -> Any: ... # None, str or tuple + @abstractmethod + def __enter__(self) -> TextIO: ... + +class ByteString(Sequence[int], metaclass=ABCMeta): ... + +# Functions + +_get_type_hints_obj_allowed_types: typing_extensions.TypeAlias = ( # noqa: Y042 + object + | Callable[..., Any] + | FunctionType + | BuiltinFunctionType + | MethodType + | ModuleType + | WrapperDescriptorType + | MethodWrapperType + | MethodDescriptorType +) + +if sys.version_info >= (3, 9): + def get_type_hints( + obj: _get_type_hints_obj_allowed_types, + globalns: dict[str, Any] | None = None, + localns: dict[str, Any] | None = None, + include_extras: bool = False, + ) -> dict[str, Any]: ... 
+ +else: + def get_type_hints( + obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None + ) -> dict[str, Any]: ... + +if sys.version_info >= (3, 8): + def get_origin(tp: Any) -> Any | None: ... + def get_args(tp: Any) -> tuple[Any, ...]: ... + +@overload +def cast(typ: Type[_T], val: Any) -> _T: ... +@overload +def cast(typ: str, val: Any) -> Any: ... +@overload +def cast(typ: object, val: Any) -> Any: ... + +if sys.version_info >= (3, 11): + def reveal_type(__obj: _T) -> _T: ... + def assert_never(__arg: Never) -> Never: ... + def assert_type(__val: _T, __typ: Any) -> _T: ... + def clear_overloads() -> None: ... + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, # on 3.11, runtime accepts it as part of kwargs + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., + **kwargs: Any, + ) -> IdentityFunction: ... + +# Type constructors + +class NamedTuple(tuple[Any, ...]): + if sys.version_info < (3, 8): + _field_types: collections.OrderedDict[str, type] + elif sys.version_info < (3, 9): + _field_types: dict[str, type] + _field_defaults: dict[str, Any] + _fields: tuple[str, ...] + _source: str + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... + @overload + def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... + @classmethod + def _make(cls: Type[_T], iterable: Iterable[Any]) -> _T: ... + if sys.version_info >= (3, 8): + def _asdict(self) -> dict[str, Any]: ... + else: + def _asdict(self) -> collections.OrderedDict[str, Any]: ... + + def _replace(self, **kwargs: Any) -> typing_extensions.Self: ... + +# Internal mypy fallback type for all typed dicts (does not exist at runtime) +# N.B. Keep this mostly in sync with typing_extensions._TypedDict/mypy_extensions._TypedDict +@type_check_only +class _TypedDict(Mapping[str, object], metaclass=ABCMeta): + __total__: ClassVar[bool] + if sys.version_info >= (3, 9): + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] + def copy(self) -> typing_extensions.Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: _Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. + def pop(self, k: _Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def update(self: _T, __m: _T) -> None: ... + def __delitem__(self, k: _Never) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + if sys.version_info >= (3, 9): + def __or__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... + def __ior__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... + +@_final +class ForwardRef: + __forward_arg__: str + __forward_code__: CodeType + __forward_evaluated__: bool + __forward_value__: Any | None + __forward_is_argument__: bool + __forward_is_class__: bool + __forward_module__: Any | None + if sys.version_info >= (3, 9): + # The module and is_class arguments were added in later Python 3.9 versions. 
+ def __init__(self, arg: str, is_argument: bool = True, module: Any | None = None, *, is_class: bool = False) -> None: ... + else: + def __init__(self, arg: str, is_argument: bool = True) -> None: ... + + if sys.version_info >= (3, 9): + def _evaluate( + self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None, recursive_guard: frozenset[str] + ) -> Any | None: ... + else: + def _evaluate(self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None) -> Any | None: ... + + def __eq__(self, other: object) -> bool: ... + if sys.version_info >= (3, 11): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + +if sys.version_info >= (3, 10): + def is_typeddict(tp: object) -> bool: ... + +def _type_repr(obj: object) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/typing_extensions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/typing_extensions.pyi new file mode 100644 index 00000000..7fced5ec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/typing_extensions.pyi @@ -0,0 +1,311 @@ +import abc +import collections +import sys +import typing +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import IdentityFunction, Incomplete +from collections.abc import Iterable +from typing import ( # noqa: Y022,Y039 + TYPE_CHECKING as TYPE_CHECKING, + Any as Any, + AsyncContextManager as AsyncContextManager, + AsyncGenerator as AsyncGenerator, + AsyncIterable as AsyncIterable, + AsyncIterator as AsyncIterator, + Awaitable as Awaitable, + Callable, + ChainMap as ChainMap, + ClassVar as ClassVar, + ContextManager as ContextManager, + Coroutine as Coroutine, + Counter as Counter, + DefaultDict as DefaultDict, + Deque as Deque, + Mapping, + NewType as NewType, + NoReturn as NoReturn, + Sequence, + Text as Text, + Type as Type, + _Alias, + overload as overload, + type_check_only, +) + +__all__ = [ + "Any", + "ClassVar", + "Concatenate", + "Final", + "LiteralString", + "ParamSpec", + "ParamSpecArgs", + "ParamSpecKwargs", + "Self", + "Type", + "TypeVar", + "TypeVarTuple", + "Unpack", + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + "AsyncGenerator", + "AsyncContextManager", + "ChainMap", + "ContextManager", + "Counter", + "Deque", + "DefaultDict", + "NamedTuple", + "OrderedDict", + "TypedDict", + "SupportsIndex", + "Annotated", + "assert_never", + "assert_type", + "dataclass_transform", + "final", + "IntVar", + "is_typeddict", + "Literal", + "NewType", + "overload", + "override", + "Protocol", + "reveal_type", + "runtime", + "runtime_checkable", + "Text", + "TypeAlias", + "TypeGuard", + "TYPE_CHECKING", + "Never", + "NoReturn", + "Required", + "NotRequired", + "clear_overloads", + "get_args", + "get_origin", + "get_overloads", + "get_type_hints", +] + +_T = typing.TypeVar("_T") +_F = typing.TypeVar("_F", bound=Callable[..., Any]) +_TC = typing.TypeVar("_TC", bound=Type[object]) + +# unfortunately we have to duplicate this class definition from typing.pyi or we break pytype +class _SpecialForm: + def __getitem__(self, parameters: Any) -> object: ... + if sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... 
+ +# Do not import (and re-export) Protocol or runtime_checkable from +# typing module because type checkers need to be able to distinguish +# typing.Protocol and typing_extensions.Protocol so they can properly +# warn users about potential runtime exceptions when using typing.Protocol +# on older versions of Python. +Protocol: _SpecialForm = ... + +def runtime_checkable(cls: _TC) -> _TC: ... + +# This alias for above is kept here for backwards compatibility. +runtime = runtime_checkable +Final: _SpecialForm + +def final(f: _F) -> _F: ... + +Literal: _SpecialForm + +def IntVar(name: str) -> Any: ... # returns a new TypeVar + +# Internal mypy fallback type for all typed dicts (does not exist at runtime) +# N.B. Keep this mostly in sync with typing._TypedDict/mypy_extensions._TypedDict +@type_check_only +class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] + __total__: ClassVar[bool] + def copy(self) -> Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. + def pop(self, k: Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def update(self: _T, __m: _T) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + def __delitem__(self, k: Never) -> None: ... + if sys.version_info >= (3, 9): + def __or__(self, __value: Self) -> Self: ... + def __ior__(self, __value: Self) -> Self: ... + +# TypedDict is a (non-subscriptable) special form. +TypedDict: object + +OrderedDict = _Alias() + +def get_type_hints( + obj: Callable[..., Any], + globalns: dict[str, Any] | None = None, + localns: dict[str, Any] | None = None, + include_extras: bool = False, +) -> dict[str, Any]: ... +def get_args(tp: Any) -> tuple[Any, ...]: ... +def get_origin(tp: Any) -> Any | None: ... + +Annotated: _SpecialForm +_AnnotatedAlias: Any # undocumented + +@runtime_checkable +class SupportsIndex(Protocol, metaclass=abc.ABCMeta): + @abc.abstractmethod + def __index__(self) -> int: ... + +# New things in 3.10 +if sys.version_info >= (3, 10): + from typing import ( + Concatenate as Concatenate, + ParamSpecArgs as ParamSpecArgs, + ParamSpecKwargs as ParamSpecKwargs, + TypeAlias as TypeAlias, + TypeGuard as TypeGuard, + is_typeddict as is_typeddict, + ) +else: + class ParamSpecArgs: + __origin__: ParamSpec + def __init__(self, origin: ParamSpec) -> None: ... + + class ParamSpecKwargs: + __origin__: ParamSpec + def __init__(self, origin: ParamSpec) -> None: ... + + Concatenate: _SpecialForm + TypeAlias: _SpecialForm + TypeGuard: _SpecialForm + def is_typeddict(tp: object) -> bool: ... + +# New things in 3.11 +# NamedTuples are not new, but the ability to create generic NamedTuples is new in 3.11 +if sys.version_info >= (3, 11): + from typing import ( + LiteralString as LiteralString, + NamedTuple as NamedTuple, + Never as Never, + NotRequired as NotRequired, + Required as Required, + Self as Self, + Unpack as Unpack, + assert_never as assert_never, + assert_type as assert_type, + clear_overloads as clear_overloads, + dataclass_transform as dataclass_transform, + get_overloads as get_overloads, + reveal_type as reveal_type, + ) +else: + Self: _SpecialForm + Never: _SpecialForm = ... 
+ def reveal_type(__obj: _T) -> _T: ... + def assert_never(__arg: Never) -> Never: ... + def assert_type(__val: _T, __typ: Any) -> _T: ... + def clear_overloads() -> None: ... + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... + + Required: _SpecialForm + NotRequired: _SpecialForm + LiteralString: _SpecialForm + Unpack: _SpecialForm + + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., + **kwargs: object, + ) -> IdentityFunction: ... + + class NamedTuple(tuple[Any, ...]): + if sys.version_info < (3, 8): + _field_types: collections.OrderedDict[str, type] + elif sys.version_info < (3, 9): + _field_types: dict[str, type] + _field_defaults: dict[str, Any] + _fields: tuple[str, ...] + _source: str + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... + @overload + def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... + @classmethod + def _make(cls, iterable: Iterable[Any]) -> Self: ... + if sys.version_info >= (3, 8): + def _asdict(self) -> dict[str, Any]: ... + else: + def _asdict(self) -> collections.OrderedDict[str, Any]: ... + + def _replace(self, **kwargs: Any) -> Self: ... + +# New things in 3.xx +# The `default` parameter was added to TypeVar, ParamSpec, and TypeVarTuple (PEP 696) +# The `infer_variance` parameter was added to TypeVar (PEP 695) +# typing_extensions.override (PEP 698) +@final +class TypeVar: + __name__: str + __bound__: Any | None + __constraints__: tuple[Any, ...] + __covariant__: bool + __contravariant__: bool + __default__: Any | None + def __init__( + self, + name: str, + *constraints: Any, + bound: Any | None = None, + covariant: bool = False, + contravariant: bool = False, + default: Any | None = None, + infer_variance: bool = False, + ) -> None: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Incomplete) -> Incomplete: ... + +@final +class ParamSpec: + __name__: str + __bound__: type[Any] | None + __covariant__: bool + __contravariant__: bool + __default__: type[Any] | None + def __init__( + self, + name: str, + *, + bound: None | type[Any] | str = None, + contravariant: bool = False, + covariant: bool = False, + default: type[Any] | str | None = None, + ) -> None: ... + @property + def args(self) -> ParamSpecArgs: ... + @property + def kwargs(self) -> ParamSpecKwargs: ... + +@final +class TypeVarTuple: + __name__: str + __default__: Any | None + def __init__(self, name: str, *, default: Any | None = None) -> None: ... + def __iter__(self) -> Any: ... # Unpack[Self] + +def override(__arg: _F) -> _F: ... 
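The typing_extensions stub ends, just above, with the `override` decorator from PEP 698. A minimal usage sketch (illustrative only, not part of the vendored stub data, and assuming a typing_extensions release that actually ships `override`, i.e. 4.4 or newer): at runtime the decorator just hands the function back, and it is static type checkers that verify the decorated method really overrides something in a base class.

from typing_extensions import override

class Base:
    def render(self) -> str:
        return "base"

class Child(Base):
    @override  # accepted by a type checker because Base.render exists
    def render(self) -> str:
        return "child"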
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unicodedata.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unicodedata.pyi new file mode 100644 index 00000000..5a1f7fe6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unicodedata.pyi @@ -0,0 +1,76 @@ +import sys +from _typeshed import ReadOnlyBuffer +from typing import Any, TypeVar, overload +from typing_extensions import Literal, TypeAlias, final + +ucd_3_2_0: UCD +unidata_version: str + +if sys.version_info < (3, 10): + ucnhash_CAPI: Any + +_T = TypeVar("_T") + +def bidirectional(__chr: str) -> str: ... +def category(__chr: str) -> str: ... +def combining(__chr: str) -> int: ... +@overload +def decimal(__chr: str) -> int: ... +@overload +def decimal(__chr: str, __default: _T) -> int | _T: ... +def decomposition(__chr: str) -> str: ... +@overload +def digit(__chr: str) -> int: ... +@overload +def digit(__chr: str, __default: _T) -> int | _T: ... + +_EastAsianWidth: TypeAlias = Literal["F", "H", "W", "Na", "A", "N"] + +def east_asian_width(__chr: str) -> _EastAsianWidth: ... + +if sys.version_info >= (3, 8): + def is_normalized(__form: str, __unistr: str) -> bool: ... + +def lookup(__name: str | ReadOnlyBuffer) -> str: ... +def mirrored(__chr: str) -> int: ... +@overload +def name(__chr: str) -> str: ... +@overload +def name(__chr: str, __default: _T) -> str | _T: ... +def normalize(__form: str, __unistr: str) -> str: ... +@overload +def numeric(__chr: str) -> float: ... +@overload +def numeric(__chr: str, __default: _T) -> float | _T: ... +@final +class UCD: + # The methods below are constructed from the same array in C + # (unicodedata_functions) and hence identical to the functions above. + unidata_version: str + def bidirectional(self, __chr: str) -> str: ... + def category(self, __chr: str) -> str: ... + def combining(self, __chr: str) -> int: ... + @overload + def decimal(self, __chr: str) -> int: ... + @overload + def decimal(self, __chr: str, __default: _T) -> int | _T: ... + def decomposition(self, __chr: str) -> str: ... + @overload + def digit(self, __chr: str) -> int: ... + @overload + def digit(self, __chr: str, __default: _T) -> int | _T: ... + def east_asian_width(self, __chr: str) -> _EastAsianWidth: ... + if sys.version_info >= (3, 8): + def is_normalized(self, __form: str, __unistr: str) -> bool: ... + + def lookup(self, __name: str | ReadOnlyBuffer) -> str: ... + def mirrored(self, __chr: str) -> int: ... + @overload + def name(self, __chr: str) -> str: ... + @overload + def name(self, __chr: str, __default: _T) -> str | _T: ... + def normalize(self, __form: str, __unistr: str) -> str: ... + @overload + def numeric(self, __chr: str) -> float: ... + @overload + def numeric(self, __chr: str, __default: _T) -> float | _T: ... 
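The paired overloads in the unicodedata stub above encode the runtime behaviour of `name()`, `decimal()`, `digit()` and `numeric()`: called without a second argument they raise ValueError for characters that lack the property, while a supplied default is returned instead. A short illustrative sketch (not part of the vendored stub data):

import unicodedata

print(unicodedata.name("A"))                  # 'LATIN CAPITAL LETTER A'
print(unicodedata.name("\x00", None))         # None: NUL has no name, so the default is returned
print(unicodedata.numeric("\u00bd"))          # 0.5 (VULGAR FRACTION ONE HALF)
print(unicodedata.normalize("NFC", "e\u0301") == "\u00e9")  # True: NFC composes e + combining acute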
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/__init__.pyi new file mode 100644 index 00000000..33820c79 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/__init__.pyi @@ -0,0 +1,69 @@ +import sys + +from .case import ( + FunctionTestCase as FunctionTestCase, + SkipTest as SkipTest, + TestCase as TestCase, + expectedFailure as expectedFailure, + skip as skip, + skipIf as skipIf, + skipUnless as skipUnless, +) +from .loader import ( + TestLoader as TestLoader, + defaultTestLoader as defaultTestLoader, + findTestCases as findTestCases, + getTestCaseNames as getTestCaseNames, + makeSuite as makeSuite, +) +from .main import TestProgram as TestProgram, main as main +from .result import TestResult as TestResult +from .runner import TextTestResult as TextTestResult, TextTestRunner as TextTestRunner +from .signals import ( + installHandler as installHandler, + registerResult as registerResult, + removeHandler as removeHandler, + removeResult as removeResult, +) +from .suite import BaseTestSuite as BaseTestSuite, TestSuite as TestSuite + +if sys.version_info >= (3, 8): + from unittest.async_case import * + + from .case import addModuleCleanup as addModuleCleanup + +if sys.version_info >= (3, 11): + from .case import doModuleCleanups as doModuleCleanups, enterModuleContext as enterModuleContext + +__all__ = [ + "TestResult", + "TestCase", + "TestSuite", + "TextTestRunner", + "TestLoader", + "FunctionTestCase", + "main", + "defaultTestLoader", + "SkipTest", + "skip", + "skipIf", + "skipUnless", + "expectedFailure", + "TextTestResult", + "installHandler", + "registerResult", + "removeResult", + "removeHandler", + "getTestCaseNames", + "makeSuite", + "findTestCases", +] + +if sys.version_info >= (3, 8): + __all__ += ["addModuleCleanup", "IsolatedAsyncioTestCase"] + +if sys.version_info >= (3, 11): + __all__ += ["enterModuleContext", "doModuleCleanups"] + +def load_tests(loader: TestLoader, tests: TestSuite, pattern: str | None) -> TestSuite: ... +def __dir__() -> set[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/_log.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/_log.pyi new file mode 100644 index 00000000..4de5d502 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/_log.pyi @@ -0,0 +1,28 @@ +import logging +import sys +from types import TracebackType +from typing import ClassVar, Generic, NamedTuple, TypeVar +from unittest.case import TestCase + +_L = TypeVar("_L", None, _LoggingWatcher) + +class _LoggingWatcher(NamedTuple): + records: list[logging.LogRecord] + output: list[str] + +class _AssertLogsContext(Generic[_L]): + LOGGING_FORMAT: ClassVar[str] + test_case: TestCase + logger_name: str + level: int + msg: None + if sys.version_info >= (3, 10): + def __init__(self, test_case: TestCase, logger_name: str, level: int, no_logs: bool) -> None: ... + no_logs: bool + else: + def __init__(self, test_case: TestCase, logger_name: str, level: int) -> None: ... + + def __enter__(self) -> _L: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None + ) -> bool | None: ... 
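The `_LoggingWatcher`/`_AssertLogsContext` pair stubbed above is what `TestCase.assertLogs()` yields: a named tuple carrying the captured `records` plus the pre-formatted `output` strings. A brief illustrative sketch (not part of the vendored stub data):

import logging
import unittest

class LogTest(unittest.TestCase):
    def test_warning_is_logged(self) -> None:
        # assertLogs fails the test if nothing at WARNING or above is logged on "myapp"
        with self.assertLogs("myapp", level="WARNING") as watcher:
            logging.getLogger("myapp").warning("disk %s is full", "/dev/sda1")
        self.assertEqual(watcher.output, ["WARNING:myapp:disk /dev/sda1 is full"])
        self.assertEqual(watcher.records[0].levelno, logging.WARNING)

if __name__ == "__main__":
    unittest.main()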
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/async_case.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/async_case.pyi new file mode 100644 index 00000000..c1de205f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/async_case.pyi @@ -0,0 +1,19 @@ +import sys +from collections.abc import Awaitable, Callable +from typing import TypeVar +from typing_extensions import ParamSpec + +from .case import TestCase + +if sys.version_info >= (3, 11): + from contextlib import AbstractAsyncContextManager + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +class IsolatedAsyncioTestCase(TestCase): + async def asyncSetUp(self) -> None: ... + async def asyncTearDown(self) -> None: ... + def addAsyncCleanup(self, __func: Callable[_P, Awaitable[object]], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + if sys.version_info >= (3, 11): + async def enterAsyncContext(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/case.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/case.pyi new file mode 100644 index 00000000..b118ecfc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/case.pyi @@ -0,0 +1,325 @@ +import logging +import sys +import unittest.result +from _typeshed import SupportsDunderGE, SupportsDunderGT, SupportsDunderLE, SupportsDunderLT, SupportsRSub, SupportsSub +from collections.abc import Callable, Container, Iterable, Mapping, Sequence, Set as AbstractSet +from contextlib import AbstractContextManager +from re import Pattern +from types import TracebackType +from typing import Any, AnyStr, ClassVar, Generic, NamedTuple, NoReturn, Protocol, SupportsAbs, SupportsRound, TypeVar, overload +from typing_extensions import ParamSpec, Self, TypeAlias +from warnings import WarningMessage + +if sys.version_info >= (3, 9): + from types import GenericAlias + +if sys.version_info >= (3, 10): + from types import UnionType + +_T = TypeVar("_T") +_S = TypeVar("_S", bound=SupportsSub[Any, Any]) +_E = TypeVar("_E", bound=BaseException) +_FT = TypeVar("_FT", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +DIFF_OMITTED: str + +class _BaseTestCaseContext: + def __init__(self, test_case: TestCase) -> None: ... + +if sys.version_info >= (3, 9): + from unittest._log import _AssertLogsContext, _LoggingWatcher +else: + # Unused dummy for _AssertLogsContext. Starting with Python 3.10, + # this is generic over the logging watcher, but in lower versions + # the watcher is hard-coded. + _L = TypeVar("_L") + + class _LoggingWatcher(NamedTuple): + records: list[logging.LogRecord] + output: list[str] + + class _AssertLogsContext(_BaseTestCaseContext, Generic[_L]): + LOGGING_FORMAT: ClassVar[str] + test_case: TestCase + logger_name: str + level: int + msg: None + def __init__(self, test_case: TestCase, logger_name: str, level: int) -> None: ... + def __enter__(self) -> _LoggingWatcher: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + +if sys.version_info >= (3, 8): + def addModuleCleanup(__function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def doModuleCleanups() -> None: ... 
+ +if sys.version_info >= (3, 11): + def enterModuleContext(cm: AbstractContextManager[_T]) -> _T: ... + +def expectedFailure(test_item: _FT) -> _FT: ... +def skip(reason: str) -> Callable[[_FT], _FT]: ... +def skipIf(condition: object, reason: str) -> Callable[[_FT], _FT]: ... +def skipUnless(condition: object, reason: str) -> Callable[[_FT], _FT]: ... + +class SkipTest(Exception): + def __init__(self, reason: str) -> None: ... + +class _SupportsAbsAndDunderGE(SupportsDunderGE[Any], SupportsAbs[Any], Protocol): ... + +# Keep this alias in sync with builtins._ClassInfo +# We can't import it from builtins or pytype crashes, +# due to the fact that pytype uses a custom builtins stub rather than typeshed's builtins stub +if sys.version_info >= (3, 10): + _ClassInfo: TypeAlias = type | UnionType | tuple[_ClassInfo, ...] +else: + _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] + +class TestCase: + failureException: type[BaseException] + longMessage: bool + maxDiff: int | None + # undocumented + _testMethodName: str + # undocumented + _testMethodDoc: str + def __init__(self, methodName: str = "runTest") -> None: ... + def __eq__(self, other: object) -> bool: ... + def setUp(self) -> None: ... + def tearDown(self) -> None: ... + @classmethod + def setUpClass(cls) -> None: ... + @classmethod + def tearDownClass(cls) -> None: ... + def run(self, result: unittest.result.TestResult | None = None) -> unittest.result.TestResult | None: ... + def __call__(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... + def skipTest(self, reason: Any) -> NoReturn: ... + def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... + def debug(self) -> None: ... + if sys.version_info < (3, 11): + def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... + + def assertEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertNotEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertTrue(self, expr: Any, msg: Any = None) -> None: ... + def assertFalse(self, expr: Any, msg: Any = None) -> None: ... + def assertIs(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNot(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNone(self, obj: object, msg: Any = None) -> None: ... + def assertIsNotNone(self, obj: object, msg: Any = None) -> None: ... + def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: ... + def assertNotIsInstance(self, obj: object, cls: _ClassInfo, msg: Any = None) -> None: ... + @overload + def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = None) -> None: ... + @overload + def assertGreater(self, a: _T, b: SupportsDunderLT[_T], msg: Any = None) -> None: ... + @overload + def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = None) -> None: ... + @overload + def assertGreaterEqual(self, a: _T, b: SupportsDunderLE[_T], msg: Any = None) -> None: ... + @overload + def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... + @overload + def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... 
+ @overload + def assertLessEqual(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... + @overload + def assertLessEqual(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... + # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp` + # are not using `ParamSpec` intentionally, + # because they might be used with explicitly wrong arg types to raise some error in tests. + @overload + def assertRaises( # type: ignore[misc] + self, + expected_exception: type[BaseException] | tuple[type[BaseException], ...], + callable: Callable[..., Any], + *args: Any, + **kwargs: Any, + ) -> None: ... + @overload + def assertRaises( + self, expected_exception: type[_E] | tuple[type[_E], ...], *, msg: Any = ... + ) -> _AssertRaisesContext[_E]: ... + @overload + def assertRaisesRegex( # type: ignore[misc] + self, + expected_exception: type[BaseException] | tuple[type[BaseException], ...], + expected_regex: str | Pattern[str], + callable: Callable[..., Any], + *args: Any, + **kwargs: Any, + ) -> None: ... + @overload + def assertRaisesRegex( + self, expected_exception: type[_E] | tuple[type[_E], ...], expected_regex: str | Pattern[str], *, msg: Any = ... + ) -> _AssertRaisesContext[_E]: ... + @overload + def assertWarns( # type: ignore[misc] + self, + expected_warning: type[Warning] | tuple[type[Warning], ...], + callable: Callable[_P, Any], + *args: _P.args, + **kwargs: _P.kwargs, + ) -> None: ... + @overload + def assertWarns( + self, expected_warning: type[Warning] | tuple[type[Warning], ...], *, msg: Any = ... + ) -> _AssertWarnsContext: ... + @overload + def assertWarnsRegex( # type: ignore[misc] + self, + expected_warning: type[Warning] | tuple[type[Warning], ...], + expected_regex: str | Pattern[str], + callable: Callable[_P, Any], + *args: _P.args, + **kwargs: _P.kwargs, + ) -> None: ... + @overload + def assertWarnsRegex( + self, expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | Pattern[str], *, msg: Any = ... + ) -> _AssertWarnsContext: ... + def assertLogs( + self, logger: str | logging.Logger | None = None, level: int | str | None = None + ) -> _AssertLogsContext[_LoggingWatcher]: ... + if sys.version_info >= (3, 10): + def assertNoLogs( + self, logger: str | logging.Logger | None = None, level: int | str | None = None + ) -> _AssertLogsContext[None]: ... + + @overload + def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... + @overload + def assertAlmostEqual( + self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE + ) -> None: ... + @overload + def assertAlmostEqual( + self, + first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], + second: _T, + places: int | None = None, + msg: Any = None, + delta: None = None, + ) -> None: ... + @overload + def assertAlmostEqual( + self, + first: _T, + second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], + places: int | None = None, + msg: Any = None, + delta: None = None, + ) -> None: ... + @overload + def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... + @overload + def assertNotAlmostEqual( + self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE + ) -> None: ... 
+ @overload + def assertNotAlmostEqual( + self, + first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], + second: _T, + places: int | None = None, + msg: Any = None, + delta: None = None, + ) -> None: ... + @overload + def assertNotAlmostEqual( + self, + first: _T, + second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], + places: int | None = None, + msg: Any = None, + delta: None = None, + ) -> None: ... + def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... + def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... + def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = None) -> None: ... + def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: ... + def assertMultiLineEqual(self, first: str, second: str, msg: Any = None) -> None: ... + def assertSequenceEqual( + self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = None, seq_type: type[Sequence[Any]] | None = None + ) -> None: ... + def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = None) -> None: ... + def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = None) -> None: ... + def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = None) -> None: ... + def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = None) -> None: ... + def fail(self, msg: Any = None) -> NoReturn: ... + def countTestCases(self) -> int: ... + def defaultTestResult(self) -> unittest.result.TestResult: ... + def id(self) -> str: ... + def shortDescription(self) -> str | None: ... + if sys.version_info >= (3, 8): + def addCleanup(self, __function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + else: + def addCleanup(self, function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + + if sys.version_info >= (3, 11): + def enterContext(self, cm: AbstractContextManager[_T]) -> _T: ... + + def doCleanups(self) -> None: ... + if sys.version_info >= (3, 8): + @classmethod + def addClassCleanup(cls, __function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + @classmethod + def doClassCleanups(cls) -> None: ... + + if sys.version_info >= (3, 11): + @classmethod + def enterClassContext(cls, cm: AbstractContextManager[_T]) -> _T: ... + + def _formatMessage(self, msg: str | None, standardMsg: str) -> str: ... # undocumented + def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: ... # undocumented + if sys.version_info < (3, 12): + failUnlessEqual = assertEqual + assertEquals = assertEqual + failIfEqual = assertNotEqual + assertNotEquals = assertNotEqual + failUnless = assertTrue + assert_ = assertTrue + failIf = assertFalse + failUnlessRaises = assertRaises + failUnlessAlmostEqual = assertAlmostEqual + assertAlmostEquals = assertAlmostEqual + failIfAlmostEqual = assertNotAlmostEqual + assertNotAlmostEquals = assertNotAlmostEqual + assertRegexpMatches = assertRegex + assertNotRegexpMatches = assertNotRegex + assertRaisesRegexp = assertRaisesRegex + def assertDictContainsSubset( + self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None + ) -> None: ... 
+ +class FunctionTestCase(TestCase): + def __init__( + self, + testFunc: Callable[[], Any], + setUp: Callable[[], Any] | None = None, + tearDown: Callable[[], Any] | None = None, + description: str | None = None, + ) -> None: ... + def runTest(self) -> None: ... + +class _AssertRaisesContext(Generic[_E]): + exception: _E + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None + ) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class _AssertWarnsContext: + warning: WarningMessage + filename: str + lineno: int + warnings: list[WarningMessage] + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/loader.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/loader.pyi new file mode 100644 index 00000000..f3850c93 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/loader.pyi @@ -0,0 +1,46 @@ +import unittest.case +import unittest.suite +from collections.abc import Callable, Sequence +from re import Pattern +from types import ModuleType +from typing import Any +from typing_extensions import TypeAlias + +_SortComparisonMethod: TypeAlias = Callable[[str, str], int] +_SuiteClass: TypeAlias = Callable[[list[unittest.case.TestCase]], unittest.suite.TestSuite] + +VALID_MODULE_NAME: Pattern[str] + +class TestLoader: + errors: list[type[BaseException]] + testMethodPrefix: str + sortTestMethodsUsing: _SortComparisonMethod + testNamePatterns: list[str] | None + suiteClass: _SuiteClass + def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... + def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: Any = None) -> unittest.suite.TestSuite: ... + def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: ... + def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: ... + def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: ... + def discover( + self, start_dir: str, pattern: str = "test*.py", top_level_dir: str | None = None + ) -> unittest.suite.TestSuite: ... + def _match_path(self, path: str, full_path: str, pattern: str) -> bool: ... + +defaultTestLoader: TestLoader + +def getTestCaseNames( + testCaseClass: type[unittest.case.TestCase], + prefix: str, + sortUsing: _SortComparisonMethod = ..., + testNamePatterns: list[str] | None = None, +) -> Sequence[str]: ... +def makeSuite( + testCaseClass: type[unittest.case.TestCase], + prefix: str = "test", + sortUsing: _SortComparisonMethod = ..., + suiteClass: _SuiteClass = ..., +) -> unittest.suite.TestSuite: ... +def findTestCases( + module: ModuleType, prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ... +) -> unittest.suite.TestSuite: ... 
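The `assertRaises` overloads and the `_AssertRaisesContext` class in the unittest/case.pyi stub above mirror the two calling conventions of that assertion: the direct-call form, and the context-manager form that exposes the caught exception. A brief illustrative sketch (not part of the vendored stub data):

import unittest

class RaisesTest(unittest.TestCase):
    def test_direct_call_form(self) -> None:
        # assertRaises(exc_type, callable, *args, **kwargs)
        self.assertRaises(ZeroDivisionError, lambda: 1 / 0)

    def test_context_manager_form(self) -> None:
        with self.assertRaises(ValueError) as ctx:
            int("not a number")
        self.assertIn("invalid literal", str(ctx.exception))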
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/main.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/main.pyi new file mode 100644 index 00000000..6d970c92 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/main.pyi @@ -0,0 +1,47 @@ +import unittest.case +import unittest.loader +import unittest.result +import unittest.suite +from collections.abc import Iterable +from types import ModuleType +from typing import Any, Protocol + +MAIN_EXAMPLES: str +MODULE_EXAMPLES: str + +class _TestRunner(Protocol): + def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ... + +# not really documented +class TestProgram: + result: unittest.result.TestResult + module: None | str | ModuleType + verbosity: int + failfast: bool | None + catchbreak: bool | None + buffer: bool | None + progName: str | None + warnings: str | None + testNamePatterns: list[str] | None + def __init__( + self, + module: None | str | ModuleType = "__main__", + defaultTest: str | Iterable[str] | None = None, + argv: list[str] | None = None, + testRunner: type[_TestRunner] | _TestRunner | None = None, + testLoader: unittest.loader.TestLoader = ..., + exit: bool = True, + verbosity: int = 1, + failfast: bool | None = None, + catchbreak: bool | None = None, + buffer: bool | None = None, + warnings: str | None = None, + *, + tb_locals: bool = False, + ) -> None: ... + def usageExit(self, msg: Any = None) -> None: ... + def parseArgs(self, argv: list[str]) -> None: ... + def createTests(self, from_discovery: bool = False, Loader: unittest.loader.TestLoader | None = None) -> None: ... + def runTests(self) -> None: ... # undocumented + +main = TestProgram diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/mock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/mock.pyi new file mode 100644 index 00000000..f0345c90 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/mock.pyi @@ -0,0 +1,442 @@ +import sys +from collections.abc import Awaitable, Callable, Coroutine, Iterable, Mapping, Sequence +from contextlib import _GeneratorContextManager +from types import TracebackType +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Final, Literal, Self, TypeAlias + +_T = TypeVar("_T") +_TT = TypeVar("_TT", bound=type[Any]) +_R = TypeVar("_R") +_F = TypeVar("_F", bound=Callable[..., Any]) +_AF = TypeVar("_AF", bound=Callable[..., Coroutine[Any, Any, Any]]) + +if sys.version_info >= (3, 8): + __all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "AsyncMock", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + "seal", + ) +else: + __all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + "seal", + ) + +__version__: Final[str] + +FILTER_DIR: Any + +class _SentinelObject: + name: Any + def __init__(self, name: Any) -> None: ... + +class _Sentinel: + def __getattr__(self, name: str) -> Any: ... 
+ +sentinel: Any +DEFAULT: Any + +_ArgsKwargs: TypeAlias = tuple[tuple[Any, ...], Mapping[str, Any]] +_NameArgsKwargs: TypeAlias = tuple[str, tuple[Any, ...], Mapping[str, Any]] +_CallValue: TypeAlias = str | tuple[Any, ...] | Mapping[str, Any] | _ArgsKwargs | _NameArgsKwargs + +class _Call(tuple[Any, ...]): + def __new__( + cls, value: _CallValue = ..., name: str | None = "", parent: Any | None = None, two: bool = False, from_kall: bool = True + ) -> Self: ... + name: Any + parent: Any + from_kall: Any + def __init__( + self, + value: _CallValue = ..., + name: str | None = None, + parent: Any | None = None, + two: bool = False, + from_kall: bool = True, + ) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... + def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... + def __getattr__(self, attr: str) -> Any: ... + def __getattribute__(self, attr: str) -> Any: ... + if sys.version_info >= (3, 8): + @property + def args(self) -> tuple[Any, ...]: ... + @property + def kwargs(self) -> Mapping[str, Any]: ... + + def call_list(self) -> Any: ... + +call: _Call + +class _CallList(list[_Call]): + def __contains__(self, value: Any) -> bool: ... + +class Base: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + +# We subclass with "Any" because mocks are explicitly designed to stand in for other types, +# something that can't be expressed with our static type system. +class NonCallableMock(Base, Any): + def __new__(__cls, *args: Any, **kw: Any) -> Self: ... + def __init__( + self, + spec: list[str] | object | type[object] | None = None, + wraps: Any | None = None, + name: str | None = None, + spec_set: list[str] | object | type[object] | None = None, + parent: NonCallableMock | None = None, + _spec_state: Any | None = None, + _new_name: str = "", + _new_parent: NonCallableMock | None = None, + _spec_as_instance: bool = False, + _eat_self: bool | None = None, + unsafe: bool = False, + **kwargs: Any, + ) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __delattr__(self, name: str) -> None: ... + def __setattr__(self, name: str, value: Any) -> None: ... + def __dir__(self) -> list[str]: ... + if sys.version_info >= (3, 8): + def _calls_repr(self, prefix: str = "Calls") -> str: ... + def assert_called_with(self, *args: Any, **kwargs: Any) -> None: ... + def assert_not_called(self) -> None: ... + def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: ... + def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = "call") -> str: ... + else: + def assert_called_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... + def assert_not_called(_mock_self) -> None: ... + def assert_called_once_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... + def _format_mock_failure_message(self, args: Any, kwargs: Any) -> str: ... + if sys.version_info >= (3, 8): + def assert_called(self) -> None: ... + def assert_called_once(self) -> None: ... + else: + def assert_called(_mock_self) -> None: ... + def assert_called_once(_mock_self) -> None: ... + + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... + def _extract_mock_name(self) -> str: ... + def _get_call_signature_from_name(self, name: str) -> Any: ... + def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ... + def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = False) -> None: ... + def mock_add_spec(self, spec: Any, spec_set: bool = False) -> None: ... 
+ def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = False, _eat_self: bool = False) -> None: ... + def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: ... + def configure_mock(self, **kwargs: Any) -> None: ... + return_value: Any + side_effect: Any + called: bool + call_count: int + call_args: Any + call_args_list: _CallList + mock_calls: _CallList + def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... + def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: ... + def _get_child_mock(self, **kw: Any) -> NonCallableMock: ... + +class CallableMixin(Base): + side_effect: Any + def __init__( + self, + spec: Any | None = None, + side_effect: Any | None = None, + return_value: Any = ..., + wraps: Any | None = None, + name: Any | None = None, + spec_set: Any | None = None, + parent: Any | None = None, + _spec_state: Any | None = None, + _new_name: Any = "", + _new_parent: Any | None = None, + **kwargs: Any, + ) -> None: ... + if sys.version_info >= (3, 8): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + else: + def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ... + +class Mock(CallableMixin, NonCallableMock): ... + +class _patch(Generic[_T]): + attribute_name: Any + getter: Callable[[], Any] + attribute: str + new: _T + new_callable: Any + spec: Any + create: bool + has_local: Any + spec_set: Any + autospec: Any + kwargs: Mapping[str, Any] + additional_patchers: Any + # If new==DEFAULT, self is _patch[Any]. Ideally we'd be able to add an overload for it so that self is _patch[MagicMock], + # but that's impossible with the current type system. + if sys.version_info >= (3, 10): + def __init__( + self: _patch[_T], + getter: Callable[[], Any], + attribute: str, + new: _T, + spec: Any | None, + create: bool, + spec_set: Any | None, + autospec: Any | None, + new_callable: Any | None, + kwargs: Mapping[str, Any], + *, + unsafe: bool = False, + ) -> None: ... + else: + def __init__( + self: _patch[_T], + getter: Callable[[], Any], + attribute: str, + new: _T, + spec: Any | None, + create: bool, + spec_set: Any | None, + autospec: Any | None, + new_callable: Any | None, + kwargs: Mapping[str, Any], + ) -> None: ... + + def copy(self) -> _patch[_T]: ... + @overload + def __call__(self, func: _TT) -> _TT: ... + @overload + def __call__(self, func: Callable[..., _R]) -> Callable[..., _R]: ... + if sys.version_info >= (3, 8): + def decoration_helper( + self, patched: _patch[Any], args: Sequence[Any], keywargs: Any + ) -> _GeneratorContextManager[tuple[Sequence[Any], Any]]: ... + + def decorate_class(self, klass: _TT) -> _TT: ... + def decorate_callable(self, func: Callable[..., _R]) -> Callable[..., _R]: ... + if sys.version_info >= (3, 8): + def decorate_async_callable(self, func: Callable[..., Awaitable[_R]]) -> Callable[..., Awaitable[_R]]: ... + + def get_original(self) -> tuple[Any, bool]: ... + target: Any + temp_original: Any + is_local: bool + def __enter__(self) -> _T: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> None: ... + def start(self) -> _T: ... + def stop(self) -> None: ... + +class _patch_dict: + in_dict: Any + values: Any + clear: Any + def __init__(self, in_dict: Any, values: Any = ..., clear: Any = False, **kwargs: Any) -> None: ... + def __call__(self, f: Any) -> Any: ... + if sys.version_info >= (3, 10): + def decorate_callable(self, f: _F) -> _F: ... 
+ def decorate_async_callable(self, f: _AF) -> _AF: ... + + def decorate_class(self, klass: Any) -> Any: ... + def __enter__(self) -> Any: ... + def __exit__(self, *args: object) -> Any: ... + start: Any + stop: Any + +if sys.version_info >= (3, 8): + _Mock: TypeAlias = MagicMock | AsyncMock +else: + _Mock: TypeAlias = MagicMock + +class _patcher: + TEST_PREFIX: str + dict: type[_patch_dict] + # This overload also covers the case, where new==DEFAULT. In this case, the return type is _patch[Any]. + # Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock], + # but that's impossible with the current type system. + @overload + def __call__( # type: ignore[misc] + self, + target: str, + new: _T, + spec: Any | None = ..., + create: bool = ..., + spec_set: Any | None = ..., + autospec: Any | None = ..., + new_callable: Any | None = ..., + **kwargs: Any, + ) -> _patch[_T]: ... + @overload + def __call__( + self, + target: str, + *, + spec: Any | None = ..., + create: bool = ..., + spec_set: Any | None = ..., + autospec: Any | None = ..., + new_callable: Any | None = ..., + **kwargs: Any, + ) -> _patch[_Mock]: ... + @overload + @staticmethod + def object( # type: ignore[misc] + target: Any, + attribute: str, + new: _T, + spec: Any | None = ..., + create: bool = ..., + spec_set: Any | None = ..., + autospec: Any | None = ..., + new_callable: Any | None = ..., + **kwargs: Any, + ) -> _patch[_T]: ... + @overload + @staticmethod + def object( + target: Any, + attribute: str, + *, + spec: Any | None = ..., + create: bool = ..., + spec_set: Any | None = ..., + autospec: Any | None = ..., + new_callable: Any | None = ..., + **kwargs: Any, + ) -> _patch[_Mock]: ... + @staticmethod + def multiple( + target: Any, + spec: Any | None = ..., + create: bool = ..., + spec_set: Any | None = ..., + autospec: Any | None = ..., + new_callable: Any | None = ..., + **kwargs: Any, + ) -> _patch[Any]: ... + @staticmethod + def stopall() -> None: ... + +patch: _patcher + +class MagicMixin: + def __init__(self, *args: Any, **kw: Any) -> None: ... + +class NonCallableMagicMock(MagicMixin, NonCallableMock): ... +class MagicMock(MagicMixin, Mock): ... + +if sys.version_info >= (3, 8): + class AsyncMockMixin(Base): + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + async def _execute_mock_call(self, *args: Any, **kwargs: Any) -> Any: ... + def assert_awaited(self) -> None: ... + def assert_awaited_once(self) -> None: ... + def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: ... + def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: ... + def assert_any_await(self, *args: Any, **kwargs: Any) -> None: ... + def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = False) -> None: ... + def assert_not_awaited(self) -> None: ... + def reset_mock(self, *args: Any, **kwargs: Any) -> None: ... + await_count: int + await_args: _Call | None + await_args_list: _CallList + + class AsyncMagicMixin(MagicMixin): + def __init__(self, *args: Any, **kw: Any) -> None: ... + + class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): ... + +class MagicProxy: + name: str + parent: Any + def __init__(self, name: str, parent: Any) -> None: ... + if sys.version_info < (3, 8): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + + def create_mock(self) -> Any: ... + def __get__(self, obj: Any, _type: Any | None = None) -> Any: ... + +class _ANY: + def __eq__(self, other: object) -> Literal[True]: ... 
+ def __ne__(self, other: object) -> Literal[False]: ... + +ANY: Any + +if sys.version_info >= (3, 10): + def create_autospec( + spec: Any, + spec_set: Any = False, + instance: Any = False, + _parent: Any | None = None, + _name: Any | None = None, + *, + unsafe: bool = False, + **kwargs: Any, + ) -> Any: ... + +else: + def create_autospec( + spec: Any, + spec_set: Any = False, + instance: Any = False, + _parent: Any | None = None, + _name: Any | None = None, + **kwargs: Any, + ) -> Any: ... + +class _SpecState: + spec: Any + ids: Any + spec_set: Any + parent: Any + instance: Any + name: Any + def __init__( + self, + spec: Any, + spec_set: Any = False, + parent: Any | None = None, + name: Any | None = None, + ids: Any | None = None, + instance: Any = False, + ) -> None: ... + +def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: ... + +class PropertyMock(Mock): + if sys.version_info >= (3, 8): + def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... + else: + def __get__(self, obj: _T, obj_type: type[_T] | None) -> Self: ... + + def __set__(self, obj: Any, value: Any) -> None: ... + +def seal(mock: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/result.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/result.pyi new file mode 100644 index 00000000..8d78bc0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/result.pyi @@ -0,0 +1,39 @@ +import unittest.case +from _typeshed import OptExcInfo +from collections.abc import Callable +from typing import Any, TextIO, TypeVar + +_F = TypeVar("_F", bound=Callable[..., Any]) + +STDOUT_LINE: str +STDERR_LINE: str + +# undocumented +def failfast(method: _F) -> _F: ... + +class TestResult: + errors: list[tuple[unittest.case.TestCase, str]] + failures: list[tuple[unittest.case.TestCase, str]] + skipped: list[tuple[unittest.case.TestCase, str]] + expectedFailures: list[tuple[unittest.case.TestCase, str]] + unexpectedSuccesses: list[unittest.case.TestCase] + shouldStop: bool + testsRun: int + buffer: bool + failfast: bool + tb_locals: bool + def __init__(self, stream: TextIO | None = None, descriptions: bool | None = None, verbosity: int | None = None) -> None: ... + def printErrors(self) -> None: ... + def wasSuccessful(self) -> bool: ... + def stop(self) -> None: ... + def startTest(self, test: unittest.case.TestCase) -> None: ... + def stopTest(self, test: unittest.case.TestCase) -> None: ... + def startTestRun(self) -> None: ... + def stopTestRun(self) -> None: ... + def addError(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... + def addFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... + def addSuccess(self, test: unittest.case.TestCase) -> None: ... + def addSkip(self, test: unittest.case.TestCase, reason: str) -> None: ... + def addExpectedFailure(self, test: unittest.case.TestCase, err: OptExcInfo) -> None: ... + def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None: ... + def addSubTest(self, test: unittest.case.TestCase, subtest: unittest.case.TestCase, err: OptExcInfo | None) -> None: ... 
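# Editorial note: an illustrative sketch, not part of the vendored typeshed stubs,
# of the Mock/patch API typed in unittest/mock.pyi above. Patching os.getcwd is
# only an example target; any importable attribute works the same way.
import os
from unittest import mock

m = mock.MagicMock(return_value=42)
assert m(1, key="v") == 42
m.assert_called_once_with(1, key="v")

with mock.patch("os.getcwd", return_value="/tmp") as fake_getcwd:
    assert os.getcwd() == "/tmp"   # inside the block the attribute is a MagicMock
fake_getcwd.assert_called_once()   # call history is still inspectable afterwards;
                                   # the real os.getcwd is restored on exit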
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/runner.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/runner.pyi new file mode 100644 index 00000000..c0ddcdb4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/runner.pyi @@ -0,0 +1,36 @@ +import unittest.case +import unittest.result +import unittest.suite +from collections.abc import Callable, Iterable +from typing import TextIO +from typing_extensions import TypeAlias + +_ResultClassType: TypeAlias = Callable[[TextIO, bool, int], unittest.result.TestResult] + +class TextTestResult(unittest.result.TestResult): + descriptions: bool # undocumented + dots: bool # undocumented + separator1: str + separator2: str + showAll: bool # undocumented + stream: TextIO # undocumented + def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ... + def getDescription(self, test: unittest.case.TestCase) -> str: ... + def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ... + +class TextTestRunner: + resultclass: _ResultClassType + def __init__( + self, + stream: TextIO | None = None, + descriptions: bool = True, + verbosity: int = 1, + failfast: bool = False, + buffer: bool = False, + resultclass: _ResultClassType | None = None, + warnings: type[Warning] | None = None, + *, + tb_locals: bool = False, + ) -> None: ... + def _makeResult(self) -> unittest.result.TestResult: ... + def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/signals.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/signals.pyi new file mode 100644 index 00000000..a60133ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/signals.pyi @@ -0,0 +1,15 @@ +import unittest.result +from collections.abc import Callable +from typing import TypeVar, overload +from typing_extensions import ParamSpec + +_P = ParamSpec("_P") +_T = TypeVar("_T") + +def installHandler() -> None: ... +def registerResult(result: unittest.result.TestResult) -> None: ... +def removeResult(result: unittest.result.TestResult) -> bool: ... +@overload +def removeHandler(method: None = None) -> None: ... +@overload +def removeHandler(method: Callable[_P, _T]) -> Callable[_P, _T]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/suite.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/suite.pyi new file mode 100644 index 00000000..f6b8ef00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/suite.pyi @@ -0,0 +1,22 @@ +import unittest.case +import unittest.result +from collections.abc import Iterable, Iterator +from typing_extensions import TypeAlias + +_TestType: TypeAlias = unittest.case.TestCase | TestSuite + +class BaseTestSuite(Iterable[_TestType]): + _tests: list[unittest.case.TestCase] + _removed_tests: int + def __init__(self, tests: Iterable[_TestType] = ...) -> None: ... + def __call__(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... + def addTest(self, test: _TestType) -> None: ... + def addTests(self, tests: Iterable[_TestType]) -> None: ... 
+ def run(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... + def debug(self) -> None: ... + def countTestCases(self) -> int: ... + def __iter__(self) -> Iterator[_TestType]: ... + def __eq__(self, other: object) -> bool: ... + +class TestSuite(BaseTestSuite): + def run(self, result: unittest.result.TestResult, debug: bool = False) -> unittest.result.TestResult: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/util.pyi new file mode 100644 index 00000000..845accfe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/unittest/util.pyi @@ -0,0 +1,23 @@ +from collections.abc import Sequence +from typing import Any, TypeVar +from typing_extensions import TypeAlias + +_T = TypeVar("_T") +_Mismatch: TypeAlias = tuple[_T, _T, int] + +_MAX_LENGTH: int +_PLACEHOLDER_LEN: int +_MIN_BEGIN_LEN: int +_MIN_END_LEN: int +_MIN_COMMON_LEN: int +_MIN_DIFF_LEN: int + +def _shorten(s: str, prefixlen: int, suffixlen: int) -> str: ... +def _common_shorten_repr(*args: str) -> tuple[str, ...]: ... +def safe_repr(obj: object, short: bool = False) -> str: ... +def strclass(cls: type) -> str: ... +def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... +def unorderable_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... +def three_way_cmp(x: Any, y: Any) -> int: ... +def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... +def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/error.pyi new file mode 100644 index 00000000..89cec9bf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/error.pyi @@ -0,0 +1,23 @@ +from email.message import Message +from typing import IO +from urllib.response import addinfourl + +__all__ = ["URLError", "HTTPError", "ContentTooShortError"] + +class URLError(OSError): + reason: str | BaseException + def __init__(self, reason: str | BaseException, filename: str | None = None) -> None: ... + +class HTTPError(URLError, addinfourl): + @property + def headers(self) -> Message: ... + @headers.setter + def headers(self, headers: Message) -> None: ... + @property + def reason(self) -> str: ... # type: ignore[override] + code: int + def __init__(self, url: str, code: int, msg: str, hdrs: Message, fp: IO[bytes] | None) -> None: ... + +class ContentTooShortError(URLError): + content: tuple[str, Message] + def __init__(self, message: str, content: tuple[str, Message]) -> None: ... 
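# Editorial note: an illustrative sketch, not part of the vendored typeshed stubs,
# of the exception hierarchy typed in urllib/error.pyi above. The URL is a
# placeholder; HTTPError carries a status code and headers, URLError only a reason.
from urllib.error import HTTPError, URLError
from urllib.request import urlopen

try:
    with urlopen("https://example.invalid/") as resp:
        body = resp.read()
except HTTPError as exc:   # the server answered with an error status
    print(exc.code, exc.reason, exc.headers.get("Content-Type"))
except URLError as exc:    # e.g. DNS or connection failure; reason may be a str or an exception
    print("failed to reach the server:", exc.reason)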
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/parse.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/parse.pyi new file mode 100644 index 00000000..8e179ca7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/parse.pyi @@ -0,0 +1,209 @@ +import sys +from collections.abc import Callable, Iterable, Mapping, Sequence +from typing import Any, AnyStr, Generic, NamedTuple, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "urlparse", + "urlunparse", + "urljoin", + "urldefrag", + "urlsplit", + "urlunsplit", + "urlencode", + "parse_qs", + "parse_qsl", + "quote", + "quote_plus", + "quote_from_bytes", + "unquote", + "unquote_plus", + "unquote_to_bytes", + "DefragResult", + "ParseResult", + "SplitResult", + "DefragResultBytes", + "ParseResultBytes", + "SplitResultBytes", +] + +uses_relative: list[str] +uses_netloc: list[str] +uses_params: list[str] +non_hierarchical: list[str] +uses_query: list[str] +uses_fragment: list[str] +scheme_chars: str +if sys.version_info < (3, 11): + MAX_CACHE_SIZE: int + +class _ResultMixinStr: + def encode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinBytes: ... + +class _ResultMixinBytes: + def decode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinStr: ... + +class _NetlocResultMixinBase(Generic[AnyStr]): + @property + def username(self) -> AnyStr | None: ... + @property + def password(self) -> AnyStr | None: ... + @property + def hostname(self) -> AnyStr | None: ... + @property + def port(self) -> int | None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): ... +class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): ... + +class _DefragResultBase(NamedTuple, Generic[AnyStr]): + url: AnyStr + fragment: AnyStr + +class _SplitResultBase(NamedTuple, Generic[AnyStr]): + scheme: AnyStr + netloc: AnyStr + path: AnyStr + query: AnyStr + fragment: AnyStr + +class _ParseResultBase(NamedTuple, Generic[AnyStr]): + scheme: AnyStr + netloc: AnyStr + path: AnyStr + params: AnyStr + query: AnyStr + fragment: AnyStr + +# Structured result objects for string data +class DefragResult(_DefragResultBase[str], _ResultMixinStr): + def geturl(self) -> str: ... + +class SplitResult(_SplitResultBase[str], _NetlocResultMixinStr): + def geturl(self) -> str: ... + +class ParseResult(_ParseResultBase[str], _NetlocResultMixinStr): + def geturl(self) -> str: ... + +# Structured result objects for bytes data +class DefragResultBytes(_DefragResultBase[bytes], _ResultMixinBytes): + def geturl(self) -> bytes: ... + +class SplitResultBytes(_SplitResultBase[bytes], _NetlocResultMixinBytes): + def geturl(self) -> bytes: ... + +class ParseResultBytes(_ParseResultBase[bytes], _NetlocResultMixinBytes): + def geturl(self) -> bytes: ... + +def parse_qs( + qs: AnyStr | None, + keep_blank_values: bool = False, + strict_parsing: bool = False, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", +) -> dict[AnyStr, list[AnyStr]]: ... 
+def parse_qsl( + qs: AnyStr | None, + keep_blank_values: bool = False, + strict_parsing: bool = False, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", +) -> list[tuple[AnyStr, AnyStr]]: ... +@overload +def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: ... +@overload +def quote(string: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... +def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... +@overload +def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: ... +@overload +def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = "") -> str: ... + +if sys.version_info >= (3, 9): + def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: ... + +else: + def unquote(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... + +def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ... +def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... +@overload +def urldefrag(url: str) -> DefragResult: ... +@overload +def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... + +_Q = TypeVar("_Q", bound=str | Iterable[int]) +_QueryType: TypeAlias = ( + Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]] +) + +@overload +def urlencode( + query: _QueryType, + doseq: bool = False, + safe: str = "", + encoding: str | None = None, + errors: str | None = None, + quote_via: Callable[[AnyStr, str, str, str], str] = ..., +) -> str: ... +@overload +def urlencode( + query: _QueryType, + doseq: bool, + safe: _Q, + encoding: str | None = None, + errors: str | None = None, + quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., +) -> str: ... +@overload +def urlencode( + query: _QueryType, + doseq: bool = False, + *, + safe: _Q, + encoding: str | None = None, + errors: str | None = None, + quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., +) -> str: ... +def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: ... +@overload +def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: ... +@overload +def urlparse( + url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True +) -> ParseResultBytes: ... +@overload +def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: ... + +if sys.version_info >= (3, 11): + @overload + def urlsplit( + url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True + ) -> SplitResultBytes: ... + +else: + @overload + def urlsplit( + url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True + ) -> SplitResultBytes: ... + +@overload +def urlunparse( + components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None] +) -> AnyStr: ... +@overload +def urlunparse(components: Sequence[AnyStr | None]) -> AnyStr: ... +@overload +def urlunsplit(components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None]) -> AnyStr: ... +@overload +def urlunsplit(components: Sequence[AnyStr | None]) -> AnyStr: ... +def unwrap(url: str) -> str: ... 
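# Editorial note: an illustrative sketch, not part of the vendored typeshed stubs,
# round-tripping a URL through the helpers typed in urllib/parse.pyi above.
from urllib.parse import parse_qs, urlencode, urlparse, urlunparse

parts = urlparse("https://example.com/search?q=py%20perf&page=2")
assert parts.scheme == "https" and parts.netloc == "example.com"
assert parse_qs(parts.query) == {"q": ["py perf"], "page": ["2"]}

query = urlencode({"q": "py perf", "page": 2})        # "q=py+perf&page=2"
rebuilt = urlunparse(parts._replace(query=query))     # ParseResult is a NamedTuple
assert rebuilt == "https://example.com/search?q=py+perf&page=2"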
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/request.pyi new file mode 100644 index 00000000..09ce2796 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/request.pyi @@ -0,0 +1,387 @@ +import ssl +import sys +from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsRead +from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence +from email.message import Message +from http.client import HTTPConnection, HTTPMessage, HTTPResponse +from http.cookiejar import CookieJar +from re import Pattern +from typing import IO, Any, ClassVar, NoReturn, Protocol, TypeVar, overload +from typing_extensions import TypeAlias +from urllib.error import HTTPError as HTTPError +from urllib.response import addclosehook, addinfourl + +__all__ = [ + "Request", + "OpenerDirector", + "BaseHandler", + "HTTPDefaultErrorHandler", + "HTTPRedirectHandler", + "HTTPCookieProcessor", + "ProxyHandler", + "HTTPPasswordMgr", + "HTTPPasswordMgrWithDefaultRealm", + "HTTPPasswordMgrWithPriorAuth", + "AbstractBasicAuthHandler", + "HTTPBasicAuthHandler", + "ProxyBasicAuthHandler", + "AbstractDigestAuthHandler", + "HTTPDigestAuthHandler", + "ProxyDigestAuthHandler", + "HTTPHandler", + "FileHandler", + "FTPHandler", + "CacheFTPHandler", + "DataHandler", + "UnknownHandler", + "HTTPErrorProcessor", + "urlopen", + "install_opener", + "build_opener", + "pathname2url", + "url2pathname", + "getproxies", + "urlretrieve", + "urlcleanup", + "URLopener", + "FancyURLopener", + "HTTPSHandler", +] + +_T = TypeVar("_T") +_UrlopenRet: TypeAlias = Any +_DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | None + +def urlopen( + url: str | Request, + data: _DataType | None = None, + timeout: float | None = ..., + *, + cafile: str | None = None, + capath: str | None = None, + cadefault: bool = False, + context: ssl.SSLContext | None = None, +) -> _UrlopenRet: ... +def install_opener(opener: OpenerDirector) -> None: ... +def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: ... + +if sys.platform == "win32": + from nturl2path import pathname2url as pathname2url, url2pathname as url2pathname +else: + def url2pathname(pathname: str) -> str: ... + def pathname2url(pathname: str) -> str: ... + +def getproxies() -> dict[str, str]: ... +def parse_http_list(s: str) -> list[str]: ... +def parse_keqv_list(l: list[str]) -> dict[str, str]: ... + +if sys.platform == "win32" or sys.platform == "darwin": + def proxy_bypass(host: str) -> Any: ... # undocumented + +else: + def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: ... # undocumented + +class Request: + @property + def full_url(self) -> str: ... + @full_url.setter + def full_url(self, value: str) -> None: ... + @full_url.deleter + def full_url(self) -> None: ... + type: str + host: str + origin_req_host: str + selector: str + data: _DataType + headers: MutableMapping[str, str] + unredirected_hdrs: dict[str, str] + unverifiable: bool + method: str | None + timeout: float | None # Undocumented, only set after __init__() by OpenerDirector.open() + def __init__( + self, + url: str, + data: _DataType = None, + headers: MutableMapping[str, str] = ..., + origin_req_host: str | None = None, + unverifiable: bool = False, + method: str | None = None, + ) -> None: ... + def get_method(self) -> str: ... 
+ def add_header(self, key: str, val: str) -> None: ... + def add_unredirected_header(self, key: str, val: str) -> None: ... + def has_header(self, header_name: str) -> bool: ... + def remove_header(self, header_name: str) -> None: ... + def get_full_url(self) -> str: ... + def set_proxy(self, host: str, type: str) -> None: ... + @overload + def get_header(self, header_name: str) -> str | None: ... + @overload + def get_header(self, header_name: str, default: _T) -> str | _T: ... + def header_items(self) -> list[tuple[str, str]]: ... + def has_proxy(self) -> bool: ... + +class OpenerDirector: + addheaders: list[tuple[str, str]] + def add_handler(self, handler: BaseHandler) -> None: ... + def open(self, fullurl: str | Request, data: _DataType = None, timeout: float | None = ...) -> _UrlopenRet: ... + def error(self, proto: str, *args: Any) -> _UrlopenRet: ... + def close(self) -> None: ... + +class BaseHandler: + handler_order: ClassVar[int] + parent: OpenerDirector + def add_parent(self, parent: OpenerDirector) -> None: ... + def close(self) -> None: ... + def __lt__(self, other: object) -> bool: ... + +class HTTPDefaultErrorHandler(BaseHandler): + def http_error_default( + self, req: Request, fp: IO[bytes], code: int, msg: str, hdrs: HTTPMessage + ) -> HTTPError: ... # undocumented + +class HTTPRedirectHandler(BaseHandler): + max_redirections: ClassVar[int] # undocumented + max_repeats: ClassVar[int] # undocumented + inf_msg: ClassVar[str] # undocumented + def redirect_request( + self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage, newurl: str + ) -> Request | None: ... + def http_error_301(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + def http_error_302(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + def http_error_303(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + def http_error_307(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + if sys.version_info >= (3, 11): + def http_error_308( + self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage + ) -> _UrlopenRet | None: ... + +class HTTPCookieProcessor(BaseHandler): + cookiejar: CookieJar + def __init__(self, cookiejar: CookieJar | None = None) -> None: ... + def http_request(self, request: Request) -> Request: ... # undocumented + def http_response(self, request: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented + def https_request(self, request: Request) -> Request: ... # undocumented + def https_response(self, request: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented + +class ProxyHandler(BaseHandler): + def __init__(self, proxies: dict[str, str] | None = None) -> None: ... + def proxy_open(self, req: Request, proxy: str, type: str) -> _UrlopenRet | None: ... # undocumented + # TODO add a method for every (common) proxy protocol + +class HTTPPasswordMgr: + def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ... + def find_user_password(self, realm: str, authuri: str) -> tuple[str | None, str | None]: ... + def is_suburi(self, base: str, test: str) -> bool: ... # undocumented + def reduce_uri(self, uri: str, default_port: bool = True) -> str: ... 
# undocumented + +class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): + def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ... + def find_user_password(self, realm: str | None, authuri: str) -> tuple[str | None, str | None]: ... + +class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm): + def add_password( + self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str, is_authenticated: bool = False + ) -> None: ... + def update_authenticated(self, uri: str | Sequence[str], is_authenticated: bool = False) -> None: ... + def is_authenticated(self, authuri: str) -> bool: ... + +class AbstractBasicAuthHandler: + rx: ClassVar[Pattern[str]] # undocumented + passwd: HTTPPasswordMgr + add_password: Callable[[str, str | Sequence[str], str, str], None] + def __init__(self, password_mgr: HTTPPasswordMgr | None = None) -> None: ... + def http_error_auth_reqed(self, authreq: str, host: str, req: Request, headers: HTTPMessage) -> None: ... + def http_request(self, req: Request) -> Request: ... # undocumented + def http_response(self, req: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented + def https_request(self, req: Request) -> Request: ... # undocumented + def https_response(self, req: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented + def retry_http_basic_auth(self, host: str, req: Request, realm: str) -> _UrlopenRet | None: ... # undocumented + +class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + auth_header: ClassVar[str] # undocumented + def http_error_401(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + +class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): + auth_header: ClassVar[str] + def http_error_407(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + +class AbstractDigestAuthHandler: + def __init__(self, passwd: HTTPPasswordMgr | None = None) -> None: ... + def reset_retry_count(self) -> None: ... + def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: HTTPMessage) -> None: ... + def retry_http_digest_auth(self, req: Request, auth: str) -> _UrlopenRet | None: ... + def get_cnonce(self, nonce: str) -> str: ... + def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... + def get_algorithm_impls(self, algorithm: str) -> tuple[Callable[[str], str], Callable[[str, str], str]]: ... + def get_entity_digest(self, data: ReadableBuffer | None, chal: Mapping[str, str]) -> str | None: ... + +class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + auth_header: ClassVar[str] # undocumented + def http_error_401(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + +class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): + auth_header: ClassVar[str] # undocumented + def http_error_407(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... + +class _HTTPConnectionProtocol(Protocol): + def __call__( + self, + host: str, + port: int | None = ..., + timeout: float = ..., + source_address: tuple[str, int] | None = ..., + blocksize: int = ..., + ) -> HTTPConnection: ... + +class AbstractHTTPHandler(BaseHandler): # undocumented + def __init__(self, debuglevel: int = 0) -> None: ... + def set_http_debuglevel(self, level: int) -> None: ... 
+ def do_request_(self, request: Request) -> Request: ... + def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: ... + +class HTTPHandler(AbstractHTTPHandler): + def http_open(self, req: Request) -> HTTPResponse: ... + def http_request(self, request: Request) -> Request: ... # undocumented + +class HTTPSHandler(AbstractHTTPHandler): + def __init__( + self, debuglevel: int = 0, context: ssl.SSLContext | None = None, check_hostname: bool | None = None + ) -> None: ... + def https_open(self, req: Request) -> HTTPResponse: ... + def https_request(self, request: Request) -> Request: ... # undocumented + +class FileHandler(BaseHandler): + names: ClassVar[tuple[str, ...] | None] # undocumented + def file_open(self, req: Request) -> addinfourl: ... + def get_names(self) -> tuple[str, ...]: ... # undocumented + def open_local_file(self, req: Request) -> addinfourl: ... # undocumented + +class DataHandler(BaseHandler): + def data_open(self, req: Request) -> addinfourl: ... + +class ftpwrapper: # undocumented + def __init__( + self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float | None = None, persistent: bool = True + ) -> None: ... + def close(self) -> None: ... + def endtransfer(self) -> None: ... + def file_close(self) -> None: ... + def init(self) -> None: ... + def real_close(self) -> None: ... + def retrfile(self, file: str, type: str) -> tuple[addclosehook, int]: ... + +class FTPHandler(BaseHandler): + def ftp_open(self, req: Request) -> addinfourl: ... + def connect_ftp( + self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float + ) -> ftpwrapper: ... # undocumented + +class CacheFTPHandler(FTPHandler): + def setTimeout(self, t: float) -> None: ... + def setMaxConns(self, m: int) -> None: ... + def check_cache(self) -> None: ... # undocumented + def clear_cache(self) -> None: ... # undocumented + +class UnknownHandler(BaseHandler): + def unknown_open(self, req: Request) -> NoReturn: ... + +class HTTPErrorProcessor(BaseHandler): + def http_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... + def https_response(self, request: Request, response: HTTPResponse) -> _UrlopenRet: ... + +def urlretrieve( + url: str, + filename: StrOrBytesPath | None = None, + reporthook: Callable[[int, int, int], object] | None = None, + data: _DataType = None, +) -> tuple[str, HTTPMessage]: ... +def urlcleanup() -> None: ... + +class URLopener: + version: ClassVar[str] + def __init__(self, proxies: dict[str, str] | None = None, **x509: str) -> None: ... + def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def retrieve( + self, + url: str, + filename: str | None = None, + reporthook: Callable[[int, int, int], object] | None = None, + data: ReadableBuffer | None = None, + ) -> tuple[str, Message | None]: ... + def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented + def cleanup(self) -> None: ... # undocumented + def close(self) -> None: ... # undocumented + def http_error( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = None + ) -> _UrlopenRet: ... # undocumented + def http_error_default( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage + ) -> _UrlopenRet: ... # undocumented + def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: ... 
# undocumented + def open_file(self, url: str) -> addinfourl: ... # undocumented + def open_ftp(self, url: str) -> addinfourl: ... # undocumented + def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_local_file(self, url: str) -> addinfourl: ... # undocumented + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: ... # undocumented + +class FancyURLopener(URLopener): + def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... + def get_user_passwd(self, host: str, realm: str, clear_cache: int = 0) -> tuple[str, str]: ... # undocumented + def http_error_301( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + def http_error_302( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + def http_error_303( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + def http_error_307( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + if sys.version_info >= (3, 11): + def http_error_308( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None + ) -> _UrlopenRet | addinfourl | None: ... # undocumented + + def http_error_401( + self, + url: str, + fp: IO[bytes], + errcode: int, + errmsg: str, + headers: HTTPMessage, + data: ReadableBuffer | None = None, + retry: bool = False, + ) -> _UrlopenRet | None: ... # undocumented + def http_error_407( + self, + url: str, + fp: IO[bytes], + errcode: int, + errmsg: str, + headers: HTTPMessage, + data: ReadableBuffer | None = None, + retry: bool = False, + ) -> _UrlopenRet | None: ... # undocumented + def http_error_default( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage + ) -> addinfourl: ... # undocumented + def redirect_internal( + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None + ) -> _UrlopenRet | None: ... # undocumented + def retry_http_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_https_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_proxy_http_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_proxy_https_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... 
# undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/response.pyi new file mode 100644 index 00000000..61ba6870 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/response.pyi @@ -0,0 +1,59 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Callable, Iterable +from email.message import Message +from types import TracebackType +from typing import IO, Any, BinaryIO +from typing_extensions import Self + +__all__ = ["addbase", "addclosehook", "addinfo", "addinfourl"] + +class addbase(BinaryIO): + fp: IO[bytes] + def __init__(self, fp: IO[bytes]) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> bytes: ... + def close(self) -> None: ... + # These methods don't actually exist, but the class inherits at runtime from + # tempfile._TemporaryFileWrapper, which uses __getattr__ to delegate to the + # underlying file object. To satisfy the BinaryIO interface, we pretend that this + # class has these additional methods. + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def read(self, n: int = ...) -> bytes: ... + def readable(self) -> bool: ... + def readline(self, limit: int = ...) -> bytes: ... + def readlines(self, hint: int = ...) -> list[bytes]: ... + def seek(self, offset: int, whence: int = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def truncate(self, size: int | None = ...) -> int: ... + def writable(self) -> bool: ... + def write(self, s: ReadableBuffer) -> int: ... + def writelines(self, lines: Iterable[ReadableBuffer]) -> None: ... + +class addclosehook(addbase): + closehook: Callable[..., object] + hookargs: tuple[Any, ...] + def __init__(self, fp: IO[bytes], closehook: Callable[..., object], *hookargs: Any) -> None: ... + +class addinfo(addbase): + headers: Message + def __init__(self, fp: IO[bytes], headers: Message) -> None: ... + def info(self) -> Message: ... + +class addinfourl(addinfo): + url: str + code: int | None + if sys.version_info >= (3, 9): + @property + def status(self) -> int | None: ... + + def __init__(self, fp: IO[bytes], headers: Message, url: str, code: int | None = None) -> None: ... + def geturl(self) -> str: ... + def getcode(self) -> int | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/robotparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/robotparser.pyi new file mode 100644 index 00000000..d218c3dc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/urllib/robotparser.pyi @@ -0,0 +1,22 @@ +import sys +from collections.abc import Iterable +from typing import NamedTuple + +__all__ = ["RobotFileParser"] + +class RequestRate(NamedTuple): + requests: int + seconds: int + +class RobotFileParser: + def __init__(self, url: str = "") -> None: ... + def set_url(self, url: str) -> None: ... + def read(self) -> None: ... + def parse(self, lines: Iterable[str]) -> None: ... + def can_fetch(self, useragent: str, url: str) -> bool: ... + def mtime(self) -> int: ... + def modified(self) -> None: ... 
+ def crawl_delay(self, useragent: str) -> str | None: ... + def request_rate(self, useragent: str) -> RequestRate | None: ... + if sys.version_info >= (3, 8): + def site_maps(self) -> list[str] | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/uu.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/uu.pyi new file mode 100644 index 00000000..324053e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/uu.pyi @@ -0,0 +1,13 @@ +from typing import BinaryIO +from typing_extensions import TypeAlias + +__all__ = ["Error", "encode", "decode"] + +_File: TypeAlias = str | BinaryIO + +class Error(Exception): ... + +def encode( + in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False +) -> None: ... +def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/uuid.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/uuid.pyi new file mode 100644 index 00000000..24925778 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/uuid.pyi @@ -0,0 +1,87 @@ +import sys +from _typeshed import Unused +from enum import Enum +from typing_extensions import TypeAlias + +# Because UUID has properties called int and bytes we need to rename these temporarily. +_Int: TypeAlias = int +_Bytes: TypeAlias = bytes +_FieldsType: TypeAlias = tuple[int, int, int, int, int, int] + +class SafeUUID(Enum): + safe: int + unsafe: int + unknown: None + +class UUID: + def __init__( + self, + hex: str | None = None, + bytes: _Bytes | None = None, + bytes_le: _Bytes | None = None, + fields: _FieldsType | None = None, + int: _Int | None = None, + version: _Int | None = None, + *, + is_safe: SafeUUID = ..., + ) -> None: ... + @property + def is_safe(self) -> SafeUUID: ... + @property + def bytes(self) -> _Bytes: ... + @property + def bytes_le(self) -> _Bytes: ... + @property + def clock_seq(self) -> _Int: ... + @property + def clock_seq_hi_variant(self) -> _Int: ... + @property + def clock_seq_low(self) -> _Int: ... + @property + def fields(self) -> _FieldsType: ... + @property + def hex(self) -> str: ... + @property + def int(self) -> _Int: ... + @property + def node(self) -> _Int: ... + @property + def time(self) -> _Int: ... + @property + def time_hi_version(self) -> _Int: ... + @property + def time_low(self) -> _Int: ... + @property + def time_mid(self) -> _Int: ... + @property + def urn(self) -> str: ... + @property + def variant(self) -> str: ... + @property + def version(self) -> _Int | None: ... + def __int__(self) -> _Int: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: UUID) -> bool: ... + def __le__(self, other: UUID) -> bool: ... + def __gt__(self, other: UUID) -> bool: ... + def __ge__(self, other: UUID) -> bool: ... + +if sys.version_info >= (3, 9): + def getnode() -> int: ... + +else: + def getnode(*, getters: Unused = None) -> int: ... # undocumented + +def uuid1(node: _Int | None = None, clock_seq: _Int | None = None) -> UUID: ... +def uuid3(namespace: UUID, name: str) -> UUID: ... +def uuid4() -> UUID: ... +def uuid5(namespace: UUID, name: str) -> UUID: ... 
+ +NAMESPACE_DNS: UUID +NAMESPACE_URL: UUID +NAMESPACE_OID: UUID +NAMESPACE_X500: UUID +RESERVED_NCS: str +RFC_4122: str +RESERVED_MICROSOFT: str +RESERVED_FUTURE: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/warnings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/warnings.pyi new file mode 100644 index 00000000..6222eb65 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/warnings.pyi @@ -0,0 +1,112 @@ +import sys +from _warnings import warn as warn, warn_explicit as warn_explicit +from collections.abc import Sequence +from types import ModuleType, TracebackType +from typing import Any, Generic, TextIO, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +__all__ = [ + "warn", + "warn_explicit", + "showwarning", + "formatwarning", + "filterwarnings", + "simplefilter", + "resetwarnings", + "catch_warnings", +] + +_W = TypeVar("_W", bound=list[WarningMessage] | None) +_ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "module", "once"] + +filters: Sequence[tuple[str, str | None, type[Warning], str | None, int]] # undocumented, do not mutate + +def showwarning( + message: Warning | str, + category: type[Warning], + filename: str, + lineno: int, + file: TextIO | None = None, + line: str | None = None, +) -> None: ... +def formatwarning( + message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = None +) -> str: ... +def filterwarnings( + action: _ActionKind, message: str = "", category: type[Warning] = ..., module: str = "", lineno: int = 0, append: bool = False +) -> None: ... +def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = 0, append: bool = False) -> None: ... +def resetwarnings() -> None: ... + +class _OptionError(Exception): ... + +class WarningMessage: + message: Warning | str + category: type[Warning] + filename: str + lineno: int + file: TextIO | None + line: str | None + source: Any | None + def __init__( + self, + message: Warning | str, + category: type[Warning], + filename: str, + lineno: int, + file: TextIO | None = None, + line: str | None = None, + source: Any | None = None, + ) -> None: ... + +class catch_warnings(Generic[_W]): + if sys.version_info >= (3, 11): + @overload + def __init__( + self: catch_warnings[None], + *, + record: Literal[False] = False, + module: ModuleType | None = None, + action: _ActionKind | None = None, + category: type[Warning] = ..., + lineno: int = 0, + append: bool = False, + ) -> None: ... + @overload + def __init__( + self: catch_warnings[list[WarningMessage]], + *, + record: Literal[True], + module: ModuleType | None = None, + action: _ActionKind | None = None, + category: type[Warning] = ..., + lineno: int = 0, + append: bool = False, + ) -> None: ... + @overload + def __init__( + self: catch_warnings[list[WarningMessage] | None], + *, + record: bool, + module: ModuleType | None = None, + action: _ActionKind | None = None, + category: type[Warning] = ..., + lineno: int = 0, + append: bool = False, + ) -> None: ... + else: + @overload + def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: ... + @overload + def __init__( + self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = None + ) -> None: ... 
+ @overload + def __init__( + self: catch_warnings[list[WarningMessage] | None], *, record: bool, module: ModuleType | None = None + ) -> None: ... + + def __enter__(self) -> _W: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wave.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wave.pyi new file mode 100644 index 00000000..0d004d6b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wave.pyi @@ -0,0 +1,78 @@ +import sys +from _typeshed import ReadableBuffer, Unused +from typing import IO, Any, BinaryIO, NamedTuple, NoReturn, overload +from typing_extensions import Literal, Self, TypeAlias + +if sys.version_info >= (3, 9): + __all__ = ["open", "Error", "Wave_read", "Wave_write"] +else: + __all__ = ["open", "openfp", "Error", "Wave_read", "Wave_write"] + +_File: TypeAlias = str | IO[bytes] + +class Error(Exception): ... + +WAVE_FORMAT_PCM: Literal[1] + +class _wave_params(NamedTuple): + nchannels: int + sampwidth: int + framerate: int + nframes: int + comptype: str + compname: str + +class Wave_read: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def getfp(self) -> BinaryIO | None: ... + def rewind(self) -> None: ... + def close(self) -> None: ... + def tell(self) -> int: ... + def getnchannels(self) -> int: ... + def getnframes(self) -> int: ... + def getsampwidth(self) -> int: ... + def getframerate(self) -> int: ... + def getcomptype(self) -> str: ... + def getcompname(self) -> str: ... + def getparams(self) -> _wave_params: ... + def getmarkers(self) -> None: ... + def getmark(self, id: Any) -> NoReturn: ... + def setpos(self, pos: int) -> None: ... + def readframes(self, nframes: int) -> bytes: ... + +class Wave_write: + def __init__(self, f: _File) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def setnchannels(self, nchannels: int) -> None: ... + def getnchannels(self) -> int: ... + def setsampwidth(self, sampwidth: int) -> None: ... + def getsampwidth(self) -> int: ... + def setframerate(self, framerate: float) -> None: ... + def getframerate(self) -> int: ... + def setnframes(self, nframes: int) -> None: ... + def getnframes(self) -> int: ... + def setcomptype(self, comptype: str, compname: str) -> None: ... + def getcomptype(self) -> str: ... + def getcompname(self) -> str: ... + def setparams(self, params: _wave_params | tuple[int, int, int, int, str, str]) -> None: ... + def getparams(self) -> _wave_params: ... + def setmark(self, id: Any, pos: Any, name: Any) -> NoReturn: ... + def getmark(self, id: Any) -> NoReturn: ... + def getmarkers(self) -> None: ... + def tell(self) -> int: ... + def writeframesraw(self, data: ReadableBuffer) -> None: ... + def writeframes(self, data: ReadableBuffer) -> None: ... + def close(self) -> None: ... + +@overload +def open(f: _File, mode: Literal["r", "rb"]) -> Wave_read: ... +@overload +def open(f: _File, mode: Literal["w", "wb"]) -> Wave_write: ... +@overload +def open(f: _File, mode: str | None = None) -> Any: ... 
+ +if sys.version_info < (3, 9): + openfp = open diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/weakref.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/weakref.pyi new file mode 100644 index 00000000..1e0aac81 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/weakref.pyi @@ -0,0 +1,137 @@ +import sys +from _typeshed import SupportsKeysAndGetItem +from _weakref import ( + CallableProxyType as CallableProxyType, + ProxyType as ProxyType, + ReferenceType as ReferenceType, + getweakrefcount as getweakrefcount, + getweakrefs as getweakrefs, + proxy as proxy, + ref as ref, +) +from _weakrefset import WeakSet as WeakSet +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping +from typing import Any, Generic, TypeVar, overload +from typing_extensions import ParamSpec, Self + +__all__ = [ + "ref", + "proxy", + "getweakrefcount", + "getweakrefs", + "WeakKeyDictionary", + "ReferenceType", + "ProxyType", + "CallableProxyType", + "ProxyTypes", + "WeakValueDictionary", + "WeakSet", + "WeakMethod", + "finalize", +] + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_CallableT = TypeVar("_CallableT", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +ProxyTypes: tuple[type[Any], ...] + +class WeakMethod(ref[_CallableT], Generic[_CallableT]): + def __new__(cls, meth: _CallableT, callback: Callable[[_CallableT], object] | None = None) -> Self: ... + def __call__(self) -> _CallableT | None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + +class WeakValueDictionary(MutableMapping[_KT, _VT]): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self: WeakValueDictionary[_KT, _VT], __other: Mapping[_KT, _VT] | Iterable[tuple[_KT, _VT]]) -> None: ... + @overload + def __init__( + self: WeakValueDictionary[str, _VT], __other: Mapping[str, _VT] | Iterable[tuple[str, _VT]] = ..., **kwargs: _VT + ) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[_KT]: ... + def copy(self) -> WeakValueDictionary[_KT, _VT]: ... + __copy__ = copy + def __deepcopy__(self, memo: Any) -> Self: ... + # These are incompatible with Mapping + def keys(self) -> Iterator[_KT]: ... # type: ignore[override] + def values(self) -> Iterator[_VT]: ... # type: ignore[override] + def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] + def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ... + def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: ... + def setdefault(self, key: _KT, default: _VT) -> _VT: ... # type: ignore[override] + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... + if sys.version_info >= (3, 9): + def __or__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... + # WeakValueDictionary.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... 
+ @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + +class KeyedRef(ref[_T], Generic[_KT, _T]): + key: _KT + # This __new__ method uses a non-standard name for the "cls" parameter + def __new__(type, ob: _T, callback: Callable[[_T], Any], key: _KT) -> Self: ... + def __init__(self, ob: _T, callback: Callable[[_T], Any], key: _KT) -> None: ... + +class WeakKeyDictionary(MutableMapping[_KT, _VT]): + @overload + def __init__(self, dict: None = None) -> None: ... + @overload + def __init__(self, dict: Mapping[_KT, _VT] | Iterable[tuple[_KT, _VT]]) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[_KT]: ... + def copy(self) -> WeakKeyDictionary[_KT, _VT]: ... + __copy__ = copy + def __deepcopy__(self, memo: Any) -> Self: ... + # These are incompatible with Mapping + def keys(self) -> Iterator[_KT]: ... # type: ignore[override] + def values(self) -> Iterator[_VT]: ... # type: ignore[override] + def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] + def keyrefs(self) -> list[ref[_KT]]: ... + # Keep WeakKeyDictionary.setdefault in line with MutableMapping.setdefault, modulo positional-only differences + @overload + def setdefault(self: WeakKeyDictionary[_KT, _VT | None], key: _KT, default: None = None) -> _VT: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... + if sys.version_info >= (3, 9): + def __or__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... + # WeakKeyDictionary.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + +class finalize: # TODO: This is a good candidate for to be a `Generic[_P, _T]` class + def __init__(self, __obj: object, __func: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def __call__(self, _: Any = None) -> Any | None: ... + def detach(self) -> tuple[Any, Any, tuple[Any, ...], dict[str, Any]] | None: ... + def peek(self) -> tuple[Any, Any, tuple[Any, ...], dict[str, Any]] | None: ... + @property + def alive(self) -> bool: ... + atexit: bool diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/webbrowser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/webbrowser.pyi new file mode 100644 index 00000000..02edd42e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/webbrowser.pyi @@ -0,0 +1,73 @@ +import sys +from abc import abstractmethod +from collections.abc import Callable, Sequence +from typing_extensions import Literal + +__all__ = ["Error", "open", "open_new", "open_new_tab", "get", "register"] + +class Error(Exception): ... + +def register( + name: str, klass: Callable[[], BaseBrowser] | None, instance: BaseBrowser | None = None, *, preferred: bool = False +) -> None: ... +def get(using: str | None = None) -> BaseBrowser: ... 
+def open(url: str, new: int = 0, autoraise: bool = True) -> bool: ... +def open_new(url: str) -> bool: ... +def open_new_tab(url: str) -> bool: ... + +class BaseBrowser: + args: list[str] + name: str + basename: str + def __init__(self, name: str = "") -> None: ... + @abstractmethod + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + def open_new(self, url: str) -> bool: ... + def open_new_tab(self, url: str) -> bool: ... + +class GenericBrowser(BaseBrowser): + def __init__(self, name: str | Sequence[str]) -> None: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + +class BackgroundBrowser(GenericBrowser): ... + +class UnixBrowser(BaseBrowser): + def open(self, url: str, new: Literal[0, 1, 2] = 0, autoraise: bool = True) -> bool: ... # type: ignore[override] + raise_opts: list[str] | None + background: bool + redirect_stdout: bool + remote_args: list[str] + remote_action: str + remote_action_newwin: str + remote_action_newtab: str + +class Mozilla(UnixBrowser): ... + +class Galeon(UnixBrowser): + raise_opts: list[str] + +class Chrome(UnixBrowser): ... +class Opera(UnixBrowser): ... +class Elinks(UnixBrowser): ... + +class Konqueror(BaseBrowser): + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + +class Grail(BaseBrowser): + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + +if sys.platform == "win32": + class WindowsDefault(BaseBrowser): + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + +if sys.platform == "darwin": + class MacOSX(BaseBrowser): + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... + + class MacOSXOSAScript(BaseBrowser): # In runtime this class does not have `name` and `basename` + if sys.version_info >= (3, 11): + def __init__(self, name: str = "default") -> None: ... + else: + def __init__(self, name: str) -> None: ... + + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/winreg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/winreg.pyi new file mode 100644 index 00000000..5b2d09a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/winreg.pyi @@ -0,0 +1,100 @@ +import sys +from types import TracebackType +from typing import Any +from typing_extensions import Literal, Self, TypeAlias, final + +if sys.platform == "win32": + _KeyType: TypeAlias = HKEYType | int + def CloseKey(__hkey: _KeyType) -> None: ... + def ConnectRegistry(__computer_name: str | None, __key: _KeyType) -> HKEYType: ... + def CreateKey(__key: _KeyType, __sub_key: str | None) -> HKEYType: ... + def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131078) -> HKEYType: ... + def DeleteKey(__key: _KeyType, __sub_key: str) -> None: ... + def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = 256, reserved: int = 0) -> None: ... + def DeleteValue(__key: _KeyType, __value: str) -> None: ... + def EnumKey(__key: _KeyType, __index: int) -> str: ... + def EnumValue(__key: _KeyType, __index: int) -> tuple[str, Any, int]: ... + def ExpandEnvironmentStrings(__str: str) -> str: ... + def FlushKey(__key: _KeyType) -> None: ... + def LoadKey(__key: _KeyType, __sub_key: str, __file_name: str) -> None: ... + def OpenKey(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... 
+ def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... + def QueryInfoKey(__key: _KeyType) -> tuple[int, int, int]: ... + def QueryValue(__key: _KeyType, __sub_key: str | None) -> str: ... + def QueryValueEx(__key: _KeyType, __name: str) -> tuple[Any, int]: ... + def SaveKey(__key: _KeyType, __file_name: str) -> None: ... + def SetValue(__key: _KeyType, __sub_key: str, __type: int, __value: str) -> None: ... + def SetValueEx( + __key: _KeyType, __value_name: str | None, __reserved: Any, __type: int, __value: str | int + ) -> None: ... # reserved is ignored + def DisableReflectionKey(__key: _KeyType) -> None: ... + def EnableReflectionKey(__key: _KeyType) -> None: ... + def QueryReflectionKey(__key: _KeyType) -> bool: ... + HKEY_CLASSES_ROOT: int + HKEY_CURRENT_USER: int + HKEY_LOCAL_MACHINE: int + HKEY_USERS: int + HKEY_PERFORMANCE_DATA: int + HKEY_CURRENT_CONFIG: int + HKEY_DYN_DATA: int + + KEY_ALL_ACCESS: Literal[983103] + KEY_WRITE: Literal[131078] + KEY_READ: Literal[131097] + KEY_EXECUTE: Literal[131097] + KEY_QUERY_VALUE: Literal[1] + KEY_SET_VALUE: Literal[2] + KEY_CREATE_SUB_KEY: Literal[4] + KEY_ENUMERATE_SUB_KEYS: Literal[8] + KEY_NOTIFY: Literal[16] + KEY_CREATE_LINK: Literal[32] + + KEY_WOW64_64KEY: Literal[256] + KEY_WOW64_32KEY: Literal[512] + + REG_BINARY: Literal[3] + REG_DWORD: Literal[4] + REG_DWORD_LITTLE_ENDIAN: Literal[4] + REG_DWORD_BIG_ENDIAN: Literal[5] + REG_EXPAND_SZ: Literal[2] + REG_LINK: Literal[6] + REG_MULTI_SZ: Literal[7] + REG_NONE: Literal[0] + REG_QWORD: Literal[11] + REG_QWORD_LITTLE_ENDIAN: Literal[11] + REG_RESOURCE_LIST: Literal[8] + REG_FULL_RESOURCE_DESCRIPTOR: Literal[9] + REG_RESOURCE_REQUIREMENTS_LIST: Literal[10] + REG_SZ: Literal[1] + + REG_CREATED_NEW_KEY: int # undocumented + REG_LEGAL_CHANGE_FILTER: int # undocumented + REG_LEGAL_OPTION: int # undocumented + REG_NOTIFY_CHANGE_ATTRIBUTES: int # undocumented + REG_NOTIFY_CHANGE_LAST_SET: int # undocumented + REG_NOTIFY_CHANGE_NAME: int # undocumented + REG_NOTIFY_CHANGE_SECURITY: int # undocumented + REG_NO_LAZY_FLUSH: int # undocumented + REG_OPENED_EXISTING_KEY: int # undocumented + REG_OPTION_BACKUP_RESTORE: int # undocumented + REG_OPTION_CREATE_LINK: int # undocumented + REG_OPTION_NON_VOLATILE: int # undocumented + REG_OPTION_OPEN_LINK: int # undocumented + REG_OPTION_RESERVED: int # undocumented + REG_OPTION_VOLATILE: int # undocumented + REG_REFRESH_HIVE: int # undocumented + REG_WHOLE_HIVE_VOLATILE: int # undocumented + + error = OSError + + # Though this class has a __name__ of PyHKEY, it's exposed as HKEYType for some reason + @final + class HKEYType: + def __bool__(self) -> bool: ... + def __int__(self) -> int: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + def Close(self) -> None: ... + def Detach(self) -> int: ... 
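As a quick orientation for the winreg stub above, a minimal usage sketch of OpenKey/QueryValueEx as declared there. It only runs on Windows, and the registry path and the "ProductName" value are illustrative choices, not anything the benchmark itself touches.

import sys

if sys.platform == "win32":
    import winreg

    # Open a well-known key read-only; HKEYType supports the context-manager
    # protocol declared in the stub, so the handle is closed automatically.
    with winreg.OpenKey(
        winreg.HKEY_LOCAL_MACHINE,
        r"SOFTWARE\Microsoft\Windows NT\CurrentVersion",
        0,
        winreg.KEY_READ,
    ) as key:
        # QueryValueEx returns a (value, type) pair, matching the stub's
        # tuple[Any, int] return annotation.
        value, value_type = winreg.QueryValueEx(key, "ProductName")
        print(value, value_type == winreg.REG_SZ)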
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/winsound.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/winsound.pyi new file mode 100644 index 00000000..9b2b57a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/winsound.pyi @@ -0,0 +1,28 @@ +import sys +from _typeshed import ReadableBuffer +from typing import overload +from typing_extensions import Literal + +if sys.platform == "win32": + SND_FILENAME: Literal[131072] + SND_ALIAS: Literal[65536] + SND_LOOP: Literal[8] + SND_MEMORY: Literal[4] + SND_PURGE: Literal[64] + SND_ASYNC: Literal[1] + SND_NODEFAULT: Literal[2] + SND_NOSTOP: Literal[16] + SND_NOWAIT: Literal[8192] + + MB_ICONASTERISK: Literal[64] + MB_ICONEXCLAMATION: Literal[48] + MB_ICONHAND: Literal[16] + MB_ICONQUESTION: Literal[32] + MB_OK: Literal[0] + def Beep(frequency: int, duration: int) -> None: ... + # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible + @overload + def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ... + @overload + def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ... + def MessageBeep(type: int = 0) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/handlers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/handlers.pyi new file mode 100644 index 00000000..ebead540 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/handlers.pyi @@ -0,0 +1,91 @@ +from _typeshed import OptExcInfo +from _typeshed.wsgi import ErrorStream, InputStream, StartResponse, WSGIApplication, WSGIEnvironment +from abc import abstractmethod +from collections.abc import Callable, MutableMapping +from typing import IO + +from .headers import Headers +from .util import FileWrapper + +__all__ = ["BaseHandler", "SimpleHandler", "BaseCGIHandler", "CGIHandler", "IISCGIHandler", "read_environ"] + +def format_date_time(timestamp: float | None) -> str: ... # undocumented +def read_environ() -> dict[str, str]: ... + +class BaseHandler: + wsgi_version: tuple[int, int] # undocumented + wsgi_multithread: bool + wsgi_multiprocess: bool + wsgi_run_once: bool + + origin_server: bool + http_version: str + server_software: str | None + + os_environ: MutableMapping[str, str] + + wsgi_file_wrapper: type[FileWrapper] | None + headers_class: type[Headers] # undocumented + + traceback_limit: int | None + error_status: str + error_headers: list[tuple[str, str]] + error_body: bytes + def run(self, application: WSGIApplication) -> None: ... + def setup_environ(self) -> None: ... + def finish_response(self) -> None: ... + def get_scheme(self) -> str: ... + def set_content_length(self) -> None: ... + def cleanup_headers(self) -> None: ... + def start_response( + self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = None + ) -> Callable[[bytes], None]: ... + def send_preamble(self) -> None: ... + def write(self, data: bytes) -> None: ... + def sendfile(self) -> bool: ... + def finish_content(self) -> None: ... + def close(self) -> None: ... + def send_headers(self) -> None: ... 
+ def result_is_file(self) -> bool: ... + def client_is_modern(self) -> bool: ... + def log_exception(self, exc_info: OptExcInfo) -> None: ... + def handle_error(self) -> None: ... + def error_output(self, environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: ... + @abstractmethod + def _write(self, data: bytes) -> None: ... + @abstractmethod + def _flush(self) -> None: ... + @abstractmethod + def get_stdin(self) -> InputStream: ... + @abstractmethod + def get_stderr(self) -> ErrorStream: ... + @abstractmethod + def add_cgi_vars(self) -> None: ... + +class SimpleHandler(BaseHandler): + stdin: InputStream + stdout: IO[bytes] + stderr: ErrorStream + base_env: MutableMapping[str, str] + def __init__( + self, + stdin: InputStream, + stdout: IO[bytes], + stderr: ErrorStream, + environ: MutableMapping[str, str], + multithread: bool = True, + multiprocess: bool = False, + ) -> None: ... + def get_stdin(self) -> InputStream: ... + def get_stderr(self) -> ErrorStream: ... + def add_cgi_vars(self) -> None: ... + def _write(self, data: bytes) -> None: ... + def _flush(self) -> None: ... + +class BaseCGIHandler(SimpleHandler): ... + +class CGIHandler(BaseCGIHandler): + def __init__(self) -> None: ... + +class IISCGIHandler(BaseCGIHandler): + def __init__(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/headers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/headers.pyi new file mode 100644 index 00000000..2654d79b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/headers.pyi @@ -0,0 +1,26 @@ +from re import Pattern +from typing import overload +from typing_extensions import TypeAlias + +_HeaderList: TypeAlias = list[tuple[str, str]] + +tspecials: Pattern[str] # undocumented + +class Headers: + def __init__(self, headers: _HeaderList | None = None) -> None: ... + def __len__(self) -> int: ... + def __setitem__(self, name: str, val: str) -> None: ... + def __delitem__(self, name: str) -> None: ... + def __getitem__(self, name: str) -> str | None: ... + def __contains__(self, name: str) -> bool: ... + def get_all(self, name: str) -> list[str]: ... + @overload + def get(self, name: str, default: str) -> str: ... + @overload + def get(self, name: str, default: str | None = None) -> str | None: ... + def keys(self) -> list[str]: ... + def values(self) -> list[str]: ... + def items(self) -> _HeaderList: ... + def __bytes__(self) -> bytes: ... + def setdefault(self, name: str, value: str) -> str: ... + def add_header(self, _name: str, _value: str | None, **_params: str | None) -> None: ... 
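For context on the Headers class stubbed above, a minimal sketch of its mapping behaviour; the header names and the filename parameter are illustrative.

from wsgiref.headers import Headers

# Header lookup is case-insensitive, and add_header() appends MIME-style
# parameters to the header value.
h = Headers([("Content-Type", "text/plain")])
h.add_header("Content-Disposition", "attachment", filename="report.txt")
print(h["content-type"])                 # 'text/plain'
print(h.get_all("Content-Disposition"))  # ['attachment; filename="report.txt"']
print(bytes(h))                          # formatted header block, per __bytes__ above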
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/simple_server.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/simple_server.pyi new file mode 100644 index 00000000..547f562c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/simple_server.pyi @@ -0,0 +1,37 @@ +from _typeshed.wsgi import ErrorStream, StartResponse, WSGIApplication, WSGIEnvironment +from http.server import BaseHTTPRequestHandler, HTTPServer +from typing import TypeVar, overload + +from .handlers import SimpleHandler + +__all__ = ["WSGIServer", "WSGIRequestHandler", "demo_app", "make_server"] + +server_version: str # undocumented +sys_version: str # undocumented +software_version: str # undocumented + +class ServerHandler(SimpleHandler): # undocumented + server_software: str + +class WSGIServer(HTTPServer): + application: WSGIApplication | None + base_environ: WSGIEnvironment # only available after call to setup_environ() + def setup_environ(self) -> None: ... + def get_app(self) -> WSGIApplication | None: ... + def set_app(self, application: WSGIApplication | None) -> None: ... + +class WSGIRequestHandler(BaseHTTPRequestHandler): + server_version: str + def get_environ(self) -> WSGIEnvironment: ... + def get_stderr(self) -> ErrorStream: ... + +def demo_app(environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: ... + +_S = TypeVar("_S", bound=WSGIServer) + +@overload +def make_server(host: str, port: int, app: WSGIApplication, *, handler_class: type[WSGIRequestHandler] = ...) -> WSGIServer: ... +@overload +def make_server( + host: str, port: int, app: WSGIApplication, server_class: type[_S], handler_class: type[WSGIRequestHandler] = ... +) -> _S: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/types.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/types.pyi new file mode 100644 index 00000000..4e8f4726 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/types.pyi @@ -0,0 +1,32 @@ +from collections.abc import Callable, Iterable, Iterator +from sys import _OptExcInfo +from typing import Any, Protocol +from typing_extensions import TypeAlias + +__all__ = ["StartResponse", "WSGIEnvironment", "WSGIApplication", "InputStream", "ErrorStream", "FileWrapper"] + +class StartResponse(Protocol): + def __call__( + self, __status: str, __headers: list[tuple[str, str]], __exc_info: _OptExcInfo | None = ... + ) -> Callable[[bytes], object]: ... + +WSGIEnvironment: TypeAlias = dict[str, Any] +WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] + +class InputStream(Protocol): + def read(self, __size: int = ...) -> bytes: ... + def readline(self, __size: int = ...) -> bytes: ... + def readlines(self, __hint: int = ...) -> list[bytes]: ... + def __iter__(self) -> Iterator[bytes]: ... + +class ErrorStream(Protocol): + def flush(self) -> object: ... + def write(self, __s: str) -> object: ... + def writelines(self, __seq: list[str]) -> object: ... + +class _Readable(Protocol): + def read(self, __size: int = ...) -> bytes: ... + # Optional: def close(self) -> object: ... + +class FileWrapper(Protocol): + def __call__(self, __file: _Readable, __block_size: int = ...) -> Iterable[bytes]: ... 
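To make the StartResponse and WSGIApplication protocols stubbed above concrete, a minimal sketch that calls a tiny application directly with a hand-built environ and a stand-in start_response; no server from simple_server is involved, and all names here are illustrative.

def app(environ, start_response):
    # A WSGIApplication: takes (environ, start_response) and returns an iterable of bytes.
    body = f"path={environ.get('PATH_INFO', '/')}".encode()
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [body]

captured = {}

def start_response(status, headers, exc_info=None):
    # A StartResponse: records the status and headers, returns a write() callable.
    captured["status"], captured["headers"] = status, headers
    return lambda data: None

print(b"".join(app({"PATH_INFO": "/hello"}, start_response)), captured["status"])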
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/util.pyi new file mode 100644 index 00000000..962fac2c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/util.pyi @@ -0,0 +1,24 @@ +import sys +from _typeshed.wsgi import WSGIEnvironment +from collections.abc import Callable +from typing import IO, Any + +__all__ = ["FileWrapper", "guess_scheme", "application_uri", "request_uri", "shift_path_info", "setup_testing_defaults"] + +class FileWrapper: + filelike: IO[bytes] + blksize: int + close: Callable[[], None] # only exists if filelike.close exists + def __init__(self, filelike: IO[bytes], blksize: int = 8192) -> None: ... + if sys.version_info < (3, 11): + def __getitem__(self, key: Any) -> bytes: ... + + def __iter__(self) -> FileWrapper: ... + def __next__(self) -> bytes: ... + +def guess_scheme(environ: WSGIEnvironment) -> str: ... +def application_uri(environ: WSGIEnvironment) -> str: ... +def request_uri(environ: WSGIEnvironment, include_query: bool = True) -> str: ... +def shift_path_info(environ: WSGIEnvironment) -> str | None: ... +def setup_testing_defaults(environ: WSGIEnvironment) -> None: ... +def is_hop_by_hop(header_name: str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/validate.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/validate.pyi new file mode 100644 index 00000000..fa8a6bbb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/wsgiref/validate.pyi @@ -0,0 +1,50 @@ +from _typeshed.wsgi import ErrorStream, InputStream, WSGIApplication +from collections.abc import Callable, Iterable, Iterator +from typing import Any, NoReturn +from typing_extensions import TypeAlias + +__all__ = ["validator"] + +class WSGIWarning(Warning): ... + +def validator(application: WSGIApplication) -> WSGIApplication: ... + +class InputWrapper: + input: InputStream + def __init__(self, wsgi_input: InputStream) -> None: ... + def read(self, size: int) -> bytes: ... + def readline(self, size: int = ...) -> bytes: ... + def readlines(self, hint: int = ...) -> bytes: ... + def __iter__(self) -> Iterator[bytes]: ... + def close(self) -> NoReturn: ... + +class ErrorWrapper: + errors: ErrorStream + def __init__(self, wsgi_errors: ErrorStream) -> None: ... + def write(self, s: str) -> None: ... + def flush(self) -> None: ... + def writelines(self, seq: Iterable[str]) -> None: ... + def close(self) -> NoReturn: ... + +_WriterCallback: TypeAlias = Callable[[bytes], Any] + +class WriteWrapper: + writer: _WriterCallback + def __init__(self, wsgi_writer: _WriterCallback) -> None: ... + def __call__(self, s: bytes) -> None: ... + +class PartialIteratorWrapper: + iterator: Iterator[bytes] + def __init__(self, wsgi_iterator: Iterator[bytes]) -> None: ... + def __iter__(self) -> IteratorWrapper: ... + +class IteratorWrapper: + original_iterator: Iterator[bytes] + iterator: Iterator[bytes] + closed: bool + check_start_response: bool | None + def __init__(self, wsgi_iterator: Iterator[bytes], check_start_response: bool | None) -> None: ... + def __iter__(self) -> IteratorWrapper: ... + def __next__(self) -> bytes: ... + def close(self) -> None: ... + def __del__(self) -> None: ... 
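A minimal sketch of the wsgiref.util helpers stubbed above: setup_testing_defaults() fills in a synthetic environ, request_uri() reconstructs the URL, and shift_path_info() pops one path segment. The path and query string are illustrative.

from wsgiref.util import request_uri, setup_testing_defaults, shift_path_info

environ = {"PATH_INFO": "/app/resource", "QUERY_STRING": "q=1"}
setup_testing_defaults(environ)

print(request_uri(environ, include_query=True))      # e.g. http://127.0.0.1/app/resource?q=1
print(shift_path_info(environ))                      # 'app'
print(environ["SCRIPT_NAME"], environ["PATH_INFO"])  # '/app' '/resource'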
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xdrlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xdrlib.pyi new file mode 100644 index 00000000..78f3ecec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xdrlib.pyi @@ -0,0 +1,57 @@ +from collections.abc import Callable, Sequence +from typing import TypeVar + +__all__ = ["Error", "Packer", "Unpacker", "ConversionError"] + +_T = TypeVar("_T") + +class Error(Exception): + msg: str + def __init__(self, msg: str) -> None: ... + +class ConversionError(Error): ... + +class Packer: + def reset(self) -> None: ... + def get_buffer(self) -> bytes: ... + def get_buf(self) -> bytes: ... + def pack_uint(self, x: int) -> None: ... + def pack_int(self, x: int) -> None: ... + def pack_enum(self, x: int) -> None: ... + def pack_bool(self, x: bool) -> None: ... + def pack_uhyper(self, x: int) -> None: ... + def pack_hyper(self, x: int) -> None: ... + def pack_float(self, x: float) -> None: ... + def pack_double(self, x: float) -> None: ... + def pack_fstring(self, n: int, s: bytes) -> None: ... + def pack_fopaque(self, n: int, s: bytes) -> None: ... + def pack_string(self, s: bytes) -> None: ... + def pack_opaque(self, s: bytes) -> None: ... + def pack_bytes(self, s: bytes) -> None: ... + def pack_list(self, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... + def pack_farray(self, n: int, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... + def pack_array(self, list: Sequence[_T], pack_item: Callable[[_T], object]) -> None: ... + +class Unpacker: + def __init__(self, data: bytes) -> None: ... + def reset(self, data: bytes) -> None: ... + def get_position(self) -> int: ... + def set_position(self, position: int) -> None: ... + def get_buffer(self) -> bytes: ... + def done(self) -> None: ... + def unpack_uint(self) -> int: ... + def unpack_int(self) -> int: ... + def unpack_enum(self) -> int: ... + def unpack_bool(self) -> bool: ... + def unpack_uhyper(self) -> int: ... + def unpack_hyper(self) -> int: ... + def unpack_float(self) -> float: ... + def unpack_double(self) -> float: ... + def unpack_fstring(self, n: int) -> bytes: ... + def unpack_fopaque(self, n: int) -> bytes: ... + def unpack_string(self) -> bytes: ... + def unpack_opaque(self) -> bytes: ... + def unpack_bytes(self) -> bytes: ... + def unpack_list(self, unpack_item: Callable[[], _T]) -> list[_T]: ... + def unpack_farray(self, n: int, unpack_item: Callable[[], _T]) -> list[_T]: ... + def unpack_array(self, unpack_item: Callable[[], _T]) -> list[_T]: ... 
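For orientation on the Packer/Unpacker pair stubbed above, a minimal round-trip sketch (note that xdrlib is deprecated as of Python 3.11); the packed values are arbitrary.

import xdrlib

p = xdrlib.Packer()
p.pack_int(-7)
p.pack_string(b"typeshed")
p.pack_list([1, 2, 3], p.pack_uint)

u = xdrlib.Unpacker(p.get_buffer())
print(u.unpack_int())                # -7
print(u.unpack_string())             # b'typeshed'
print(u.unpack_list(u.unpack_uint))  # [1, 2, 3]
u.done()                             # raises xdrlib.Error if unconsumed data remains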
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/__init__.pyi new file mode 100644 index 00000000..a487d246 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/__init__.pyi @@ -0,0 +1 @@ +from xml import parsers as parsers diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/NodeFilter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/NodeFilter.pyi new file mode 100644 index 00000000..80fb73d2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/NodeFilter.pyi @@ -0,0 +1,19 @@ +class NodeFilter: + FILTER_ACCEPT: int + FILTER_REJECT: int + FILTER_SKIP: int + + SHOW_ALL: int + SHOW_ELEMENT: int + SHOW_ATTRIBUTE: int + SHOW_TEXT: int + SHOW_CDATA_SECTION: int + SHOW_ENTITY_REFERENCE: int + SHOW_ENTITY: int + SHOW_PROCESSING_INSTRUCTION: int + SHOW_COMMENT: int + SHOW_DOCUMENT: int + SHOW_DOCUMENT_TYPE: int + SHOW_DOCUMENT_FRAGMENT: int + SHOW_NOTATION: int + def acceptNode(self, node) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/__init__.pyi new file mode 100644 index 00000000..e5b91bf2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/__init__.pyi @@ -0,0 +1,69 @@ +from typing import Any + +from .domreg import getDOMImplementation as getDOMImplementation, registerDOMImplementation as registerDOMImplementation + +class Node: + ELEMENT_NODE: int + ATTRIBUTE_NODE: int + TEXT_NODE: int + CDATA_SECTION_NODE: int + ENTITY_REFERENCE_NODE: int + ENTITY_NODE: int + PROCESSING_INSTRUCTION_NODE: int + COMMENT_NODE: int + DOCUMENT_NODE: int + DOCUMENT_TYPE_NODE: int + DOCUMENT_FRAGMENT_NODE: int + NOTATION_NODE: int + +# ExceptionCode +INDEX_SIZE_ERR: int +DOMSTRING_SIZE_ERR: int +HIERARCHY_REQUEST_ERR: int +WRONG_DOCUMENT_ERR: int +INVALID_CHARACTER_ERR: int +NO_DATA_ALLOWED_ERR: int +NO_MODIFICATION_ALLOWED_ERR: int +NOT_FOUND_ERR: int +NOT_SUPPORTED_ERR: int +INUSE_ATTRIBUTE_ERR: int +INVALID_STATE_ERR: int +SYNTAX_ERR: int +INVALID_MODIFICATION_ERR: int +NAMESPACE_ERR: int +INVALID_ACCESS_ERR: int +VALIDATION_ERR: int + +class DOMException(Exception): + code: int + def __init__(self, *args: Any, **kw: Any) -> None: ... + def _get_code(self) -> int: ... + +class IndexSizeErr(DOMException): ... +class DomstringSizeErr(DOMException): ... +class HierarchyRequestErr(DOMException): ... +class WrongDocumentErr(DOMException): ... +class InvalidCharacterErr(DOMException): ... +class NoDataAllowedErr(DOMException): ... +class NoModificationAllowedErr(DOMException): ... +class NotFoundErr(DOMException): ... +class NotSupportedErr(DOMException): ... +class InuseAttributeErr(DOMException): ... +class InvalidStateErr(DOMException): ... +class SyntaxErr(DOMException): ... +class InvalidModificationErr(DOMException): ... +class NamespaceErr(DOMException): ... +class InvalidAccessErr(DOMException): ... +class ValidationErr(DOMException): ... 
+ +class UserDataHandler: + NODE_CLONED: int + NODE_IMPORTED: int + NODE_DELETED: int + NODE_RENAMED: int + +XML_NAMESPACE: str +XMLNS_NAMESPACE: str +XHTML_NAMESPACE: str +EMPTY_NAMESPACE: None +EMPTY_PREFIX: None diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/domreg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/domreg.pyi new file mode 100644 index 00000000..a46d3ff0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/domreg.pyi @@ -0,0 +1,10 @@ +from _typeshed.xml import DOMImplementation +from collections.abc import Callable, Iterable + +well_known_implementations: dict[str, str] +registered: dict[str, Callable[[], DOMImplementation]] + +def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: ... +def getDOMImplementation( + name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ... +) -> DOMImplementation: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/expatbuilder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/expatbuilder.pyi new file mode 100644 index 00000000..45f0af7a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/expatbuilder.pyi @@ -0,0 +1,100 @@ +from _typeshed import Incomplete, ReadableBuffer, SupportsRead +from typing import Any, NoReturn +from xml.dom.minidom import Document, DOMImplementation, Node, TypeInfo +from xml.dom.xmlbuilder import DOMBuilderFilter, Options + +TEXT_NODE = Node.TEXT_NODE +CDATA_SECTION_NODE = Node.CDATA_SECTION_NODE +DOCUMENT_NODE = Node.DOCUMENT_NODE +FILTER_ACCEPT = DOMBuilderFilter.FILTER_ACCEPT +FILTER_REJECT = DOMBuilderFilter.FILTER_REJECT +FILTER_SKIP = DOMBuilderFilter.FILTER_SKIP +FILTER_INTERRUPT = DOMBuilderFilter.FILTER_INTERRUPT +theDOMImplementation: DOMImplementation | None + +class ElementInfo: + tagName: Incomplete + def __init__(self, tagName, model: Incomplete | None = None) -> None: ... + def getAttributeType(self, aname) -> TypeInfo: ... + def getAttributeTypeNS(self, namespaceURI, localName) -> TypeInfo: ... + def isElementContent(self) -> bool: ... + def isEmpty(self) -> bool: ... + def isId(self, aname) -> bool: ... + def isIdNS(self, euri, ename, auri, aname) -> bool: ... + +class ExpatBuilder: + document: Document # Created in self.reset() + curNode: Incomplete # Created in self.reset() + def __init__(self, options: Options | None = None) -> None: ... + def createParser(self): ... + def getParser(self): ... + def reset(self) -> None: ... + def install(self, parser) -> None: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: ... + def parseString(self, string: str | ReadableBuffer) -> Document: ... + def start_doctype_decl_handler(self, doctypeName, systemId, publicId, has_internal_subset) -> None: ... + def end_doctype_decl_handler(self) -> None: ... + def pi_handler(self, target, data) -> None: ... + def character_data_handler_cdata(self, data) -> None: ... + def character_data_handler(self, data) -> None: ... + def start_cdata_section_handler(self) -> None: ... + def end_cdata_section_handler(self) -> None: ... + def entity_decl_handler(self, entityName, is_parameter_entity, value, base, systemId, publicId, notationName) -> None: ... + def notation_decl_handler(self, notationName, base, systemId, publicId) -> None: ... 
+ def comment_handler(self, data) -> None: ... + def external_entity_ref_handler(self, context, base, systemId, publicId) -> int: ... + def first_element_handler(self, name, attributes) -> None: ... + def start_element_handler(self, name, attributes) -> None: ... + def end_element_handler(self, name) -> None: ... + def element_decl_handler(self, name, model) -> None: ... + def attlist_decl_handler(self, elem, name, type, default, required) -> None: ... + def xml_decl_handler(self, version, encoding, standalone) -> None: ... + +class FilterVisibilityController: + filter: DOMBuilderFilter + def __init__(self, filter: DOMBuilderFilter) -> None: ... + def startContainer(self, node: Node) -> int: ... + def acceptNode(self, node: Node) -> int: ... + +class FilterCrutch: + def __init__(self, builder) -> None: ... + +class Rejecter(FilterCrutch): + def start_element_handler(self, *args: Any) -> None: ... + def end_element_handler(self, *args: Any) -> None: ... + +class Skipper(FilterCrutch): + def start_element_handler(self, *args: Any) -> None: ... + def end_element_handler(self, *args: Any) -> None: ... + +class FragmentBuilder(ExpatBuilder): + fragment: Incomplete | None + originalDocument: Incomplete + context: Incomplete + def __init__(self, context, options: Options | None = None) -> None: ... + +class Namespaces: + def createParser(self): ... + def install(self, parser) -> None: ... + def start_namespace_decl_handler(self, prefix, uri) -> None: ... + def start_element_handler(self, name, attributes) -> None: ... + def end_element_handler(self, name) -> None: ... + +class ExpatBuilderNS(Namespaces, ExpatBuilder): ... +class FragmentBuilderNS(Namespaces, FragmentBuilder): ... +class ParseEscape(Exception): ... + +class InternalSubsetExtractor(ExpatBuilder): + subset: Any | None + def getSubset(self) -> Any | None: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> None: ... # type: ignore[override] + def parseString(self, string: str | ReadableBuffer) -> None: ... # type: ignore[override] + def start_doctype_decl_handler(self, name, publicId, systemId, has_internal_subset) -> None: ... # type: ignore[override] + def end_doctype_decl_handler(self) -> NoReturn: ... + def start_element_handler(self, name, attrs) -> NoReturn: ... + +def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True): ... +def parseString(string: str | ReadableBuffer, namespaces: bool = True): ... +def parseFragment(file, context, namespaces: bool = True): ... +def parseFragmentString(string: str, context, namespaces: bool = True): ... +def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/minicompat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/minicompat.pyi new file mode 100644 index 00000000..4507b3d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/minicompat.pyi @@ -0,0 +1,20 @@ +from collections.abc import Iterable +from typing import Any, TypeVar + +__all__ = ["NodeList", "EmptyNodeList", "StringTypes", "defproperty"] + +_T = TypeVar("_T") + +StringTypes: tuple[type[str]] + +class NodeList(list[_T]): + length: int + def item(self, index: int) -> _T | None: ... + +class EmptyNodeList(tuple[Any, ...]): + length: int + def item(self, index: int) -> None: ... + def __add__(self, other: Iterable[_T]) -> NodeList[_T]: ... 
# type: ignore[override] + def __radd__(self, other: Iterable[_T]) -> NodeList[_T]: ... + +def defproperty(klass: type[Any], name: str, doc: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/minidom.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/minidom.pyi new file mode 100644 index 00000000..ecc7bb6b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/minidom.pyi @@ -0,0 +1,346 @@ +import sys +import xml.dom +from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite +from typing import NoReturn, TypeVar +from typing_extensions import Literal, Self +from xml.dom.minicompat import NodeList +from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS +from xml.sax.xmlreader import XMLReader + +_N = TypeVar("_N", bound=Node) + +def parse(file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None): ... +def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None): ... +def getDOMImplementation(features=None) -> DOMImplementation | None: ... + +class Node(xml.dom.Node): + namespaceURI: str | None + parentNode: Incomplete + ownerDocument: Incomplete + nextSibling: Incomplete + previousSibling: Incomplete + prefix: Incomplete + @property + def firstChild(self) -> Node | None: ... + @property + def lastChild(self) -> Node | None: ... + @property + def localName(self) -> str | None: ... + def __bool__(self) -> Literal[True]: ... + if sys.version_info >= (3, 9): + def toxml(self, encoding: str | None = None, standalone: bool | None = None): ... + def toprettyxml( + self, indent: str = "\t", newl: str = "\n", encoding: str | None = None, standalone: bool | None = None + ): ... + else: + def toxml(self, encoding: str | None = None): ... + def toprettyxml(self, indent: str = "\t", newl: str = "\n", encoding: str | None = None): ... + + def hasChildNodes(self) -> bool: ... + def insertBefore(self, newChild, refChild): ... + def appendChild(self, node: _N) -> _N: ... + def replaceChild(self, newChild, oldChild): ... + def removeChild(self, oldChild): ... + def normalize(self) -> None: ... + def cloneNode(self, deep): ... + def isSupported(self, feature, version): ... + def isSameNode(self, other): ... + def getInterface(self, feature): ... + def getUserData(self, key): ... + def setUserData(self, key, data, handler): ... + childNodes: Incomplete + def unlink(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, et, ev, tb) -> None: ... + +class DocumentFragment(Node): + nodeType: int + nodeName: str + nodeValue: Incomplete + attributes: Incomplete + parentNode: Incomplete + childNodes: Incomplete + def __init__(self) -> None: ... + +class Attr(Node): + name: str + nodeType: int + attributes: Incomplete + specified: bool + ownerElement: Incomplete + namespaceURI: str | None + childNodes: Incomplete + nodeName: Incomplete + nodeValue: str + value: str + prefix: Incomplete + def __init__( + self, qName: str, namespaceURI: str | None = None, localName: str | None = None, prefix: Incomplete | None = None + ) -> None: ... + def unlink(self) -> None: ... + @property + def isId(self) -> bool: ... + @property + def schemaType(self): ... + +class NamedNodeMap: + def __init__(self, attrs, attrsNS, ownerElement) -> None: ... + def item(self, index): ... + def items(self): ... + def itemsNS(self): ... + def __contains__(self, key): ... 
+ def keys(self): ... + def keysNS(self): ... + def values(self): ... + def get(self, name: str, value: Incomplete | None = None): ... + def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __ge__(self, other: NamedNodeMap) -> bool: ... + def __gt__(self, other: NamedNodeMap) -> bool: ... + def __le__(self, other: NamedNodeMap) -> bool: ... + def __lt__(self, other: NamedNodeMap) -> bool: ... + def __getitem__(self, attname_or_tuple: tuple[str, str | None] | str): ... + def __setitem__(self, attname: str, value: Attr | str) -> None: ... + def getNamedItem(self, name: str) -> Attr | None: ... + def getNamedItemNS(self, namespaceURI: str, localName: str | None) -> Attr | None: ... + def removeNamedItem(self, name: str) -> Attr: ... + def removeNamedItemNS(self, namespaceURI: str, localName: str | None): ... + def setNamedItem(self, node: Attr) -> Attr: ... + def setNamedItemNS(self, node: Attr) -> Attr: ... + def __delitem__(self, attname_or_tuple: tuple[str, str | None] | str) -> None: ... + @property + def length(self) -> int: ... + +AttributeList = NamedNodeMap + +class TypeInfo: + namespace: Incomplete | None + name: str + def __init__(self, namespace: Incomplete | None, name: str) -> None: ... + +class Element(Node): + nodeType: int + nodeValue: Incomplete + schemaType: Incomplete + parentNode: Incomplete + tagName: str + nodeName: str + prefix: Incomplete + namespaceURI: str | None + childNodes: Incomplete + nextSibling: Incomplete + def __init__( + self, tagName, namespaceURI: str | None = None, prefix: Incomplete | None = None, localName: Incomplete | None = None + ) -> None: ... + def unlink(self) -> None: ... + def getAttribute(self, attname: str) -> str: ... + def getAttributeNS(self, namespaceURI: str, localName): ... + def setAttribute(self, attname: str, value: str) -> None: ... + def setAttributeNS(self, namespaceURI: str, qualifiedName: str, value) -> None: ... + def getAttributeNode(self, attrname: str): ... + def getAttributeNodeNS(self, namespaceURI: str, localName): ... + def setAttributeNode(self, attr): ... + setAttributeNodeNS: Incomplete + def removeAttribute(self, name: str) -> None: ... + def removeAttributeNS(self, namespaceURI: str, localName) -> None: ... + def removeAttributeNode(self, node): ... + removeAttributeNodeNS: Incomplete + def hasAttribute(self, name: str) -> bool: ... + def hasAttributeNS(self, namespaceURI: str, localName) -> bool: ... + def getElementsByTagName(self, name: str) -> NodeList[Node]: ... + def getElementsByTagNameNS(self, namespaceURI: str, localName): ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + def hasAttributes(self) -> bool: ... + def setIdAttribute(self, name) -> None: ... + def setIdAttributeNS(self, namespaceURI: str, localName) -> None: ... + def setIdAttributeNode(self, idAttr) -> None: ... + @property + def attributes(self) -> NamedNodeMap: ... + +class Childless: + attributes: Incomplete + childNodes: Incomplete + firstChild: Incomplete + lastChild: Incomplete + def appendChild(self, node) -> NoReturn: ... + def hasChildNodes(self) -> bool: ... + def insertBefore(self, newChild, refChild) -> NoReturn: ... + def removeChild(self, oldChild) -> NoReturn: ... + def normalize(self) -> None: ... + def replaceChild(self, newChild, oldChild) -> NoReturn: ... + +class ProcessingInstruction(Childless, Node): + nodeType: int + target: Incomplete + data: Incomplete + def __init__(self, target, data) -> None: ... 
+ nodeValue: Incomplete + nodeName: Incomplete + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + +class CharacterData(Childless, Node): + ownerDocument: Incomplete + previousSibling: Incomplete + def __init__(self) -> None: ... + def __len__(self) -> int: ... + data: str + nodeValue: Incomplete + def substringData(self, offset: int, count: int) -> str: ... + def appendData(self, arg: str) -> None: ... + def insertData(self, offset: int, arg: str) -> None: ... + def deleteData(self, offset: int, count: int) -> None: ... + def replaceData(self, offset: int, count: int, arg: str) -> None: ... + @property + def length(self) -> int: ... + +class Text(CharacterData): + nodeType: int + nodeName: str + attributes: Incomplete + data: Incomplete + def splitText(self, offset): ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + def replaceWholeText(self, content): ... + @property + def isWhitespaceInElementContent(self) -> bool: ... + @property + def wholeText(self) -> str: ... + +class Comment(CharacterData): + nodeType: int + nodeName: str + def __init__(self, data) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + +class CDATASection(Text): + nodeType: int + nodeName: str + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + +class ReadOnlySequentialNamedNodeMap: + def __init__(self, seq=...) -> None: ... + def __len__(self) -> int: ... + def getNamedItem(self, name): ... + def getNamedItemNS(self, namespaceURI: str, localName): ... + def __getitem__(self, name_or_tuple): ... + def item(self, index): ... + def removeNamedItem(self, name) -> None: ... + def removeNamedItemNS(self, namespaceURI: str, localName) -> None: ... + def setNamedItem(self, node) -> None: ... + def setNamedItemNS(self, node) -> None: ... + @property + def length(self) -> int: ... + +class Identified: + publicId: Incomplete + systemId: Incomplete + +class DocumentType(Identified, Childless, Node): + nodeType: int + nodeValue: Incomplete + name: Incomplete + internalSubset: Incomplete + entities: Incomplete + notations: Incomplete + nodeName: Incomplete + def __init__(self, qualifiedName: str) -> None: ... + def cloneNode(self, deep): ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... + +class Entity(Identified, Node): + attributes: Incomplete + nodeType: int + nodeValue: Incomplete + actualEncoding: Incomplete + encoding: Incomplete + version: Incomplete + nodeName: Incomplete + notationName: Incomplete + childNodes: Incomplete + def __init__(self, name, publicId, systemId, notation) -> None: ... + def appendChild(self, newChild) -> NoReturn: ... + def insertBefore(self, newChild, refChild) -> NoReturn: ... + def removeChild(self, oldChild) -> NoReturn: ... + def replaceChild(self, newChild, oldChild) -> NoReturn: ... + +class Notation(Identified, Childless, Node): + nodeType: int + nodeValue: Incomplete + nodeName: Incomplete + def __init__(self, name, publicId, systemId) -> None: ... + +class DOMImplementation(DOMImplementationLS): + def hasFeature(self, feature: str, version: str | None) -> bool: ... + def createDocument(self, namespaceURI: str | None, qualifiedName: str | None, doctype: DocumentType | None) -> Document: ... 
+ def createDocumentType(self, qualifiedName: str | None, publicId: str, systemId: str) -> DocumentType: ... + def getInterface(self, feature: str) -> Self | None: ... + +class ElementInfo: + tagName: Incomplete + def __init__(self, name) -> None: ... + def getAttributeType(self, aname): ... + def getAttributeTypeNS(self, namespaceURI: str, localName): ... + def isElementContent(self): ... + def isEmpty(self): ... + def isId(self, aname): ... + def isIdNS(self, namespaceURI: str, localName): ... + +class Document(Node, DocumentLS): + implementation: Incomplete + nodeType: int + nodeName: str + nodeValue: Incomplete + attributes: Incomplete + parentNode: Incomplete + previousSibling: Incomplete + nextSibling: Incomplete + actualEncoding: Incomplete + encoding: str | None + standalone: bool | None + version: Incomplete + strictErrorChecking: bool + errorHandler: Incomplete + documentURI: Incomplete + doctype: DocumentType | None + childNodes: Incomplete + def __init__(self) -> None: ... + def appendChild(self, node: _N) -> _N: ... + documentElement: Incomplete + def removeChild(self, oldChild): ... + def unlink(self) -> None: ... + def cloneNode(self, deep): ... + def createDocumentFragment(self) -> DocumentFragment: ... + def createElement(self, tagName: str) -> Element: ... + def createTextNode(self, data: str) -> Text: ... + def createCDATASection(self, data: str) -> CDATASection: ... + def createComment(self, data: str) -> Comment: ... + def createProcessingInstruction(self, target, data): ... + def createAttribute(self, qName) -> Attr: ... + def createElementNS(self, namespaceURI: str, qualifiedName: str): ... + def createAttributeNS(self, namespaceURI: str, qualifiedName: str) -> Attr: ... + def getElementById(self, id): ... + def getElementsByTagName(self, name: str) -> NodeList[Node]: ... + def getElementsByTagNameNS(self, namespaceURI: str, localName): ... + def isSupported(self, feature: str, version: str | None) -> bool: ... + def importNode(self, node, deep): ... + if sys.version_info >= (3, 9): + def writexml( + self, + writer: SupportsWrite[str], + indent: str = "", + addindent: str = "", + newl: str = "", + encoding: str | None = None, + standalone: bool | None = None, + ) -> None: ... + else: + def writexml( + self, + writer: SupportsWrite[str], + indent: str = "", + addindent: str = "", + newl: str = "", + encoding: Incomplete | None = None, + ) -> None: ... + + def renameNode(self, n, namespaceURI: str, name): ... 
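As a pointer for the large minidom stub above, a minimal sketch of the Document/Element API it describes: parse a string, read an attribute, append an element, and serialize again. The document content is illustrative.

from xml.dom.minidom import parseString

doc = parseString('<root lang="en"><item/></root>')
root = doc.documentElement
print(root.getAttribute("lang"))   # 'en'

extra = doc.createElement("item")  # createElement() returns an Element, per the stub
extra.setAttribute("id", "2")
root.appendChild(extra)            # appendChild() returns the appended node
print(doc.toxml())                 # '<?xml version="1.0" ?><root lang="en"><item/><item id="2"/></root>'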
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/pulldom.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/pulldom.pyi new file mode 100644 index 00000000..92090516 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/pulldom.pyi @@ -0,0 +1,93 @@ +import sys +from _typeshed import Incomplete, SupportsRead +from collections.abc import Sequence +from typing_extensions import Literal, TypeAlias +from xml.dom.minidom import Document, DOMImplementation, Element, Text +from xml.sax.handler import ContentHandler +from xml.sax.xmlreader import XMLReader + +START_ELEMENT: Literal["START_ELEMENT"] +END_ELEMENT: Literal["END_ELEMENT"] +COMMENT: Literal["COMMENT"] +START_DOCUMENT: Literal["START_DOCUMENT"] +END_DOCUMENT: Literal["END_DOCUMENT"] +PROCESSING_INSTRUCTION: Literal["PROCESSING_INSTRUCTION"] +IGNORABLE_WHITESPACE: Literal["IGNORABLE_WHITESPACE"] +CHARACTERS: Literal["CHARACTERS"] + +_DocumentFactory: TypeAlias = DOMImplementation | None +_Node: TypeAlias = Document | Element | Text + +_Event: TypeAlias = tuple[ + Literal[ + Literal["START_ELEMENT"], + Literal["END_ELEMENT"], + Literal["COMMENT"], + Literal["START_DOCUMENT"], + Literal["END_DOCUMENT"], + Literal["PROCESSING_INSTRUCTION"], + Literal["IGNORABLE_WHITESPACE"], + Literal["CHARACTERS"], + ], + _Node, +] + +class PullDOM(ContentHandler): + document: Document | None + documentFactory: _DocumentFactory + firstEvent: Incomplete + lastEvent: Incomplete + elementStack: Sequence[Incomplete] + pending_events: Sequence[Incomplete] + def __init__(self, documentFactory: _DocumentFactory = None) -> None: ... + def pop(self) -> Element: ... + def setDocumentLocator(self, locator) -> None: ... + def startPrefixMapping(self, prefix, uri) -> None: ... + def endPrefixMapping(self, prefix) -> None: ... + def startElementNS(self, name, tagName, attrs) -> None: ... + def endElementNS(self, name, tagName) -> None: ... + def startElement(self, name, attrs) -> None: ... + def endElement(self, name) -> None: ... + def comment(self, s) -> None: ... + def processingInstruction(self, target, data) -> None: ... + def ignorableWhitespace(self, chars) -> None: ... + def characters(self, chars) -> None: ... + def startDocument(self) -> None: ... + def buildDocument(self, uri, tagname): ... + def endDocument(self) -> None: ... + def clear(self) -> None: ... + +class ErrorHandler: + def warning(self, exception) -> None: ... + def error(self, exception) -> None: ... + def fatalError(self, exception) -> None: ... + +class DOMEventStream: + stream: SupportsRead[bytes] | SupportsRead[str] + parser: XMLReader + bufsize: int + def __init__(self, stream: SupportsRead[bytes] | SupportsRead[str], parser: XMLReader, bufsize: int) -> None: ... + pulldom: Incomplete + if sys.version_info < (3, 11): + def __getitem__(self, pos): ... + + def __next__(self): ... + def __iter__(self): ... + def getEvent(self) -> _Event: ... + def expandNode(self, node: _Node) -> None: ... + def reset(self) -> None: ... + def clear(self) -> None: ... + +class SAX2DOM(PullDOM): + def startElementNS(self, name, tagName, attrs) -> None: ... + def startElement(self, name, attrs) -> None: ... + def processingInstruction(self, target, data) -> None: ... + def ignorableWhitespace(self, chars) -> None: ... + def characters(self, chars) -> None: ... 
+ +default_bufsize: int + +def parse( + stream_or_string: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = None, bufsize: int | None = None +) -> DOMEventStream: ... +def parseString(string: str, parser: XMLReader | None = None) -> DOMEventStream: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/xmlbuilder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/xmlbuilder.pyi new file mode 100644 index 00000000..c07e4ba2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/dom/xmlbuilder.pyi @@ -0,0 +1,108 @@ +from _typeshed import Incomplete, Unused +from typing import Any, NoReturn +from typing_extensions import Literal, TypeAlias +from urllib.request import OpenerDirector +from xml.dom.expatbuilder import ExpatBuilder, ExpatBuilderNS +from xml.dom.minidom import Node + +__all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"] + +# UNKNOWN TYPES: +# - `Options.errorHandler`. +# The same as `_DOMBuilderErrorHandlerType`? +# Maybe `xml.sax.handler.ErrorHandler`? +# - Return type of DOMBuilder.getFeature(). +# We could get rid of the `Incomplete` if we knew more +# about `Options.errorHandler`. + +# ALIASES REPRESENTING MORE UNKNOWN TYPES: + +# probably the same as `Options.errorHandler`? +# Maybe `xml.sax.handler.ErrorHandler`? +_DOMBuilderErrorHandlerType: TypeAlias = Incomplete | None +# probably some kind of IO... +_DOMInputSourceCharacterStreamType: TypeAlias = Incomplete | None +# probably a string?? +_DOMInputSourceStringDataType: TypeAlias = Incomplete | None +# probably a string?? +_DOMInputSourceEncodingType: TypeAlias = Incomplete | None + +class Options: + namespaces: int + namespace_declarations: bool + validation: bool + external_parameter_entities: bool + external_general_entities: bool + external_dtd_subset: bool + validate_if_schema: bool + validate: bool + datatype_normalization: bool + create_entity_ref_nodes: bool + entities: bool + whitespace_in_element_content: bool + cdata_sections: bool + comments: bool + charset_overrides_xml_encoding: bool + infoset: bool + supported_mediatypes_only: bool + errorHandler: Any | None + filter: DOMBuilderFilter | None # a guess, but seems likely + +class DOMBuilder: + entityResolver: DOMEntityResolver | None # a guess, but seems likely + errorHandler: _DOMBuilderErrorHandlerType + filter: DOMBuilderFilter | None # a guess, but seems likely + ACTION_REPLACE: Literal[1] + ACTION_APPEND_AS_CHILDREN: Literal[2] + ACTION_INSERT_AFTER: Literal[3] + ACTION_INSERT_BEFORE: Literal[4] + def setFeature(self, name: str, state: int) -> None: ... + def supportsFeature(self, name: str) -> bool: ... + def canSetFeature(self, name: str, state: int) -> bool: ... + # getFeature could return any attribute from an instance of `Options` + def getFeature(self, name: str) -> Incomplete: ... + def parseURI(self, uri: str) -> ExpatBuilder | ExpatBuilderNS: ... + def parse(self, input: DOMInputSource) -> ExpatBuilder | ExpatBuilderNS: ... + # `input` and `cnode` argtypes for `parseWithContext` are unknowable + # as the function does nothing with them, and always raises an exception. + # But `input` is *probably* `DOMInputSource`? + def parseWithContext(self, input: Unused, cnode: Unused, action: Literal[1, 2, 3, 4]) -> NoReturn: ... + +class DOMEntityResolver: + def resolveEntity(self, publicId: str | None, systemId: str) -> DOMInputSource: ... 
+ +class DOMInputSource: + byteStream: OpenerDirector | None + characterStream: _DOMInputSourceCharacterStreamType + stringData: _DOMInputSourceStringDataType + encoding: _DOMInputSourceEncodingType + publicId: str | None + systemId: str | None + baseURI: str | None + +class DOMBuilderFilter: + FILTER_ACCEPT: Literal[1] + FILTER_REJECT: Literal[2] + FILTER_SKIP: Literal[3] + FILTER_INTERRUPT: Literal[4] + whatToShow: int + def acceptNode(self, element: Unused) -> Literal[1]: ... + def startContainer(self, element: Unused) -> Literal[1]: ... + +class DocumentLS: + async_: bool + def abort(self) -> NoReturn: ... + # `load()` and `loadXML()` always raise exceptions + # so the argtypes of `uri` and `source` are unknowable. + # `source` is *probably* `DOMInputSource`? + # `uri` is *probably* a str? (see DOMBuilder.parseURI()) + def load(self, uri: Unused) -> NoReturn: ... + def loadXML(self, source: Unused) -> NoReturn: ... + def saveXML(self, snode: Node | None) -> str: ... + +class DOMImplementationLS: + MODE_SYNCHRONOUS: Literal[1] + MODE_ASYNCHRONOUS: Literal[2] + def createDOMBuilder(self, mode: Literal[1], schemaType: None) -> DOMBuilder: ... + def createDOMWriter(self) -> NoReturn: ... + def createDOMInputSource(self) -> DOMInputSource: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/ElementInclude.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/ElementInclude.pyi new file mode 100644 index 00000000..cbba15dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/ElementInclude.pyi @@ -0,0 +1,28 @@ +import sys +from _typeshed import FileDescriptorOrPath +from collections.abc import Callable +from xml.etree.ElementTree import Element + +XINCLUDE: str +XINCLUDE_INCLUDE: str +XINCLUDE_FALLBACK: str + +if sys.version_info >= (3, 9): + DEFAULT_MAX_INCLUSION_DEPTH: int + +class FatalIncludeError(SyntaxError): ... + +def default_loader(href: FileDescriptorOrPath, parse: str, encoding: str | None = None) -> str | Element: ... + +# TODO: loader is of type default_loader ie it takes a callable that has the +# same signature as default_loader. But default_loader has a keyword argument +# Which can't be represented using Callable... +if sys.version_info >= (3, 9): + def include( + elem: Element, loader: Callable[..., str | Element] | None = None, base_url: str | None = None, max_depth: int | None = 6 + ) -> None: ... + + class LimitedRecursiveIncludeError(FatalIncludeError): ... + +else: + def include(elem: Element, loader: Callable[..., str | Element] | None = None) -> None: ... 
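# Editor's note: a hedged sketch, not part of the vendored ElementInclude stub above.
# It illustrates the loader shape that the Callable[..., str | Element] annotation is
# approximating (the TODO in the stub explains why default_loader's signature cannot be
# spelled exactly). The file name "document.xml" and the logging_loader are hypothetical.
from xml.etree import ElementInclude, ElementTree

def logging_loader(href, parse, encoding=None):
    # Same call shape as ElementInclude.default_loader: "xml" -> Element, "text" -> str.
    print(f"including {href!r} as {parse}")
    return ElementInclude.default_loader(href, parse, encoding)

root = ElementTree.parse("document.xml").getroot()  # hypothetical input document
ElementInclude.include(root, loader=logging_loader)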
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/ElementPath.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/ElementPath.pyi new file mode 100644 index 00000000..c3f6207e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/ElementPath.pyi @@ -0,0 +1,34 @@ +from collections.abc import Callable, Generator +from re import Pattern +from typing import TypeVar +from typing_extensions import TypeAlias +from xml.etree.ElementTree import Element + +xpath_tokenizer_re: Pattern[str] + +_Token: TypeAlias = tuple[str, str] +_Next: TypeAlias = Callable[[], _Token] +_Callback: TypeAlias = Callable[[_SelectorContext, list[Element]], Generator[Element, None, None]] + +def xpath_tokenizer(pattern: str, namespaces: dict[str, str] | None = None) -> Generator[_Token, None, None]: ... +def get_parent_map(context: _SelectorContext) -> dict[Element, Element]: ... +def prepare_child(next: _Next, token: _Token) -> _Callback: ... +def prepare_star(next: _Next, token: _Token) -> _Callback: ... +def prepare_self(next: _Next, token: _Token) -> _Callback: ... +def prepare_descendant(next: _Next, token: _Token) -> _Callback: ... +def prepare_parent(next: _Next, token: _Token) -> _Callback: ... +def prepare_predicate(next: _Next, token: _Token) -> _Callback: ... + +ops: dict[str, Callable[[_Next, _Token], _Callback]] + +class _SelectorContext: + parent_map: dict[Element, Element] | None + root: Element + def __init__(self, root: Element) -> None: ... + +_T = TypeVar("_T") + +def iterfind(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... +def find(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... +def findall(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... +def findtext(elem: Element, path: str, default: _T | None = None, namespaces: dict[str, str] | None = None) -> _T | str: ... 
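# Editor's note: an illustrative sketch, not vendored typeshed content. It exercises the
# limited XPath dialect that ElementPath's prepare_* callbacks and iterfind/findall
# implement; the XML literal and the "bk" prefix/URI are invented for the example.
from xml.etree import ElementTree

doc = ElementTree.fromstring(
    '<catalog xmlns:bk="urn:books">'
    '<bk:book id="1"><bk:title>A</bk:title></bk:book>'
    '<bk:book id="2"><bk:title>B</bk:title></bk:book>'
    "</catalog>"
)
# Element.findall delegates to ElementPath; `namespaces` maps prefix -> URI.
titles = doc.findall("bk:book/bk:title", namespaces={"bk": "urn:books"})
print([t.text for t in titles])  # ['A', 'B']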
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/ElementTree.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/ElementTree.pyi new file mode 100644 index 00000000..5f654c0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -0,0 +1,360 @@ +import sys +from _collections_abc import dict_keys +from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite +from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence +from typing import Any, TypeVar, overload +from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard + +__all__ = [ + "Comment", + "dump", + "Element", + "ElementTree", + "fromstring", + "fromstringlist", + "iselement", + "iterparse", + "parse", + "ParseError", + "PI", + "ProcessingInstruction", + "QName", + "SubElement", + "tostring", + "tostringlist", + "TreeBuilder", + "VERSION", + "XML", + "XMLID", + "XMLParser", + "XMLPullParser", + "register_namespace", +] + +if sys.version_info >= (3, 8): + __all__ += ["C14NWriterTarget", "canonicalize"] + +if sys.version_info >= (3, 9): + __all__ += ["indent"] + +_T = TypeVar("_T") +_FileRead: TypeAlias = FileDescriptorOrPath | SupportsRead[bytes] | SupportsRead[str] +_FileWriteC14N: TypeAlias = FileDescriptorOrPath | SupportsWrite[bytes] +_FileWrite: TypeAlias = _FileWriteC14N | SupportsWrite[str] + +VERSION: str + +class ParseError(SyntaxError): + code: int + position: tuple[int, int] + +# In reality it works based on `.tag` attribute duck typing. +def iselement(element: object) -> TypeGuard[Element]: ... + +if sys.version_info >= (3, 8): + @overload + def canonicalize( + xml_data: str | ReadableBuffer | None = None, + *, + out: None = None, + from_file: _FileRead | None = None, + with_comments: bool = False, + strip_text: bool = False, + rewrite_prefixes: bool = False, + qname_aware_tags: Iterable[str] | None = None, + qname_aware_attrs: Iterable[str] | None = None, + exclude_attrs: Iterable[str] | None = None, + exclude_tags: Iterable[str] | None = None, + ) -> str: ... + @overload + def canonicalize( + xml_data: str | ReadableBuffer | None = None, + *, + out: SupportsWrite[str], + from_file: _FileRead | None = None, + with_comments: bool = False, + strip_text: bool = False, + rewrite_prefixes: bool = False, + qname_aware_tags: Iterable[str] | None = None, + qname_aware_attrs: Iterable[str] | None = None, + exclude_attrs: Iterable[str] | None = None, + exclude_tags: Iterable[str] | None = None, + ) -> None: ... + +class Element: + tag: str + attrib: dict[str, str] + text: str | None + tail: str | None + def __init__(self, tag: str | Callable[..., Element], attrib: dict[str, str] = ..., **extra: str) -> None: ... + def append(self, __subelement: Element) -> None: ... + def clear(self) -> None: ... + def extend(self, __elements: Iterable[Element]) -> None: ... + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... + @overload + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... + @overload + def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... + @overload + def get(self, key: str, default: None = None) -> str | None: ... 
+ @overload + def get(self, key: str, default: _T) -> str | _T: ... + def insert(self, __index: int, __subelement: Element) -> None: ... + def items(self) -> ItemsView[str, str]: ... + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... + def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... + def itertext(self) -> Generator[str, None, None]: ... + def keys(self) -> dict_keys[str, str]: ... + # makeelement returns the type of self in Python impl, but not in C impl + def makeelement(self, __tag: str, __attrib: dict[str, str]) -> Element: ... + def remove(self, __subelement: Element) -> None: ... + def set(self, __key: str, __value: str) -> None: ... + def __copy__(self) -> Element: ... # returns the type of self in Python impl, but not in C impl + def __deepcopy__(self, __memo: Any) -> Element: ... # Only exists in C impl + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + @overload + def __getitem__(self, __i: SupportsIndex) -> Element: ... + @overload + def __getitem__(self, __s: slice) -> list[Element]: ... + def __len__(self) -> int: ... + # Doesn't actually exist at runtime, but instance of the class are indeed iterable due to __getitem__. + def __iter__(self) -> Iterator[Element]: ... + @overload + def __setitem__(self, __i: SupportsIndex, __o: Element) -> None: ... + @overload + def __setitem__(self, __s: slice, __o: Iterable[Element]) -> None: ... + if sys.version_info < (3, 9): + def getchildren(self) -> list[Element]: ... + def getiterator(self, tag: str | None = None) -> list[Element]: ... + +def SubElement(parent: Element, tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... +def Comment(text: str | None = None) -> Element: ... +def ProcessingInstruction(target: str, text: str | None = None) -> Element: ... + +PI: Callable[..., Element] + +class QName: + text: str + def __init__(self, text_or_uri: str, tag: str | None = None) -> None: ... + def __lt__(self, other: QName | str) -> bool: ... + def __le__(self, other: QName | str) -> bool: ... + def __gt__(self, other: QName | str) -> bool: ... + def __ge__(self, other: QName | str) -> bool: ... + def __eq__(self, other: object) -> bool: ... + +class ElementTree: + def __init__(self, element: Element | None = None, file: _FileRead | None = None) -> None: ... + def getroot(self) -> Element | Any: ... + def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: ... + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... + if sys.version_info < (3, 9): + def getiterator(self, tag: str | None = None) -> list[Element]: ... + + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... + @overload + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... + @overload + def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... + def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... + def write( + self, + file_or_filename: _FileWrite, + encoding: str | None = None, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + method: str | None = None, + *, + short_empty_elements: bool = True, + ) -> None: ... + def write_c14n(self, file: _FileWriteC14N) -> None: ... 
+ +def register_namespace(prefix: str, uri: str) -> None: ... + +if sys.version_info >= (3, 8): + @overload + def tostring( + element: Element, + encoding: None = None, + method: str | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + ) -> bytes: ... + @overload + def tostring( + element: Element, + encoding: Literal["unicode"], + method: str | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + ) -> str: ... + @overload + def tostring( + element: Element, + encoding: str, + method: str | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + ) -> Any: ... + @overload + def tostringlist( + element: Element, + encoding: None = None, + method: str | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + ) -> list[bytes]: ... + @overload + def tostringlist( + element: Element, + encoding: Literal["unicode"], + method: str | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + ) -> list[str]: ... + @overload + def tostringlist( + element: Element, + encoding: str, + method: str | None = None, + *, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, + ) -> list[Any]: ... + +else: + @overload + def tostring( + element: Element, encoding: None = None, method: str | None = None, *, short_empty_elements: bool = True + ) -> bytes: ... + @overload + def tostring( + element: Element, encoding: Literal["unicode"], method: str | None = None, *, short_empty_elements: bool = True + ) -> str: ... + @overload + def tostring(element: Element, encoding: str, method: str | None = None, *, short_empty_elements: bool = True) -> Any: ... + @overload + def tostringlist( + element: Element, encoding: None = None, method: str | None = None, *, short_empty_elements: bool = True + ) -> list[bytes]: ... + @overload + def tostringlist( + element: Element, encoding: Literal["unicode"], method: str | None = None, *, short_empty_elements: bool = True + ) -> list[str]: ... + @overload + def tostringlist( + element: Element, encoding: str, method: str | None = None, *, short_empty_elements: bool = True + ) -> list[Any]: ... + +def dump(elem: Element) -> None: ... + +if sys.version_info >= (3, 9): + def indent(tree: Element | ElementTree, space: str = " ", level: int = 0) -> None: ... + +def parse(source: _FileRead, parser: XMLParser | None = None) -> ElementTree: ... +def iterparse( + source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None +) -> Iterator[tuple[str, Any]]: ... + +class XMLPullParser: + def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser | None = None) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... + def close(self) -> None: ... + # Second element in the tuple could be `Element`, `tuple[str, str]` or `None`. + # Use `Any` to avoid false-positive errors. + def read_events(self) -> Iterator[tuple[str, Any]]: ... + +def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: ... +def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = None) -> tuple[Element, dict[str, Element]]: ... + +# This is aliased to XML in the source. 
+fromstring = XML + +def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = None) -> Element: ... + +# This type is both not precise enough and too precise. The TreeBuilder +# requires the elementfactory to accept tag and attrs in its args and produce +# some kind of object that has .text and .tail properties. +# I've chosen to constrain the ElementFactory to always produce an Element +# because that is how almost everyone will use it. +# Unfortunately, the type of the factory arguments is dependent on how +# TreeBuilder is called by client code (they could pass strs, bytes or whatever); +# but we don't want to use a too-broad type, or it would be too hard to write +# elementfactories. +_ElementFactory: TypeAlias = Callable[[Any, dict[Any, Any]], Element] + +class TreeBuilder: + if sys.version_info >= (3, 8): + # comment_factory can take None because passing None to Comment is not an error + def __init__( + self, + element_factory: _ElementFactory | None = ..., + *, + comment_factory: Callable[[str | None], Element] | None = ..., + pi_factory: Callable[[str, str | None], Element] | None = ..., + insert_comments: bool = ..., + insert_pis: bool = ..., + ) -> None: ... + insert_comments: bool + insert_pis: bool + else: + def __init__(self, element_factory: _ElementFactory | None = ...) -> None: ... + + def close(self) -> Element: ... + def data(self, __data: str) -> None: ... + # tag and attrs are passed to the element_factory, so they could be anything + # depending on what the particular factory supports. + def start(self, __tag: Any, __attrs: dict[Any, Any]) -> Element: ... + def end(self, __tag: str) -> Element: ... + if sys.version_info >= (3, 8): + # These two methods have pos-only parameters in the C implementation + def comment(self, __text: str | None) -> Element: ... + def pi(self, __target: str, __text: str | None = None) -> Element: ... + +if sys.version_info >= (3, 8): + class C14NWriterTarget: + def __init__( + self, + write: Callable[[str], object], + *, + with_comments: bool = False, + strip_text: bool = False, + rewrite_prefixes: bool = False, + qname_aware_tags: Iterable[str] | None = None, + qname_aware_attrs: Iterable[str] | None = None, + exclude_attrs: Iterable[str] | None = None, + exclude_tags: Iterable[str] | None = None, + ) -> None: ... + def data(self, data: str) -> None: ... + def start_ns(self, prefix: str, uri: str) -> None: ... + def start(self, tag: str, attrs: Mapping[str, str]) -> None: ... + def end(self, tag: str) -> None: ... + def comment(self, text: str) -> None: ... + def pi(self, target: str, data: str) -> None: ... + +class XMLParser: + parser: Any + target: Any + # TODO-what is entity used for??? + entity: Any + version: str + if sys.version_info >= (3, 8): + def __init__(self, *, target: Any = ..., encoding: str | None = ...) -> None: ... + else: + def __init__(self, html: int = ..., target: Any = ..., encoding: str | None = ...) -> None: ... + def doctype(self, __name: str, __pubid: str, __system: str) -> None: ... + + def close(self) -> Any: ... + def feed(self, __data: str | ReadableBuffer) -> None: ... 
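# Editor's note: a small editorial sketch, not part of the vendored ElementTree stub.
# It demonstrates the behaviour the tostring() overloads above encode: the return type
# depends on the `encoding` argument.
from xml.etree.ElementTree import Element, SubElement, tostring

root = Element("root")
SubElement(root, "child").text = "hi"
as_bytes = tostring(root)                     # encoding=None        -> bytes
as_text = tostring(root, encoding="unicode")  # Literal["unicode"]   -> str
as_utf8 = tostring(root, encoding="utf-8")    # any other str        -> bytes (typed Any)
print(type(as_bytes), type(as_text), type(as_utf8))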
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/cElementTree.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/cElementTree.pyi new file mode 100644 index 00000000..02272d80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/etree/cElementTree.pyi @@ -0,0 +1 @@ +from xml.etree.ElementTree import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/__init__.pyi new file mode 100644 index 00000000..cebdb6a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/__init__.pyi @@ -0,0 +1 @@ +from xml.parsers import expat as expat diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/expat/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/expat/__init__.pyi new file mode 100644 index 00000000..73f3758c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/expat/__init__.pyi @@ -0,0 +1 @@ +from pyexpat import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/expat/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/expat/errors.pyi new file mode 100644 index 00000000..e22d769e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/expat/errors.pyi @@ -0,0 +1 @@ +from pyexpat.errors import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/expat/model.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/expat/model.pyi new file mode 100644 index 00000000..d8f44b47 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/parsers/expat/model.pyi @@ -0,0 +1 @@ +from pyexpat.model import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/__init__.pyi new file mode 100644 index 00000000..ca981a00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/__init__.pyi @@ -0,0 +1,47 @@ +import sys +from _typeshed import ReadableBuffer, StrPath, SupportsRead, _T_co +from collections.abc import Iterable +from typing import Any, NoReturn, Protocol +from xml.sax.handler import ContentHandler as ContentHandler, ErrorHandler as ErrorHandler +from xml.sax.xmlreader import Locator, XMLReader + +class _SupportsReadClose(SupportsRead[_T_co], Protocol[_T_co]): + def close(self) -> None: ... + +class SAXException(Exception): + def __init__(self, msg: str, exception: Exception | None = None) -> None: ... + def getMessage(self) -> str: ... + def getException(self) -> Exception: ... + def __getitem__(self, ix: Any) -> NoReturn: ... 
+ +class SAXParseException(SAXException): + def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: ... + def getColumnNumber(self) -> int: ... + def getLineNumber(self) -> int: ... + def getPublicId(self): ... + def getSystemId(self): ... + +class SAXNotRecognizedException(SAXException): ... +class SAXNotSupportedException(SAXException): ... +class SAXReaderNotAvailable(SAXNotSupportedException): ... + +default_parser_list: list[str] + +if sys.version_info >= (3, 8): + def make_parser(parser_list: Iterable[str] = ...) -> XMLReader: ... + def parse( + source: StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[str], + handler: ContentHandler, + errorHandler: ErrorHandler = ..., + ) -> None: ... + +else: + def make_parser(parser_list: list[str] = ...) -> XMLReader: ... + def parse( + source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], + handler: ContentHandler, + errorHandler: ErrorHandler = ..., + ) -> None: ... + +def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... +def _create_parser(parser_name: str) -> XMLReader: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/handler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/handler.pyi new file mode 100644 index 00000000..63b725bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/handler.pyi @@ -0,0 +1,54 @@ +import sys +from typing import NoReturn + +version: str + +class ErrorHandler: + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... + def warning(self, exception: BaseException) -> None: ... + +class ContentHandler: + def setDocumentLocator(self, locator): ... + def startDocument(self): ... + def endDocument(self): ... + def startPrefixMapping(self, prefix, uri): ... + def endPrefixMapping(self, prefix): ... + def startElement(self, name, attrs): ... + def endElement(self, name): ... + def startElementNS(self, name, qname, attrs): ... + def endElementNS(self, name, qname): ... + def characters(self, content): ... + def ignorableWhitespace(self, whitespace): ... + def processingInstruction(self, target, data): ... + def skippedEntity(self, name): ... + +class DTDHandler: + def notationDecl(self, name, publicId, systemId): ... + def unparsedEntityDecl(self, name, publicId, systemId, ndata): ... + +class EntityResolver: + def resolveEntity(self, publicId, systemId): ... + +feature_namespaces: str +feature_namespace_prefixes: str +feature_string_interning: str +feature_validation: str +feature_external_ges: str +feature_external_pes: str +all_features: list[str] +property_lexical_handler: str +property_declaration_handler: str +property_dom_node: str +property_xml_string: str +property_encoding: str +property_interning_dict: str +all_properties: list[str] + +if sys.version_info >= (3, 10): + class LexicalHandler: + def comment(self, content: str) -> object: ... + def startDTD(self, name: str, public_id: str | None, system_id: str | None) -> object: ... + def endDTD(self) -> object: ... + def startCDATA(self) -> object: ... + def endCDATA(self) -> object: ... 
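# Editor's note: an illustrative sketch, not vendored typeshed content. It shows how the
# ContentHandler callbacks stubbed above are driven by xml.sax.parseString; the XML
# payload and the TagCounter class are invented for the example.
import xml.sax

class TagCounter(xml.sax.ContentHandler):
    def __init__(self):
        super().__init__()
        self.counts = {}

    def startElement(self, name, attrs):
        self.counts[name] = self.counts.get(name, 0) + 1

handler = TagCounter()
xml.sax.parseString(b"<root><item/><item/></root>", handler)
print(handler.counts)  # {'root': 1, 'item': 2}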
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/saxutils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/saxutils.pyi new file mode 100644 index 00000000..67a06d2f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/saxutils.pyi @@ -0,0 +1,60 @@ +from _typeshed import SupportsWrite +from codecs import StreamReaderWriter, StreamWriter +from collections.abc import Mapping +from io import RawIOBase, TextIOBase +from xml.sax import handler, xmlreader + +def escape(data: str, entities: Mapping[str, str] = ...) -> str: ... +def unescape(data: str, entities: Mapping[str, str] = ...) -> str: ... +def quoteattr(data: str, entities: Mapping[str, str] = ...) -> str: ... + +class XMLGenerator(handler.ContentHandler): + def __init__( + self, + out: TextIOBase | RawIOBase | StreamWriter | StreamReaderWriter | SupportsWrite[str] | None = None, + encoding: str = "iso-8859-1", + short_empty_elements: bool = False, + ) -> None: ... + def startDocument(self): ... + def endDocument(self): ... + def startPrefixMapping(self, prefix, uri): ... + def endPrefixMapping(self, prefix): ... + def startElement(self, name, attrs): ... + def endElement(self, name): ... + def startElementNS(self, name, qname, attrs): ... + def endElementNS(self, name, qname): ... + def characters(self, content): ... + def ignorableWhitespace(self, content): ... + def processingInstruction(self, target, data): ... + +class XMLFilterBase(xmlreader.XMLReader): + def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ... + def error(self, exception): ... + def fatalError(self, exception): ... + def warning(self, exception): ... + def setDocumentLocator(self, locator): ... + def startDocument(self): ... + def endDocument(self): ... + def startPrefixMapping(self, prefix, uri): ... + def endPrefixMapping(self, prefix): ... + def startElement(self, name, attrs): ... + def endElement(self, name): ... + def startElementNS(self, name, qname, attrs): ... + def endElementNS(self, name, qname): ... + def characters(self, content): ... + def ignorableWhitespace(self, chars): ... + def processingInstruction(self, target, data): ... + def skippedEntity(self, name): ... + def notationDecl(self, name, publicId, systemId): ... + def unparsedEntityDecl(self, name, publicId, systemId, ndata): ... + def resolveEntity(self, publicId, systemId): ... + def parse(self, source): ... + def setLocale(self, locale): ... + def getFeature(self, name): ... + def setFeature(self, name, state): ... + def getProperty(self, name): ... + def setProperty(self, name, value): ... + def getParent(self): ... + def setParent(self, parent): ... + +def prepare_input_source(source, base=""): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/xmlreader.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/xmlreader.pyi new file mode 100644 index 00000000..0bf167b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -0,0 +1,71 @@ +from collections.abc import Mapping + +class XMLReader: + def parse(self, source): ... + def getContentHandler(self): ... + def setContentHandler(self, handler): ... + def getDTDHandler(self): ... + def setDTDHandler(self, handler): ... + def getEntityResolver(self): ... + def setEntityResolver(self, resolver): ... + def getErrorHandler(self): ... 
+ def setErrorHandler(self, handler): ... + def setLocale(self, locale): ... + def getFeature(self, name): ... + def setFeature(self, name, state): ... + def getProperty(self, name): ... + def setProperty(self, name, value): ... + +class IncrementalParser(XMLReader): + def __init__(self, bufsize: int = 65536) -> None: ... + def parse(self, source): ... + def feed(self, data): ... + def prepareParser(self, source): ... + def close(self): ... + def reset(self): ... + +class Locator: + def getColumnNumber(self): ... + def getLineNumber(self): ... + def getPublicId(self): ... + def getSystemId(self): ... + +class InputSource: + def __init__(self, system_id: str | None = None) -> None: ... + def setPublicId(self, public_id): ... + def getPublicId(self): ... + def setSystemId(self, system_id): ... + def getSystemId(self): ... + def setEncoding(self, encoding): ... + def getEncoding(self): ... + def setByteStream(self, bytefile): ... + def getByteStream(self): ... + def setCharacterStream(self, charfile): ... + def getCharacterStream(self): ... + +class AttributesImpl: + def __init__(self, attrs: Mapping[str, str]) -> None: ... + def getLength(self): ... + def getType(self, name): ... + def getValue(self, name): ... + def getValueByQName(self, name): ... + def getNameByQName(self, name): ... + def getQNameByName(self, name): ... + def getNames(self): ... + def getQNames(self): ... + def __len__(self) -> int: ... + def __getitem__(self, name): ... + def keys(self): ... + def __contains__(self, name): ... + def get(self, name, alternative=None): ... + def copy(self): ... + def items(self): ... + def values(self): ... + +class AttributesNSImpl(AttributesImpl): + def __init__(self, attrs: Mapping[tuple[str, str], str], qnames: Mapping[tuple[str, str], str]) -> None: ... + def getValueByQName(self, name): ... + def getNameByQName(self, name): ... + def getQNameByName(self, name): ... + def getQNames(self): ... + def copy(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xmlrpc/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xmlrpc/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xmlrpc/client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xmlrpc/client.pyi new file mode 100644 index 00000000..7bf701ae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xmlrpc/client.pyi @@ -0,0 +1,322 @@ +import gzip +import http.client +import sys +import time +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite, _BufferWithLen +from collections.abc import Callable, Iterable, Mapping +from datetime import datetime +from io import BytesIO +from types import TracebackType +from typing import Any, Protocol, overload +from typing_extensions import Literal, Self, TypeAlias + +class _SupportsTimeTuple(Protocol): + def timetuple(self) -> time.struct_time: ... + +_DateTimeComparable: TypeAlias = DateTime | datetime | str | _SupportsTimeTuple +_Marshallable: TypeAlias = ( + bool + | int + | float + | str + | bytes + | bytearray + | None + | tuple[_Marshallable, ...] + # Ideally we'd use _Marshallable for list and dict, but invariance makes that impractical + | list[Any] + | dict[str, Any] + | datetime + | DateTime + | Binary +) +_XMLDate: TypeAlias = int | datetime | tuple[int, ...] 
| time.struct_time +_HostType: TypeAlias = tuple[str, dict[str, str]] | str + +def escape(s: str) -> str: ... # undocumented + +MAXINT: int # undocumented +MININT: int # undocumented + +PARSE_ERROR: int # undocumented +SERVER_ERROR: int # undocumented +APPLICATION_ERROR: int # undocumented +SYSTEM_ERROR: int # undocumented +TRANSPORT_ERROR: int # undocumented + +NOT_WELLFORMED_ERROR: int # undocumented +UNSUPPORTED_ENCODING: int # undocumented +INVALID_ENCODING_CHAR: int # undocumented +INVALID_XMLRPC: int # undocumented +METHOD_NOT_FOUND: int # undocumented +INVALID_METHOD_PARAMS: int # undocumented +INTERNAL_ERROR: int # undocumented + +class Error(Exception): ... + +class ProtocolError(Error): + url: str + errcode: int + errmsg: str + headers: dict[str, str] + def __init__(self, url: str, errcode: int, errmsg: str, headers: dict[str, str]) -> None: ... + +class ResponseError(Error): ... + +class Fault(Error): + faultCode: int + faultString: str + def __init__(self, faultCode: int, faultString: str, **extra: Any) -> None: ... + +boolean = bool +Boolean = bool + +def _iso8601_format(value: datetime) -> str: ... # undocumented +def _strftime(value: _XMLDate) -> str: ... # undocumented + +class DateTime: + value: str # undocumented + def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = 0) -> None: ... + def __lt__(self, other: _DateTimeComparable) -> bool: ... + def __le__(self, other: _DateTimeComparable) -> bool: ... + def __gt__(self, other: _DateTimeComparable) -> bool: ... + def __ge__(self, other: _DateTimeComparable) -> bool: ... + def __eq__(self, other: _DateTimeComparable) -> bool: ... # type: ignore[override] + def make_comparable(self, other: _DateTimeComparable) -> tuple[str, str]: ... # undocumented + def timetuple(self) -> time.struct_time: ... # undocumented + def decode(self, data: Any) -> None: ... + def encode(self, out: SupportsWrite[str]) -> None: ... + +def _datetime(data: Any) -> DateTime: ... # undocumented +def _datetime_type(data: str) -> datetime: ... # undocumented + +class Binary: + data: bytes + def __init__(self, data: bytes | bytearray | None = None) -> None: ... + def decode(self, data: ReadableBuffer) -> None: ... + def encode(self, out: SupportsWrite[str]) -> None: ... + def __eq__(self, other: object) -> bool: ... + +def _binary(data: ReadableBuffer) -> Binary: ... # undocumented + +WRAPPERS: tuple[type[DateTime], type[Binary]] # undocumented + +class ExpatParser: # undocumented + def __init__(self, target: Unmarshaller) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... + def close(self) -> None: ... + +_WriteCallback: TypeAlias = Callable[[str], object] + +class Marshaller: + # TODO: Replace 'Any' with some kind of binding + dispatch: dict[type[Any], Callable[[Marshaller, Any, _WriteCallback], None]] + memo: dict[Any, None] + data: None + encoding: str | None + allow_none: bool + def __init__(self, encoding: str | None = None, allow_none: bool = False) -> None: ... + def dumps(self, values: Fault | Iterable[_Marshallable]) -> str: ... + def __dump(self, value: _Marshallable, write: _WriteCallback) -> None: ... # undocumented + def dump_nil(self, value: None, write: _WriteCallback) -> None: ... + def dump_bool(self, value: bool, write: _WriteCallback) -> None: ... + def dump_long(self, value: int, write: _WriteCallback) -> None: ... + def dump_int(self, value: int, write: _WriteCallback) -> None: ... + def dump_double(self, value: float, write: _WriteCallback) -> None: ... 
+ def dump_unicode(self, value: str, write: _WriteCallback, escape: Callable[[str], str] = ...) -> None: ... + def dump_bytes(self, value: ReadableBuffer, write: _WriteCallback) -> None: ... + def dump_array(self, value: Iterable[_Marshallable], write: _WriteCallback) -> None: ... + def dump_struct( + self, value: Mapping[str, _Marshallable], write: _WriteCallback, escape: Callable[[str], str] = ... + ) -> None: ... + def dump_datetime(self, value: _XMLDate, write: _WriteCallback) -> None: ... + def dump_instance(self, value: object, write: _WriteCallback) -> None: ... + +class Unmarshaller: + dispatch: dict[str, Callable[[Unmarshaller, str], None]] + + _type: str | None + _stack: list[_Marshallable] + _marks: list[int] + _data: list[str] + _value: bool + _methodname: str | None + _encoding: str + append: Callable[[Any], None] + _use_datetime: bool + _use_builtin_types: bool + def __init__(self, use_datetime: bool = False, use_builtin_types: bool = False) -> None: ... + def close(self) -> tuple[_Marshallable, ...]: ... + def getmethodname(self) -> str | None: ... + def xml(self, encoding: str, standalone: Any) -> None: ... # Standalone is ignored + def start(self, tag: str, attrs: dict[str, str]) -> None: ... + def data(self, text: str) -> None: ... + def end(self, tag: str) -> None: ... + def end_dispatch(self, tag: str, data: str) -> None: ... + def end_nil(self, data: str) -> None: ... + def end_boolean(self, data: str) -> None: ... + def end_int(self, data: str) -> None: ... + def end_double(self, data: str) -> None: ... + def end_bigdecimal(self, data: str) -> None: ... + def end_string(self, data: str) -> None: ... + def end_array(self, data: str) -> None: ... + def end_struct(self, data: str) -> None: ... + def end_base64(self, data: str) -> None: ... + def end_dateTime(self, data: str) -> None: ... + def end_value(self, data: str) -> None: ... + def end_params(self, data: str) -> None: ... + def end_fault(self, data: str) -> None: ... + def end_methodName(self, data: str) -> None: ... + +class _MultiCallMethod: # undocumented + __call_list: list[tuple[str, tuple[_Marshallable, ...]]] + __name: str + def __init__(self, call_list: list[tuple[str, _Marshallable]], name: str) -> None: ... + def __getattr__(self, name: str) -> _MultiCallMethod: ... + def __call__(self, *args: _Marshallable) -> None: ... + +class MultiCallIterator: # undocumented + results: list[list[_Marshallable]] + def __init__(self, results: list[list[_Marshallable]]) -> None: ... + def __getitem__(self, i: int) -> _Marshallable: ... + +class MultiCall: + __server: ServerProxy + __call_list: list[tuple[str, tuple[_Marshallable, ...]]] + def __init__(self, server: ServerProxy) -> None: ... + def __getattr__(self, name: str) -> _MultiCallMethod: ... + def __call__(self) -> MultiCallIterator: ... + +# A little white lie +FastMarshaller: Marshaller | None +FastParser: ExpatParser | None +FastUnmarshaller: Unmarshaller | None + +def getparser(use_datetime: bool = False, use_builtin_types: bool = False) -> tuple[ExpatParser, Unmarshaller]: ... +def dumps( + params: Fault | tuple[_Marshallable, ...], + methodname: str | None = None, + methodresponse: bool | None = None, + encoding: str | None = None, + allow_none: bool = False, +) -> str: ... +def loads( + data: str, use_datetime: bool = False, use_builtin_types: bool = False +) -> tuple[tuple[_Marshallable, ...], str | None]: ... +def gzip_encode(data: ReadableBuffer) -> bytes: ... 
# undocumented +def gzip_decode(data: ReadableBuffer, max_decode: int = 20971520) -> bytes: ... # undocumented + +class GzipDecodedResponse(gzip.GzipFile): # undocumented + io: BytesIO + def __init__(self, response: SupportsRead[ReadableBuffer]) -> None: ... + +class _Method: # undocumented + __send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable] + __name: str + def __init__(self, send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable], name: str) -> None: ... + def __getattr__(self, name: str) -> _Method: ... + def __call__(self, *args: _Marshallable) -> _Marshallable: ... + +class Transport: + user_agent: str + accept_gzip_encoding: bool + encode_threshold: int | None + + _use_datetime: bool + _use_builtin_types: bool + _connection: tuple[_HostType | None, http.client.HTTPConnection | None] + _headers: list[tuple[str, str]] + _extra_headers: list[tuple[str, str]] + + if sys.version_info >= (3, 8): + def __init__( + self, use_datetime: bool = False, use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = ... + ) -> None: ... + else: + def __init__(self, use_datetime: bool = False, use_builtin_types: bool = False) -> None: ... + + def request( + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = False + ) -> tuple[_Marshallable, ...]: ... + def single_request( + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = False + ) -> tuple[_Marshallable, ...]: ... + def getparser(self) -> tuple[ExpatParser, Unmarshaller]: ... + def get_host_info(self, host: _HostType) -> tuple[str, list[tuple[str, str]], dict[str, str]]: ... + def make_connection(self, host: _HostType) -> http.client.HTTPConnection: ... + def close(self) -> None: ... + def send_request( + self, host: _HostType, handler: str, request_body: _BufferWithLen, debug: bool + ) -> http.client.HTTPConnection: ... + def send_headers(self, connection: http.client.HTTPConnection, headers: list[tuple[str, str]]) -> None: ... + def send_content(self, connection: http.client.HTTPConnection, request_body: _BufferWithLen) -> None: ... + def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... + +class SafeTransport(Transport): + if sys.version_info >= (3, 8): + def __init__( + self, + use_datetime: bool = False, + use_builtin_types: bool = False, + *, + headers: Iterable[tuple[str, str]] = ..., + context: Any | None = None, + ) -> None: ... + else: + def __init__( + self, use_datetime: bool = False, use_builtin_types: bool = False, *, context: Any | None = None + ) -> None: ... + + def make_connection(self, host: _HostType) -> http.client.HTTPSConnection: ... + +class ServerProxy: + __host: str + __handler: str + __transport: Transport + __encoding: str + __verbose: bool + __allow_none: bool + + if sys.version_info >= (3, 8): + def __init__( + self, + uri: str, + transport: Transport | None = None, + encoding: str | None = None, + verbose: bool = False, + allow_none: bool = False, + use_datetime: bool = False, + use_builtin_types: bool = False, + *, + headers: Iterable[tuple[str, str]] = ..., + context: Any | None = None, + ) -> None: ... + else: + def __init__( + self, + uri: str, + transport: Transport | None = None, + encoding: str | None = None, + verbose: bool = False, + allow_none: bool = False, + use_datetime: bool = False, + use_builtin_types: bool = False, + *, + context: Any | None = None, + ) -> None: ... + + def __getattr__(self, name: str) -> _Method: ... 
+ @overload + def __call__(self, attr: Literal["close"]) -> Callable[[], None]: ... + @overload + def __call__(self, attr: Literal["transport"]) -> Transport: ... + @overload + def __call__(self, attr: str) -> Callable[[], None] | Transport: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __close(self) -> None: ... # undocumented + def __request(self, methodname: str, params: tuple[_Marshallable, ...]) -> tuple[_Marshallable, ...]: ... # undocumented + +Server = ServerProxy diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xmlrpc/server.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xmlrpc/server.pyi new file mode 100644 index 00000000..800c2055 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xmlrpc/server.pyi @@ -0,0 +1,143 @@ +import http.server +import pydoc +import socketserver +from collections.abc import Callable, Iterable, Mapping +from re import Pattern +from typing import Any, ClassVar, Protocol +from typing_extensions import TypeAlias +from xmlrpc.client import Fault, _Marshallable + +# The dispatch accepts anywhere from 0 to N arguments, no easy way to allow this in mypy +class _DispatchArity0(Protocol): + def __call__(self) -> _Marshallable: ... + +class _DispatchArity1(Protocol): + def __call__(self, __arg1: _Marshallable) -> _Marshallable: ... + +class _DispatchArity2(Protocol): + def __call__(self, __arg1: _Marshallable, __arg2: _Marshallable) -> _Marshallable: ... + +class _DispatchArity3(Protocol): + def __call__(self, __arg1: _Marshallable, __arg2: _Marshallable, __arg3: _Marshallable) -> _Marshallable: ... + +class _DispatchArity4(Protocol): + def __call__( + self, __arg1: _Marshallable, __arg2: _Marshallable, __arg3: _Marshallable, __arg4: _Marshallable + ) -> _Marshallable: ... + +class _DispatchArityN(Protocol): + def __call__(self, *args: _Marshallable) -> _Marshallable: ... + +_DispatchProtocol: TypeAlias = ( + _DispatchArity0 | _DispatchArity1 | _DispatchArity2 | _DispatchArity3 | _DispatchArity4 | _DispatchArityN +) + +def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = True) -> Any: ... # undocumented +def list_public_methods(obj: Any) -> list[str]: ... # undocumented + +class SimpleXMLRPCDispatcher: # undocumented + funcs: dict[str, _DispatchProtocol] + instance: Any | None + allow_none: bool + encoding: str + use_builtin_types: bool + def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... + def register_instance(self, instance: Any, allow_dotted_names: bool = False) -> None: ... + def register_function(self, function: _DispatchProtocol | None = None, name: str | None = None) -> Callable[..., Any]: ... + def register_introspection_functions(self) -> None: ... + def register_multicall_functions(self) -> None: ... + def _marshaled_dispatch( + self, + data: str, + dispatch_method: Callable[[str | None, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = None, + path: Any | None = None, + ) -> str: ... # undocumented + def system_listMethods(self) -> list[str]: ... # undocumented + def system_methodSignature(self, method_name: str) -> str: ... # undocumented + def system_methodHelp(self, method_name: str) -> str: ... 
# undocumented + def system_multicall(self, call_list: list[dict[str, _Marshallable]]) -> list[_Marshallable]: ... # undocumented + def _dispatch(self, method: str, params: Iterable[_Marshallable]) -> _Marshallable: ... # undocumented + +class SimpleXMLRPCRequestHandler(http.server.BaseHTTPRequestHandler): + rpc_paths: ClassVar[tuple[str, ...]] + encode_threshold: int # undocumented + aepattern: Pattern[str] # undocumented + def accept_encodings(self) -> dict[str, float]: ... + def is_rpc_path_valid(self) -> bool: ... + def do_POST(self) -> None: ... + def decode_request_content(self, data: bytes) -> bytes | None: ... + def report_404(self) -> None: ... + +class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): + _send_traceback_handler: bool + def __init__( + self, + addr: tuple[str, int], + requestHandler: type[SimpleXMLRPCRequestHandler] = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, + ) -> None: ... + +class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented + dispatchers: dict[str, SimpleXMLRPCDispatcher] + def __init__( + self, + addr: tuple[str, int], + requestHandler: type[SimpleXMLRPCRequestHandler] = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, + ) -> None: ... + def add_dispatcher(self, path: str, dispatcher: SimpleXMLRPCDispatcher) -> SimpleXMLRPCDispatcher: ... + def get_dispatcher(self, path: str) -> SimpleXMLRPCDispatcher: ... + +class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): + def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... + def handle_xmlrpc(self, request_text: str) -> None: ... + def handle_get(self) -> None: ... + def handle_request(self, request_text: str | None = None) -> None: ... + +class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented + def docroutine( # type: ignore[override] + self, + object: object, + name: str, + mod: str | None = None, + funcs: Mapping[str, str] = ..., + classes: Mapping[str, str] = ..., + methods: Mapping[str, str] = ..., + cl: type | None = None, + ) -> str: ... + def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: ... + +class XMLRPCDocGenerator: # undocumented + server_name: str + server_documentation: str + server_title: str + def set_server_title(self, server_title: str) -> None: ... + def set_server_name(self, server_name: str) -> None: ... + def set_server_documentation(self, server_documentation: str) -> None: ... + def generate_html_documentation(self) -> str: ... + +class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): + def do_GET(self) -> None: ... + +class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): + def __init__( + self, + addr: tuple[str, int], + requestHandler: type[SimpleXMLRPCRequestHandler] = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, + ) -> None: ... + +class DocCGIXMLRPCRequestHandler(CGIXMLRPCRequestHandler, XMLRPCDocGenerator): + def __init__(self) -> None: ... 
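# Editor's note: a hedged sketch, not part of the vendored xmlrpc.server stub. It shows
# the "any arity" dispatch pattern that the _DispatchArity* protocols above approximate:
# registered callables may take zero, one, or many _Marshallable arguments. The method
# names "ping" and "add" are invented; binding port 0 picks an ephemeral local port.
from xmlrpc.server import SimpleXMLRPCServer

with SimpleXMLRPCServer(("127.0.0.1", 0), logRequests=False, allow_none=True) as server:
    server.register_function(lambda: "pong", "ping")     # arity 0
    server.register_function(lambda a, b: a + b, "add")  # arity 2
    server.register_introspection_functions()
    print(server.system_listMethods())  # ['add', 'ping', 'system.listMethods', ...]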
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xxlimited.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xxlimited.pyi new file mode 100644 index 00000000..b2fb72ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/xxlimited.pyi @@ -0,0 +1,21 @@ +import sys +from typing import Any +from typing_extensions import final + +class Str: ... + +@final +class Xxo: + def demo(self) -> None: ... + +def foo(__i: int, __j: int) -> Any: ... +def new() -> Xxo: ... + +if sys.version_info >= (3, 10): + class Error: ... + +else: + class error: ... + class Null: ... + + def roj(__b: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zipapp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zipapp.pyi new file mode 100644 index 00000000..c7cf1704 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zipapp.pyi @@ -0,0 +1,20 @@ +from collections.abc import Callable +from pathlib import Path +from typing import BinaryIO +from typing_extensions import TypeAlias + +__all__ = ["ZipAppError", "create_archive", "get_interpreter"] + +_Path: TypeAlias = str | Path | BinaryIO + +class ZipAppError(ValueError): ... + +def create_archive( + source: _Path, + target: _Path | None = None, + interpreter: str | None = None, + main: str | None = None, + filter: Callable[[Path], bool] | None = None, + compressed: bool = False, +) -> None: ... +def get_interpreter(archive: _Path) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zipfile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zipfile.pyi new file mode 100644 index 00000000..b969d0cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zipfile.pyi @@ -0,0 +1,282 @@ +import io +import sys +from _typeshed import StrOrBytesPath, StrPath, _BufferWithLen +from collections.abc import Callable, Iterable, Iterator +from os import PathLike +from types import TracebackType +from typing import IO, Any, Protocol, overload +from typing_extensions import Literal, Self, TypeAlias + +__all__ = [ + "BadZipFile", + "BadZipfile", + "error", + "ZIP_STORED", + "ZIP_DEFLATED", + "ZIP_BZIP2", + "ZIP_LZMA", + "is_zipfile", + "ZipInfo", + "ZipFile", + "PyZipFile", + "LargeZipFile", +] + +if sys.version_info >= (3, 8): + __all__ += ["Path"] + +_DateTuple: TypeAlias = tuple[int, int, int, int, int, int] +_ReadWriteMode: TypeAlias = Literal["r", "w"] +_ReadWriteBinaryMode: TypeAlias = Literal["r", "w", "rb", "wb"] +_ZipFileMode: TypeAlias = Literal["r", "w", "x", "a"] + +class BadZipFile(Exception): ... + +BadZipfile = BadZipFile +error = BadZipfile + +class LargeZipFile(Exception): ... + +class _ZipStream(Protocol): + def read(self, __n: int) -> bytes: ... + # The following methods are optional: + # def seekable(self) -> bool: ... + # def tell(self) -> int: ... + # def seek(self, __n: int) -> object: ... + +# Stream shape as required by _EndRecData() and _EndRecData64(). +class _SupportsReadSeekTell(Protocol): + def read(self, __n: int = ...) -> bytes: ... + def seek(self, __cookie: int, __whence: int) -> object: ... + def tell(self) -> int: ... + +class _ClosableZipStream(_ZipStream, Protocol): + def close(self) -> object: ... 
+ +class ZipExtFile(io.BufferedIOBase): + MAX_N: int + MIN_READ_SIZE: int + MAX_SEEK_READ: int + newlines: list[bytes] | None + mode: _ReadWriteMode + name: str + @overload + def __init__( + self, fileobj: _ClosableZipStream, mode: _ReadWriteMode, zipinfo: ZipInfo, pwd: bytes | None, close_fileobj: Literal[True] + ) -> None: ... + @overload + def __init__( + self, + fileobj: _ClosableZipStream, + mode: _ReadWriteMode, + zipinfo: ZipInfo, + pwd: bytes | None = None, + *, + close_fileobj: Literal[True], + ) -> None: ... + @overload + def __init__( + self, + fileobj: _ZipStream, + mode: _ReadWriteMode, + zipinfo: ZipInfo, + pwd: bytes | None = None, + close_fileobj: Literal[False] = False, + ) -> None: ... + def read(self, n: int | None = -1) -> bytes: ... + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] + def peek(self, n: int = 1) -> bytes: ... + def read1(self, n: int | None) -> bytes: ... # type: ignore[override] + def seek(self, offset: int, whence: int = 0) -> int: ... + +class _Writer(Protocol): + def write(self, __s: str) -> object: ... + +class ZipFile: + filename: str | None + debug: int + comment: bytes + filelist: list[ZipInfo] + fp: IO[bytes] | None + NameToInfo: dict[str, ZipInfo] + start_dir: int # undocumented + compression: int # undocumented + compresslevel: int | None # undocumented + mode: _ZipFileMode # undocumented + pwd: bytes | None # undocumented + if sys.version_info >= (3, 11): + @overload + def __init__( + self, + file: StrPath | IO[bytes], + mode: Literal["r"] = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + metadata_encoding: str | None, + ) -> None: ... + @overload + def __init__( + self, + file: StrPath | IO[bytes], + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + metadata_encoding: None = None, + ) -> None: ... + elif sys.version_info >= (3, 8): + def __init__( + self, + file: StrPath | IO[bytes], + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + *, + strict_timestamps: bool = True, + ) -> None: ... + else: + def __init__( + self, + file: StrPath | IO[bytes], + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, + ) -> None: ... + + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def close(self) -> None: ... + def getinfo(self, name: str) -> ZipInfo: ... + def infolist(self) -> list[ZipInfo]: ... + def namelist(self) -> list[str]: ... + def open( + self, name: str | ZipInfo, mode: _ReadWriteMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False + ) -> IO[bytes]: ... + def extract(self, member: str | ZipInfo, path: StrPath | None = None, pwd: bytes | None = None) -> str: ... + def extractall( + self, path: StrPath | None = None, members: Iterable[str | ZipInfo] | None = None, pwd: bytes | None = None + ) -> None: ... + def printdir(self, file: _Writer | None = None) -> None: ... + def setpassword(self, pwd: bytes) -> None: ... + def read(self, name: str | ZipInfo, pwd: bytes | None = None) -> bytes: ... + def testzip(self) -> str | None: ... 
+ def write( + self, + filename: StrPath, + arcname: StrPath | None = None, + compress_type: int | None = None, + compresslevel: int | None = None, + ) -> None: ... + def writestr( + self, + zinfo_or_arcname: str | ZipInfo, + data: _BufferWithLen | str, + compress_type: int | None = None, + compresslevel: int | None = None, + ) -> None: ... + if sys.version_info >= (3, 11): + def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: ... + +class PyZipFile(ZipFile): + def __init__( + self, file: str | IO[bytes], mode: _ZipFileMode = "r", compression: int = 0, allowZip64: bool = True, optimize: int = -1 + ) -> None: ... + def writepy(self, pathname: str, basename: str = "", filterfunc: Callable[[str], bool] | None = None) -> None: ... + +class ZipInfo: + filename: str + date_time: _DateTuple + compress_type: int + comment: bytes + extra: bytes + create_system: int + create_version: int + extract_version: int + reserved: int + flag_bits: int + volume: int + internal_attr: int + external_attr: int + header_offset: int + CRC: int + compress_size: int + file_size: int + orig_filename: str # undocumented + def __init__(self, filename: str = "NoName", date_time: _DateTuple = ...) -> None: ... + if sys.version_info >= (3, 8): + @classmethod + def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: ... + else: + @classmethod + def from_file(cls, filename: StrPath, arcname: StrPath | None = None) -> Self: ... + + def is_dir(self) -> bool: ... + def FileHeader(self, zip64: bool | None = None) -> bytes: ... + +class _PathOpenProtocol(Protocol): + def __call__(self, mode: _ReadWriteMode = ..., pwd: bytes | None = ..., *, force_zip64: bool = ...) -> IO[bytes]: ... + +if sys.version_info >= (3, 8): + class Path: + @property + def name(self) -> str: ... + @property + def parent(self) -> PathLike[str]: ... # undocumented + if sys.version_info >= (3, 10): + @property + def filename(self) -> PathLike[str]: ... # undocumented + if sys.version_info >= (3, 11): + @property + def suffix(self) -> str: ... + @property + def suffixes(self) -> list[str]: ... + @property + def stem(self) -> str: ... + + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... + if sys.version_info >= (3, 9): + def open( + self, mode: _ReadWriteBinaryMode = "r", *args: Any, pwd: bytes | None = None, **kwargs: Any + ) -> IO[bytes]: ... + else: + @property + def open(self) -> _PathOpenProtocol: ... + + def iterdir(self) -> Iterator[Path]: ... + def is_dir(self) -> bool: ... + def is_file(self) -> bool: ... + def exists(self) -> bool: ... + def read_text( + self, + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + line_buffering: bool = ..., + write_through: bool = ..., + ) -> str: ... + def read_bytes(self) -> bytes: ... + if sys.version_info >= (3, 10): + def joinpath(self, *other: StrPath) -> Path: ... + else: + def joinpath(self, add: StrPath) -> Path: ... # undocumented + + def __truediv__(self, add: StrPath) -> Path: ... + +def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: ... 
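# Editor's note: an illustrative sketch, not part of the vendored zipfile stub. It
# exercises the ZipFile/ZipInfo/is_zipfile surface above with an in-memory buffer, which
# also satisfies the _SupportsReadSeekTell protocol that is_zipfile accepts.
import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, mode="w", compression=zipfile.ZIP_DEFLATED) as zf:
    zf.writestr("hello.txt", "hello zip")  # writestr accepts str as well as bytes
print(zipfile.is_zipfile(buf))  # True: buf supports read/seek/tell
with zipfile.ZipFile(buf) as zf:
    info = zf.getinfo("hello.txt")
    print(info.file_size, zf.read(info).decode())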
+ +ZIP_STORED: int +ZIP_DEFLATED: int +ZIP64_LIMIT: int +ZIP_FILECOUNT_LIMIT: int +ZIP_MAX_COMMENT: int +ZIP_BZIP2: int +ZIP_LZMA: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zipimport.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zipimport.pyi new file mode 100644 index 00000000..ee97faac --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zipimport.pyi @@ -0,0 +1,31 @@ +import sys +from _typeshed import StrOrBytesPath +from importlib.abc import ResourceReader +from importlib.machinery import ModuleSpec +from types import CodeType, ModuleType + +if sys.version_info >= (3, 8): + __all__ = ["ZipImportError", "zipimporter"] + +class ZipImportError(ImportError): ... + +class zipimporter: + archive: str + prefix: str + if sys.version_info >= (3, 11): + def __init__(self, path: str) -> None: ... + else: + def __init__(self, path: StrOrBytesPath) -> None: ... + + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... # undocumented + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... + def get_code(self, fullname: str) -> CodeType: ... + def get_data(self, pathname: str) -> bytes: ... + def get_filename(self, fullname: str) -> str: ... + def get_resource_reader(self, fullname: str) -> ResourceReader | None: ... # undocumented + def get_source(self, fullname: str) -> str | None: ... + def is_package(self, fullname: str) -> bool: ... + def load_module(self, fullname: str) -> ModuleType: ... + if sys.version_info >= (3, 10): + def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: ... + def invalidate_caches(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zlib.pyi new file mode 100644 index 00000000..c3419af0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zlib.pyi @@ -0,0 +1,56 @@ +import sys +from _typeshed import ReadableBuffer +from typing_extensions import Literal + +DEFLATED: Literal[8] +DEF_MEM_LEVEL: int # can change +DEF_BUF_SIZE: Literal[16384] +MAX_WBITS: int +ZLIB_VERSION: str # can change +ZLIB_RUNTIME_VERSION: str # can change +Z_NO_COMPRESSION: Literal[0] +Z_PARTIAL_FLUSH: Literal[1] +Z_BEST_COMPRESSION: Literal[9] +Z_BEST_SPEED: Literal[1] +Z_BLOCK: Literal[5] +Z_DEFAULT_COMPRESSION: Literal[-1] +Z_DEFAULT_STRATEGY: Literal[0] +Z_FILTERED: Literal[1] +Z_FINISH: Literal[4] +Z_FIXED: Literal[4] +Z_FULL_FLUSH: Literal[3] +Z_HUFFMAN_ONLY: Literal[2] +Z_NO_FLUSH: Literal[0] +Z_RLE: Literal[3] +Z_SYNC_FLUSH: Literal[2] +Z_TREES: Literal[6] + +class error(Exception): ... + +class _Compress: + def compress(self, data: ReadableBuffer) -> bytes: ... + def flush(self, mode: int = ...) -> bytes: ... + def copy(self) -> _Compress: ... + +class _Decompress: + unused_data: bytes + unconsumed_tail: bytes + eof: bool + def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... + def flush(self, length: int = ...) -> bytes: ... + def copy(self) -> _Decompress: ... + +def adler32(__data: ReadableBuffer, __value: int = 1) -> int: ... + +if sys.version_info >= (3, 11): + def compress(__data: ReadableBuffer, level: int = -1, wbits: int = 15) -> bytes: ... + +else: + def compress(__data: ReadableBuffer, level: int = -1) -> bytes: ... 
+ +def compressobj( + level: int = -1, method: int = 8, wbits: int = 15, memLevel: int = 8, strategy: int = 0, zdict: ReadableBuffer | None = None +) -> _Compress: ... +def crc32(__data: ReadableBuffer, __value: int = 0) -> int: ... +def decompress(__data: ReadableBuffer, wbits: int = 15, bufsize: int = 16384) -> bytes: ... +def decompressobj(wbits: int = 15, zdict: ReadableBuffer = b"") -> _Decompress: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zoneinfo/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zoneinfo/__init__.pyi new file mode 100644 index 00000000..fe994be3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stdlib/zoneinfo/__init__.pyi @@ -0,0 +1,38 @@ +from _typeshed import StrPath +from collections.abc import Iterable, Sequence +from datetime import datetime, timedelta, tzinfo +from typing import Any, Protocol +from typing_extensions import Self + +__all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] + +class _IOBytes(Protocol): + def read(self, __size: int) -> bytes: ... + def seek(self, __size: int, __whence: int = ...) -> Any: ... + +class ZoneInfo(tzinfo): + @property + def key(self) -> str: ... + def __init__(self, key: str) -> None: ... + @classmethod + def no_cache(cls, key: str) -> Self: ... + @classmethod + def from_file(cls, __fobj: _IOBytes, key: str | None = ...) -> Self: ... + @classmethod + def clear_cache(cls, *, only_keys: Iterable[str] | None = ...) -> None: ... + def tzname(self, __dt: datetime | None) -> str | None: ... + def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... + def dst(self, __dt: datetime | None) -> timedelta | None: ... + +# Note: Both here and in clear_cache, the types allow the use of `str` where +# a sequence of strings is required. This should be remedied if a solution +# to this typing bug is found: https://github.com/python/typing/issues/256 +def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: ... +def available_timezones() -> set[str]: ... + +TZPATH: Sequence[str] + +class ZoneInfoNotFoundError(KeyError): ... +class InvalidTZPathWarning(RuntimeWarning): ... + +def __dir__() -> list[str]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/METADATA.toml new file mode 100644 index 00000000..2d07bb35 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/METADATA.toml @@ -0,0 +1,9 @@ +version = "0.1.*" +requires = ["types-Pillow"] + +[tool.stubtest] +ignore_missing_stub = true +# TODO: figure out how to run stubtest for this package +# (the package pins Pillow in a problematic way) +skip = true +platforms = ["win32"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/__init__.pyi new file mode 100644 index 00000000..17f1dd8e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/__init__.pyi @@ -0,0 +1,12 @@ +from d3dshot.capture_output import CaptureOutputs as CaptureOutputs +from d3dshot.d3dshot import D3DShot as D3DShot + +pil_is_available: bool +numpy_is_available: bool +pytorch_is_available: bool +pytorch_gpu_is_available: bool +capture_output_mapping: dict[str, CaptureOutputs] +capture_outputs: list[str] + +def determine_available_capture_outputs() -> list[CaptureOutputs]: ... +def create(capture_output: str = ..., frame_buffer_size: int = ...) -> D3DShot: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_output.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_output.pyi new file mode 100644 index 00000000..0fc75136 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_output.pyi @@ -0,0 +1,42 @@ +import enum +from _typeshed import Incomplete +from collections.abc import Sequence +from ctypes import _CVoidConstPLike +from typing_extensions import Literal, TypeAlias + +from PIL import Image + +_Frame: TypeAlias = Image.Image | Incomplete +# stub_uploader doesn't allow numpy and torch because D3DShot doesn't declare it as a dependency +# from torch import Tensor +# import numpy.typing as npt +# _Frame: TypeAlias = Image.Image | npt.NDArray[np.int32] | npt.NDArray[np.float32] | Tensor + +class CaptureOutputs(enum.Enum): + PIL: int + NUMPY: int + NUMPY_FLOAT: int + PYTORCH: int + PYTORCH_FLOAT: int + PYTORCH_GPU: int + PYTORCH_FLOAT_GPU: int + +class CaptureOutputError(BaseException): ... + +# All CaptureOutput methods just reference the backend. Making this both a base class and a wrapper. +class CaptureOutput: + # `backend` is a subclass of CaptureOutput based on the CaptureOutputs enum passed to __init__ + backend: CaptureOutput + def __init__(self, backend: CaptureOutputs = ...) -> None: ... + def process( + self, + pointer: _CVoidConstPLike, + pitch: int, + size: int, + width: int, + height: int, + region: tuple[int, int, int, int], + rotation: int, + ) -> _Frame: ... + def to_pil(self, frame: _Frame) -> Image.Image: ... + def stack(self, frames: Sequence[_Frame], stack_dimension: Literal["first", "last"]) -> _Frame: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/numpy_capture_output.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/numpy_capture_output.pyi new file mode 100644 index 00000000..0aec1bc8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/numpy_capture_output.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete +from collections.abc import Sequence +from ctypes import _CVoidConstPLike +from typing_extensions import Literal, TypeAlias + +from d3dshot.capture_output import CaptureOutput +from PIL import Image + +# stub_uploader doesn't allow numpy because D3DShot doesn't declare it as a dependency +# import numpy as np +# import numpy.typing as npt +# _NDArray: TypeAlias = npt.NDArray[np.int32] +_NDArray: TypeAlias = Incomplete + +class NumpyCaptureOutput(CaptureOutput): + def __init__(self) -> None: ... + def process( + self, + pointer: _CVoidConstPLike, + pitch: int, + size: int, + width: int, + height: int, + region: tuple[int, int, int, int], + rotation: int, + ) -> _NDArray: ... + def to_pil(self, frame: _NDArray) -> Image.Image: ... + def stack(self, frames: Sequence[_NDArray] | _NDArray, stack_dimension: Literal["first", "last"]) -> _NDArray: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/numpy_float_capture_output.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/numpy_float_capture_output.pyi new file mode 100644 index 00000000..2bf3331d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/numpy_float_capture_output.pyi @@ -0,0 +1,5 @@ +from d3dshot.capture_outputs.numpy_capture_output import NumpyCaptureOutput + +# stub_uploader doesn't allow numpy because D3DShot doesn't declare it as a dependency +# this CaptureOutput should be float based +class NumpyFloatCaptureOutput(NumpyCaptureOutput): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pil_capture_output.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pil_capture_output.pyi new file mode 100644 index 00000000..d1e2c878 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pil_capture_output.pyi @@ -0,0 +1,24 @@ +from _typeshed import Unused +from collections.abc import Sequence +from ctypes import _CVoidConstPLike +from typing import TypeVar + +from d3dshot.capture_output import CaptureOutput +from PIL import Image + +_ImageT = TypeVar("_ImageT", bound=Image.Image) + +class PILCaptureOutput(CaptureOutput): + def __init__(self) -> None: ... + def process( + self, + pointer: _CVoidConstPLike, + pitch: int, + size: int, + width: int, + height: int, + region: tuple[int, int, int, int], + rotation: int, + ) -> Image.Image: ... + def to_pil(self, frame: _ImageT) -> _ImageT: ... 
+ def stack(self, frames: Sequence[_ImageT], stack_dimension: Unused) -> Sequence[_ImageT]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_capture_output.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_capture_output.pyi new file mode 100644 index 00000000..9fc7df7e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_capture_output.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from collections.abc import Sequence +from ctypes import _CVoidConstPLike +from typing_extensions import Literal, TypeAlias + +from d3dshot.capture_output import CaptureOutput +from PIL import Image + +# stub_uploader doesn't allow torch because D3DShot doesn't declare it as a dependency +# from torch import Tensor +_Tensor: TypeAlias = Incomplete + +class PytorchCaptureOutput(CaptureOutput): + def __init__(self) -> None: ... + def process( + self, + pointer: _CVoidConstPLike, + pitch: int, + size: int, + width: int, + height: int, + region: tuple[int, int, int, int], + rotation: int, + ) -> _Tensor: ... + def to_pil(self, frame: _Tensor) -> Image.Image: ... + def stack(self, frames: Sequence[_Tensor], stack_dimension: Literal["first", "last"]) -> _Tensor: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_float_capture_output.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_float_capture_output.pyi new file mode 100644 index 00000000..53e7a73d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_float_capture_output.pyi @@ -0,0 +1,3 @@ +from d3dshot.capture_outputs.pytorch_capture_output import PytorchCaptureOutput + +class PytorchFloatCaptureOutput(PytorchCaptureOutput): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_float_gpu_capture_output.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_float_gpu_capture_output.pyi new file mode 100644 index 00000000..2e7c6c10 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_float_gpu_capture_output.pyi @@ -0,0 +1,3 @@ +from d3dshot.capture_outputs.pytorch_gpu_capture_output import PytorchGPUCaptureOutput + +class PytorchFloatGPUCaptureOutput(PytorchGPUCaptureOutput): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_gpu_capture_output.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_gpu_capture_output.pyi new file mode 100644 index 00000000..d78cc60e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/capture_outputs/pytorch_gpu_capture_output.pyi @@ -0,0 +1,3 @@ +from d3dshot.capture_outputs.pytorch_capture_output import PytorchCaptureOutput + +class PytorchGPUCaptureOutput(PytorchCaptureOutput): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/d3dshot.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/d3dshot.pyi new file mode 100644 index 00000000..5424517e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/d3dshot.pyi @@ -0,0 +1,45 @@ +from collections import deque +from collections.abc import Iterable + +from d3dshot.capture_output import CaptureOutput as CaptureOutput, CaptureOutputs as CaptureOutputs, _Frame +from d3dshot.display import Display as Display + +class Singleton(type): ... + +class D3DShot(metaclass=Singleton): + displays: list[Display] + display: Display + capture_output: CaptureOutput + frame_buffer_size: int + frame_buffer: deque[_Frame] + previous_screenshot: _Frame | None + region: tuple[int, int, int, int] | None + + def __init__( + self, + capture_output: CaptureOutputs = ..., + frame_buffer_size: int = ..., + pil_is_available: bool = ..., + numpy_is_available: bool = ..., + pytorch_is_available: bool = ..., + pytorch_gpu_is_available: bool = ..., + ) -> None: ... + @property + def is_capturing(self) -> bool: ... + def get_latest_frame(self) -> _Frame | None: ... + def get_frame(self, frame_index: int) -> _Frame | None: ... + def get_frames(self, frame_indices: Iterable[int]) -> list[_Frame]: ... + def get_frame_stack(self, frame_indices: Iterable[int], stack_dimension: str | None = ...) -> _Frame: ... + def screenshot(self, region: tuple[int, int, int, int] | None = ...) -> _Frame | None: ... + def screenshot_to_disk( + self, directory: str | None = ..., file_name: str | None = ..., region: tuple[int, int, int, int] | None = ... + ) -> str: ... + def frame_buffer_to_disk(self, directory: str | None = ...) -> None: ... + def capture(self, target_fps: int = ..., region: tuple[int, int, int, int] | None = ...) -> bool: ... + def screenshot_every(self, interval: float, region: tuple[int, int, int, int] | None = ...) -> bool: ... + def screenshot_to_disk_every( + self, interval: float, directory: str | None = ..., region: tuple[int, int, int, int] | None = ... + ) -> bool: ... + def stop(self) -> bool: ... + def benchmark(self) -> None: ... + def detect_displays(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/display.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/display.pyi new file mode 100644 index 00000000..fb30dd2b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/display.pyi @@ -0,0 +1,49 @@ +from ctypes import _Pointer +from typing_extensions import TypedDict + +from d3dshot.dll import _ProcessFunc, _ProcessFuncRegionArg, _ProcessFuncReturn +from d3dshot.dll.d3d import ID3D11Device, ID3D11DeviceContext +from d3dshot.dll.dxgi import IDXGIAdapter, IDXGIOutput1, IDXGIOutputDuplication + +class _PositionDict(TypedDict): + left: int + top: int + right: int + bottom: int + +class Display: + name: str + adapter_name: str + resolution: tuple[int, int] + position: _PositionDict + rotation: int + scale_factor: float + is_primary: bool + hmonitor: int + dxgi_output: IDXGIOutput1 | None + dxgi_adapter: _Pointer[IDXGIAdapter] | None + # Note that Display.d3d_device and Display.d3d_device_context can never be None. 
+ # Despite initially being set to None in __init__, + # they're always immediately set in _initialize_dxgi_output_duplication() + d3d_device: ID3D11Device + d3d_device_context: ID3D11DeviceContext + dxgi_output_duplication: _Pointer[IDXGIOutputDuplication] + + def __init__( + self, + name: str | None = ..., + adapter_name: str | None = ..., + resolution: tuple[int, int] | None = ..., + position: _PositionDict | None = ..., + rotation: int | None = ..., + scale_factor: float | None = ..., + is_primary: bool = ..., + hmonitor: int | None = ..., + dxgi_output: IDXGIOutput1 | None = ..., + dxgi_adapter: _Pointer[IDXGIAdapter] | None = ..., + ) -> None: ... + def capture( + self, process_func: _ProcessFunc[_ProcessFuncRegionArg, _ProcessFuncReturn] | None, region: _ProcessFuncRegionArg = ... + ) -> _ProcessFuncReturn: ... + @classmethod + def discover_displays(cls) -> list[Display]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/__init__.pyi new file mode 100644 index 00000000..82dc9b18 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/__init__.pyi @@ -0,0 +1,28 @@ +import sys +from _typeshed import Incomplete +from collections.abc import Callable +from ctypes import _CData, c_ulong +from ctypes.wintypes import PFLOAT +from typing import TypeVar +from typing_extensions import TypeAlias + +from d3dshot.capture_output import _Frame + +_ProcessFuncRegionArg = TypeVar("_ProcessFuncRegionArg", tuple[int, int, int, int], None) +_ProcessFuncReturn = TypeVar("_ProcessFuncReturn", _Frame, None) +# The _ProcessFunc alias is used in multiple submodules +_ProcessFunc: TypeAlias = Callable[[PFLOAT, int, int, int, int, _ProcessFuncRegionArg, int], _ProcessFuncReturn] # noqa: Y047 + +if sys.platform == "win32": + from ctypes import HRESULT + + _HRESULT: TypeAlias = HRESULT +else: + _HRESULT: TypeAlias = Incomplete + +# comtypes is not typed +# from comtypes import IUnknown +class _IUnknown(_CData): + def QueryInterface(self, interface: type, iid: _CData | None = ...) -> _HRESULT: ... + def AddRef(self) -> c_ulong: ... + def Release(self) -> c_ulong: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/d3d.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/d3d.pyi new file mode 100644 index 00000000..e412c2db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/d3d.pyi @@ -0,0 +1,214 @@ +from ctypes import Structure, _Pointer, c_int32, c_uint, c_void_p +from ctypes.wintypes import FLOAT, UINT + +from d3dshot.dll import _HRESULT, _IUnknown +from d3dshot.dll.dxgi import IDXGIAdapter + +class DXGI_SAMPLE_DESC(Structure): + Count: UINT + Quality: UINT + +class D3D11_BOX(Structure): + left: UINT + top: UINT + front: UINT + right: UINT + bottom: UINT + back: UINT + +class D3D11_TEXTURE2D_DESC(Structure): + Width: UINT + Height: UINT + MipLevels: UINT + ArraySize: UINT + Format: UINT + SampleDesc: DXGI_SAMPLE_DESC + Usage: UINT + BindFlags: UINT + CPUAccessFlags: UINT + MiscFlags: UINT + +class ID3D11DeviceChild(_IUnknown): + def GetDevice(self) -> None: ... + def GetPrivateData(self) -> _HRESULT: ... + def SetPrivateData(self) -> _HRESULT: ... + def SetPrivateDataInterface(self) -> _HRESULT: ... 
+ +class ID3D11Resource(ID3D11DeviceChild): + def GetType(self) -> None: ... + def SetEvictionPriority(self) -> None: ... + def GetEvictionPriority(self) -> UINT: ... + +class ID3D11Texture2D(ID3D11Resource): + def GetDesc(self, __pDesc: _Pointer[D3D11_TEXTURE2D_DESC]) -> None: ... + +class ID3D11DeviceContext(ID3D11DeviceChild): + def VSSetConstantBuffers(self) -> None: ... + def PSSetShaderResources(self) -> None: ... + def PSSetShader(self) -> None: ... + def PSSetSamplers(self) -> None: ... + def VSSetShader(self) -> None: ... + def DrawIndexed(self) -> None: ... + def Draw(self) -> None: ... + def Map(self) -> _HRESULT: ... + def Unmap(self) -> None: ... + def PSSetConstantBuffers(self) -> None: ... + def IASetInputLayout(self) -> None: ... + def IASetVertexBuffers(self) -> None: ... + def IASetIndexBuffer(self) -> None: ... + def DrawIndexedInstanced(self) -> None: ... + def DrawInstanced(self) -> None: ... + def GSSetConstantBuffers(self) -> None: ... + def GSSetShader(self) -> None: ... + def IASetPrimitiveTopology(self) -> None: ... + def VSSetShaderResources(self) -> None: ... + def VSSetSamplers(self) -> None: ... + def Begin(self) -> None: ... + def End(self) -> None: ... + def GetData(self) -> _HRESULT: ... + def SetPredication(self) -> None: ... + def GSSetShaderResources(self) -> None: ... + def GSSetSamplers(self) -> None: ... + def OMSetRenderTargets(self) -> None: ... + def OMSetRenderTargetsAndUnorderedAccessViews(self) -> None: ... + def OMSetBlendState(self) -> None: ... + def OMSetDepthStencilState(self) -> None: ... + def SOSetTargets(self) -> None: ... + def DrawAuto(self) -> None: ... + def DrawIndexedInstancedIndirect(self) -> None: ... + def DrawInstancedIndirect(self) -> None: ... + def Dispatch(self) -> None: ... + def DispatchIndirect(self) -> None: ... + def RSSetState(self) -> None: ... + def RSSetViewports(self) -> None: ... + def RSSetScissorRects(self) -> None: ... + def CopySubresourceRegion( + self, + __pDstResource: _Pointer[ID3D11Resource], + __DstSubresource: UINT, + __DstX: UINT, + __DstY: UINT, + __DstZ: UINT, + __pSrcResource: _Pointer[ID3D11Resource], + __SrcSubresource: UINT, + __pSrcBox: _Pointer[D3D11_BOX], + ) -> None: ... + def CopyResource(self, __pDstResource: _Pointer[ID3D11Resource], __pSrcResource: _Pointer[ID3D11Resource]) -> None: ... + def UpdateSubresource(self) -> None: ... + def CopyStructureCount(self) -> None: ... + def ClearRenderTargetView(self) -> None: ... + def ClearUnorderedAccessViewUint(self) -> None: ... + def ClearUnorderedAccessViewFloat(self) -> None: ... + def ClearDepthStencilView(self) -> None: ... + def GenerateMips(self) -> None: ... + def SetResourceMinLOD(self) -> None: ... + def GetResourceMinLOD(self) -> FLOAT: ... + def ResolveSubresource(self) -> None: ... + def ExecuteCommandList(self) -> None: ... + def HSSetShaderResources(self) -> None: ... + def HSSetShader(self) -> None: ... + def HSSetSamplers(self) -> None: ... + def HSSetConstantBuffers(self) -> None: ... + def DSSetShaderResources(self) -> None: ... + def DSSetShader(self) -> None: ... + def DSSetSamplers(self) -> None: ... + def DSSetConstantBuffers(self) -> None: ... + def CSSetShaderResources(self) -> None: ... + def CSSetUnorderedAccessViews(self) -> None: ... + def CSSetShader(self) -> None: ... + def CSSetSamplers(self) -> None: ... + def CSSetConstantBuffers(self) -> None: ... + def VSGetConstantBuffers(self) -> None: ... + def PSGetShaderResources(self) -> None: ... + def PSGetShader(self) -> None: ... 
+ def PSGetSamplers(self) -> None: ... + def VSGetShader(self) -> None: ... + def PSGetConstantBuffers(self) -> None: ... + def IAGetInputLayout(self) -> None: ... + def IAGetVertexBuffers(self) -> None: ... + def IAGetIndexBuffer(self) -> None: ... + def GSGetConstantBuffers(self) -> None: ... + def GSGetShader(self) -> None: ... + def IAGetPrimitiveTopology(self) -> None: ... + def VSGetShaderResources(self) -> None: ... + def VSGetSamplers(self) -> None: ... + def GetPredication(self) -> None: ... + def GSGetShaderResources(self) -> None: ... + def GSGetSamplers(self) -> None: ... + def OMGetRenderTargets(self) -> None: ... + def OMGetRenderTargetsAndUnorderedAccessViews(self) -> None: ... + def OMGetBlendState(self) -> None: ... + def OMGetDepthStencilState(self) -> None: ... + def SOGetTargets(self) -> None: ... + def RSGetState(self) -> None: ... + def RSGetViewports(self) -> None: ... + def RSGetScissorRects(self) -> None: ... + def HSGetShaderResources(self) -> None: ... + def HSGetShader(self) -> None: ... + def HSGetSamplers(self) -> None: ... + def HSGetConstantBuffers(self) -> None: ... + def DSGetShaderResources(self) -> None: ... + def DSGetShader(self) -> None: ... + def DSGetSamplers(self) -> None: ... + def DSGetConstantBuffers(self) -> None: ... + def CSGetShaderResources(self) -> None: ... + def CSGetUnorderedAccessViews(self) -> None: ... + def CSGetShader(self) -> None: ... + def CSGetSamplers(self) -> None: ... + def CSGetConstantBuffers(self) -> None: ... + def ClearState(self) -> None: ... + def Flush(self) -> None: ... + def GetType(self) -> None: ... + def GetContextFlags(self) -> UINT: ... + def FinishCommandList(self) -> _HRESULT: ... + +class ID3D11Device(_IUnknown): + def CreateBuffer(self) -> _HRESULT: ... + def CreateTexture1D(self) -> _HRESULT: ... + def CreateTexture2D( + self, + __pDesc: _Pointer[D3D11_TEXTURE2D_DESC], + __pInitialData: c_void_p, + __ppTexture2D: _Pointer[_Pointer[ID3D11Texture2D]], + ) -> _HRESULT: ... + def CreateTexture3D(self) -> _HRESULT: ... + def CreateShaderResourceView(self) -> _HRESULT: ... + def CreateUnorderedAccessView(self) -> _HRESULT: ... + def CreateRenderTargetView(self) -> _HRESULT: ... + def CreateDepthStencilView(self) -> _HRESULT: ... + def CreateInputLayout(self) -> _HRESULT: ... + def CreateVertexShader(self) -> _HRESULT: ... + def CreateGeometryShader(self) -> _HRESULT: ... + def CreateGeometryShaderWithStreamOutput(self) -> _HRESULT: ... + def CreatePixelShader(self) -> _HRESULT: ... + def CreateHullShader(self) -> _HRESULT: ... + def CreateDomainShader(self) -> _HRESULT: ... + def CreateComputeShader(self) -> _HRESULT: ... + def CreateClassLinkage(self) -> _HRESULT: ... + def CreateBlendState(self) -> _HRESULT: ... + def CreateDepthStencilState(self) -> _HRESULT: ... + def CreateRasterizerState(self) -> _HRESULT: ... + def CreateSamplerState(self) -> _HRESULT: ... + def CreateQuery(self) -> _HRESULT: ... + def CreatePredicate(self) -> _HRESULT: ... + def CreateCounter(self) -> _HRESULT: ... + def CreateDeferredContext(self) -> _HRESULT: ... + def OpenSharedResource(self) -> _HRESULT: ... + def CheckFormatSupport(self) -> _HRESULT: ... + def CheckMultisampleQualityLevels(self) -> _HRESULT: ... + def CheckCounterInfo(self) -> _HRESULT: ... + def CheckCounter(self) -> _HRESULT: ... + def CheckFeatureSupport(self) -> _HRESULT: ... + def GetPrivateData(self) -> _HRESULT: ... + def SetPrivateData(self) -> _HRESULT: ... + def SetPrivateDataInterface(self) -> _HRESULT: ... + def GetFeatureLevel(self) -> c_int32: ... 
+ def GetCreationFlags(self) -> c_uint: ... + def GetDeviceRemovedReason(self) -> _HRESULT: ... + def GetImmediateContext(self, __ppImmediateContext: _Pointer[_Pointer[ID3D11DeviceContext]]) -> None: ... + def SetExceptionMode(self) -> _HRESULT: ... + def GetExceptionMode(self) -> c_uint: ... + +def initialize_d3d_device(dxgi_adapter: _Pointer[IDXGIAdapter]) -> tuple[ID3D11Device, ID3D11DeviceContext]: ... +def describe_d3d11_texture_2d(d3d11_texture_2d: ID3D11Texture2D) -> D3D11_TEXTURE2D_DESC: ... +def prepare_d3d11_texture_2d_for_cpu(d3d11_texture_2d: ID3D11Texture2D, d3d_device: ID3D11Device) -> ID3D11Texture2D: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/dxgi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/dxgi.pyi new file mode 100644 index 00000000..d26d4686 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/dxgi.pyi @@ -0,0 +1,154 @@ +from ctypes import Array, Structure, _Pointer, c_uint +from ctypes.wintypes import BOOL, DWORD, HMONITOR, INT, LARGE_INTEGER, LONG, PFLOAT, POINT, RECT, UINT, ULARGE_INTEGER, WCHAR +from typing_extensions import TypedDict + +from d3dshot.dll import _HRESULT, _IUnknown, _ProcessFunc, _ProcessFuncRegionArg, _ProcessFuncReturn +from d3dshot.dll.d3d import ID3D11Device + +class _DXGIOutputPosition(TypedDict): + left: LONG + top: LONG + right: LONG + bottom: LONG + +class _DXGIOutput(TypedDict): + name: str + position: _DXGIOutputPosition + resolution: tuple[tuple[LONG, LONG], tuple[LONG, LONG]] + rotation: int + is_attached_to_desktop: bool + +class LUID(Structure): + LowPart: DWORD + HighPart: LONG + +class DXGI_ADAPTER_DESC1(Structure): + Description: Array[WCHAR] + VendorId: UINT + DeviceId: UINT + SubSysId: UINT + Revision: UINT + DedicatedVideoMemory: ULARGE_INTEGER + DedicatedSystemMemory: ULARGE_INTEGER + SharedSystemMemory: ULARGE_INTEGER + AdapterLuid: LUID + Flags: UINT + +class DXGI_OUTPUT_DESC(Structure): + DeviceName: Array[WCHAR] + DesktopCoordinates: RECT + AttachedToDesktop: BOOL + Rotation: UINT + Monitor: HMONITOR + +class DXGI_OUTDUPL_POINTER_POSITION(Structure): + Position: POINT + Visible: BOOL + +class DXGI_OUTDUPL_FRAME_INFO(Structure): + LastPresentTime: LARGE_INTEGER + LastMouseUpdateTime: LARGE_INTEGER + AccumulatedFrames: UINT + RectsCoalesced: BOOL + ProtectedContentMaskedOut: BOOL + PointerPosition: DXGI_OUTDUPL_POINTER_POSITION + TotalMetadataBufferSize: UINT + PointerShapeBufferSize: UINT + +class DXGI_MAPPED_RECT(Structure): + Pitch: INT + pBits: PFLOAT + +class IDXGIObject(_IUnknown): + def SetPrivateData(self) -> _HRESULT: ... + def SetPrivateDataInterface(self) -> _HRESULT: ... + def GetPrivateData(self) -> _HRESULT: ... + def GetParent(self) -> _HRESULT: ... + +class IDXGIDeviceSubObject(IDXGIObject): + def GetDevice(self) -> _HRESULT: ... + +class IDXGIResource(IDXGIDeviceSubObject): + def GetSharedHandle(self) -> _HRESULT: ... + def GetUsage(self) -> _HRESULT: ... + def SetEvictionPriority(self) -> _HRESULT: ... + def GetEvictionPriority(self) -> _HRESULT: ... + +class IDXGISurface(IDXGIDeviceSubObject): + def GetDesc(self) -> _HRESULT: ... + def Map(self, __pLockedRect: _Pointer[DXGI_MAPPED_RECT], __MapFlags: UINT) -> _HRESULT: ... + def Unmap(self) -> _HRESULT: ... + +class IDXGIOutputDuplication(IDXGIObject): + def GetDesc(self) -> None: ... 
+ def AcquireNextFrame( + self, + __TimeoutInMilliseconds: UINT, + __pFrameInfo: _Pointer[DXGI_OUTDUPL_FRAME_INFO], + __ppDesktopResource: _Pointer[_Pointer[IDXGIResource]], + ) -> _HRESULT: ... + def GetFrameDirtyRects(self) -> _HRESULT: ... + def GetFrameMoveRects(self) -> _HRESULT: ... + def GetFramePointerShape(self) -> _HRESULT: ... + def MapDesktopSurface(self) -> _HRESULT: ... + def UnMapDesktopSurface(self) -> _HRESULT: ... + def ReleaseFrame(self) -> _HRESULT: ... + +class IDXGIOutput(IDXGIObject): + def GetDesc(self, __pDesc: _Pointer[DXGI_OUTPUT_DESC]) -> _HRESULT: ... + def GetDisplayModeList(self) -> _HRESULT: ... + def FindClosestMatchingMode(self) -> _HRESULT: ... + def WaitForVBlank(self) -> _HRESULT: ... + def TakeOwnership(self) -> _HRESULT: ... + def ReleaseOwnership(self) -> None: ... + def GetGammaControlCapabilities(self) -> _HRESULT: ... + def SetGammaControl(self) -> _HRESULT: ... + def GetGammaControl(self) -> _HRESULT: ... + def SetDisplaySurface(self) -> _HRESULT: ... + def GetDisplaySurfaceData(self) -> _HRESULT: ... + def GetFrameStatistics(self) -> _HRESULT: ... + +class IDXGIOutput1(IDXGIOutput): + def GetDisplayModeList1(self) -> _HRESULT: ... + def FindClosestMatchingMode1(self) -> _HRESULT: ... + def GetDisplaySurfaceData1(self) -> _HRESULT: ... + def DuplicateOutput( + self, __pDevice: _Pointer[ID3D11Device], __ppOutputDuplication: _Pointer[_Pointer[IDXGIOutputDuplication]] + ) -> _HRESULT: ... + +class IDXGIAdapter(IDXGIObject): + def EnumOutputs(self, __Output: UINT, __ppOutput: _Pointer[_Pointer[IDXGIOutput]]) -> _HRESULT: ... + def GetDesc(self) -> _HRESULT: ... + def CheckInterfaceSupport(self) -> _HRESULT: ... + +class IDXGIAdapter1(IDXGIAdapter): + def GetDesc1(self, __pDesc: _Pointer[DXGI_ADAPTER_DESC1]) -> _HRESULT: ... + +class IDXGIFactory(IDXGIObject): + def EnumAdapters(self) -> _HRESULT: ... + def MakeWindowAssociation(self) -> _HRESULT: ... + def GetWindowAssociation(self) -> _HRESULT: ... + def CreateSwapChain(self) -> _HRESULT: ... + def CreateSoftwareAdapter(self) -> _HRESULT: ... + +class IDXGIFactory1(IDXGIFactory): + def EnumAdapters1(self, __Adapter: c_uint, __ppAdapter: _Pointer[_Pointer[IDXGIAdapter1]]) -> _HRESULT: ... + def IsCurrent(self) -> BOOL: ... + +def initialize_dxgi_factory() -> _Pointer[IDXGIFactory1]: ... +def discover_dxgi_adapters(dxgi_factory: IDXGIFactory1) -> list[_Pointer[IDXGIAdapter1]]: ... +def describe_dxgi_adapter(dxgi_adapter: IDXGIAdapter1) -> Array[WCHAR]: ... +def discover_dxgi_outputs(dxgi_adapter: IDXGIAdapter) -> list[_Pointer[IDXGIOutput1]]: ... +def describe_dxgi_output(dxgi_output: IDXGIOutput) -> _DXGIOutput: ... +def initialize_dxgi_output_duplication( + dxgi_output: IDXGIOutput1, d3d_device: _Pointer[ID3D11Device] +) -> _Pointer[IDXGIOutputDuplication]: ... +def get_dxgi_output_duplication_frame( + dxgi_output_duplication: IDXGIOutputDuplication, + d3d_device: ID3D11Device, + process_func: _ProcessFunc[_ProcessFuncRegionArg, _ProcessFuncReturn] | None = ..., + width: int = ..., + height: int = ..., + region: _ProcessFuncRegionArg = ..., + rotation: int = ..., +) -> _ProcessFuncReturn | None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/shcore.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/shcore.pyi new file mode 100644 index 00000000..3fd5237e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/shcore.pyi @@ -0,0 +1,3 @@ +from ctypes.wintypes import HMONITOR + +def get_scale_factor_for_monitor(hmonitor: HMONITOR) -> float: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/user32.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/user32.pyi new file mode 100644 index 00000000..a8dfaec8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/D3DShot/d3dshot/dll/user32.pyi @@ -0,0 +1,13 @@ +import ctypes +from ctypes import wintypes + +class DISPLAY_DEVICE(ctypes.Structure): + cb: wintypes.DWORD + DeviceName: wintypes.WCHAR + DeviceString: wintypes.WCHAR + StateFlags: wintypes.DWORD + DeviceID: wintypes.WCHAR + DeviceKey: wintypes.WCHAR + +def get_display_device_name_mapping() -> dict[str, tuple[str, bool]]: ... +def get_hmonitor_by_point(x: wintypes.LONG, y: wintypes.LONG) -> wintypes.HMONITOR: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/DateTimeRange/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/DateTimeRange/METADATA.toml new file mode 100644 index 00000000..47a9b6f7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/DateTimeRange/METADATA.toml @@ -0,0 +1,3 @@ +version = "2.0.*" +requires = ["types-python-dateutil"] +obsolete_since = "2.1.0" # Released on 2023-02-19 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/DateTimeRange/datetimerange/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/DateTimeRange/datetimerange/__init__.pyi new file mode 100644 index 00000000..7f1d403a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/DateTimeRange/datetimerange/__init__.pyi @@ -0,0 +1,61 @@ +import datetime +from collections.abc import Iterable +from typing import ClassVar +from typing_extensions import Self + +from dateutil.relativedelta import relativedelta + +from .__version__ import ( + __author__ as __author__, + __copyright__ as __copyright__, + __email__ as __email__, + __license__ as __license__, + __version__ as __version__, +) + +class DateTimeRange: + NOT_A_TIME_STR: ClassVar[str] + start_time_format: str + end_time_format: str + is_output_elapse: bool + separator: str + def __init__( + self, + start_datetime: datetime.datetime | str | None = ..., + end_datetime: datetime.datetime | str | None = ..., + start_time_format: str = ..., + end_time_format: str = ..., + ) -> None: ... + @classmethod + def from_range_text( + cls, range_text: str, separator: str = ..., start_time_format: str | None = ..., end_time_format: str | None = ... + ) -> DateTimeRange: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __add__(self, other: datetime.timedelta) -> DateTimeRange: ... + def __iadd__(self, other: datetime.timedelta) -> Self: ... + def __sub__(self, other: datetime.timedelta) -> DateTimeRange: ... + def __isub__(self, other: datetime.timedelta) -> Self: ... 
+ def __contains__(self, x: datetime.timedelta | datetime.datetime | DateTimeRange | str) -> bool: ... + @property + def start_datetime(self) -> datetime.datetime: ... + @property + def end_datetime(self) -> datetime.datetime: ... + @property + def timedelta(self) -> datetime.timedelta: ... + def is_set(self) -> bool: ... + def validate_time_inversion(self) -> None: ... + def is_valid_timerange(self) -> bool: ... + def is_intersection(self, x: DateTimeRange, intersection_threshold: datetime.timedelta | None = None) -> bool: ... + def get_start_time_str(self) -> str: ... + def get_end_time_str(self) -> str: ... + def get_timedelta_second(self) -> float: ... + def set_start_datetime(self, value: datetime.datetime | str | None, timezone: str | None = ...) -> None: ... + def set_end_datetime(self, value: datetime.datetime | str | None, timezone: str | None = ...) -> None: ... + def set_time_range(self, start: datetime.datetime | str | None, end: datetime.datetime | str | None) -> None: ... + def range(self, step: datetime.timedelta | relativedelta) -> Iterable[datetime.datetime]: ... + def intersection(self, x: DateTimeRange, intersection_threshold: datetime.timedelta | None = None) -> DateTimeRange: ... + def encompass(self, x: DateTimeRange) -> DateTimeRange: ... + def truncate(self, percentage: float) -> None: ... + def split(self, separator: str | datetime.datetime) -> list[DateTimeRange]: ... + def subtract(self, x: DateTimeRange) -> DateTimeRange: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/DateTimeRange/datetimerange/__version__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/DateTimeRange/datetimerange/__version__.pyi new file mode 100644 index 00000000..48c3f57f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/DateTimeRange/datetimerange/__version__.pyi @@ -0,0 +1,6 @@ +__author__: str = ... 
+__copyright__: str +__license__: str +__version__: str +__maintainer__ = __author__ +__email__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/METADATA.toml new file mode 100644 index 00000000..3d4d518d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/METADATA.toml @@ -0,0 +1,2 @@ +version = "1.2.*" +requires = [] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/deprecated/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/deprecated/__init__.pyi new file mode 100644 index 00000000..c7b200a4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/deprecated/__init__.pyi @@ -0,0 +1,4 @@ +from .classic import deprecated as deprecated + +__credits__: str +__date__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/deprecated/classic.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/deprecated/classic.pyi new file mode 100644 index 00000000..902e6437 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/deprecated/classic.pyi @@ -0,0 +1,26 @@ +from collections.abc import Callable +from typing import Any, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +_F = TypeVar("_F", bound=Callable[..., Any]) +_Actions: TypeAlias = Literal["default", "error", "ignore", "always", "module", "once"] + +string_types: tuple[type, ...] + +class ClassicAdapter: + reason: str + version: str + action: _Actions | None + category: type[Warning] + def __init__( + self, reason: str = ..., version: str = ..., action: _Actions | None = ..., category: type[Warning] = ... + ) -> None: ... + def get_deprecated_msg(self, wrapped: Callable[..., Any], instance: object) -> str: ... + def __call__(self, wrapped: _F) -> Callable[[_F], _F]: ... + +@overload +def deprecated(__wrapped: _F) -> _F: ... +@overload +def deprecated( + reason: str = ..., *, version: str = ..., action: _Actions | None = ..., category: type[Warning] | None = ... +) -> Callable[[_F], _F]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/deprecated/sphinx.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/deprecated/sphinx.pyi new file mode 100644 index 00000000..c9b37eab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Deprecated/deprecated/sphinx.pyi @@ -0,0 +1,35 @@ +from collections.abc import Callable +from typing import Any, TypeVar +from typing_extensions import Literal + +from .classic import ClassicAdapter, _Actions + +_F = TypeVar("_F", bound=Callable[..., Any]) + +class SphinxAdapter(ClassicAdapter): + directive: Literal["versionadded", "versionchanged", "deprecated"] + reason: str + version: str + action: _Actions | None + category: type[Warning] + def __init__( + self, + directive: Literal["versionadded", "versionchanged", "deprecated"], + reason: str = ..., + version: str = ..., + action: _Actions | None = ..., + category: type[Warning] = ..., + line_length: int = ..., + ) -> None: ... + def __call__(self, wrapped: _F) -> Callable[[_F], _F]: ... 
+ +def versionadded(reason: str = ..., version: str = ..., line_length: int = ...) -> Callable[[_F], _F]: ... +def versionchanged(reason: str = ..., version: str = ..., line_length: int = ...) -> Callable[[_F], _F]: ... +def deprecated( + reason: str = ..., + version: str = ..., + line_length: int = ..., + *, + action: _Actions | None = ..., + category: type[Warning] | None = ..., +) -> Callable[[_F], _F]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..57820b2c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# Stub-only module. +exifread._types diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/METADATA.toml new file mode 100644 index 00000000..4a8e90c0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/METADATA.toml @@ -0,0 +1 @@ +version = "3.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/__init__.pyi new file mode 100644 index 00000000..c8b41bc1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/__init__.pyi @@ -0,0 +1,17 @@ +from logging import Logger +from typing import Any + +from ._types import Reader + +__version__: str +logger: Logger + +def process_file( + fh: Reader, + stop_tag: str = ..., + details: bool = ..., + strict: bool = ..., + debug: bool = ..., + truncate_tags: bool = ..., + auto_seek: bool = ..., +) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/_types.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/_types.pyi new file mode 100644 index 00000000..819d7c10 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/_types.pyi @@ -0,0 +1,14 @@ +# Stubs-only module with type aliases for ExifRead. + +from typing import Any, Protocol +from typing_extensions import Literal, TypeAlias + +# The second item of the value tuple - if it exists - can be a variety of types, +# including a callable or another dict. +TagDict: TypeAlias = dict[int, tuple[str] | tuple[str, Any]] + +class Reader(Protocol): + def __iter__(self) -> bytes: ... + def read(self, __size: int) -> bytes: ... + def tell(self) -> int: ... + def seek(self, __offset: int, __whence: Literal[0, 1] = ...) -> object: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/classes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/classes.pyi new file mode 100644 index 00000000..669fb741 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/classes.pyi @@ -0,0 +1,48 @@ +from logging import Logger +from typing import Any +from typing_extensions import Literal + +from ._types import Reader, TagDict + +logger: Logger + +class IfdTag: + printable: str + tag: int + field_type: int + field_offset: int + field_length: int + values: Any # either string, bytes or list of data items + def __init__(self, printable: str, tag: int, field_type: int, values: Any, field_offset: int, field_length: int) -> None: ... + +class ExifHeader: + file_handle: Reader + endian: Literal["I", "M"] + offset: int + fake_exif: bool + strict: bool + debug: bool + detailed: bool + truncate_tags: bool + tags: dict[str, Any] + def __init__( + self, + file_handle: Reader, + endian: Literal["I", "M"], + offset: int, + fake_exif: bool, + strict: bool, + debug: bool = ..., + detailed: bool = ..., + truncate_tags: bool = ..., + ) -> None: ... + def s2n(self, offset: int, length: int, signed: bool = ...) -> int: ... + def n2b(self, offset: int, length: int) -> bytes: ... + def list_ifd(self) -> list[int]: ... + def dump_ifd( + self, ifd: int, ifd_name: str, tag_dict: TagDict | None = ..., relative: int = ..., stop_tag: str = ... + ) -> None: ... + def extract_tiff_thumbnail(self, thumb_ifd: int) -> None: ... + def extract_jpeg_thumbnail(self) -> None: ... + def decode_maker_note(self) -> None: ... + def parse_xmp(self, xmp_bytes: bytes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/exceptions.pyi new file mode 100644 index 00000000..47b39e30 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/exceptions.pyi @@ -0,0 +1,2 @@ +class InvalidExif(Exception): ... +class ExifNotFound(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/exif_log.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/exif_log.pyi new file mode 100644 index 00000000..15899f4d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/exif_log.pyi @@ -0,0 +1,24 @@ +import logging +from typing import TextIO + +TEXT_NORMAL: int +TEXT_BOLD: int +TEXT_RED: int +TEXT_GREEN: int +TEXT_YELLOW: int +TEXT_BLUE: int +TEXT_MAGENTA: int +TEXT_CYAN: int + +def get_logger() -> logging.Logger: ... +def setup_logger(debug: bool, color: bool) -> None: ... + +class Formatter(logging.Formatter): + color: bool + debug: bool + def __init__(self, debug: bool = ..., color: bool = ...) -> None: ... + +class Handler(logging.StreamHandler[TextIO]): + color: bool + debug: bool + def __init__(self, log_level: logging._Level, debug: bool = ..., color: bool = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/heic.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/heic.pyi new file mode 100644 index 00000000..7fd0e1f8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/heic.pyi @@ -0,0 +1,56 @@ +from collections.abc import Callable +from logging import Logger + +from ._types import Reader + +logger: Logger + +class WrongBox(Exception): ... +class NoParser(Exception): ... +class BoxVersion(Exception): ... +class BadSize(Exception): ... + +class Box: + version: int + minor_version: int + item_count: int + size: int + after: int + pos: int + compat: list[bytes] + base_offset: int + subs: dict[str, Box] + locs: dict[int, list[tuple[int, int]]] + exif_infe: Box | None + item_id: int + item_type: bytes + item_name: bytes + item_protection_index: int + major_brand: bytes + offset_size: int + length_size: int + base_offset_size: int + index_size: int + flags: int + name: str + def __init__(self, name: str) -> None: ... + def set_sizes(self, offset: int, length: int, base_offset: int, index: int) -> None: ... + def set_full(self, vflags: int) -> None: ... + +class HEICExifFinder: + file_handle: Reader + def __init__(self, file_handle: Reader) -> None: ... + def get(self, nbytes: int) -> bytes: ... + def get16(self) -> int: ... + def get32(self) -> int: ... + def get64(self) -> int: ... + def get_int4x2(self) -> tuple[int, int]: ... + def get_int(self, size: int) -> int: ... + def get_string(self) -> bytes: ... + def next_box(self) -> Box: ... + def get_full(self, box: Box) -> None: ... + def skip(self, box: Box) -> None: ... + def expect_parse(self, name: str) -> Box: ... + def get_parser(self, box: Box) -> Callable[[Box], None]: ... + def parse_box(self, box: Box) -> Box: ... + def find_exif(self) -> tuple[int, bytes]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/jpeg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/jpeg.pyi new file mode 100644 index 00000000..9b179150 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/jpeg.pyi @@ -0,0 +1,7 @@ +from logging import Logger + +from ._types import Reader + +logger: Logger + +def find_jpeg_exif(fh: Reader, data: bytes, fake_exif: bool) -> tuple[int, bytes, bool]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/__init__.pyi new file mode 100644 index 00000000..ce0670d2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/__init__.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +from exifread.tags.exif import EXIF_TAGS as EXIF_TAGS +from exifread.tags.makernote import ( + apple as apple, + canon as canon, + casio as casio, + fujifilm as fujifilm, + nikon as nikon, + olympus as olympus, +) + +DEFAULT_STOP_TAG: str +FIELD_TYPES: Incomplete +IGNORE_TAGS: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/exif.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/exif.pyi new file mode 100644 index 00000000..569609e9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/exif.pyi @@ -0,0 +1,7 @@ +from exifread._types import TagDict + +INTEROP_TAGS: TagDict +INTEROP_INFO: tuple[str, TagDict] +GPS_TAGS: TagDict +GPS_INFO: tuple[str, TagDict] +EXIF_TAGS: TagDict diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/apple.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/apple.pyi new file mode 100644 index 00000000..c7207263 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/apple.pyi @@ -0,0 +1,3 @@ +from exifread._types import TagDict + +TAGS: TagDict diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/canon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/canon.pyi new file mode 100644 index 00000000..97d4f93c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/canon.pyi @@ -0,0 +1,26 @@ +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +from exifread._types import TagDict + +TAGS: TagDict + +CAMERA_SETTINGS: TagDict +FOCAL_LENGTH: TagDict +SHOT_INFO: TagDict +AF_INFO_2: TagDict +FILE_INFO: TagDict + +def add_one(value: int) -> int: ... +def subtract_one(value: int) -> int: ... +def convert_temp(value: int) -> str: ... 
+ +_CameraInfo: TypeAlias = dict[int, tuple[str, str, Callable[[int], Any]]] + +CAMERA_INFO_TAG_NAME: str +CAMERA_INFO_5D: _CameraInfo +CAMERA_INFO_5DMKII: _CameraInfo +CAMERA_INFO_5DMKIII: _CameraInfo +CAMERA_INFO_600D: _CameraInfo +CAMERA_INFO_MODEL_MAP: dict[str, _CameraInfo] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/casio.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/casio.pyi new file mode 100644 index 00000000..c7207263 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/casio.pyi @@ -0,0 +1,3 @@ +from exifread._types import TagDict + +TAGS: TagDict diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/fujifilm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/fujifilm.pyi new file mode 100644 index 00000000..c7207263 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/fujifilm.pyi @@ -0,0 +1,3 @@ +from exifread._types import TagDict + +TAGS: TagDict diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/nikon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/nikon.pyi new file mode 100644 index 00000000..ffa7101f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/nikon.pyi @@ -0,0 +1,6 @@ +from exifread._types import TagDict + +def ev_bias(seq: list[int]) -> str: ... + +TAGS_NEW: TagDict +TAGS_OLD: TagDict diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/olympus.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/olympus.pyi new file mode 100644 index 00000000..0744c773 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/tags/makernote/olympus.pyi @@ -0,0 +1,6 @@ +from exifread._types import TagDict + +def special_mode(val: bytes) -> str: ... + +TAGS: TagDict +TAG_0x2020: TagDict diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/utils.pyi new file mode 100644 index 00000000..d534019c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ExifRead/exifread/utils.pyi @@ -0,0 +1,22 @@ +from collections.abc import Mapping +from fractions import Fraction +from typing import Any, TypeVar, overload +from typing_extensions import Self + +_T = TypeVar("_T") + +@overload +def ord_(dta: str) -> int: ... # type: ignore[misc] +@overload +def ord_(dta: _T) -> _T: ... +def make_string(seq: str | list[int]) -> str: ... +def make_string_uc(seq: str | list[int]) -> str: ... +def get_gps_coords(tags: Mapping[str, Any]) -> tuple[float, float]: ... + +class Ratio(Fraction): + def __new__(cls, numerator: int = ..., denominator: int | None = ...) -> Self: ... + @property + def num(self) -> int: ... + @property + def den(self) -> int: ... + def decimal(self) -> float: ... 
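The `exifread/utils.pyi` stub above pins down a small public surface: `ord_`, `make_string`/`make_string_uc`, `get_gps_coords`, and the `Ratio` subclass of `Fraction`. Below is a minimal, purely illustrative sketch written against those declared signatures; it is not part of the vendored diff, and the tag mapping for `get_gps_coords` is assumed to come from a separate `exifread.process_file()` call.

```python
# Illustrative sketch against the exifread/utils.pyi signatures above (not part of the diff).
from exifread.utils import Ratio, make_string, get_gps_coords

r = Ratio(1, 3)                      # Ratio subclasses fractions.Fraction per the stub
print(r.num, r.den, r.decimal())     # numerator, denominator, and the float value

print(make_string([72, 105]))        # accepts str | list[int]; these code points spell "Hi"

# get_gps_coords takes a Mapping[str, Any] of parsed EXIF tags and returns a
# (latitude, longitude) pair of floats per the stub, e.g.:
#     tags = exifread.process_file(open("photo.jpg", "rb"))
#     lat, lon = get_gps_coords(tags)
```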
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/METADATA.toml new file mode 100644 index 00000000..b39fb0d7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/METADATA.toml @@ -0,0 +1,6 @@ +version = "3.0.*" +# Requires a version of flask with a `py.typed` file +requires = ["Flask>=2.0.0"] + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/__init__.pyi new file mode 100644 index 00000000..b0962a73 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/__init__.pyi @@ -0,0 +1,7 @@ +from logging import Logger + +from .decorator import cross_origin as cross_origin +from .extension import CORS as CORS +from .version import __version__ as __version__ + +rootlogger: Logger diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/core.pyi new file mode 100644 index 00000000..475a0a2f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/core.pyi @@ -0,0 +1,64 @@ +from collections.abc import Iterable +from datetime import timedelta +from logging import Logger +from re import Pattern +from typing import Any, TypeVar, overload +from typing_extensions import TypeAlias, TypedDict + +import flask + +_IterableT = TypeVar("_IterableT", bound=Iterable[Any]) +_T = TypeVar("_T") +_MultiDict: TypeAlias = Any # werkzeug is not part of typeshed + +class _Options(TypedDict, total=False): + resources: dict[str, dict[str, Any]] | list[str] | str | None + origins: str | list[str] | None + methods: str | list[str] | None + expose_headers: str | list[str] | None + allow_headers: str | list[str] | None + supports_credentials: bool | None + max_age: timedelta | int | str | None + send_wildcard: bool | None + vary_header: bool | None + automatic_options: bool | None + intercept_exceptions: bool | None + always_send: bool | None + +LOG: Logger +ACL_ORIGIN: str +ACL_METHODS: str +ACL_ALLOW_HEADERS: str +ACL_EXPOSE_HEADERS: str +ACL_CREDENTIALS: str +ACL_MAX_AGE: str +ACL_REQUEST_METHOD: str +ACL_REQUEST_HEADERS: str +ALL_METHODS: list[str] +CONFIG_OPTIONS: list[str] +FLASK_CORS_EVALUATED: str +RegexObject: type[Pattern[str]] +DEFAULT_OPTIONS: _Options + +def parse_resources(resources: dict[str, _Options] | Iterable[str] | str | Pattern[str]) -> list[tuple[str, _Options]]: ... +def get_regexp_pattern(regexp: str | Pattern[str]) -> str: ... +def get_cors_origins(options: _Options, request_origin: str | None) -> list[str] | None: ... +def get_allow_headers(options: _Options, acl_request_headers: str | None) -> str | None: ... +def get_cors_headers(options: _Options, request_headers: dict[str, Any], request_method: str) -> _MultiDict: ... +def set_cors_headers(resp: flask.Response, options: _Options) -> flask.Response: ... +def probably_regex(maybe_regex: str | Pattern[str]) -> bool: ... +def re_fix(reg: str) -> str: ... +def try_match_any(inst: str, patterns: Iterable[str | Pattern[str]]) -> bool: ... 
+def try_match(request_origin: str, maybe_regex: str | Pattern[str]) -> bool: ... +def get_cors_options(appInstance: flask.Flask | None, *dicts: _Options) -> _Options: ... +def get_app_kwarg_dict(appInstance: flask.Flask | None = ...) -> _Options: ... +def flexible_str(obj: object) -> str | None: ... +def serialize_option(options_dict: _Options, key: str, upper: bool = ...) -> None: ... +@overload +def ensure_iterable(inst: str) -> list[str]: ... # type: ignore[misc] +@overload +def ensure_iterable(inst: _IterableT) -> _IterableT: ... +@overload +def ensure_iterable(inst: _T) -> list[_T]: ... +def sanitize_regex_param(param: str | list[str]) -> list[str]: ... +def serialize_options(opts: _Options) -> _Options: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/decorator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/decorator.pyi new file mode 100644 index 00000000..8eb81d13 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/decorator.pyi @@ -0,0 +1,23 @@ +from collections.abc import Callable, Iterable +from datetime import timedelta +from logging import Logger +from re import Pattern +from typing import Any +from typing_extensions import ParamSpec + +_P = ParamSpec("_P") + +LOG: Logger + +def cross_origin( + *args: Any, + origins: str | Pattern[str] | Iterable[str | Pattern[str]] | None = ..., + methods: str | list[str] | None = ..., + expose_headers: str | list[str] | None = ..., + allow_headers: str | Pattern[str] | Iterable[str | Pattern[str]] | None = ..., + supports_credentials: bool | None = ..., + max_age: timedelta | int | str | None = ..., + send_wildcard: bool | None = ..., + vary_header: bool | None = ..., + automatic_options: bool | None = ..., +) -> Callable[[Callable[_P, Any]], Callable[_P, Any]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/extension.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/extension.pyi new file mode 100644 index 00000000..aef6c137 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/extension.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Iterable +from datetime import timedelta +from logging import Logger +from typing import Any + +import flask + +LOG: Logger + +class CORS: + def __init__( + self, + app: Incomplete | None = ..., + *, + resources: dict[str, dict[str, Any]] | list[str] | str | None = ..., + origins: str | list[str] | None = ..., + methods: str | list[str] | None = ..., + expose_headers: str | list[str] | None = ..., + allow_headers: str | list[str] | None = ..., + supports_credentials: bool | None = ..., + max_age: timedelta | int | str | None = ..., + send_wildcard: bool | None = ..., + vary_header: bool | None = ..., + **kwargs: Any, + ) -> None: ... + def init_app( + self, + app: flask.Flask, + *, + resources: dict[str, dict[str, Any]] | list[str] | str = ..., + origins: str | list[str] = ..., + methods: str | list[str] = ..., + expose_headers: str | list[str] = ..., + allow_headers: str | list[str] = ..., + supports_credentials: bool = ..., + max_age: timedelta | int | str | None = ..., + send_wildcard: bool = ..., + vary_header: bool = ..., + **kwargs: Any, + ) -> None: ... 
+ +def make_after_request_function(resources: Iterable[tuple[str, dict[str, Any]]]) -> Callable[..., Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/version.pyi new file mode 100644 index 00000000..bda5b5a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Cors/flask_cors/version.pyi @@ -0,0 +1 @@ +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Migrate/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Migrate/METADATA.toml new file mode 100644 index 00000000..7e798f1a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Migrate/METADATA.toml @@ -0,0 +1,6 @@ +version = "4.0.*" +# Requires a version of flask with a `py.typed` file +requires = ["Flask>=2.0.0", "types-Flask-SQLAlchemy"] + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Migrate/flask_migrate/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Migrate/flask_migrate/__init__.pyi new file mode 100644 index 00000000..6a68c7c6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-Migrate/flask_migrate/__init__.pyi @@ -0,0 +1,106 @@ +from collections.abc import Callable, Iterable, Sequence +from logging import Logger +from typing import Any, TypeVar +from typing_extensions import ParamSpec, TypeAlias + +import flask +from flask_sqlalchemy import SQLAlchemy + +_T = TypeVar("_T") +_P = ParamSpec("_P") +_ConfigureCallback: TypeAlias = Callable[[Config], Config] + +alembic_version: tuple[int, int, int] +log: Logger + +class Config: # should inherit from alembic.config.Config which is not possible yet + template_directory: str | None + def __init__(self, *args, **kwargs) -> None: ... + def get_template_directory(self) -> str: ... + +class Migrate: + configure_callbacks: list[_ConfigureCallback] + db: SQLAlchemy | None + directory: str + alembic_ctx_kwargs: dict[str, Any] + def __init__( + self, + app: flask.Flask | None = ..., + db: SQLAlchemy | None = ..., + directory: str = ..., + command: str = ..., + compare_type: bool = ..., + render_as_batch: bool = ..., + **kwargs, + ) -> None: ... + def init_app( + self, + app: flask.Flask, + db: SQLAlchemy | None = ..., + directory: str | None = ..., + command: str | None = ..., + compare_type: bool | None = ..., + render_as_batch: bool | None = ..., + **kwargs, + ) -> None: ... + def configure(self, f: _ConfigureCallback) -> _ConfigureCallback: ... + def call_configure_callbacks(self, config: Config): ... + def get_config( + self, directory: str | None = ..., x_arg: str | Sequence[str] | None = ..., opts: Iterable[str] | None = ... + ): ... + +def catch_errors(f: Callable[_P, _T]) -> Callable[_P, _T]: ... +def list_templates() -> None: ... +def init(directory: str | None = ..., multidb: bool = ..., template: str | None = ..., package: bool = ...) -> None: ... +def revision( + directory: str | None = ..., + message: str | None = ..., + autogenerate: bool = ..., + sql: bool = ..., + head: str = ..., + splice: bool = ..., + branch_label: str | None = ..., + version_path: str | None = ..., + rev_id: str | None = ..., +) -> None: ... 
+def migrate( + directory: str | None = ..., + message: str | None = ..., + sql: bool = ..., + head: str = ..., + splice: bool = ..., + branch_label: str | None = ..., + version_path: str | None = ..., + rev_id: str | None = ..., + x_arg: str | Sequence[str] | None = ..., +) -> None: ... +def edit(directory: str | None = ..., revision: str = ...) -> None: ... +def merge( + directory: str | None = ..., + revisions: str = ..., + message: str | None = ..., + branch_label: str | None = ..., + rev_id: str | None = ..., +) -> None: ... +def upgrade( + directory: str | None = ..., + revision: str = ..., + sql: bool = ..., + tag: str | None = ..., + x_arg: str | Sequence[str] | None = ..., +) -> None: ... +def downgrade( + directory: str | None = ..., + revision: str = ..., + sql: bool = ..., + tag: str | None = ..., + x_arg: str | Sequence[str] | None = ..., +) -> None: ... +def show(directory: str | None = ..., revision: str = ...) -> None: ... +def history( + directory: str | None = ..., rev_range: str | None = ..., verbose: bool = ..., indicate_current: bool = ... +) -> None: ... +def heads(directory: str | None = ..., verbose: bool = ..., resolve_dependencies: bool = ...) -> None: ... +def branches(directory: str | None = ..., verbose: bool = ...) -> None: ... +def current(directory: str | None = ..., verbose: bool = ...) -> None: ... +def stamp(directory: str | None = ..., revision: str = ..., sql: bool = ..., tag: str | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..2187a236 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# Needed due dynamic attribute generation +flask_sqlalchemy.SQLAlchemy.__getattr__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/METADATA.toml new file mode 100644 index 00000000..d3d266df --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/METADATA.toml @@ -0,0 +1,6 @@ +version = "2.5.*" +requires = ["types-SQLAlchemy"] +obsolete_since = "3.0.1" # Released on 2022-10-11 + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/flask_sqlalchemy/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/flask_sqlalchemy/__init__.pyi new file mode 100644 index 00000000..e2fadfce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/flask_sqlalchemy/__init__.pyi @@ -0,0 +1,97 @@ +from _typeshed import Incomplete +from collections.abc import Generator +from typing import Any, Generic, TypeVar + +from sqlalchemy.orm import scoped_session +from sqlalchemy.orm.query import Query +from sqlalchemy.orm.session import Session + +from . 
import utils as utils +from .model import DefaultMeta as DefaultMeta, Model as Model + +models_committed: Any +before_models_committed: Any + +class SignallingSession(Session): + app: Any + def __init__(self, db, autocommit: bool = ..., autoflush: bool = ..., **options) -> None: ... + def get_bind(self, mapper: Incomplete | None = ..., clause: Incomplete | None = ...): ... # type: ignore[override] + +def get_debug_queries(): ... + +_T = TypeVar("_T") + +class BaseQuery(Query[_T]): + def get_or_404(self, ident, description: Incomplete | None = ...): ... + def first_or_404(self, description: Incomplete | None = ...): ... + def paginate( + self, + page: Incomplete | None = ..., + per_page: Incomplete | None = ..., + error_out: bool = ..., + max_per_page: Incomplete | None = ..., + ) -> Pagination[_T]: ... + +class Pagination(Generic[_T]): + query: BaseQuery[_T] | None + page: int + per_page: int + total: int | None + items: Any + def __init__(self, query: BaseQuery[_T] | None, page: int, per_page: int, total: int | None, items) -> None: ... + @property + def pages(self) -> int: ... + def prev(self, error_out: bool = ...) -> Pagination[_T]: ... + @property + def prev_num(self) -> int | None: ... + @property + def has_prev(self) -> bool: ... + def next(self, error_out: bool = ...) -> Pagination[_T]: ... + @property + def has_next(self) -> bool: ... + @property + def next_num(self) -> int | None: ... + def iter_pages( + self, left_edge: int = ..., left_current: int = ..., right_current: int = ..., right_edge: int = ... + ) -> Generator[int | None, None, None]: ... + +def get_state(app): ... + +class SQLAlchemy: + Query: Any + use_native_unicode: Any + session: scoped_session + Model: Model + app: Any + def __init__( + self, + app: Incomplete | None = ..., + use_native_unicode: bool = ..., + session_options: Incomplete | None = ..., + metadata: Incomplete | None = ..., + query_class=..., + model_class=..., + engine_options: Incomplete | None = ..., + ) -> None: ... + @property + def metadata(self): ... + def create_scoped_session(self, options: Incomplete | None = ...): ... + def create_session(self, options): ... + def make_declarative_base(self, model, metadata: Incomplete | None = ...): ... + def init_app(self, app): ... + def apply_pool_defaults(self, app, options): ... + def apply_driver_hacks(self, app, sa_url, options): ... + @property + def engine(self): ... + def make_connector(self, app: Incomplete | None = ..., bind: Incomplete | None = ...): ... + def get_engine(self, app: Incomplete | None = ..., bind: Incomplete | None = ...): ... + def create_engine(self, sa_url, engine_opts): ... + def get_app(self, reference_app: Incomplete | None = ...): ... + def get_tables_for_bind(self, bind: Incomplete | None = ...): ... + def get_binds(self, app: Incomplete | None = ...): ... + def create_all(self, bind: str = ..., app: Incomplete | None = ...) -> None: ... + def drop_all(self, bind: str = ..., app: Incomplete | None = ...) -> None: ... + def reflect(self, bind: str = ..., app: Incomplete | None = ...) -> None: ... + def __getattr__(self, name: str) -> Any: ... # exposes dynamically classes of SQLAlchemy + +class FSADeprecationWarning(DeprecationWarning): ... 
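The `SQLAlchemy`, `BaseQuery`, and `Pagination` declarations above describe the classic Flask-SQLAlchemy 2.5 API. The following is a hedged usage sketch showing how those typed entry points fit together; the `User` model, the SQLite URI, and the page sizes are invented for the example and are not part of the stubs.

```python
# Illustrative sketch of the Flask-SQLAlchemy 2.5 surface typed above (not part of the diff).
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
db = SQLAlchemy(app)                          # SQLAlchemy(app=None, ...) per the stub

class User(db.Model):                         # names like db.Column resolve via __getattr__
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80))

with app.app_context():
    db.create_all()                           # create_all(bind=..., app=...) in the stub
    page = User.query.paginate(page=1, per_page=10, error_out=False)
    # `page` is a Pagination[User]: .items, .total, .has_next, .iter_pages(), ...
```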
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/flask_sqlalchemy/model.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/flask_sqlalchemy/model.pyi new file mode 100644 index 00000000..f1780c09 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/flask_sqlalchemy/model.pyi @@ -0,0 +1,25 @@ +from re import Pattern +from typing import Any + +from sqlalchemy import Table +from sqlalchemy.ext.declarative import DeclarativeMeta +from sqlalchemy.orm import Query + +def should_set_tablename(cls: type) -> bool: ... + +camelcase_re: Pattern[str] + +def camel_to_snake_case(name: str) -> str: ... + +class NameMetaMixin(type): + def __init__(cls, name: str, bases: tuple[type, ...], d: dict[str, Any]) -> None: ... + def __table_cls__(cls, *args, **kwargs) -> Table | None: ... + +class BindMetaMixin(type): + def __init__(cls, name: str, bases: tuple[type, ...], d: dict[str, Any]) -> None: ... + +class DefaultMeta(NameMetaMixin, BindMetaMixin, DeclarativeMeta): ... + +class Model: + query_class: type[Query[Any]] | None + query: Query[Any] | None diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/flask_sqlalchemy/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/flask_sqlalchemy/utils.pyi new file mode 100644 index 00000000..190d7248 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Flask-SQLAlchemy/flask_sqlalchemy/utils.pyi @@ -0,0 +1,3 @@ +def parse_version(v: str) -> tuple[int, int, int]: ... +def sqlalchemy_version(op: str, val: str) -> bool: ... +def engine_config_warning(config, version: str, deprecated_config_key: str, engine_option) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/JACK-Client/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/JACK-Client/METADATA.toml new file mode 100644 index 00000000..9bc81927 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/JACK-Client/METADATA.toml @@ -0,0 +1,11 @@ +version = "0.5.*" +# Requires a version of numpy with a `py.typed` file +requires = ["numpy>=1.20", "types-cffi"] + +[tool.stubtest] +# darwin and win32 are equivalent +platforms = ["darwin", "linux"] +apt_dependencies = ["libjack-dev"] +brew_dependencies = ["jack"] +# No need to install on the CI. Leaving here as information for Windows contributors. +# choco_dependencies = ["jack"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/JACK-Client/jack/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/JACK-Client/jack/__init__.pyi new file mode 100644 index 00000000..cbe7c6c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/JACK-Client/jack/__init__.pyi @@ -0,0 +1,328 @@ +import sys +from collections.abc import Callable, Generator, Iterable, Iterator, Sequence +from typing import Any, NoReturn, overload +from typing_extensions import Literal, Self + +import numpy +from _cffi_backend import _CDataBase +from numpy.typing import NDArray + +# Aka jack_position_t +# Actual type: _cffi_backend.__CDataOwn +# This is not a real subclassing. 
Just ensuring type-checkers sees this type as compatible with _CDataBase +# pyright has no error code for subclassing final +class _JackPositionT(_CDataBase): # type: ignore[misc] # pyright: ignore + audio_frames_per_video_frame: float + bar: int + bar_start_tick: float + bbt_offset: int + beat: int + beat_type: float + beats_per_bar: float + beats_per_minute: float + frame: int + frame_rate: int + frame_time: float + next_time: float + padding: _CDataBase # + tick: int + ticks_per_beat: float + unique_1: int + unique_2: int + usecs: int + valid: int + video_offset: int + +class _CBufferType: + @overload + def __getitem__(self, key: int) -> str: ... + @overload + def __getitem__(self, key: slice) -> bytes: ... + @overload + def __setitem__(self, key: int, val: str) -> None: ... + @overload + def __setitem__(self, key: slice, val: bytes) -> None: ... + def __len__(self) -> int: ... + def __bytes__(self) -> bytes: ... + +STOPPED: int +ROLLING: int +STARTING: int +NETSTARTING: int +PROPERTY_CREATED: int +PROPERTY_CHANGED: int +PROPERTY_DELETED: int +POSITION_BBT: int +POSITION_TIMECODE: int +POSITION_BBT_FRAME_OFFSET: int +POSITION_AUDIO_VIDEO_RATIO: int +POSITION_VIDEO_FRAME_OFFSET: int + +class JackError(Exception): ... + +class JackErrorCode(JackError): + def __init__(self, message: str, code: int) -> None: ... + message: str = ... + code: int = ... + +class JackOpenError(JackError): + def __init__(self, name: str, status: Status) -> None: ... + name: str = ... + status: Status = ... + +class Client: + def __init__( + self, + name: str, + use_exact_name: bool = ..., + no_start_server: bool = ..., + servername: str | None = ..., + session_id: str | None = ..., + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: object) -> None: ... + @property + def name(self) -> str: ... + @property + def uuid(self) -> str: ... + @property + def samplerate(self) -> int: ... + @property + def blocksize(self) -> int: ... + @blocksize.setter + def blocksize(self, blocksize: int) -> None: ... + @property + def status(self) -> Status: ... + @property + def realtime(self) -> bool: ... + @property + def frames_since_cycle_start(self) -> int: ... + @property + def frame_time(self) -> int: ... + @property + def last_frame_time(self) -> int: ... + @property + def inports(self) -> Ports: ... + @property + def outports(self) -> Ports: ... + @property + def midi_inports(self) -> Ports: ... + @property + def midi_outports(self) -> Ports: ... + def owns(self, port: str | Port) -> bool: ... + def activate(self) -> None: ... + def deactivate(self, ignore_errors: bool = ...) -> None: ... + def cpu_load(self) -> float: ... + def close(self, ignore_errors: bool = ...) -> None: ... + def connect(self, source: str | Port, destination: str | Port) -> None: ... + def disconnect(self, source: str | Port, destination: str | Port) -> None: ... + def transport_start(self) -> None: ... + def transport_stop(self) -> None: ... + @property + def transport_state(self) -> TransportState: ... + @property + def transport_frame(self) -> int: ... + @transport_frame.setter + def transport_frame(self, frame: int) -> None: ... + def transport_locate(self, frame: int) -> None: ... + def transport_query(self) -> tuple[TransportState, dict[str, Any]]: ... # Anyof[int, float, _CDataBase] + def transport_query_struct(self) -> tuple[TransportState, _JackPositionT]: ... + def transport_reposition_struct(self, position: _JackPositionT) -> None: ... + def set_sync_timeout(self, timeout: int) -> None: ... 
+ def set_freewheel(self, onoff: bool) -> None: ... + def set_shutdown_callback(self, callback: Callable[[Status, str], object]) -> None: ... + def set_process_callback(self, callback: Callable[[int], object]) -> None: ... + def set_freewheel_callback(self, callback: Callable[[bool], object]) -> None: ... + def set_blocksize_callback(self, callback: Callable[[int], object]) -> None: ... + def set_samplerate_callback(self, callback: Callable[[int], object]) -> None: ... + def set_client_registration_callback(self, callback: Callable[[str, bool], object]) -> None: ... + def set_port_registration_callback( + self, callback: Callable[[Port, bool], object] | None = ..., only_available: bool = ... + ) -> None: ... + def set_port_connect_callback( + self, callback: Callable[[Port, Port, bool], object] | None = ..., only_available: bool = ... + ) -> None: ... + def set_port_rename_callback( + self, callback: Callable[[Port, str, str], object] | None = ..., only_available: bool = ... + ) -> None: ... + def set_graph_order_callback(self, callback: Callable[[], object]) -> None: ... + def set_xrun_callback(self, callback: Callable[[float], object]) -> None: ... + def set_sync_callback(self, callback: Callable[[int, _JackPositionT], object] | None) -> None: ... + def release_timebase(self) -> None: ... + def set_timebase_callback( + self, callback: Callable[[int, int, _JackPositionT, bool], object] | None = ..., conditional: bool = ... + ) -> bool: ... + def set_property_change_callback(self, callback: Callable[[int, str, int], object]) -> None: ... + def get_uuid_for_client_name(self, name: str) -> str: ... + def get_client_name_by_uuid(self, uuid: str) -> str: ... + def get_port_by_name(self, name: str) -> Port: ... + def get_all_connections(self, port: Port) -> list[Port]: ... + def get_ports( + self, + name_pattern: str = ..., + is_audio: bool = ..., + is_midi: bool = ..., + is_input: bool = ..., + is_output: bool = ..., + is_physical: bool = ..., + can_monitor: bool = ..., + is_terminal: bool = ..., + ) -> list[Port]: ... + def set_property(self, subject: int | str, key: str, value: str | bytes, type: str = ...) -> None: ... + def remove_property(self, subject: int | str, key: str) -> None: ... + def remove_properties(self, subject: int | str) -> int: ... + def remove_all_properties(self) -> None: ... + +class Port: + # + def __init__(self, port_ptr: _CDataBase, client: Client) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + @property + def name(self) -> str: ... + @property + def shortname(self) -> str: ... + @shortname.setter + def shortname(self, shortname: str) -> None: ... + @property + def aliases(self) -> list[str]: ... + def set_alias(self, alias: str) -> None: ... + def unset_alias(self, alias: str) -> None: ... + @property + def uuid(self) -> str: ... + @property + def is_audio(self) -> bool: ... + @property + def is_midi(self) -> bool: ... + @property + def is_input(self) -> bool: ... + @property + def is_output(self) -> bool: ... + @property + def is_physical(self) -> bool: ... + @property + def can_monitor(self) -> bool: ... + @property + def is_terminal(self) -> bool: ... + def request_monitor(self, onoff: bool) -> None: ... + +class MidiPort(Port): + @property + def is_audio(self) -> Literal[False]: ... + @property + def is_midi(self) -> Literal[True]: ... + +class OwnPort(Port): + @property + def number_of_connections(self) -> int: ... + @property + def connections(self) -> list[Port]: ... 
+ def is_connected_to(self, port: str | Port) -> bool: ... + def connect(self, port: str | Port) -> None: ... + def disconnect(self, other: str | Port | None = ...) -> None: ... + def unregister(self) -> None: ... + def get_buffer(self) -> _CBufferType: ... + def get_array(self) -> NDArray[numpy.float32]: ... + +class OwnMidiPort(MidiPort, OwnPort): + def __init__(self, port_ptr: _CDataBase, client: Client) -> None: ... + # The implementation raises NotImplementedError, but this is not an abstract class. + # `get_buffer()` and `get_array()` are disabled for OwnMidiPort + def get_buffer(self) -> NoReturn: ... + def get_array(self) -> NoReturn: ... + @property + def max_event_size(self) -> int: ... + @property + def lost_midi_events(self) -> int: ... + def incoming_midi_events(self) -> Generator[tuple[int, _CBufferType], None, None]: ... + def clear_buffer(self) -> None: ... + def write_midi_event(self, time: int, event: bytes | Sequence[int] | _CBufferType) -> None: ... + def reserve_midi_event(self, time: int, size: int) -> _CBufferType: ... + +class Ports: + def __init__(self, client: Client, porttype: str, flag: int) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, name: str) -> Port: ... + def __iter__(self) -> Iterator[Port]: ... + def register(self, shortname: str, is_terminal: bool = ..., is_physical: bool = ...) -> Port: ... + def clear(self) -> None: ... + +class RingBuffer: + def __init__(self, size: int) -> None: ... + @property + def write_space(self) -> int: ... + def write(self, data: bytes | Iterable[int] | _CBufferType) -> int: ... + @property + def write_buffers(self) -> tuple[_CBufferType, _CBufferType]: ... + def write_advance(self, size: int) -> None: ... + @property + def read_space(self) -> int: ... + def read(self, size: int) -> _CBufferType: ... + def peek(self, size: int) -> _CBufferType: ... + @property + def read_buffers(self) -> tuple[_CBufferType, _CBufferType]: ... + def read_advance(self, size: int) -> None: ... + def mlock(self) -> None: ... + def reset(self, size: int | None = ...) -> None: ... + @property + def size(self) -> int: ... + +class Status: + def __init__(self, code: int) -> None: ... + @property + def failure(self) -> bool: ... + @property + def invalid_option(self) -> bool: ... + @property + def name_not_unique(self) -> bool: ... + @property + def server_started(self) -> bool: ... + @property + def server_failed(self) -> bool: ... + @property + def server_error(self) -> bool: ... + @property + def no_such_client(self) -> bool: ... + @property + def load_failure(self) -> bool: ... + @property + def init_failure(self) -> bool: ... + @property + def shm_failure(self) -> bool: ... + @property + def version_error(self) -> bool: ... + @property + def backend_error(self) -> bool: ... + @property + def client_zombie(self) -> bool: ... + +class TransportState: + def __init__(self, code: int) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +class CallbackExit(Exception): ... + +def get_property(subject: int | str, key: str) -> tuple[bytes, str] | None: ... +def get_properties(subject: int | str) -> dict[str, tuple[bytes, str]]: ... +def get_all_properties() -> dict[str, dict[str, tuple[bytes, str]]]: ... +def position2dict(pos: _JackPositionT) -> dict[str, Any]: ... # Anyof[int, float, _CDataBase] +def version() -> tuple[int, int, int, int]: ... +def version_string() -> str: ... +def client_name_size() -> int: ... +def port_name_size() -> int: ... 
+def set_error_function(callback: Callable[[str], object] | None = ...) -> None: ... +def set_info_function(callback: Callable[[str], object] | None = ...) -> None: ... +def client_pid(name: str) -> int: ... + +METADATA_CONNECTED: str +METADATA_HARDWARE: str +METADATA_ICON_LARGE: str +METADATA_ICON_SMALL: str +METADATA_PORT_GROUP: str +METADATA_PRETTY_NAME: str +if sys.platform != "linux": + METADATA_EVENT_TYPES: str + METADATA_ICON_NAME: str + METADATA_ORDER: str + METADATA_SIGNAL_TYPE: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..60569f64 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/@tests/stubtest_allowlist.txt @@ -0,0 +1,12 @@ +markdown.extensions.abbr.ABBR_REF_RE +markdown.extensions.attr_list.AttrListTreeprocessor.run +markdown.extensions.codehilite.CodeHilite.__init__ +markdown.extensions.fenced_code.FencedBlockPreprocessor.__init__ +markdown.extensions.footnotes.DEF_RE +markdown.extensions.footnotes.FootnotePreprocessor +markdown.extensions.footnotes.TABBED_RE +markdown.extensions.legacy_attrs.LegacyAttrs.run +markdown.extensions.toc.TocTreeprocessor.run +markdown.extensions.toc.slugify +markdown.preprocessors.ReferencePreprocessor +markdown.postprocessors.UnescapePostprocessor # deprecated diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/METADATA.toml new file mode 100644 index 00000000..9914b4e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/METADATA.toml @@ -0,0 +1,4 @@ +version = "3.4.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/__init__.pyi new file mode 100644 index 00000000..3f57adcc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/__init__.pyi @@ -0,0 +1,2 @@ +from .core import Markdown as Markdown, markdown as markdown, markdownFromFile as markdownFromFile +from .extensions import Extension as Extension diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/__meta__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/__meta__.pyi new file mode 100644 index 00000000..9fb40899 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/__meta__.pyi @@ -0,0 +1,2 @@ +__version_info__: tuple[int, int, int, str, int] +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/blockparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/blockparser.pyi new file mode 100644 index 00000000..69fd02de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/blockparser.pyi @@ -0,0 +1,23 @@ +from collections.abc import Iterable +from typing import Any, TypeVar +from xml.etree.ElementTree import Element, 
ElementTree + +from . import Markdown +from .util import Registry + +_T = TypeVar("_T") + +class State(list[_T]): + def set(self, state: _T) -> None: ... + def reset(self) -> None: ... + def isstate(self, state: _T) -> bool: ... + +class BlockParser: + blockprocessors: Registry + state: State[Any] # TODO: possible to get rid of Any? + md: Markdown + def __init__(self, md: Markdown) -> None: ... + root: Element + def parseDocument(self, lines: Iterable[str]) -> ElementTree: ... + def parseChunk(self, parent: Element, text: str) -> None: ... + def parseBlocks(self, parent: Element, blocks: list[str]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/blockprocessors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/blockprocessors.pyi new file mode 100644 index 00000000..70919f79 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/blockprocessors.pyi @@ -0,0 +1,70 @@ +from logging import Logger +from re import Match, Pattern +from typing import Any +from xml.etree.ElementTree import Element + +from markdown import Markdown + +from .blockparser import BlockParser + +logger: Logger + +def build_block_parser(md: Markdown, **kwargs: Any): ... + +class BlockProcessor: + parser: BlockParser + tab_length: int + def __init__(self, parser: BlockParser) -> None: ... + def lastChild(self, parent: Element) -> Element | None: ... + def detab(self, text: str, length: int | None = ...) -> tuple[str, str]: ... + def looseDetab(self, text: str, level: int = ...) -> str: ... + def test(self, parent: Element, block: str) -> bool: ... + def run(self, parent: Element, blocks: list[str]) -> bool | None: ... + +class ListIndentProcessor(BlockProcessor): + ITEM_TYPES: list[str] + LIST_TYPES: list[str] + INDENT_RE: Pattern[str] + def __init__(self, parser: BlockParser) -> None: ... # Note: This was done because the args are sent as-is. + def create_item(self, parent: Element, block: str) -> None: ... + def get_level(self, parent: Element, block: str) -> tuple[int, Element]: ... + +class CodeBlockProcessor(BlockProcessor): ... + +class BlockQuoteProcessor(BlockProcessor): + RE: Pattern[str] + def clean(self, line: str) -> str: ... + +class OListProcessor(BlockProcessor): + TAG: str = ... + STARTSWITH: str = ... + LAZY_OL: bool = ... + SIBLING_TAGS: list[str] + RE: Pattern[str] + CHILD_RE: Pattern[str] + INDENT_RE: Pattern[str] + def __init__(self, parser: BlockParser) -> None: ... + def get_items(self, block: str) -> list[str]: ... + +class UListProcessor(OListProcessor): + TAG: str = ... + RE: Pattern[str] + def __init__(self, parser: BlockParser) -> None: ... + +class HashHeaderProcessor(BlockProcessor): + RE: Pattern[str] + +class SetextHeaderProcessor(BlockProcessor): + RE: Pattern[str] + +class HRProcessor(BlockProcessor): + RE: str = ... + SEARCH_RE: Pattern[str] + match: Match[str] + +class EmptyBlockProcessor(BlockProcessor): ... + +class ReferenceProcessor(BlockProcessor): + RE: Pattern[str] + +class ParagraphProcessor(BlockProcessor): ... 
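`BlockProcessor.test()` and `run()`, typed above, are the hook for custom block handling, and `BlockParser.blockprocessors` is the `Registry` they are installed into. The sketch below is purely illustrative and not part of the diff; the `BoxProcessor` class, the `"box"` registry name, and the priority `175` are all invented for the example.

```python
# Illustrative sketch of the BlockProcessor API typed above (not part of the diff).
import xml.etree.ElementTree as etree

import markdown
from markdown.blockprocessors import BlockProcessor

class BoxProcessor(BlockProcessor):
    """Render any block starting with '!!!' as <div class="box">...</div>."""

    def test(self, parent, block):       # -> bool, per the stub
        return block.startswith("!!!")

    def run(self, parent, blocks):       # -> bool | None, per the stub
        block = blocks.pop(0)
        div = etree.SubElement(parent, "div")
        div.set("class", "box")
        div.text = block[3:].strip()
        return True

md = markdown.Markdown()
# Registry.register(item, name, priority); a high priority runs before the built-ins.
md.parser.blockprocessors.register(BoxProcessor(md.parser), "box", 175)
print(md.convert("!!! hello"))           # <div class="box">hello</div>
```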
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/core.pyi new file mode 100644 index 00000000..39441794 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/core.pyi @@ -0,0 +1,72 @@ +from collections.abc import Callable, Mapping, Sequence +from typing import Any, ClassVar, Protocol +from typing_extensions import Literal, Self +from xml.etree.ElementTree import Element + +from .blockparser import BlockParser +from .extensions import Extension +from .util import HtmlStash, Registry + +# TODO: The following protocols can be replaced by their counterparts from +# codecs, once they have been propagated to all type checkers. +class _WritableStream(Protocol): + def write(self, __data: bytes) -> object: ... + def seek(self, __offset: int, __whence: int) -> object: ... + def close(self) -> object: ... + +class _ReadableStream(Protocol): + def read(self, __size: int = ...) -> bytes: ... + def seek(self, __offset: int, __whence: int) -> object: ... + def close(self) -> object: ... + +class Markdown: + preprocessors: Registry + inlinePatterns: Registry + treeprocessors: Registry + postprocessors: Registry + parser: BlockParser + htmlStash: HtmlStash + output_formats: ClassVar[dict[Literal["xhtml", "html"], Callable[[Element], str]]] + output_format: Literal["xhtml", "html"] + serializer: Callable[[Element], str] + tab_length: int + block_level_elements: list[str] + registeredExtensions: list[Extension] + def __init__( + self, + *, + extensions: Sequence[str | Extension] | None = ..., + extension_configs: Mapping[str, Mapping[str, Any]] | None = ..., + output_format: Literal["xhtml", "html"] | None = ..., + tab_length: int | None = ..., + ) -> None: ... + def build_parser(self) -> Markdown: ... + def registerExtensions(self, extensions: Sequence[Extension | str], configs: Mapping[str, Mapping[str, Any]]) -> Markdown: ... + def build_extension(self, ext_name: str, configs: Mapping[str, str]) -> Extension: ... + def registerExtension(self, extension: Extension) -> Markdown: ... + def reset(self) -> Self: ... + def set_output_format(self, format: Literal["xhtml", "html"]) -> Markdown: ... + def is_block_level(self, tag: str) -> bool: ... + def convert(self, source: str) -> str: ... + def convertFile( + self, input: str | _ReadableStream | None = ..., output: str | _WritableStream | None = ..., encoding: str | None = ... + ) -> Markdown: ... + +def markdown( + text: str, + *, + extensions: Sequence[str | Extension] | None = ..., + extension_configs: Mapping[str, Mapping[str, Any]] | None = ..., + output_format: Literal["xhtml", "html"] | None = ..., + tab_length: int | None = ..., +) -> str: ... +def markdownFromFile( + *, + input: str | _ReadableStream | None = ..., + output: str | _WritableStream | None = ..., + encoding: str | None = ..., + extensions: Sequence[str | Extension] | None = ..., + extension_configs: Mapping[str, Mapping[str, Any]] | None = ..., + output_format: Literal["xhtml", "html"] | None = ..., + tab_length: int | None = ..., +) -> None: ... 
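As typed in `markdown/core.pyi` above, everything after the input text is keyword-only in `markdown()` and `markdownFromFile()`. A short, purely illustrative call sketch follows (the sample text and the extension list are invented, not taken from the diff).

```python
# Illustrative sketch of the markdown.core entry points typed above (not part of the diff).
import markdown

html = markdown.markdown(
    "# Title\n\nSome *text*.",
    extensions=["extra", "toc"],     # Sequence[str | Extension] per the stub
    output_format="html",            # Literal["xhtml", "html"]
    tab_length=4,
)
print(html)

# The class-based API mirrors the function; reset() lets one instance convert many documents.
md = markdown.Markdown(output_format="xhtml")
first = md.convert("*one*")
md.reset()                           # returns Self per the stub
second = md.convert("*two*")
```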
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/__init__.pyi new file mode 100644 index 00000000..e01e02de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/__init__.pyi @@ -0,0 +1,14 @@ +from collections.abc import Mapping +from typing import Any + +from markdown.core import Markdown + +class Extension: + config: Mapping[str, list[Any]] = ... + def __init__(self, **kwargs: Any) -> None: ... + def getConfig(self, key: str, default: Any = ...) -> Any: ... + def getConfigs(self) -> dict[str, Any]: ... + def getConfigInfo(self) -> list[tuple[str, str]]: ... + def setConfig(self, key: str, value: Any) -> None: ... + def setConfigs(self, items: Mapping[str, Any]) -> None: ... + def extendMarkdown(self, md: Markdown) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/abbr.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/abbr.pyi new file mode 100644 index 00000000..aafd0af4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/abbr.pyi @@ -0,0 +1,17 @@ +from re import Pattern +from typing import Any + +from markdown.blockprocessors import BlockProcessor +from markdown.extensions import Extension +from markdown.inlinepatterns import InlineProcessor + +ABBR_REF_RE: Pattern[str] + +class AbbrExtension(Extension): ... +class AbbrPreprocessor(BlockProcessor): ... + +class AbbrInlineProcessor(InlineProcessor): + title: Any + def __init__(self, pattern, title) -> None: ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/admonition.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/admonition.pyi new file mode 100644 index 00000000..a7cf5108 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/admonition.pyi @@ -0,0 +1,16 @@ +from re import Pattern +from typing import Any + +from markdown.blockprocessors import BlockProcessor +from markdown.extensions import Extension + +class AdmonitionExtension(Extension): ... + +class AdmonitionProcessor(BlockProcessor): + CLASSNAME: str + CLASSNAME_TITLE: str + RE: Pattern[str] + RE_SPACES: Any + def get_class_and_title(self, match): ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/attr_list.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/attr_list.pyi new file mode 100644 index 00000000..238d943d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/attr_list.pyi @@ -0,0 +1,20 @@ +from re import Pattern + +from markdown.extensions import Extension +from markdown.treeprocessors import Treeprocessor + +def get_attrs(str): ... +def isheader(elem): ... 
+ +class AttrListTreeprocessor(Treeprocessor): + BASE_RE: str + HEADER_RE: Pattern[str] + BLOCK_RE: Pattern[str] + INLINE_RE: Pattern[str] + NAME_RE: Pattern[str] + def assign_attrs(self, elem, attrs) -> None: ... + def sanitize_name(self, name): ... + +class AttrListExtension(Extension): ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/codehilite.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/codehilite.pyi new file mode 100644 index 00000000..2526ce4c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/codehilite.pyi @@ -0,0 +1,46 @@ +from _typeshed import Incomplete +from typing import Any + +from markdown.extensions import Extension +from markdown.treeprocessors import Treeprocessor + +pygments: bool + +def parse_hl_lines(expr): ... + +class CodeHilite: + src: Any + lang: Any + linenums: Any + guess_lang: Any + css_class: Any + style: Any + noclasses: Any + tab_length: Any + hl_lines: Any + use_pygments: Any + options: dict[str, Any] + def __init__( + self, + src: Incomplete | None = ..., + *, + linenums: Incomplete | None = ..., + guess_lang: bool = ..., + css_class: str = ..., + lang: Incomplete | None = ..., + style: str = ..., + noclasses: bool = ..., + tab_length: int = ..., + hl_lines: Incomplete | None = ..., + use_pygments: bool = ..., + **options: Any, + ) -> None: ... + def hilite(self, shebang: bool = ...) -> str: ... + +class HiliteTreeprocessor(Treeprocessor): + def code_unescape(self, text): ... + +class CodeHiliteExtension(Extension): + def __init__(self, **kwargs) -> None: ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/def_list.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/def_list.pyi new file mode 100644 index 00000000..48ef08d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/def_list.pyi @@ -0,0 +1,13 @@ +from re import Pattern + +from markdown.blockprocessors import BlockProcessor, ListIndentProcessor +from markdown.extensions import Extension + +class DefListProcessor(BlockProcessor): + RE: Pattern[str] + NO_INDENT_RE: Pattern[str] + +class DefListIndentProcessor(ListIndentProcessor): ... +class DefListExtension(Extension): ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/extra.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/extra.pyi new file mode 100644 index 00000000..8d761845 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/extra.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from markdown.extensions import Extension + +extensions: Any + +class ExtraExtension(Extension): + def __init__(self, **kwargs) -> None: ... + +def makeExtension(**kwargs): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/fenced_code.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/fenced_code.pyi new file mode 100644 index 00000000..2d26b070 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/fenced_code.pyi @@ -0,0 +1,17 @@ +from re import Pattern +from typing import Any + +from markdown.extensions import Extension +from markdown.preprocessors import Preprocessor + +class FencedCodeExtension(Extension): ... + +class FencedBlockPreprocessor(Preprocessor): + FENCED_BLOCK_RE: Pattern[str] + CODE_WRAP: str = ... + LANG_TAG: str = ... + checked_for_codehilite: bool = ... + codehilite_conf: Any + def __init__(self, md) -> None: ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/footnotes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/footnotes.pyi new file mode 100644 index 00000000..3f932031 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/footnotes.pyi @@ -0,0 +1,59 @@ +from re import Pattern +from typing import Any + +from markdown.core import Markdown +from markdown.extensions import Extension +from markdown.inlinepatterns import InlineProcessor +from markdown.postprocessors import Postprocessor +from markdown.preprocessors import Preprocessor +from markdown.treeprocessors import Treeprocessor + +FN_BACKLINK_TEXT: Any +NBSP_PLACEHOLDER: Any +DEF_RE: Pattern[str] +TABBED_RE: Pattern[str] +RE_REF_ID: Any + +class FootnoteExtension(Extension): + unique_prefix: int = ... + found_refs: Any + used_refs: Any + def __init__(self, **kwargs) -> None: ... + parser: Any + md: Markdown + footnotes: Any + def reset(self) -> None: ... + def unique_ref(self, reference, found: bool = ...): ... + def findFootnotesPlaceholder(self, root): ... + def setFootnote(self, id, text) -> None: ... + def get_separator(self): ... + def makeFootnoteId(self, id): ... + def makeFootnoteRefId(self, id, found: bool = ...): ... + def makeFootnotesDiv(self, root): ... + +class FootnotePreprocessor(Preprocessor): + footnotes: Any + def __init__(self, footnotes) -> None: ... + def detectTabbed(self, lines): ... + +class FootnoteInlineProcessor(InlineProcessor): + footnotes: Any + def __init__(self, pattern, footnotes) -> None: ... + +class FootnotePostTreeprocessor(Treeprocessor): + footnotes: Any + def __init__(self, footnotes) -> None: ... + def add_duplicates(self, li, duplicates) -> None: ... + def get_num_duplicates(self, li): ... + def handle_duplicates(self, parent) -> None: ... + offset: int = ... + +class FootnoteTreeprocessor(Treeprocessor): + footnotes: Any + def __init__(self, footnotes) -> None: ... + +class FootnotePostprocessor(Postprocessor): + footnotes: Any + def __init__(self, footnotes) -> None: ... + +def makeExtension(**kwargs): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/legacy_attrs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/legacy_attrs.pyi new file mode 100644 index 00000000..30f2e0ac --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/legacy_attrs.pyi @@ -0,0 +1,13 @@ +from re import Pattern + +from markdown.extensions import Extension +from markdown.treeprocessors import Treeprocessor + +ATTR_RE: Pattern[str] + +class LegacyAttrs(Treeprocessor): + def handleAttributes(self, el, txt): ... + +class LegacyAttrExtension(Extension): ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/legacy_em.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/legacy_em.pyi new file mode 100644 index 00000000..dadaa8ee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/legacy_em.pyi @@ -0,0 +1,11 @@ +from markdown.extensions import Extension +from markdown.inlinepatterns import UnderscoreProcessor + +EMPHASIS_RE: str +STRONG_RE: str +STRONG_EM_RE: str + +class LegacyUnderscoreProcessor(UnderscoreProcessor): ... +class LegacyEmExtension(Extension): ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/md_in_html.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/md_in_html.pyi new file mode 100644 index 00000000..2e7e9823 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/md_in_html.pyi @@ -0,0 +1,7 @@ +from markdown.blockprocessors import BlockProcessor +from markdown.extensions import Extension + +class MarkdownInHtmlProcessor(BlockProcessor): ... +class MarkdownInHtmlExtension(Extension): ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/meta.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/meta.pyi new file mode 100644 index 00000000..deca6e24 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/meta.pyi @@ -0,0 +1,20 @@ +from re import Pattern +from typing import Any + +from markdown.core import Markdown +from markdown.extensions import Extension +from markdown.preprocessors import Preprocessor + +log: Any +META_RE: Pattern[str] +META_MORE_RE: Pattern[str] +BEGIN_RE: Pattern[str] +END_RE: Pattern[str] + +class MetaExtension(Extension): + md: Markdown + def reset(self) -> None: ... + +class MetaPreprocessor(Preprocessor): ... + +def makeExtension(**kwargs): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/nl2br.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/nl2br.pyi new file mode 100644 index 00000000..81dd1fb6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/nl2br.pyi @@ -0,0 +1,7 @@ +from markdown.extensions import Extension + +BR_RE: str + +class Nl2BrExtension(Extension): ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/sane_lists.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/sane_lists.pyi new file mode 100644 index 00000000..31022c69 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/sane_lists.pyi @@ -0,0 +1,12 @@ +from markdown.blockprocessors import OListProcessor, UListProcessor +from markdown.extensions import Extension + +class SaneOListProcessor(OListProcessor): + def __init__(self, parser) -> None: ... + +class SaneUListProcessor(UListProcessor): + def __init__(self, parser) -> None: ... + +class SaneListExtension(Extension): ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/smarty.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/smarty.pyi new file mode 100644 index 00000000..6b237163 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/smarty.pyi @@ -0,0 +1,39 @@ +from typing import Any + +from markdown.extensions import Extension +from markdown.inlinepatterns import HtmlInlineProcessor + +punctClass: str +endOfWordClass: str +closeClass: str +openingQuotesBase: str +substitutions: Any +singleQuoteStartRe: Any +doubleQuoteStartRe: Any +doubleQuoteSetsRe: str +singleQuoteSetsRe: str +decadeAbbrRe: str +openingDoubleQuotesRegex: Any +closingDoubleQuotesRegex: str +closingDoubleQuotesRegex2: Any +openingSingleQuotesRegex: Any +closingSingleQuotesRegex: Any +closingSingleQuotesRegex2: Any +remainingSingleQuotesRegex: str +remainingDoubleQuotesRegex: str +HTML_STRICT_RE: str + +class SubstituteTextPattern(HtmlInlineProcessor): + replace: Any + def __init__(self, pattern, replace, md) -> None: ... + +class SmartyExtension(Extension): + substitutions: Any + def __init__(self, **kwargs) -> None: ... + def educateDashes(self, md) -> None: ... + def educateEllipses(self, md) -> None: ... + def educateAngledQuotes(self, md) -> None: ... + def educateQuotes(self, md) -> None: ... + inlinePatterns: Any + +def makeExtension(**kwargs): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/tables.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/tables.pyi new file mode 100644 index 00000000..39a4e96b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/tables.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from markdown.blockprocessors import BlockProcessor +from markdown.extensions import Extension + +PIPE_NONE: int +PIPE_LEFT: int +PIPE_RIGHT: int + +class TableProcessor(BlockProcessor): + RE_CODE_PIPES: Any + RE_END_BORDER: Any + border: bool = ... + separator: str = ... + def __init__(self, parser, config) -> None: ... + +class TableExtension(Extension): ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/toc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/toc.pyi new file mode 100644 index 00000000..519880bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/toc.pyi @@ -0,0 +1,46 @@ +from re import Pattern +from typing import Any + +from markdown.core import Markdown +from markdown.extensions import Extension +from markdown.treeprocessors import Treeprocessor + +def slugify(value, separator): ... + +IDCOUNT_RE: Pattern[str] + +def unique(id, ids): ... +def get_name(el): ... +def stashedHTML2text(text, md, strip_entities: bool = ...): ... +def unescape(text): ... +def nest_toc_tokens(toc_list): ... + +class TocTreeprocessor(Treeprocessor): + marker: Any + title: Any + base_level: Any + slugify: Any + sep: Any + use_anchors: Any + anchorlink_class: Any + use_permalinks: Any + permalink_class: Any + permalink_title: Any + header_rgx: Any + toc_top: int = ... + toc_bottom: Any + def __init__(self, md, config) -> None: ... + def iterparent(self, node) -> None: ... + def replace_marker(self, root, elem) -> None: ... + def set_level(self, elem) -> None: ... + def add_anchor(self, c, elem_id) -> None: ... + def add_permalink(self, c, elem_id) -> None: ... + def build_toc_div(self, toc_list): ... + +class TocExtension(Extension): + TreeProcessorClass: Any + def __init__(self, **kwargs) -> None: ... + md: Markdown + def reset(self) -> None: ... + +def makeExtension(**kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/wikilinks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/wikilinks.pyi new file mode 100644 index 00000000..81c6455f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/extensions/wikilinks.pyi @@ -0,0 +1,17 @@ +from typing import Any + +from markdown.core import Markdown +from markdown.extensions import Extension +from markdown.inlinepatterns import InlineProcessor + +def build_url(label, base, end): ... + +class WikiLinkExtension(Extension): + def __init__(self, **kwargs) -> None: ... + md: Markdown + +class WikiLinksInlineProcessor(InlineProcessor): + config: Any + def __init__(self, pattern, config) -> None: ... + +def makeExtension(**kwargs): ... 
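Note: a short sketch combining the tables and toc extensions stubbed above. The `toc` attribute exposed on the converter is documented upstream behaviour rather than something declared in toc.pyi, so it is flagged as an assumption:

    # Sketch: rendering a table and collecting the table of contents.
    import markdown

    SRC = "# Heading\n\n| a | b |\n| --- | --- |\n| 1 | 2 |\n"
    md = markdown.Markdown(extensions=["tables", "toc"])
    html = md.convert(SRC)
    # The toc extension stores the rendered table of contents on the converter
    # (documented behaviour, not declared in the stub above).
    print(md.toc)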
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/inlinepatterns.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/inlinepatterns.pyi new file mode 100644 index 00000000..6cea38b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/inlinepatterns.pyi @@ -0,0 +1,105 @@ +import re +from re import Match +from typing import Any, ClassVar +from xml.etree.ElementTree import Element + +from markdown.core import Markdown + +def build_inlinepatterns(md, **kwargs): ... + +NOIMG: str +BACKTICK_RE: str +ESCAPE_RE: str +EMPHASIS_RE: str +STRONG_RE: str +SMART_STRONG_RE: str +SMART_EMPHASIS_RE: str +SMART_STRONG_EM_RE: str +EM_STRONG_RE: str +EM_STRONG2_RE: str +STRONG_EM_RE: str +STRONG_EM2_RE: str +STRONG_EM3_RE: str +LINK_RE: str +IMAGE_LINK_RE: str +REFERENCE_RE: str +IMAGE_REFERENCE_RE: str +NOT_STRONG_RE: str +AUTOLINK_RE: str +AUTOMAIL_RE: str +HTML_RE: str +ENTITY_RE: str +LINE_BREAK_RE: str + +def dequote(string): ... + +class EmStrongItem: ... + +class Pattern: + ANCESTOR_EXCLUDES: Any + pattern: Any + compiled_re: Any + md: Markdown + def __init__(self, pattern, md: Markdown | None = ...) -> None: ... + def getCompiledRegExp(self): ... + def handleMatch(self, m: Match[str]) -> str | Element | None: ... + def type(self): ... + def unescape(self, text): ... + +class InlineProcessor(Pattern): + safe_mode: bool = ... + def __init__(self, pattern, md: Markdown | None = ...) -> None: ... + def handleMatch(self, m: Match[str], data) -> tuple[Element, int, int] | tuple[None, None, None]: ... # type: ignore[override] + +class SimpleTextPattern(Pattern): ... +class SimpleTextInlineProcessor(InlineProcessor): ... +class EscapeInlineProcessor(InlineProcessor): ... + +class SimpleTagPattern(Pattern): + tag: Any + def __init__(self, pattern, tag) -> None: ... + +class SimpleTagInlineProcessor(InlineProcessor): + tag: Any + def __init__(self, pattern, tag) -> None: ... + +class SubstituteTagPattern(SimpleTagPattern): ... +class SubstituteTagInlineProcessor(SimpleTagInlineProcessor): ... + +class BacktickInlineProcessor(InlineProcessor): + ESCAPED_BSLASH: Any + tag: str = ... + def __init__(self, pattern) -> None: ... + +class DoubleTagPattern(SimpleTagPattern): ... +class DoubleTagInlineProcessor(SimpleTagInlineProcessor): ... +class HtmlInlineProcessor(InlineProcessor): ... + +class AsteriskProcessor(InlineProcessor): + PATTERNS: Any + def build_single(self, m, tag, idx): ... + def build_double(self, m, tags, idx): ... + def build_double2(self, m, tags, idx): ... + def parse_sub_patterns(self, data, parent, last, idx) -> None: ... + def build_element(self, m, builder, tags, index): ... + +class UnderscoreProcessor(AsteriskProcessor): + PATTERNS: Any + +class LinkInlineProcessor(InlineProcessor): + RE_LINK: Any + RE_TITLE_CLEAN: Any + def getLink(self, data, index): ... + def getText(self, data, index): ... + +class ImageInlineProcessor(LinkInlineProcessor): ... + +class ReferenceInlineProcessor(LinkInlineProcessor): + NEWLINE_CLEANUP_RE: ClassVar[re.Pattern[str]] + def evalId(self, data, index, text): ... + def makeTag(self, href, title, text): ... + +class ShortReferenceInlineProcessor(ReferenceInlineProcessor): ... +class ImageReferenceInlineProcessor(ReferenceInlineProcessor): ... +class AutolinkInlineProcessor(InlineProcessor): ... +class AutomailInlineProcessor(InlineProcessor): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/postprocessors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/postprocessors.pyi new file mode 100644 index 00000000..6b14388b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/postprocessors.pyi @@ -0,0 +1,18 @@ +from re import Pattern +from typing import Any + +from . import util + +def build_postprocessors(md, **kwargs): ... + +class Postprocessor(util.Processor): + def run(self, text) -> Any: ... + +class RawHtmlPostprocessor(Postprocessor): + def isblocklevel(self, html): ... + +class AndSubstitutePostprocessor(Postprocessor): ... + +class UnescapePostprocessor(Postprocessor): # deprecated + RE: Pattern[str] + def unescape(self, m): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/preprocessors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/preprocessors.pyi new file mode 100644 index 00000000..9478ffdf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/preprocessors.pyi @@ -0,0 +1,24 @@ +from re import Pattern +from typing import Any + +from . import util + +def build_preprocessors(md, **kwargs): ... + +class Preprocessor(util.Processor): + def run(self, lines: list[str]) -> list[str]: ... + +class NormalizeWhitespace(Preprocessor): ... + +class HtmlBlockPreprocessor(Preprocessor): + right_tag_patterns: Any + attrs_pattern: str = ... + left_tag_pattern: Any + attrs_re: Any + left_tag_re: Any + markdown_in_raw: bool = ... + +class ReferencePreprocessor(Preprocessor): + TITLE: str = ... + RE: Pattern[str] + TITLE_RE: Pattern[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/serializers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/serializers.pyi new file mode 100644 index 00000000..c775a409 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/serializers.pyi @@ -0,0 +1,2 @@ +def to_html_string(element): ... +def to_xhtml_string(element): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/treeprocessors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/treeprocessors.pyi new file mode 100644 index 00000000..86968de1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/treeprocessors.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from re import Pattern +from typing import Any, ClassVar +from xml.etree.ElementTree import Element + +from . import util + +def build_treeprocessors(md, **kwargs): ... +def isString(s): ... + +class Treeprocessor(util.Processor): + def run(self, root: Element) -> Element | None: ... + +class InlineProcessor(Treeprocessor): + inlinePatterns: Any + ancestors: Any + def __init__(self, md) -> None: ... + stashed_nodes: Any + parent_map: Any + def run(self, tree: Element, ancestors: Incomplete | None = ...) -> Element: ... + +class PrettifyTreeprocessor(Treeprocessor): ... + +class UnescapeTreeprocessor(Treeprocessor): + RE: ClassVar[Pattern[str]] + def unescape(self, text: str) -> str: ... 
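Note: inlinepatterns.pyi above declares SimpleTagInlineProcessor(pattern, tag), and util.pyi declares Registry.register(item, name, priority). A sketch of the extension pattern those signatures describe, following the upstream extension-API docs; the ~~del~~ pattern and the priority value are illustrative:

    # Sketch: a custom inline pattern built from the stubbed signatures above.
    import markdown
    from markdown.extensions import Extension
    from markdown.inlinepatterns import SimpleTagInlineProcessor

    DEL_RE = r"()~~(.*?)~~"  # group 2 becomes the element text

    class DelExtension(Extension):
        def extendMarkdown(self, md):
            # Registry.register(item, name, priority), as declared in util.pyi
            md.inlinePatterns.register(SimpleTagInlineProcessor(DEL_RE, "del"), "del", 175)

    print(markdown.markdown("this is ~~gone~~ kept", extensions=[DelExtension()]))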
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/util.pyi new file mode 100644 index 00000000..865abdd5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Markdown/markdown/util.pyi @@ -0,0 +1,51 @@ +from re import Pattern +from typing import Any, overload + +from markdown.core import Markdown + +BLOCK_LEVEL_ELEMENTS: Any +STX: str +ETX: str +INLINE_PLACEHOLDER_PREFIX: Any +INLINE_PLACEHOLDER: Any +INLINE_PLACEHOLDER_RE: Pattern[str] +AMP_SUBSTITUTE: Any +HTML_PLACEHOLDER: Any +HTML_PLACEHOLDER_RE: Pattern[str] +TAG_PLACEHOLDER: Any +RTL_BIDI_RANGES: Any + +def deprecated(message: str, stacklevel: int = ...): ... +def parseBoolValue(value: object, fail_on_errors: bool = ..., preserve_none: bool = ...) -> bool | None: ... +def code_escape(text: str) -> str: ... +def nearing_recursion_limit() -> bool: ... + +class AtomicString(str): ... + +class Processor: + md: Markdown + def __init__(self, md: Markdown | None = ...) -> None: ... + +class HtmlStash: + html_counter: int = ... + rawHtmlBlocks: list[str] + tag_counter: int = ... + tag_data: list[dict[str, Any]] + def __init__(self) -> None: ... + def store(self, html: str) -> str: ... + def reset(self) -> None: ... + def get_placeholder(self, key: int) -> str: ... + def store_tag(self, tag: str, attrs: list[Any], left_index: int, right_index: int) -> str: ... + +class Registry: + def __init__(self) -> None: ... + def __contains__(self, item: object) -> bool: ... + def __iter__(self) -> Any: ... + @overload + def __getitem__(self, key: slice) -> Registry: ... + @overload + def __getitem__(self, key: str | int) -> Any: ... + def __len__(self) -> int: ... + def get_index_for_name(self, name: str) -> int: ... + def register(self, item: Any, name: str, priority: float) -> None: ... + def deregister(self, name: str, strict: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..ae84a43e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/@tests/stubtest_allowlist.txt @@ -0,0 +1,4 @@ +# Requires Pyside or PyQt to be installed. +# Not worth adding to tool.stubtest.stubtest_requirements +# because it's only used as a base type and it's a long install. +PIL.ImageQt.ImageQt diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/@tests/test_cases/check_tk_compat.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/@tests/test_cases/check_tk_compat.py new file mode 100644 index 00000000..05332d50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/@tests/test_cases/check_tk_compat.py @@ -0,0 +1,15 @@ +# Verify that ImageTK images are valid to pass to TK code. 
+from __future__ import annotations + +import tkinter + +from PIL import ImageTk + +photo = ImageTk.PhotoImage() +bitmap = ImageTk.BitmapImage() + +tkinter.Label(image=photo) +tkinter.Label(image=bitmap) + +tkinter.Label().configure(image=photo) +tkinter.Label().configure(image=bitmap) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/METADATA.toml new file mode 100644 index 00000000..b4d21d1e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/METADATA.toml @@ -0,0 +1,5 @@ +version = "9.4.*" + +[tool.stubtest] +stubtest_requirements = ["olefile"] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BdfFontFile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BdfFontFile.pyi new file mode 100644 index 00000000..8f6ba64b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BdfFontFile.pyi @@ -0,0 +1,11 @@ +from typing import Any + +from .FontFile import FontFile + +bdf_slant: Any +bdf_spacing: Any + +def bdf_char(f): ... + +class BdfFontFile(FontFile): + def __init__(self, fp) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BlpImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BlpImagePlugin.pyi new file mode 100644 index 00000000..1ad9df57 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BlpImagePlugin.pyi @@ -0,0 +1,46 @@ +from enum import IntEnum +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile, PyDecoder + +class Format(IntEnum): + JPEG: int + +BLP_FORMAT_JPEG: Literal[Format.JPEG] + +class Encoding(IntEnum): + UNCOMPRESSED: int + DXT: int + UNCOMPRESSED_RAW_BGRA: int + +BLP_ENCODING_UNCOMPRESSED: Literal[Encoding.UNCOMPRESSED] +BLP_ENCODING_DXT: Literal[Encoding.DXT] +BLP_ENCODING_UNCOMPRESSED_RAW_BGRA: Literal[Encoding.UNCOMPRESSED_RAW_BGRA] + +class AlphaEncoding(IntEnum): + DXT1: int + DXT3: int + DXT5: int + +BLP_ALPHA_ENCODING_DXT1: Literal[AlphaEncoding.DXT1] +BLP_ALPHA_ENCODING_DXT3: Literal[AlphaEncoding.DXT3] +BLP_ALPHA_ENCODING_DXT5: Literal[AlphaEncoding.DXT5] + +def unpack_565(i): ... +def decode_dxt1(data, alpha: bool = ...): ... +def decode_dxt3(data): ... +def decode_dxt5(data): ... + +class BLPFormatError(NotImplementedError): ... + +class BlpImageFile(ImageFile): + format: ClassVar[Literal["BLP"]] + format_description: ClassVar[str] + +class _BLPBaseDecoder(PyDecoder): + magic: Any + def decode(self, buffer): ... + +class BLP1Decoder(_BLPBaseDecoder): ... +class BLP2Decoder(_BLPBaseDecoder): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BmpImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BmpImagePlugin.pyi new file mode 100644 index 00000000..3ebe5817 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BmpImagePlugin.pyi @@ -0,0 +1,16 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +BIT2MODE: Any + +class BmpImageFile(ImageFile): + format_description: ClassVar[str] + format: ClassVar[Literal["BMP", "DIB", "CUR"]] + COMPRESSIONS: Any + +class DibImageFile(BmpImageFile): + format: ClassVar[Literal["DIB"]] + +SAVE: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BufrStubImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BufrStubImagePlugin.pyi new file mode 100644 index 00000000..812b4e6f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/BufrStubImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import StubImageFile + +def register_handler(handler) -> None: ... + +class BufrStubImageFile(StubImageFile): + format: ClassVar[Literal["BUFR"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ContainerIO.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ContainerIO.pyi new file mode 100644 index 00000000..6ca38fbe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ContainerIO.pyi @@ -0,0 +1,14 @@ +from typing import Any + +class ContainerIO: + fh: Any + pos: int + offset: Any + length: Any + def __init__(self, file, offset, length) -> None: ... + def isatty(self): ... + def seek(self, offset, mode=...) -> None: ... + def tell(self): ... + def read(self, n: int = ...): ... + def readline(self): ... + def readlines(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/CurImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/CurImagePlugin.pyi new file mode 100644 index 00000000..09d50204 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/CurImagePlugin.pyi @@ -0,0 +1,7 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .BmpImagePlugin import BmpImageFile + +class CurImageFile(BmpImageFile): + format: ClassVar[Literal["CUR"]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/DcxImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/DcxImagePlugin.pyi new file mode 100644 index 00000000..f8f64a9f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/DcxImagePlugin.pyi @@ -0,0 +1,13 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .PcxImagePlugin import PcxImageFile + +MAGIC: int + +class DcxImageFile(PcxImageFile): + format: ClassVar[Literal["DCX"]] + frame: Any + fp: Any + def seek(self, frame) -> None: ... + def tell(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/DdsImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/DdsImagePlugin.pyi new file mode 100644 index 00000000..19893d9b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/DdsImagePlugin.pyi @@ -0,0 +1,71 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +DDS_MAGIC: int +DDSD_CAPS: int +DDSD_HEIGHT: int +DDSD_WIDTH: int +DDSD_PITCH: int +DDSD_PIXELFORMAT: int +DDSD_MIPMAPCOUNT: int +DDSD_LINEARSIZE: int +DDSD_DEPTH: int +DDSCAPS_COMPLEX: int +DDSCAPS_TEXTURE: int +DDSCAPS_MIPMAP: int +DDSCAPS2_CUBEMAP: int +DDSCAPS2_CUBEMAP_POSITIVEX: int +DDSCAPS2_CUBEMAP_NEGATIVEX: int +DDSCAPS2_CUBEMAP_POSITIVEY: int +DDSCAPS2_CUBEMAP_NEGATIVEY: int +DDSCAPS2_CUBEMAP_POSITIVEZ: int +DDSCAPS2_CUBEMAP_NEGATIVEZ: int +DDSCAPS2_VOLUME: int + +DDPF_ALPHAPIXELS: Literal[0x1] +DDPF_ALPHA: Literal[0x2] +DDPF_FOURCC: Literal[0x4] +DDPF_PALETTEINDEXED8: Literal[0x20] +DDPF_RGB: Literal[0x40] +DDPF_LUMINANCE: Literal[0x20000] + +DDS_FOURCC: Literal[0x4] +DDS_RGB: Literal[0x40] +DDS_RGBA: Literal[0x41] +DDS_LUMINANCE: Literal[0x20000] +DDS_LUMINANCEA: Literal[0x20001] +DDS_ALPHA: Literal[0x2] +DDS_PAL8: Literal[0x20] + +DDS_HEADER_FLAGS_TEXTURE: int +DDS_HEADER_FLAGS_MIPMAP: int +DDS_HEADER_FLAGS_VOLUME: int +DDS_HEADER_FLAGS_PITCH: int +DDS_HEADER_FLAGS_LINEARSIZE: int +DDS_HEIGHT: int +DDS_WIDTH: int +DDS_SURFACE_FLAGS_TEXTURE: int +DDS_SURFACE_FLAGS_MIPMAP: int +DDS_SURFACE_FLAGS_CUBEMAP: int +DDS_CUBEMAP_POSITIVEX: int +DDS_CUBEMAP_NEGATIVEX: int +DDS_CUBEMAP_POSITIVEY: int +DDS_CUBEMAP_NEGATIVEY: int +DDS_CUBEMAP_POSITIVEZ: int +DDS_CUBEMAP_NEGATIVEZ: int +DXT1_FOURCC: int +DXT3_FOURCC: int +DXT5_FOURCC: int +DXGI_FORMAT_R8G8B8A8_TYPELESS: int +DXGI_FORMAT_R8G8B8A8_UNORM: int +DXGI_FORMAT_R8G8B8A8_UNORM_SRGB: int +DXGI_FORMAT_BC7_TYPELESS: int +DXGI_FORMAT_BC7_UNORM: int +DXGI_FORMAT_BC7_UNORM_SRGB: int + +class DdsImageFile(ImageFile): + format: ClassVar[Literal["DDS"]] + format_description: ClassVar[str] + def load_seek(self, pos) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/EpsImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/EpsImagePlugin.pyi new file mode 100644 index 00000000..7e76e868 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/EpsImagePlugin.pyi @@ -0,0 +1,28 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +split: Any +field: Any +gs_windows_binary: Any + +def has_ghostscript(): ... +def Ghostscript(tile, size, fp, scale: int = ..., transparency: bool = ...): ... + +class PSFile: + fp: Any + char: Any + def __init__(self, fp) -> None: ... + def seek(self, offset, whence=...) -> None: ... + def readline(self): ... + +class EpsImageFile(ImageFile): + format: ClassVar[Literal["EPS"]] + format_description: ClassVar[str] + mode_map: Any + im: Any + mode: Any + tile: Any + def load(self, scale: int = ..., transparency: bool = ...) -> None: ... + def load_seek(self, *args, **kwargs) -> None: ... 
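Note: the plugin stubs above pin each plugin's `format` as ClassVar[Literal[...]]. A sketch of what that annotation buys a type checker, assuming Pillow and typing_extensions are available at runtime:

    # Sketch: the Literal annotations let a checker know each plugin's exact format string.
    from typing_extensions import Literal, assert_type

    from PIL.DdsImagePlugin import DdsImageFile
    from PIL.EpsImagePlugin import EpsImageFile

    assert_type(DdsImageFile.format, Literal["DDS"])
    assert_type(EpsImageFile.format, Literal["EPS"])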
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ExifTags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ExifTags.pyi new file mode 100644 index 00000000..7ac53b0a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ExifTags.pyi @@ -0,0 +1,42 @@ +from collections.abc import Mapping +from enum import IntEnum + +TAGS: Mapping[int, str] +GPSTAGS: Mapping[int, str] + +class Interop(IntEnum): + InteropIndex: int + InteropVersion: int + RelatedImageFileFormat: int + RelatedImageWidth: int + RleatedImageHeight: int + +class IFD(IntEnum): + Exif: int + GPSInfo: int + Makernote: int + Interop: int + IFD1: int + +class LightSource(IntEnum): + Unknown: int + Daylight: int + Fluorescent: int + Tungsten: int + Flash: int + Fine: int + Cloudy: int + Shade: int + DaylightFluorescent: int + DayWhiteFluorescent: int + CoolWhiteFluorescent: int + WhiteFluorescent: int + StandardLightA: int + StandardLightB: int + StandardLightC: int + D55: int + D65: int + D75: int + D50: int + ISO: int + Other: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FitsStubImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FitsStubImagePlugin.pyi new file mode 100644 index 00000000..cdd63750 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FitsStubImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import StubImageFile + +def register_handler(handler) -> None: ... + +class FITSStubImageFile(StubImageFile): + format: ClassVar[Literal["FITS"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FliImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FliImagePlugin.pyi new file mode 100644 index 00000000..9859378f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FliImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class FliImageFile(ImageFile): + format: ClassVar[Literal["FLI"]] + format_description: ClassVar[str] + def seek(self, frame) -> None: ... + def tell(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FontFile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FontFile.pyi new file mode 100644 index 00000000..fa8e103b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FontFile.pyi @@ -0,0 +1,16 @@ +from typing import Any + +WIDTH: int + +def puti16(fp, values) -> None: ... + +class FontFile: + bitmap: Any + info: Any + glyph: Any + def __init__(self) -> None: ... + def __getitem__(self, ix): ... + ysize: Any + metrics: Any + def compile(self): ... + def save(self, filename) -> None: ... 
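Note: ExifTags.pyi above declares TAGS as a Mapping[int, str], and Image.pyi later in this diff declares Image.getexif() returning an Exif mapping. A sketch of decoding tag ids with those pieces; the filename is a placeholder:

    # Sketch: mapping raw EXIF tag ids to names with the TAGS table stubbed above.
    from PIL import ExifTags, Image

    with Image.open("photo.jpg") as im:  # placeholder filename
        exif = im.getexif()  # Exif behaves as a MutableMapping[int, Any]
        for tag_id, value in exif.items():
            name = ExifTags.TAGS.get(tag_id, str(tag_id))
            print(f"{name}: {value}")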
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FpxImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FpxImagePlugin.pyi new file mode 100644 index 00000000..226a10f2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FpxImagePlugin.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Literal, TypeAlias + +from .ImageFile import ImageFile + +_OleFileIO: TypeAlias = Any # olefile.OleFileIO +_OleStream: TypeAlias = Any # olefile.OleStream + +MODES: dict[tuple[int, ...], tuple[str, str]] + +class FpxImageFile(ImageFile): + ole: _OleFileIO + format: ClassVar[Literal["FPX"]] + format_description: ClassVar[str] + fp: _OleStream | None + maxid: int + rawmode: str + jpeg: dict[int, Incomplete] + tile_prefix: Incomplete + stream: list[str] + def load(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FtexImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FtexImagePlugin.pyi new file mode 100644 index 00000000..0535ec47 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/FtexImagePlugin.pyi @@ -0,0 +1,19 @@ +from enum import IntEnum +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +MAGIC: bytes + +class Format(IntEnum): + DXT1: int + UNCOMPRESSED: int + +FORMAT_DXT1: Literal[Format.DXT1] +FORMAT_UNCOMPRESSED: Literal[Format.UNCOMPRESSED] + +class FtexImageFile(ImageFile): + format: ClassVar[Literal["FTEX"]] + format_description: ClassVar[str] + def load_seek(self, pos) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GbrImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GbrImagePlugin.pyi new file mode 100644 index 00000000..b5ede10c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GbrImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class GbrImageFile(ImageFile): + format: ClassVar[Literal["GBR"]] + format_description: ClassVar[str] + im: Any + def load(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GdImageFile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GdImageFile.pyi new file mode 100644 index 00000000..a50f3f3c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GdImageFile.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class GdImageFile(ImageFile): + format: ClassVar[Literal["GD"]] + format_description: ClassVar[str] + +def open(fp, mode: str = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GifImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GifImagePlugin.pyi new file mode 100644 index 00000000..26278948 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GifImagePlugin.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class GifImageFile(ImageFile): + format: ClassVar[Literal["GIF"]] + format_description: ClassVar[str] + global_palette: Any + def data(self): ... + @property + def n_frames(self): ... + @property + def is_animated(self): ... + im: Any + def seek(self, frame) -> None: ... + def tell(self): ... + +RAWMODE: Any + +def get_interlace(im): ... +def getheader(im, palette: Incomplete | None = ..., info: Incomplete | None = ...): ... +def getdata(im, offset=..., **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GimpGradientFile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GimpGradientFile.pyi new file mode 100644 index 00000000..f4f61bd7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GimpGradientFile.pyi @@ -0,0 +1,19 @@ +from typing import Any + +EPSILON: float + +def linear(middle, pos): ... +def curved(middle, pos): ... +def sine(middle, pos): ... +def sphere_increasing(middle, pos): ... +def sphere_decreasing(middle, pos): ... + +SEGMENTS: Any + +class GradientFile: + gradient: Any + def getpalette(self, entries: int = ...): ... + +class GimpGradientFile(GradientFile): + gradient: Any + def __init__(self, fp) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GimpPaletteFile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GimpPaletteFile.pyi new file mode 100644 index 00000000..441f85b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GimpPaletteFile.pyi @@ -0,0 +1,7 @@ +from typing import Any + +class GimpPaletteFile: + rawmode: str + palette: Any + def __init__(self, fp) -> None: ... + def getpalette(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GribStubImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GribStubImagePlugin.pyi new file mode 100644 index 00000000..70ebaadc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/GribStubImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import StubImageFile + +def register_handler(handler) -> None: ... 
+ +class GribStubImageFile(StubImageFile): + format: ClassVar[Literal["GRIB"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/Hdf5StubImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/Hdf5StubImagePlugin.pyi new file mode 100644 index 00000000..62760e41 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/Hdf5StubImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import StubImageFile + +def register_handler(handler) -> None: ... + +class HDF5StubImageFile(StubImageFile): + format: ClassVar[Literal["HDF5"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/IcnsImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/IcnsImagePlugin.pyi new file mode 100644 index 00000000..bc0b5778 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/IcnsImagePlugin.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +enable_jpeg2k: Any +HEADERSIZE: int + +def nextheader(fobj): ... +def read_32t(fobj, start_length, size): ... +def read_32(fobj, start_length, size): ... +def read_mk(fobj, start_length, size): ... +def read_png_or_jpeg2000(fobj, start_length, size): ... + +class IcnsFile: + SIZES: Any + dct: Any + fobj: Any + def __init__(self, fobj) -> None: ... + def itersizes(self): ... + def bestsize(self): ... + def dataforsize(self, size): ... + def getimage(self, size: Incomplete | None = ...): ... + +class IcnsImageFile(ImageFile): + format: ClassVar[Literal["ICNS"]] + format_description: ClassVar[str] + @property + def size(self): ... + @size.setter + def size(self, value) -> None: ... + best_size: Any + im: Any + mode: Any + def load(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/IcoImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/IcoImagePlugin.pyi new file mode 100644 index 00000000..f84ed76d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/IcoImagePlugin.pyi @@ -0,0 +1,26 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class IcoFile: + buf: Any + entry: Any + nb_items: Any + def __init__(self, buf): ... + def sizes(self): ... + def getentryindex(self, size, bpp: bool = ...): ... + def getimage(self, size, bpp: bool = ...): ... + def frame(self, idx): ... + +class IcoImageFile(ImageFile): + format: ClassVar[Literal["ICO"]] + format_description: ClassVar[str] + @property + def size(self): ... + @size.setter + def size(self, value) -> None: ... + im: Any + mode: Any + def load(self) -> None: ... + def load_seek(self) -> None: ... 
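Note: GifImagePlugin.pyi above stubs n_frames, is_animated, seek() and tell() on GifImageFile. A sketch of stepping through an animated GIF with that interface; the filename is a placeholder:

    # Sketch: iterating GIF frames via the seek/tell API stubbed above.
    from PIL import Image

    with Image.open("animation.gif") as im:  # placeholder filename
        if getattr(im, "is_animated", False):
            for frame in range(im.n_frames):
                im.seek(frame)
                print(frame, im.size, im.tell())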
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImImagePlugin.pyi new file mode 100644 index 00000000..09fd6841 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImImagePlugin.pyi @@ -0,0 +1,34 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +COMMENT: str +DATE: str +EQUIPMENT: str +FRAMES: str +LUT: str +NAME: str +SCALE: str +SIZE: str +MODE: str +TAGS: Any +OPEN: Any +split: Any + +def number(s): ... + +class ImImageFile(ImageFile): + format: ClassVar[Literal["IM"]] + format_description: ClassVar[str] + @property + def n_frames(self): ... + @property + def is_animated(self): ... + frame: Any + fp: Any + tile: Any + def seek(self, frame) -> None: ... + def tell(self): ... + +SAVE: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/Image.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/Image.pyi new file mode 100644 index 00000000..31b71700 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/Image.pyi @@ -0,0 +1,315 @@ +from _typeshed import Incomplete, SupportsRead, SupportsWrite +from collections.abc import Callable, Iterable, Iterator, MutableMapping, Sequence +from enum import IntEnum +from pathlib import Path +from typing import Any, ClassVar, Protocol, SupportsBytes +from typing_extensions import Literal, Self, TypeAlias + +from PIL.PyAccess import PyAccess + +from ._imaging import ( + DEFAULT_STRATEGY as DEFAULT_STRATEGY, + FILTERED as FILTERED, + FIXED as FIXED, + HUFFMAN_ONLY as HUFFMAN_ONLY, + RLE as RLE, +) +from .ImageFilter import Filter +from .ImagePalette import ImagePalette + +_Mode: TypeAlias = str +_Resample: TypeAlias = Literal[0, 1, 2, 3, 4, 5] +_Size: TypeAlias = tuple[int, int] +_Box: TypeAlias = tuple[int, int, int, int] + +_ConversionMatrix: TypeAlias = ( + tuple[float, float, float, float] | tuple[float, float, float, float, float, float, float, float, float, float, float, float] +) +# `str` values are only accepted if mode="RGB" for an `Image` object +# `float` values are only accepted for certain modes such as "F" +# See https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.new +_Color: TypeAlias = int | tuple[int] | tuple[int, int, int] | tuple[int, int, int, int] | str | float | tuple[float] + +class _Writeable(SupportsWrite[bytes], Protocol): + def seek(self, __offset: int) -> Any: ... + +NORMAL: Literal[0] # deprecated +SEQUENCE: Literal[1] # deprecated +CONTAINER: Literal[2] # deprecated + +class DecompressionBombWarning(RuntimeWarning): ... +class DecompressionBombError(Exception): ... 
+ +MAX_IMAGE_PIXELS: int | None + +LINEAR: Literal[Resampling.BILINEAR] # deprecated +CUBIC: Literal[Resampling.BICUBIC] # deprecated +ANTIALIAS: Literal[Resampling.LANCZOS] # deprecated + +class Transpose(IntEnum): + FLIP_LEFT_RIGHT: Literal[0] + FLIP_TOP_BOTTOM: Literal[1] + ROTATE_90: Literal[2] + ROTATE_180: Literal[3] + ROTATE_270: Literal[4] + TRANSPOSE: Literal[5] + TRANSVERSE: Literal[6] + +# All Transpose items +FLIP_LEFT_RIGHT: Literal[0] +FLIP_TOP_BOTTOM: Literal[1] +ROTATE_90: Literal[2] +ROTATE_180: Literal[3] +ROTATE_270: Literal[4] +TRANSPOSE: Literal[5] +TRANSVERSE: Literal[6] + +class Transform(IntEnum): + AFFINE: Literal[0] + EXTENT: Literal[1] + PERSPECTIVE: Literal[2] + QUAD: Literal[3] + MESH: Literal[4] + +# All Transform items +AFFINE: Literal[0] +EXTENT: Literal[1] +PERSPECTIVE: Literal[2] +QUAD: Literal[3] +MESH: Literal[4] + +class Resampling(IntEnum): + NEAREST: Literal[0] + LANCZOS: Literal[1] + BILINEAR: Literal[2] + BICUBIC: Literal[3] + BOX: Literal[4] + HAMMING: Literal[5] + +# All Resampling items +NEAREST: Literal[0] +LANCZOS: Literal[1] +BILINEAR: Literal[2] +BICUBIC: Literal[3] +BOX: Literal[4] +HAMMING: Literal[5] + +class Dither(IntEnum): + NONE: Literal[0] + ORDERED: Literal[1] + RASTERIZE: Literal[2] + FLOYDSTEINBERG: Literal[3] + +# All Dither items +NONE: Literal[0] +ORDERED: Literal[1] +RASTERIZE: Literal[2] +FLOYDSTEINBERG: Literal[3] + +class Palette(IntEnum): + WEB: Literal[0] + ADAPTIVE: Literal[1] + +# All Palette items +WEB: Literal[0] +ADAPTIVE: Literal[1] + +class Quantize(IntEnum): + MEDIANCUT: Literal[0] + MAXCOVERAGE: Literal[1] + FASTOCTREE: Literal[2] + LIBIMAGEQUANT: Literal[3] + +# All Quantize items +MEDIANCUT: Literal[0] +MAXCOVERAGE: Literal[1] +FASTOCTREE: Literal[2] +LIBIMAGEQUANT: Literal[3] + +ID: list[str] +OPEN: dict[str, Any] +MIME: dict[str, str] +SAVE: dict[str, Any] +SAVE_ALL: dict[str, Any] +EXTENSION: dict[str, str] +DECODERS: dict[str, Any] +ENCODERS: dict[str, Any] + +MODES: list[_Mode] + +def getmodebase(mode: _Mode) -> Literal["L", "RGB"]: ... +def getmodetype(mode: _Mode) -> Literal["L", "I", "F"]: ... +def getmodebandnames(mode: _Mode) -> tuple[str, ...]: ... +def getmodebands(mode: _Mode) -> int: ... +def preinit() -> None: ... +def init() -> None: ... +def coerce_e(value) -> _E: ... + +class _E: + def __init__(self, scale, data) -> None: ... + def __add__(self, other) -> _E: ... + def __mul__(self, other) -> _E: ... + +_ImageState: TypeAlias = tuple[dict[str, Any], str, tuple[int, int], Any, bytes] + +class Image: + format: ClassVar[str | None] + format_description: ClassVar[str | None] + im: Any + mode: _Mode + palette: Any + info: dict[Any, Any] + readonly: int + pyaccess: PyAccess | None + is_animated: bool # not present on all Image objects + n_frames: int # not present on all Image objects + # Only defined after a call to save(). + encoderconfig: tuple[Incomplete, ...] + @property + def width(self) -> int: ... + @property + def height(self) -> int: ... + @property + def size(self) -> tuple[int, int]: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: object) -> None: ... + def close(self) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __getstate__(self) -> _ImageState: ... + def __setstate__(self, state: _ImageState) -> None: ... + def tobytes(self, encoder_name: str = ..., *args) -> bytes: ... + def tobitmap(self, name: str = ...) -> bytes: ... + def frombytes(self, data: bytes, decoder_name: str = ..., *args) -> None: ... + def load(self) -> None: ... 
+ def verify(self) -> None: ... + def convert( + self, + mode: _Mode | None = ..., + matrix: _ConversionMatrix | None = ..., + dither: int | None = ..., + palette: Palette | Literal[0, 1] = ..., + colors: int = ..., + ) -> Image: ... + def quantize( + self, + colors: int = ..., + method: Quantize | Literal[0, 1, 2, 3] | None = ..., + kmeans: int = ..., + palette: Image | None = ..., + dither: int = ..., + ) -> Image: ... + def copy(self) -> Image: ... + __copy__ = copy + def crop(self, box: _Box | None = ...) -> Image: ... + def draft(self, mode: _Mode, size: _Size) -> None: ... + def filter(self, filter: Filter | Callable[[], Filter]) -> Image: ... + def getbands(self) -> tuple[str, ...]: ... + def getbbox(self) -> tuple[int, int, int, int] | None: ... + def getcolors(self, maxcolors: int = ...) -> list[tuple[int, int]]: ... + def getdata(self, band: int | None = ...): ... + def getextrema(self): ... + def getexif(self) -> Exif: ... + def get_child_images(self) -> list[Image]: ... + def getim(self): ... + def getpalette(self, rawmode: str | None = ...) -> list[int] | None: ... + def getpixel(self, xy: tuple[int, int]): ... + def getprojection(self) -> tuple[list[int], list[int]]: ... + def histogram(self, mask: Image | None = ..., extrema: tuple[int, int] | tuple[float, float] | None = ...) -> list[int]: ... + def entropy(self, mask: Image | None = ..., extrema: tuple[int, int] | tuple[float, float] | None = ...) -> float: ... + def paste(self, im: Image | _Color, box: tuple[int, int] | _Box | None = ..., mask: Image | None = ...) -> None: ... + def alpha_composite(self, im: Image, dest: tuple[int, int] = ..., source: tuple[int, int] = ...) -> None: ... + def point(self, lut, mode: _Mode | None = ...) -> Image: ... + def putalpha(self, alpha: Image | int) -> None: ... + def putdata(self, data: Sequence[int], scale: float = ..., offset: float = ...) -> None: ... + def putpalette(self, data: ImagePalette | bytes | Iterable[int] | SupportsBytes, rawmode: _Mode | None = ...) -> None: ... + def putpixel(self, xy: tuple[int, int], value: _Color | list[float]) -> None: ... + def remap_palette(self, dest_map: Iterable[int], source_palette: Sequence[int] | None = ...) -> Image: ... + def resize( + self, + size: tuple[int, int], + resample: Resampling | _Resample | None = ..., + box: tuple[float, float, float, float] | None = ..., + reducing_gap: float | None = ..., + ) -> Image: ... + def reduce(self, factor: int | tuple[int, int] | list[int], box: _Box | None = ...) -> Image: ... + def rotate( + self, + angle: float, + resample: Resampling | _Resample = ..., + expand: bool = ..., + center: tuple[float, float] | None = ..., + translate: tuple[float, float] | None = ..., + fillcolor: _Color | None = ..., + ) -> Image: ... + def save( + self, + fp: str | bytes | Path | _Writeable, + format: str | None = ..., + *, + save_all: bool = ..., + bitmap_format: Literal["bmp", "png"] = ..., # for ICO files + optimize: bool = ..., + **params: Any, + ) -> None: ... + def seek(self, frame: int) -> None: ... + def show(self, title: str | None = ...) -> None: ... + def split(self) -> tuple[Image, ...]: ... + def getchannel(self, channel: int | str) -> Image: ... + def tell(self) -> int: ... + def thumbnail(self, size: tuple[int, int], resample: Resampling | _Resample = ..., reducing_gap: float = ...) -> None: ... 
+ def transform( + self, + size: _Size, + method: Transform | Literal[0, 1, 2, 3, 4], + data=..., + resample: Resampling | _Resample = ..., + fill: int = ..., + fillcolor: _Color | int | None = ..., + ) -> Image: ... + def transpose(self, method: Transpose | Literal[0, 1, 2, 3, 4, 5, 6]) -> Image: ... + def effect_spread(self, distance: int) -> Image: ... + def toqimage(self): ... + def toqpixmap(self): ... + +class ImagePointHandler: ... +class ImageTransformHandler: ... + +def new(mode: _Mode, size: tuple[int, int], color: _Color = ...) -> Image: ... +def frombytes(mode: _Mode, size: tuple[int, int], data, decoder_name: str = ..., *args) -> Image: ... +def frombuffer(mode: _Mode, size: tuple[int, int], data, decoder_name: str = ..., *args) -> Image: ... +def fromarray(obj, mode: _Mode | None = ...) -> Image: ... +def fromqimage(im) -> Image: ... +def fromqpixmap(im) -> Image: ... +def open( + fp: str | bytes | Path | SupportsRead[bytes], mode: Literal["r"] = ..., formats: list[str] | tuple[str, ...] | None = ... +) -> Image: ... +def alpha_composite(im1: Image, im2: Image) -> Image: ... +def blend(im1: Image, im2: Image, alpha: float) -> Image: ... +def composite(image1: Image, image2: Image, mask: Image) -> Image: ... +def eval(image: Image, *args) -> Image: ... +def merge(mode: _Mode, bands: Sequence[Image]) -> Image: ... +def register_open(id: str, factory, accept=...) -> None: ... +def register_mime(id: str, mimetype: str) -> None: ... +def register_save(id: str, driver) -> None: ... +def register_save_all(id: str, driver) -> None: ... +def register_extension(id: str, extension: str) -> None: ... +def register_extensions(id: str, extensions: Iterable[str]) -> None: ... +def registered_extensions() -> dict[str, str]: ... +def register_decoder(name: str, decoder) -> None: ... +def register_encoder(name: str, encoder) -> None: ... +def effect_mandelbrot(size: tuple[int, int], extent: tuple[float, float, float, float], quality: int) -> Image: ... +def effect_noise(size: tuple[int, int], sigma: float) -> Image: ... +def linear_gradient(mode: _Mode) -> Image: ... +def radial_gradient(mode: _Mode) -> Image: ... + +class Exif(MutableMapping[int, Any]): + def load(self, data: bytes) -> None: ... + def tobytes(self, offset: int = ...) -> bytes: ... + def get_ifd(self, tag: int): ... + def hide_offsets(self) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, tag: int) -> Any: ... + def __contains__(self, tag: object) -> bool: ... + def __setitem__(self, tag: int, value: Any) -> None: ... + def __delitem__(self, tag: int) -> None: ... + def __iter__(self) -> Iterator[int]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageChops.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageChops.pyi new file mode 100644 index 00000000..0e9f5304 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageChops.pyi @@ -0,0 +1,23 @@ +from .Image import Image + +def constant(image: Image, value: int) -> Image: ... +def duplicate(image: Image) -> Image: ... +def invert(image: Image) -> Image: ... +def lighter(image1: Image, image2: Image) -> Image: ... +def darker(image1: Image, image2: Image) -> Image: ... +def difference(image1: Image, image2: Image) -> Image: ... +def multiply(image1: Image, image2: Image) -> Image: ... +def screen(image1: Image, image2: Image) -> Image: ... +def soft_light(image1: Image, image2: Image) -> Image: ... 
+def hard_light(image1: Image, image2: Image) -> Image: ... +def overlay(image1: Image, image2: Image) -> Image: ... +def add(image1: Image, image2: Image, scale: float = ..., offset: int = ...) -> Image: ... +def subtract(image1: Image, image2: Image, scale: float = ..., offset: int = ...) -> Image: ... +def add_modulo(image1: Image, image2: Image) -> Image: ... +def subtract_modulo(image1: Image, image2: Image) -> Image: ... +def logical_and(image1: Image, image2: Image) -> Image: ... +def logical_or(image1: Image, image2: Image) -> Image: ... +def logical_xor(image1: Image, image2: Image) -> Image: ... +def blend(image1: Image, image2: Image, alpha: float) -> Image: ... +def composite(image1: Image, image2: Image, mask: Image) -> Image: ... +def offset(image: Image, xoffset: int, yoffset: int | None = ...) -> Image: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageCms.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageCms.pyi new file mode 100644 index 00000000..cc750ee5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageCms.pyi @@ -0,0 +1,90 @@ +from _typeshed import Incomplete +from enum import IntEnum +from typing import Any +from typing_extensions import Literal + +from .Image import ImagePointHandler + +DESCRIPTION: str +VERSION: str +core: Any + +class Intent(IntEnum): + PERCEPTUAL: int + RELATIVE_COLORIMETRIC: int + SATURATION: int + ABSOLUTE_COLORIMETRIC: int + +INTENT_PERCEPTUAL: Literal[Intent.PERCEPTUAL] +INTENT_RELATIVE_COLORIMETRIC: Literal[Intent.RELATIVE_COLORIMETRIC] +INTENT_SATURATION: Literal[Intent.SATURATION] +INTENT_ABSOLUTE_COLORIMETRIC: Literal[Intent.ABSOLUTE_COLORIMETRIC] + +class Direction(IntEnum): + INPUT: int + OUTPUT: int + PROOF: int + +DIRECTION_INPUT: Literal[Direction.INPUT] +DIRECTION_OUTPUT: Literal[Direction.OUTPUT] +DIRECTION_PROOF: Literal[Direction.PROOF] + +FLAGS: Any + +class ImageCmsProfile: + def __init__(self, profile) -> None: ... + def tobytes(self): ... + +class ImageCmsTransform(ImagePointHandler): + transform: Any + input_mode: Any + output_mode: Any + output_profile: Any + def __init__( + self, + input, + output, + input_mode, + output_mode, + intent=..., + proof: Incomplete | None = ..., + proof_intent=..., + flags: int = ..., + ) -> None: ... + def point(self, im): ... + def apply(self, im, imOut: Incomplete | None = ...): ... + def apply_in_place(self, im): ... + +def get_display_profile(handle: Incomplete | None = ...): ... + +class PyCMSError(Exception): ... + +def profileToProfile( + im, + inputProfile, + outputProfile, + renderingIntent=..., + outputMode: Incomplete | None = ..., + inPlace: bool = ..., + flags: int = ..., +): ... +def getOpenProfile(profileFilename): ... +def buildTransform(inputProfile, outputProfile, inMode, outMode, renderingIntent=..., flags: int = ...): ... +def buildProofTransform( + inputProfile, outputProfile, proofProfile, inMode, outMode, renderingIntent=..., proofRenderingIntent=..., flags=... +): ... + +buildTransformFromOpenProfiles = buildTransform +buildProofTransformFromOpenProfiles = buildProofTransform + +def applyTransform(im, transform, inPlace: bool = ...): ... +def createProfile(colorSpace, colorTemp: int = ...): ... +def getProfileName(profile): ... +def getProfileInfo(profile): ... +def getProfileCopyright(profile): ... +def getProfileManufacturer(profile): ... +def getProfileModel(profile): ... 
+def getProfileDescription(profile): ... +def getDefaultIntent(profile): ... +def isIntentSupported(profile, intent, direction): ... +def versions(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageColor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageColor.pyi new file mode 100644 index 00000000..62fba216 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageColor.pyi @@ -0,0 +1,10 @@ +from typing_extensions import TypeAlias + +_RGB: TypeAlias = tuple[int, int, int] | tuple[int, int, int, int] +_Ink: TypeAlias = str | int | _RGB +_GreyScale: TypeAlias = tuple[int, int] + +def getrgb(color: _Ink) -> _RGB: ... +def getcolor(color: _Ink, mode: str) -> _RGB | _GreyScale: ... + +colormap: dict[str, str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageDraw.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageDraw.pyi new file mode 100644 index 00000000..a146d856 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageDraw.pyi @@ -0,0 +1,156 @@ +from _typeshed import Incomplete +from collections.abc import Container, Sequence +from typing import Any, overload +from typing_extensions import Literal, TypeAlias + +from .Image import Image +from .ImageColor import _Ink +from .ImageFont import _Font + +_XY: TypeAlias = Sequence[float | tuple[float, float]] +_Outline: TypeAlias = Any + +class ImageDraw: + def __init__(self, im: Image, mode: str | None = ...) -> None: ... + def getfont(self): ... + def arc(self, xy: _XY, start: float, end: float, fill: _Ink | None = ..., width: float = ...) -> None: ... + def bitmap(self, xy: _XY, bitmap: Image, fill: _Ink | None = ...) -> None: ... + def chord( + self, xy: _XY, start: float, end: float, fill: _Ink | None = ..., outline: _Ink | None = ..., width: float = ... + ) -> None: ... + def ellipse(self, xy: _XY, fill: _Ink | None = ..., outline: _Ink | None = ..., width: float = ...) -> None: ... + def line(self, xy: _XY, fill: _Ink | None = ..., width: float = ..., joint: Literal["curve"] | None = ...) -> None: ... + def shape(self, shape: _Outline, fill: _Ink | None = ..., outline: _Ink | None = ...) -> None: ... + def pieslice( + self, + xy: tuple[tuple[float, float], tuple[float, float]], + start: float, + end: float, + fill: _Ink | None = ..., + outline: _Ink | None = ..., + width: float = ..., + ) -> None: ... + def point(self, xy: _XY, fill: _Ink | None = ...) -> None: ... + def polygon(self, xy: _XY, fill: _Ink | None = ..., outline: _Ink | None = ..., width: float = ...) -> None: ... + def regular_polygon( + self, + bounding_circle: tuple[float, float] | tuple[float, float, float] | list[int], + n_sides: int, + rotation: float = ..., + fill: _Ink | None = ..., + outline: _Ink | None = ..., + ) -> None: ... + def rectangle( + self, + xy: tuple[float, float, float, float] | tuple[tuple[float, float], tuple[float, float]], + fill: _Ink | None = ..., + outline: _Ink | None = ..., + width: float = ..., + ) -> None: ... + def rounded_rectangle( + self, + xy: tuple[float, float, float, float] | tuple[tuple[float, float], tuple[float, float]], + radius: float = ..., + fill: _Ink | None = ..., + outline: _Ink | None = ..., + width: float = ..., + ) -> None: ... 
+ def text( + self, + xy: tuple[float, float], + text: str | bytes, + fill: _Ink | None = ..., + font: _Font | None = ..., + anchor: str | None = ..., + spacing: float = ..., + align: Literal["left", "center", "right"] = ..., + direction: Literal["rtl", "ltr", "ttb"] | None = ..., + features: Sequence[str] | None = ..., + language: str | None = ..., + stroke_width: int = ..., + stroke_fill: _Ink | None = ..., + embedded_color: bool = ..., + *args, + **kwargs, + ) -> None: ... + def multiline_text( + self, + xy: tuple[float, float], + text: str | bytes, + fill: _Ink | None = ..., + font: _Font | None = ..., + anchor: str | None = ..., + spacing: float = ..., + align: Literal["left", "center", "right"] = ..., + direction: Literal["rtl", "ltr", "ttb"] | None = ..., + features: Incomplete | None = ..., + language: str | None = ..., + stroke_width: int = ..., + stroke_fill: _Ink | None = ..., + embedded_color: bool = ..., + ) -> None: ... + def textsize( + self, + text: str | bytes, + font: _Font | None = ..., + spacing: float = ..., + direction: Literal["rtl", "ltr", "ttb"] | None = ..., + features: Sequence[str] | None = ..., + language: str | None = ..., + stroke_width: int = ..., + ) -> tuple[int, int]: ... + def multiline_textsize( + self, + text: str | bytes, + font: _Font | None = ..., + spacing: float = ..., + direction: Literal["rtl", "ltr", "ttb"] | None = ..., + features: Sequence[str] | None = ..., + language: str | None = ..., + stroke_width: int = ..., + ) -> tuple[int, int]: ... + def textlength( + self, + text: str | bytes, + font: _Font | None = ..., + direction: Literal["rtl", "ltr", "ttb"] | None = ..., + features: Sequence[str] | None = ..., + language: str | None = ..., + embedded_color: bool = ..., + ) -> float: ... + def textbbox( + self, + xy: tuple[float, float], + text: str | bytes, + font: _Font | None = ..., + anchor: str | None = ..., + spacing: float = ..., + align: Literal["left", "center", "right"] = ..., + direction: Literal["rtl", "ltr", "ttb"] | None = ..., + features: Incomplete | None = ..., + language: str | None = ..., + stroke_width: int = ..., + embedded_color: bool = ..., + ) -> tuple[int, int, int, int]: ... + def multiline_textbbox( + self, + xy: tuple[float, float], + text: str | bytes, + font: _Font | None = ..., + anchor: str | None = ..., + spacing: float = ..., + align: Literal["left", "center", "right"] = ..., + direction: Literal["rtl", "ltr", "ttb"] | None = ..., + features: Incomplete | None = ..., + language: str | None = ..., + stroke_width: int = ..., + embedded_color: bool = ..., + ) -> tuple[int, int, int, int]: ... + +def Draw(im: Image, mode: str | None = ...) -> ImageDraw: ... +def Outline() -> _Outline: ... +@overload +def getdraw(im: None = ..., hints: Container[Literal["nicest"]] | None = ...) -> tuple[None, Any]: ... +@overload +def getdraw(im: Image, hints: Container[Literal["nicest"]] | None = ...) -> tuple[Image, Any]: ... +def floodfill(image: Image, xy: tuple[float, float], value, border=..., thresh: float = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageDraw2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageDraw2.pyi new file mode 100644 index 00000000..b38d3e80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageDraw2.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete +from typing import Any + +class Pen: + color: Any + width: Any + def __init__(self, color, width: int = ..., opacity: int = ...) -> None: ... + +class Brush: + color: Any + def __init__(self, color, opacity: int = ...) -> None: ... + +class Font: + color: Any + font: Any + def __init__(self, color, file, size: int = ...) -> None: ... + +class Draw: + draw: Any + image: Any + transform: Any + def __init__(self, image, size: Incomplete | None = ..., color: Incomplete | None = ...) -> None: ... + def flush(self): ... + def render(self, op, xy, pen, brush: Incomplete | None = ...) -> None: ... + def settransform(self, offset) -> None: ... + def arc(self, xy, start, end, *options) -> None: ... + def chord(self, xy, start, end, *options) -> None: ... + def ellipse(self, xy, *options) -> None: ... + def line(self, xy, *options) -> None: ... + def pieslice(self, xy, start, end, *options) -> None: ... + def polygon(self, xy, *options) -> None: ... + def rectangle(self, xy, *options) -> None: ... + def text(self, xy, text, font) -> None: ... + def textsize(self, text, font): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageEnhance.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageEnhance.pyi new file mode 100644 index 00000000..6f0afa8d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageEnhance.pyi @@ -0,0 +1,25 @@ +from .Image import Image + +class _Enhance: + def enhance(self, factor: float) -> Image: ... + +class Color(_Enhance): + image: Image + intermediate_mode: str + degenerate: Image + def __init__(self, image: Image) -> None: ... + +class Contrast(_Enhance): + image: Image + degenerate: Image + def __init__(self, image: Image) -> None: ... + +class Brightness(_Enhance): + image: Image + degenerate: Image + def __init__(self, image: Image) -> None: ... + +class Sharpness(_Enhance): + image: Image + degenerate: Image + def __init__(self, image: Image) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageFile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageFile.pyi new file mode 100644 index 00000000..b4c6ebdc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageFile.pyi @@ -0,0 +1,69 @@ +from _typeshed import Incomplete +from typing import Any, NoReturn +from typing_extensions import Self + +from .Image import Image + +MAXBLOCK: int +SAFEBLOCK: Any +LOAD_TRUNCATED_IMAGES: bool +ERRORS: Any + +def raise_oserror(error) -> NoReturn: ... + +class ImageFile(Image): + custom_mimetype: Any + tile: list[Incomplete] | None + readonly: int + decoderconfig: Any + decodermaxblock: Any + fp: Any + filename: Any + def __init__(self, fp: Incomplete | None = ..., filename: Incomplete | None = ...) -> None: ... + def get_format_mimetype(self): ... + def verify(self) -> None: ... + map: Any + im: Any + def load(self): ... + def load_prepare(self) -> None: ... 
+ def load_end(self) -> None: ... + +class StubImageFile(ImageFile): + def load(self) -> None: ... + +class Parser: + incremental: Incomplete | None + image: Incomplete | None + data: Incomplete | None + decoder: Incomplete | None + offset: int + finished: bool + def reset(self) -> None: ... + decode: Any + def feed(self, data) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: object) -> None: ... + def close(self) -> Image: ... + +class PyCodecState: + xsize: int + ysize: int + xoff: int + yoff: int + def extents(self) -> tuple[int, int, int, int]: ... + +class PyDecoder: + im: Any + state: Any + fd: Any + mode: Any + def __init__(self, mode, *args) -> None: ... + args: Any + def init(self, args) -> None: ... + @property + def pulls_fd(self): ... + def decode(self, buffer) -> None: ... + def cleanup(self) -> None: ... + def setfd(self, fd) -> None: ... + def setimage(self, im, extents: Incomplete | None = ...) -> None: ... + def set_as_raw(self, data, rawmode: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageFilter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageFilter.pyi new file mode 100644 index 00000000..bbb647a4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageFilter.pyi @@ -0,0 +1,138 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Iterable, Sequence +from typing import Any +from typing_extensions import Literal, Self, TypeAlias + +from .Image import Image + +_FilterArgs: TypeAlias = tuple[Sequence[int], int, int, Sequence[int]] + +# filter image parameters below are the C images, i.e. Image().im. + +class Filter: ... +class MultibandFilter(Filter): ... + +class BuiltinFilter(MultibandFilter): + def filter(self, image) -> Image: ... + +class Kernel(BuiltinFilter): + name: str + filterargs: _FilterArgs + def __init__(self, size: Sequence[int], kernel: Sequence[int], scale: Incomplete | None = ..., offset: int = ...) -> None: ... + +class RankFilter(Filter): + name: str + size: int + rank: int + def __init__(self, size: int, rank: int) -> None: ... + def filter(self, image) -> Image: ... + +class MedianFilter(RankFilter): + name: str + size: int + rank: int + def __init__(self, size: int = ...) -> None: ... + +class MinFilter(RankFilter): + name: str + size: int + rank: int + def __init__(self, size: int = ...) -> None: ... + +class MaxFilter(RankFilter): + name: str + size: int + rank: int + def __init__(self, size: int = ...) -> None: ... + +class ModeFilter(Filter): + name: str + size: int + def __init__(self, size: int = ...) -> None: ... + def filter(self, image) -> Image: ... + +class GaussianBlur(MultibandFilter): + name: str + radius: float + def __init__(self, radius: float = ...) -> None: ... + def filter(self, image) -> Image: ... + +class BoxBlur(MultibandFilter): + name: str + radius: float + def __init__(self, radius: float) -> None: ... + def filter(self, image) -> Image: ... + +class UnsharpMask(MultibandFilter): + name: str + radius: float + percent: int + threshold: int + def __init__(self, radius: float = ..., percent: int = ..., threshold: int = ...) -> None: ... + def filter(self, image) -> Image: ... 
+ +class BLUR(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class CONTOUR(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class DETAIL(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class EDGE_ENHANCE(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class EDGE_ENHANCE_MORE(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class EMBOSS(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class FIND_EDGES(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class SHARPEN(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class SMOOTH(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class SMOOTH_MORE(BuiltinFilter): + name: str + filterargs: _FilterArgs + +class Color3DLUT(MultibandFilter): + name: str + size: list[int] + channels: int + mode: str | None + table: Any + def __init__( + self, size: int | Iterable[int], table, channels: int = ..., target_mode: str | None = ..., **kwargs + ) -> None: ... + @classmethod + def generate( + cls, + size: int | tuple[int, int, int], + callback: Callable[[float, float, float], Iterable[float]], + channels: int = ..., + target_mode: str | None = ..., + ) -> Self: ... + def transform( + self, + callback: Callable[..., Iterable[float]], + with_normals: bool = ..., + channels: Literal[3, 4] | None = ..., + target_mode: Incomplete | None = ..., + ) -> Self: ... + def filter(self, image) -> Image: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageFont.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageFont.pyi new file mode 100644 index 00000000..63e566dc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageFont.pyi @@ -0,0 +1,126 @@ +from _typeshed import FileDescriptorOrPath, Incomplete, SupportsRead +from enum import IntEnum +from typing import Protocol +from typing_extensions import Literal + +class Layout(IntEnum): + BASIC: Literal[0] + RAQM: Literal[1] + +LAYOUT_BASIC: Literal[Layout.BASIC] +LAYOUT_RAQM: Literal[Layout.RAQM] + +class _Font(Protocol): + def getmask(self, text: str | bytes, mode: str = ..., direction=..., features=...): ... + +class ImageFont: + def getsize(self, text: str | bytes, *args, **kwargs) -> tuple[int, int]: ... + def getmask(self, text: str | bytes, mode: str = ..., direction=..., features=...): ... + +class FreeTypeFont: + path: str | bytes | SupportsRead[bytes] | None + size: int + index: int + encoding: str + layout_engine: Layout + def __init__( + self, + font: str | bytes | SupportsRead[bytes] | None = ..., + size: int = ..., + index: int = ..., + encoding: str = ..., + layout_engine: Layout | None = ..., + ) -> None: ... + def getname(self) -> tuple[str, str]: ... + def getmetrics(self) -> tuple[int, int]: ... + def getlength( + self, + text: str | bytes, + mode: str = ..., + direction: Literal["ltr", "rtl", "ttb"] | None = ..., + features: Incomplete | None = ..., + language: str | None = ..., + ) -> int: ... + def getbbox( + self, + text: str | bytes, + mode: str = ..., + direction=..., + features=..., + language: str | None = ..., + stroke_width: int = ..., + anchor: str | None = ..., + ) -> tuple[int, int, int, int]: ... + def getsize( + self, + text: str | bytes, + direction: Literal["ltr", "rtl", "ttb"] | None = ..., + features: Incomplete | None = ..., + language: str | None = ..., + stroke_width: int = ..., + ) -> tuple[int, int]: ... 
+ def getsize_multiline( + self, + text: str | bytes, + direction: Literal["ltr", "rtl", "ttb"] | None = ..., + spacing: float = ..., + features: Incomplete | None = ..., + language: str | None = ..., + stroke_width: float = ..., + ) -> tuple[int, int]: ... + def getoffset(self, text: str | bytes) -> tuple[int, int]: ... + def getmask( + self, + text: str | bytes, + mode: str = ..., + direction: Literal["ltr", "rtl", "ttb"] | None = ..., + features: Incomplete | None = ..., + language: str | None = ..., + stroke_width: float = ..., + anchor: str | None = ..., + ink=..., + start: tuple[float, float] | None = ..., + ): ... + def getmask2( + self, + text: str | bytes, + mode: str = ..., + fill=..., + direction: Literal["ltr", "rtl", "ttb"] | None = ..., + features: Incomplete | None = ..., + language: str | None = ..., + stroke_width: float = ..., + anchor: str | None = ..., + ink=..., + start: tuple[float, float] | None = ..., + *args, + **kwargs, + ): ... + def font_variant( + self, + font: str | bytes | SupportsRead[bytes] | None = ..., + size: int | None = ..., + index: int | None = ..., + encoding: str | None = ..., + layout_engine: Layout | None = ..., + ) -> FreeTypeFont: ... + def get_variation_names(self): ... + def set_variation_by_name(self, name): ... + def get_variation_axes(self): ... + def set_variation_by_axes(self, axes): ... + +class TransposedFont: + def __init__(self, font: _Font, orientation: int | None = ...) -> None: ... + def getsize(self, text: str | bytes, *args, **kwargs) -> tuple[int, int]: ... + def getmask(self, text: str | bytes, mode: str = ..., *args, **kwargs): ... + +def load(filename: FileDescriptorOrPath) -> ImageFont: ... +def truetype( + font: str | bytes | SupportsRead[bytes] | None = ..., + size: int = ..., + index: int = ..., + encoding: str = ..., + layout_engine: Layout | None = ..., +) -> FreeTypeFont: ... +def load_path(filename: str | bytes) -> ImageFont: ... +def load_default() -> ImageFont: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageGrab.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageGrab.pyi new file mode 100644 index 00000000..513b2caa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageGrab.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +from .Image import Image, _Box + +def grab( + bbox: _Box | None = ..., include_layered_windows: bool = ..., all_screens: bool = ..., xdisplay: Incomplete | None = ... +) -> Image: ... +def grabclipboard() -> Image | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageMath.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageMath.pyi new file mode 100644 index 00000000..6f524aa8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageMath.pyi @@ -0,0 +1,50 @@ +from _typeshed import Incomplete +from typing import Any + +class _Operand: + im: Any + def __init__(self, im) -> None: ... + def apply(self, op, im1, im2: Incomplete | None = ..., mode: Incomplete | None = ...): ... + def __bool__(self) -> bool: ... + def __abs__(self): ... + def __pos__(self): ... + def __neg__(self): ... + def __add__(self, other): ... + def __radd__(self, other): ... + def __sub__(self, other): ... + def __rsub__(self, other): ... + def __mul__(self, other): ... + def __rmul__(self, other): ... 
+ def __truediv__(self, other): ... + def __rtruediv__(self, other): ... + def __mod__(self, other): ... + def __rmod__(self, other): ... + def __pow__(self, other): ... + def __rpow__(self, other): ... + def __invert__(self): ... + def __and__(self, other): ... + def __rand__(self, other): ... + def __or__(self, other): ... + def __ror__(self, other): ... + def __xor__(self, other): ... + def __rxor__(self, other): ... + def __lshift__(self, other): ... + def __rshift__(self, other): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + +def imagemath_int(self): ... +def imagemath_float(self): ... +def imagemath_equal(self, other): ... +def imagemath_notequal(self, other): ... +def imagemath_min(self, other): ... +def imagemath_max(self, other): ... +def imagemath_convert(self, mode): ... + +ops: Any + +def eval(expression, _dict=..., **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageMode.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageMode.pyi new file mode 100644 index 00000000..de5a9ab4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageMode.pyi @@ -0,0 +1,10 @@ +from typing import Any + +class ModeDescriptor: + mode: Any + bands: Any + basemode: Any + basetype: Any + def __init__(self, mode, bands, basemode, basetype, typestr) -> None: ... + +def getmode(mode): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageMorph.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageMorph.pyi new file mode 100644 index 00000000..2668787b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageMorph.pyi @@ -0,0 +1,26 @@ +from _typeshed import StrOrBytesPath + +from .Image import Image + +LUT_SIZE: int +ROTATION_MATRIX: list[int] +MIRROR_MATRIX: list[int] + +class LutBuilder: + patterns: list[str] + lut: bytearray + def __init__(self, patterns: list[str] | None = ..., op_name: str | None = ...) -> None: ... + def add_patterns(self, patterns: list[str]) -> None: ... + def build_default_lut(self) -> None: ... + def get_lut(self) -> bytearray: ... + def build_lut(self) -> bytearray: ... + +class MorphOp: + lut: bytearray + def __init__(self, lut: bytearray | None = ..., op_name: str | None = ..., patterns: list[str] | None = ...) -> None: ... + def apply(self, image: Image) -> tuple[int, Image]: ... + def match(self, image: Image) -> list[tuple[int, int]]: ... + def get_on_pixels(self, image: Image) -> list[tuple[int, int]]: ... + def load_lut(self, filename: StrOrBytesPath) -> None: ... + def save_lut(self, filename: StrOrBytesPath) -> None: ... + def set_lut(self, lut: bytearray) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageOps.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageOps.pyi new file mode 100644 index 00000000..9cc14801 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageOps.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import Protocol +from typing_extensions import TypeAlias + +from .Image import Image, Resampling, _Resample, _Size +from .ImageColor import _Ink + +_Border: TypeAlias = int | tuple[int, int] | tuple[int, int, int, int] + +class _Deformer(Protocol): + def getmesh(self, image: Image): ... + +def autocontrast( + image: Image, cutoff: int = ..., ignore: int | None = ..., mask: Image | None = ..., preserve_tone: bool = ... +) -> Image: ... +def colorize( + image: Image, + black: int | str, + white: int | str, + mid: int | str | None = ..., + blackpoint: int = ..., + whitepoint: int = ..., + midpoint: int = ..., +) -> Image: ... +def contain(image: Image, size: _Size, method: Resampling | _Resample = ...) -> Image: ... +def pad( + image: Image, + size: _Size, + method: Resampling | _Resample = ..., + color: Incomplete | None = ..., + centering: Iterable[float] = ..., +) -> Image: ... +def crop(image: Image, border: _Border = ...) -> Image: ... +def scale(image: Image, factor: float, resample: Resampling | _Resample = ...) -> Image: ... +def deform(image: Image, deformer: _Deformer, resample: Resampling | _Resample = ...) -> Image: ... +def equalize(image: Image, mask: Incomplete | None = ...) -> Image: ... +def expand(image: Image, border: _Border = ..., fill: _Ink = ...) -> Image: ... +def fit( + image: Image, size: _Size, method: Resampling | _Resample = ..., bleed: float = ..., centering: Iterable[float] = ... +) -> Image: ... +def flip(image: Image) -> Image: ... +def grayscale(image: Image) -> Image: ... +def invert(image: Image) -> Image: ... +def mirror(image: Image) -> Image: ... +def posterize(image: Image, bits: int) -> Image: ... +def solarize(image: Image, threshold: int = ...) -> Image: ... +def exif_transpose(image: Image) -> Image: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImagePalette.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImagePalette.pyi new file mode 100644 index 00000000..76d6582f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImagePalette.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete +from typing import Any + +from .Image import Image + +class ImagePalette: + mode: Any + rawmode: Any + palette: Any + colors: Any + dirty: Any + def __init__(self, mode: str = ..., palette: Incomplete | None = ..., size: int = ...) -> None: ... + def copy(self) -> ImagePalette: ... + def getdata(self): ... + def tobytes(self) -> bytes: ... + tostring = tobytes + def getcolor(self, color: tuple[int, int, int], image: Image | None = ...) -> int: ... + def save(self, fp) -> None: ... + +def raw(rawmode, data): ... +def make_linear_lut(black, white): ... +def make_gamma_lut(exp): ... +def negative(mode: str = ...): ... +def random(mode: str = ...): ... +def sepia(white: str = ...): ... +def wedge(mode: str = ...): ... +def load(filename): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImagePath.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImagePath.pyi new file mode 100644 index 00000000..b0796511 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImagePath.pyi @@ -0,0 +1,3 @@ +from ._imaging import path + +Path = path diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageQt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageQt.pyi new file mode 100644 index 00000000..f11ae150 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageQt.pyi @@ -0,0 +1,25 @@ +from typing import Any +from typing_extensions import Literal, TypeAlias + +from .Image import Image + +# imported from either of {PyQt6,PySide6,PyQt5,PySide2}.QtGui +# These are way too complex, with 4 different possible sources (2 deprecated) +# And we don't want to force the user to install PyQt or Pyside when they may not even use it. +_QImage: TypeAlias = Any +_QPixmap: TypeAlias = Any + +qt_versions: Any +qt_is_installed: bool +qt_version: Any + +def rgb(r: int, g: int, b: int, a: int = ...) -> int: ... +def fromqimage(im: ImageQt | _QImage) -> Image: ... +def fromqpixmap(im: ImageQt | _QImage) -> Image: ... +def align8to32(bytes: bytes, width: int, mode: Literal["1", "L", "P"]) -> bytes: ... + +class ImageQt(_QImage): + def __init__(self, im: Image) -> None: ... + +def toqimage(im: Image) -> ImageQt: ... +def toqpixmap(im: Image) -> _QPixmap: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageSequence.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageSequence.pyi new file mode 100644 index 00000000..dcf42c4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageSequence.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from typing import Any + +class Iterator: + im: Any + position: Any + def __init__(self, im) -> None: ... + def __getitem__(self, ix): ... + def __iter__(self): ... + def __next__(self): ... + +def all_frames(im, func: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageShow.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageShow.pyi new file mode 100644 index 00000000..851c8855 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageShow.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Literal + +def register(viewer, order: int = ...) -> None: ... +def show(image, title: Incomplete | None = ..., **options): ... + +class Viewer: + def show(self, image, **options): ... + format: Any + options: Any + def get_format(self, image): ... + def get_command(self, file, **options) -> None: ... + def save_image(self, image): ... + def show_image(self, image, **options): ... + def show_file(self, path: Incomplete | None = ..., **options): ... + +class WindowsViewer(Viewer): + format: str + options: Any + def get_command(self, file, **options): ... + +class MacViewer(Viewer): + format: str + options: Any + def get_command(self, file, **options): ... 
+ def show_file(self, path: Incomplete | None = ..., **options): ... + +class UnixViewer(Viewer): + format: str + options: Any + def get_command(self, file, **options): ... + def show_file(self, path: Incomplete | None = ..., **options): ... + +class XDGViewer(UnixViewer): + def get_command_ex(self, file, **options) -> tuple[Literal["xdg-open"], Literal["xdg-open"]]: ... + +class DisplayViewer(UnixViewer): + def get_command_ex(self, file, title: str | None = ..., **options): ... + +class GmDisplayViewer(UnixViewer): + def get_command_ex(self, file, **options): ... + +class EogViewer(UnixViewer): + def get_command_ex(self, file, **options): ... + +class XVViewer(UnixViewer): + def get_command_ex(self, file, title: Incomplete | None = ..., **options): ... + +class IPythonViewer(Viewer): + def show_image(self, image, **options): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageStat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageStat.pyi new file mode 100644 index 00000000..82723e30 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageStat.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete +from typing import Any + +class Stat: + h: Any + bands: Any + def __init__(self, image_or_list, mask: Incomplete | None = ...) -> None: ... + def __getattr__(self, id: str): ... + +Global = Stat diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageTk.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageTk.pyi new file mode 100644 index 00000000..af3bd4fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageTk.pyi @@ -0,0 +1,52 @@ +import _tkinter +import tkinter +from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsRead +from typing import Any + +from PIL.Image import Image, _Box, _Mode, _Size + +class PhotoImage(tkinter._PhotoImageLike): + tk: _tkinter.TkappType + def __init__( + self, + image: Image | _Mode | None = None, + size: _Size | None = None, + *, + file: StrOrBytesPath | SupportsRead[bytes] = ..., + data: ReadableBuffer = ..., + # These are forwarded to tkinter.PhotoImage.__init__(): + name: str | None = None, + cnf: dict[str, Any] = ..., + format: str = ..., + gamma: float = ..., + height: int = ..., + palette: int | str = ..., + width: int = ..., + ) -> None: ... + def __del__(self) -> None: ... + def width(self) -> int: ... + def height(self) -> int: ... + # box is deprecated and unused + def paste(self, im: Image, box: _Box | None = ...) -> None: ... + +class BitmapImage(tkinter._BitmapImageLike): + def __init__( + self, + image: Image | None = None, + *, + file: StrOrBytesPath | SupportsRead[bytes] = ..., + data: ReadableBuffer = ..., + # These are forwarded to tkinter.Bitmap.__init__(): + name: str | None = None, + cnf: dict[str, Any] = ..., + master: tkinter.Misc | _tkinter.TkappType | None = None, + background: tkinter._Color = ..., + foreground: tkinter._Color = ..., + maskdata: str = ..., + maskfile: StrOrBytesPath = ..., + ) -> None: ... + def __del__(self) -> None: ... + def width(self) -> int: ... + def height(self) -> int: ... + +def getimage(photo: tkinter.PhotoImage) -> Image: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageTransform.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageTransform.pyi new file mode 100644 index 00000000..7b7d82b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageTransform.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from .Image import ImageTransformHandler + +class Transform(ImageTransformHandler): + data: Any + def __init__(self, data) -> None: ... + def getdata(self): ... + def transform(self, size, image, **options): ... + +class AffineTransform(Transform): + method: Any + +class ExtentTransform(Transform): + method: Any + +class QuadTransform(Transform): + method: Any + +class MeshTransform(Transform): + method: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageWin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageWin.pyi new file mode 100644 index 00000000..a51ae49b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImageWin.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete +from typing import Any + +class HDC: + dc: Any + def __init__(self, dc) -> None: ... + def __int__(self) -> int: ... + +class HWND: + wnd: Any + def __init__(self, wnd) -> None: ... + def __int__(self) -> int: ... + +class Dib: + image: Any + mode: Any + size: Any + def __init__(self, image, size: Incomplete | None = ...) -> None: ... + def expose(self, handle): ... + def draw(self, handle, dst, src: Incomplete | None = ...): ... + def query_palette(self, handle): ... + def paste(self, im, box: Incomplete | None = ...) -> None: ... + def frombytes(self, buffer): ... + def tobytes(self): ... + +class Window: + hwnd: Any + def __init__(self, title: str = ..., width: Incomplete | None = ..., height: Incomplete | None = ...) -> None: ... + def ui_handle_clear(self, dc, x0, y0, x1, y1) -> None: ... + def ui_handle_damage(self, x0, y0, x1, y1) -> None: ... + def ui_handle_destroy(self) -> None: ... + def ui_handle_repair(self, dc, x0, y0, x1, y1) -> None: ... + def ui_handle_resize(self, width, height) -> None: ... + def mainloop(self) -> None: ... + +class ImageWindow(Window): + image: Any + def __init__(self, image, title: str = ...) -> None: ... + def ui_handle_repair(self, dc, x0, y0, x1, y1) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImtImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImtImagePlugin.pyi new file mode 100644 index 00000000..eadbef84 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/ImtImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +field: Any + +class ImtImageFile(ImageFile): + format: ClassVar[Literal["IMT"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/IptcImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/IptcImagePlugin.pyi new file mode 100644 index 00000000..738caa99 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/IptcImagePlugin.pyi @@ -0,0 +1,20 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +COMPRESSION: Any +PAD: Any + +def i(c): ... +def dump(c) -> None: ... + +class IptcImageFile(ImageFile): + format: ClassVar[Literal["IPTC"]] + format_description: ClassVar[str] + def getint(self, key): ... + def field(self): ... + im: Any + def load(self): ... + +def getiptcinfo(im): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/Jpeg2KImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/Jpeg2KImagePlugin.pyi new file mode 100644 index 00000000..a37d618f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/Jpeg2KImagePlugin.pyi @@ -0,0 +1,11 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class Jpeg2KImageFile(ImageFile): + format: ClassVar[Literal["JPEG2000"]] + format_description: ClassVar[str] + reduce: Any + tile: Any + def load(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/JpegImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/JpegImagePlugin.pyi new file mode 100644 index 00000000..c558a132 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/JpegImagePlugin.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +def Skip(self, marker) -> None: ... +def APP(self, marker) -> None: ... +def COM(self, marker) -> None: ... +def SOF(self, marker) -> None: ... +def DQT(self, marker) -> None: ... + +MARKER: Any + +class JpegImageFile(ImageFile): + format: ClassVar[Literal["JPEG", "MPO"]] + format_description: ClassVar[str] + def load_read(self, read_bytes): ... + mode: Any + tile: Any + decoderconfig: Any + def draft(self, mode, size): ... + im: Any + def load_djpeg(self) -> None: ... + def getxmp(self): ... + +RAWMODE: Any +zigzag_index: Any +samplings: Any + +def convert_dict_qtables(qtables): ... +def get_sampling(im): ... +def jpeg_factory(fp: Incomplete | None = ..., filename: Incomplete | None = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/JpegPresets.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/JpegPresets.pyi new file mode 100644 index 00000000..8b213e66 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/JpegPresets.pyi @@ -0,0 +1,3 @@ +from typing import Any + +presets: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/McIdasImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/McIdasImagePlugin.pyi new file mode 100644 index 00000000..6e7be991 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/McIdasImagePlugin.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class McIdasImageFile(ImageFile): + format: ClassVar[Literal["MCIDAS"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MicImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MicImagePlugin.pyi new file mode 100644 index 00000000..839f91cd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MicImagePlugin.pyi @@ -0,0 +1,18 @@ +from typing import Any, ClassVar +from typing_extensions import Literal, TypeAlias + +from .TiffImagePlugin import TiffImageFile + +_OleFileIO: TypeAlias = Any # olefile.OleFileIO +_OleStream: TypeAlias = Any # olefile.OleStream + +class MicImageFile(TiffImageFile): + ole: _OleFileIO + format: ClassVar[Literal["MIC"]] + format_description: ClassVar[str] + fp: _OleStream + frame: int | None + images: list[list[str]] + is_animated: bool + def seek(self, frame: int) -> None: ... + def tell(self) -> int | None: ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MpegImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MpegImagePlugin.pyi new file mode 100644 index 00000000..0f01f2b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MpegImagePlugin.pyi @@ -0,0 +1,18 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class BitStream: + fp: Any + bits: int + bitbuffer: int + def __init__(self, fp) -> None: ... + def next(self): ... + def peek(self, bits): ... + def skip(self, bits) -> None: ... + def read(self, bits): ... + +class MpegImageFile(ImageFile): + format: ClassVar[Literal["MPEG"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MpoImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MpoImagePlugin.pyi new file mode 100644 index 00000000..14761e5d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MpoImagePlugin.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Literal + +from .JpegImagePlugin import JpegImageFile + +class MpoImageFile(JpegImageFile): + format: ClassVar[Literal["MPO"]] + def load_seek(self, pos) -> None: ... 
+ fp: Any + offset: Any + tile: Any + def seek(self, frame) -> None: ... + def tell(self): ... + @staticmethod + def adopt(jpeg_instance, mpheader: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MspImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MspImagePlugin.pyi new file mode 100644 index 00000000..bc3f25db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/MspImagePlugin.pyi @@ -0,0 +1,11 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile, PyDecoder + +class MspImageFile(ImageFile): + format: ClassVar[Literal["MSP"]] + format_description: ClassVar[str] + +class MspDecoder(PyDecoder): + def decode(self, buffer): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PSDraw.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PSDraw.pyi new file mode 100644 index 00000000..d25d1cba --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PSDraw.pyi @@ -0,0 +1,19 @@ +from _typeshed import SupportsWrite, Unused + +from .Image import Image + +class PSDraw: + fp: SupportsWrite[bytes] + def __init__(self, fp: SupportsWrite[bytes] | None = ...) -> None: ... + isofont: dict[bytes, int] + def begin_document(self, id: Unused = None) -> None: ... + def end_document(self) -> None: ... + def setfont(self, font: str, size: int) -> None: ... + def line(self, xy0: tuple[int, int], xy1: tuple[int, int]) -> None: ... + def rectangle(self, box: tuple[int, int, int, int]) -> None: ... + def text(self, xy: tuple[int, int], text: str) -> None: ... + def image(self, box: tuple[int, int, int, int], im: Image, dpi: float | None = ...) -> None: ... + +EDROFF_PS: bytes +VDI_PS: bytes +ERROR_PS: bytes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PaletteFile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PaletteFile.pyi new file mode 100644 index 00000000..7f478448 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PaletteFile.pyi @@ -0,0 +1,7 @@ +from typing import Any + +class PaletteFile: + rawmode: str + palette: Any + def __init__(self, fp) -> None: ... + def getpalette(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PalmImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PalmImagePlugin.pyi new file mode 100644 index 00000000..1cf530ce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PalmImagePlugin.pyi @@ -0,0 +1,5 @@ +from typing import Any + +def build_prototype_image(): ... 
+ +Palm8BitColormapImage: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PcdImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PcdImagePlugin.pyi new file mode 100644 index 00000000..a5ea3dd0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PcdImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class PcdImageFile(ImageFile): + format: ClassVar[Literal["PCD"]] + format_description: ClassVar[str] + im: Any + def load_end(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PcfFontFile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PcfFontFile.pyi new file mode 100644 index 00000000..97836cb7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PcfFontFile.pyi @@ -0,0 +1,25 @@ +from typing import Any + +from .FontFile import FontFile + +PCF_MAGIC: int +PCF_PROPERTIES: Any +PCF_ACCELERATORS: Any +PCF_METRICS: Any +PCF_BITMAPS: Any +PCF_INK_METRICS: Any +PCF_BDF_ENCODINGS: Any +PCF_SWIDTHS: Any +PCF_GLYPH_NAMES: Any +PCF_BDF_ACCELERATORS: Any +BYTES_PER_ROW: Any + +def sz(s, o): ... + +class PcfFontFile(FontFile): + name: str + charset_encoding: Any + toc: Any + fp: Any + info: Any + def __init__(self, fp, charset_encoding: str = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PcxImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PcxImagePlugin.pyi new file mode 100644 index 00000000..b8916662 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PcxImagePlugin.pyi @@ -0,0 +1,12 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +logger: Any + +class PcxImageFile(ImageFile): + format: ClassVar[Literal["PCX", "DCX"]] + format_description: ClassVar[str] + +SAVE: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PdfImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PdfImagePlugin.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PdfParser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PdfParser.pyi new file mode 100644 index 00000000..f734e216 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PdfParser.pyi @@ -0,0 +1,164 @@ +import collections +from _typeshed import Incomplete +from typing import Any + +def encode_text(s: str) -> bytes: ... + +PDFDocEncoding: dict[int, str] + +def decode_text(b: bytes) -> str: ... + +class PdfFormatError(RuntimeError): ... + +def check_format_condition(condition, error_message) -> None: ... + +class IndirectReference: + def __bytes__(self) -> bytes: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... + +class IndirectObjectDef(IndirectReference): ... 
+ +class XrefTable: + existing_entries: Any + new_entries: Any + deleted_entries: Any + reading_finished: bool + def __init__(self) -> None: ... + def __setitem__(self, key, value) -> None: ... + def __getitem__(self, key): ... + def __delitem__(self, key) -> None: ... + def __contains__(self, key): ... + def __len__(self) -> int: ... + def keys(self): ... + def write(self, f): ... + +class PdfName: + name: Any + def __init__(self, name) -> None: ... + def name_as_str(self): ... + def __eq__(self, other): ... + def __hash__(self) -> int: ... + @classmethod + def from_pdf_stream(cls, data): ... + allowed_chars: Any + def __bytes__(self) -> bytes: ... + +class PdfArray(list[Any]): + def __bytes__(self) -> bytes: ... + +class PdfDict(collections.UserDict[bytes, Any]): + def __setattr__(self, key: str, value) -> None: ... + def __getattr__(self, key: str): ... + def __bytes__(self) -> bytes: ... + +class PdfBinary: + data: Any + def __init__(self, data) -> None: ... + def __bytes__(self) -> bytes: ... + +class PdfStream: + dictionary: Any + buf: Any + def __init__(self, dictionary, buf) -> None: ... + def decode(self): ... + +def pdf_repr(x: Any) -> bytes: ... + +class PdfParser: + filename: Any + buf: Any + f: Any + start_offset: Any + should_close_buf: bool + should_close_file: bool + cached_objects: Any + file_size_total: int + root: Any + root_ref: Any + info: Any + info_ref: Any + page_tree_root: Any + pages: Any + orig_pages: Any + pages_ref: Any + last_xref_section_offset: Any + trailer_dict: Any + xref_table: Any + def __init__( + self, + filename: Incomplete | None = ..., + f: Incomplete | None = ..., + buf: Incomplete | None = ..., + start_offset: int = ..., + mode: str = ..., + ) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_value, traceback): ... + def start_writing(self) -> None: ... + def close_buf(self) -> None: ... + def close(self) -> None: ... + def seek_end(self) -> None: ... + def write_header(self) -> None: ... + def write_comment(self, s) -> None: ... + def write_catalog(self): ... + def rewrite_pages(self) -> None: ... + def write_xref_and_trailer(self, new_root_ref: Incomplete | None = ...) -> None: ... + def write_page(self, ref, *objs, **dict_obj): ... + def write_obj(self, ref, *objs, **dict_obj): ... + def del_root(self) -> None: ... + @staticmethod + def get_buf_from_file(f): ... + file_size_this: Any + def read_pdf_info(self) -> None: ... + def next_object_id(self, offset: Incomplete | None = ...): ... + delimiter: bytes + delimiter_or_ws: bytes + whitespace: bytes + whitespace_or_hex: bytes + whitespace_optional: Any + whitespace_mandatory: Any + whitespace_optional_no_nl: bytes + newline_only: bytes + newline: Any + re_trailer_end: Any + re_trailer_prev: Any + def read_trailer(self) -> None: ... + def read_prev_trailer(self, xref_section_offset) -> None: ... + re_whitespace_optional: Any + re_name: Any + re_dict_start: Any + re_dict_end: Any + @classmethod + def interpret_trailer(cls, trailer_data): ... + re_hashes_in_name: Any + @classmethod + def interpret_name(cls, raw, as_text: bool = ...): ... + re_null: Any + re_true: Any + re_false: Any + re_int: Any + re_real: Any + re_array_start: Any + re_array_end: Any + re_string_hex: Any + re_string_lit: Any + re_indirect_reference: Any + re_indirect_def_start: Any + re_indirect_def_end: Any + re_comment: Any + re_stream_start: Any + re_stream_end: Any + @classmethod + def get_value(cls, data, offset, expect_indirect: Incomplete | None = ..., max_nesting: int = ...): ... 
+ re_lit_str_token: Any + escaped_chars: Any + @classmethod + def get_literal_string(cls, data, offset): ... + re_xref_section_start: Any + re_xref_subsection_start: Any + re_xref_entry: Any + def read_xref_table(self, xref_section_offset): ... + def read_indirect(self, ref, max_nesting: int = ...): ... + def linearize_page_tree(self, node: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PixarImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PixarImagePlugin.pyi new file mode 100644 index 00000000..8c9bcaf2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PixarImagePlugin.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class PixarImageFile(ImageFile): + format: ClassVar[Literal["PIXAR"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PngImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PngImagePlugin.pyi new file mode 100644 index 00000000..54d017bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PngImagePlugin.pyi @@ -0,0 +1,124 @@ +from _typeshed import Incomplete +from enum import IntEnum +from typing import Any, ClassVar +from typing_extensions import Literal + +from ._binary import o8 as o8 +from .ImageFile import ImageFile + +logger: Any +is_cid: Any +MAX_TEXT_CHUNK: Any +MAX_TEXT_MEMORY: Any + +class Disposal(IntEnum): + OP_NONE: int + OP_BACKGROUND: int + OP_PREVIOUS: int + +APNG_DISPOSE_OP_NONE: Literal[Disposal.OP_NONE] +APNG_DISPOSE_OP_BACKGROUND: Literal[Disposal.OP_BACKGROUND] +APNG_DISPOSE_OP_PREVIOUS: Literal[Disposal.OP_PREVIOUS] + +class Blend(IntEnum): + OP_SOURCE: int + OP_OVER: int + +APNG_BLEND_OP_SOURCE: Literal[Blend.OP_SOURCE] +APNG_BLEND_OP_OVER: Literal[Blend.OP_OVER] + +class ChunkStream: + fp: Any + queue: Any + def __init__(self, fp) -> None: ... + def read(self): ... + def __enter__(self): ... + def __exit__(self, *args) -> None: ... + def close(self) -> None: ... + def push(self, cid, pos, length) -> None: ... + def call(self, cid, pos, length): ... + def crc(self, cid, data) -> None: ... + def crc_skip(self, cid, data) -> None: ... + def verify(self, endchunk: bytes = ...): ... + +class iTXt(str): + lang: Any + tkey: Any + @staticmethod + def __new__(cls, text, lang: Incomplete | None = ..., tkey: Incomplete | None = ...): ... + +class PngInfo: + chunks: Any + def __init__(self) -> None: ... + def add(self, cid, data, after_idat: bool = ...) -> None: ... + def add_itxt(self, key, value, lang: str = ..., tkey: str = ..., zip: bool = ...) -> None: ... + def add_text(self, key, value, zip: bool = ...): ... + +class PngStream(ChunkStream): + im_info: Any + im_text: Any + im_size: Any + im_mode: Any + im_tile: Any + im_palette: Any + im_custom_mimetype: Any + im_n_frames: Any + rewind_state: Any + text_memory: int + def __init__(self, fp) -> None: ... + def check_text_memory(self, chunklen) -> None: ... + def save_rewind(self) -> None: ... + def rewind(self) -> None: ... + def chunk_iCCP(self, pos, length): ... + def chunk_IHDR(self, pos, length): ... + im_idat: Any + def chunk_IDAT(self, pos, length) -> None: ... + def chunk_IEND(self, pos, length) -> None: ... + def chunk_PLTE(self, pos, length): ... 
+ def chunk_tRNS(self, pos, length): ... + def chunk_gAMA(self, pos, length): ... + def chunk_cHRM(self, pos, length): ... + def chunk_sRGB(self, pos, length): ... + def chunk_pHYs(self, pos, length): ... + def chunk_tEXt(self, pos, length): ... + def chunk_zTXt(self, pos, length): ... + def chunk_iTXt(self, pos, length): ... + def chunk_eXIf(self, pos, length): ... + def chunk_acTL(self, pos, length): ... + def chunk_fcTL(self, pos, length): ... + def chunk_fdAT(self, pos, length): ... + +class PngImageFile(ImageFile): + format: ClassVar[Literal["PNG"]] + format_description: ClassVar[str] + @property + def text(self): ... + fp: Any + def verify(self) -> None: ... + def seek(self, frame) -> None: ... + def tell(self): ... + decoderconfig: Any + def load_prepare(self) -> None: ... + def load_read(self, read_bytes): ... + png: Any + im: Any + pyaccess: Any + def load_end(self) -> None: ... + def getexif(self): ... + +def putchunk(fp, cid, *data) -> None: ... + +class _idat: + fp: Any + chunk: Any + def __init__(self, fp, chunk) -> None: ... + def write(self, data) -> None: ... + +class _fdat: + fp: Any + chunk: Any + seq_num: Any + def __init__(self, fp, chunk, seq_num) -> None: ... + def write(self, data) -> None: ... + +def getchunks(im, **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PpmImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PpmImagePlugin.pyi new file mode 100644 index 00000000..797bbd79 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PpmImagePlugin.pyi @@ -0,0 +1,11 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +b_whitespace: bytes +MODES: Any + +class PpmImageFile(ImageFile): + format: ClassVar[Literal["PPM"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PsdImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PsdImagePlugin.pyi new file mode 100644 index 00000000..0d383501 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PsdImagePlugin.pyi @@ -0,0 +1,18 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +MODES: Any + +class PsdImageFile(ImageFile): + format: ClassVar[Literal["PSD"]] + format_description: ClassVar[str] + mode: Any + tile: Any + frame: Any + fp: Any + def seek(self, layer): ... + def tell(self): ... + im: Any + def load_prepare(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PyAccess.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PyAccess.pyi new file mode 100644 index 00000000..3d923127 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/PyAccess.pyi @@ -0,0 +1,62 @@ +from logging import Logger +from typing import Any + +ffi: Any +logger: Logger + +class PyAccess: + readonly: Any + image8: Any + image32: Any + image: Any + def __init__(self, img, readonly: bool = ...) -> None: ... + def __setitem__(self, xy, color) -> None: ... + def __getitem__(self, xy): ... + putpixel: Any + getpixel: Any + def check_xy(self, xy): ... + +class _PyAccess32_2(PyAccess): + def get_pixel(self, x, y): ... 
+ def set_pixel(self, x, y, color) -> None: ... + +class _PyAccess32_3(PyAccess): + def get_pixel(self, x, y): ... + def set_pixel(self, x, y, color) -> None: ... + +class _PyAccess32_4(PyAccess): + def get_pixel(self, x, y): ... + def set_pixel(self, x, y, color) -> None: ... + +class _PyAccess8(PyAccess): + def get_pixel(self, x, y): ... + def set_pixel(self, x, y, color) -> None: ... + +class _PyAccessI16_N(PyAccess): + def get_pixel(self, x, y): ... + def set_pixel(self, x, y, color) -> None: ... + +class _PyAccessI16_L(PyAccess): + def get_pixel(self, x, y): ... + def set_pixel(self, x, y, color) -> None: ... + +class _PyAccessI16_B(PyAccess): + def get_pixel(self, x, y): ... + def set_pixel(self, x, y, color) -> None: ... + +class _PyAccessI32_N(PyAccess): + def get_pixel(self, x, y): ... + def set_pixel(self, x, y, color) -> None: ... + +class _PyAccessI32_Swap(PyAccess): + def reverse(self, i): ... + def get_pixel(self, x, y): ... + def set_pixel(self, x, y, color) -> None: ... + +class _PyAccessF(PyAccess): + def get_pixel(self, x, y): ... + def set_pixel(self, x, y, color) -> None: ... + +mode_map: Any + +def new(img, readonly: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/SgiImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/SgiImagePlugin.pyi new file mode 100644 index 00000000..e7f4ff96 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/SgiImagePlugin.pyi @@ -0,0 +1,13 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile, PyDecoder + +MODES: Any + +class SgiImageFile(ImageFile): + format: ClassVar[Literal["SGI"]] + format_description: ClassVar[str] + +class SGI16Decoder(PyDecoder): + def decode(self, buffer): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/SpiderImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/SpiderImagePlugin.pyi new file mode 100644 index 00000000..2c115b34 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/SpiderImagePlugin.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +def isInt(f: object) -> Literal[0, 1]: ... + +iforms: Any + +def isSpiderHeader(t): ... +def isSpiderImage(filename): ... + +class SpiderImageFile(ImageFile): + format: ClassVar[Literal["SPIDER"]] + format_description: ClassVar[str] + @property + def n_frames(self): ... + @property + def is_animated(self): ... + def tell(self): ... + stkoffset: Any + fp: Any + def seek(self, frame) -> None: ... + def convert2byte(self, depth: int = ...): ... + def tkPhotoImage(self): ... + +def loadImageSeries(filelist: Incomplete | None = ...): ... +def makeSpiderHeader(im): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/SunImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/SunImagePlugin.pyi new file mode 100644 index 00000000..27ac28a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/SunImagePlugin.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +class SunImageFile(ImageFile): + format: ClassVar[Literal["SUN"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TarIO.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TarIO.pyi new file mode 100644 index 00000000..54748270 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TarIO.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from .ContainerIO import ContainerIO + +class TarIO(ContainerIO): + fh: Any + def __init__(self, tarfile, file) -> None: ... + def __enter__(self): ... + def __exit__(self, *args) -> None: ... + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TgaImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TgaImagePlugin.pyi new file mode 100644 index 00000000..5022107b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TgaImagePlugin.pyi @@ -0,0 +1,12 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +MODES: Any + +class TgaImageFile(ImageFile): + format: ClassVar[Literal["TGA"]] + format_description: ClassVar[str] + +SAVE: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TiffImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TiffImagePlugin.pyi new file mode 100644 index 00000000..9c4851dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TiffImagePlugin.pyi @@ -0,0 +1,194 @@ +from _typeshed import Incomplete +from collections.abc import MutableMapping +from numbers import Rational +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +logger: Any +READ_LIBTIFF: bool +WRITE_LIBTIFF: bool +IFD_LEGACY_API: bool +II: bytes +MM: bytes +IMAGEWIDTH: int +IMAGELENGTH: int +BITSPERSAMPLE: int +COMPRESSION: int +PHOTOMETRIC_INTERPRETATION: int +FILLORDER: int +IMAGEDESCRIPTION: int +STRIPOFFSETS: int +SAMPLESPERPIXEL: int +ROWSPERSTRIP: int +STRIPBYTECOUNTS: int +X_RESOLUTION: int +Y_RESOLUTION: int +PLANAR_CONFIGURATION: int +RESOLUTION_UNIT: int +TRANSFERFUNCTION: int +SOFTWARE: int +DATE_TIME: int +ARTIST: int +PREDICTOR: int +COLORMAP: int +TILEOFFSETS: int +SUBIFD: int +EXTRASAMPLES: int +SAMPLEFORMAT: int +JPEGTABLES: int +REFERENCEBLACKWHITE: int +COPYRIGHT: int +IPTC_NAA_CHUNK: int +PHOTOSHOP_CHUNK: int +ICCPROFILE: int +EXIFIFD: int +XMP: int +JPEGQUALITY: int +IMAGEJ_META_DATA_BYTE_COUNTS: int +IMAGEJ_META_DATA: int +COMPRESSION_INFO: Any +COMPRESSION_INFO_REV: Any +OPEN_INFO: Any +PREFIXES: Any + +class IFDRational(Rational): + def __init__(self, value, denominator: int = ...) -> None: ... + @property + def numerator(a): ... 
+ @property + def denominator(a): ... + def limit_rational(self, max_denominator): ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... + __add__: Any + __radd__: Any + __sub__: Any + __rsub__: Any + __mul__: Any + __rmul__: Any + __truediv__: Any + __rtruediv__: Any + __floordiv__: Any + __rfloordiv__: Any + __mod__: Any + __rmod__: Any + __pow__: Any + __rpow__: Any + __pos__: Any + __neg__: Any + __abs__: Any + __trunc__: Any + __lt__: Any + __gt__: Any + __le__: Any + __ge__: Any + __bool__: Any + __ceil__: Any + __floor__: Any + __round__: Any + +class ImageFileDirectory_v2(MutableMapping[int, Any]): + group: int | None + tagtype: dict[int, int] + def __init__(self, ifh: bytes = ..., prefix: bytes | None = ..., group: int | None = ...) -> None: ... + @property + def prefix(self) -> bytes: ... + @property + def offset(self) -> int | None: ... + @property + def legacy_api(self) -> bool: ... + def reset(self) -> None: ... + def named(self): ... + def __len__(self) -> int: ... + def __getitem__(self, tag): ... + def __contains__(self, tag): ... + def __setitem__(self, tag, value) -> None: ... + def __delitem__(self, tag) -> None: ... + def __iter__(self): ... + def load_byte(self, data, legacy_api: bool = ...): ... + def write_byte(self, data): ... + def load_string(self, data, legacy_api: bool = ...): ... + def write_string(self, value: int | str | bytes) -> bytes: ... + def load_rational(self, data, legacy_api: bool = ...): ... + def write_rational(self, *values): ... + def load_undefined(self, data, legacy_api: bool = ...): ... + def write_undefined(self, value): ... + def load_signed_rational(self, data, legacy_api: bool = ...): ... + def write_signed_rational(self, *values): ... + def load(self, fp) -> None: ... + def tobytes(self, offset: int = ...): ... + def save(self, fp): ... + +class ImageFileDirectory_v1(ImageFileDirectory_v2): + def __init__(self, *args, **kwargs) -> None: ... + @property + def tags(self): ... + @property + def tagdata(self): ... + tagtype: dict[int, int] + @classmethod + def from_v2(cls, original): ... + def to_v2(self): ... + def __contains__(self, tag): ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __setitem__(self, tag, value) -> None: ... + def __getitem__(self, tag): ... + +ImageFileDirectory = ImageFileDirectory_v1 + +class TiffImageFile(ImageFile): + format: ClassVar[Literal["TIFF", "MIC"]] + format_description: ClassVar[str] + tag_v2: Any + tag: Any + def __init__(self, fp: Incomplete | None = ..., filename: Incomplete | None = ...) -> None: ... + @property + def n_frames(self): ... + im: Any + def seek(self, frame) -> None: ... + def tell(self): ... + def load(self): ... + def load_end(self) -> None: ... + +SAVE_INFO: Any + +class AppendingTiffWriter: + fieldSizes: Any + Tags: Any + f: Any + close_fp: bool + name: Any + beginning: Any + def __init__(self, fn, new: bool = ...) -> None: ... + whereToWriteNewIFDOffset: Any + offsetOfNewPage: int + IIMM: Any + isFirst: bool + def setup(self) -> None: ... + def finalize(self) -> None: ... + def newFrame(self) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_value, traceback): ... + def tell(self): ... + def seek(self, offset, whence=...): ... + def goToEnd(self) -> None: ... + endian: Any + longFmt: Any + shortFmt: Any + tagFormat: Any + def setEndian(self, endian) -> None: ... + def skipIFDs(self) -> None: ... + def write(self, data): ... + def readShort(self): ... + def readLong(self): ... 
+ def rewriteLastShortToLong(self, value) -> None: ... + def rewriteLastShort(self, value) -> None: ... + def rewriteLastLong(self, value) -> None: ... + def writeShort(self, value) -> None: ... + def writeLong(self, value) -> None: ... + def close(self) -> None: ... + def fixIFD(self) -> None: ... + def fixOffsets(self, count, isShort: bool = ..., isLong: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TiffTags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TiffTags.pyi new file mode 100644 index 00000000..f546f604 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/TiffTags.pyi @@ -0,0 +1,46 @@ +from _typeshed import Incomplete +from typing import Any, NamedTuple +from typing_extensions import Literal, TypeAlias + +class _TagInfo(NamedTuple): + value: Any + name: str + type: _TagType + length: int + enum: dict[str, int] + +class TagInfo(_TagInfo): + def __new__( + cls, + value: Incomplete | None = ..., + name: str = ..., + type: _TagType | None = ..., + length: int | None = ..., + enum: dict[str, int] | None = ..., + ): ... + def cvt_enum(self, value): ... + +def lookup(tag: int, group: int | None = ...) -> _TagInfo: ... + +BYTE: Literal[1] +ASCII: Literal[2] +SHORT: Literal[3] +LONG: Literal[4] +RATIONAL: Literal[5] +SIGNED_BYTE: Literal[6] +UNDEFINED: Literal[7] +SIGNED_SHORT: Literal[8] +SIGNED_LONG: Literal[9] +SIGNED_RATIONAL: Literal[10] +FLOAT: Literal[11] +DOUBLE: Literal[12] +IFD: Literal[13] + +_TagType: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] +_TagTuple: TypeAlias = tuple[str, _TagType, int] | tuple[str, _TagInfo, int, dict[str, int]] + +TAGS_V2: dict[int, _TagTuple] +TAGS_V2_GROUPS: dict[int, dict[int, _TagTuple]] +TAGS: dict[int, str] +TYPES: dict[int, str] +LIBTIFF_CORE: set[int] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/WalImageFile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/WalImageFile.pyi new file mode 100644 index 00000000..24d1cb5c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/WalImageFile.pyi @@ -0,0 +1,13 @@ +from typing import ClassVar +from typing_extensions import Literal + +from . import ImageFile + +class WalImageFile(ImageFile.ImageFile): + format: ClassVar[Literal["WAL"]] + format_description: ClassVar[str] + def load(self) -> None: ... + +def open(filename): ... + +quake2palette: bytes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/WebPImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/WebPImagePlugin.pyi new file mode 100644 index 00000000..1b3d9771 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/WebPImagePlugin.pyi @@ -0,0 +1,17 @@ +from typing import Any, ClassVar +from typing_extensions import Literal, TypeAlias + +from .ImageFile import ImageFile + +SUPPORTED: bool +_XMP_Tags: TypeAlias = dict[str, str | _XMP_Tags] + +class WebPImageFile(ImageFile): + format: ClassVar[Literal["WEBP"]] + format_description: ClassVar[str] + def getxmp(self) -> _XMP_Tags: ... + def seek(self, frame) -> None: ... + fp: Any + tile: Any + def load(self): ... + def tell(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/WmfImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/WmfImagePlugin.pyi new file mode 100644 index 00000000..0834c0b2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/WmfImagePlugin.pyi @@ -0,0 +1,19 @@ +import sys +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import StubImageFile + +def register_handler(handler) -> None: ... + +if sys.platform == "win32": + class WmfHandler: + bbox: Any + def open(self, im) -> None: ... + def load(self, im): ... + +class WmfStubImageFile(StubImageFile): + format: ClassVar[Literal["WMF"]] + format_description: ClassVar[str] + def load(self, dpi: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/XVThumbImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/XVThumbImagePlugin.pyi new file mode 100644 index 00000000..ebfc7d98 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/XVThumbImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +PALETTE: bytes + +class XVThumbImageFile(ImageFile): + format: ClassVar[Literal["XVThumb"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/XbmImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/XbmImagePlugin.pyi new file mode 100644 index 00000000..588aaac2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/XbmImagePlugin.pyi @@ -0,0 +1,10 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +xbm_head: Any + +class XbmImageFile(ImageFile): + format: ClassVar[Literal["XBM"]] + format_description: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/XpmImagePlugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/XpmImagePlugin.pyi new file mode 100644 index 00000000..d0df39a6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/XpmImagePlugin.pyi @@ -0,0 +1,11 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +from .ImageFile import ImageFile + +xpm_head: Any + +class XpmImageFile(ImageFile): + format: ClassVar[Literal["XPM"]] + format_description: ClassVar[str] + def load_read(self, bytes): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/__init__.pyi new file mode 100644 index 00000000..73d7019d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/__init__.pyi @@ -0,0 +1,3 @@ +__version__: str + +class UnidentifiedImageError(OSError): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_binary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_binary.pyi new file mode 100644 index 00000000..4db3be89 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_binary.pyi @@ -0,0 +1,12 @@ +def i8(c): ... +def o8(i): ... +def i16le(c, o: int = ...): ... +def si16le(c, o: int = ...): ... +def i32le(c, o: int = ...): ... +def si32le(c, o: int = ...): ... +def i16be(c, o: int = ...): ... +def i32be(c, o: int = ...): ... +def o16le(i): ... +def o32le(i): ... +def o16be(i): ... +def o32be(i): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_imaging.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_imaging.pyi new file mode 100644 index 00000000..b9435b8f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_imaging.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from collections.abc import Sequence +from typing_extensions import Literal + +DEFAULT_STRATEGY: Literal[0] +FILTERED: Literal[1] +HUFFMAN_ONLY: Literal[2] +RLE: Literal[3] +FIXED: Literal[4] + +class _Path: + def __getattr__(self, item: str) -> Incomplete: ... + +def path(__x: Sequence[tuple[float, float]] | Sequence[float]) -> _Path: ... +def __getattr__(__name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_tkinter_finder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_tkinter_finder.pyi new file mode 100644 index 00000000..c2695c01 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_tkinter_finder.pyi @@ -0,0 +1,2 @@ +TKINTER_LIB: str +tk_version: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_util.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_version.pyi new file mode 100644 index 00000000..bda5b5a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/_version.pyi @@ -0,0 +1 @@ +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/features.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/features.pyi new file mode 100644 index 00000000..a6deedd4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pillow/PIL/features.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from typing import Any + +modules: Any + +def check_module(feature): ... +def version_module(feature): ... +def get_supported_modules(): ... + +codecs: Any + +def check_codec(feature): ... +def version_codec(feature): ... +def get_supported_codecs(): ... + +features: Any + +def check_feature(feature): ... +def version_feature(feature): ... +def get_supported_features(): ... +def check(feature): ... +def version(feature): ... +def get_supported(): ... 
+def pilinfo(out: Incomplete | None = ..., supported_formats: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyAutoGUI/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyAutoGUI/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..a511c3b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyAutoGUI/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +pyautogui.__main__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyAutoGUI/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyAutoGUI/METADATA.toml new file mode 100644 index 00000000..db0b3838 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyAutoGUI/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.9.*" +requires = ["types-Pillow", "types-PyScreeze"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyAutoGUI/pyautogui/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyAutoGUI/pyautogui/__init__.pyi new file mode 100644 index 00000000..771f0cd2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyAutoGUI/pyautogui/__init__.pyi @@ -0,0 +1,244 @@ +import contextlib +from collections.abc import Callable, Iterable, Sequence +from datetime import datetime +from typing import NamedTuple, SupportsInt, TypeVar +from typing_extensions import Final, ParamSpec, SupportsIndex, TypeAlias + +from pyscreeze import ( + center as center, + grab as grab, + locate as locate, + locateAll as locateAll, + locateAllOnScreen as locateAllOnScreen, + locateCenterOnScreen as locateCenterOnScreen, + locateOnScreen as locateOnScreen, + locateOnWindow as locateOnWindow, + pixel as pixel, + pixelMatchesColor as pixelMatchesColor, + screenshot as screenshot, +) + +_P = ParamSpec("_P") +_R = TypeVar("_R") +_NormalizeableXArg: TypeAlias = str | SupportsInt | Sequence[SupportsInt] + +# Constants +KEY_NAMES: list[str] +KEYBOARD_KEYS: list[str] +LEFT: Final = "left" +MIDDLE: Final = "middle" +RIGHT: Final = "right" +PRIMARY: Final = "primary" +SECONDARY: Final = "secondary" +G_LOG_SCREENSHOTS_FILENAMES: list[str] +# Implementation details +QWERTY: Final[str] +QWERTZ: Final[str] +MINIMUM_SLEEP: Final[float] + +# These are meant to be overridable +LOG_SCREENSHOTS: bool +LOG_SCREENSHOTS_LIMIT: int | None +# https://pyautogui.readthedocs.io/en/latest/index.html#fail-safes +FAILSAFE: bool +PAUSE: float +DARWIN_CATCH_UP_TIME: float +FAILSAFE_POINTS: list[tuple[int, int]] +# https://pyautogui.readthedocs.io/en/latest/mouse.htmln#mouse-movement +MINIMUM_DURATION: float + +class PyAutoGUIException(Exception): ... +class FailSafeException(PyAutoGUIException): ... +class ImageNotFoundException(PyAutoGUIException): ... + +def raisePyAutoGUIImageNotFoundException(wrappedFunction: Callable[_P, _R]) -> Callable[_P, _R]: ... +def mouseInfo() -> None: ... +def useImageNotFoundException(value: bool | None = None) -> None: ... +def isShiftCharacter(character: str) -> bool: ... + +class Point(NamedTuple): + x: float + y: float + +class Size(NamedTuple): + width: int + height: int + +def getPointOnLine(x1: float, y1: float, x2: float, y2: float, n: float) -> tuple[float, float]: ... +def linear(n: float) -> float: ... +def position(x: int | None = None, y: int | None = None) -> Point: ... 
+def size() -> Size: ... +def onScreen(x: _NormalizeableXArg | None, y: SupportsInt | None = None) -> bool: ... +def mouseDown( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + # Docstring says `button` can also be `int`, but `.lower()` is called unconditionally in `_normalizeButton()` + button: str = "primary", + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def mouseUp( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + # Docstring says `button` can also be `int`, but `.lower()` is called unconditionally in `_normalizeButton()` + button: str = "primary", + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def click( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + clicks: SupportsIndex = 1, + interval: float = 0.0, + # Docstring says `button` can also be `int`, but `.lower()` is called unconditionally in `_normalizeButton()` + button: str = "primary", + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def leftClick( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + interval: float = 0.0, + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def rightClick( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + interval: float = 0.0, + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def middleClick( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + interval: float = 0.0, + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def doubleClick( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + interval: float = 0.0, + # Docstring says `button` can also be `int`, but `.lower()` is called unconditionally in `_normalizeButton()` + button: str = "left", + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def tripleClick( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + interval: float = 0.0, + # Docstring says `button` can also be `int`, but `.lower()` is called unconditionally in `_normalizeButton()` + button: str = "left", + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def scroll( + clicks: float, + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def hscroll( + clicks: float, + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def vscroll( + clicks: float, + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... 
+def moveTo( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool = False, + _pause: bool = True, +) -> None: ... +def moveRel( + xOffset: _NormalizeableXArg | None = None, + yOffset: SupportsInt | None = None, + duration: float = 0.0, + tween: Callable[[float], float] = ..., + logScreenshot: bool = False, + _pause: bool = True, +) -> None: ... + +move = moveRel + +def dragTo( + x: _NormalizeableXArg | None = None, + y: SupportsInt | None = None, + duration: float = 0.0, + tween: Callable[[float], float] = ..., + # Docstring says `button` can also be `int`, but `.lower()` is called unconditionally in `_normalizeButton()` + button: str = "primary", + logScreenshot: bool | None = None, + _pause: bool = True, + mouseDownUp: bool = True, +) -> None: ... +def dragRel( + xOffset: _NormalizeableXArg | None = 0, + yOffset: SupportsInt | None = 0, + duration: float = 0.0, + tween: Callable[[float], float] = ..., + # Docstring says `button` can also be `int`, but `.lower()` is called unconditionally in `_normalizeButton()` + button: str = "primary", + logScreenshot: bool | None = None, + _pause: bool = True, + mouseDownUp: bool = True, +) -> None: ... + +drag = dragRel + +def isValidKey(key: str) -> bool: ... +def keyDown(key: str, logScreenshot: bool | None = None, _pause: bool = True) -> None: ... +def keyUp(key: str, logScreenshot: bool | None = None, _pause: bool = True) -> None: ... +def press( + keys: str | Iterable[str], + presses: SupportsIndex = 1, + interval: float = 0.0, + logScreenshot: bool | None = None, + _pause: bool = True, +) -> None: ... +def hold( + keys: str | Iterable[str], logScreenshot: bool | None = None, _pause: bool = True +) -> contextlib._GeneratorContextManager[None]: ... +def typewrite( + message: str | Sequence[str], interval: float = 0.0, logScreenshot: bool | None = None, _pause: bool = True +) -> None: ... + +write = typewrite + +def hotkey(*args: str, logScreenshot: bool | None = None, interval: float = 0.0) -> None: ... +def failSafeCheck() -> None: ... +def displayMousePosition(xOffset: float = 0, yOffset: float = 0) -> None: ... +def sleep(seconds: float) -> None: ... +def countdown(seconds: SupportsIndex) -> None: ... +def run(commandStr: str, _ssCount: Sequence[int] | None = None) -> None: ... +def printInfo(dontPrint: bool = False) -> str: ... +def getInfo() -> tuple[str, str, str, str, Size, datetime]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..f8fe0659 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/@tests/stubtest_allowlist.txt @@ -0,0 +1,12 @@ +pymysql.connections.byte2int +pymysql.connections.int2byte +pymysql.connections.lenenc_int +pymysql.connections.pack_int24 +pymysql.cursors.Cursor.__del__ +# DictCursorMixin changes method types of inherited classes, but doesn't contain much at runtime +pymysql.cursors.DictCursorMixin.__iter__ +pymysql.cursors.DictCursorMixin.fetch[a-z]* +pymysql.escape_dict +pymysql.escape_sequence +pymysql.escape_string +pymysql.util diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/METADATA.toml new file mode 100644 index 00000000..50057fed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/METADATA.toml @@ -0,0 +1,4 @@ +version = "1.0.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/__init__.pyi new file mode 100644 index 00000000..163ca68b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/__init__.pyi @@ -0,0 +1,56 @@ +from .connections import Connection as Connection +from .constants import FIELD_TYPE as FIELD_TYPE +from .converters import escape_dict as escape_dict, escape_sequence as escape_sequence, escape_string as escape_string +from .err import ( + DatabaseError as DatabaseError, + DataError as DataError, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + MySQLError as MySQLError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + ProgrammingError as ProgrammingError, + Warning as Warning, +) +from .times import ( + Date as Date, + DateFromTicks as DateFromTicks, + Time as Time, + TimeFromTicks as TimeFromTicks, + Timestamp as Timestamp, + TimestampFromTicks as TimestampFromTicks, +) + +threadsafety: int +apilevel: str +paramstyle: str + +class DBAPISet(frozenset[int]): + def __ne__(self, other) -> bool: ... + def __eq__(self, other) -> bool: ... + def __hash__(self) -> int: ... + +STRING: DBAPISet +BINARY: DBAPISet +NUMBER: DBAPISet +DATE: DBAPISet +TIME: DBAPISet +TIMESTAMP: DBAPISet +DATETIME: DBAPISet +ROWID: DBAPISet + +def Binary(x) -> bytes: ... +def get_client_info() -> str: ... + +__version__: str +version_info: tuple[int, int, int, str, int] +NULL: str + +# pymysql/__init__.py says "Connect = connect = Connection = connections.Connection" +Connect = Connection +connect = Connection + +def thread_safe() -> bool: ... +def install_as_MySQLdb() -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/charset.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/charset.pyi new file mode 100644 index 00000000..0a36fa69 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/charset.pyi @@ -0,0 +1,15 @@ +from typing import Any + +MBLENGTH: Any + +class Charset: + is_default: Any + def __init__(self, id, name, collation, is_default): ... + +class Charsets: + def add(self, c): ... + def by_id(self, id): ... + def by_name(self, name): ... + +def charset_by_name(name): ... +def charset_by_id(id): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/connections.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/connections.pyi new file mode 100644 index 00000000..e17b9180 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/connections.pyi @@ -0,0 +1,234 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from socket import socket as _socket +from typing import Any, AnyStr, Generic, TypeVar, overload +from typing_extensions import Self + +from .charset import charset_by_id as charset_by_id, charset_by_name as charset_by_name +from .constants import CLIENT as CLIENT, COMMAND as COMMAND, FIELD_TYPE as FIELD_TYPE, SERVER_STATUS as SERVER_STATUS +from .cursors import Cursor +from .util import byte2int as byte2int, int2byte as int2byte + +SSL_ENABLED: Any +DEFAULT_USER: Any +DEBUG: Any +DEFAULT_CHARSET: Any + +_C = TypeVar("_C", bound=Cursor) +_C2 = TypeVar("_C2", bound=Cursor) + +def dump_packet(data): ... +def pack_int24(n): ... +def lenenc_int(i: int) -> bytes: ... + +class MysqlPacket: + connection: Any + def __init__(self, data, encoding): ... + def get_all_data(self): ... + def read(self, size): ... + def read_all(self): ... + def advance(self, length): ... + def rewind(self, position: int = ...): ... + def get_bytes(self, position, length: int = ...): ... + def read_string(self) -> bytes: ... + def read_uint8(self) -> Any: ... + def read_uint16(self) -> Any: ... + def read_uint24(self) -> Any: ... + def read_uint32(self) -> Any: ... + def read_uint64(self) -> Any: ... + def read_length_encoded_integer(self) -> int: ... + def read_length_coded_string(self) -> bytes: ... + def read_struct(self, fmt: str) -> tuple[Any, ...]: ... + def is_ok_packet(self) -> bool: ... + def is_eof_packet(self) -> bool: ... + def is_auth_switch_request(self) -> bool: ... + def is_extra_auth_data(self) -> bool: ... + def is_resultset_packet(self) -> bool: ... + def is_load_local_packet(self) -> bool: ... + def is_error_packet(self) -> bool: ... + def check_error(self): ... + def raise_for_error(self) -> None: ... + def dump(self): ... + +class FieldDescriptorPacket(MysqlPacket): + def __init__(self, data, encoding): ... + def description(self): ... + def get_column_length(self): ... 
+ +class Connection(Generic[_C]): + ssl: Any + host: Any + port: Any + user: Any + password: Any + db: Any + unix_socket: Any + bind_address: Any + charset: Any + use_unicode: Any + client_flag: Any + cursorclass: Any + connect_timeout: Any + messages: Any + encoders: Any + decoders: Any + host_info: Any + sql_mode: Any + init_command: Any + max_allowed_packet: int + server_public_key: bytes + @overload + def __init__( + self: Connection[Cursor], # different between overloads + *, + host: str | None = ..., + user: Incomplete | None = ..., + password: str = ..., + database: Incomplete | None = ..., + port: int = ..., + unix_socket: Incomplete | None = ..., + charset: str = ..., + sql_mode: Incomplete | None = ..., + read_default_file: Incomplete | None = ..., + conv=..., + use_unicode: bool | None = ..., + client_flag: int = ..., + cursorclass: None = ..., # different between overloads + init_command: Incomplete | None = ..., + connect_timeout: int | None = ..., + ssl: Mapping[Any, Any] | None = ..., + ssl_ca=..., + ssl_cert=..., + ssl_disabled=..., + ssl_key=..., + ssl_verify_cert=..., + ssl_verify_identity=..., + read_default_group: Incomplete | None = ..., + compress: Incomplete | None = ..., + named_pipe: Incomplete | None = ..., + autocommit: bool | None = ..., + db: Incomplete | None = ..., + passwd: Incomplete | None = ..., + local_infile: Incomplete | None = ..., + max_allowed_packet: int = ..., + defer_connect: bool | None = ..., + auth_plugin_map: Mapping[Any, Any] | None = ..., + read_timeout: float | None = ..., + write_timeout: float | None = ..., + bind_address: Incomplete | None = ..., + binary_prefix: bool | None = ..., + program_name: Incomplete | None = ..., + server_public_key: bytes | None = ..., + ): ... + @overload + def __init__( + self: Connection[_C], # different between overloads + *, + host: str | None = ..., + user: Incomplete | None = ..., + password: str = ..., + database: Incomplete | None = ..., + port: int = ..., + unix_socket: Incomplete | None = ..., + charset: str = ..., + sql_mode: Incomplete | None = ..., + read_default_file: Incomplete | None = ..., + conv=..., + use_unicode: bool | None = ..., + client_flag: int = ..., + cursorclass: type[_C] = ..., # different between overloads + init_command: Incomplete | None = ..., + connect_timeout: int | None = ..., + ssl: Mapping[Any, Any] | None = ..., + ssl_ca=..., + ssl_cert=..., + ssl_disabled=..., + ssl_key=..., + ssl_verify_cert=..., + ssl_verify_identity=..., + read_default_group: Incomplete | None = ..., + compress: Incomplete | None = ..., + named_pipe: Incomplete | None = ..., + autocommit: bool | None = ..., + db: Incomplete | None = ..., + passwd: Incomplete | None = ..., + local_infile: Incomplete | None = ..., + max_allowed_packet: int = ..., + defer_connect: bool | None = ..., + auth_plugin_map: Mapping[Any, Any] | None = ..., + read_timeout: float | None = ..., + write_timeout: float | None = ..., + bind_address: Incomplete | None = ..., + binary_prefix: bool | None = ..., + program_name: Incomplete | None = ..., + server_public_key: bytes | None = ..., + ): ... + socket: Any + rfile: Any + wfile: Any + def close(self) -> None: ... + @property + def open(self) -> bool: ... + def autocommit(self, value) -> None: ... + def get_autocommit(self) -> bool: ... + def commit(self) -> None: ... + def begin(self) -> None: ... + def rollback(self) -> None: ... + def select_db(self, db) -> None: ... + def escape(self, obj, mapping: Mapping[Any, Any] | None = ...): ... + def literal(self, obj): ... 
+ def escape_string(self, s: AnyStr) -> AnyStr: ... + @overload + def cursor(self, cursor: None = ...) -> _C: ... + @overload + def cursor(self, cursor: type[_C2]) -> _C2: ... + def query(self, sql, unbuffered: bool = ...) -> int: ... + def next_result(self, unbuffered: bool = ...) -> int: ... + def affected_rows(self): ... + def kill(self, thread_id): ... + def ping(self, reconnect: bool = ...) -> None: ... + def set_charset(self, charset) -> None: ... + def connect(self, sock: _socket | None = ...) -> None: ... + def write_packet(self, payload) -> None: ... + def _read_packet(self, packet_type=...): ... + def insert_id(self): ... + def thread_id(self): ... + def character_set_name(self): ... + def get_host_info(self): ... + def get_proto_info(self): ... + def get_server_info(self): ... + def show_warnings(self): ... + def __enter__(self) -> Self: ... + def __exit__(self, *exc_info: object) -> None: ... + Warning: Any + Error: Any + InterfaceError: Any + DatabaseError: Any + DataError: Any + OperationalError: Any + IntegrityError: Any + InternalError: Any + ProgrammingError: Any + NotSupportedError: Any + +class MySQLResult: + connection: Any + affected_rows: Any + insert_id: Any + server_status: Any + warning_count: Any + message: Any + field_count: Any + description: Any + rows: Any + has_next: Any + def __init__(self, connection: Connection[Any]) -> None: ... + first_packet: Any + def read(self) -> None: ... + def init_unbuffered_query(self) -> None: ... + +class LoadLocalFile: + filename: Any + connection: Connection[Any] + def __init__(self, filename: Any, connection: Connection[Any]) -> None: ... + def send_data(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/CLIENT.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/CLIENT.pyi new file mode 100644 index 00000000..21019e02 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/CLIENT.pyi @@ -0,0 +1,25 @@ +LONG_PASSWORD: int +FOUND_ROWS: int +LONG_FLAG: int +CONNECT_WITH_DB: int +NO_SCHEMA: int +COMPRESS: int +ODBC: int +LOCAL_FILES: int +IGNORE_SPACE: int +PROTOCOL_41: int +INTERACTIVE: int +SSL: int +IGNORE_SIGPIPE: int +TRANSACTIONS: int +SECURE_CONNECTION: int +MULTI_STATEMENTS: int +MULTI_RESULTS: int +PS_MULTI_RESULTS: int +PLUGIN_AUTH: int +CONNECT_ATTRS: int +PLUGIN_AUTH_LENENC_CLIENT_DATA: int +CAPABILITIES: int +HANDLE_EXPIRED_PASSWORDS: int +SESSION_TRACK: int +DEPRECATE_EOF: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/COMMAND.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/COMMAND.pyi new file mode 100644 index 00000000..fe74c540 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/COMMAND.pyi @@ -0,0 +1,32 @@ +COM_SLEEP: int +COM_QUIT: int +COM_INIT_DB: int +COM_QUERY: int +COM_FIELD_LIST: int +COM_CREATE_DB: int +COM_DROP_DB: int +COM_REFRESH: int +COM_SHUTDOWN: int +COM_STATISTICS: int +COM_PROCESS_INFO: int +COM_CONNECT: int +COM_PROCESS_KILL: int +COM_DEBUG: int +COM_PING: int +COM_TIME: int +COM_DELAYED_INSERT: int +COM_CHANGE_USER: int +COM_BINLOG_DUMP: int +COM_TABLE_DUMP: int +COM_CONNECT_OUT: int +COM_REGISTER_SLAVE: int +COM_STMT_PREPARE: int +COM_STMT_EXECUTE: int +COM_STMT_SEND_LONG_DATA: int +COM_STMT_CLOSE: int 
+COM_STMT_RESET: int +COM_SET_OPTION: int +COM_STMT_FETCH: int +COM_DAEMON: int +COM_BINLOG_DUMP_GTID: int +COM_END: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/CR.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/CR.pyi new file mode 100644 index 00000000..01037396 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/CR.pyi @@ -0,0 +1,64 @@ +CR_ERROR_FIRST: int +CR_UNKNOWN_ERROR: int +CR_SOCKET_CREATE_ERROR: int +CR_CONNECTION_ERROR: int +CR_CONN_HOST_ERROR: int +CR_IPSOCK_ERROR: int +CR_UNKNOWN_HOST: int +CR_SERVER_GONE_ERROR: int +CR_VERSION_ERROR: int +CR_OUT_OF_MEMORY: int +CR_WRONG_HOST_INFO: int +CR_LOCALHOST_CONNECTION: int +CR_TCP_CONNECTION: int +CR_SERVER_HANDSHAKE_ERR: int +CR_SERVER_LOST: int +CR_COMMANDS_OUT_OF_SYNC: int +CR_NAMEDPIPE_CONNECTION: int +CR_NAMEDPIPEWAIT_ERROR: int +CR_NAMEDPIPEOPEN_ERROR: int +CR_NAMEDPIPESETSTATE_ERROR: int +CR_CANT_READ_CHARSET: int +CR_NET_PACKET_TOO_LARGE: int +CR_EMBEDDED_CONNECTION: int +CR_PROBE_SLAVE_STATUS: int +CR_PROBE_SLAVE_HOSTS: int +CR_PROBE_SLAVE_CONNECT: int +CR_PROBE_MASTER_CONNECT: int +CR_SSL_CONNECTION_ERROR: int +CR_MALFORMED_PACKET: int +CR_WRONG_LICENSE: int +CR_NULL_POINTER: int +CR_NO_PREPARE_STMT: int +CR_PARAMS_NOT_BOUND: int +CR_DATA_TRUNCATED: int +CR_NO_PARAMETERS_EXISTS: int +CR_INVALID_PARAMETER_NO: int +CR_INVALID_BUFFER_USE: int +CR_UNSUPPORTED_PARAM_TYPE: int +CR_SHARED_MEMORY_CONNECTION: int +CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR: int +CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR: int +CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR: int +CR_SHARED_MEMORY_CONNECT_MAP_ERROR: int +CR_SHARED_MEMORY_FILE_MAP_ERROR: int +CR_SHARED_MEMORY_MAP_ERROR: int +CR_SHARED_MEMORY_EVENT_ERROR: int +CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR: int +CR_SHARED_MEMORY_CONNECT_SET_ERROR: int +CR_CONN_UNKNOW_PROTOCOL: int +CR_INVALID_CONN_HANDLE: int +CR_SECURE_AUTH: int +CR_FETCH_CANCELED: int +CR_NO_DATA: int +CR_NO_STMT_METADATA: int +CR_NO_RESULT_SET: int +CR_NOT_IMPLEMENTED: int +CR_SERVER_LOST_EXTENDED: int +CR_STMT_CLOSED: int +CR_NEW_STMT_METADATA: int +CR_ALREADY_CONNECTED: int +CR_AUTH_PLUGIN_CANNOT_LOAD: int +CR_DUPLICATE_CONNECTION_ATTR: int +CR_AUTH_PLUGIN_ERR: int +CR_ERROR_LAST: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/ER.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/ER.pyi new file mode 100644 index 00000000..0f4c17f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/ER.pyi @@ -0,0 +1,472 @@ +ERROR_FIRST: int +HASHCHK: int +NISAMCHK: int +NO: int +YES: int +CANT_CREATE_FILE: int +CANT_CREATE_TABLE: int +CANT_CREATE_DB: int +DB_CREATE_EXISTS: int +DB_DROP_EXISTS: int +DB_DROP_DELETE: int +DB_DROP_RMDIR: int +CANT_DELETE_FILE: int +CANT_FIND_SYSTEM_REC: int +CANT_GET_STAT: int +CANT_GET_WD: int +CANT_LOCK: int +CANT_OPEN_FILE: int +FILE_NOT_FOUND: int +CANT_READ_DIR: int +CANT_SET_WD: int +CHECKREAD: int +DISK_FULL: int +DUP_KEY: int +ERROR_ON_CLOSE: int +ERROR_ON_READ: int +ERROR_ON_RENAME: int +ERROR_ON_WRITE: int +FILE_USED: int +FILSORT_ABORT: int +FORM_NOT_FOUND: int +GET_ERRNO: int +ILLEGAL_HA: int +KEY_NOT_FOUND: int +NOT_FORM_FILE: int +NOT_KEYFILE: int +OLD_KEYFILE: int +OPEN_AS_READONLY: int +OUTOFMEMORY: int +OUT_OF_SORTMEMORY: 
int +UNEXPECTED_EOF: int +CON_COUNT_ERROR: int +OUT_OF_RESOURCES: int +BAD_HOST_ERROR: int +HANDSHAKE_ERROR: int +DBACCESS_DENIED_ERROR: int +ACCESS_DENIED_ERROR: int +NO_DB_ERROR: int +UNKNOWN_COM_ERROR: int +BAD_NULL_ERROR: int +BAD_DB_ERROR: int +TABLE_EXISTS_ERROR: int +BAD_TABLE_ERROR: int +NON_UNIQ_ERROR: int +SERVER_SHUTDOWN: int +BAD_FIELD_ERROR: int +WRONG_FIELD_WITH_GROUP: int +WRONG_GROUP_FIELD: int +WRONG_SUM_SELECT: int +WRONG_VALUE_COUNT: int +TOO_LONG_IDENT: int +DUP_FIELDNAME: int +DUP_KEYNAME: int +DUP_ENTRY: int +WRONG_FIELD_SPEC: int +PARSE_ERROR: int +EMPTY_QUERY: int +NONUNIQ_TABLE: int +INVALID_DEFAULT: int +MULTIPLE_PRI_KEY: int +TOO_MANY_KEYS: int +TOO_MANY_KEY_PARTS: int +TOO_LONG_KEY: int +KEY_COLUMN_DOES_NOT_EXITS: int +BLOB_USED_AS_KEY: int +TOO_BIG_FIELDLENGTH: int +WRONG_AUTO_KEY: int +READY: int +NORMAL_SHUTDOWN: int +GOT_SIGNAL: int +SHUTDOWN_COMPLETE: int +FORCING_CLOSE: int +IPSOCK_ERROR: int +NO_SUCH_INDEX: int +WRONG_FIELD_TERMINATORS: int +BLOBS_AND_NO_TERMINATED: int +TEXTFILE_NOT_READABLE: int +FILE_EXISTS_ERROR: int +LOAD_INFO: int +ALTER_INFO: int +WRONG_SUB_KEY: int +CANT_REMOVE_ALL_FIELDS: int +CANT_DROP_FIELD_OR_KEY: int +INSERT_INFO: int +UPDATE_TABLE_USED: int +NO_SUCH_THREAD: int +KILL_DENIED_ERROR: int +NO_TABLES_USED: int +TOO_BIG_SET: int +NO_UNIQUE_LOGFILE: int +TABLE_NOT_LOCKED_FOR_WRITE: int +TABLE_NOT_LOCKED: int +BLOB_CANT_HAVE_DEFAULT: int +WRONG_DB_NAME: int +WRONG_TABLE_NAME: int +TOO_BIG_SELECT: int +UNKNOWN_ERROR: int +UNKNOWN_PROCEDURE: int +WRONG_PARAMCOUNT_TO_PROCEDURE: int +WRONG_PARAMETERS_TO_PROCEDURE: int +UNKNOWN_TABLE: int +FIELD_SPECIFIED_TWICE: int +INVALID_GROUP_FUNC_USE: int +UNSUPPORTED_EXTENSION: int +TABLE_MUST_HAVE_COLUMNS: int +RECORD_FILE_FULL: int +UNKNOWN_CHARACTER_SET: int +TOO_MANY_TABLES: int +TOO_MANY_FIELDS: int +TOO_BIG_ROWSIZE: int +STACK_OVERRUN: int +WRONG_OUTER_JOIN: int +NULL_COLUMN_IN_INDEX: int +CANT_FIND_UDF: int +CANT_INITIALIZE_UDF: int +UDF_NO_PATHS: int +UDF_EXISTS: int +CANT_OPEN_LIBRARY: int +CANT_FIND_DL_ENTRY: int +FUNCTION_NOT_DEFINED: int +HOST_IS_BLOCKED: int +HOST_NOT_PRIVILEGED: int +PASSWORD_ANONYMOUS_USER: int +PASSWORD_NOT_ALLOWED: int +PASSWORD_NO_MATCH: int +UPDATE_INFO: int +CANT_CREATE_THREAD: int +WRONG_VALUE_COUNT_ON_ROW: int +CANT_REOPEN_TABLE: int +INVALID_USE_OF_NULL: int +REGEXP_ERROR: int +MIX_OF_GROUP_FUNC_AND_FIELDS: int +NONEXISTING_GRANT: int +TABLEACCESS_DENIED_ERROR: int +COLUMNACCESS_DENIED_ERROR: int +ILLEGAL_GRANT_FOR_TABLE: int +GRANT_WRONG_HOST_OR_USER: int +NO_SUCH_TABLE: int +NONEXISTING_TABLE_GRANT: int +NOT_ALLOWED_COMMAND: int +SYNTAX_ERROR: int +DELAYED_CANT_CHANGE_LOCK: int +TOO_MANY_DELAYED_THREADS: int +ABORTING_CONNECTION: int +NET_PACKET_TOO_LARGE: int +NET_READ_ERROR_FROM_PIPE: int +NET_FCNTL_ERROR: int +NET_PACKETS_OUT_OF_ORDER: int +NET_UNCOMPRESS_ERROR: int +NET_READ_ERROR: int +NET_READ_INTERRUPTED: int +NET_ERROR_ON_WRITE: int +NET_WRITE_INTERRUPTED: int +TOO_LONG_STRING: int +TABLE_CANT_HANDLE_BLOB: int +TABLE_CANT_HANDLE_AUTO_INCREMENT: int +DELAYED_INSERT_TABLE_LOCKED: int +WRONG_COLUMN_NAME: int +WRONG_KEY_COLUMN: int +WRONG_MRG_TABLE: int +DUP_UNIQUE: int +BLOB_KEY_WITHOUT_LENGTH: int +PRIMARY_CANT_HAVE_NULL: int +TOO_MANY_ROWS: int +REQUIRES_PRIMARY_KEY: int +NO_RAID_COMPILED: int +UPDATE_WITHOUT_KEY_IN_SAFE_MODE: int +KEY_DOES_NOT_EXITS: int +CHECK_NO_SUCH_TABLE: int +CHECK_NOT_IMPLEMENTED: int +CANT_DO_THIS_DURING_AN_TRANSACTION: int +ERROR_DURING_COMMIT: int +ERROR_DURING_ROLLBACK: int +ERROR_DURING_FLUSH_LOGS: int 
+ERROR_DURING_CHECKPOINT: int +NEW_ABORTING_CONNECTION: int +DUMP_NOT_IMPLEMENTED: int +FLUSH_MASTER_BINLOG_CLOSED: int +INDEX_REBUILD: int +MASTER: int +MASTER_NET_READ: int +MASTER_NET_WRITE: int +FT_MATCHING_KEY_NOT_FOUND: int +LOCK_OR_ACTIVE_TRANSACTION: int +UNKNOWN_SYSTEM_VARIABLE: int +CRASHED_ON_USAGE: int +CRASHED_ON_REPAIR: int +WARNING_NOT_COMPLETE_ROLLBACK: int +TRANS_CACHE_FULL: int +SLAVE_MUST_STOP: int +SLAVE_NOT_RUNNING: int +BAD_SLAVE: int +MASTER_INFO: int +SLAVE_THREAD: int +TOO_MANY_USER_CONNECTIONS: int +SET_CONSTANTS_ONLY: int +LOCK_WAIT_TIMEOUT: int +LOCK_TABLE_FULL: int +READ_ONLY_TRANSACTION: int +DROP_DB_WITH_READ_LOCK: int +CREATE_DB_WITH_READ_LOCK: int +WRONG_ARGUMENTS: int +NO_PERMISSION_TO_CREATE_USER: int +UNION_TABLES_IN_DIFFERENT_DIR: int +LOCK_DEADLOCK: int +TABLE_CANT_HANDLE_FT: int +CANNOT_ADD_FOREIGN: int +NO_REFERENCED_ROW: int +ROW_IS_REFERENCED: int +CONNECT_TO_MASTER: int +QUERY_ON_MASTER: int +ERROR_WHEN_EXECUTING_COMMAND: int +WRONG_USAGE: int +WRONG_NUMBER_OF_COLUMNS_IN_SELECT: int +CANT_UPDATE_WITH_READLOCK: int +MIXING_NOT_ALLOWED: int +DUP_ARGUMENT: int +USER_LIMIT_REACHED: int +SPECIFIC_ACCESS_DENIED_ERROR: int +LOCAL_VARIABLE: int +GLOBAL_VARIABLE: int +NO_DEFAULT: int +WRONG_VALUE_FOR_VAR: int +WRONG_TYPE_FOR_VAR: int +VAR_CANT_BE_READ: int +CANT_USE_OPTION_HERE: int +NOT_SUPPORTED_YET: int +MASTER_FATAL_ERROR_READING_BINLOG: int +SLAVE_IGNORED_TABLE: int +INCORRECT_GLOBAL_LOCAL_VAR: int +WRONG_FK_DEF: int +KEY_REF_DO_NOT_MATCH_TABLE_REF: int +OPERAND_COLUMNS: int +SUBQUERY_NO_1_ROW: int +UNKNOWN_STMT_HANDLER: int +CORRUPT_HELP_DB: int +CYCLIC_REFERENCE: int +AUTO_CONVERT: int +ILLEGAL_REFERENCE: int +DERIVED_MUST_HAVE_ALIAS: int +SELECT_REDUCED: int +TABLENAME_NOT_ALLOWED_HERE: int +NOT_SUPPORTED_AUTH_MODE: int +SPATIAL_CANT_HAVE_NULL: int +COLLATION_CHARSET_MISMATCH: int +SLAVE_WAS_RUNNING: int +SLAVE_WAS_NOT_RUNNING: int +TOO_BIG_FOR_UNCOMPRESS: int +ZLIB_Z_MEM_ERROR: int +ZLIB_Z_BUF_ERROR: int +ZLIB_Z_DATA_ERROR: int +CUT_VALUE_GROUP_CONCAT: int +WARN_TOO_FEW_RECORDS: int +WARN_TOO_MANY_RECORDS: int +WARN_NULL_TO_NOTNULL: int +WARN_DATA_OUT_OF_RANGE: int +WARN_DATA_TRUNCATED: int +WARN_USING_OTHER_HANDLER: int +CANT_AGGREGATE_2COLLATIONS: int +DROP_USER: int +REVOKE_GRANTS: int +CANT_AGGREGATE_3COLLATIONS: int +CANT_AGGREGATE_NCOLLATIONS: int +VARIABLE_IS_NOT_STRUCT: int +UNKNOWN_COLLATION: int +SLAVE_IGNORED_SSL_PARAMS: int +SERVER_IS_IN_SECURE_AUTH_MODE: int +WARN_FIELD_RESOLVED: int +BAD_SLAVE_UNTIL_COND: int +MISSING_SKIP_SLAVE: int +UNTIL_COND_IGNORED: int +WRONG_NAME_FOR_INDEX: int +WRONG_NAME_FOR_CATALOG: int +WARN_QC_RESIZE: int +BAD_FT_COLUMN: int +UNKNOWN_KEY_CACHE: int +WARN_HOSTNAME_WONT_WORK: int +UNKNOWN_STORAGE_ENGINE: int +WARN_DEPRECATED_SYNTAX: int +NON_UPDATABLE_TABLE: int +FEATURE_DISABLED: int +OPTION_PREVENTS_STATEMENT: int +DUPLICATED_VALUE_IN_TYPE: int +TRUNCATED_WRONG_VALUE: int +TOO_MUCH_AUTO_TIMESTAMP_COLS: int +INVALID_ON_UPDATE: int +UNSUPPORTED_PS: int +GET_ERRMSG: int +GET_TEMPORARY_ERRMSG: int +UNKNOWN_TIME_ZONE: int +WARN_INVALID_TIMESTAMP: int +INVALID_CHARACTER_STRING: int +WARN_ALLOWED_PACKET_OVERFLOWED: int +CONFLICTING_DECLARATIONS: int +SP_NO_RECURSIVE_CREATE: int +SP_ALREADY_EXISTS: int +SP_DOES_NOT_EXIST: int +SP_DROP_FAILED: int +SP_STORE_FAILED: int +SP_LILABEL_MISMATCH: int +SP_LABEL_REDEFINE: int +SP_LABEL_MISMATCH: int +SP_UNINIT_VAR: int +SP_BADSELECT: int +SP_BADRETURN: int +SP_BADSTATEMENT: int +UPDATE_LOG_DEPRECATED_IGNORED: int +UPDATE_LOG_DEPRECATED_TRANSLATED: int +QUERY_INTERRUPTED: 
int +SP_WRONG_NO_OF_ARGS: int +SP_COND_MISMATCH: int +SP_NORETURN: int +SP_NORETURNEND: int +SP_BAD_CURSOR_QUERY: int +SP_BAD_CURSOR_SELECT: int +SP_CURSOR_MISMATCH: int +SP_CURSOR_ALREADY_OPEN: int +SP_CURSOR_NOT_OPEN: int +SP_UNDECLARED_VAR: int +SP_WRONG_NO_OF_FETCH_ARGS: int +SP_FETCH_NO_DATA: int +SP_DUP_PARAM: int +SP_DUP_VAR: int +SP_DUP_COND: int +SP_DUP_CURS: int +SP_CANT_ALTER: int +SP_SUBSELECT_NYI: int +STMT_NOT_ALLOWED_IN_SF_OR_TRG: int +SP_VARCOND_AFTER_CURSHNDLR: int +SP_CURSOR_AFTER_HANDLER: int +SP_CASE_NOT_FOUND: int +FPARSER_TOO_BIG_FILE: int +FPARSER_BAD_HEADER: int +FPARSER_EOF_IN_COMMENT: int +FPARSER_ERROR_IN_PARAMETER: int +FPARSER_EOF_IN_UNKNOWN_PARAMETER: int +VIEW_NO_EXPLAIN: int +FRM_UNKNOWN_TYPE: int +WRONG_OBJECT: int +NONUPDATEABLE_COLUMN: int +VIEW_SELECT_DERIVED: int +VIEW_SELECT_CLAUSE: int +VIEW_SELECT_VARIABLE: int +VIEW_SELECT_TMPTABLE: int +VIEW_WRONG_LIST: int +WARN_VIEW_MERGE: int +WARN_VIEW_WITHOUT_KEY: int +VIEW_INVALID: int +SP_NO_DROP_SP: int +SP_GOTO_IN_HNDLR: int +TRG_ALREADY_EXISTS: int +TRG_DOES_NOT_EXIST: int +TRG_ON_VIEW_OR_TEMP_TABLE: int +TRG_CANT_CHANGE_ROW: int +TRG_NO_SUCH_ROW_IN_TRG: int +NO_DEFAULT_FOR_FIELD: int +DIVISION_BY_ZERO: int +TRUNCATED_WRONG_VALUE_FOR_FIELD: int +ILLEGAL_VALUE_FOR_TYPE: int +VIEW_NONUPD_CHECK: int +VIEW_CHECK_FAILED: int +PROCACCESS_DENIED_ERROR: int +RELAY_LOG_FAIL: int +PASSWD_LENGTH: int +UNKNOWN_TARGET_BINLOG: int +IO_ERR_LOG_INDEX_READ: int +BINLOG_PURGE_PROHIBITED: int +FSEEK_FAIL: int +BINLOG_PURGE_FATAL_ERR: int +LOG_IN_USE: int +LOG_PURGE_UNKNOWN_ERR: int +RELAY_LOG_INIT: int +NO_BINARY_LOGGING: int +RESERVED_SYNTAX: int +WSAS_FAILED: int +DIFF_GROUPS_PROC: int +NO_GROUP_FOR_PROC: int +ORDER_WITH_PROC: int +LOGGING_PROHIBIT_CHANGING_OF: int +NO_FILE_MAPPING: int +WRONG_MAGIC: int +PS_MANY_PARAM: int +KEY_PART_0: int +VIEW_CHECKSUM: int +VIEW_MULTIUPDATE: int +VIEW_NO_INSERT_FIELD_LIST: int +VIEW_DELETE_MERGE_VIEW: int +CANNOT_USER: int +XAER_NOTA: int +XAER_INVAL: int +XAER_RMFAIL: int +XAER_OUTSIDE: int +XAER_RMERR: int +XA_RBROLLBACK: int +NONEXISTING_PROC_GRANT: int +PROC_AUTO_GRANT_FAIL: int +PROC_AUTO_REVOKE_FAIL: int +DATA_TOO_LONG: int +SP_BAD_SQLSTATE: int +STARTUP: int +LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR: int +CANT_CREATE_USER_WITH_GRANT: int +WRONG_VALUE_FOR_TYPE: int +TABLE_DEF_CHANGED: int +SP_DUP_HANDLER: int +SP_NOT_VAR_ARG: int +SP_NO_RETSET: int +CANT_CREATE_GEOMETRY_OBJECT: int +FAILED_ROUTINE_BREAK_BINLOG: int +BINLOG_UNSAFE_ROUTINE: int +BINLOG_CREATE_ROUTINE_NEED_SUPER: int +EXEC_STMT_WITH_OPEN_CURSOR: int +STMT_HAS_NO_OPEN_CURSOR: int +COMMIT_NOT_ALLOWED_IN_SF_OR_TRG: int +NO_DEFAULT_FOR_VIEW_FIELD: int +SP_NO_RECURSION: int +TOO_BIG_SCALE: int +TOO_BIG_PRECISION: int +M_BIGGER_THAN_D: int +WRONG_LOCK_OF_SYSTEM_TABLE: int +CONNECT_TO_FOREIGN_DATA_SOURCE: int +QUERY_ON_FOREIGN_DATA_SOURCE: int +FOREIGN_DATA_SOURCE_DOESNT_EXIST: int +FOREIGN_DATA_STRING_INVALID_CANT_CREATE: int +FOREIGN_DATA_STRING_INVALID: int +CANT_CREATE_FEDERATED_TABLE: int +TRG_IN_WRONG_SCHEMA: int +STACK_OVERRUN_NEED_MORE: int +TOO_LONG_BODY: int +WARN_CANT_DROP_DEFAULT_KEYCACHE: int +TOO_BIG_DISPLAYWIDTH: int +XAER_DUPID: int +DATETIME_FUNCTION_OVERFLOW: int +CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG: int +VIEW_PREVENT_UPDATE: int +PS_NO_RECURSION: int +SP_CANT_SET_AUTOCOMMIT: int +MALFORMED_DEFINER: int +VIEW_FRM_NO_USER: int +VIEW_OTHER_USER: int +NO_SUCH_USER: int +FORBID_SCHEMA_CHANGE: int +ROW_IS_REFERENCED_2: int +NO_REFERENCED_ROW_2: int +SP_BAD_VAR_SHADOW: int +TRG_NO_DEFINER: int +OLD_FILE_FORMAT: int 
+SP_RECURSION_LIMIT: int +SP_PROC_TABLE_CORRUPT: int +SP_WRONG_NAME: int +TABLE_NEEDS_UPGRADE: int +SP_NO_AGGREGATE: int +MAX_PREPARED_STMT_COUNT_REACHED: int +VIEW_RECURSIVE: int +NON_GROUPING_FIELD_USED: int +TABLE_CANT_HANDLE_SPKEYS: int +NO_TRIGGERS_ON_SYSTEM_SCHEMA: int +USERNAME: int +HOSTNAME: int +WRONG_STRING_LENGTH: int +ERROR_LAST: int +CONSTRAINT_FAILED: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/FIELD_TYPE.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/FIELD_TYPE.pyi new file mode 100644 index 00000000..4790d922 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/FIELD_TYPE.pyi @@ -0,0 +1,30 @@ +DECIMAL: int +TINY: int +SHORT: int +LONG: int +FLOAT: int +DOUBLE: int +NULL: int +TIMESTAMP: int +LONGLONG: int +INT24: int +DATE: int +TIME: int +DATETIME: int +YEAR: int +NEWDATE: int +VARCHAR: int +BIT: int +JSON: int +NEWDECIMAL: int +ENUM: int +SET: int +TINY_BLOB: int +MEDIUM_BLOB: int +LONG_BLOB: int +BLOB: int +VAR_STRING: int +STRING: int +GEOMETRY: int +CHAR: int +INTERVAL: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/FLAG.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/FLAG.pyi new file mode 100644 index 00000000..421f2315 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/FLAG.pyi @@ -0,0 +1,15 @@ +NOT_NULL: int +PRI_KEY: int +UNIQUE_KEY: int +MULTIPLE_KEY: int +BLOB: int +UNSIGNED: int +ZEROFILL: int +BINARY: int +ENUM: int +AUTO_INCREMENT: int +TIMESTAMP: int +SET: int +PART_KEY: int +GROUP: int +UNIQUE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/SERVER_STATUS.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/SERVER_STATUS.pyi new file mode 100644 index 00000000..437b8936 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/SERVER_STATUS.pyi @@ -0,0 +1,10 @@ +SERVER_STATUS_IN_TRANS: int +SERVER_STATUS_AUTOCOMMIT: int +SERVER_MORE_RESULTS_EXISTS: int +SERVER_QUERY_NO_GOOD_INDEX_USED: int +SERVER_QUERY_NO_INDEX_USED: int +SERVER_STATUS_CURSOR_EXISTS: int +SERVER_STATUS_LAST_ROW_SENT: int +SERVER_STATUS_DB_DROPPED: int +SERVER_STATUS_NO_BACKSLASH_ESCAPES: int +SERVER_STATUS_METADATA_CHANGED: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/constants/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/converters.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/converters.pyi new file mode 100644 index 00000000..6ce9f25e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/converters.pyi @@ -0,0 +1,41 @@ +import datetime +import time +from _typeshed import Unused +from collections.abc import Callable, Mapping, Sequence +from decimal import Decimal +from typing import Any, TypeVar +from typing_extensions import 
TypeAlias + +_EscaperMapping: TypeAlias = Mapping[type[object], Callable[..., str]] | None +_T = TypeVar("_T") + +def escape_item(val: object, charset: object, mapping: _EscaperMapping = ...) -> str: ... +def escape_dict(val: Mapping[str, object], charset: object, mapping: _EscaperMapping = ...) -> dict[str, str]: ... +def escape_sequence(val: Sequence[object], charset: object, mapping: _EscaperMapping = ...) -> str: ... +def escape_set(val: set[object], charset: object, mapping: _EscaperMapping = ...) -> str: ... +def escape_bool(value: bool, mapping: _EscaperMapping = ...) -> str: ... +def escape_int(value: int, mapping: _EscaperMapping = ...) -> str: ... +def escape_float(value: float, mapping: _EscaperMapping = ...) -> str: ... +def escape_string(value: str, mapping: _EscaperMapping = ...) -> str: ... +def escape_bytes_prefixed(value: bytes, mapping: _EscaperMapping = ...) -> str: ... +def escape_bytes(value: bytes, mapping: _EscaperMapping = ...) -> str: ... +def escape_str(value: str, mapping: _EscaperMapping = ...) -> str: ... +def escape_None(value: None, mapping: _EscaperMapping = ...) -> str: ... +def escape_timedelta(obj: datetime.timedelta, mapping: _EscaperMapping = ...) -> str: ... +def escape_time(obj: datetime.time, mapping: _EscaperMapping = ...) -> str: ... +def escape_datetime(obj: datetime.datetime, mapping: _EscaperMapping = ...) -> str: ... +def escape_date(obj: datetime.date, mapping: _EscaperMapping = ...) -> str: ... +def escape_struct_time(obj: time.struct_time, mapping: _EscaperMapping = ...) -> str: ... +def Decimal2Literal(o: Decimal, d: Unused) -> str: ... +def convert_datetime(obj: str | bytes) -> datetime.datetime | str: ... +def convert_timedelta(obj: str | bytes) -> datetime.timedelta | str: ... +def convert_time(obj: str | bytes) -> datetime.time | str: ... +def convert_date(obj: str | bytes) -> datetime.date | str: ... +def through(x: _T) -> _T: ... + +convert_bit = through + +encoders: dict[type[object], Callable[..., str]] +decoders: dict[int, Callable[[str | bytes], Any]] +conversions: dict[type[object] | int, Callable[..., Any]] +Thing2Literal = escape_str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/cursors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/cursors.pyi new file mode 100644 index 00000000..4f2d374b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/cursors.pyi @@ -0,0 +1,50 @@ +from collections.abc import Iterable, Iterator +from typing import Any +from typing_extensions import Self + +from .connections import Connection + +class Cursor: + connection: Connection[Any] + description: tuple[str, ...] + rownumber: int + rowcount: int + arraysize: int + messages: Any + errorhandler: Any + lastrowid: int + def __init__(self, connection: Connection[Any]) -> None: ... + def __del__(self) -> None: ... + def close(self) -> None: ... + def setinputsizes(self, *args) -> None: ... + def setoutputsizes(self, *args) -> None: ... + def nextset(self) -> bool | None: ... + def mogrify(self, query: str, args: object = ...) -> str: ... + def execute(self, query: str, args: object = ...) -> int: ... + def executemany(self, query: str, args: Iterable[object]) -> int | None: ... + def callproc(self, procname: str, args: Iterable[Any] = ...) -> Any: ... + def scroll(self, value: int, mode: str = ...) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__(self, *exc_info: object) -> None: ... + # Methods returning result tuples are below. + def fetchone(self) -> tuple[Any, ...] | None: ... + def fetchmany(self, size: int | None = ...) -> tuple[tuple[Any, ...], ...]: ... + def fetchall(self) -> tuple[tuple[Any, ...], ...]: ... + def __iter__(self) -> Iterator[tuple[Any, ...]]: ... + +class DictCursorMixin: + dict_type: Any # TODO: add support if someone needs this + def fetchone(self) -> dict[str, Any] | None: ... + def fetchmany(self, size: int | None = ...) -> tuple[dict[str, Any], ...]: ... + def fetchall(self) -> tuple[dict[str, Any], ...]: ... + def __iter__(self) -> Iterator[dict[str, Any]]: ... + +class SSCursor(Cursor): + def fetchall(self) -> list[tuple[Any, ...]]: ... # type: ignore[override] + def fetchall_unbuffered(self) -> Iterator[tuple[Any, ...]]: ... + def scroll(self, value: int, mode: str = ...) -> None: ... + +class DictCursor(DictCursorMixin, Cursor): ... # type: ignore[misc] + +class SSDictCursor(DictCursorMixin, SSCursor): # type: ignore[misc] + def fetchall_unbuffered(self) -> Iterator[dict[str, Any]]: ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/err.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/err.pyi new file mode 100644 index 00000000..8aec38f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/err.pyi @@ -0,0 +1,20 @@ +import builtins +from typing import NoReturn + +from .constants import ER as ER + +class MySQLError(Exception): ... +class Warning(builtins.Warning, MySQLError): ... +class Error(MySQLError): ... +class InterfaceError(Error): ... +class DatabaseError(Error): ... +class DataError(DatabaseError): ... +class OperationalError(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InternalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... +class NotSupportedError(DatabaseError): ... + +error_map: dict[int, type[DatabaseError]] + +def raise_mysql_exception(data) -> NoReturn: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/times.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/times.pyi new file mode 100644 index 00000000..c798e654 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/times.pyi @@ -0,0 +1,10 @@ +from typing import Any + +Date: Any +Time: Any +TimeDelta: Any +Timestamp: Any + +def DateFromTicks(ticks): ... +def TimeFromTicks(ticks): ... +def TimestampFromTicks(ticks): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/util.pyi new file mode 100644 index 00000000..3d9a65b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyMySQL/pymysql/util.pyi @@ -0,0 +1,3 @@ +def byte2int(b): ... +def int2byte(i): ... +def join_bytes(bs): ... 
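For orientation, a minimal usage sketch (not part of the vendored stubs) of how the Cursor and DictCursor annotations above play out in user code: a plain Cursor yields rows as tuples, a DictCursor yields rows keyed by column name. It assumes PyMySQL is installed and a MySQL server is reachable; the connection parameters are placeholders.

from __future__ import annotations

import pymysql
import pymysql.cursors


def row_as_tuple(conn: pymysql.connections.Connection) -> tuple | None:
    # Default cursor class: fetchone() returns tuple[Any, ...] | None per the stub.
    with conn.cursor() as cur:
        cur.execute("SELECT 1, 2")
        return cur.fetchone()


def row_as_dict(conn: pymysql.connections.Connection) -> dict | None:
    # DictCursor: fetchone() returns dict[str, Any] | None per the stub.
    with conn.cursor(pymysql.cursors.DictCursor) as cur:
        cur.execute("SELECT 1 AS one, 2 AS two")
        return cur.fetchone()


if __name__ == "__main__":
    # Placeholder credentials; adjust for a real server.
    connection = pymysql.connect(host="localhost", user="root", password="", database="test")
    try:
        print(row_as_tuple(connection))  # e.g. (1, 2)
        print(row_as_dict(connection))   # e.g. {'one': 1, 'two': 2}
    finally:
        connection.close()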
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyScreeze/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyScreeze/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..fa9bd6fa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyScreeze/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,2 @@ +# temp variable used to define scrotExists by checking if the command "scrot" exists +pyscreeze.whichProc diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyScreeze/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyScreeze/METADATA.toml new file mode 100644 index 00000000..52120c0b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyScreeze/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.1.*" +requires = ["types-Pillow"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyScreeze/pyscreeze/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyScreeze/pyscreeze/__init__.pyi new file mode 100644 index 00000000..5c52490e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyScreeze/pyscreeze/__init__.pyi @@ -0,0 +1,203 @@ +from _typeshed import Incomplete, StrOrBytesPath, Unused +from collections.abc import Callable, Generator +from typing import NamedTuple, SupportsFloat, TypeVar, overload +from typing_extensions import Final, ParamSpec, SupportsIndex, TypeAlias + +from PIL import Image + +_P = ParamSpec("_P") +_R = TypeVar("_R") +# TODO: cv2.Mat is not available as a type yet: +# https://github.com/microsoft/python-type-stubs/issues/211 +# https://github.com/microsoft/python-type-stubs/tree/main/cv2 +# https://github.com/opencv/opencv/pull/20370 +# cv2.Mat is just an alias for a numpy NDArray, but can't import that either. +# Because pyscreeze does not declare it as a dependency, stub_uploader won't let it. +_Mat: TypeAlias = Incomplete + +useOpenCV: Final[bool] +RUNNING_PYTHON_2: Final = False +GRAYSCALE_DEFAULT: Final = False +scrotExists: Final[bool] +# Meant to be overridable for backward-compatibility +USE_IMAGE_NOT_FOUND_EXCEPTION: bool + +class Box(NamedTuple): + left: int + top: int + width: int + height: int + +class Point(NamedTuple): + x: int + y: int + +class RGB(NamedTuple): + red: int + green: int + blue: int + +class PyScreezeException(Exception): ... +class ImageNotFoundException(PyScreezeException): ... + +# _locateAll_opencv +def requiresPillow(wrappedFunction: Callable[_P, _R]) -> Callable[_P, _R]: ... +@overload +def locate( + needleImage: str | Image.Image | _Mat, + haystackImage: str | Image.Image | _Mat, + *, + grayscale: bool | None = None, + limit: Unused = 1, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: SupportsFloat | SupportsIndex | str = 0.999, +) -> Box | None: ... + +# _locateAll_python / _locateAll_pillow +@overload +def locate( + needleImage: str | Image.Image, + haystackImage: str | Image.Image, + *, + grayscale: bool | None = None, + limit: Unused = 1, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: None = None, +) -> Box | None: ... 
+ +# _locateAll_opencv +@overload +def locateOnScreen( + image: str | Image.Image | _Mat, + minSearchTime: float = 0, + *, + grayscale: bool | None = None, + limit: Unused = 1, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: SupportsFloat | SupportsIndex | str = 0.999, +) -> Box | None: ... + +# _locateAll_python / _locateAll_pillow +@overload +def locateOnScreen( + image: str | Image.Image, + minSearchTime: float = 0, + *, + grayscale: bool | None = None, + limit: Unused = 1, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: None = None, +) -> Box | None: ... + +# _locateAll_opencv +@overload +def locateAllOnScreen( + image: str | Image.Image | _Mat, + *, + grayscale: bool | None = None, + limit: int = 1000, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: SupportsFloat | SupportsIndex | str = 0.999, +) -> Generator[Box, None, None]: ... + +# _locateAll_python / _locateAll_pillow +@overload +def locateAllOnScreen( + image: str | Image.Image, + *, + grayscale: bool | None = None, + limit: int | None = None, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: None = None, +) -> Generator[Box, None, None]: ... + +# _locateAll_opencv +@overload +def locateCenterOnScreen( + image: str | Image.Image | _Mat, + *, + minSearchTime: float, + grayscale: bool | None = None, + limit: Unused = 1, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: SupportsFloat | SupportsIndex | str = 0.999, +) -> Point | None: ... + +# _locateAll_python / _locateAll_pillow +@overload +def locateCenterOnScreen( + image: str | Image.Image, + *, + minSearchTime: float, + grayscale: bool | None = None, + limit: Unused = 1, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: None = None, +) -> Point | None: ... + +# _locateAll_opencv +@overload +def locateOnWindow( + image: str | Image.Image | _Mat, + title: str, + *, + grayscale: bool | None = None, + limit: Unused = 1, + step: int = 1, + confidence: SupportsFloat | SupportsIndex | str = 0.999, +) -> Box | None: ... + +# _locateAll_python / _locateAll_pillow +@overload +def locateOnWindow( + image: str | Image.Image, + title: str, + *, + grayscale: bool | None = None, + limit: Unused = 1, + step: int = 1, + confidence: None = None, +) -> Box | None: ... +def showRegionOnScreen( + region: tuple[int, int, int, int], outlineColor: str = "red", filename: str = "_showRegionOnScreen.png" +) -> None: ... +def center(coords: tuple[int, int, int, int]) -> Point: ... +def pixelMatchesColor( + x: int, y: int, expectedRGBColor: tuple[int, int, int] | tuple[int, int, int, int], tolerance: int = 0 +) -> bool: ... +def pixel(x: int, y: int) -> tuple[int, int, int]: ... +def screenshot(imageFilename: StrOrBytesPath | None = None, region: tuple[int, int, int, int] | None = None) -> Image.Image: ... + +grab = screenshot + +# _locateAll_opencv +@overload +def locateAll( + needleImage: str | Image.Image | _Mat, + haystackImage: str | Image.Image | _Mat, + grayscale: bool | None = None, + limit: int = 1000, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: SupportsFloat | SupportsIndex | str = 0.999, +) -> Generator[Box, None, None]: ... 
+ +# _locateAll_python / _locateAll_pillow +@overload +def locateAll( + needleImage: str | Image.Image, + haystackImage: str | Image.Image, + grayscale: bool | None = None, + limit: int | None = None, + region: tuple[int, int, int, int] | None = None, + step: int = 1, + confidence: None = None, +) -> Generator[Box, None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..abc7ed91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# yaml._yaml is for backwards compatibility so none of it matters anyway +yaml._yaml.__test__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/METADATA.toml new file mode 100644 index 00000000..39a61569 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/METADATA.toml @@ -0,0 +1 @@ +version = "6.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/__init__.pyi new file mode 100644 index 00000000..e42fc881 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/__init__.pyi @@ -0,0 +1,326 @@ +from collections.abc import Callable, Iterable, Iterator, Mapping +from re import Pattern +from typing import Any, TypeVar, overload +from typing_extensions import TypeAlias + +from . import resolver as resolver # Help mypy a bit; this is implied by loader and dumper +from .constructor import BaseConstructor +from .cyaml import * +from .cyaml import _CLoader +from .dumper import * +from .dumper import _Inf +from .emitter import _WriteStream +from .error import * +from .events import * +from .loader import * +from .loader import _Loader +from .nodes import * +from .reader import _ReadStream +from .representer import BaseRepresenter +from .resolver import BaseResolver +from .tokens import * + +# FIXME: the functions really return str if encoding is None, otherwise bytes. Waiting for python/mypy#5621 +_Yaml: TypeAlias = Any + +_T = TypeVar("_T") +_Constructor = TypeVar("_Constructor", bound=BaseConstructor) +_Representer = TypeVar("_Representer", bound=BaseRepresenter) + +__with_libyaml__: bool +__version__: str + +def warnings(settings=...): ... +def scan(stream, Loader: type[_Loader | _CLoader] = ...): ... +def parse(stream, Loader: type[_Loader | _CLoader] = ...): ... +def compose(stream, Loader: type[_Loader | _CLoader] = ...): ... +def compose_all(stream, Loader: type[_Loader | _CLoader] = ...): ... +def load(stream: _ReadStream, Loader: type[_Loader | _CLoader]) -> Any: ... +def load_all(stream: _ReadStream, Loader: type[_Loader | _CLoader]) -> Iterator[Any]: ... +def full_load(stream: _ReadStream) -> Any: ... +def full_load_all(stream: _ReadStream) -> Iterator[Any]: ... +def safe_load(stream: _ReadStream) -> Any: ... +def safe_load_all(stream: _ReadStream) -> Iterator[Any]: ... +def unsafe_load(stream: _ReadStream) -> Any: ... +def unsafe_load_all(stream: _ReadStream) -> Iterator[Any]: ... 
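A small illustration (not from typeshed) of the loader family annotated above: load() requires an explicit Loader argument in these stubs, while safe_load(), full_load() and unsafe_load() choose one for you. It assumes PyYAML is installed; the YAML text is made up for the example.

import yaml

DOC = "name: typeshed\nstars: 3800\n"

data_safe = yaml.safe_load(DOC)                         # SafeLoader under the hood
data_explicit = yaml.load(DOC, Loader=yaml.SafeLoader)  # Loader passed explicitly
assert data_safe == data_explicit == {"name": "typeshed", "stars": 3800}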
+def emit( + events, + stream: _WriteStream[Any] | None = ..., + Dumper=..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., +): ... +@overload +def serialize_all( + nodes, + stream: _WriteStream[Any], + Dumper=..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., +) -> None: ... +@overload +def serialize_all( + nodes, + stream: None = ..., + Dumper=..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., +) -> _Yaml: ... +@overload +def serialize( + node, + stream: _WriteStream[Any], + Dumper=..., + *, + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., +) -> None: ... +@overload +def serialize( + node, + stream: None = ..., + Dumper=..., + *, + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., +) -> _Yaml: ... +@overload +def dump_all( + documents: Iterable[Any], + stream: _WriteStream[Any], + Dumper=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., +) -> None: ... +@overload +def dump_all( + documents: Iterable[Any], + stream: None = ..., + Dumper=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., +) -> _Yaml: ... 
+@overload +def dump( + data: Any, + stream: _WriteStream[Any], + Dumper=..., + *, + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., +) -> None: ... +@overload +def dump( + data: Any, + stream: None = ..., + Dumper=..., + *, + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., +) -> _Yaml: ... +@overload +def safe_dump_all( + documents: Iterable[Any], + stream: _WriteStream[Any], + *, + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., +) -> None: ... +@overload +def safe_dump_all( + documents: Iterable[Any], + stream: None = ..., + *, + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., +) -> _Yaml: ... +@overload +def safe_dump( + data: Any, + stream: _WriteStream[Any], + *, + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., +) -> None: ... +@overload +def safe_dump( + data: Any, + stream: None = ..., + *, + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., +) -> _Yaml: ... +def add_implicit_resolver( + tag: str, + regexp: Pattern[str], + first: Iterable[Any] | None = ..., + Loader: type[BaseResolver] | None = ..., + Dumper: type[BaseResolver] = ..., +) -> None: ... 
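A sketch (not from typeshed) of why dump() and safe_dump() are overloaded on the stream parameter above: with a write stream they return None, and with stream=None they return the serialized document (a str, or bytes when an encoding is given), which is what the _Yaml alias papers over. It assumes PyYAML is installed.

import io

import yaml

data = {"answer": 42, "tags": ["a", "b"]}

text = yaml.safe_dump(data)                    # stream=None -> returns a str
raw = yaml.safe_dump(data, encoding="utf-8")   # encoding given -> returns bytes

buf = io.StringIO()
result = yaml.safe_dump(data, buf)             # stream given -> returns None
assert result is None and buf.getvalue() == text
assert isinstance(text, str) and isinstance(raw, bytes)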
+def add_path_resolver( + tag: str, + path: Iterable[Any], + kind: type[Any] | None = ..., + Loader: type[BaseResolver] | None = ..., + Dumper: type[BaseResolver] = ..., +) -> None: ... +@overload +def add_constructor( + tag: str, constructor: Callable[[Loader | FullLoader | UnsafeLoader, Node], Any], Loader: None = ... +) -> None: ... +@overload +def add_constructor(tag: str, constructor: Callable[[_Constructor, Node], Any], Loader: type[_Constructor]) -> None: ... +@overload +def add_multi_constructor( + tag_prefix: str, multi_constructor: Callable[[Loader | FullLoader | UnsafeLoader, str, Node], Any], Loader: None = ... +) -> None: ... +@overload +def add_multi_constructor( + tag_prefix: str, multi_constructor: Callable[[_Constructor, str, Node], Any], Loader: type[_Constructor] +) -> None: ... +@overload +def add_representer(data_type: type[_T], representer: Callable[[Dumper, _T], Node]) -> None: ... +@overload +def add_representer(data_type: type[_T], representer: Callable[[_Representer, _T], Node], Dumper: type[_Representer]) -> None: ... +@overload +def add_multi_representer(data_type: type[_T], multi_representer: Callable[[Dumper, _T], Node]) -> None: ... +@overload +def add_multi_representer( + data_type: type[_T], multi_representer: Callable[[_Representer, _T], Node], Dumper: type[_Representer] +) -> None: ... + +class YAMLObjectMetaclass(type): + def __init__(cls, name, bases, kwds) -> None: ... + +class YAMLObject(metaclass=YAMLObjectMetaclass): + yaml_loader: Any + yaml_dumper: Any + yaml_tag: Any + yaml_flow_style: Any + @classmethod + def from_yaml(cls, loader, node): ... + @classmethod + def to_yaml(cls, dumper, data): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/_yaml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/_yaml.pyi new file mode 100644 index 00000000..33dfb596 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/_yaml.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete, SupportsRead +from collections.abc import Mapping, Sequence +from typing import IO, Any + +from .events import Event +from .nodes import Node +from .tokens import Token + +def get_version_string() -> str: ... +def get_version() -> tuple[int, int, int]: ... + +class Mark: + name: Any + index: int + line: int + column: int + buffer: Any + pointer: Any + def __init__(self, name, index: int, line: int, column: int, buffer, pointer) -> None: ... + def get_snippet(self): ... + +class CParser: + def __init__(self, stream: str | bytes | SupportsRead[str | bytes]) -> None: ... + def dispose(self) -> None: ... + def get_token(self) -> Token | None: ... + def peek_token(self) -> Token | None: ... + def check_token(self, *choices) -> bool: ... + def get_event(self) -> Event | None: ... + def peek_event(self) -> Event | None: ... + def check_event(self, *choices) -> bool: ... + def check_node(self) -> bool: ... + def get_node(self) -> Node | None: ... + def get_single_node(self) -> Node | None: ... + def raw_parse(self) -> int: ... + def raw_scan(self) -> int: ... 
+ +class CEmitter: + def __init__( + self, + stream: IO[Any], + canonical: Incomplete | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: Incomplete | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: Incomplete | None = ..., + explicit_end: Incomplete | None = ..., + version: Sequence[int] | None = ..., + tags: Mapping[str, str] | None = ..., + ) -> None: ... + def dispose(self) -> None: ... + def emit(self, event_object) -> None: ... + def open(self) -> None: ... + def close(self) -> None: ... + def serialize(self, node) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/composer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/composer.pyi new file mode 100644 index 00000000..7bc87357 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/composer.pyi @@ -0,0 +1,18 @@ +from typing import Any + +from yaml.error import MarkedYAMLError +from yaml.nodes import MappingNode, Node, ScalarNode, SequenceNode + +class ComposerError(MarkedYAMLError): ... + +class Composer: + anchors: dict[Any, Node] + def __init__(self) -> None: ... + def check_node(self) -> bool: ... + def get_node(self) -> Node | None: ... + def get_single_node(self) -> Node | None: ... + def compose_document(self) -> Node | None: ... + def compose_node(self, parent: Node | None, index: int) -> Node | None: ... + def compose_scalar_node(self, anchor: dict[Any, Node]) -> ScalarNode: ... + def compose_sequence_node(self, anchor: dict[Any, Node]) -> SequenceNode: ... + def compose_mapping_node(self, anchor: dict[Any, Node]) -> MappingNode: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/constructor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/constructor.pyi new file mode 100644 index 00000000..fb383a7d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/constructor.pyi @@ -0,0 +1,106 @@ +from collections.abc import Callable, Hashable +from datetime import date +from re import Pattern +from typing import Any, ClassVar, TypeVar +from typing_extensions import TypeAlias + +from yaml.error import MarkedYAMLError +from yaml.nodes import MappingNode, Node, ScalarNode, SequenceNode + +from .cyaml import _CLoader +from .loader import _Loader + +_L = TypeVar("_L", bound=_Loader | _CLoader) +_N = TypeVar("_N", bound=Node) + +_Scalar: TypeAlias = str | int | float | bool | None + +class ConstructorError(MarkedYAMLError): ... + +class BaseConstructor: + yaml_constructors: Any + yaml_multi_constructors: Any + constructed_objects: Any + recursive_objects: Any + state_generators: Any + deep_construct: Any + def __init__(self) -> None: ... + def check_data(self): ... + def check_state_key(self, key: str) -> None: ... + def get_data(self): ... + def get_single_data(self) -> Any: ... + def construct_document(self, node): ... + def construct_object(self, node, deep=...): ... + def construct_scalar(self, node: ScalarNode) -> _Scalar: ... + def construct_sequence(self, node: SequenceNode, deep: bool = ...) -> list[Any]: ... + def construct_mapping(self, node: MappingNode, deep: bool = ...) -> dict[Hashable, Any]: ... + def construct_pairs(self, node, deep=...): ... 
+ @classmethod + # Use typevars so we can have covariant behaviour in the parameter types + def add_constructor(cls, tag: str, constructor: Callable[[_L, _N], Any]) -> None: ... + @classmethod + def add_multi_constructor(cls, tag_prefix, multi_constructor): ... + +class SafeConstructor(BaseConstructor): + def construct_scalar(self, node: ScalarNode) -> _Scalar: ... + def flatten_mapping(self, node: MappingNode) -> None: ... + def construct_mapping(self, node: MappingNode, deep: bool = ...) -> dict[Hashable, Any]: ... + def construct_yaml_null(self, node: ScalarNode) -> None: ... + bool_values: ClassVar[dict[str, bool]] + def construct_yaml_bool(self, node: ScalarNode) -> bool: ... + def construct_yaml_int(self, node: ScalarNode) -> int: ... + inf_value: ClassVar[float] + nan_value: ClassVar[float] + def construct_yaml_float(self, node: ScalarNode) -> float: ... + def construct_yaml_binary(self, node: ScalarNode) -> bytes: ... + timestamp_regexp: ClassVar[Pattern[str]] + def construct_yaml_timestamp(self, node: ScalarNode) -> date: ... + def construct_yaml_omap(self, node): ... + def construct_yaml_pairs(self, node): ... + def construct_yaml_set(self, node): ... + def construct_yaml_str(self, node): ... + def construct_yaml_seq(self, node): ... + def construct_yaml_map(self, node): ... + def construct_yaml_object(self, node, cls): ... + def construct_undefined(self, node): ... + +class FullConstructor(SafeConstructor): + def get_state_keys_blacklist(self) -> list[str]: ... + def get_state_keys_blacklist_regexp(self) -> Pattern[str]: ... + def construct_python_str(self, node): ... + def construct_python_unicode(self, node): ... + def construct_python_bytes(self, node): ... + def construct_python_long(self, node): ... + def construct_python_complex(self, node): ... + def construct_python_tuple(self, node): ... + def find_python_module(self, name, mark, unsafe=...): ... + def find_python_name(self, name, mark, unsafe=...): ... + def construct_python_name(self, suffix, node): ... + def construct_python_module(self, suffix, node): ... + def make_python_instance(self, suffix, node, args=..., kwds=..., newobj=..., unsafe=...): ... + def set_python_instance_state(self, instance, state, unsafe: bool = ...) -> None: ... + def construct_python_object(self, suffix, node): ... + def construct_python_object_apply(self, suffix, node, newobj=...): ... + def construct_python_object_new(self, suffix, node): ... + +class UnsafeConstructor(FullConstructor): + def find_python_module(self, name, mark): ... + def find_python_name(self, name, mark): ... + def make_python_instance(self, suffix, node, args=..., kwds=..., newobj=...): ... + def set_python_instance_state(self, instance, state): ... + +class Constructor(SafeConstructor): + def construct_python_str(self, node): ... + def construct_python_unicode(self, node): ... + def construct_python_long(self, node): ... + def construct_python_complex(self, node): ... + def construct_python_tuple(self, node): ... + def find_python_module(self, name, mark): ... + def find_python_name(self, name, mark): ... + def construct_python_name(self, suffix, node): ... + def construct_python_module(self, suffix, node): ... + def make_python_instance(self, suffix, node, args=..., kwds=..., newobj=...): ... + def set_python_instance_state(self, instance, state): ... + def construct_python_object(self, suffix, node): ... + def construct_python_object_apply(self, suffix, node, newobj=...): ... + def construct_python_object_new(self, suffix, node): ... 
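A usage sketch (not from typeshed) for the add_constructor() classmethod typed above: the callback receives the loader instance and the matched node, which is why the stub threads the _L and _N type variables through. It assumes PyYAML is installed; the "!point" tag and Point tuple are made up for the example.

from typing import NamedTuple

import yaml


class Point(NamedTuple):
    x: float
    y: float


def construct_point(loader: yaml.SafeLoader, node: yaml.nodes.SequenceNode) -> Point:
    # construct_sequence() turns the node's children into plain Python values.
    return Point(*loader.construct_sequence(node))


yaml.SafeLoader.add_constructor("!point", construct_point)

assert yaml.safe_load("!point [1.5, 2.0]") == Point(1.5, 2.0)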
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/cyaml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/cyaml.pyi new file mode 100644 index 00000000..90870b0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/cyaml.pyi @@ -0,0 +1,69 @@ +from _typeshed import Incomplete, SupportsRead +from collections.abc import Mapping, Sequence +from typing import IO, Any +from typing_extensions import TypeAlias + +from ._yaml import CEmitter, CParser +from .constructor import BaseConstructor, FullConstructor, SafeConstructor, UnsafeConstructor +from .representer import BaseRepresenter, SafeRepresenter +from .resolver import BaseResolver, Resolver + +__all__ = ["CBaseLoader", "CSafeLoader", "CFullLoader", "CUnsafeLoader", "CLoader", "CBaseDumper", "CSafeDumper", "CDumper"] + +_Readable: TypeAlias = SupportsRead[str | bytes] +_CLoader: TypeAlias = CLoader | CBaseLoader | CFullLoader | CSafeLoader | CUnsafeLoader # noqa: Y047 # Used in other modules + +class CBaseLoader(CParser, BaseConstructor, BaseResolver): + def __init__(self, stream: str | bytes | _Readable) -> None: ... + +class CLoader(CParser, SafeConstructor, Resolver): + def __init__(self, stream: str | bytes | _Readable) -> None: ... + +class CSafeLoader(CParser, SafeConstructor, Resolver): + def __init__(self, stream: str | bytes | _Readable) -> None: ... + +class CFullLoader(CParser, FullConstructor, Resolver): + def __init__(self, stream: str | bytes | _Readable) -> None: ... + +class CUnsafeLoader(CParser, UnsafeConstructor, Resolver): + def __init__(self, stream: str | bytes | _Readable) -> None: ... + +class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): + def __init__( + self, + stream: IO[Any], + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: Incomplete | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: Incomplete | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: Incomplete | None = ..., + explicit_end: Incomplete | None = ..., + version: Sequence[int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., + ) -> None: ... + +class CDumper(CEmitter, SafeRepresenter, Resolver): + def __init__( + self, + stream: IO[Any], + default_style: str | None = ..., + default_flow_style: bool = ..., + canonical: Incomplete | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: Incomplete | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: Incomplete | None = ..., + explicit_end: Incomplete | None = ..., + version: Sequence[int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., + ) -> None: ... 
+ +CSafeDumper = CDumper diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/dumper.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/dumper.pyi new file mode 100644 index 00000000..7203aaea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/dumper.pyi @@ -0,0 +1,71 @@ +from collections.abc import Mapping +from typing import Any +from typing_extensions import TypeAlias + +from yaml.emitter import Emitter +from yaml.representer import BaseRepresenter, Representer, SafeRepresenter +from yaml.resolver import BaseResolver, Resolver +from yaml.serializer import Serializer + +from .emitter import _WriteStream + +# Ideally, there would be a way to limit these values to only +/- float("inf"), +# but that's not possible at the moment (https://github.com/python/typing/issues/1160). +_Inf: TypeAlias = float + +class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): + def __init__( + self, + stream: _WriteStream[Any], + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., + ) -> None: ... + +class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): + def __init__( + self, + stream: _WriteStream[Any], + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., + ) -> None: ... + +class Dumper(Emitter, Serializer, Representer, Resolver): + def __init__( + self, + stream: _WriteStream[Any], + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | _Inf | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/emitter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/emitter.pyi new file mode 100644 index 00000000..aaf3b02d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/emitter.pyi @@ -0,0 +1,119 @@ +from typing import Any, Protocol, TypeVar + +from yaml.error import YAMLError + +_T_contra = TypeVar("_T_contra", str, bytes, contravariant=True) + +class _WriteStream(Protocol[_T_contra]): + def write(self, __data: _T_contra) -> object: ... + # Optional fields: + # encoding: str + # def flush(self) -> object: ... + +class EmitterError(YAMLError): ... 
+ +class ScalarAnalysis: + scalar: Any + empty: Any + multiline: Any + allow_flow_plain: Any + allow_block_plain: Any + allow_single_quoted: Any + allow_double_quoted: Any + allow_block: Any + def __init__( + self, scalar, empty, multiline, allow_flow_plain, allow_block_plain, allow_single_quoted, allow_double_quoted, allow_block + ) -> None: ... + +class Emitter: + DEFAULT_TAG_PREFIXES: Any + stream: _WriteStream[Any] + encoding: Any + states: Any + state: Any + events: Any + event: Any + indents: Any + indent: Any + flow_level: Any + root_context: Any + sequence_context: Any + mapping_context: Any + simple_key_context: Any + line: Any + column: Any + whitespace: Any + indention: Any + open_ended: Any + canonical: Any + allow_unicode: Any + best_indent: Any + best_width: Any + best_line_break: Any + tag_prefixes: Any + prepared_anchor: Any + prepared_tag: Any + analysis: Any + style: Any + def __init__( + self, stream: _WriteStream[Any], canonical=..., indent=..., width=..., allow_unicode=..., line_break=... + ) -> None: ... + def dispose(self): ... + def emit(self, event): ... + def need_more_events(self): ... + def need_events(self, count): ... + def increase_indent(self, flow=..., indentless=...): ... + def expect_stream_start(self): ... + def expect_nothing(self): ... + def expect_first_document_start(self): ... + def expect_document_start(self, first=...): ... + def expect_document_end(self): ... + def expect_document_root(self): ... + def expect_node(self, root=..., sequence=..., mapping=..., simple_key=...): ... + def expect_alias(self): ... + def expect_scalar(self): ... + def expect_flow_sequence(self): ... + def expect_first_flow_sequence_item(self): ... + def expect_flow_sequence_item(self): ... + def expect_flow_mapping(self): ... + def expect_first_flow_mapping_key(self): ... + def expect_flow_mapping_key(self): ... + def expect_flow_mapping_simple_value(self): ... + def expect_flow_mapping_value(self): ... + def expect_block_sequence(self): ... + def expect_first_block_sequence_item(self): ... + def expect_block_sequence_item(self, first=...): ... + def expect_block_mapping(self): ... + def expect_first_block_mapping_key(self): ... + def expect_block_mapping_key(self, first=...): ... + def expect_block_mapping_simple_value(self): ... + def expect_block_mapping_value(self): ... + def check_empty_sequence(self): ... + def check_empty_mapping(self): ... + def check_empty_document(self): ... + def check_simple_key(self): ... + def process_anchor(self, indicator): ... + def process_tag(self): ... + def choose_scalar_style(self): ... + def process_scalar(self): ... + def prepare_version(self, version): ... + def prepare_tag_handle(self, handle): ... + def prepare_tag_prefix(self, prefix): ... + def prepare_tag(self, tag): ... + def prepare_anchor(self, anchor): ... + def analyze_scalar(self, scalar): ... + def flush_stream(self): ... + def write_stream_start(self): ... + def write_stream_end(self): ... + def write_indicator(self, indicator, need_whitespace, whitespace=..., indention=...): ... + def write_indent(self): ... + def write_line_break(self, data=...): ... + def write_version_directive(self, version_text): ... + def write_tag_directive(self, handle_text, prefix_text): ... + def write_single_quoted(self, text, split=...): ... + ESCAPE_REPLACEMENTS: Any + def write_double_quoted(self, text, split=...): ... + def determine_block_hints(self, text): ... + def write_folded(self, text): ... + def write_literal(self, text): ... + def write_plain(self, text, split=...): ... 
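A sketch (not from typeshed) of the _WriteStream protocol declared above: any object exposing a write() method is an acceptable dump target, it does not have to be a real file. It assumes PyYAML is installed; ListSink is a made-up class for the example.

import yaml


class ListSink:
    """Collects every chunk the emitter writes."""

    def __init__(self) -> None:
        self.chunks: list[str] = []

    def write(self, data: str) -> None:
        self.chunks.append(data)


sink = ListSink()
yaml.safe_dump({"hello": "world"}, sink)
assert "".join(sink.chunks) == "hello: world\n"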
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/error.pyi new file mode 100644 index 00000000..4bb59d35 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/error.pyi @@ -0,0 +1,26 @@ +class Mark: + name: str + index: int + line: int + column: int + buffer: str | None + pointer: int + def __init__(self, name: str, index: int, line: int, column: int, buffer: str | None, pointer: int) -> None: ... + def get_snippet(self, indent: int = ..., max_length: int = ...) -> str | None: ... + +class YAMLError(Exception): ... + +class MarkedYAMLError(YAMLError): + context: str | None + context_mark: Mark | None + problem: str | None + problem_mark: Mark | None + note: str | None + def __init__( + self, + context: str | None = ..., + context_mark: Mark | None = ..., + problem: str | None = ..., + problem_mark: Mark | None = ..., + note: str | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/events.pyi new file mode 100644 index 00000000..7096d157 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/events.pyi @@ -0,0 +1,62 @@ +from typing import Any + +class Event: + start_mark: Any + end_mark: Any + def __init__(self, start_mark=..., end_mark=...) -> None: ... + +class NodeEvent(Event): + anchor: Any + start_mark: Any + end_mark: Any + def __init__(self, anchor, start_mark=..., end_mark=...) -> None: ... + +class CollectionStartEvent(NodeEvent): + anchor: Any + tag: Any + implicit: Any + start_mark: Any + end_mark: Any + flow_style: Any + def __init__(self, anchor, tag, implicit, start_mark=..., end_mark=..., flow_style=...) -> None: ... + +class CollectionEndEvent(Event): ... + +class StreamStartEvent(Event): + start_mark: Any + end_mark: Any + encoding: Any + def __init__(self, start_mark=..., end_mark=..., encoding=...) -> None: ... + +class StreamEndEvent(Event): ... + +class DocumentStartEvent(Event): + start_mark: Any + end_mark: Any + explicit: Any + version: Any + tags: Any + def __init__(self, start_mark=..., end_mark=..., explicit=..., version=..., tags=...) -> None: ... + +class DocumentEndEvent(Event): + start_mark: Any + end_mark: Any + explicit: Any + def __init__(self, start_mark=..., end_mark=..., explicit=...) -> None: ... + +class AliasEvent(NodeEvent): ... + +class ScalarEvent(NodeEvent): + anchor: Any + tag: Any + implicit: Any + value: Any + start_mark: Any + end_mark: Any + style: Any + def __init__(self, anchor, tag, implicit, value, start_mark=..., end_mark=..., style=...) -> None: ... + +class SequenceStartEvent(CollectionStartEvent): ... +class SequenceEndEvent(CollectionEndEvent): ... +class MappingStartEvent(CollectionStartEvent): ... +class MappingEndEvent(CollectionEndEvent): ... 
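A sketch (not from typeshed) of where the event classes above show up: the low-level yaml.parse() API yields them and yaml.emit() consumes them, so a parse-then-emit round trip re-serializes a document without ever building Python objects. It assumes PyYAML is installed.

import yaml

SOURCE = "a: 1\nb: [2, 3]\n"

events = list(yaml.parse(SOURCE, Loader=yaml.SafeLoader))
assert isinstance(events[0], yaml.events.StreamStartEvent)
assert any(isinstance(e, yaml.events.MappingStartEvent) for e in events)

round_tripped = yaml.emit(events)
assert yaml.safe_load(round_tripped) == {"a": 1, "b": [2, 3]}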
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/loader.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/loader.pyi new file mode 100644 index 00000000..950f18f9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/loader.pyi @@ -0,0 +1,27 @@ +from typing_extensions import TypeAlias + +from yaml.composer import Composer +from yaml.constructor import BaseConstructor, Constructor, FullConstructor, SafeConstructor +from yaml.parser import Parser +from yaml.reader import Reader +from yaml.resolver import BaseResolver, Resolver +from yaml.scanner import Scanner + +from .reader import _ReadStream + +_Loader: TypeAlias = Loader | BaseLoader | FullLoader | SafeLoader | UnsafeLoader # noqa: Y047 # Used in other modules + +class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): + def __init__(self, stream: _ReadStream) -> None: ... + +class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver): + def __init__(self, stream: _ReadStream) -> None: ... + +class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): + def __init__(self, stream: _ReadStream) -> None: ... + +class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): + def __init__(self, stream: _ReadStream) -> None: ... + +class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver): + def __init__(self, stream: _ReadStream) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/nodes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/nodes.pyi new file mode 100644 index 00000000..4ee97126 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/nodes.pyi @@ -0,0 +1,32 @@ +from typing import Any, ClassVar + +from yaml.error import Mark + +# Any Unions: Avoid forcing the user to check for None when they know what Node was instantiated with +# Using generics may be overkill without support for default Generics +# Permissive Unions could also be useful here. +class Node: + tag: str + value: Any + start_mark: Mark | Any + end_mark: Mark | Any + def __init__(self, tag: str, value, start_mark: Mark | None, end_mark: Mark | None) -> None: ... + +class ScalarNode(Node): + id: ClassVar[str] + style: str | Any + def __init__( + self, tag: str, value, start_mark: Mark | None = ..., end_mark: Mark | None = ..., style: str | None = ... + ) -> None: ... + +class CollectionNode(Node): + flow_style: bool | Any + def __init__( + self, tag: str, value, start_mark: Mark | None = ..., end_mark: Mark | None = ..., flow_style: bool | None = ... + ) -> None: ... + +class SequenceNode(CollectionNode): + id: ClassVar[str] + +class MappingNode(CollectionNode): + id: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/parser.pyi new file mode 100644 index 00000000..9dc41d2a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/parser.pyi @@ -0,0 +1,45 @@ +from typing import Any + +from yaml.error import MarkedYAMLError + +class ParserError(MarkedYAMLError): ... 
+ +class Parser: + DEFAULT_TAGS: Any + current_event: Any + yaml_version: Any + tag_handles: Any + states: Any + marks: Any + state: Any + def __init__(self) -> None: ... + def dispose(self): ... + def check_event(self, *choices): ... + def peek_event(self): ... + def get_event(self): ... + def parse_stream_start(self): ... + def parse_implicit_document_start(self): ... + def parse_document_start(self): ... + def parse_document_end(self): ... + def parse_document_content(self): ... + def process_directives(self): ... + def parse_block_node(self): ... + def parse_flow_node(self): ... + def parse_block_node_or_indentless_sequence(self): ... + def parse_node(self, block=..., indentless_sequence=...): ... + def parse_block_sequence_first_entry(self): ... + def parse_block_sequence_entry(self): ... + def parse_indentless_sequence_entry(self): ... + def parse_block_mapping_first_key(self): ... + def parse_block_mapping_key(self): ... + def parse_block_mapping_value(self): ... + def parse_flow_sequence_first_entry(self): ... + def parse_flow_sequence_entry(self, first=...): ... + def parse_flow_sequence_entry_mapping_key(self): ... + def parse_flow_sequence_entry_mapping_value(self): ... + def parse_flow_sequence_entry_mapping_end(self): ... + def parse_flow_mapping_first_key(self): ... + def parse_flow_mapping_key(self, first=...): ... + def parse_flow_mapping_value(self): ... + def parse_flow_mapping_empty_value(self): ... + def process_empty_scalar(self, mark): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/reader.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/reader.pyi new file mode 100644 index 00000000..01d74cc9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/reader.pyi @@ -0,0 +1,39 @@ +from _typeshed import SupportsRead +from typing import Any +from typing_extensions import TypeAlias + +from yaml.error import YAMLError + +_ReadStream: TypeAlias = str | bytes | SupportsRead[str] | SupportsRead[bytes] + +class ReaderError(YAMLError): + name: Any + character: Any + position: Any + encoding: Any + reason: Any + def __init__(self, name, position, character, encoding, reason) -> None: ... + +class Reader: + name: Any + stream: SupportsRead[str] | SupportsRead[bytes] | None + stream_pointer: Any + eof: Any + buffer: Any + pointer: Any + raw_buffer: Any + raw_decode: Any + encoding: Any + index: Any + line: Any + column: Any + def __init__(self, stream: _ReadStream) -> None: ... + def peek(self, index=...): ... + def prefix(self, length=...): ... + def forward(self, length=...): ... + def get_mark(self): ... + def determine_encoding(self): ... + NON_PRINTABLE: Any + def check_printable(self, data): ... + def update(self, length): ... + def update_raw(self, size=...): ... 
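A sketch (not from typeshed) of the _ReadStream alias used above: the Reader, and therefore every load helper, accepts a str, a bytes object, or anything with a read() method. It assumes PyYAML is installed.

import io

import yaml

DOC = "key: value\n"

from_str = yaml.safe_load(DOC)
from_bytes = yaml.safe_load(DOC.encode("utf-8"))
from_file_like = yaml.safe_load(io.StringIO(DOC))
assert from_str == from_bytes == from_file_like == {"key": "value"}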
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/representer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/representer.pyi new file mode 100644 index 00000000..35f82355 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/representer.pyi @@ -0,0 +1,61 @@ +import datetime +from _typeshed import Incomplete, ReadableBuffer, SupportsItems +from collections.abc import Callable, Iterable, Mapping +from types import BuiltinFunctionType, FunctionType, ModuleType +from typing import Any, ClassVar, NoReturn, TypeVar + +from yaml.error import YAMLError as YAMLError +from yaml.nodes import MappingNode as MappingNode, Node as Node, ScalarNode as ScalarNode, SequenceNode as SequenceNode + +_T = TypeVar("_T") +_R = TypeVar("_R", bound=BaseRepresenter) + +class RepresenterError(YAMLError): ... + +class BaseRepresenter: + yaml_representers: ClassVar[dict[type[Any], Callable[[BaseRepresenter, Any], Node]]] + yaml_multi_representers: ClassVar[dict[type[Any], Callable[[BaseRepresenter, Any], Node]]] + default_style: str | Incomplete + sort_keys: bool + default_flow_style: bool + represented_objects: dict[int, Node] + object_keeper: list[Any] + alias_key: int | Incomplete + def __init__(self, default_style: str | None = ..., default_flow_style: bool = ..., sort_keys: bool = ...) -> None: ... + def represent(self, data) -> None: ... + def represent_data(self, data) -> Node: ... + @classmethod + def add_representer(cls: type[_R], data_type: type[_T], representer: Callable[[_R, _T], Node]) -> None: ... + @classmethod + def add_multi_representer(cls: type[_R], data_type: type[_T], representer: Callable[[_R, _T], Node]) -> None: ... + def represent_scalar(self, tag: str, value, style: str | None = ...) -> ScalarNode: ... + def represent_sequence(self, tag: str, sequence: Iterable[Any], flow_style: bool | None = ...) -> SequenceNode: ... + def represent_mapping( + self, tag: str, mapping: SupportsItems[Any, Any] | Iterable[tuple[Any, Any]], flow_style: bool | None = ... + ) -> MappingNode: ... + def ignore_aliases(self, data) -> bool: ... + +class SafeRepresenter(BaseRepresenter): + inf_value: ClassVar[float] + def ignore_aliases(self, data) -> bool: ... + def represent_none(self, data) -> ScalarNode: ... + def represent_str(self, data: str) -> ScalarNode: ... + def represent_binary(self, data: ReadableBuffer) -> ScalarNode: ... + def represent_bool(self, data: bool) -> ScalarNode: ... + def represent_int(self, data: int) -> ScalarNode: ... + def represent_float(self, data: float) -> ScalarNode: ... + def represent_list(self, data: Iterable[Any]) -> SequenceNode: ... + def represent_dict(self, data: SupportsItems[Any, Any] | Iterable[tuple[Any, Any]]) -> MappingNode: ... + def represent_set(self, data: Iterable[Any]) -> MappingNode: ... + def represent_date(self, data: datetime.date) -> ScalarNode: ... + def represent_datetime(self, data: datetime.datetime) -> ScalarNode: ... + def represent_yaml_object(self, tag: str, data, cls, flow_style: bool | None = ...) -> MappingNode: ... + def represent_undefined(self, data) -> NoReturn: ... + +class Representer(SafeRepresenter): + def represent_complex(self, data: complex) -> ScalarNode: ... + def represent_tuple(self, data: Iterable[Any]) -> SequenceNode: ... + def represent_name(self, data: BuiltinFunctionType | FunctionType) -> ScalarNode: ... + def represent_module(self, data: ModuleType) -> ScalarNode: ... 
+ def represent_object(self, data) -> SequenceNode | MappingNode: ... + def represent_ordered_dict(self, data: Mapping[Any, Any]) -> SequenceNode: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/resolver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/resolver.pyi new file mode 100644 index 00000000..f5f534de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/resolver.pyi @@ -0,0 +1,25 @@ +from typing import Any + +from yaml.error import YAMLError + +class ResolverError(YAMLError): ... + +class BaseResolver: + DEFAULT_SCALAR_TAG: Any + DEFAULT_SEQUENCE_TAG: Any + DEFAULT_MAPPING_TAG: Any + yaml_implicit_resolvers: Any + yaml_path_resolvers: Any + resolver_exact_paths: Any + resolver_prefix_paths: Any + def __init__(self) -> None: ... + @classmethod + def add_implicit_resolver(cls, tag, regexp, first): ... + @classmethod + def add_path_resolver(cls, tag, path, kind=...): ... + def descend_resolver(self, current_node, current_index): ... + def ascend_resolver(self): ... + def check_resolver_prefix(self, depth, path, kind, current_node, current_index): ... + def resolve(self, kind, value, implicit): ... + +class Resolver(BaseResolver): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/scanner.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/scanner.pyi new file mode 100644 index 00000000..64890a19 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/scanner.pyi @@ -0,0 +1,97 @@ +from typing import Any + +from yaml.error import MarkedYAMLError + +class ScannerError(MarkedYAMLError): ... + +class SimpleKey: + token_number: Any + required: Any + index: Any + line: Any + column: Any + mark: Any + def __init__(self, token_number, required, index, line, column, mark) -> None: ... + +class Scanner: + done: Any + flow_level: Any + tokens: Any + tokens_taken: Any + indent: Any + indents: Any + allow_simple_key: Any + possible_simple_keys: Any + def __init__(self) -> None: ... + def check_token(self, *choices): ... + def peek_token(self): ... + def get_token(self): ... + def need_more_tokens(self): ... + def fetch_more_tokens(self): ... + def next_possible_simple_key(self): ... + def stale_possible_simple_keys(self): ... + def save_possible_simple_key(self): ... + def remove_possible_simple_key(self): ... + def unwind_indent(self, column): ... + def add_indent(self, column): ... + def fetch_stream_start(self): ... + def fetch_stream_end(self): ... + def fetch_directive(self): ... + def fetch_document_start(self): ... + def fetch_document_end(self): ... + def fetch_document_indicator(self, TokenClass): ... + def fetch_flow_sequence_start(self): ... + def fetch_flow_mapping_start(self): ... + def fetch_flow_collection_start(self, TokenClass): ... + def fetch_flow_sequence_end(self): ... + def fetch_flow_mapping_end(self): ... + def fetch_flow_collection_end(self, TokenClass): ... + def fetch_flow_entry(self): ... + def fetch_block_entry(self): ... + def fetch_key(self): ... + def fetch_value(self): ... + def fetch_alias(self): ... + def fetch_anchor(self): ... + def fetch_tag(self): ... + def fetch_literal(self): ... + def fetch_folded(self): ... + def fetch_block_scalar(self, style): ... + def fetch_single(self): ... + def fetch_double(self): ... + def fetch_flow_scalar(self, style): ... 
+ def fetch_plain(self): ... + def check_directive(self): ... + def check_document_start(self): ... + def check_document_end(self): ... + def check_block_entry(self): ... + def check_key(self): ... + def check_value(self): ... + def check_plain(self): ... + def scan_to_next_token(self): ... + def scan_directive(self): ... + def scan_directive_name(self, start_mark): ... + def scan_yaml_directive_value(self, start_mark): ... + def scan_yaml_directive_number(self, start_mark): ... + def scan_tag_directive_value(self, start_mark): ... + def scan_tag_directive_handle(self, start_mark): ... + def scan_tag_directive_prefix(self, start_mark): ... + def scan_directive_ignored_line(self, start_mark): ... + def scan_anchor(self, TokenClass): ... + def scan_tag(self): ... + def scan_block_scalar(self, style): ... + def scan_block_scalar_indicators(self, start_mark): ... + def scan_block_scalar_ignored_line(self, start_mark): ... + def scan_block_scalar_indentation(self): ... + def scan_block_scalar_breaks(self, indent): ... + def scan_flow_scalar(self, style): ... + ESCAPE_REPLACEMENTS: Any + ESCAPE_CODES: Any + def scan_flow_scalar_non_spaces(self, double, start_mark): ... + def scan_flow_scalar_spaces(self, double, start_mark): ... + def scan_flow_scalar_breaks(self, double, start_mark): ... + def scan_plain(self): ... + def scan_plain_spaces(self, indent, start_mark): ... + def scan_tag_handle(self, name, start_mark): ... + def scan_tag_uri(self, name, start_mark): ... + def scan_uri_escapes(self, name, start_mark): ... + def scan_line_break(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/serializer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/serializer.pyi new file mode 100644 index 00000000..0c51590f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/serializer.pyi @@ -0,0 +1,25 @@ +from typing import Any + +from yaml.error import YAMLError +from yaml.nodes import Node + +class SerializerError(YAMLError): ... + +class Serializer: + ANCHOR_TEMPLATE: Any + use_encoding: Any + use_explicit_start: Any + use_explicit_end: Any + use_version: Any + use_tags: Any + serialized_nodes: Any + anchors: Any + last_anchor_id: Any + closed: Any + def __init__(self, encoding=..., explicit_start=..., explicit_end=..., version=..., tags=...) -> None: ... + def open(self) -> None: ... + def close(self) -> None: ... + def serialize(self, node: Node) -> None: ... + def anchor_node(self, node): ... + def generate_anchor(self, node): ... + def serialize_node(self, node, parent, index): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/tokens.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/tokens.pyi new file mode 100644 index 00000000..b258ec7b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/PyYAML/yaml/tokens.pyi @@ -0,0 +1,93 @@ +from typing import Any + +class Token: + start_mark: Any + end_mark: Any + def __init__(self, start_mark, end_mark) -> None: ... + +class DirectiveToken(Token): + id: Any + name: Any + value: Any + start_mark: Any + end_mark: Any + def __init__(self, name, value, start_mark, end_mark) -> None: ... 
+ +class DocumentStartToken(Token): + id: Any + +class DocumentEndToken(Token): + id: Any + +class StreamStartToken(Token): + id: Any + start_mark: Any + end_mark: Any + encoding: Any + def __init__(self, start_mark=..., end_mark=..., encoding=...) -> None: ... + +class StreamEndToken(Token): + id: Any + +class BlockSequenceStartToken(Token): + id: Any + +class BlockMappingStartToken(Token): + id: Any + +class BlockEndToken(Token): + id: Any + +class FlowSequenceStartToken(Token): + id: Any + +class FlowMappingStartToken(Token): + id: Any + +class FlowSequenceEndToken(Token): + id: Any + +class FlowMappingEndToken(Token): + id: Any + +class KeyToken(Token): + id: Any + +class ValueToken(Token): + id: Any + +class BlockEntryToken(Token): + id: Any + +class FlowEntryToken(Token): + id: Any + +class AliasToken(Token): + id: Any + value: Any + start_mark: Any + end_mark: Any + def __init__(self, value, start_mark, end_mark) -> None: ... + +class AnchorToken(Token): + id: Any + value: Any + start_mark: Any + end_mark: Any + def __init__(self, value, start_mark, end_mark) -> None: ... + +class TagToken(Token): + id: Any + value: Any + start_mark: Any + end_mark: Any + def __init__(self, value, start_mark, end_mark) -> None: ... + +class ScalarToken(Token): + id: Any + value: Any + plain: Any + start_mark: Any + end_mark: Any + style: Any + def __init__(self, value, plain, start_mark, end_mark, style=...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..f72c29ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/@tests/stubtest_allowlist.txt @@ -0,0 +1,9 @@ +# Pygments uses mcs, pyright wants cls +pygments.lexer.LexerMeta.__new__ +pygments.style.StyleMeta.__new__ + +# Defined in lexer classes, intended to be used as static method, but doesn't use @staticmethod +pygments.lexer.LexerMeta.analyse_text + +# Inheriting from tuple is weird +pygments.token._TokenType.__init__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/METADATA.toml new file mode 100644 index 00000000..3f4ba4a9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/METADATA.toml @@ -0,0 +1,6 @@ +version = "2.14.*" +requires = ["types-docutils", "types-setuptools"] + +[tool.stubtest] +stubtest_requirements = ["sphinx"] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/__init__.pyi new file mode 100644 index 00000000..59c06c2a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/__init__.pyi @@ -0,0 +1,19 @@ +from _typeshed import SupportsWrite +from typing import TypeVar, overload + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +__version__: str +__all__ = ["lex", "format", "highlight"] + +def lex(code, lexer): ... +@overload +def format(tokens, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ... 
+@overload +def format(tokens, formatter: Formatter[_T], outfile: None = ...) -> _T: ... +@overload +def highlight(code, lexer, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ... +@overload +def highlight(code, lexer, formatter: Formatter[_T], outfile: None = ...) -> _T: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/cmdline.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/cmdline.pyi new file mode 100644 index 00000000..76dbb3b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/cmdline.pyi @@ -0,0 +1,11 @@ +import argparse +from _typeshed import Incomplete + +def main_inner(parser, argns): ... + +class HelpFormatter(argparse.HelpFormatter): + def __init__( + self, prog, indent_increment: int = ..., max_help_position: int = ..., width: Incomplete | None = ... + ) -> None: ... + +def main(args=...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/console.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/console.pyi new file mode 100644 index 00000000..0de5c60f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/console.pyi @@ -0,0 +1,10 @@ +from typing import Any + +esc: str +codes: Any +dark_colors: Any +light_colors: Any + +def reset_color(): ... +def colorize(color_key, text): ... +def ansiformat(attr, text): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/filter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/filter.pyi new file mode 100644 index 00000000..52676b63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/filter.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete +from collections.abc import Iterable, Iterator +from typing import Any + +from pygments.lexer import Lexer +from pygments.token import _TokenType + +def apply_filters(stream, filters, lexer: Incomplete | None = ...): ... +def simplefilter(f): ... + +class Filter: + options: Any + def __init__(self, **options) -> None: ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... + +class FunctionFilter(Filter): + function: Any + def __init__(self, **options) -> None: ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/filters/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/filters/__init__.pyi new file mode 100644 index 00000000..05325c8c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/filters/__init__.pyi @@ -0,0 +1,58 @@ +from collections.abc import Generator, Iterable, Iterator +from typing import Any + +from pygments.filter import Filter +from pygments.lexer import Lexer +from pygments.token import _TokenType + +def find_filter_class(filtername): ... +def get_filter_by_name(filtername, **options): ... +def get_all_filters() -> Generator[str, None, None]: ... 
+ +class CodeTagFilter(Filter): + tag_re: Any + def __init__(self, **options) -> None: ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... + +class SymbolFilter(Filter): + latex_symbols: Any + isabelle_symbols: Any + lang_map: Any + symbols: Any + def __init__(self, **options) -> None: ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... + +class KeywordCaseFilter(Filter): + convert: Any + def __init__(self, **options) -> None: ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... + +class NameHighlightFilter(Filter): + names: Any + tokentype: Any + def __init__(self, **options) -> None: ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... + +class ErrorToken(Exception): ... + +class RaiseOnErrorTokenFilter(Filter): + exception: Any + def __init__(self, **options) -> None: ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... + +class VisibleWhitespaceFilter(Filter): + wstt: Any + def __init__(self, **options) -> None: ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... + +class GobbleFilter(Filter): + n: Any + def __init__(self, **options) -> None: ... + def gobble(self, value, left): ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... + +class TokenMergeFilter(Filter): + def __init__(self, **options) -> None: ... + def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ... + +FILTERS: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatter.pyi new file mode 100644 index 00000000..f441d487 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatter.pyi @@ -0,0 +1,22 @@ +from typing import Any, Generic, TypeVar, overload + +_T = TypeVar("_T", str, bytes) + +class Formatter(Generic[_T]): + name: Any + aliases: Any + filenames: Any + unicodeoutput: bool + style: Any + full: Any + title: Any + encoding: Any + options: Any + @overload + def __init__(self: Formatter[str], *, encoding: None = ..., outencoding: None = ..., **options) -> None: ... + @overload + def __init__(self: Formatter[bytes], *, encoding: str, outencoding: None = ..., **options) -> None: ... + @overload + def __init__(self: Formatter[bytes], *, encoding: None = ..., outencoding: str, **options) -> None: ... + def get_style_defs(self, arg: str = ...): ... + def format(self, tokensource, outfile): ... 
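A minimal sketch of how the overloaded `Formatter.__init__` signatures in `pygments/formatter.pyi` above play out at runtime, assuming Pygments is installed (`PythonLexer` and `HtmlFormatter` are ordinary concrete classes used only for illustration): a formatter built without an encoding behaves as `Formatter[str]`, while passing `encoding`/`outencoding` makes it a `Formatter[bytes]`, which changes what `highlight()` returns.

```python
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import PythonLexer

# No encoding option -> Formatter[str]; highlight() returns str.
as_text: str = highlight("print('hi')", PythonLexer(), HtmlFormatter())

# An encoding is given -> Formatter[bytes]; highlight() returns bytes.
as_bytes: bytes = highlight("print('hi')", PythonLexer(), HtmlFormatter(encoding="utf-8"))
```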
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/__init__.pyi new file mode 100644 index 00000000..217aa9e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/__init__.pyi @@ -0,0 +1,25 @@ +from collections.abc import Generator +from typing import Any + +from ..formatter import Formatter +from .bbcode import BBCodeFormatter as BBCodeFormatter +from .html import HtmlFormatter as HtmlFormatter +from .img import ( + BmpImageFormatter as BmpImageFormatter, + GifImageFormatter as GifImageFormatter, + ImageFormatter as ImageFormatter, + JpgImageFormatter as JpgImageFormatter, +) +from .irc import IRCFormatter as IRCFormatter +from .latex import LatexFormatter as LatexFormatter +from .other import NullFormatter as NullFormatter, RawTokenFormatter as RawTokenFormatter, TestcaseFormatter as TestcaseFormatter +from .pangomarkup import PangoMarkupFormatter as PangoMarkupFormatter +from .rtf import RtfFormatter as RtfFormatter +from .svg import SvgFormatter as SvgFormatter +from .terminal import TerminalFormatter as TerminalFormatter +from .terminal256 import Terminal256Formatter as Terminal256Formatter, TerminalTrueColorFormatter as TerminalTrueColorFormatter + +def get_all_formatters() -> Generator[type[Formatter[Any]], None, None]: ... +def get_formatter_by_name(_alias, **options): ... +def load_formatter_from_file(filename, formattername: str = ..., **options): ... +def get_formatter_for_filename(fn, **options): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/_mapping.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/_mapping.pyi new file mode 100644 index 00000000..a9e5864b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/_mapping.pyi @@ -0,0 +1,3 @@ +from typing import Any + +FORMATTERS: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/bbcode.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/bbcode.pyi new file mode 100644 index 00000000..df1708be --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/bbcode.pyi @@ -0,0 +1,12 @@ +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class BBCodeFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + styles: Any + def format_unencoded(self, tokensource, outfile) -> None: ... 
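A short, hedged sketch of the lookup helpers declared in `pygments/formatters/__init__.pyi` above; the alias and filename used here are illustrative only.

```python
from pygments.formatters import get_all_formatters, get_formatter_by_name, get_formatter_for_filename

html = get_formatter_by_name("html", linenos=True)         # formatter instance looked up by alias
latex = get_formatter_for_filename("example.tex")          # chosen from the filename pattern
names = sorted(cls.name for cls in get_all_formatters())   # get_all_formatters() yields classes
```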
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/html.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/html.pyi new file mode 100644 index 00000000..b34348ff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/html.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class HtmlFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + title: Any + nowrap: Any + noclasses: Any + classprefix: Any + cssclass: Any + cssstyles: Any + prestyles: Any + cssfile: Any + noclobber_cssfile: Any + tagsfile: Any + tagurlformat: Any + filename: Any + wrapcode: Any + span_element_openers: Any + linenos: int + linenostart: Any + linenostep: Any + linenospecial: Any + nobackground: Any + lineseparator: Any + lineanchors: Any + linespans: Any + anchorlinenos: Any + hl_lines: Any + def get_style_defs(self, arg: Incomplete | None = ...): ... + def get_token_style_defs(self, arg: Incomplete | None = ...): ... + def get_background_style_defs(self, arg: Incomplete | None = ...): ... + def get_linenos_style_defs(self): ... + def get_css_prefix(self, arg): ... + def wrap(self, source): ... + def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/img.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/img.pyi new file mode 100644 index 00000000..255dc80f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/img.pyi @@ -0,0 +1,66 @@ +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class PilNotAvailable(ImportError): ... +class FontNotFound(Exception): ... + +class FontManager: + font_name: Any + font_size: Any + fonts: Any + encoding: Any + def __init__(self, font_name, font_size: int = ...) -> None: ... + def get_char_size(self): ... + def get_text_size(self, text): ... + def get_font(self, bold, oblique): ... + +class ImageFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + unicodeoutput: bool + default_image_format: str + encoding: str + styles: Any + background_color: str + image_format: Any + image_pad: Any + line_pad: Any + fonts: Any + line_number_fg: Any + line_number_bg: Any + line_number_chars: Any + line_number_bold: Any + line_number_italic: Any + line_number_pad: Any + line_numbers: Any + line_number_separator: Any + line_number_step: Any + line_number_start: Any + line_number_width: Any + hl_lines: Any + hl_color: Any + drawables: Any + def get_style_defs(self, arg: str = ...) -> None: ... + def format(self, tokensource, outfile) -> None: ... 
+ +class GifImageFormatter(ImageFormatter[_T]): + name: str + aliases: Any + filenames: Any + default_image_format: str + +class JpgImageFormatter(ImageFormatter[_T]): + name: str + aliases: Any + filenames: Any + default_image_format: str + +class BmpImageFormatter(ImageFormatter[_T]): + name: str + aliases: Any + filenames: Any + default_image_format: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/irc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/irc.pyi new file mode 100644 index 00000000..7af728d1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/irc.pyi @@ -0,0 +1,14 @@ +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class IRCFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + darkbg: Any + colorscheme: Any + linenos: Any + def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/latex.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/latex.pyi new file mode 100644 index 00000000..83147436 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/latex.pyi @@ -0,0 +1,34 @@ +from typing import Any, TypeVar + +from pygments.formatter import Formatter +from pygments.lexer import Lexer + +_T = TypeVar("_T", str, bytes) + +class LatexFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + docclass: Any + preamble: Any + linenos: Any + linenostart: Any + linenostep: Any + verboptions: Any + nobackground: Any + commandprefix: Any + texcomments: Any + mathescape: Any + escapeinside: Any + left: Any + right: Any + envname: Any + def get_style_defs(self, arg: str = ...): ... + def format_unencoded(self, tokensource, outfile) -> None: ... + +class LatexEmbeddedLexer(Lexer): + left: Any + right: Any + lang: Any + def __init__(self, left, right, lang, **options) -> None: ... + def get_tokens_unprocessed(self, text): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/other.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/other.pyi new file mode 100644 index 00000000..e02007b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/other.pyi @@ -0,0 +1,26 @@ +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class NullFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + def format(self, tokensource, outfile) -> None: ... + +class RawTokenFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + unicodeoutput: bool + encoding: str + compress: Any + error_color: Any + def format(self, tokensource, outfile) -> None: ... + +class TestcaseFormatter(Formatter[_T]): + name: str + aliases: Any + def format(self, tokensource, outfile) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/pangomarkup.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/pangomarkup.pyi new file mode 100644 index 00000000..d266bbe3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/pangomarkup.pyi @@ -0,0 +1,12 @@ +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class PangoMarkupFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + styles: Any + def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/rtf.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/rtf.pyi new file mode 100644 index 00000000..900f43d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/rtf.pyi @@ -0,0 +1,13 @@ +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class RtfFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + fontface: Any + fontsize: Any + def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/svg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/svg.pyi new file mode 100644 index 00000000..f349157c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/svg.pyi @@ -0,0 +1,22 @@ +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class SvgFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + nowrap: Any + fontfamily: Any + fontsize: Any + xoffset: Any + yoffset: Any + ystep: Any + spacehack: Any + linenos: Any + linenostart: Any + linenostep: Any + linenowidth: Any + def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/terminal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/terminal.pyi new file mode 100644 index 00000000..7448f17a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/terminal.pyi @@ -0,0 +1,15 @@ +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class TerminalFormatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + darkbg: Any + colorscheme: Any + linenos: Any + def format(self, tokensource, outfile): ... + def format_unencoded(self, tokensource, outfile) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/terminal256.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/terminal256.pyi new file mode 100644 index 00000000..82b06aec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/formatters/terminal256.pyi @@ -0,0 +1,44 @@ +from _typeshed import Incomplete +from typing import Any, TypeVar + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) + +class EscapeSequence: + fg: Any + bg: Any + bold: Any + underline: Any + italic: Any + def __init__( + self, + fg: Incomplete | None = ..., + bg: Incomplete | None = ..., + bold: bool = ..., + underline: bool = ..., + italic: bool = ..., + ) -> None: ... + def escape(self, attrs): ... + def color_string(self): ... + def true_color_string(self): ... + def reset_string(self): ... + +class Terminal256Formatter(Formatter[_T]): + name: str + aliases: Any + filenames: Any + xterm_colors: Any + best_match: Any + style_string: Any + usebold: Any + useunderline: Any + useitalic: Any + linenos: Any + def format(self, tokensource, outfile): ... + def format_unencoded(self, tokensource, outfile) -> None: ... + +class TerminalTrueColorFormatter(Terminal256Formatter[_T]): + name: str + aliases: Any + filenames: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/lexer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/lexer.pyi new file mode 100644 index 00000000..deda4ff5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/lexer.pyi @@ -0,0 +1,100 @@ +from _typeshed import Incomplete +from collections.abc import Iterable, Iterator, Sequence +from typing import Any + +from pygments.token import _TokenType +from pygments.util import Future + +class LexerMeta(type): + def __new__(cls, name, bases, d): ... + def analyse_text(self, text) -> None: ... # actually defined in class Lexer + # ClassVars of Lexer, but same situation as with StyleMeta and Style + name: str + aliases: Sequence[str] # not intended mutable + filenames: Sequence[str] + alias_filenames: Sequence[str] + mimetypes: Sequence[str] + priority: int + +class Lexer(metaclass=LexerMeta): + options: Any + stripnl: Any + stripall: Any + ensurenl: Any + tabsize: Any + encoding: Any + filters: Any + def __init__(self, **options) -> None: ... + def add_filter(self, filter_, **options) -> None: ... + def get_tokens(self, text: str, unfiltered: bool = ...) -> Iterator[tuple[_TokenType, str]]: ... + def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ... + +class DelegatingLexer(Lexer): + root_lexer: Any + language_lexer: Any + needle: Any + def __init__(self, _root_lexer, _language_lexer, _needle=..., **options) -> None: ... + def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ... + +class include(str): ... +class _inherit: ... + +inherit: Any + +class combined(tuple[Any, ...]): + def __new__(cls, *args): ... + def __init__(self, *args) -> None: ... + +class _PseudoMatch: + def __init__(self, start, text) -> None: ... + def start(self, arg: Incomplete | None = ...): ... + def end(self, arg: Incomplete | None = ...): ... + def group(self, arg: Incomplete | None = ...): ... + def groups(self): ... + def groupdict(self): ... 
+ +def bygroups(*args): ... + +class _This: ... + +this: Any + +def using(_other, **kwargs): ... + +class default: + state: Any + def __init__(self, state) -> None: ... + +class words(Future): + words: Any + prefix: Any + suffix: Any + def __init__(self, words, prefix: str = ..., suffix: str = ...) -> None: ... + def get(self): ... + +class RegexLexerMeta(LexerMeta): + def process_tokendef(cls, name, tokendefs: Incomplete | None = ...): ... + def get_tokendefs(cls): ... + def __call__(cls, *args, **kwds): ... + +class RegexLexer(Lexer, metaclass=RegexLexerMeta): + flags: Any + tokens: Any + def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ...) -> Iterator[tuple[int, _TokenType, str]]: ... + +class LexerContext: + text: Any + pos: Any + end: Any + stack: Any + def __init__(self, text, pos, stack: Incomplete | None = ..., end: Incomplete | None = ...) -> None: ... + +class ExtendedRegexLexer(RegexLexer): + def get_tokens_unprocessed( # type: ignore[override] + self, text: str | None = ..., context: LexerContext | None = ... + ) -> Iterator[tuple[int, _TokenType, str]]: ... + +class ProfilingRegexLexerMeta(RegexLexerMeta): ... + +class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta): + def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ...) -> Iterator[tuple[int, _TokenType, str]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/lexers/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/lexers/__init__.pyi new file mode 100644 index 00000000..4421d7a2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/lexers/__init__.pyi @@ -0,0 +1,19 @@ +from _typeshed import FileDescriptorOrPath, Incomplete, StrPath +from collections.abc import Iterator +from typing import Any + +from pygments.lexer import Lexer, LexerMeta + +def get_all_lexers(plugins: bool = ...) -> Iterator[tuple[str, tuple[str, ...], tuple[str, ...], tuple[str, ...]]]: ... +def find_lexer_class(name: str) -> LexerMeta | None: ... +def find_lexer_class_by_name(_alias: str) -> LexerMeta: ... +def get_lexer_by_name(_alias: str, **options: Any) -> Lexer: ... +def load_lexer_from_file(filename: FileDescriptorOrPath, lexername: str = ..., **options: Any) -> Lexer: ... +def find_lexer_class_for_filename(_fn: StrPath, code: str | bytes | None = ...) -> LexerMeta | None: ... +def get_lexer_for_filename(_fn: StrPath, code: str | bytes | None = ..., **options: Any) -> Lexer: ... +def get_lexer_for_mimetype(_mime: str, **options: Any) -> Lexer: ... +def guess_lexer_for_filename(_fn: StrPath, _text: str, **options: Any) -> LexerMeta: ... +def guess_lexer(_text: str | bytes, **options: Any) -> Lexer: ... + +# Having every lexer class here doesn't seem to be worth it +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/modeline.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/modeline.pyi new file mode 100644 index 00000000..3bfa9860 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/modeline.pyi @@ -0,0 +1 @@ +def get_filetype_from_buffer(buf, max_lines: int = ...): ... 
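The `RegexLexer`, `bygroups`, and `words` helpers stubbed in `pygments/lexer.pyi` above are the usual building blocks for custom lexers. A minimal sketch with a hypothetical toy grammar (the `ToyLexer` class and its rules are illustrative, not part of the stubs):

```python
from pygments.lexer import RegexLexer, bygroups, words
from pygments.token import Keyword, Name, Operator, Text, Whitespace

class ToyLexer(RegexLexer):
    """Hypothetical toy language: `let name = ...` assignments only."""
    name = "Toy"
    aliases = ["toy"]
    tokens = {
        "root": [
            (r"\s+", Whitespace),
            (words(("let", "const"), suffix=r"\b"), Keyword),
            # bygroups() distributes the regex groups over several token types.
            (r"([A-Za-z_]\w*)(\s*)(=)", bygroups(Name.Variable, Whitespace, Operator)),
            (r".", Text),
        ],
    }

for token_type, value in ToyLexer().get_tokens("let answer = 42"):
    print(token_type, repr(value))
```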
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/plugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/plugin.pyi new file mode 100644 index 00000000..308e2195 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/plugin.pyi @@ -0,0 +1,19 @@ +from collections.abc import Generator, Iterable +from typing import Any + +from pkg_resources import EntryPoint +from pygments.filter import Filter +from pygments.formatter import Formatter +from pygments.lexer import Lexer +from pygments.style import Style + +LEXER_ENTRY_POINT: str +FORMATTER_ENTRY_POINT: str +STYLE_ENTRY_POINT: str +FILTER_ENTRY_POINT: str + +def iter_entry_points(group_name: str) -> Iterable[EntryPoint]: ... +def find_plugin_lexers() -> Generator[type[Lexer], None, None]: ... +def find_plugin_formatters() -> Generator[tuple[str, type[Formatter[Any]]], None, None]: ... +def find_plugin_styles() -> Generator[tuple[str, type[Style]], None, None]: ... +def find_plugin_filters() -> Generator[tuple[str, type[Filter]], None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/regexopt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/regexopt.pyi new file mode 100644 index 00000000..799bebfc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/regexopt.pyi @@ -0,0 +1,8 @@ +from typing import Any + +CS_ESCAPE: Any +FIRST_ELEMENT: Any + +def make_charset(letters): ... +def regex_opt_inner(strings, open_paren): ... +def regex_opt(strings, prefix: str = ..., suffix: str = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/scanner.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/scanner.pyi new file mode 100644 index 00000000..d43e47da --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/scanner.pyi @@ -0,0 +1,19 @@ +from typing import Any + +class EndOfText(RuntimeError): ... + +class Scanner: + data: Any + data_length: Any + start_pos: int + pos: int + flags: Any + last: Any + match: Any + def __init__(self, text, flags: int = ...) -> None: ... + @property + def eos(self): ... + def check(self, pattern): ... + def test(self, pattern): ... + def scan(self, pattern): ... + def get_char(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/sphinxext.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/sphinxext.pyi new file mode 100644 index 00000000..ef79a1ea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/sphinxext.pyi @@ -0,0 +1,22 @@ +from typing import Any + +from docutils.parsers.rst import Directive + +MODULEDOC: str +LEXERDOC: str +FMTERDOC: str +FILTERDOC: str + +class PygmentsDoc(Directive): + has_content: bool + required_arguments: int + optional_arguments: int + final_argument_whitespace: bool + option_spec: Any + filenames: Any + def run(self): ... + def document_lexers(self): ... + def document_formatters(self): ... + def document_filters(self): ... + +def setup(app) -> None: ... 
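A brief hedged sketch of the entry-point discovery functions declared in `pygments/plugin.pyi` above, following the generator signatures given in the stub:

```python
from pygments.plugin import find_plugin_formatters, find_plugin_lexers

# Entry-point based discovery of third-party additions.
extra_lexers = list(find_plugin_lexers())           # yields lexer classes
extra_formatters = dict(find_plugin_formatters())   # yields (name, formatter class) pairs
```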
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/style.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/style.pyi new file mode 100644 index 00000000..97ff2ff8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/style.pyi @@ -0,0 +1,40 @@ +from collections.abc import Iterator, Mapping, Set as AbstractSet +from typing_extensions import TypedDict + +from pygments.token import _TokenType + +ansicolors: AbstractSet[str] # not intended to be mutable + +class _StyleDict(TypedDict): + color: str | None + bold: bool + italic: bool + underline: bool + bgcolor: str | None + border: str | None + roman: bool | None # lol yes, can be True or False or None + sans: bool | None + mono: bool | None + ansicolor: str | None + bgansicolor: str | None + +class StyleMeta(type): + def __new__(cls, name, bases, dct): ... + def style_for_token(cls, token: _TokenType) -> _StyleDict: ... + def styles_token(cls, ttype: _TokenType) -> bool: ... + def list_styles(cls) -> list[tuple[_TokenType, _StyleDict]]: ... + def __iter__(cls) -> Iterator[tuple[_TokenType, _StyleDict]]: ... + def __len__(cls) -> int: ... + # These are a bit tricky. + # Technically should be ClassVar in class Style. + # But then we can't use StyleMeta to denote a style class. + # We need that because Type[Style] is not iterable, for example. + background_color: str + highlight_color: str + line_number_color: str + line_number_background_color: str + line_number_special_color: str + line_number_special_background_color: str + styles: Mapping[_TokenType, str] # not intended to be mutable + +class Style(metaclass=StyleMeta): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/styles/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/styles/__init__.pyi new file mode 100644 index 00000000..18906e41 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/styles/__init__.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from collections.abc import Iterator, Mapping + +from pygments.style import StyleMeta +from pygments.util import ClassNotFound as ClassNotFound + +STYLE_MAP: Mapping[str, str] + +def get_style_by_name(name) -> StyleMeta: ... +def get_all_styles() -> Iterator[str]: ... + +# Having every style class here doesn't seem to be worth it +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/token.pyi new file mode 100644 index 00000000..cd63f2b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/token.pyi @@ -0,0 +1,34 @@ +from collections.abc import Mapping +from typing import Any +from typing_extensions import Self + +class _TokenType(tuple[str, ...]): + parent: _TokenType | None + def split(self) -> list[_TokenType]: ... + subtypes: set[_TokenType] + def __contains__(self, val: _TokenType) -> bool: ... # type: ignore[override] + def __getattr__(self, name: str) -> _TokenType: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... 
+ +Token: _TokenType +Text: _TokenType +Whitespace: _TokenType +Escape: _TokenType +Error: _TokenType +Other: _TokenType +Keyword: _TokenType +Name: _TokenType +Literal: _TokenType +String: _TokenType +Number: _TokenType +Punctuation: _TokenType +Operator: _TokenType +Comment: _TokenType +Generic: _TokenType + +def is_token_subtype(ttype, other): ... +def string_to_tokentype(s): ... + +# dict, but shouldn't be mutated +STANDARD_TYPES: Mapping[_TokenType, str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/unistring.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/unistring.pyi new file mode 100644 index 00000000..6dd2b3fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/unistring.pyi @@ -0,0 +1,38 @@ +from typing import Any + +Cc: str +Cf: str +Cn: str +Co: str +Cs: str +Ll: str +Lm: str +Lo: str +Lt: str +Lu: str +Mc: str +Me: str +Mn: str +Nd: str +Nl: str +No: str +Pc: str +Pd: str +Pe: str +Pf: str +Pi: str +Po: str +Ps: str +Sc: str +Sk: str +Sm: str +So: str +Zl: str +Zp: str +Zs: str +xid_continue: str +xid_start: str +cats: Any + +def combine(*args): ... +def allexcept(*args): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/util.pyi new file mode 100644 index 00000000..13ecaabe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Pygments/pygments/util.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete +from io import TextIOWrapper +from typing import Any + +split_path_re: Any +doctype_lookup_re: Any +tag_re: Any +xml_decl_re: Any + +class ClassNotFound(ValueError): ... +class OptionError(Exception): ... + +def get_choice_opt(options, optname, allowed, default: Incomplete | None = ..., normcase: bool = ...): ... +def get_bool_opt(options, optname, default: Incomplete | None = ...): ... +def get_int_opt(options, optname, default: Incomplete | None = ...): ... +def get_list_opt(options, optname, default: Incomplete | None = ...): ... +def docstring_headline(obj): ... +def make_analysator(f): ... +def shebang_matches(text, regex): ... +def doctype_matches(text, regex): ... +def html_doctype_matches(text): ... +def looks_like_xml(text): ... +def surrogatepair(c): ... +def format_lines(var_name, seq, raw: bool = ..., indent_level: int = ...): ... +def duplicates_removed(it, already_seen=...): ... + +class Future: + def get(self) -> None: ... + +def guess_decode(text): ... +def guess_decode_from_terminal(text, term): ... +def terminal_encoding(term): ... + +class UnclosingTextIOWrapper(TextIOWrapper): + def close(self) -> None: ... 
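`_TokenType.__getattr__` in `pygments/token.pyi` above is what lets attribute access mint new sub-token types on demand, with containment expressing the subtype relation. A small sketch, assuming Pygments is installed:

```python
from pygments.token import Keyword, Name, Token, is_token_subtype

func = Name.Function                  # sub-token types are created on first attribute access
assert func is Name.Function          # ...and cached, so the same object comes back
assert func in Name                   # __contains__ implements the subtype relation
assert is_token_subtype(func, Token)  # every token type is a subtype of the root Token
assert not is_token_subtype(Keyword, Name)
```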
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..21c09df7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/@tests/stubtest_allowlist.txt @@ -0,0 +1,1192 @@ +# wrong argument name in implementation ("self" instead of "cls") +sqlalchemy.engine.URL.__new__ +sqlalchemy.engine.url.URL.__new__ +sqlalchemy.util.langhelpers._symbol.__new__ + +# unnecessary re-exports +sqlalchemy.util._collections.* +sqlalchemy.util.compat.* + +# forwards arguments to another function +sqlalchemy.ext.declarative.as_declarative + +# stdlib re-exports with stubtest issues +sqlalchemy.orm.collections.InstrumentedList.* +sqlalchemy.orm.collections.InstrumentedSet.* +sqlalchemy.orm.collections.MappedCollection.* +sqlalchemy.util.StringIO.* + +# method arguments starting with double underscores in the implementation +sqlalchemy.testing.resolve_lambda +sqlalchemy.testing.util.resolve_lambda +sqlalchemy.util.WeakSequence.__init__ + +# not always present +sqlalchemy.engine.Engine.logging_name # initialized if not None +sqlalchemy.engine.base.Engine.logging_name # initialized if not None +sqlalchemy.sql.lambdas.PyWrapper.__clause_element__ +sqlalchemy.testing.util.non_refcount_gc_collect + +# replaced at runtime +sqlalchemy.orm.strategy_options.contains_eager +sqlalchemy.orm.strategy_options.load_only +sqlalchemy.orm.strategy_options.joinedload +sqlalchemy.orm.strategy_options.subqueryload +sqlalchemy.orm.strategy_options.selectinload +sqlalchemy.orm.strategy_options.lazyload +sqlalchemy.orm.strategy_options.immediateload +sqlalchemy.orm.strategy_options.noload +sqlalchemy.orm.strategy_options.raiseload +sqlalchemy.orm.strategy_options.defaultload +sqlalchemy.orm.strategy_options.defer +sqlalchemy.orm.strategy_options.undefer +sqlalchemy.orm.strategy_options.undefer_group +sqlalchemy.orm.strategy_options.with_expression +sqlalchemy.orm.strategy_options.selectin_polymorphic +sqlalchemy.testing.provision.configure_follower +sqlalchemy.testing.provision.create_db +sqlalchemy.testing.provision.drop_all_schema_objects_post_tables +sqlalchemy.testing.provision.drop_all_schema_objects_pre_tables +sqlalchemy.testing.provision.drop_db +sqlalchemy.testing.provision.follower_url_from_main +sqlalchemy.testing.provision.generate_driver_url +sqlalchemy.testing.provision.get_temp_table_name +sqlalchemy.testing.provision.post_configure_engine +sqlalchemy.testing.provision.prepare_for_drop_tables +sqlalchemy.testing.provision.run_reap_dbs +sqlalchemy.testing.provision.set_default_schema_on_connection +sqlalchemy.testing.provision.stop_test_class_outside_fixtures +sqlalchemy.testing.provision.temp_table_keyword_args +sqlalchemy.testing.provision.update_db_opts + +# KeyError/AttributeError on import due to dynamic initialization from a different module +sqlalchemy.testing.fixtures +sqlalchemy.testing.pickleable +sqlalchemy.testing.plugin.bootstrap + +# Initialized to bool during __init__() +sqlalchemy.orm.Mapper.single +sqlalchemy.orm.mapper.Mapper.single + +# Dynamically added methods where the first argument is not named "self" +sqlalchemy.orm.Load.contains_eager +sqlalchemy.orm.Load.defaultload +sqlalchemy.orm.Load.defer +sqlalchemy.orm.Load.immediateload +sqlalchemy.orm.Load.joinedload +sqlalchemy.orm.Load.lazyload +sqlalchemy.orm.Load.load_only 
+sqlalchemy.orm.Load.noload +sqlalchemy.orm.Load.raiseload +sqlalchemy.orm.Load.selectin_polymorphic +sqlalchemy.orm.Load.selectinload +sqlalchemy.orm.Load.subqueryload +sqlalchemy.orm.Load.undefer +sqlalchemy.orm.Load.undefer_group +sqlalchemy.orm.Load.with_expression +sqlalchemy.orm.strategy_options.Load.contains_eager +sqlalchemy.orm.strategy_options.Load.defaultload +sqlalchemy.orm.strategy_options.Load.defer +sqlalchemy.orm.strategy_options.Load.immediateload +sqlalchemy.orm.strategy_options.Load.joinedload +sqlalchemy.orm.strategy_options.Load.lazyload +sqlalchemy.orm.strategy_options.Load.load_only +sqlalchemy.orm.strategy_options.Load.noload +sqlalchemy.orm.strategy_options.Load.raiseload +sqlalchemy.orm.strategy_options.Load.selectin_polymorphic +sqlalchemy.orm.strategy_options.Load.selectinload +sqlalchemy.orm.strategy_options.Load.subqueryload +sqlalchemy.orm.strategy_options.Load.undefer +sqlalchemy.orm.strategy_options.Load.undefer_group +sqlalchemy.orm.strategy_options.Load.with_expression + +# abstract fields not present at runtime +sqlalchemy.engine.Transaction.connection +sqlalchemy.engine.Transaction.is_active +sqlalchemy.engine.base.Transaction.connection +sqlalchemy.engine.base.Transaction.is_active + +# initialized to None during class construction, but overridden during __init__() +sqlalchemy.engine.Connection.engine +sqlalchemy.engine.base.Connection.engine + +# uses @memoized_property at runtime, but we use @property for compatibility +sqlalchemy.engine.URL.normalized_query +sqlalchemy.engine.url.URL.normalized_query + +# runtime has extra internal arguments that are inconsistent across micro versions +sqlalchemy.testing.engines.testing_engine + +# __new__ signature conflicts with __init__ signature (which is more precise), +# so __new__ is deliberately omitted in the stub +sqlalchemy.sql.annotation.Annotated.__new__ + +# At runtime __new__ is defined, but we define __init__ in the stub +# because otherwise all subclasses would be identified by pyright +# as having conflicting __new__/__init__ methods +sqlalchemy.orm.unitofwork.PostSortRec.__new__ + +# unclear problems +sqlalchemy.sql.elements.quoted_name.lower +sqlalchemy.sql.elements.quoted_name.upper +sqlalchemy.sql.expression.quoted_name.lower +sqlalchemy.sql.expression.quoted_name.upper +sqlalchemy.sql.quoted_name.lower +sqlalchemy.sql.quoted_name.upper +sqlalchemy.util.callable + +sqlalchemy.dialects.mssql.base.MSExecutionContext.get_result_cursor_strategy +sqlalchemy.dialects.postgresql.base.PGDDLCompiler.visit_foreign_key_constraint +sqlalchemy.engine.ExecutionContext.get_result_cursor_strategy +sqlalchemy.engine.interfaces.ExecutionContext.get_result_cursor_strategy +sqlalchemy.orm.ColumnProperty.Comparator.__clause_element__ +sqlalchemy.orm.properties.ColumnProperty.Comparator.__clause_element__ + +# Metaclass differs: +sqlalchemy.ARRAY +sqlalchemy.BIGINT +sqlalchemy.BINARY +sqlalchemy.BLOB +sqlalchemy.BOOLEAN +sqlalchemy.BigInteger +sqlalchemy.Boolean +sqlalchemy.CHAR +sqlalchemy.CLOB +sqlalchemy.CheckConstraint +sqlalchemy.Column +sqlalchemy.ColumnDefault +sqlalchemy.Computed +sqlalchemy.Constraint +sqlalchemy.DATE +sqlalchemy.DATETIME +sqlalchemy.DDL +sqlalchemy.DECIMAL +sqlalchemy.Date +sqlalchemy.DateTime +sqlalchemy.Enum +sqlalchemy.FLOAT +sqlalchemy.Float +sqlalchemy.ForeignKey +sqlalchemy.ForeignKeyConstraint +sqlalchemy.INTEGER +sqlalchemy.Identity +sqlalchemy.Index +sqlalchemy.Integer +sqlalchemy.Interval +sqlalchemy.JSON +sqlalchemy.JSON.JSONElementType +sqlalchemy.JSON.JSONIndexType 
+sqlalchemy.JSON.JSONIntIndexType +sqlalchemy.JSON.JSONPathType +sqlalchemy.JSON.JSONStrIndexType +sqlalchemy.LargeBinary +sqlalchemy.MetaData +sqlalchemy.NCHAR +sqlalchemy.NUMERIC +sqlalchemy.NVARCHAR +sqlalchemy.Numeric +sqlalchemy.PickleType +sqlalchemy.PrimaryKeyConstraint +sqlalchemy.REAL +sqlalchemy.SMALLINT +sqlalchemy.Sequence +sqlalchemy.SmallInteger +sqlalchemy.String +sqlalchemy.TEXT +sqlalchemy.TIME +sqlalchemy.TIMESTAMP +sqlalchemy.Table +sqlalchemy.Text +sqlalchemy.ThreadLocalMetaData +sqlalchemy.Time +sqlalchemy.TupleType +sqlalchemy.TypeDecorator +sqlalchemy.Unicode +sqlalchemy.UnicodeText +sqlalchemy.UniqueConstraint +sqlalchemy.VARBINARY +sqlalchemy.VARCHAR +sqlalchemy.dialects.firebird.BIGINT +sqlalchemy.dialects.firebird.BLOB +sqlalchemy.dialects.firebird.CHAR +sqlalchemy.dialects.firebird.DATE +sqlalchemy.dialects.firebird.FLOAT +sqlalchemy.dialects.firebird.NUMERIC +sqlalchemy.dialects.firebird.SMALLINT +sqlalchemy.dialects.firebird.TEXT +sqlalchemy.dialects.firebird.TIME +sqlalchemy.dialects.firebird.TIMESTAMP +sqlalchemy.dialects.firebird.VARCHAR +sqlalchemy.dialects.firebird.base.BIGINT +sqlalchemy.dialects.firebird.base.BLOB +sqlalchemy.dialects.firebird.base.CHAR +sqlalchemy.dialects.firebird.base.DATE +sqlalchemy.dialects.firebird.base.FBTypeCompiler +sqlalchemy.dialects.firebird.base.FLOAT +sqlalchemy.dialects.firebird.base.INTEGER +sqlalchemy.dialects.firebird.base.Integer +sqlalchemy.dialects.firebird.base.NUMERIC +sqlalchemy.dialects.firebird.base.SMALLINT +sqlalchemy.dialects.firebird.base.TEXT +sqlalchemy.dialects.firebird.base.TIME +sqlalchemy.dialects.firebird.base.TIMESTAMP +sqlalchemy.dialects.firebird.base.VARCHAR +sqlalchemy.dialects.firebird.base._FBDateTime +sqlalchemy.dialects.firebird.base._StringType +sqlalchemy.dialects.firebird.kinterbasdb._FBFloat_kinterbasdb +sqlalchemy.dialects.firebird.kinterbasdb._FBNumeric_kinterbasdb +sqlalchemy.dialects.mssql.BIGINT +sqlalchemy.dialects.mssql.BINARY +sqlalchemy.dialects.mssql.BIT +sqlalchemy.dialects.mssql.CHAR +sqlalchemy.dialects.mssql.DATE +sqlalchemy.dialects.mssql.DATETIME +sqlalchemy.dialects.mssql.DATETIME2 +sqlalchemy.dialects.mssql.DATETIMEOFFSET +sqlalchemy.dialects.mssql.DECIMAL +sqlalchemy.dialects.mssql.FLOAT +sqlalchemy.dialects.mssql.IMAGE +sqlalchemy.dialects.mssql.INTEGER +sqlalchemy.dialects.mssql.JSON +sqlalchemy.dialects.mssql.MONEY +sqlalchemy.dialects.mssql.NCHAR +sqlalchemy.dialects.mssql.NTEXT +sqlalchemy.dialects.mssql.NUMERIC +sqlalchemy.dialects.mssql.NVARCHAR +sqlalchemy.dialects.mssql.REAL +sqlalchemy.dialects.mssql.ROWVERSION +sqlalchemy.dialects.mssql.SMALLDATETIME +sqlalchemy.dialects.mssql.SMALLINT +sqlalchemy.dialects.mssql.SMALLMONEY +sqlalchemy.dialects.mssql.SQL_VARIANT +sqlalchemy.dialects.mssql.TEXT +sqlalchemy.dialects.mssql.TIME +sqlalchemy.dialects.mssql.TIMESTAMP +sqlalchemy.dialects.mssql.TINYINT +sqlalchemy.dialects.mssql.UNIQUEIDENTIFIER +sqlalchemy.dialects.mssql.VARBINARY +sqlalchemy.dialects.mssql.VARCHAR +sqlalchemy.dialects.mssql.XML +sqlalchemy.dialects.mssql.base.BIGINT +sqlalchemy.dialects.mssql.base.BINARY +sqlalchemy.dialects.mssql.base.BIT +sqlalchemy.dialects.mssql.base.CHAR +sqlalchemy.dialects.mssql.base.DATE +sqlalchemy.dialects.mssql.base.DATETIME +sqlalchemy.dialects.mssql.base.DATETIME2 +sqlalchemy.dialects.mssql.base.DATETIMEOFFSET +sqlalchemy.dialects.mssql.base.DECIMAL +sqlalchemy.dialects.mssql.base.FLOAT +sqlalchemy.dialects.mssql.base.IMAGE +sqlalchemy.dialects.mssql.base.INTEGER +sqlalchemy.dialects.mssql.base.JSON 
+sqlalchemy.dialects.mssql.base.MONEY +sqlalchemy.dialects.mssql.base.MSTypeCompiler +sqlalchemy.dialects.mssql.base.NCHAR +sqlalchemy.dialects.mssql.base.NTEXT +sqlalchemy.dialects.mssql.base.NUMERIC +sqlalchemy.dialects.mssql.base.NVARCHAR +sqlalchemy.dialects.mssql.base.REAL +sqlalchemy.dialects.mssql.base.ROWVERSION +sqlalchemy.dialects.mssql.base.SMALLDATETIME +sqlalchemy.dialects.mssql.base.SMALLINT +sqlalchemy.dialects.mssql.base.SMALLMONEY +sqlalchemy.dialects.mssql.base.SQL_VARIANT +sqlalchemy.dialects.mssql.base.TEXT +sqlalchemy.dialects.mssql.base.TIME +sqlalchemy.dialects.mssql.base.TIMESTAMP +sqlalchemy.dialects.mssql.base.TINYINT +sqlalchemy.dialects.mssql.base.TryCast +sqlalchemy.dialects.mssql.base.UNIQUEIDENTIFIER +sqlalchemy.dialects.mssql.base.VARBINARY +sqlalchemy.dialects.mssql.base.VARCHAR +sqlalchemy.dialects.mssql.base.XML +sqlalchemy.dialects.mssql.base._BASETIMEIMPL +sqlalchemy.dialects.mssql.base._MSDate +sqlalchemy.dialects.mssql.base._MSDateTime +sqlalchemy.dialects.mssql.base._MSUnicode +sqlalchemy.dialects.mssql.base._MSUnicodeText +sqlalchemy.dialects.mssql.information_schema.CoerceUnicode +sqlalchemy.dialects.mssql.information_schema.IdentitySqlVariant +sqlalchemy.dialects.mssql.information_schema._cast_on_2005 +sqlalchemy.dialects.mssql.json.JSON +sqlalchemy.dialects.mssql.json.JSONIndexType +sqlalchemy.dialects.mssql.json.JSONPathType +sqlalchemy.dialects.mssql.mxodbc._MSDate_mxodbc +sqlalchemy.dialects.mssql.mxodbc._MSNumeric_mxodbc +sqlalchemy.dialects.mssql.mxodbc._MSTime_mxodbc +sqlalchemy.dialects.mssql.mxodbc._VARBINARY_mxodbc +sqlalchemy.dialects.mssql.pymssql._MSNumeric_pymssql +sqlalchemy.dialects.mssql.pyodbc._BINARY_pyodbc +sqlalchemy.dialects.mssql.pyodbc._MSFloat_pyodbc +sqlalchemy.dialects.mssql.pyodbc._MSNumeric_pyodbc +sqlalchemy.dialects.mssql.pyodbc._ODBCDATETIMEOFFSET +sqlalchemy.dialects.mssql.pyodbc._ODBCDateTime +sqlalchemy.dialects.mssql.pyodbc._VARBINARY_pyodbc +sqlalchemy.dialects.mysql.BIGINT +sqlalchemy.dialects.mysql.BINARY +sqlalchemy.dialects.mysql.BIT +sqlalchemy.dialects.mysql.BLOB +sqlalchemy.dialects.mysql.BOOLEAN +sqlalchemy.dialects.mysql.CHAR +sqlalchemy.dialects.mysql.DATE +sqlalchemy.dialects.mysql.DATETIME +sqlalchemy.dialects.mysql.DECIMAL +sqlalchemy.dialects.mysql.DOUBLE +sqlalchemy.dialects.mysql.ENUM +sqlalchemy.dialects.mysql.FLOAT +sqlalchemy.dialects.mysql.INTEGER +sqlalchemy.dialects.mysql.Insert +sqlalchemy.dialects.mysql.JSON +sqlalchemy.dialects.mysql.LONGBLOB +sqlalchemy.dialects.mysql.LONGTEXT +sqlalchemy.dialects.mysql.MEDIUMBLOB +sqlalchemy.dialects.mysql.MEDIUMINT +sqlalchemy.dialects.mysql.MEDIUMTEXT +sqlalchemy.dialects.mysql.NCHAR +sqlalchemy.dialects.mysql.NUMERIC +sqlalchemy.dialects.mysql.NVARCHAR +sqlalchemy.dialects.mysql.REAL +sqlalchemy.dialects.mysql.SET +sqlalchemy.dialects.mysql.SMALLINT +sqlalchemy.dialects.mysql.TEXT +sqlalchemy.dialects.mysql.TIME +sqlalchemy.dialects.mysql.TIMESTAMP +sqlalchemy.dialects.mysql.TINYBLOB +sqlalchemy.dialects.mysql.TINYINT +sqlalchemy.dialects.mysql.TINYTEXT +sqlalchemy.dialects.mysql.VARBINARY +sqlalchemy.dialects.mysql.VARCHAR +sqlalchemy.dialects.mysql.YEAR +sqlalchemy.dialects.mysql.base.BIGINT +sqlalchemy.dialects.mysql.base.BINARY +sqlalchemy.dialects.mysql.base.BIT +sqlalchemy.dialects.mysql.base.BLOB +sqlalchemy.dialects.mysql.base.BOOLEAN +sqlalchemy.dialects.mysql.base.CHAR +sqlalchemy.dialects.mysql.base.DATE +sqlalchemy.dialects.mysql.base.DATETIME +sqlalchemy.dialects.mysql.base.DECIMAL +sqlalchemy.dialects.mysql.base.DOUBLE 
+sqlalchemy.dialects.mysql.base.ENUM +sqlalchemy.dialects.mysql.base.FLOAT +sqlalchemy.dialects.mysql.base.INTEGER +sqlalchemy.dialects.mysql.base.JSON +sqlalchemy.dialects.mysql.base.LONGBLOB +sqlalchemy.dialects.mysql.base.LONGTEXT +sqlalchemy.dialects.mysql.base.MEDIUMBLOB +sqlalchemy.dialects.mysql.base.MEDIUMINT +sqlalchemy.dialects.mysql.base.MEDIUMTEXT +sqlalchemy.dialects.mysql.base.MySQLTypeCompiler +sqlalchemy.dialects.mysql.base.NCHAR +sqlalchemy.dialects.mysql.base.NUMERIC +sqlalchemy.dialects.mysql.base.NVARCHAR +sqlalchemy.dialects.mysql.base.REAL +sqlalchemy.dialects.mysql.base.SET +sqlalchemy.dialects.mysql.base.SMALLINT +sqlalchemy.dialects.mysql.base.TEXT +sqlalchemy.dialects.mysql.base.TIME +sqlalchemy.dialects.mysql.base.TIMESTAMP +sqlalchemy.dialects.mysql.base.TINYBLOB +sqlalchemy.dialects.mysql.base.TINYINT +sqlalchemy.dialects.mysql.base.TINYTEXT +sqlalchemy.dialects.mysql.base.VARBINARY +sqlalchemy.dialects.mysql.base.VARCHAR +sqlalchemy.dialects.mysql.base.YEAR +sqlalchemy.dialects.mysql.cymysql._cymysqlBIT +sqlalchemy.dialects.mysql.dml.Insert +sqlalchemy.dialects.mysql.dml.OnDuplicateClause +sqlalchemy.dialects.mysql.enumerated.ENUM +sqlalchemy.dialects.mysql.enumerated.SET +sqlalchemy.dialects.mysql.expression.match +sqlalchemy.dialects.mysql.json.JSON +sqlalchemy.dialects.mysql.json.JSONIndexType +sqlalchemy.dialects.mysql.json.JSONPathType +sqlalchemy.dialects.mysql.match +sqlalchemy.dialects.mysql.mysqlconnector._myconnpyBIT +sqlalchemy.dialects.mysql.oursql._oursqlBIT +sqlalchemy.dialects.mysql.pyodbc._pyodbcTIME +sqlalchemy.dialects.mysql.types.BIGINT +sqlalchemy.dialects.mysql.types.BIT +sqlalchemy.dialects.mysql.types.CHAR +sqlalchemy.dialects.mysql.types.DATETIME +sqlalchemy.dialects.mysql.types.DECIMAL +sqlalchemy.dialects.mysql.types.DOUBLE +sqlalchemy.dialects.mysql.types.FLOAT +sqlalchemy.dialects.mysql.types.INTEGER +sqlalchemy.dialects.mysql.types.LONGBLOB +sqlalchemy.dialects.mysql.types.LONGTEXT +sqlalchemy.dialects.mysql.types.MEDIUMBLOB +sqlalchemy.dialects.mysql.types.MEDIUMINT +sqlalchemy.dialects.mysql.types.MEDIUMTEXT +sqlalchemy.dialects.mysql.types.NCHAR +sqlalchemy.dialects.mysql.types.NUMERIC +sqlalchemy.dialects.mysql.types.NVARCHAR +sqlalchemy.dialects.mysql.types.REAL +sqlalchemy.dialects.mysql.types.SMALLINT +sqlalchemy.dialects.mysql.types.TEXT +sqlalchemy.dialects.mysql.types.TIME +sqlalchemy.dialects.mysql.types.TIMESTAMP +sqlalchemy.dialects.mysql.types.TINYBLOB +sqlalchemy.dialects.mysql.types.TINYINT +sqlalchemy.dialects.mysql.types.TINYTEXT +sqlalchemy.dialects.mysql.types.VARCHAR +sqlalchemy.dialects.mysql.types.YEAR +sqlalchemy.dialects.mysql.types._FloatType +sqlalchemy.dialects.mysql.types._IntegerType +sqlalchemy.dialects.mysql.types._MatchType +sqlalchemy.dialects.mysql.types._StringType +sqlalchemy.dialects.oracle.BFILE +sqlalchemy.dialects.oracle.BINARY_DOUBLE +sqlalchemy.dialects.oracle.BINARY_FLOAT +sqlalchemy.dialects.oracle.BLOB +sqlalchemy.dialects.oracle.CHAR +sqlalchemy.dialects.oracle.CLOB +sqlalchemy.dialects.oracle.DATE +sqlalchemy.dialects.oracle.DOUBLE_PRECISION +sqlalchemy.dialects.oracle.FLOAT +sqlalchemy.dialects.oracle.INTERVAL +sqlalchemy.dialects.oracle.LONG +sqlalchemy.dialects.oracle.NCHAR +sqlalchemy.dialects.oracle.NCLOB +sqlalchemy.dialects.oracle.NUMBER +sqlalchemy.dialects.oracle.NVARCHAR +sqlalchemy.dialects.oracle.RAW +sqlalchemy.dialects.oracle.ROWID +sqlalchemy.dialects.oracle.TIMESTAMP +sqlalchemy.dialects.oracle.VARCHAR +sqlalchemy.dialects.oracle.VARCHAR2 
+sqlalchemy.dialects.oracle.base.BFILE +sqlalchemy.dialects.oracle.base.BINARY_DOUBLE +sqlalchemy.dialects.oracle.base.BINARY_FLOAT +sqlalchemy.dialects.oracle.base.BLOB +sqlalchemy.dialects.oracle.base.CHAR +sqlalchemy.dialects.oracle.base.CLOB +sqlalchemy.dialects.oracle.base.DATE +sqlalchemy.dialects.oracle.base.DOUBLE_PRECISION +sqlalchemy.dialects.oracle.base.FLOAT +sqlalchemy.dialects.oracle.base.INTEGER +sqlalchemy.dialects.oracle.base.INTERVAL +sqlalchemy.dialects.oracle.base.LONG +sqlalchemy.dialects.oracle.base.NCHAR +sqlalchemy.dialects.oracle.base.NCLOB +sqlalchemy.dialects.oracle.base.NUMBER +sqlalchemy.dialects.oracle.base.NVARCHAR +sqlalchemy.dialects.oracle.base.OracleTypeCompiler +sqlalchemy.dialects.oracle.base.RAW +sqlalchemy.dialects.oracle.base.ROWID +sqlalchemy.dialects.oracle.base.TIMESTAMP +sqlalchemy.dialects.oracle.base.VARCHAR +sqlalchemy.dialects.oracle.base.VARCHAR2 +sqlalchemy.dialects.oracle.base._OracleBoolean +sqlalchemy.dialects.oracle.base._OuterJoinColumn +sqlalchemy.dialects.oracle.cx_oracle._OracleBINARY_DOUBLE +sqlalchemy.dialects.oracle.cx_oracle._OracleBINARY_FLOAT +sqlalchemy.dialects.oracle.cx_oracle._OracleBinary +sqlalchemy.dialects.oracle.cx_oracle._OracleBinaryFloat +sqlalchemy.dialects.oracle.cx_oracle._OracleChar +sqlalchemy.dialects.oracle.cx_oracle._OracleDate +sqlalchemy.dialects.oracle.cx_oracle._OracleEnum +sqlalchemy.dialects.oracle.cx_oracle._OracleInteger +sqlalchemy.dialects.oracle.cx_oracle._OracleInterval +sqlalchemy.dialects.oracle.cx_oracle._OracleLong +sqlalchemy.dialects.oracle.cx_oracle._OracleNChar +sqlalchemy.dialects.oracle.cx_oracle._OracleNUMBER +sqlalchemy.dialects.oracle.cx_oracle._OracleNumeric +sqlalchemy.dialects.oracle.cx_oracle._OracleRaw +sqlalchemy.dialects.oracle.cx_oracle._OracleRowid +sqlalchemy.dialects.oracle.cx_oracle._OracleString +sqlalchemy.dialects.oracle.cx_oracle._OracleText +sqlalchemy.dialects.oracle.cx_oracle._OracleUnicodeStringCHAR +sqlalchemy.dialects.oracle.cx_oracle._OracleUnicodeStringNCHAR +sqlalchemy.dialects.oracle.cx_oracle._OracleUnicodeTextCLOB +sqlalchemy.dialects.oracle.cx_oracle._OracleUnicodeTextNCLOB +sqlalchemy.dialects.postgresql.ARRAY +sqlalchemy.dialects.postgresql.BIGINT +sqlalchemy.dialects.postgresql.BIT +sqlalchemy.dialects.postgresql.BOOLEAN +sqlalchemy.dialects.postgresql.BYTEA +sqlalchemy.dialects.postgresql.CHAR +sqlalchemy.dialects.postgresql.CIDR +sqlalchemy.dialects.postgresql.CreateEnumType +sqlalchemy.dialects.postgresql.DATE +sqlalchemy.dialects.postgresql.DATERANGE +sqlalchemy.dialects.postgresql.DOUBLE_PRECISION +sqlalchemy.dialects.postgresql.DropEnumType +sqlalchemy.dialects.postgresql.ENUM +sqlalchemy.dialects.postgresql.ExcludeConstraint +sqlalchemy.dialects.postgresql.FLOAT +sqlalchemy.dialects.postgresql.HSTORE +sqlalchemy.dialects.postgresql.INET +sqlalchemy.dialects.postgresql.INT4RANGE +sqlalchemy.dialects.postgresql.INT8RANGE +sqlalchemy.dialects.postgresql.INTEGER +sqlalchemy.dialects.postgresql.INTERVAL +sqlalchemy.dialects.postgresql.Insert +sqlalchemy.dialects.postgresql.JSON +sqlalchemy.dialects.postgresql.JSONB +sqlalchemy.dialects.postgresql.MACADDR +sqlalchemy.dialects.postgresql.MACADDR8 +sqlalchemy.dialects.postgresql.MONEY +sqlalchemy.dialects.postgresql.NUMERIC +sqlalchemy.dialects.postgresql.NUMRANGE +sqlalchemy.dialects.postgresql.OID +sqlalchemy.dialects.postgresql.REAL +sqlalchemy.dialects.postgresql.REGCLASS +sqlalchemy.dialects.postgresql.SMALLINT +sqlalchemy.dialects.postgresql.TEXT +sqlalchemy.dialects.postgresql.TIME 
+sqlalchemy.dialects.postgresql.TIMESTAMP +sqlalchemy.dialects.postgresql.TSRANGE +sqlalchemy.dialects.postgresql.TSTZRANGE +sqlalchemy.dialects.postgresql.TSVECTOR +sqlalchemy.dialects.postgresql.UUID +sqlalchemy.dialects.postgresql.VARCHAR +sqlalchemy.dialects.postgresql.aggregate_order_by +sqlalchemy.dialects.postgresql.array +sqlalchemy.dialects.postgresql.array.ARRAY +sqlalchemy.dialects.postgresql.array.array +sqlalchemy.dialects.postgresql.asyncpg.AsyncPgEnum +sqlalchemy.dialects.postgresql.asyncpg.AsyncPgInterval +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgBigInteger +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgBoolean +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgDate +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgDateTime +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgFloat +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgInteger +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgJSON +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgJSONB +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgJSONIndexType +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgJSONIntIndexType +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgJSONPathType +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgJSONStrIndexType +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgNumeric +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgOID +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgREGCLASS +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgTime +sqlalchemy.dialects.postgresql.asyncpg.AsyncpgUUID +sqlalchemy.dialects.postgresql.base.BIGINT +sqlalchemy.dialects.postgresql.base.BIT +sqlalchemy.dialects.postgresql.base.BOOLEAN +sqlalchemy.dialects.postgresql.base.BYTEA +sqlalchemy.dialects.postgresql.base.CHAR +sqlalchemy.dialects.postgresql.base.CIDR +sqlalchemy.dialects.postgresql.base.CreateEnumType +sqlalchemy.dialects.postgresql.base.DATE +sqlalchemy.dialects.postgresql.base.DOUBLE_PRECISION +sqlalchemy.dialects.postgresql.base.DropEnumType +sqlalchemy.dialects.postgresql.base.ENUM +sqlalchemy.dialects.postgresql.base.FLOAT +sqlalchemy.dialects.postgresql.base.INET +sqlalchemy.dialects.postgresql.base.INTEGER +sqlalchemy.dialects.postgresql.base.INTERVAL +sqlalchemy.dialects.postgresql.base.MACADDR +sqlalchemy.dialects.postgresql.base.MACADDR8 +sqlalchemy.dialects.postgresql.base.MONEY +sqlalchemy.dialects.postgresql.base.NUMERIC +sqlalchemy.dialects.postgresql.base.OID +sqlalchemy.dialects.postgresql.base.PGTypeCompiler +sqlalchemy.dialects.postgresql.base.REAL +sqlalchemy.dialects.postgresql.base.REGCLASS +sqlalchemy.dialects.postgresql.base.SMALLINT +sqlalchemy.dialects.postgresql.base.TEXT +sqlalchemy.dialects.postgresql.base.TIME +sqlalchemy.dialects.postgresql.base.TIMESTAMP +sqlalchemy.dialects.postgresql.base.TSVECTOR +sqlalchemy.dialects.postgresql.base.UUID +sqlalchemy.dialects.postgresql.base.VARCHAR +sqlalchemy.dialects.postgresql.base._ColonCast +sqlalchemy.dialects.postgresql.dml.Insert +sqlalchemy.dialects.postgresql.dml.OnConflictClause +sqlalchemy.dialects.postgresql.dml.OnConflictDoNothing +sqlalchemy.dialects.postgresql.dml.OnConflictDoUpdate +sqlalchemy.dialects.postgresql.ext.ExcludeConstraint +sqlalchemy.dialects.postgresql.ext.aggregate_order_by +sqlalchemy.dialects.postgresql.hstore +sqlalchemy.dialects.postgresql.hstore.HSTORE +sqlalchemy.dialects.postgresql.hstore._HStoreArrayFunction +sqlalchemy.dialects.postgresql.hstore._HStoreDefinedFunction +sqlalchemy.dialects.postgresql.hstore._HStoreDeleteFunction +sqlalchemy.dialects.postgresql.hstore._HStoreKeysFunction 
+sqlalchemy.dialects.postgresql.hstore._HStoreMatrixFunction +sqlalchemy.dialects.postgresql.hstore._HStoreSliceFunction +sqlalchemy.dialects.postgresql.hstore._HStoreValsFunction +sqlalchemy.dialects.postgresql.hstore.hstore +sqlalchemy.dialects.postgresql.json.JSON +sqlalchemy.dialects.postgresql.json.JSONB +sqlalchemy.dialects.postgresql.json.JSONPathType +sqlalchemy.dialects.postgresql.pg8000._PGARRAY +sqlalchemy.dialects.postgresql.pg8000._PGBigInteger +sqlalchemy.dialects.postgresql.pg8000._PGBoolean +sqlalchemy.dialects.postgresql.pg8000._PGEnum +sqlalchemy.dialects.postgresql.pg8000._PGInteger +sqlalchemy.dialects.postgresql.pg8000._PGInterval +sqlalchemy.dialects.postgresql.pg8000._PGJSON +sqlalchemy.dialects.postgresql.pg8000._PGJSONB +sqlalchemy.dialects.postgresql.pg8000._PGJSONIndexType +sqlalchemy.dialects.postgresql.pg8000._PGJSONIntIndexType +sqlalchemy.dialects.postgresql.pg8000._PGJSONPathType +sqlalchemy.dialects.postgresql.pg8000._PGJSONStrIndexType +sqlalchemy.dialects.postgresql.pg8000._PGNullType +sqlalchemy.dialects.postgresql.pg8000._PGNumeric +sqlalchemy.dialects.postgresql.pg8000._PGNumericNoBind +sqlalchemy.dialects.postgresql.pg8000._PGSmallInteger +sqlalchemy.dialects.postgresql.pg8000._PGTime +sqlalchemy.dialects.postgresql.pg8000._PGTimeStamp +sqlalchemy.dialects.postgresql.pg8000._PGUUID +sqlalchemy.dialects.postgresql.psycopg2._PGARRAY +sqlalchemy.dialects.postgresql.psycopg2._PGEnum +sqlalchemy.dialects.postgresql.psycopg2._PGHStore +sqlalchemy.dialects.postgresql.psycopg2._PGJSON +sqlalchemy.dialects.postgresql.psycopg2._PGJSONB +sqlalchemy.dialects.postgresql.psycopg2._PGNumeric +sqlalchemy.dialects.postgresql.psycopg2._PGUUID +sqlalchemy.dialects.postgresql.pygresql._PGHStore +sqlalchemy.dialects.postgresql.pygresql._PGJSON +sqlalchemy.dialects.postgresql.pygresql._PGJSONB +sqlalchemy.dialects.postgresql.pygresql._PGNumeric +sqlalchemy.dialects.postgresql.pygresql._PGUUID +sqlalchemy.dialects.postgresql.pypostgresql.PGNumeric +sqlalchemy.dialects.postgresql.ranges.DATERANGE +sqlalchemy.dialects.postgresql.ranges.INT4RANGE +sqlalchemy.dialects.postgresql.ranges.INT8RANGE +sqlalchemy.dialects.postgresql.ranges.NUMRANGE +sqlalchemy.dialects.postgresql.ranges.TSRANGE +sqlalchemy.dialects.postgresql.ranges.TSTZRANGE +sqlalchemy.dialects.sqlite.BLOB +sqlalchemy.dialects.sqlite.BOOLEAN +sqlalchemy.dialects.sqlite.CHAR +sqlalchemy.dialects.sqlite.DATE +sqlalchemy.dialects.sqlite.DATETIME +sqlalchemy.dialects.sqlite.DECIMAL +sqlalchemy.dialects.sqlite.FLOAT +sqlalchemy.dialects.sqlite.INTEGER +sqlalchemy.dialects.sqlite.Insert +sqlalchemy.dialects.sqlite.JSON +sqlalchemy.dialects.sqlite.NUMERIC +sqlalchemy.dialects.sqlite.REAL +sqlalchemy.dialects.sqlite.SMALLINT +sqlalchemy.dialects.sqlite.TEXT +sqlalchemy.dialects.sqlite.TIME +sqlalchemy.dialects.sqlite.TIMESTAMP +sqlalchemy.dialects.sqlite.VARCHAR +sqlalchemy.dialects.sqlite.base.BLOB +sqlalchemy.dialects.sqlite.base.BOOLEAN +sqlalchemy.dialects.sqlite.base.CHAR +sqlalchemy.dialects.sqlite.base.DATE +sqlalchemy.dialects.sqlite.base.DATETIME +sqlalchemy.dialects.sqlite.base.DECIMAL +sqlalchemy.dialects.sqlite.base.FLOAT +sqlalchemy.dialects.sqlite.base.INTEGER +sqlalchemy.dialects.sqlite.base.JSON +sqlalchemy.dialects.sqlite.base.NUMERIC +sqlalchemy.dialects.sqlite.base.REAL +sqlalchemy.dialects.sqlite.base.SMALLINT +sqlalchemy.dialects.sqlite.base.SQLiteTypeCompiler +sqlalchemy.dialects.sqlite.base.TEXT +sqlalchemy.dialects.sqlite.base.TIME +sqlalchemy.dialects.sqlite.base.TIMESTAMP 
+sqlalchemy.dialects.sqlite.base.VARCHAR +sqlalchemy.dialects.sqlite.base._SQliteJson +sqlalchemy.dialects.sqlite.dml.Insert +sqlalchemy.dialects.sqlite.dml.OnConflictClause +sqlalchemy.dialects.sqlite.dml.OnConflictDoNothing +sqlalchemy.dialects.sqlite.dml.OnConflictDoUpdate +sqlalchemy.dialects.sqlite.json.JSON +sqlalchemy.dialects.sqlite.json.JSONIndexType +sqlalchemy.dialects.sqlite.json.JSONPathType +sqlalchemy.dialects.sqlite.pysqlite._SQLite_pysqliteDate +sqlalchemy.dialects.sqlite.pysqlite._SQLite_pysqliteTimeStamp +sqlalchemy.dialects.sybase.BIGINT +sqlalchemy.dialects.sybase.BINARY +sqlalchemy.dialects.sybase.BIT +sqlalchemy.dialects.sybase.CHAR +sqlalchemy.dialects.sybase.DATE +sqlalchemy.dialects.sybase.DATETIME +sqlalchemy.dialects.sybase.FLOAT +sqlalchemy.dialects.sybase.IMAGE +sqlalchemy.dialects.sybase.INTEGER +sqlalchemy.dialects.sybase.MONEY +sqlalchemy.dialects.sybase.NCHAR +sqlalchemy.dialects.sybase.NUMERIC +sqlalchemy.dialects.sybase.NVARCHAR +sqlalchemy.dialects.sybase.SMALLINT +sqlalchemy.dialects.sybase.SMALLMONEY +sqlalchemy.dialects.sybase.TEXT +sqlalchemy.dialects.sybase.TIME +sqlalchemy.dialects.sybase.TINYINT +sqlalchemy.dialects.sybase.UNICHAR +sqlalchemy.dialects.sybase.UNITEXT +sqlalchemy.dialects.sybase.UNIVARCHAR +sqlalchemy.dialects.sybase.VARBINARY +sqlalchemy.dialects.sybase.VARCHAR +sqlalchemy.dialects.sybase.base.BIGINT +sqlalchemy.dialects.sybase.base.BINARY +sqlalchemy.dialects.sybase.base.BIT +sqlalchemy.dialects.sybase.base.CHAR +sqlalchemy.dialects.sybase.base.DATE +sqlalchemy.dialects.sybase.base.DATETIME +sqlalchemy.dialects.sybase.base.DECIMAL +sqlalchemy.dialects.sybase.base.FLOAT +sqlalchemy.dialects.sybase.base.IMAGE +sqlalchemy.dialects.sybase.base.INTEGER +sqlalchemy.dialects.sybase.base.MONEY +sqlalchemy.dialects.sybase.base.NCHAR +sqlalchemy.dialects.sybase.base.NUMERIC +sqlalchemy.dialects.sybase.base.NVARCHAR +sqlalchemy.dialects.sybase.base.REAL +sqlalchemy.dialects.sybase.base.SMALLINT +sqlalchemy.dialects.sybase.base.SMALLMONEY +sqlalchemy.dialects.sybase.base.SybaseTypeCompiler +sqlalchemy.dialects.sybase.base.TEXT +sqlalchemy.dialects.sybase.base.TIME +sqlalchemy.dialects.sybase.base.TIMESTAMP +sqlalchemy.dialects.sybase.base.TINYINT +sqlalchemy.dialects.sybase.base.UNICHAR +sqlalchemy.dialects.sybase.base.UNIQUEIDENTIFIER +sqlalchemy.dialects.sybase.base.UNITEXT +sqlalchemy.dialects.sybase.base.UNIVARCHAR +sqlalchemy.dialects.sybase.base.Unicode +sqlalchemy.dialects.sybase.base.VARBINARY +sqlalchemy.dialects.sybase.base.VARCHAR +sqlalchemy.dialects.sybase.pyodbc._SybNumeric_pyodbc +sqlalchemy.dialects.sybase.pysybase._SybNumeric +sqlalchemy.engine.TypeCompiler +sqlalchemy.engine.default._StrDate +sqlalchemy.engine.default._StrDateTime +sqlalchemy.engine.default._StrTime +sqlalchemy.engine.events.ConnectionEvents +sqlalchemy.engine.events.DialectEvents +sqlalchemy.engine.interfaces.TypeCompiler +sqlalchemy.event.Events +sqlalchemy.event.base.Events +sqlalchemy.events.ConnectionEvents +sqlalchemy.events.DDLEvents +sqlalchemy.events.DialectEvents +sqlalchemy.events.PoolEvents +sqlalchemy.ext.asyncio.AsyncConnectionEvents +sqlalchemy.ext.asyncio.AsyncSessionEvents +sqlalchemy.ext.asyncio.events.AsyncConnectionEvents +sqlalchemy.ext.asyncio.events.AsyncSessionEvents +sqlalchemy.orm.AttributeEvents +sqlalchemy.orm.FromStatement +sqlalchemy.orm.InstanceEvents +sqlalchemy.orm.InstrumentationEvents +sqlalchemy.orm.MapperEvents +sqlalchemy.orm.QueryContext.default_load_options +sqlalchemy.orm.QueryEvents +sqlalchemy.orm.SessionEvents 
+sqlalchemy.orm.context.ORMCompileState.default_compile_options +sqlalchemy.orm.context.QueryContext.default_load_options +sqlalchemy.orm.events.AttributeEvents +sqlalchemy.orm.events.InstanceEvents +sqlalchemy.orm.events.InstrumentationEvents +sqlalchemy.orm.events.MapperEvents +sqlalchemy.orm.events.QueryEvents +sqlalchemy.orm.events.SessionEvents +sqlalchemy.orm.events._InstanceEventsHold.HoldInstanceEvents +sqlalchemy.orm.events._MapperEventsHold.HoldMapperEvents +sqlalchemy.orm.persistence.BulkUDCompileState.default_update_options +sqlalchemy.orm.query.FromStatement +sqlalchemy.orm.query.QueryContext.default_load_options +sqlalchemy.orm.util._ORMJoin +sqlalchemy.pool.events.PoolEvents +sqlalchemy.schema.AddConstraint +sqlalchemy.schema.CheckConstraint +sqlalchemy.schema.Column +sqlalchemy.schema.ColumnCollectionConstraint +sqlalchemy.schema.ColumnDefault +sqlalchemy.schema.Computed +sqlalchemy.schema.Constraint +sqlalchemy.schema.CreateColumn +sqlalchemy.schema.CreateIndex +sqlalchemy.schema.CreateSchema +sqlalchemy.schema.CreateSequence +sqlalchemy.schema.CreateTable +sqlalchemy.schema.DDL +sqlalchemy.schema.DDLElement +sqlalchemy.schema.DefaultGenerator +sqlalchemy.schema.DropColumnComment +sqlalchemy.schema.DropConstraint +sqlalchemy.schema.DropIndex +sqlalchemy.schema.DropSchema +sqlalchemy.schema.DropSequence +sqlalchemy.schema.DropTable +sqlalchemy.schema.DropTableComment +sqlalchemy.schema.ForeignKey +sqlalchemy.schema.ForeignKeyConstraint +sqlalchemy.schema.Identity +sqlalchemy.schema.Index +sqlalchemy.schema.MetaData +sqlalchemy.schema.PrimaryKeyConstraint +sqlalchemy.schema.SchemaItem +sqlalchemy.schema.Sequence +sqlalchemy.schema.SetColumnComment +sqlalchemy.schema.SetTableComment +sqlalchemy.schema.Table +sqlalchemy.schema.ThreadLocalMetaData +sqlalchemy.schema.UniqueConstraint +sqlalchemy.schema._CreateDropBase +sqlalchemy.schema._DDLCompiles +sqlalchemy.schema._DropView +sqlalchemy.sql.Alias +sqlalchemy.sql.ClauseElement +sqlalchemy.sql.ColumnElement +sqlalchemy.sql.CompoundSelect +sqlalchemy.sql.Delete +sqlalchemy.sql.False_ +sqlalchemy.sql.FromClause +sqlalchemy.sql.Insert +sqlalchemy.sql.Join +sqlalchemy.sql.LambdaElement +sqlalchemy.sql.Select +sqlalchemy.sql.Selectable +sqlalchemy.sql.StatementLambdaElement +sqlalchemy.sql.Subquery +sqlalchemy.sql.TableClause +sqlalchemy.sql.TableSample +sqlalchemy.sql.True_ +sqlalchemy.sql.Update +sqlalchemy.sql.Values +sqlalchemy.sql.base.CacheableOptions +sqlalchemy.sql.base.Options +sqlalchemy.sql.compiler.GenericTypeCompiler +sqlalchemy.sql.compiler.StrSQLTypeCompiler +sqlalchemy.sql.compiler.TypeCompiler +sqlalchemy.sql.compiler._CompileLabel +sqlalchemy.sql.crud._multiparam_column +sqlalchemy.sql.ddl.AddConstraint +sqlalchemy.sql.ddl.CreateColumn +sqlalchemy.sql.ddl.CreateIndex +sqlalchemy.sql.ddl.CreateSchema +sqlalchemy.sql.ddl.CreateSequence +sqlalchemy.sql.ddl.CreateTable +sqlalchemy.sql.ddl.DDL +sqlalchemy.sql.ddl.DDLElement +sqlalchemy.sql.ddl.DropColumnComment +sqlalchemy.sql.ddl.DropConstraint +sqlalchemy.sql.ddl.DropIndex +sqlalchemy.sql.ddl.DropSchema +sqlalchemy.sql.ddl.DropSequence +sqlalchemy.sql.ddl.DropTable +sqlalchemy.sql.ddl.DropTableComment +sqlalchemy.sql.ddl.SetColumnComment +sqlalchemy.sql.ddl.SetTableComment +sqlalchemy.sql.ddl._CreateDropBase +sqlalchemy.sql.ddl._DDLCompiles +sqlalchemy.sql.ddl._DropView +sqlalchemy.sql.dml.Delete +sqlalchemy.sql.dml.Insert +sqlalchemy.sql.dml.Update +sqlalchemy.sql.dml.UpdateBase +sqlalchemy.sql.dml.ValuesBase +sqlalchemy.sql.elements.AsBoolean 
+sqlalchemy.sql.elements.BinaryExpression +sqlalchemy.sql.elements.BindParameter +sqlalchemy.sql.elements.BooleanClauseList +sqlalchemy.sql.elements.Case +sqlalchemy.sql.elements.Cast +sqlalchemy.sql.elements.ClauseElement +sqlalchemy.sql.elements.ClauseList +sqlalchemy.sql.elements.CollationClause +sqlalchemy.sql.elements.CollectionAggregate +sqlalchemy.sql.elements.ColumnClause +sqlalchemy.sql.elements.ColumnElement +sqlalchemy.sql.elements.Extract +sqlalchemy.sql.elements.False_ +sqlalchemy.sql.elements.FunctionFilter +sqlalchemy.sql.elements.GroupedElement +sqlalchemy.sql.elements.Grouping +sqlalchemy.sql.elements.IndexExpression +sqlalchemy.sql.elements.Label +sqlalchemy.sql.elements.NamedColumn +sqlalchemy.sql.elements.Null +sqlalchemy.sql.elements.Over +sqlalchemy.sql.elements.ReleaseSavepointClause +sqlalchemy.sql.elements.RollbackToSavepointClause +sqlalchemy.sql.elements.SavepointClause +sqlalchemy.sql.elements.Slice +sqlalchemy.sql.elements.TableValuedColumn +sqlalchemy.sql.elements.TextClause +sqlalchemy.sql.elements.True_ +sqlalchemy.sql.elements.Tuple +sqlalchemy.sql.elements.TypeClause +sqlalchemy.sql.elements.TypeCoerce +sqlalchemy.sql.elements.UnaryExpression +sqlalchemy.sql.elements.WithinGroup +sqlalchemy.sql.elements._IdentifiedClause +sqlalchemy.sql.elements._label_reference +sqlalchemy.sql.elements._textual_label_reference +sqlalchemy.sql.events.DDLEvents +sqlalchemy.sql.expression.Alias +sqlalchemy.sql.expression.AliasedReturnsRows +sqlalchemy.sql.expression.BinaryExpression +sqlalchemy.sql.expression.BindParameter +sqlalchemy.sql.expression.BooleanClauseList +sqlalchemy.sql.expression.CTE +sqlalchemy.sql.expression.Case +sqlalchemy.sql.expression.Cast +sqlalchemy.sql.expression.ClauseElement +sqlalchemy.sql.expression.ClauseList +sqlalchemy.sql.expression.CollectionAggregate +sqlalchemy.sql.expression.ColumnClause +sqlalchemy.sql.expression.ColumnElement +sqlalchemy.sql.expression.CompoundSelect +sqlalchemy.sql.expression.Delete +sqlalchemy.sql.expression.Exists +sqlalchemy.sql.expression.Extract +sqlalchemy.sql.expression.False_ +sqlalchemy.sql.expression.FromClause +sqlalchemy.sql.expression.FromGrouping +sqlalchemy.sql.expression.Function +sqlalchemy.sql.expression.FunctionElement +sqlalchemy.sql.expression.FunctionFilter +sqlalchemy.sql.expression.GenerativeSelect +sqlalchemy.sql.expression.Grouping +sqlalchemy.sql.expression.Insert +sqlalchemy.sql.expression.Join +sqlalchemy.sql.expression.Label +sqlalchemy.sql.expression.LambdaElement +sqlalchemy.sql.expression.Lateral +sqlalchemy.sql.expression.Null +sqlalchemy.sql.expression.Over +sqlalchemy.sql.expression.ReleaseSavepointClause +sqlalchemy.sql.expression.ReturnsRows +sqlalchemy.sql.expression.RollbackToSavepointClause +sqlalchemy.sql.expression.SavepointClause +sqlalchemy.sql.expression.ScalarSelect +sqlalchemy.sql.expression.Select +sqlalchemy.sql.expression.SelectBase +sqlalchemy.sql.expression.Selectable +sqlalchemy.sql.expression.StatementLambdaElement +sqlalchemy.sql.expression.Subquery +sqlalchemy.sql.expression.TableClause +sqlalchemy.sql.expression.TableSample +sqlalchemy.sql.expression.TableValuedAlias +sqlalchemy.sql.expression.TextClause +sqlalchemy.sql.expression.TextualSelect +sqlalchemy.sql.expression.True_ +sqlalchemy.sql.expression.Tuple +sqlalchemy.sql.expression.TypeClause +sqlalchemy.sql.expression.TypeCoerce +sqlalchemy.sql.expression.UnaryExpression +sqlalchemy.sql.expression.Update +sqlalchemy.sql.expression.UpdateBase +sqlalchemy.sql.expression.Values 
+sqlalchemy.sql.expression.ValuesBase +sqlalchemy.sql.expression.WithinGroup +sqlalchemy.sql.functions.AnsiFunction +sqlalchemy.sql.functions.Function +sqlalchemy.sql.functions.FunctionAsBinary +sqlalchemy.sql.functions.FunctionElement +sqlalchemy.sql.functions.GenericFunction +sqlalchemy.sql.functions.OrderedSetAgg +sqlalchemy.sql.functions.ReturnTypeFromArgs +sqlalchemy.sql.functions.ScalarFunctionColumn +sqlalchemy.sql.functions.array_agg +sqlalchemy.sql.functions.char_length +sqlalchemy.sql.functions.coalesce +sqlalchemy.sql.functions.concat +sqlalchemy.sql.functions.count +sqlalchemy.sql.functions.cube +sqlalchemy.sql.functions.cume_dist +sqlalchemy.sql.functions.current_date +sqlalchemy.sql.functions.current_time +sqlalchemy.sql.functions.current_timestamp +sqlalchemy.sql.functions.current_user +sqlalchemy.sql.functions.dense_rank +sqlalchemy.sql.functions.grouping_sets +sqlalchemy.sql.functions.localtime +sqlalchemy.sql.functions.localtimestamp +sqlalchemy.sql.functions.max +sqlalchemy.sql.functions.min +sqlalchemy.sql.functions.mode +sqlalchemy.sql.functions.next_value +sqlalchemy.sql.functions.now +sqlalchemy.sql.functions.percent_rank +sqlalchemy.sql.functions.percentile_cont +sqlalchemy.sql.functions.percentile_disc +sqlalchemy.sql.functions.random +sqlalchemy.sql.functions.rank +sqlalchemy.sql.functions.rollup +sqlalchemy.sql.functions.session_user +sqlalchemy.sql.functions.sum +sqlalchemy.sql.functions.sysdate +sqlalchemy.sql.functions.user +sqlalchemy.sql.lambdas.DeferredLambdaElement +sqlalchemy.sql.lambdas.LambdaElement +sqlalchemy.sql.lambdas.LambdaOptions +sqlalchemy.sql.lambdas.LinkedLambdaElement +sqlalchemy.sql.lambdas.NullLambdaStatement +sqlalchemy.sql.lambdas.StatementLambdaElement +sqlalchemy.sql.schema.CheckConstraint +sqlalchemy.sql.schema.Column +sqlalchemy.sql.schema.ColumnCollectionConstraint +sqlalchemy.sql.schema.ColumnDefault +sqlalchemy.sql.schema.Computed +sqlalchemy.sql.schema.Constraint +sqlalchemy.sql.schema.DefaultGenerator +sqlalchemy.sql.schema.ForeignKey +sqlalchemy.sql.schema.ForeignKeyConstraint +sqlalchemy.sql.schema.Identity +sqlalchemy.sql.schema.Index +sqlalchemy.sql.schema.MetaData +sqlalchemy.sql.schema.PrimaryKeyConstraint +sqlalchemy.sql.schema.SchemaItem +sqlalchemy.sql.schema.Sequence +sqlalchemy.sql.schema.Table +sqlalchemy.sql.schema.ThreadLocalMetaData +sqlalchemy.sql.schema.UniqueConstraint +sqlalchemy.sql.selectable.Alias +sqlalchemy.sql.selectable.AliasedReturnsRows +sqlalchemy.sql.selectable.BindParameter +sqlalchemy.sql.selectable.BooleanClauseList +sqlalchemy.sql.selectable.CTE +sqlalchemy.sql.selectable.ClauseElement +sqlalchemy.sql.selectable.ClauseList +sqlalchemy.sql.selectable.ColumnClause +sqlalchemy.sql.selectable.CompoundSelect +sqlalchemy.sql.selectable.Exists +sqlalchemy.sql.selectable.ForUpdateArg +sqlalchemy.sql.selectable.FromClause +sqlalchemy.sql.selectable.FromGrouping +sqlalchemy.sql.selectable.GenerativeSelect +sqlalchemy.sql.selectable.GroupedElement +sqlalchemy.sql.selectable.Grouping +sqlalchemy.sql.selectable.Join +sqlalchemy.sql.selectable.Lateral +sqlalchemy.sql.selectable.ReturnsRows +sqlalchemy.sql.selectable.ScalarSelect +sqlalchemy.sql.selectable.Select +sqlalchemy.sql.selectable.SelectBase +sqlalchemy.sql.selectable.SelectState.default_select_compile_options +sqlalchemy.sql.selectable.SelectStatementGrouping +sqlalchemy.sql.selectable.Selectable +sqlalchemy.sql.selectable.Subquery +sqlalchemy.sql.selectable.TableClause +sqlalchemy.sql.selectable.TableSample 
+sqlalchemy.sql.selectable.TableValuedAlias +sqlalchemy.sql.selectable.TableValuedColumn +sqlalchemy.sql.selectable.TextualSelect +sqlalchemy.sql.selectable.UnaryExpression +sqlalchemy.sql.selectable.Values +sqlalchemy.sql.selectable._MemoizedSelectEntities +sqlalchemy.sql.selectable._OffsetLimitParam +sqlalchemy.sql.sqltypes.ARRAY +sqlalchemy.sql.sqltypes.BIGINT +sqlalchemy.sql.sqltypes.BINARY +sqlalchemy.sql.sqltypes.BLOB +sqlalchemy.sql.sqltypes.BOOLEAN +sqlalchemy.sql.sqltypes.BigInteger +sqlalchemy.sql.sqltypes.Boolean +sqlalchemy.sql.sqltypes.CHAR +sqlalchemy.sql.sqltypes.CLOB +sqlalchemy.sql.sqltypes.DATE +sqlalchemy.sql.sqltypes.DATETIME +sqlalchemy.sql.sqltypes.DECIMAL +sqlalchemy.sql.sqltypes.Date +sqlalchemy.sql.sqltypes.DateTime +sqlalchemy.sql.sqltypes.Enum +sqlalchemy.sql.sqltypes.FLOAT +sqlalchemy.sql.sqltypes.Float +sqlalchemy.sql.sqltypes.INTEGER +sqlalchemy.sql.sqltypes.Integer +sqlalchemy.sql.sqltypes.Interval +sqlalchemy.sql.sqltypes.JSON +sqlalchemy.sql.sqltypes.JSON.JSONElementType +sqlalchemy.sql.sqltypes.JSON.JSONIndexType +sqlalchemy.sql.sqltypes.JSON.JSONIntIndexType +sqlalchemy.sql.sqltypes.JSON.JSONPathType +sqlalchemy.sql.sqltypes.JSON.JSONStrIndexType +sqlalchemy.sql.sqltypes.LargeBinary +sqlalchemy.sql.sqltypes.MatchType +sqlalchemy.sql.sqltypes.NCHAR +sqlalchemy.sql.sqltypes.NUMERIC +sqlalchemy.sql.sqltypes.NVARCHAR +sqlalchemy.sql.sqltypes.NullType +sqlalchemy.sql.sqltypes.Numeric +sqlalchemy.sql.sqltypes.PickleType +sqlalchemy.sql.sqltypes.REAL +sqlalchemy.sql.sqltypes.SMALLINT +sqlalchemy.sql.sqltypes.SmallInteger +sqlalchemy.sql.sqltypes.String +sqlalchemy.sql.sqltypes.TEXT +sqlalchemy.sql.sqltypes.TIME +sqlalchemy.sql.sqltypes.TIMESTAMP +sqlalchemy.sql.sqltypes.TableValueType +sqlalchemy.sql.sqltypes.Text +sqlalchemy.sql.sqltypes.Time +sqlalchemy.sql.sqltypes.TupleType +sqlalchemy.sql.sqltypes.TypeDecorator +sqlalchemy.sql.sqltypes.TypeEngine +sqlalchemy.sql.sqltypes.Unicode +sqlalchemy.sql.sqltypes.UnicodeText +sqlalchemy.sql.sqltypes.VARBINARY +sqlalchemy.sql.sqltypes.VARCHAR +sqlalchemy.sql.sqltypes.Variant +sqlalchemy.sql.sqltypes._AbstractInterval +sqlalchemy.sql.sqltypes._Binary +sqlalchemy.sql.traversals.ColIdentityComparatorStrategy +sqlalchemy.sql.traversals.TraversalComparatorStrategy +sqlalchemy.sql.traversals._CacheKey +sqlalchemy.sql.traversals._CopyInternals +sqlalchemy.sql.traversals._GetChildren +sqlalchemy.sql.type_api.TypeDecorator +sqlalchemy.sql.type_api.TypeEngine +sqlalchemy.sql.type_api.UserDefinedType +sqlalchemy.sql.type_api.Variant +sqlalchemy.sql.visitors.ExtendedInternalTraversal +sqlalchemy.sql.visitors.InternalTraversal +sqlalchemy.sql.visitors.Traversible +sqlalchemy.types.ARRAY +sqlalchemy.types.BIGINT +sqlalchemy.types.BINARY +sqlalchemy.types.BLOB +sqlalchemy.types.BOOLEAN +sqlalchemy.types.BigInteger +sqlalchemy.types.Boolean +sqlalchemy.types.CHAR +sqlalchemy.types.CLOB +sqlalchemy.types.DATE +sqlalchemy.types.DATETIME +sqlalchemy.types.DECIMAL +sqlalchemy.types.Date +sqlalchemy.types.DateTime +sqlalchemy.types.Enum +sqlalchemy.types.FLOAT +sqlalchemy.types.Float +sqlalchemy.types.INTEGER +sqlalchemy.types.Integer +sqlalchemy.types.Interval +sqlalchemy.types.JSON +sqlalchemy.types.JSON.JSONElementType +sqlalchemy.types.JSON.JSONIndexType +sqlalchemy.types.JSON.JSONIntIndexType +sqlalchemy.types.JSON.JSONPathType +sqlalchemy.types.JSON.JSONStrIndexType +sqlalchemy.types.LargeBinary +sqlalchemy.types.MatchType +sqlalchemy.types.NCHAR +sqlalchemy.types.NUMERIC +sqlalchemy.types.NVARCHAR +sqlalchemy.types.NullType 
+sqlalchemy.types.Numeric +sqlalchemy.types.PickleType +sqlalchemy.types.REAL +sqlalchemy.types.SMALLINT +sqlalchemy.types.SmallInteger +sqlalchemy.types.String +sqlalchemy.types.TEXT +sqlalchemy.types.TIME +sqlalchemy.types.TIMESTAMP +sqlalchemy.types.Text +sqlalchemy.types.Time +sqlalchemy.types.TupleType +sqlalchemy.types.TypeDecorator +sqlalchemy.types.TypeEngine +sqlalchemy.types.Unicode +sqlalchemy.types.UnicodeText +sqlalchemy.types.UserDefinedType +sqlalchemy.types.VARBINARY +sqlalchemy.types.VARCHAR +sqlalchemy.types._Binary diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/METADATA.toml new file mode 100644 index 00000000..ac906398 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/METADATA.toml @@ -0,0 +1,10 @@ +version = "1.4.46" +extra_description = """\ + The `sqlalchemy-stubs` package is an alternative to this package and also \ + includes a mypy plugin for more precise types.\ +""" +obsolete_since = "2.0.0" # Released on 2023-01-26 + +[tool.stubtest] +stubtest_requirements = ["pytest"] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/__init__.pyi new file mode 100644 index 00000000..dde3cbce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/__init__.pyi @@ -0,0 +1,133 @@ +from .engine import ( + create_engine as create_engine, + create_mock_engine as create_mock_engine, + engine_from_config as engine_from_config, +) +from .inspection import inspect as inspect +from .schema import ( + BLANK_SCHEMA as BLANK_SCHEMA, + DDL as DDL, + CheckConstraint as CheckConstraint, + Column as Column, + ColumnDefault as ColumnDefault, + Computed as Computed, + Constraint as Constraint, + DefaultClause as DefaultClause, + FetchedValue as FetchedValue, + ForeignKey as ForeignKey, + ForeignKeyConstraint as ForeignKeyConstraint, + Identity as Identity, + Index as Index, + MetaData as MetaData, + PrimaryKeyConstraint as PrimaryKeyConstraint, + Sequence as Sequence, + Table as Table, + ThreadLocalMetaData as ThreadLocalMetaData, + UniqueConstraint as UniqueConstraint, +) +from .sql import ( + LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT, + LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY, + LABEL_STYLE_NONE as LABEL_STYLE_NONE, + LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL, + alias as alias, + all_ as all_, + and_ as and_, + any_ as any_, + asc as asc, + between as between, + bindparam as bindparam, + case as case, + cast as cast, + collate as collate, + column as column, + delete as delete, + desc as desc, + distinct as distinct, + except_ as except_, + except_all as except_all, + exists as exists, + extract as extract, + false as false, + func as func, + funcfilter as funcfilter, + insert as insert, + intersect as intersect, + intersect_all as intersect_all, + join as join, + lambda_stmt as lambda_stmt, + lateral as lateral, + literal as literal, + literal_column as literal_column, + modifier as modifier, + not_ as not_, + null as null, + nulls_first as nulls_first, + nulls_last as nulls_last, + nullsfirst as nullsfirst, + nullslast as nullslast, + or_ as or_, + outerjoin as outerjoin, + outparam as 
outparam, + over as over, + select as select, + subquery as subquery, + table as table, + tablesample as tablesample, + text as text, + true as true, + tuple_ as tuple_, + type_coerce as type_coerce, + union as union, + union_all as union_all, + update as update, + values as values, + within_group as within_group, +) +from .types import ( + ARRAY as ARRAY, + BIGINT as BIGINT, + BINARY as BINARY, + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + CLOB as CLOB, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INT as INT, + INTEGER as INTEGER, + JSON as JSON, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + BigInteger as BigInteger, + Boolean as Boolean, + Date as Date, + DateTime as DateTime, + Enum as Enum, + Float as Float, + Integer as Integer, + Interval as Interval, + LargeBinary as LargeBinary, + Numeric as Numeric, + PickleType as PickleType, + SmallInteger as SmallInteger, + String as String, + Text as Text, + Time as Time, + TupleType as TupleType, + TypeDecorator as TypeDecorator, + Unicode as Unicode, + UnicodeText as UnicodeText, +) + +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/cimmutabledict.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/cimmutabledict.pyi new file mode 100644 index 00000000..9883315d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/cimmutabledict.pyi @@ -0,0 +1,19 @@ +from _typeshed import SupportsKeysAndGetItem +from collections.abc import Iterable +from typing import Generic, TypeVar, overload +from typing_extensions import final + +_KT = TypeVar("_KT") +_KT2 = TypeVar("_KT2") +_VT = TypeVar("_VT") +_VT2 = TypeVar("_VT2") + +@final +class immutabledict(dict[_KT, _VT], Generic[_KT, _VT]): + @overload + def union(self, __dict: dict[_KT2, _VT2]) -> immutabledict[_KT | _KT2, _VT | _VT2]: ... + @overload + def union(self, __dict: None = ..., **kw: SupportsKeysAndGetItem[_KT2, _VT2]) -> immutabledict[_KT | _KT2, _VT | _VT2]: ... + def merge_with( + self, *args: SupportsKeysAndGetItem[_KT | _KT2, _VT2] | Iterable[tuple[_KT2, _VT2]] | None + ) -> immutabledict[_KT | _KT2, _VT | _VT2]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/connectors/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/connectors/__init__.pyi new file mode 100644 index 00000000..b66d337f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/connectors/__init__.pyi @@ -0,0 +1 @@ +class Connector: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/connectors/mxodbc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/connectors/mxodbc.pyi new file mode 100644 index 00000000..350b3df4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/connectors/mxodbc.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +from . 
import Connector + +class MxODBCConnector(Connector): + driver: str + supports_sane_multi_rowcount: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + supports_native_decimal: bool + @classmethod + def dbapi(cls): ... + def on_connect(self): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def do_execute(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/connectors/pyodbc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/connectors/pyodbc.pyi new file mode 100644 index 00000000..b0b7ce25 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/connectors/pyodbc.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from typing import Any + +from . import Connector + +class PyODBCConnector(Connector): + driver: str + supports_sane_rowcount_returning: bool + supports_sane_multi_rowcount: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + supports_native_decimal: bool + default_paramstyle: str + use_setinputsizes: bool + pyodbc_driver_name: Any + def __init__(self, supports_unicode_binds: Incomplete | None = ..., use_setinputsizes: bool = ..., **kw) -> None: ... + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ... + def set_isolation_level(self, connection, level) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/cresultproxy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/cresultproxy.pyi new file mode 100644 index 00000000..ecb5b706 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/cresultproxy.pyi @@ -0,0 +1,11 @@ +from typing import Any + +class BaseRow: + def __init__(self, parent, processors, keymap, key_style, data) -> None: ... + def __reduce__(self): ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __hash__(self) -> int: ... + __getitem__: Any + +def safe_rowproxy_reconstructor(__cls, __state): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/databases/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/databases/__init__.pyi new file mode 100644 index 00000000..58f463cb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/databases/__init__.pyi @@ -0,0 +1,18 @@ +from ..dialects.firebird import base as firebird_base +from ..dialects.mssql import base as mssql_base +from ..dialects.mysql import base as mysql_base +from ..dialects.oracle import base as oracle_base +from ..dialects.postgresql import base as postgresql_base +from ..dialects.sqlite import base as sqlite_base +from ..dialects.sybase import base as sybase_base + +__all__ = ("firebird", "mssql", "mysql", "postgresql", "sqlite", "oracle", "sybase") + +firebird = firebird_base +mssql = mssql_base +mysql = mysql_base +oracle = oracle_base +postgresql = postgresql_base +postgres = postgresql_base +sqlite = sqlite_base +sybase = sybase_base diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/__init__.pyi new file mode 100644 index 00000000..63292f37 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/__init__.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from . import ( + firebird as firebird, + mssql as mssql, + mysql as mysql, + oracle as oracle, + postgresql as postgresql, + sqlite as sqlite, + sybase as sybase, +) + +__all__ = ("firebird", "mssql", "mysql", "oracle", "postgresql", "sqlite", "sybase") + +registry: Any +plugins: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/__init__.pyi new file mode 100644 index 00000000..c421f787 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/__init__.pyi @@ -0,0 +1,34 @@ +from typing import Any + +from sqlalchemy.dialects.firebird.base import ( + BIGINT as BIGINT, + BLOB as BLOB, + CHAR as CHAR, + DATE as DATE, + FLOAT as FLOAT, + NUMERIC as NUMERIC, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, +) + +__all__ = ( + "SMALLINT", + "BIGINT", + "FLOAT", + "FLOAT", + "DATE", + "TIME", + "TEXT", + "NUMERIC", + "FLOAT", + "TIMESTAMP", + "VARCHAR", + "CHAR", + "BLOB", + "dialect", +) + +dialect: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/base.pyi new file mode 100644 index 00000000..564282e8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/base.pyi @@ -0,0 +1,109 @@ +from _typeshed import Incomplete +from typing import Any + +from sqlalchemy import sql, types as sqltypes +from sqlalchemy.engine import default +from sqlalchemy.sql import compiler +from sqlalchemy.types import ( + BIGINT as BIGINT, + BLOB as BLOB, + 
DATE as DATE, + FLOAT as FLOAT, + INTEGER as INTEGER, + NUMERIC as NUMERIC, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + Integer as Integer, +) + +RESERVED_WORDS: Any + +class _StringType(sqltypes.String): + charset: Any + def __init__(self, charset: Incomplete | None = ..., **kw) -> None: ... + +class VARCHAR(_StringType, sqltypes.VARCHAR): + __visit_name__: str + def __init__(self, length: Incomplete | None = ..., **kwargs) -> None: ... + +class CHAR(_StringType, sqltypes.CHAR): + __visit_name__: str + def __init__(self, length: Incomplete | None = ..., **kwargs) -> None: ... + +class _FBDateTime(sqltypes.DateTime): + def bind_processor(self, dialect): ... + +colspecs: Any +ischema_names: Any + +class FBTypeCompiler(compiler.GenericTypeCompiler): + def visit_boolean(self, type_, **kw): ... + def visit_datetime(self, type_, **kw): ... + def visit_TEXT(self, type_, **kw): ... + def visit_BLOB(self, type_, **kw): ... + def visit_CHAR(self, type_, **kw): ... + def visit_VARCHAR(self, type_, **kw): ... + +class FBCompiler(sql.compiler.SQLCompiler): + ansi_bind_rules: bool + def visit_now_func(self, fn, **kw): ... + def visit_startswith_op_binary(self, binary, operator, **kw): ... + def visit_not_startswith_op_binary(self, binary, operator, **kw): ... + def visit_mod_binary(self, binary, operator, **kw): ... + def visit_alias(self, alias, asfrom: bool = ..., **kwargs): ... # type: ignore[override] + def visit_substring_func(self, func, **kw): ... + def visit_length_func(self, function, **kw): ... + visit_char_length_func: Any + def function_argspec(self, func, **kw): ... + def default_from(self): ... + def visit_sequence(self, seq, **kw): ... + def get_select_precolumns(self, select, **kw): ... + def limit_clause(self, select, **kw): ... + def returning_clause(self, stmt, returning_cols): ... + +class FBDDLCompiler(sql.compiler.DDLCompiler): + def visit_create_sequence(self, create): ... + def visit_drop_sequence(self, drop): ... + def visit_computed_column(self, generated): ... + +class FBIdentifierPreparer(sql.compiler.IdentifierPreparer): + reserved_words: Any + illegal_initial_characters: Any + def __init__(self, dialect) -> None: ... + +class FBExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): ... + +class FBDialect(default.DefaultDialect): + name: str + supports_statement_cache: bool + max_identifier_length: int + supports_sequences: bool + sequences_optional: bool + supports_default_values: bool + postfetch_lastrowid: bool + supports_native_boolean: bool + requires_name_normalize: bool + supports_empty_insert: bool + statement_compiler: Any + ddl_compiler: Any + preparer: Any + type_compiler: Any + colspecs: Any + ischema_names: Any + construct_arguments: Any + def __init__(self, *args, **kwargs) -> None: ... + implicit_returning: Any + def initialize(self, connection) -> None: ... + def has_table(self, connection, table_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def has_sequence(self, connection, sequence_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def get_table_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_view_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Incomplete | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... 
+ def get_column_sequence(self, connection, table_name, column_name, schema: Incomplete | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_indexes(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/fdb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/fdb.pyi new file mode 100644 index 00000000..4bc56c5a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/fdb.pyi @@ -0,0 +1,10 @@ +from .kinterbasdb import FBDialect_kinterbasdb + +class FBDialect_fdb(FBDialect_kinterbasdb): + supports_statement_cache: bool + def __init__(self, enable_rowcount: bool = ..., retaining: bool = ..., **kwargs) -> None: ... + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + +dialect = FBDialect_fdb diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/kinterbasdb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/kinterbasdb.pyi new file mode 100644 index 00000000..09d3dabd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/kinterbasdb.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete +from typing import Any + +from ...types import Float, Numeric +from .base import FBDialect, FBExecutionContext + +class _kinterbasdb_numeric: + def bind_processor(self, dialect): ... + +class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, Numeric): ... +class _FBFloat_kinterbasdb(_kinterbasdb_numeric, Float): ... + +class FBExecutionContext_kinterbasdb(FBExecutionContext): + @property + def rowcount(self): ... + +class FBDialect_kinterbasdb(FBDialect): + driver: str + supports_statement_cache: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_native_decimal: bool + colspecs: Any + enable_rowcount: Any + type_conv: Any + concurrency_level: Any + retaining: Any + def __init__( + self, type_conv: int = ..., concurrency_level: int = ..., enable_rowcount: bool = ..., retaining: bool = ..., **kwargs + ) -> None: ... + @classmethod + def dbapi(cls): ... + def do_execute(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def do_rollback(self, dbapi_connection) -> None: ... + def do_commit(self, dbapi_connection) -> None: ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = FBDialect_kinterbasdb diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/__init__.pyi new file mode 100644 index 00000000..c4b6c72f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/__init__.pyi @@ -0,0 +1,76 @@ +from typing import Any + +from .base import ( + BIGINT as BIGINT, + BINARY as BINARY, + BIT as BIT, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DATETIME2 as DATETIME2, + DATETIMEOFFSET as DATETIMEOFFSET, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + IMAGE as IMAGE, + INTEGER as INTEGER, + JSON as JSON, + MONEY as MONEY, + NCHAR as NCHAR, + NTEXT as NTEXT, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + ROWVERSION as ROWVERSION, + SMALLDATETIME as SMALLDATETIME, + SMALLINT as SMALLINT, + SMALLMONEY as SMALLMONEY, + SQL_VARIANT as SQL_VARIANT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + TINYINT as TINYINT, + UNIQUEIDENTIFIER as UNIQUEIDENTIFIER, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + XML as XML, + try_cast as try_cast, +) + +__all__ = ( + "JSON", + "INTEGER", + "BIGINT", + "SMALLINT", + "TINYINT", + "VARCHAR", + "NVARCHAR", + "CHAR", + "NCHAR", + "TEXT", + "NTEXT", + "DECIMAL", + "NUMERIC", + "FLOAT", + "DATETIME", + "DATETIME2", + "DATETIMEOFFSET", + "DATE", + "TIME", + "SMALLDATETIME", + "BINARY", + "VARBINARY", + "BIT", + "REAL", + "IMAGE", + "TIMESTAMP", + "ROWVERSION", + "MONEY", + "SMALLMONEY", + "UNIQUEIDENTIFIER", + "SQL_VARIANT", + "XML", + "dialect", + "try_cast", +) + +dialect: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/base.pyi new file mode 100644 index 00000000..47b9703a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/base.pyi @@ -0,0 +1,326 @@ +from _typeshed import Incomplete +from typing import Any, overload +from typing_extensions import Literal + +import sqlalchemy.types as sqltypes + +from ...engine import default +from ...sql import compiler +from ...sql.elements import Cast +from ...types import ( + BIGINT as BIGINT, + BINARY as BINARY, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INTEGER as INTEGER, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + SMALLINT as SMALLINT, + TEXT as TEXT, + VARCHAR as VARCHAR, +) +from .json import JSON as JSON + +MS_2017_VERSION: Any +MS_2016_VERSION: Any +MS_2014_VERSION: Any +MS_2012_VERSION: Any +MS_2008_VERSION: Any +MS_2005_VERSION: Any +MS_2000_VERSION: Any +RESERVED_WORDS: Any + +class REAL(sqltypes.REAL): + __visit_name__: str + def __init__(self, **kw) -> None: ... + +class TINYINT(sqltypes.Integer): + __visit_name__: str + +class _MSDate(sqltypes.Date): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class TIME(sqltypes.TIME): + precision: Any + def __init__(self, precision: Incomplete | None = ..., **kwargs) -> None: ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... 
+ +_MSTime = TIME + +class _BASETIMEIMPL(TIME): + __visit_name__: str + +class _DateTimeBase: + def bind_processor(self, dialect): ... + +class _MSDateTime(_DateTimeBase, sqltypes.DateTime): ... + +class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime): + __visit_name__: str + +class DATETIME2(_DateTimeBase, sqltypes.DateTime): + __visit_name__: str + precision: Any + def __init__(self, precision: Incomplete | None = ..., **kw) -> None: ... + +class DATETIMEOFFSET(_DateTimeBase, sqltypes.DateTime): + __visit_name__: str + precision: Any + def __init__(self, precision: Incomplete | None = ..., **kw) -> None: ... + +class _UnicodeLiteral: + def literal_processor(self, dialect): ... + +class _MSUnicode(_UnicodeLiteral, sqltypes.Unicode): ... +class _MSUnicodeText(_UnicodeLiteral, sqltypes.UnicodeText): ... + +class TIMESTAMP(sqltypes._Binary): + __visit_name__: str + length: Any + convert_int: Any + def __init__(self, convert_int: bool = ...) -> None: ... + def result_processor(self, dialect, coltype): ... + +class ROWVERSION(TIMESTAMP): + __visit_name__: str + +class NTEXT(sqltypes.UnicodeText): + __visit_name__: str + +class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary): + __visit_name__: str + filestream: bool + @overload + def __init__(self, length: Literal["max"] | None, filestream: Literal[True]) -> None: ... + @overload + def __init__(self, *, filestream: Literal[True]) -> None: ... + @overload + def __init__(self, length: Incomplete | None = ..., filestream: Literal[False] = ...) -> None: ... + +class IMAGE(sqltypes.LargeBinary): + __visit_name__: str + +class XML(sqltypes.Text): + __visit_name__: str + +class BIT(sqltypes.Boolean): + __visit_name__: str + +class MONEY(sqltypes.TypeEngine): + __visit_name__: str + +class SMALLMONEY(sqltypes.TypeEngine): + __visit_name__: str + +class UNIQUEIDENTIFIER(sqltypes.TypeEngine): + __visit_name__: str + +class SQL_VARIANT(sqltypes.TypeEngine): + __visit_name__: str + +class TryCast(Cast): + __visit_name__: str + stringify_dialect: str + inherit_cache: bool + def __init__(self, *arg, **kw) -> None: ... + +try_cast: Any +MSDateTime: Any +MSDate: Any +MSReal = REAL +MSTinyInteger = TINYINT +MSTime = TIME +MSSmallDateTime = SMALLDATETIME +MSDateTime2 = DATETIME2 +MSDateTimeOffset = DATETIMEOFFSET +MSText = TEXT +MSNText = NTEXT +MSString = VARCHAR +MSNVarchar = NVARCHAR +MSChar = CHAR +MSNChar = NCHAR +MSBinary = BINARY +MSVarBinary = VARBINARY +MSImage = IMAGE +MSBit = BIT +MSMoney = MONEY +MSSmallMoney = SMALLMONEY +MSUniqueIdentifier = UNIQUEIDENTIFIER +MSVariant = SQL_VARIANT +ischema_names: Any + +class MSTypeCompiler(compiler.GenericTypeCompiler): + def visit_FLOAT(self, type_, **kw): ... + def visit_TINYINT(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_ROWVERSION(self, type_, **kw): ... + def visit_datetime(self, type_, **kw): ... + def visit_DATETIMEOFFSET(self, type_, **kw): ... + def visit_DATETIME2(self, type_, **kw): ... + def visit_SMALLDATETIME(self, type_, **kw): ... + def visit_unicode(self, type_, **kw): ... + def visit_text(self, type_, **kw): ... + def visit_unicode_text(self, type_, **kw): ... + def visit_NTEXT(self, type_, **kw): ... + def visit_TEXT(self, type_, **kw): ... + def visit_VARCHAR(self, type_, **kw): ... + def visit_CHAR(self, type_, **kw): ... + def visit_NCHAR(self, type_, **kw): ... + def visit_NVARCHAR(self, type_, **kw): ... + def visit_date(self, type_, **kw): ... + def visit__BASETIMEIMPL(self, type_, **kw): ... 
+ def visit_time(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_IMAGE(self, type_, **kw): ... + def visit_XML(self, type_, **kw): ... + def visit_VARBINARY(self, type_, **kw): ... + def visit_boolean(self, type_, **kw): ... + def visit_BIT(self, type_, **kw): ... + def visit_JSON(self, type_, **kw): ... + def visit_MONEY(self, type_, **kw): ... + def visit_SMALLMONEY(self, type_, **kw): ... + def visit_UNIQUEIDENTIFIER(self, type_, **kw): ... + def visit_SQL_VARIANT(self, type_, **kw): ... + +class MSExecutionContext(default.DefaultExecutionContext): + def pre_exec(self) -> None: ... + cursor_fetch_strategy: Any + def post_exec(self) -> None: ... + def get_lastrowid(self): ... + @property + def rowcount(self): ... + def handle_dbapi_exception(self, e) -> None: ... + def get_result_cursor_strategy(self, result): ... + def fire_sequence(self, seq, type_): ... + def get_insert_default(self, column): ... + +class MSSQLCompiler(compiler.SQLCompiler): + returning_precedes_values: bool + extract_map: Any + tablealiases: Any + def __init__(self, *args, **kwargs) -> None: ... + def visit_now_func(self, fn, **kw): ... + def visit_current_date_func(self, fn, **kw): ... + def visit_length_func(self, fn, **kw): ... + def visit_char_length_func(self, fn, **kw): ... + def visit_concat_op_binary(self, binary, operator, **kw): ... + def visit_true(self, expr, **kw): ... + def visit_false(self, expr, **kw): ... + def visit_match_op_binary(self, binary, operator, **kw): ... + def get_select_precolumns(self, select, **kw): ... + def get_from_hint_text(self, table, text): ... + def get_crud_hint_text(self, table, text): ... + def fetch_clause(self, cs, **kwargs): ... + def limit_clause(self, cs, **kwargs): ... + def visit_try_cast(self, element, **kw): ... + def translate_select_structure(self, select_stmt, **kwargs): ... + def visit_table(self, table, mssql_aliased: bool = ..., iscrud: bool = ..., **kwargs): ... # type: ignore[override] + def visit_alias(self, alias, **kw): ... + def visit_column(self, column, add_to_result_map: Incomplete | None = ..., **kw): ... # type: ignore[override] + def visit_extract(self, extract, **kw): ... + def visit_savepoint(self, savepoint_stmt): ... + def visit_rollback_to_savepoint(self, savepoint_stmt): ... + def visit_binary(self, binary, **kwargs): ... + def returning_clause(self, stmt, returning_cols): ... + def get_cte_preamble(self, recursive): ... + def label_select_column(self, select, column, asfrom): ... + def for_update_clause(self, select, **kw): ... + def order_by_clause(self, select, **kw): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ... + def delete_table_clause(self, delete_stmt, from_table, extra_froms): ... + def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ... + def visit_empty_set_expr(self, type_): ... + def visit_is_distinct_from_binary(self, binary, operator, **kw): ... + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ... + def visit_json_getitem_op_binary(self, binary, operator, **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ... + def visit_sequence(self, seq, **kw): ... + +class MSSQLStrictCompiler(MSSQLCompiler): + ansi_bind_rules: bool + def visit_in_op_binary(self, binary, operator, **kw): ... + def visit_not_in_op_binary(self, binary, operator, **kw): ... + def render_literal_value(self, value, type_): ... 
+ +class MSDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): ... + def visit_create_index(self, create, include_schema: bool = ...): ... # type: ignore[override] + def visit_drop_index(self, drop): ... + def visit_primary_key_constraint(self, constraint): ... + def visit_unique_constraint(self, constraint): ... + def visit_computed_column(self, generated): ... + def visit_create_sequence(self, create, **kw): ... + def visit_identity_column(self, identity, **kw): ... + +class MSIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + def __init__(self, dialect) -> None: ... + def quote_schema(self, schema, force: Incomplete | None = ...): ... + +class MSDialect(default.DefaultDialect): + name: str + supports_statement_cache: bool + supports_default_values: bool + supports_empty_insert: bool + use_scope_identity: bool + max_identifier_length: int + schema_name: str + implicit_returning: bool + full_returning: bool + colspecs: Any + engine_config_types: Any + ischema_names: Any + supports_sequences: bool + sequences_optional: bool + default_sequence_base: int + supports_native_boolean: bool + non_native_boolean_check_constraint: bool + supports_unicode_binds: bool + postfetch_lastrowid: bool + legacy_schema_aliasing: bool + server_version_info: Any + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + construct_arguments: Any + query_timeout: Any + deprecate_large_types: Any + isolation_level: Any + def __init__( + self, + query_timeout: Incomplete | None = ..., + use_scope_identity: bool = ..., + schema_name: str = ..., + isolation_level: Incomplete | None = ..., + deprecate_large_types: Incomplete | None = ..., + json_serializer: Incomplete | None = ..., + json_deserializer: Incomplete | None = ..., + legacy_schema_aliasing: Incomplete | None = ..., + ignore_no_transaction_on_rollback: bool = ..., + **opts, + ) -> None: ... + def do_savepoint(self, connection, name) -> None: ... + def do_release_savepoint(self, connection, name) -> None: ... + def set_isolation_level(self, connection, level) -> None: ... + def get_isolation_level(self, dbapi_connection): ... + def initialize(self, connection) -> None: ... + def on_connect(self): ... + def has_table(self, connection, tablename, dbname, owner, schema): ... + def has_sequence(self, connection, sequencename, dbname, owner, schema): ... + def get_sequence_names(self, connection, dbname, owner, schema, **kw): ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, dbname, owner, schema, **kw): ... + def get_view_names(self, connection, dbname, owner, schema, **kw): ... + def get_indexes(self, connection, tablename, dbname, owner, schema, **kw): ... + def get_view_definition(self, connection, viewname, dbname, owner, schema, **kw): ... + def get_columns(self, connection, tablename, dbname, owner, schema, **kw): ... + def get_pk_constraint(self, connection, tablename, dbname, owner, schema, **kw): ... + def get_foreign_keys(self, connection, tablename, dbname, owner, schema, **kw): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/information_schema.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/information_schema.pyi new file mode 100644 index 00000000..0a97a197 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/information_schema.pyi @@ -0,0 +1,35 @@ +from typing import Any + +from ...sql import expression +from ...types import TypeDecorator + +ischema: Any + +class CoerceUnicode(TypeDecorator): + impl: Any + cache_ok: bool + def process_bind_param(self, value, dialect): ... + def bind_expression(self, bindvalue): ... + +class _cast_on_2005(expression.ColumnElement[Any]): + bindvalue: Any + def __init__(self, bindvalue) -> None: ... + +schemata: Any +tables: Any +columns: Any +mssql_temp_table_columns: Any +constraints: Any +column_constraints: Any +key_constraints: Any +ref_constraints: Any +views: Any +computed_columns: Any +sequences: Any + +class IdentitySqlVariant(TypeDecorator): + impl: Any + cache_ok: bool + def column_expression(self, colexpr): ... + +identity_columns: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/json.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/json.pyi new file mode 100644 index 00000000..2ced3bee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/json.pyi @@ -0,0 +1,10 @@ +from ...types import JSON as _JSON + +class JSON(_JSON): ... + +class _FormatTypeMixin: + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + +class JSONIndexType(_FormatTypeMixin, _JSON.JSONIndexType): ... +class JSONPathType(_FormatTypeMixin, _JSON.JSONPathType): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/mxodbc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/mxodbc.pyi new file mode 100644 index 00000000..ee58d3de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/mxodbc.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete +from typing import Any + +from ...connectors.mxodbc import MxODBCConnector +from .base import VARBINARY, MSDialect, _MSDate, _MSTime +from .pyodbc import MSExecutionContext_pyodbc, _MSNumeric_pyodbc + +class _MSNumeric_mxodbc(_MSNumeric_pyodbc): ... + +class _MSDate_mxodbc(_MSDate): + def bind_processor(self, dialect): ... + +class _MSTime_mxodbc(_MSTime): + def bind_processor(self, dialect): ... + +class _VARBINARY_mxodbc(VARBINARY): + def bind_processor(self, dialect): ... + +class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc): ... + +class MSDialect_mxodbc(MxODBCConnector, MSDialect): + supports_statement_cache: bool + colspecs: Any + description_encoding: Any + def __init__(self, description_encoding: Incomplete | None = ..., **params) -> None: ... 
+ +dialect = MSDialect_mxodbc diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/provision.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/provision.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pymssql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pymssql.pyi new file mode 100644 index 00000000..b16a0b4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pymssql.pyi @@ -0,0 +1,24 @@ +from typing import Any + +from ...types import Numeric +from .base import MSDialect, MSIdentifierPreparer + +class _MSNumeric_pymssql(Numeric): + def result_processor(self, dialect, type_): ... + +class MSIdentifierPreparer_pymssql(MSIdentifierPreparer): + def __init__(self, dialect) -> None: ... + +class MSDialect_pymssql(MSDialect): + supports_statement_cache: bool + supports_native_decimal: bool + driver: str + preparer: Any + colspecs: Any + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def set_isolation_level(self, connection, level) -> None: ... + +dialect = MSDialect_pymssql diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pyodbc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pyodbc.pyi new file mode 100644 index 00000000..0ee156b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pyodbc.pyi @@ -0,0 +1,45 @@ +from _typeshed import Incomplete +from typing import Any + +from ...connectors.pyodbc import PyODBCConnector +from ...types import DateTime, Float, Numeric +from .base import BINARY, DATETIMEOFFSET, VARBINARY, MSDialect, MSExecutionContext + +class _ms_numeric_pyodbc: + def bind_processor(self, dialect): ... + +class _MSNumeric_pyodbc(_ms_numeric_pyodbc, Numeric): ... +class _MSFloat_pyodbc(_ms_numeric_pyodbc, Float): ... + +class _ms_binary_pyodbc: + def bind_processor(self, dialect): ... + +class _ODBCDateTimeBindProcessor: + has_tz: bool + def bind_processor(self, dialect): ... + +class _ODBCDateTime(_ODBCDateTimeBindProcessor, DateTime): ... + +class _ODBCDATETIMEOFFSET(_ODBCDateTimeBindProcessor, DATETIMEOFFSET): + has_tz: bool + +class _VARBINARY_pyodbc(_ms_binary_pyodbc, VARBINARY): ... +class _BINARY_pyodbc(_ms_binary_pyodbc, BINARY): ... + +class MSExecutionContext_pyodbc(MSExecutionContext): + def pre_exec(self) -> None: ... + def post_exec(self) -> None: ... + +class MSDialect_pyodbc(PyODBCConnector, MSDialect): + supports_statement_cache: bool + supports_sane_rowcount_returning: bool + colspecs: Any + description_encoding: Any + use_scope_identity: Any + fast_executemany: Any + def __init__(self, description_encoding: Incomplete | None = ..., fast_executemany: bool = ..., **params) -> None: ... + def on_connect(self): ... + def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = MSDialect_pyodbc diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/__init__.pyi new file mode 100644 index 00000000..faaa7d6e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/__init__.pyi @@ -0,0 +1,85 @@ +from typing import Any + +from .base import ( + BIGINT as BIGINT, + BINARY as BINARY, + BIT as BIT, + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + DOUBLE as DOUBLE, + ENUM as ENUM, + FLOAT as FLOAT, + INTEGER as INTEGER, + JSON as JSON, + LONGBLOB as LONGBLOB, + LONGTEXT as LONGTEXT, + MEDIUMBLOB as MEDIUMBLOB, + MEDIUMINT as MEDIUMINT, + MEDIUMTEXT as MEDIUMTEXT, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SET as SET, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + TINYBLOB as TINYBLOB, + TINYINT as TINYINT, + TINYTEXT as TINYTEXT, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + YEAR as YEAR, +) +from .dml import Insert as Insert, insert as insert +from .expression import match as match + +__all__ = ( + "BIGINT", + "BINARY", + "BIT", + "BLOB", + "BOOLEAN", + "CHAR", + "DATE", + "DATETIME", + "DECIMAL", + "DOUBLE", + "ENUM", + "DECIMAL", + "FLOAT", + "INTEGER", + "INTEGER", + "JSON", + "LONGBLOB", + "LONGTEXT", + "MEDIUMBLOB", + "MEDIUMINT", + "MEDIUMTEXT", + "NCHAR", + "NVARCHAR", + "NUMERIC", + "SET", + "SMALLINT", + "REAL", + "TEXT", + "TIME", + "TIMESTAMP", + "TINYBLOB", + "TINYINT", + "TINYTEXT", + "VARBINARY", + "VARCHAR", + "YEAR", + "dialect", + "insert", + "Insert", + "match", +) + +dialect: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/aiomysql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/aiomysql.pyi new file mode 100644 index 00000000..3dc245d0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/aiomysql.pyi @@ -0,0 +1,74 @@ +from _typeshed import Incomplete +from typing import Any + +from ...engine import AdaptedConnection +from .pymysql import MySQLDialect_pymysql + +class AsyncAdapt_aiomysql_cursor: + server_side: bool + await_: Any + def __init__(self, adapt_connection) -> None: ... + @property + def description(self): ... + @property + def rowcount(self): ... + @property + def arraysize(self): ... + @arraysize.setter + def arraysize(self, value) -> None: ... + @property + def lastrowid(self): ... + def close(self) -> None: ... + def execute(self, operation, parameters: Incomplete | None = ...): ... + def executemany(self, operation, seq_of_parameters): ... + def setinputsizes(self, *inputsizes) -> None: ... + def __iter__(self): ... + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor): + server_side: bool + await_: Any + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... 
+ +class AsyncAdapt_aiomysql_connection(AdaptedConnection): + await_: Any + dbapi: Any + def __init__(self, dbapi, connection) -> None: ... + def ping(self, reconnect): ... + def character_set_name(self): ... + def autocommit(self, value) -> None: ... + def cursor(self, server_side: bool = ...): ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + def close(self) -> None: ... + +class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection): + await_: Any + +class AsyncAdapt_aiomysql_dbapi: + aiomysql: Any + pymysql: Any + paramstyle: str + def __init__(self, aiomysql, pymysql) -> None: ... + def connect(self, *arg, **kw): ... + +class MySQLDialect_aiomysql(MySQLDialect_pymysql): + driver: str + supports_statement_cache: bool + supports_server_side_cursors: bool + is_async: bool + @classmethod + def dbapi(cls): ... + @classmethod + def get_pool_class(cls, url): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def get_driver_connection(self, connection): ... + +dialect = MySQLDialect_aiomysql diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/asyncmy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/asyncmy.pyi new file mode 100644 index 00000000..69a9d557 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/asyncmy.pyi @@ -0,0 +1,74 @@ +from _typeshed import Incomplete +from typing import Any + +from ...engine import AdaptedConnection +from .pymysql import MySQLDialect_pymysql + +class AsyncAdapt_asyncmy_cursor: + server_side: bool + await_: Any + def __init__(self, adapt_connection) -> None: ... + @property + def description(self): ... + @property + def rowcount(self): ... + @property + def arraysize(self): ... + @arraysize.setter + def arraysize(self, value) -> None: ... + @property + def lastrowid(self): ... + def close(self) -> None: ... + def execute(self, operation, parameters: Incomplete | None = ...): ... + def executemany(self, operation, seq_of_parameters): ... + def setinputsizes(self, *inputsizes) -> None: ... + def __iter__(self): ... + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor): + server_side: bool + await_: Any + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_asyncmy_connection(AdaptedConnection): + await_: Any + dbapi: Any + def __init__(self, dbapi, connection) -> None: ... + def ping(self, reconnect): ... + def character_set_name(self): ... + def autocommit(self, value) -> None: ... + def cursor(self, server_side: bool = ...): ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + def close(self) -> None: ... + +class AsyncAdaptFallback_asyncmy_connection(AsyncAdapt_asyncmy_connection): + await_: Any + +class AsyncAdapt_asyncmy_dbapi: + asyncmy: Any + pymysql: Any + paramstyle: str + def __init__(self, asyncmy: Any) -> None: ... + def connect(self, *arg, **kw): ... + +class MySQLDialect_asyncmy(MySQLDialect_pymysql): + driver: str + supports_statement_cache: bool + supports_server_side_cursors: bool + is_async: bool + @classmethod + def dbapi(cls): ... 
+ @classmethod + def get_pool_class(cls, url): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def get_driver_connection(self, connection): ... + +dialect = MySQLDialect_asyncmy diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/base.pyi new file mode 100644 index 00000000..129de51e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/base.pyi @@ -0,0 +1,240 @@ +from _typeshed import Incomplete +from typing import Any + +from ...engine import default +from ...sql import compiler +from ...types import BINARY as BINARY, BLOB as BLOB, BOOLEAN as BOOLEAN, DATE as DATE, VARBINARY as VARBINARY +from .enumerated import ENUM as ENUM, SET as SET +from .json import JSON as JSON +from .types import ( + BIGINT as BIGINT, + BIT as BIT, + CHAR as CHAR, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + DOUBLE as DOUBLE, + FLOAT as FLOAT, + INTEGER as INTEGER, + LONGBLOB as LONGBLOB, + LONGTEXT as LONGTEXT, + MEDIUMBLOB as MEDIUMBLOB, + MEDIUMINT as MEDIUMINT, + MEDIUMTEXT as MEDIUMTEXT, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + TINYBLOB as TINYBLOB, + TINYINT as TINYINT, + TINYTEXT as TINYTEXT, + VARCHAR as VARCHAR, + YEAR as YEAR, +) + +AUTOCOMMIT_RE: Any +SET_RE: Any +MSTime = TIME +MSSet = SET +MSEnum = ENUM +MSLongBlob = LONGBLOB +MSMediumBlob = MEDIUMBLOB +MSTinyBlob = TINYBLOB +MSBlob = BLOB +MSBinary = BINARY +MSVarBinary = VARBINARY +MSNChar = NCHAR +MSNVarChar = NVARCHAR +MSChar = CHAR +MSString = VARCHAR +MSLongText = LONGTEXT +MSMediumText = MEDIUMTEXT +MSTinyText = TINYTEXT +MSText = TEXT +MSYear = YEAR +MSTimeStamp = TIMESTAMP +MSBit = BIT +MSSmallInteger = SMALLINT +MSTinyInteger = TINYINT +MSMediumInteger = MEDIUMINT +MSBigInteger = BIGINT +MSNumeric = NUMERIC +MSDecimal = DECIMAL +MSDouble = DOUBLE +MSReal = REAL +MSFloat = FLOAT +MSInteger = INTEGER +colspecs: Any +ischema_names: Any + +class MySQLExecutionContext(default.DefaultExecutionContext): + def should_autocommit_text(self, statement): ... + def create_server_side_cursor(self): ... + def fire_sequence(self, seq, type_): ... + +class MySQLCompiler(compiler.SQLCompiler): + render_table_with_column_in_update_from: bool + extract_map: Any + def default_from(self): ... + def visit_random_func(self, fn, **kw): ... + def visit_sequence(self, seq, **kw): ... + def visit_sysdate_func(self, fn, **kw): ... + def visit_json_getitem_op_binary(self, binary, operator, **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ... + def visit_on_duplicate_key_update(self, on_duplicate, **kw): ... + def visit_concat_op_binary(self, binary, operator, **kw): ... + def visit_mysql_match(self, element, **kw): ... + def visit_match_op_binary(self, binary, operator, **kw): ... + def get_from_hint_text(self, table, text): ... + def visit_typeclause(self, typeclause, type_: Incomplete | None = ..., **kw): ... + def visit_cast(self, cast, **kw): ... + def render_literal_value(self, value, type_): ... + def visit_true(self, element, **kw): ... + def visit_false(self, element, **kw): ... + def get_select_precolumns(self, select, **kw): ... 
+ def visit_join(self, join, asfrom: bool = ..., from_linter: Incomplete | None = ..., **kwargs): ... + def for_update_clause(self, select, **kw): ... + def limit_clause(self, select, **kw): ... + def update_limit_clause(self, update_stmt): ... + def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw) -> None: ... + def delete_table_clause(self, delete_stmt, from_table, extra_froms): ... + def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ... + def visit_empty_set_expr(self, element_types): ... + def visit_is_distinct_from_binary(self, binary, operator, **kw): ... + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw): ... + +class MySQLDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kw): ... + def post_create_table(self, table): ... + def visit_create_index(self, create, **kw): ... + def visit_primary_key_constraint(self, constraint): ... + def visit_drop_index(self, drop): ... + def visit_drop_constraint(self, drop): ... + def define_constraint_match(self, constraint): ... + def visit_set_table_comment(self, create): ... + def visit_drop_table_comment(self, create): ... + def visit_set_column_comment(self, create): ... + +class MySQLTypeCompiler(compiler.GenericTypeCompiler): + def visit_NUMERIC(self, type_, **kw): ... + def visit_DECIMAL(self, type_, **kw): ... + def visit_DOUBLE(self, type_, **kw): ... + def visit_REAL(self, type_, **kw): ... + def visit_FLOAT(self, type_, **kw): ... + def visit_INTEGER(self, type_, **kw): ... + def visit_BIGINT(self, type_, **kw): ... + def visit_MEDIUMINT(self, type_, **kw): ... + def visit_TINYINT(self, type_, **kw): ... + def visit_SMALLINT(self, type_, **kw): ... + def visit_BIT(self, type_, **kw): ... + def visit_DATETIME(self, type_, **kw): ... + def visit_DATE(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_YEAR(self, type_, **kw): ... + def visit_TEXT(self, type_, **kw): ... + def visit_TINYTEXT(self, type_, **kw): ... + def visit_MEDIUMTEXT(self, type_, **kw): ... + def visit_LONGTEXT(self, type_, **kw): ... + def visit_VARCHAR(self, type_, **kw): ... + def visit_CHAR(self, type_, **kw): ... + def visit_NVARCHAR(self, type_, **kw): ... + def visit_NCHAR(self, type_, **kw): ... + def visit_VARBINARY(self, type_, **kw): ... + def visit_JSON(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_enum(self, type_, **kw): ... + def visit_BLOB(self, type_, **kw): ... + def visit_TINYBLOB(self, type_, **kw): ... + def visit_MEDIUMBLOB(self, type_, **kw): ... + def visit_LONGBLOB(self, type_, **kw): ... + def visit_ENUM(self, type_, **kw): ... + def visit_SET(self, type_, **kw): ... + def visit_BOOLEAN(self, type_, **kw): ... + +class MySQLIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + def __init__(self, dialect, server_ansiquotes: bool = ..., **kw) -> None: ... 
+ +class MariaDBIdentifierPreparer(MySQLIdentifierPreparer): + reserved_words: Any + +class MySQLDialect(default.DefaultDialect): + logger: Any + name: str + supports_statement_cache: bool + supports_alter: bool + supports_native_boolean: bool + max_identifier_length: int + max_index_name_length: int + max_constraint_name_length: int + supports_native_enum: bool + supports_sequences: bool + sequences_optional: bool + supports_for_update_of: bool + supports_default_values: bool + supports_default_metavalue: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_multivalues_insert: bool + supports_comments: bool + inline_comments: bool + default_paramstyle: str + colspecs: Any + cte_follows_insert: bool + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + ischema_names: Any + preparer: Any + is_mariadb: bool + construct_arguments: Any + isolation_level: Any + def __init__( + self, + isolation_level: Incomplete | None = ..., + json_serializer: Incomplete | None = ..., + json_deserializer: Incomplete | None = ..., + is_mariadb: Incomplete | None = ..., + **kwargs, + ) -> None: ... + def on_connect(self): ... + def set_isolation_level(self, connection, level) -> None: ... + def get_isolation_level(self, connection): ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_recover_twophase(self, connection): ... + def is_disconnect(self, e, connection, cursor): ... + def has_table(self, connection, table_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def has_sequence(self, connection, sequence_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def get_sequence_names(self, connection, schema: Incomplete | None = ..., **kw): ... + identifier_preparer: Any + def initialize(self, connection) -> None: ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_view_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_table_options(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_check_constraints(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_table_comment(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_indexes(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Incomplete | None = ..., **kw): ... + +class _DecodingRow: + rowproxy: Any + charset: Any + def __init__(self, rowproxy, charset) -> None: ... + def __getitem__(self, index): ... + def __getattr__(self, attr: str): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/cymysql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/cymysql.pyi new file mode 100644 index 00000000..408d931d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/cymysql.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from .base import BIT +from .mysqldb import MySQLDialect_mysqldb + +class _cymysqlBIT(BIT): + def result_processor(self, dialect, coltype): ... + +class MySQLDialect_cymysql(MySQLDialect_mysqldb): + driver: str + supports_statement_cache: bool + description_encoding: Any + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_unicode_statements: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = MySQLDialect_cymysql diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/dml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/dml.pyi new file mode 100644 index 00000000..77b095c1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/dml.pyi @@ -0,0 +1,23 @@ +from typing import Any + +from ...sql.dml import Insert as StandardInsert +from ...sql.elements import ClauseElement +from ...util import memoized_property + +class Insert(StandardInsert): + stringify_dialect: str + inherit_cache: bool + @property + def inserted(self): ... + @memoized_property + def inserted_alias(self): ... + def on_duplicate_key_update(self, *args, **kw) -> None: ... + +insert: Any + +class OnDuplicateClause(ClauseElement): + __visit_name__: str + stringify_dialect: str + inserted_alias: Any + update: Any + def __init__(self, inserted_alias, update) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/enumerated.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/enumerated.pyi new file mode 100644 index 00000000..e75307ef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/enumerated.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from ...sql import sqltypes +from .types import _StringType + +class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _StringType): # type: ignore[misc] # incompatible with base class + __visit_name__: str + native_enum: bool + def __init__(self, *enums, **kw) -> None: ... + @classmethod + def adapt_emulated_to_native(cls, impl, **kw): ... + +class SET(_StringType): + __visit_name__: str + retrieve_as_bitwise: Any + values: Any + def __init__(self, *values, **kw) -> None: ... + def column_expression(self, colexpr): ... + def result_processor(self, dialect, coltype): ... + def bind_processor(self, dialect): ... + def adapt(self, impltype, **kw): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/expression.pyi new file mode 100644 index 00000000..24d63634 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/expression.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from ...sql import elements +from ...sql.base import Generative + +class match(Generative, elements.BinaryExpression): + __visit_name__: str + inherit_cache: bool + def __init__(self, *cols, **kw) -> None: ... + modifiers: Any + def in_boolean_mode(self) -> None: ... + def in_natural_language_mode(self) -> None: ... + def with_query_expansion(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/json.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/json.pyi new file mode 100644 index 00000000..c35f9c44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/json.pyi @@ -0,0 +1,10 @@ +import sqlalchemy.types as sqltypes + +class JSON(sqltypes.JSON): ... + +class _FormatTypeMixin: + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + +class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType): ... +class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadb.pyi new file mode 100644 index 00000000..db8f5abd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadb.pyi @@ -0,0 +1,11 @@ +from typing import Any + +from .base import MySQLDialect + +class MariaDBDialect(MySQLDialect): + is_mariadb: bool + supports_statement_cache: bool + name: str + preparer: Any + +def loader(driver): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadbconnector.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadbconnector.pyi new file mode 100644 index 00000000..0735fb75 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadbconnector.pyi @@ -0,0 +1,36 @@ +from typing import Any + +from .base import MySQLCompiler, MySQLDialect, MySQLExecutionContext + +mariadb_cpy_minimum_version: Any + +class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext): + def create_server_side_cursor(self): ... + def create_default_cursor(self): ... + +class MySQLCompiler_mariadbconnector(MySQLCompiler): ... 
+ +class MySQLDialect_mariadbconnector(MySQLDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + encoding: str + convert_unicode: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_native_decimal: bool + default_paramstyle: str + statement_compiler: Any + supports_server_side_cursors: bool + paramstyle: str + def __init__(self, **kwargs) -> None: ... + @classmethod + def dbapi(cls): ... + def is_disconnect(self, e, connection, cursor): ... + def create_connect_args(self, url): ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + +dialect = MySQLDialect_mariadbconnector diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqlconnector.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqlconnector.pyi new file mode 100644 index 00000000..df0a6358 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqlconnector.pyi @@ -0,0 +1,38 @@ +from typing import Any + +from ...util import memoized_property +from .base import BIT, MySQLCompiler, MySQLDialect, MySQLIdentifierPreparer + +class MySQLCompiler_mysqlconnector(MySQLCompiler): + def visit_mod_binary(self, binary, operator, **kw): ... + def post_process_text(self, text): ... + def escape_literal_column(self, text): ... + +class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): ... + +class _myconnpyBIT(BIT): + def result_processor(self, dialect, coltype) -> None: ... + +class MySQLDialect_mysqlconnector(MySQLDialect): + driver: str + supports_statement_cache: bool + supports_unicode_binds: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_native_decimal: bool + default_paramstyle: str + statement_compiler: Any + preparer: Any + colspecs: Any + def __init__(self, *arg, **kw) -> None: ... + @property + def description_encoding(self): ... + @memoized_property + def supports_unicode_statements(self): ... + @classmethod + def dbapi(cls): ... + def do_ping(self, dbapi_connection): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = MySQLDialect_mysqlconnector diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqldb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqldb.pyi new file mode 100644 index 00000000..a8479ad8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqldb.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from typing import Any + +from ...util import memoized_property +from .base import MySQLCompiler, MySQLDialect, MySQLExecutionContext + +class MySQLExecutionContext_mysqldb(MySQLExecutionContext): + @property + def rowcount(self): ... + +class MySQLCompiler_mysqldb(MySQLCompiler): ... 
+ +class MySQLDialect_mysqldb(MySQLDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_native_decimal: bool + default_paramstyle: str + statement_compiler: Any + preparer: Any + def __init__(self, **kwargs) -> None: ... + @memoized_property + def supports_server_side_cursors(self): ... + @classmethod + def dbapi(cls): ... + def on_connect(self): ... + def do_ping(self, dbapi_connection): ... + def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def create_connect_args(self, url, _translate_args: Incomplete | None = ...): ... + +dialect = MySQLDialect_mysqldb diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/oursql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/oursql.pyi new file mode 100644 index 00000000..1bc65d97 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/oursql.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BIT, MySQLDialect, MySQLExecutionContext + +class _oursqlBIT(BIT): + def result_processor(self, dialect, coltype) -> None: ... + +class MySQLExecutionContext_oursql(MySQLExecutionContext): + @property + def plain_query(self): ... + +class MySQLDialect_oursql(MySQLDialect): + driver: str + supports_statement_cache: bool + supports_unicode_binds: bool + supports_unicode_statements: bool + supports_native_decimal: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + def do_execute(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def do_begin(self, connection) -> None: ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def has_table(self, connection, table_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def get_table_options(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_view_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_table_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_schema_names(self, connection, **kw): ... + def initialize(self, connection): ... + def is_disconnect(self, e, connection, cursor): ... + def create_connect_args(self, url): ... 
+ +dialect = MySQLDialect_oursql diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/provision.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/provision.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pymysql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pymysql.pyi new file mode 100644 index 00000000..fa7a4908 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pymysql.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete +from typing import Any + +from ...util import memoized_property +from .mysqldb import MySQLDialect_mysqldb + +class MySQLDialect_pymysql(MySQLDialect_mysqldb): + driver: str + supports_statement_cache: bool + description_encoding: Any + supports_unicode_statements: bool + supports_unicode_binds: bool + @memoized_property + def supports_server_side_cursors(self): ... + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url, _translate_args: Incomplete | None = ...): ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = MySQLDialect_pymysql diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pyodbc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pyodbc.pyi new file mode 100644 index 00000000..f9363c3c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pyodbc.pyi @@ -0,0 +1,20 @@ +from typing import Any + +from ...connectors.pyodbc import PyODBCConnector +from .base import MySQLDialect, MySQLExecutionContext +from .types import TIME + +class _pyodbcTIME(TIME): + def result_processor(self, dialect, coltype): ... + +class MySQLExecutionContext_pyodbc(MySQLExecutionContext): + def get_lastrowid(self): ... + +class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect): + supports_statement_cache: bool + colspecs: Any + supports_unicode_statements: bool + pyodbc_driver_name: str + def on_connect(self): ... + +dialect = MySQLDialect_pyodbc diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reflection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reflection.pyi new file mode 100644 index 00000000..f27c1852 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reflection.pyi @@ -0,0 +1,16 @@ +from typing import Any + +class ReflectedState: + columns: Any + table_options: Any + table_name: Any + keys: Any + fk_constraints: Any + ck_constraints: Any + +class MySQLTableDefinitionParser: + logger: Any + dialect: Any + preparer: Any + def __init__(self, dialect, preparer) -> None: ... + def parse(self, show_create, charset): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reserved_words.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reserved_words.pyi new file mode 100644 index 00000000..28a741b2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reserved_words.pyi @@ -0,0 +1,4 @@ +from typing import Any + +RESERVED_WORDS_MARIADB: Any +RESERVED_WORDS_MYSQL: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/types.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/types.pyi new file mode 100644 index 00000000..5db2c925 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/types.pyi @@ -0,0 +1,158 @@ +from _typeshed import Incomplete +from typing import Any + +import sqlalchemy.types as sqltypes + +class _NumericType: + unsigned: Any + zerofill: Any + def __init__(self, unsigned: bool = ..., zerofill: bool = ..., **kw) -> None: ... + +class _FloatType(_NumericType, sqltypes.Float): + scale: Any + def __init__( + self, precision: Incomplete | None = ..., scale: Incomplete | None = ..., asdecimal: bool = ..., **kw + ) -> None: ... + +class _IntegerType(_NumericType, sqltypes.Integer): + display_width: Any + def __init__(self, display_width: Incomplete | None = ..., **kw) -> None: ... + +class _StringType(sqltypes.String): + charset: Any + ascii: Any + unicode: Any + binary: Any + national: Any + def __init__( + self, + charset: Incomplete | None = ..., + collation: Incomplete | None = ..., + ascii: bool = ..., + binary: bool = ..., + unicode: bool = ..., + national: bool = ..., + **kw, + ) -> None: ... + +class _MatchType(sqltypes.Float, sqltypes.MatchType): # type: ignore[misc] # incompatible with base class + def __init__(self, **kw) -> None: ... + +class NUMERIC(_NumericType, sqltypes.NUMERIC): + __visit_name__: str + def __init__( + self, precision: Incomplete | None = ..., scale: Incomplete | None = ..., asdecimal: bool = ..., **kw + ) -> None: ... + +class DECIMAL(_NumericType, sqltypes.DECIMAL): + __visit_name__: str + def __init__( + self, precision: Incomplete | None = ..., scale: Incomplete | None = ..., asdecimal: bool = ..., **kw + ) -> None: ... + +class DOUBLE(_FloatType): + __visit_name__: str + def __init__( + self, precision: Incomplete | None = ..., scale: Incomplete | None = ..., asdecimal: bool = ..., **kw + ) -> None: ... + +class REAL(_FloatType, sqltypes.REAL): + __visit_name__: str + def __init__( + self, precision: Incomplete | None = ..., scale: Incomplete | None = ..., asdecimal: bool = ..., **kw + ) -> None: ... + +class FLOAT(_FloatType, sqltypes.FLOAT): + __visit_name__: str + def __init__( + self, precision: Incomplete | None = ..., scale: Incomplete | None = ..., asdecimal: bool = ..., **kw + ) -> None: ... + def bind_processor(self, dialect) -> None: ... + +class INTEGER(_IntegerType, sqltypes.INTEGER): + __visit_name__: str + def __init__(self, display_width: Incomplete | None = ..., **kw) -> None: ... + +class BIGINT(_IntegerType, sqltypes.BIGINT): + __visit_name__: str + def __init__(self, display_width: Incomplete | None = ..., **kw) -> None: ... 
+ +class MEDIUMINT(_IntegerType): + __visit_name__: str + def __init__(self, display_width: Incomplete | None = ..., **kw) -> None: ... + +class TINYINT(_IntegerType): + __visit_name__: str + def __init__(self, display_width: Incomplete | None = ..., **kw) -> None: ... + +class SMALLINT(_IntegerType, sqltypes.SMALLINT): + __visit_name__: str + def __init__(self, display_width: Incomplete | None = ..., **kw) -> None: ... + +class BIT(sqltypes.TypeEngine): + __visit_name__: str + length: Any + def __init__(self, length: Incomplete | None = ...) -> None: ... + def result_processor(self, dialect, coltype): ... + +class TIME(sqltypes.TIME): + __visit_name__: str + fsp: Any + def __init__(self, timezone: bool = ..., fsp: Incomplete | None = ...) -> None: ... + def result_processor(self, dialect, coltype): ... + +class TIMESTAMP(sqltypes.TIMESTAMP): + __visit_name__: str + fsp: Any + def __init__(self, timezone: bool = ..., fsp: Incomplete | None = ...) -> None: ... + +class DATETIME(sqltypes.DATETIME): + __visit_name__: str + fsp: Any + def __init__(self, timezone: bool = ..., fsp: Incomplete | None = ...) -> None: ... + +class YEAR(sqltypes.TypeEngine): + __visit_name__: str + display_width: Any + def __init__(self, display_width: Incomplete | None = ...) -> None: ... + +class TEXT(_StringType, sqltypes.TEXT): + __visit_name__: str + def __init__(self, length: Incomplete | None = ..., **kw) -> None: ... + +class TINYTEXT(_StringType): + __visit_name__: str + def __init__(self, **kwargs) -> None: ... + +class MEDIUMTEXT(_StringType): + __visit_name__: str + def __init__(self, **kwargs) -> None: ... + +class LONGTEXT(_StringType): + __visit_name__: str + def __init__(self, **kwargs) -> None: ... + +class VARCHAR(_StringType, sqltypes.VARCHAR): + __visit_name__: str + def __init__(self, length: Incomplete | None = ..., **kwargs) -> None: ... + +class CHAR(_StringType, sqltypes.CHAR): + __visit_name__: str + def __init__(self, length: Incomplete | None = ..., **kwargs) -> None: ... + +class NVARCHAR(_StringType, sqltypes.NVARCHAR): + __visit_name__: str + def __init__(self, length: Incomplete | None = ..., **kwargs) -> None: ... + +class NCHAR(_StringType, sqltypes.NCHAR): + __visit_name__: str + def __init__(self, length: Incomplete | None = ..., **kwargs) -> None: ... 
+ +class TINYBLOB(sqltypes._Binary): + __visit_name__: str + +class MEDIUMBLOB(sqltypes._Binary): + __visit_name__: str + +class LONGBLOB(sqltypes._Binary): + __visit_name__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/__init__.pyi new file mode 100644 index 00000000..3cc1662f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/__init__.pyi @@ -0,0 +1,52 @@ +from typing import Any + +from .base import ( + BFILE as BFILE, + BINARY_DOUBLE as BINARY_DOUBLE, + BINARY_FLOAT as BINARY_FLOAT, + BLOB as BLOB, + CHAR as CHAR, + CLOB as CLOB, + DATE as DATE, + DOUBLE_PRECISION as DOUBLE_PRECISION, + FLOAT as FLOAT, + INTERVAL as INTERVAL, + LONG as LONG, + NCHAR as NCHAR, + NCLOB as NCLOB, + NUMBER as NUMBER, + NVARCHAR as NVARCHAR, + NVARCHAR2 as NVARCHAR2, + RAW as RAW, + ROWID as ROWID, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, + VARCHAR2 as VARCHAR2, +) + +__all__ = ( + "VARCHAR", + "NVARCHAR", + "CHAR", + "NCHAR", + "DATE", + "NUMBER", + "BLOB", + "BFILE", + "CLOB", + "NCLOB", + "TIMESTAMP", + "RAW", + "FLOAT", + "DOUBLE_PRECISION", + "BINARY_DOUBLE", + "BINARY_FLOAT", + "LONG", + "dialect", + "INTERVAL", + "VARCHAR2", + "NVARCHAR2", + "ROWID", +) + +dialect: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/base.pyi new file mode 100644 index 00000000..9b41e88b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/base.pyi @@ -0,0 +1,222 @@ +from _typeshed import Incomplete +from typing import Any + +from sqlalchemy.sql import ClauseElement + +from ...engine import default +from ...sql import compiler, sqltypes +from ...types import ( + BLOB as BLOB, + CHAR as CHAR, + CLOB as CLOB, + FLOAT as FLOAT, + INTEGER as INTEGER, + NCHAR as NCHAR, + NVARCHAR as NVARCHAR, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, +) + +RESERVED_WORDS: Any +NO_ARG_FNS: Any + +class RAW(sqltypes._Binary): + __visit_name__: str + +OracleRaw = RAW + +class NCLOB(sqltypes.Text): + __visit_name__: str + +class VARCHAR2(VARCHAR): + __visit_name__: str + +NVARCHAR2 = NVARCHAR + +class NUMBER(sqltypes.Numeric, sqltypes.Integer): + __visit_name__: str + def __init__( + self, precision: Incomplete | None = ..., scale: Incomplete | None = ..., asdecimal: Incomplete | None = ... + ) -> None: ... + def adapt(self, impltype): ... + +class DOUBLE_PRECISION(sqltypes.Float): + __visit_name__: str + +class BINARY_DOUBLE(sqltypes.Float): + __visit_name__: str + +class BINARY_FLOAT(sqltypes.Float): + __visit_name__: str + +class BFILE(sqltypes.LargeBinary): + __visit_name__: str + +class LONG(sqltypes.Text): + __visit_name__: str + +class DATE(sqltypes.DateTime): + __visit_name__: str + +class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval): + __visit_name__: str + day_precision: Any + second_precision: Any + def __init__(self, day_precision: Incomplete | None = ..., second_precision: Incomplete | None = ...) -> None: ... + def as_generic(self, allow_nulltype: bool = ...): ... + def coerce_compared_value(self, op, value): ... 
+ +class ROWID(sqltypes.TypeEngine): + __visit_name__: str + +class _OracleBoolean(sqltypes.Boolean): + def get_dbapi_type(self, dbapi): ... + +colspecs: Any +ischema_names: Any + +class OracleTypeCompiler(compiler.GenericTypeCompiler): + def visit_datetime(self, type_, **kw): ... + def visit_float(self, type_, **kw): ... + def visit_unicode(self, type_, **kw): ... + def visit_INTERVAL(self, type_, **kw): ... + def visit_LONG(self, type_, **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_DOUBLE_PRECISION(self, type_, **kw): ... + def visit_BINARY_DOUBLE(self, type_, **kw): ... + def visit_BINARY_FLOAT(self, type_, **kw): ... + def visit_FLOAT(self, type_, **kw): ... + def visit_NUMBER(self, type_, **kw): ... + def visit_string(self, type_, **kw): ... + def visit_VARCHAR2(self, type_, **kw): ... + def visit_NVARCHAR2(self, type_, **kw): ... + visit_NVARCHAR: Any + def visit_VARCHAR(self, type_, **kw): ... + def visit_text(self, type_, **kw): ... + def visit_unicode_text(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_big_integer(self, type_, **kw): ... + def visit_boolean(self, type_, **kw): ... + def visit_RAW(self, type_, **kw): ... + def visit_ROWID(self, type_, **kw): ... + +class OracleCompiler(compiler.SQLCompiler): + compound_keywords: Any + def __init__(self, *args, **kwargs) -> None: ... + def visit_mod_binary(self, binary, operator, **kw): ... + def visit_now_func(self, fn, **kw): ... + def visit_char_length_func(self, fn, **kw): ... + def visit_match_op_binary(self, binary, operator, **kw): ... + def visit_true(self, expr, **kw): ... + def visit_false(self, expr, **kw): ... + def get_cte_preamble(self, recursive): ... + def get_select_hint_text(self, byfroms): ... + def function_argspec(self, fn, **kw): ... + def visit_function(self, func, **kw): ... + def visit_table_valued_column(self, element, **kw): ... + def default_from(self): ... + def visit_join(self, join, from_linter: Incomplete | None = ..., **kwargs): ... # type: ignore[override] + def visit_outer_join_column(self, vc, **kw): ... + def visit_sequence(self, seq, **kw): ... + def get_render_as_alias_suffix(self, alias_name_text): ... + has_out_parameters: bool + def returning_clause(self, stmt, returning_cols): ... + def translate_select_structure(self, select_stmt, **kwargs): ... + def limit_clause(self, select, **kw): ... + def visit_empty_set_expr(self, type_): ... + def for_update_clause(self, select, **kw): ... + def visit_is_distinct_from_binary(self, binary, operator, **kw): ... + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw): ... + +class OracleDDLCompiler(compiler.DDLCompiler): + def define_constraint_cascades(self, constraint): ... + def visit_drop_table_comment(self, drop): ... + def visit_create_index(self, create): ... + def post_create_table(self, table): ... + def get_identity_options(self, identity_options): ... + def visit_computed_column(self, generated): ... + def visit_identity_column(self, identity, **kw): ... + +class OracleIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + illegal_initial_characters: Any + def format_savepoint(self, savepoint): ... + +class OracleExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): ... 
+ +class OracleDialect(default.DefaultDialect): + name: str + supports_statement_cache: bool + supports_alter: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + max_identifier_length: int + supports_simple_order_by_label: bool + cte_follows_insert: bool + supports_sequences: bool + sequences_optional: bool + postfetch_lastrowid: bool + default_paramstyle: str + colspecs: Any + ischema_names: Any + requires_name_normalize: bool + supports_comments: bool + supports_default_values: bool + supports_default_metavalue: bool + supports_empty_insert: bool + supports_identity_columns: bool + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + reflection_options: Any + construct_arguments: Any + use_ansi: Any + optimize_limits: Any + exclude_tablespaces: Any + def __init__( + self, + use_ansi: bool = ..., + optimize_limits: bool = ..., + use_binds_for_limits: Incomplete | None = ..., + use_nchar_for_unicode: bool = ..., + exclude_tablespaces=..., + **kwargs, + ) -> None: ... + implicit_returning: Any + def initialize(self, connection) -> None: ... + def do_release_savepoint(self, connection, name) -> None: ... + def get_isolation_level(self, connection) -> None: ... + def get_default_isolation_level(self, dbapi_conn): ... + def set_isolation_level(self, connection, level) -> None: ... + def has_table(self, connection, table_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def has_sequence(self, connection, sequence_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_temp_table_names(self, connection, **kw): ... + def get_view_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_sequence_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_table_options(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_table_comment( + self, connection, table_name, schema: Incomplete | None = ..., resolve_synonyms: bool = ..., dblink: str = ..., **kw + ): ... + def get_indexes( + self, connection, table_name, schema: Incomplete | None = ..., resolve_synonyms: bool = ..., dblink: str = ..., **kw + ): ... + def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_view_definition( + self, connection, view_name, schema: Incomplete | None = ..., resolve_synonyms: bool = ..., dblink: str = ..., **kw + ): ... + def get_check_constraints(self, connection, table_name, schema: Incomplete | None = ..., include_all: bool = ..., **kw): ... + +class _OuterJoinColumn(ClauseElement): + __visit_name__: str + column: Any + def __init__(self, column) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/cx_oracle.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/cx_oracle.pyi new file mode 100644 index 00000000..3484c83e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/cx_oracle.pyi @@ -0,0 +1,128 @@ +from _typeshed import Incomplete +from typing import Any + +import sqlalchemy.types as sqltypes + +from . import base as oracle +from .base import OracleCompiler, OracleDialect, OracleExecutionContext + +class _OracleInteger(sqltypes.Integer): + def get_dbapi_type(self, dbapi): ... + +class _OracleNumeric(sqltypes.Numeric): + is_number: bool + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype) -> None: ... + +class _OracleBinaryFloat(_OracleNumeric): + def get_dbapi_type(self, dbapi): ... + +class _OracleBINARY_FLOAT(_OracleBinaryFloat, oracle.BINARY_FLOAT): ... +class _OracleBINARY_DOUBLE(_OracleBinaryFloat, oracle.BINARY_DOUBLE): ... + +class _OracleNUMBER(_OracleNumeric): + is_number: bool + +class _OracleDate(sqltypes.Date): + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class _OracleChar(sqltypes.CHAR): + def get_dbapi_type(self, dbapi): ... + +class _OracleNChar(sqltypes.NCHAR): + def get_dbapi_type(self, dbapi): ... + +class _OracleUnicodeStringNCHAR(oracle.NVARCHAR2): + def get_dbapi_type(self, dbapi): ... + +class _OracleUnicodeStringCHAR(sqltypes.Unicode): + def get_dbapi_type(self, dbapi): ... + +class _OracleUnicodeTextNCLOB(oracle.NCLOB): + def get_dbapi_type(self, dbapi): ... + +class _OracleUnicodeTextCLOB(sqltypes.UnicodeText): + def get_dbapi_type(self, dbapi): ... + +class _OracleText(sqltypes.Text): + def get_dbapi_type(self, dbapi): ... + +class _OracleLong(oracle.LONG): + def get_dbapi_type(self, dbapi): ... + +class _OracleString(sqltypes.String): ... + +class _OracleEnum(sqltypes.Enum): + def bind_processor(self, dialect): ... + +class _OracleBinary(sqltypes.LargeBinary): + def get_dbapi_type(self, dbapi): ... + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class _OracleInterval(oracle.INTERVAL): + def get_dbapi_type(self, dbapi): ... + +class _OracleRaw(oracle.RAW): ... + +class _OracleRowid(oracle.ROWID): + def get_dbapi_type(self, dbapi): ... + +class OracleCompiler_cx_oracle(OracleCompiler): + def bindparam_string(self, name, **kw): ... + +class OracleExecutionContext_cx_oracle(OracleExecutionContext): + out_parameters: Any + include_set_input_sizes: Any + def pre_exec(self) -> None: ... + cursor_fetch_strategy: Any + def post_exec(self) -> None: ... + def create_cursor(self): ... + def get_out_parameter_values(self, out_param_names): ... 
+ +class OracleDialect_cx_oracle(OracleDialect): + supports_statement_cache: bool + statement_compiler: Any + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + use_setinputsizes: bool + driver: str + colspecs: Any + execute_sequence_format: Any + arraysize: Any + encoding_errors: Any + auto_convert_lobs: Any + coerce_to_unicode: Any + coerce_to_decimal: Any + cx_oracle_ver: Any + def __init__( + self, + auto_convert_lobs: bool = ..., + coerce_to_unicode: bool = ..., + coerce_to_decimal: bool = ..., + arraysize: int = ..., + encoding_errors: Incomplete | None = ..., + threaded: Incomplete | None = ..., + **kwargs, + ): ... + @classmethod + def dbapi(cls): ... + def initialize(self, connection) -> None: ... + def get_isolation_level(self, connection): ... + def set_isolation_level(self, connection, level) -> None: ... + def on_connect(self): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def create_xid(self): ... + def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ... + def do_recover_twophase(self, connection) -> None: ... + +dialect = OracleDialect_cx_oracle diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/provision.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/provision.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/__init__.pyi new file mode 100644 index 00000000..9a213981 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/__init__.pyi @@ -0,0 +1,100 @@ +import typing + +from .array import ARRAY as ARRAY, All as All, Any as Any, array as array +from .base import ( + BIGINT as BIGINT, + BIT as BIT, + BOOLEAN as BOOLEAN, + BYTEA as BYTEA, + CHAR as CHAR, + CIDR as CIDR, + DATE as DATE, + DOUBLE_PRECISION as DOUBLE_PRECISION, + ENUM as ENUM, + FLOAT as FLOAT, + INET as INET, + INTEGER as INTEGER, + INTERVAL as INTERVAL, + MACADDR as MACADDR, + MACADDR8 as MACADDR8, + MONEY as MONEY, + NUMERIC as NUMERIC, + OID as OID, + REAL as REAL, + REGCLASS as REGCLASS, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + TSVECTOR as TSVECTOR, + UUID as UUID, + VARCHAR as VARCHAR, + CreateEnumType as CreateEnumType, + DropEnumType as DropEnumType, +) +from .dml import Insert as Insert, insert as insert +from .ext import ExcludeConstraint as ExcludeConstraint, aggregate_order_by as aggregate_order_by, array_agg as array_agg +from .hstore import HSTORE as HSTORE, hstore as hstore +from .json import JSON as JSON, JSONB as JSONB +from .ranges import ( + DATERANGE as DATERANGE, + INT4RANGE as 
INT4RANGE, + INT8RANGE as INT8RANGE, + NUMRANGE as NUMRANGE, + TSRANGE as TSRANGE, + TSTZRANGE as TSTZRANGE, +) + +__all__ = ( + "INTEGER", + "BIGINT", + "SMALLINT", + "VARCHAR", + "CHAR", + "TEXT", + "NUMERIC", + "FLOAT", + "REAL", + "INET", + "CIDR", + "UUID", + "BIT", + "MACADDR", + "MACADDR8", + "MONEY", + "OID", + "REGCLASS", + "DOUBLE_PRECISION", + "TIMESTAMP", + "TIME", + "DATE", + "BYTEA", + "BOOLEAN", + "INTERVAL", + "ARRAY", + "ENUM", + "dialect", + "array", + "HSTORE", + "hstore", + "INT4RANGE", + "INT8RANGE", + "NUMRANGE", + "DATERANGE", + "TSVECTOR", + "TSRANGE", + "TSTZRANGE", + "JSON", + "JSONB", + "Any", + "All", + "DropEnumType", + "CreateEnumType", + "ExcludeConstraint", + "aggregate_order_by", + "array_agg", + "insert", + "Insert", +) + +dialect: typing.Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/array.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/array.pyi new file mode 100644 index 00000000..fe7c0e8b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/array.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete +from typing import Any as _Any + +import sqlalchemy.types as sqltypes + +from ...sql import expression + +def Any(other, arrexpr, operator=...): ... +def All(other, arrexpr, operator=...): ... + +class array(expression.ClauseList, expression.ColumnElement[_Any]): + __visit_name__: str + stringify_dialect: str + inherit_cache: bool + type: _Any + def __init__(self, clauses, **kw) -> None: ... + def self_group(self, against: Incomplete | None = ...): ... + +CONTAINS: _Any +CONTAINED_BY: _Any +OVERLAP: _Any + +class ARRAY(sqltypes.ARRAY): + class Comparator(sqltypes.ARRAY.Comparator[_Any]): + def contains(self, other, **kwargs): ... + def contained_by(self, other): ... + def overlap(self, other): ... + comparator_factory: _Any + item_type: _Any + as_tuple: _Any + dimensions: _Any + zero_indexes: _Any + def __init__( + self, item_type, as_tuple: bool = ..., dimensions: Incomplete | None = ..., zero_indexes: bool = ... + ) -> None: ... + @property + def hashable(self): ... + @property + def python_type(self): ... + def compare_values(self, x, y): ... + def bind_expression(self, bindvalue): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/asyncpg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/asyncpg.pyi new file mode 100644 index 00000000..dfe48e37 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/asyncpg.pyi @@ -0,0 +1,203 @@ +from _typeshed import Incomplete +from typing import Any + +from ...engine import AdaptedConnection +from ...sql import sqltypes +from . import json +from .base import ENUM, INTERVAL, OID, REGCLASS, UUID, PGCompiler, PGDialect, PGExecutionContext, PGIdentifierPreparer + +class AsyncpgTime(sqltypes.Time): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgDate(sqltypes.Date): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgDateTime(sqltypes.DateTime): + def get_dbapi_type(self, dbapi): ... 
+ +class AsyncpgBoolean(sqltypes.Boolean): + def get_dbapi_type(self, dbapi): ... + +class AsyncPgInterval(INTERVAL): + def get_dbapi_type(self, dbapi): ... + @classmethod + def adapt_emulated_to_native(cls, interval, **kw): ... + +class AsyncPgEnum(ENUM): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgInteger(sqltypes.Integer): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgBigInteger(sqltypes.BigInteger): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgJSON(json.JSON): + def get_dbapi_type(self, dbapi): ... + def result_processor(self, dialect, coltype) -> None: ... + +class AsyncpgJSONB(json.JSONB): + def get_dbapi_type(self, dbapi): ... + def result_processor(self, dialect, coltype) -> None: ... + +class AsyncpgJSONIndexType(sqltypes.JSON.JSONIndexType): + def get_dbapi_type(self, dbapi) -> None: ... + +class AsyncpgJSONIntIndexType(sqltypes.JSON.JSONIntIndexType): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgJSONStrIndexType(sqltypes.JSON.JSONStrIndexType): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgJSONPathType(json.JSONPathType): + def bind_processor(self, dialect): ... + +class AsyncpgUUID(UUID): + def get_dbapi_type(self, dbapi): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class AsyncpgNumeric(sqltypes.Numeric): + def get_dbapi_type(self, dbapi): ... + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class AsyncpgFloat(AsyncpgNumeric): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgREGCLASS(REGCLASS): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgOID(OID): + def get_dbapi_type(self, dbapi): ... + +class PGExecutionContext_asyncpg(PGExecutionContext): + def handle_dbapi_exception(self, e) -> None: ... + exclude_set_input_sizes: Any + def pre_exec(self) -> None: ... + def create_server_side_cursor(self): ... + +class PGCompiler_asyncpg(PGCompiler): ... +class PGIdentifierPreparer_asyncpg(PGIdentifierPreparer): ... + +class AsyncAdapt_asyncpg_cursor: + server_side: bool + description: Any + arraysize: int + rowcount: int + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + def execute(self, operation, parameters: Incomplete | None = ...) -> None: ... + def executemany(self, operation, seq_of_parameters): ... + def setinputsizes(self, *inputsizes) -> None: ... + def __iter__(self): ... + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_asyncpg_ss_cursor(AsyncAdapt_asyncpg_cursor): + server_side: bool + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + def __aiter__(self): ... + async def __anext__(self) -> None: ... + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... + def executemany(self, operation, seq_of_parameters) -> None: ... + +class AsyncAdapt_asyncpg_connection(AdaptedConnection): + await_: Any + dbapi: Any + isolation_level: str + readonly: bool + deferrable: bool + def __init__(self, dbapi, connection, prepared_statement_cache_size: int = ...) -> None: ... + @property + def autocommit(self): ... + @autocommit.setter + def autocommit(self, value) -> None: ... + def set_isolation_level(self, level) -> None: ... + def cursor(self, server_side: bool = ...): ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + def close(self) -> None: ... 
+ +class AsyncAdaptFallback_asyncpg_connection(AsyncAdapt_asyncpg_connection): + await_: Any + +class AsyncAdapt_asyncpg_dbapi: + asyncpg: Any + paramstyle: str + def __init__(self, asyncpg) -> None: ... + def connect(self, *arg, **kw): ... + + class Error(Exception): ... + class Warning(Exception): ... + class InterfaceError(Error): ... + class DatabaseError(Error): ... + class InternalError(DatabaseError): ... + class OperationalError(DatabaseError): ... + class ProgrammingError(DatabaseError): ... + class IntegrityError(DatabaseError): ... + class DataError(DatabaseError): ... + class NotSupportedError(DatabaseError): ... + class InternalServerError(InternalError): ... + + class InvalidCachedStatementError(NotSupportedError): + def __init__(self, message) -> None: ... + + def Binary(self, value): ... + STRING: Any + TIMESTAMP: Any + TIMESTAMP_W_TZ: Any + TIME: Any + DATE: Any + INTERVAL: Any + NUMBER: Any + FLOAT: Any + BOOLEAN: Any + INTEGER: Any + BIGINTEGER: Any + BYTES: Any + DECIMAL: Any + JSON: Any + JSONB: Any + ENUM: Any + UUID: Any + BYTEA: Any + DATETIME: Any + BINARY: Any + +class PGDialect_asyncpg(PGDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_server_side_cursors: bool + supports_unicode_binds: bool + default_paramstyle: str + supports_sane_multi_rowcount: bool + statement_compiler: Any + preparer: Any + use_setinputsizes: bool + use_native_uuid: bool + colspecs: Any + is_async: bool + @classmethod + def dbapi(cls): ... + def set_isolation_level(self, connection, level) -> None: ... + def set_readonly(self, connection, value) -> None: ... + def get_readonly(self, connection): ... + def set_deferrable(self, connection, value) -> None: ... + def get_deferrable(self, connection): ... + def create_connect_args(self, url): ... + @classmethod + def get_pool_class(cls, url): ... + def is_disconnect(self, e, connection, cursor): ... + def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ... + async def setup_asyncpg_json_codec(self, conn): ... + async def setup_asyncpg_jsonb_codec(self, conn): ... + def on_connect(self): ... + def get_driver_connection(self, connection): ... 
+ +dialect = PGDialect_asyncpg diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/base.pyi new file mode 100644 index 00000000..d72b1ed8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/base.pyi @@ -0,0 +1,312 @@ +from _typeshed import Incomplete +from typing import Any + +from ...engine import characteristics, default, reflection +from ...schema import _CreateDropBase +from ...sql import compiler, elements, sqltypes +from ...sql.ddl import DDLBase +from ...types import ( + BIGINT as BIGINT, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + DATE as DATE, + FLOAT as FLOAT, + INTEGER as INTEGER, + NUMERIC as NUMERIC, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + VARCHAR as VARCHAR, +) + +IDX_USING: Any +AUTOCOMMIT_REGEXP: Any +RESERVED_WORDS: Any + +class BYTEA(sqltypes.LargeBinary): + __visit_name__: str + +class DOUBLE_PRECISION(sqltypes.Float): + __visit_name__: str + +class INET(sqltypes.TypeEngine): + __visit_name__: str + +PGInet = INET + +class CIDR(sqltypes.TypeEngine): + __visit_name__: str + +PGCidr = CIDR + +class MACADDR(sqltypes.TypeEngine): + __visit_name__: str + +PGMacAddr = MACADDR + +class MACADDR8(sqltypes.TypeEngine): + __visit_name__: str + +PGMacAddr8 = MACADDR8 + +class MONEY(sqltypes.TypeEngine): + __visit_name__: str + +class OID(sqltypes.TypeEngine): + __visit_name__: str + +class REGCLASS(sqltypes.TypeEngine): + __visit_name__: str + +class TIMESTAMP(sqltypes.TIMESTAMP): + precision: Any + def __init__(self, timezone: bool = ..., precision: Incomplete | None = ...) -> None: ... + +class TIME(sqltypes.TIME): + precision: Any + def __init__(self, timezone: bool = ..., precision: Incomplete | None = ...) -> None: ... + +class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval): + __visit_name__: str + native: bool + precision: Any + fields: Any + def __init__(self, precision: Incomplete | None = ..., fields: Incomplete | None = ...) -> None: ... + @classmethod + def adapt_emulated_to_native(cls, interval, **kw): ... + def as_generic(self, allow_nulltype: bool = ...): ... + @property + def python_type(self): ... + def coerce_compared_value(self, op, value): ... + +PGInterval = INTERVAL + +class BIT(sqltypes.TypeEngine): + __visit_name__: str + length: Any + varying: Any + def __init__(self, length: Incomplete | None = ..., varying: bool = ...) -> None: ... + +PGBit = BIT + +class UUID(sqltypes.TypeEngine): + __visit_name__: str + as_uuid: Any + def __init__(self, as_uuid: bool = ...) -> None: ... + def coerce_compared_value(self, op, value): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +PGUuid = UUID + +class TSVECTOR(sqltypes.TypeEngine): + __visit_name__: str + +class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): # type: ignore[misc] # base classes incompatible + native_enum: bool + create_type: Any + def __init__(self, *enums, **kw) -> None: ... + @classmethod + def adapt_emulated_to_native(cls, impl, **kw): ... + def create(self, bind: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + def drop(self, bind: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... 
+ + class EnumGenerator(DDLBase): + checkfirst: Any + def __init__(self, dialect, connection, checkfirst: bool = ..., **kwargs) -> None: ... + def visit_enum(self, enum) -> None: ... + + class EnumDropper(DDLBase): + checkfirst: Any + def __init__(self, dialect, connection, checkfirst: bool = ..., **kwargs) -> None: ... + def visit_enum(self, enum) -> None: ... + +class _ColonCast(elements.Cast): + __visit_name__: str + type: Any + clause: Any + typeclause: Any + def __init__(self, expression, type_) -> None: ... + +colspecs: Any +ischema_names: Any + +class PGCompiler(compiler.SQLCompiler): + def visit_colon_cast(self, element, **kw): ... + def visit_array(self, element, **kw): ... + def visit_slice(self, element, **kw): ... + def visit_json_getitem_op_binary(self, binary, operator, _cast_applied: bool = ..., **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, _cast_applied: bool = ..., **kw): ... + def visit_getitem_binary(self, binary, operator, **kw): ... + def visit_aggregate_order_by(self, element, **kw): ... + def visit_match_op_binary(self, binary, operator, **kw): ... + def visit_ilike_op_binary(self, binary, operator, **kw): ... + def visit_not_ilike_op_binary(self, binary, operator, **kw): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw): ... + def visit_empty_set_expr(self, element_types): ... + def render_literal_value(self, value, type_): ... + def visit_sequence(self, seq, **kw): ... + def limit_clause(self, select, **kw): ... + def format_from_hint_text(self, sqltext, table, hint, iscrud): ... + def get_select_precolumns(self, select, **kw): ... + def for_update_clause(self, select, **kw): ... + def returning_clause(self, stmt, returning_cols): ... + def visit_substring_func(self, func, **kw): ... + def visit_on_conflict_do_nothing(self, on_conflict, **kw): ... + def visit_on_conflict_do_update(self, on_conflict, **kw): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ... + def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ... + def fetch_clause(self, select, **kw): ... + +class PGDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): ... + def visit_check_constraint(self, constraint): ... + def visit_drop_table_comment(self, drop): ... + def visit_create_enum_type(self, create): ... + def visit_drop_enum_type(self, drop): ... + def visit_create_index(self, create): ... + def visit_drop_index(self, drop): ... + def visit_exclude_constraint(self, constraint, **kw): ... + def post_create_table(self, table): ... + def visit_computed_column(self, generated): ... + def visit_create_sequence(self, create, **kw): ... + +class PGTypeCompiler(compiler.GenericTypeCompiler): + def visit_TSVECTOR(self, type_, **kw): ... + def visit_INET(self, type_, **kw): ... + def visit_CIDR(self, type_, **kw): ... + def visit_MACADDR(self, type_, **kw): ... + def visit_MONEY(self, type_, **kw): ... + def visit_OID(self, type_, **kw): ... + def visit_REGCLASS(self, type_, **kw): ... + def visit_FLOAT(self, type_, **kw): ... + def visit_DOUBLE_PRECISION(self, type_, **kw): ... + def visit_BIGINT(self, type_, **kw): ... + def visit_HSTORE(self, type_, **kw): ... + def visit_JSON(self, type_, **kw): ... + def visit_JSONB(self, type_, **kw): ... + def visit_INT4RANGE(self, type_, **kw): ... 
+ def visit_INT8RANGE(self, type_, **kw): ... + def visit_NUMRANGE(self, type_, **kw): ... + def visit_DATERANGE(self, type_, **kw): ... + def visit_TSRANGE(self, type_, **kw): ... + def visit_TSTZRANGE(self, type_, **kw): ... + def visit_datetime(self, type_, **kw): ... + def visit_enum(self, type_, **kw): ... + def visit_ENUM(self, type_, identifier_preparer: Incomplete | None = ..., **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_INTERVAL(self, type_, **kw): ... + def visit_BIT(self, type_, **kw): ... + def visit_UUID(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_BYTEA(self, type_, **kw): ... + def visit_ARRAY(self, type_, **kw): ... + +class PGIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + def format_type(self, type_, use_schema: bool = ...): ... + +class PGInspector(reflection.Inspector): + def get_table_oid(self, table_name, schema: Incomplete | None = ...): ... + def get_enums(self, schema: Incomplete | None = ...): ... + def get_foreign_table_names(self, schema: Incomplete | None = ...): ... + def get_view_names(self, schema: Incomplete | None = ..., include=...): ... + +class CreateEnumType(_CreateDropBase): + __visit_name__: str + +class DropEnumType(_CreateDropBase): + __visit_name__: str + +class PGExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): ... + def get_insert_default(self, column): ... + def should_autocommit_text(self, statement): ... + +class PGReadOnlyConnectionCharacteristic(characteristics.ConnectionCharacteristic): + transactional: bool + def reset_characteristic(self, dialect, dbapi_conn) -> None: ... + def set_characteristic(self, dialect, dbapi_conn, value) -> None: ... + def get_characteristic(self, dialect, dbapi_conn): ... + +class PGDeferrableConnectionCharacteristic(characteristics.ConnectionCharacteristic): + transactional: bool + def reset_characteristic(self, dialect, dbapi_conn) -> None: ... + def set_characteristic(self, dialect, dbapi_conn, value) -> None: ... + def get_characteristic(self, dialect, dbapi_conn): ... + +class PGDialect(default.DefaultDialect): + name: str + supports_statement_cache: bool + supports_alter: bool + max_identifier_length: int + supports_sane_rowcount: bool + supports_native_enum: bool + supports_native_boolean: bool + supports_smallserial: bool + supports_sequences: bool + sequences_optional: bool + preexecute_autoincrement_sequences: bool + postfetch_lastrowid: bool + supports_comments: bool + supports_default_values: bool + supports_default_metavalue: bool + supports_empty_insert: bool + supports_multivalues_insert: bool + supports_identity_columns: bool + default_paramstyle: str + ischema_names: Any + colspecs: Any + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + inspector: Any + isolation_level: Any + implicit_returning: bool + full_returning: bool + connection_characteristics: Any + construct_arguments: Any + reflection_options: Any + def __init__( + self, + isolation_level: Incomplete | None = ..., + json_serializer: Incomplete | None = ..., + json_deserializer: Incomplete | None = ..., + **kwargs, + ) -> None: ... + def initialize(self, connection) -> None: ... + def on_connect(self): ... + def set_isolation_level(self, connection, level) -> None: ... + def get_isolation_level(self, connection): ... + def set_readonly(self, connection, value) -> None: ... + def get_readonly(self, connection) -> None: ... 
+ def set_deferrable(self, connection, value) -> None: ... + def get_deferrable(self, connection) -> None: ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_recover_twophase(self, connection): ... + def has_schema(self, connection, schema): ... + def has_table(self, connection, table_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def has_sequence(self, connection, sequence_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def has_type(self, connection, type_name, schema: Incomplete | None = ...): ... + def get_table_oid(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_view_names(self, connection, schema: Incomplete | None = ..., include=..., **kw): ... + def get_sequence_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Incomplete | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_foreign_keys( + self, connection, table_name, schema: Incomplete | None = ..., postgresql_ignore_search_path: bool = ..., **kw + ): ... + def get_indexes(self, connection, table_name, schema, **kw): ... + def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_table_comment(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_check_constraints(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/dml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/dml.pyi new file mode 100644 index 00000000..9ddcfcce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/dml.pyi @@ -0,0 +1,50 @@ +from _typeshed import Incomplete +from typing import Any + +from ...sql.dml import Insert as StandardInsert +from ...sql.elements import ClauseElement +from ...util import memoized_property + +class Insert(StandardInsert): + stringify_dialect: str + inherit_cache: bool + @memoized_property + def excluded(self): ... + def on_conflict_do_update( + self, + constraint: Incomplete | None = ..., + index_elements: Incomplete | None = ..., + index_where: Incomplete | None = ..., + set_: Incomplete | None = ..., + where: Incomplete | None = ..., + ) -> None: ... + def on_conflict_do_nothing( + self, constraint: Incomplete | None = ..., index_elements: Incomplete | None = ..., index_where: Incomplete | None = ... + ) -> None: ... 
+ +insert: Any + +class OnConflictClause(ClauseElement): + stringify_dialect: str + constraint_target: Any + inferred_target_elements: Any + inferred_target_whereclause: Any + def __init__( + self, constraint: Incomplete | None = ..., index_elements: Incomplete | None = ..., index_where: Incomplete | None = ... + ) -> None: ... + +class OnConflictDoNothing(OnConflictClause): + __visit_name__: str + +class OnConflictDoUpdate(OnConflictClause): + __visit_name__: str + update_values_to_set: Any + update_whereclause: Any + def __init__( + self, + constraint: Incomplete | None = ..., + index_elements: Incomplete | None = ..., + index_where: Incomplete | None = ..., + set_: Incomplete | None = ..., + where: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ext.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ext.pyi new file mode 100644 index 00000000..c9e682f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ext.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete +from typing import Any + +from ...sql import expression +from ...sql.schema import ColumnCollectionConstraint + +class aggregate_order_by(expression.ColumnElement[Any]): + __visit_name__: str + stringify_dialect: str + target: Any + type: Any + order_by: Any + def __init__(self, target, *order_by) -> None: ... + def self_group(self, against: Incomplete | None = ...): ... + def get_children(self, **kwargs): ... + +class ExcludeConstraint(ColumnCollectionConstraint): + __visit_name__: str + where: Any + inherit_cache: bool + create_drop_stringify_dialect: str + operators: Any + using: Any + ops: Any + def __init__(self, *elements, **kw) -> None: ... + +def array_agg(*arg, **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/hstore.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/hstore.pyi new file mode 100644 index 00000000..3f37cdef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/hstore.pyi @@ -0,0 +1,69 @@ +from _typeshed import Incomplete +from typing import Any + +import sqlalchemy.types as sqltypes + +from ...sql import functions as sqlfunc + +class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine): + __visit_name__: str + hashable: bool + text_type: Any + def __init__(self, text_type: Incomplete | None = ...) -> None: ... + + class Comparator(sqltypes.Indexable.Comparator[Any], sqltypes.Concatenable.Comparator[Any]): + def has_key(self, other): ... + def has_all(self, other): ... + def has_any(self, other): ... + def contains(self, other, **kwargs): ... + def contained_by(self, other): ... + def defined(self, key): ... + def delete(self, key): ... + def slice(self, array): ... + def keys(self): ... + def vals(self): ... + def array(self): ... + def matrix(self): ... + comparator_factory: Any + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... 
+ +class hstore(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreDefinedFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreDeleteFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreSliceFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreKeysFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreValsFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreArrayFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreMatrixFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/json.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/json.pyi new file mode 100644 index 00000000..728842b8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/json.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete +from typing import Any + +import sqlalchemy.types as sqltypes + +class JSONPathType(sqltypes.JSON.JSONPathType): + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + +class JSON(sqltypes.JSON): + astext_type: Any + def __init__(self, none_as_null: bool = ..., astext_type: Incomplete | None = ...) -> None: ... + + class Comparator(sqltypes.JSON.Comparator): + @property + def astext(self): ... + comparator_factory: Any + +class JSONB(JSON): + __visit_name__: str + + class Comparator(JSON.Comparator): + def has_key(self, other): ... + def has_all(self, other): ... + def has_any(self, other): ... + def contains(self, other, **kwargs): ... + def contained_by(self, other): ... + comparator_factory: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pg8000.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pg8000.pyi new file mode 100644 index 00000000..d87f2c0b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pg8000.pyi @@ -0,0 +1,135 @@ +from _typeshed import Incomplete +from typing import Any + +import sqlalchemy.types as sqltypes + +from .array import ARRAY as PGARRAY +from .base import ENUM, INTERVAL, UUID, PGCompiler, PGDialect, PGExecutionContext, PGIdentifierPreparer +from .json import JSON, JSONB, JSONPathType + +class _PGNumeric(sqltypes.Numeric): + def result_processor(self, dialect, coltype): ... + +class _PGNumericNoBind(_PGNumeric): + def bind_processor(self, dialect) -> None: ... + +class _PGJSON(JSON): + def result_processor(self, dialect, coltype) -> None: ... + def get_dbapi_type(self, dbapi): ... + +class _PGJSONB(JSONB): + def result_processor(self, dialect, coltype) -> None: ... + def get_dbapi_type(self, dbapi): ... + +class _PGJSONIndexType(sqltypes.JSON.JSONIndexType): + def get_dbapi_type(self, dbapi) -> None: ... + +class _PGJSONIntIndexType(sqltypes.JSON.JSONIntIndexType): + def get_dbapi_type(self, dbapi): ... + +class _PGJSONStrIndexType(sqltypes.JSON.JSONStrIndexType): + def get_dbapi_type(self, dbapi): ... 
+ +class _PGJSONPathType(JSONPathType): + def get_dbapi_type(self, dbapi): ... + +class _PGUUID(UUID): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGEnum(ENUM): + def get_dbapi_type(self, dbapi): ... + +class _PGInterval(INTERVAL): + def get_dbapi_type(self, dbapi): ... + @classmethod + def adapt_emulated_to_native(cls, interval, **kw): ... + +class _PGTimeStamp(sqltypes.DateTime): + def get_dbapi_type(self, dbapi): ... + +class _PGTime(sqltypes.Time): + def get_dbapi_type(self, dbapi): ... + +class _PGInteger(sqltypes.Integer): + def get_dbapi_type(self, dbapi): ... + +class _PGSmallInteger(sqltypes.SmallInteger): + def get_dbapi_type(self, dbapi): ... + +class _PGNullType(sqltypes.NullType): + def get_dbapi_type(self, dbapi): ... + +class _PGBigInteger(sqltypes.BigInteger): + def get_dbapi_type(self, dbapi): ... + +class _PGBoolean(sqltypes.Boolean): + def get_dbapi_type(self, dbapi): ... + +class _PGARRAY(PGARRAY): + def bind_expression(self, bindvalue): ... + +class PGExecutionContext_pg8000(PGExecutionContext): + def create_server_side_cursor(self): ... + def pre_exec(self) -> None: ... + +class ServerSideCursor: + server_side: bool + ident: Any + cursor: Any + def __init__(self, cursor, ident) -> None: ... + @property + def connection(self): ... + @property + def rowcount(self): ... + @property + def description(self): ... + def execute(self, operation, args=..., stream: Incomplete | None = ...): ... + def executemany(self, operation, param_sets): ... + def fetchone(self): ... + def fetchmany(self, num: Incomplete | None = ...): ... + def fetchall(self): ... + def close(self) -> None: ... + def setinputsizes(self, *sizes) -> None: ... + def setoutputsize(self, size, column: Incomplete | None = ...) -> None: ... + +class PGCompiler_pg8000(PGCompiler): + def visit_mod_binary(self, binary, operator, **kw): ... + +class PGIdentifierPreparer_pg8000(PGIdentifierPreparer): + def __init__(self, *args, **kwargs) -> None: ... + +class PGDialect_pg8000(PGDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + default_paramstyle: str + supports_sane_multi_rowcount: bool + statement_compiler: Any + preparer: Any + supports_server_side_cursors: bool + use_setinputsizes: bool + description_encoding: Any + colspecs: Any + client_encoding: Any + def __init__(self, client_encoding: Incomplete | None = ..., **kwargs) -> None: ... + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def set_isolation_level(self, connection, level) -> None: ... + def set_readonly(self, connection, value) -> None: ... + def get_readonly(self, connection): ... + def set_deferrable(self, connection, value) -> None: ... + def get_deferrable(self, connection): ... + def set_client_encoding(self, connection, client_encoding) -> None: ... + def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_recover_twophase(self, connection): ... + def on_connect(self): ... 
+ +dialect = PGDialect_pg8000 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/provision.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/provision.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2.pyi new file mode 100644 index 00000000..e94fb3e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2.pyi @@ -0,0 +1,96 @@ +from _typeshed import Incomplete +from typing import Any + +import sqlalchemy.types as sqltypes + +from .array import ARRAY as PGARRAY +from .base import ENUM, UUID, PGCompiler, PGDialect, PGExecutionContext, PGIdentifierPreparer +from .hstore import HSTORE +from .json import JSON, JSONB + +logger: Any + +class _PGNumeric(sqltypes.Numeric): + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class _PGEnum(ENUM): + def result_processor(self, dialect, coltype): ... + +class _PGHStore(HSTORE): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGARRAY(PGARRAY): + def bind_expression(self, bindvalue): ... + +class _PGJSON(JSON): + def result_processor(self, dialect, coltype) -> None: ... + +class _PGJSONB(JSONB): + def result_processor(self, dialect, coltype) -> None: ... + +class _PGUUID(UUID): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class PGExecutionContext_psycopg2(PGExecutionContext): + def create_server_side_cursor(self): ... + cursor_fetch_strategy: Any + def post_exec(self) -> None: ... + +class PGCompiler_psycopg2(PGCompiler): ... +class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer): ... + +EXECUTEMANY_PLAIN: Any +EXECUTEMANY_BATCH: Any +EXECUTEMANY_VALUES: Any +EXECUTEMANY_VALUES_PLUS_BATCH: Any + +class PGDialect_psycopg2(PGDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_server_side_cursors: bool + default_paramstyle: str + supports_sane_multi_rowcount: bool + statement_compiler: Any + preparer: Any + psycopg2_version: Any + engine_config_types: Any + colspecs: Any + use_native_unicode: Any + use_native_hstore: Any + use_native_uuid: Any + supports_unicode_binds: Any + client_encoding: Any + executemany_mode: Any + insert_executemany_returning: bool + executemany_batch_page_size: Any + executemany_values_page_size: Any + def __init__( + self, + use_native_unicode: bool = ..., + client_encoding: Incomplete | None = ..., + use_native_hstore: bool = ..., + use_native_uuid: bool = ..., + executemany_mode: str = ..., + executemany_batch_page_size: int = ..., + executemany_values_page_size: int = ..., + **kwargs, + ) -> None: ... + def initialize(self, connection) -> None: ... + @classmethod + def dbapi(cls): ... + def set_isolation_level(self, connection, level) -> None: ... + def set_readonly(self, connection, value) -> None: ... + def get_readonly(self, connection): ... + def set_deferrable(self, connection, value) -> None: ... + def get_deferrable(self, connection): ... + def do_ping(self, dbapi_connection): ... + def on_connect(self): ... 
+ def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = PGDialect_psycopg2 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2cffi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2cffi.pyi new file mode 100644 index 00000000..4456b329 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2cffi.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from .psycopg2 import PGDialect_psycopg2 + +class PGDialect_psycopg2cffi(PGDialect_psycopg2): + driver: str + supports_unicode_statements: bool + supports_statement_cache: bool + FEATURE_VERSION_MAP: Any + @classmethod + def dbapi(cls): ... + +dialect = PGDialect_psycopg2cffi diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pygresql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pygresql.pyi new file mode 100644 index 00000000..a6f0d861 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pygresql.pyi @@ -0,0 +1,52 @@ +from typing import Any + +from ...types import Numeric +from .base import UUID, PGCompiler, PGDialect, PGIdentifierPreparer +from .hstore import HSTORE +from .json import JSON, JSONB + +class _PGNumeric(Numeric): + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class _PGHStore(HSTORE): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGJSON(JSON): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGJSONB(JSONB): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGUUID(UUID): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGCompiler(PGCompiler): + def visit_mod_binary(self, binary, operator, **kw): ... + def post_process_text(self, text): ... + +class _PGIdentifierPreparer(PGIdentifierPreparer): ... + +class PGDialect_pygresql(PGDialect): + driver: str + supports_statement_cache: bool + statement_compiler: Any + preparer: Any + @classmethod + def dbapi(cls): ... + colspecs: Any + dbapi_version: Any + supports_unicode_statements: bool + supports_unicode_binds: bool + has_native_hstore: Any + has_native_json: Any + has_native_uuid: Any + def __init__(self, **kwargs) -> None: ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = PGDialect_pygresql diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pypostgresql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pypostgresql.pyi new file mode 100644 index 00000000..1b5bed22 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pypostgresql.pyi @@ -0,0 +1,31 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +from ...util import memoized_property +from .base import PGDialect, PGExecutionContext + +class PGNumeric(sqltypes.Numeric): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class PGExecutionContext_pypostgresql(PGExecutionContext): ... + +class PGDialect_pypostgresql(PGDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + description_encoding: Any + default_paramstyle: str + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + @memoized_property + def dbapi_exception_translation_map(self): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = PGDialect_pypostgresql diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ranges.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ranges.pyi new file mode 100644 index 00000000..c31ee1b8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ranges.pyi @@ -0,0 +1,36 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +class RangeOperators: + class comparator_factory(sqltypes.Concatenable.Comparator[Any]): + def __ne__(self, other): ... + def contains(self, other, **kw): ... + def contained_by(self, other): ... + def overlaps(self, other): ... + def strictly_left_of(self, other): ... + __lshift__: Any + def strictly_right_of(self, other): ... + __rshift__: Any + def not_extend_right_of(self, other): ... + def not_extend_left_of(self, other): ... + def adjacent_to(self, other): ... + def __add__(self, other): ... 
+ +class INT4RANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class INT8RANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class NUMRANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class DATERANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class TSRANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class TSTZRANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/__init__.pyi new file mode 100644 index 00000000..7dcb0cd7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/__init__.pyi @@ -0,0 +1,45 @@ +from typing import Any + +from .base import ( + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INTEGER as INTEGER, + JSON as JSON, + NUMERIC as NUMERIC, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, +) +from .dml import Insert as Insert, insert as insert + +__all__ = ( + "BLOB", + "BOOLEAN", + "CHAR", + "DATE", + "DATETIME", + "DECIMAL", + "FLOAT", + "INTEGER", + "JSON", + "NUMERIC", + "SMALLINT", + "TEXT", + "TIME", + "TIMESTAMP", + "VARCHAR", + "REAL", + "Insert", + "insert", + "dialect", +) + +dialect: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/aiosqlite.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/aiosqlite.pyi new file mode 100644 index 00000000..8d48580c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/aiosqlite.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete +from typing import Any + +from ...engine import AdaptedConnection +from .base import SQLiteExecutionContext +from .pysqlite import SQLiteDialect_pysqlite + +class AsyncAdapt_aiosqlite_cursor: + server_side: bool + await_: Any + arraysize: int + rowcount: int + description: Any + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + lastrowid: int + def execute(self, operation, parameters: Incomplete | None = ...) -> None: ... + def executemany(self, operation, seq_of_parameters) -> None: ... + def setinputsizes(self, *inputsizes) -> None: ... + def __iter__(self): ... + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_aiosqlite_ss_cursor(AsyncAdapt_aiosqlite_cursor): + server_side: bool + def __init__(self, *arg, **kw) -> None: ... + def close(self) -> None: ... + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_aiosqlite_connection(AdaptedConnection): + await_: Any + dbapi: Any + def __init__(self, dbapi, connection) -> None: ... + @property + def isolation_level(self): ... + @isolation_level.setter + def isolation_level(self, value) -> None: ... + def create_function(self, *args, **kw) -> None: ... + def cursor(self, server_side: bool = ...): ... + def execute(self, *args, **kw): ... 
+ def rollback(self) -> None: ... + def commit(self) -> None: ... + def close(self) -> None: ... + +class AsyncAdaptFallback_aiosqlite_connection(AsyncAdapt_aiosqlite_connection): + await_: Any + +class AsyncAdapt_aiosqlite_dbapi: + aiosqlite: Any + sqlite: Any + paramstyle: str + def __init__(self, aiosqlite, sqlite) -> None: ... + def connect(self, *arg, **kw): ... + +class SQLiteExecutionContext_aiosqlite(SQLiteExecutionContext): + def create_server_side_cursor(self): ... + +class SQLiteDialect_aiosqlite(SQLiteDialect_pysqlite): + driver: str + supports_statement_cache: bool + is_async: bool + supports_server_side_cursors: bool + @classmethod + def dbapi(cls): ... + @classmethod + def get_pool_class(cls, url): ... + def is_disconnect(self, e, connection, cursor): ... + def get_driver_connection(self, connection): ... + +dialect = SQLiteDialect_aiosqlite diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/base.pyi new file mode 100644 index 00000000..bb7d0c25 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/base.pyi @@ -0,0 +1,143 @@ +from _typeshed import Incomplete +from typing import Any + +import sqlalchemy.types as sqltypes + +from ...engine import default +from ...sql import compiler +from ...types import ( + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INTEGER as INTEGER, + NUMERIC as NUMERIC, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, +) +from .json import JSON as JSON + +class _SQliteJson(JSON): + def result_processor(self, dialect, coltype): ... + +class _DateTimeMixin: + def __init__(self, storage_format: Incomplete | None = ..., regexp: Incomplete | None = ..., **kw) -> None: ... + @property + def format_is_text_affinity(self): ... + def adapt(self, cls, **kw): ... + def literal_processor(self, dialect): ... + +class DATETIME(_DateTimeMixin, sqltypes.DateTime): + def __init__(self, *args, **kwargs) -> None: ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class DATE(_DateTimeMixin, sqltypes.Date): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class TIME(_DateTimeMixin, sqltypes.Time): + def __init__(self, *args, **kwargs) -> None: ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +colspecs: Any +ischema_names: Any + +class SQLiteCompiler(compiler.SQLCompiler): + extract_map: Any + def visit_now_func(self, fn, **kw): ... + def visit_localtimestamp_func(self, func, **kw): ... + def visit_true(self, expr, **kw): ... + def visit_false(self, expr, **kw): ... + def visit_char_length_func(self, fn, **kw): ... + def visit_cast(self, cast, **kwargs): ... + def visit_extract(self, extract, **kw): ... + def limit_clause(self, select, **kw): ... + def for_update_clause(self, select, **kw): ... + def visit_is_distinct_from_binary(self, binary, operator, **kw): ... + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ... + def visit_json_getitem_op_binary(self, binary, operator, **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ... + def visit_empty_set_op_expr(self, type_, expand_op): ... 
+ def visit_empty_set_expr(self, element_types): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_on_conflict_do_nothing(self, on_conflict, **kw): ... + def visit_on_conflict_do_update(self, on_conflict, **kw): ... + +class SQLiteDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): ... + def visit_primary_key_constraint(self, constraint): ... + def visit_unique_constraint(self, constraint): ... + def visit_check_constraint(self, constraint): ... + def visit_column_check_constraint(self, constraint): ... + def visit_foreign_key_constraint(self, constraint): ... + def define_constraint_remote_table(self, constraint, table, preparer): ... + def visit_create_index(self, create, include_schema: bool = ..., include_table_schema: bool = ...): ... # type: ignore[override] + def post_create_table(self, table): ... + +class SQLiteTypeCompiler(compiler.GenericTypeCompiler): + def visit_large_binary(self, type_, **kw): ... + def visit_DATETIME(self, type_, **kw): ... + def visit_DATE(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_JSON(self, type_, **kw): ... + +class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + +class SQLiteExecutionContext(default.DefaultExecutionContext): ... + +class SQLiteDialect(default.DefaultDialect): + name: str + supports_alter: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + supports_default_values: bool + supports_default_metavalue: bool + supports_empty_insert: bool + supports_cast: bool + supports_multivalues_insert: bool + tuple_in_values: bool + supports_statement_cache: bool + default_paramstyle: str + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + ischema_names: Any + colspecs: Any + isolation_level: Any + construct_arguments: Any + native_datetime: Any + def __init__( + self, + isolation_level: Incomplete | None = ..., + native_datetime: bool = ..., + json_serializer: Incomplete | None = ..., + json_deserializer: Incomplete | None = ..., + _json_serializer: Incomplete | None = ..., + _json_deserializer: Incomplete | None = ..., + **kwargs, + ) -> None: ... + def set_isolation_level(self, connection, level) -> None: ... + def get_isolation_level(self, connection): ... + def on_connect(self): ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_temp_table_names(self, connection, **kw): ... + def get_temp_view_names(self, connection, **kw): ... + def has_table(self, connection, table_name, schema: Incomplete | None = ...): ... # type: ignore[override] + def get_view_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Incomplete | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_check_constraints(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_indexes(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/dml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/dml.pyi new file mode 100644 index 00000000..da20b23a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/dml.pyi @@ -0,0 +1,44 @@ +from _typeshed import Incomplete +from typing import Any + +from ...sql.dml import Insert as StandardInsert +from ...sql.elements import ClauseElement +from ...util import memoized_property + +class Insert(StandardInsert): + stringify_dialect: str + inherit_cache: bool + @memoized_property + def excluded(self): ... + def on_conflict_do_update( + self, + index_elements: Incomplete | None = ..., + index_where: Incomplete | None = ..., + set_: Incomplete | None = ..., + where: Incomplete | None = ..., + ) -> None: ... + def on_conflict_do_nothing(self, index_elements: Incomplete | None = ..., index_where: Incomplete | None = ...) -> None: ... + +insert: Any + +class OnConflictClause(ClauseElement): + stringify_dialect: str + constraint_target: Any + inferred_target_elements: Any + inferred_target_whereclause: Any + def __init__(self, index_elements: Incomplete | None = ..., index_where: Incomplete | None = ...) -> None: ... + +class OnConflictDoNothing(OnConflictClause): + __visit_name__: str + +class OnConflictDoUpdate(OnConflictClause): + __visit_name__: str + update_values_to_set: Any + update_whereclause: Any + def __init__( + self, + index_elements: Incomplete | None = ..., + index_where: Incomplete | None = ..., + set_: Incomplete | None = ..., + where: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/json.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/json.pyi new file mode 100644 index 00000000..2ced3bee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/json.pyi @@ -0,0 +1,10 @@ +from ...types import JSON as _JSON + +class JSON(_JSON): ... + +class _FormatTypeMixin: + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + +class JSONIndexType(_FormatTypeMixin, _JSON.JSONIndexType): ... +class JSONPathType(_FormatTypeMixin, _JSON.JSONPathType): ... 
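
dml.pyi above declares the SQLite-specific `Insert` construct with `on_conflict_do_update`, `on_conflict_do_nothing`, and the `excluded` property. A hedged usage sketch follows (table and column names are made up for illustration):

# Illustrative sketch only -- exercises the upsert API declared in dml.pyi above.
from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects.sqlite import insert

metadata = MetaData()
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)

stmt = insert(users).values(id=1, name="alice")
# On a primary-key collision, overwrite "name" with the value that failed to insert.
upsert = stmt.on_conflict_do_update(
    index_elements=[users.c.id],
    set_={"name": stmt.excluded.name},
)
# Or silently skip conflicting rows instead.
skip = insert(users).values(id=1, name="alice").on_conflict_do_nothing(index_elements=[users.c.id])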
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/provision.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/provision.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlcipher.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlcipher.pyi new file mode 100644 index 00000000..cf2d8738 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlcipher.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from .pysqlite import SQLiteDialect_pysqlite + +class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite): + driver: str + supports_statement_cache: bool + pragmas: Any + @classmethod + def dbapi(cls): ... + @classmethod + def get_pool_class(cls, url): ... + def on_connect_url(self, url): ... + def create_connect_args(self, url): ... + +dialect = SQLiteDialect_pysqlcipher diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlite.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlite.pyi new file mode 100644 index 00000000..5703abbd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlite.pyi @@ -0,0 +1,28 @@ +from typing import Any + +from .base import DATE, DATETIME, SQLiteDialect + +class _SQLite_pysqliteTimeStamp(DATETIME): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _SQLite_pysqliteDate(DATE): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class SQLiteDialect_pysqlite(SQLiteDialect): + default_paramstyle: str + supports_statement_cache: bool + colspecs: Any + description_encoding: Any + driver: str + @classmethod + def dbapi(cls): ... + @classmethod + def get_pool_class(cls, url): ... + def set_isolation_level(self, connection, level): ... + def on_connect(self): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = SQLiteDialect_pysqlite diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/__init__.pyi new file mode 100644 index 00000000..3b97262f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/__init__.pyi @@ -0,0 +1,58 @@ +from typing import Any + +from .base import ( + BIGINT as BIGINT, + BINARY as BINARY, + BIT as BIT, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + FLOAT as FLOAT, + IMAGE as IMAGE, + INT as INT, + INTEGER as INTEGER, + MONEY as MONEY, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + SMALLINT as SMALLINT, + SMALLMONEY as SMALLMONEY, + TEXT as TEXT, + TIME as TIME, + TINYINT as TINYINT, + UNICHAR as UNICHAR, + UNITEXT as UNITEXT, + UNIVARCHAR as UNIVARCHAR, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, +) + +__all__ = ( + "CHAR", + "VARCHAR", + "TIME", + "NCHAR", + "NVARCHAR", + "TEXT", + "DATE", + "DATETIME", + "FLOAT", + "NUMERIC", + "BIGINT", + "INT", + "INTEGER", + "SMALLINT", + "BINARY", + "VARBINARY", + "UNITEXT", + "UNICHAR", + "UNIVARCHAR", + "IMAGE", + "BIT", + "MONEY", + "SMALLMONEY", + "TINYINT", + "dialect", +) + +dialect: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/base.pyi new file mode 100644 index 00000000..f6a90151 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/base.pyi @@ -0,0 +1,136 @@ +from _typeshed import Incomplete +from typing import Any + +from sqlalchemy import types as sqltypes +from sqlalchemy.engine import default, reflection +from sqlalchemy.sql import compiler +from sqlalchemy.types import ( + BIGINT as BIGINT, + BINARY as BINARY, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INT as INT, + INTEGER as INTEGER, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + Unicode as Unicode, +) + +RESERVED_WORDS: Any + +class _SybaseUnitypeMixin: + def result_processor(self, dialect, coltype): ... + +class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode): + __visit_name__: str + +class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode): + __visit_name__: str + +class UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText): + __visit_name__: str + +class TINYINT(sqltypes.Integer): + __visit_name__: str + +class BIT(sqltypes.TypeEngine): + __visit_name__: str + +class MONEY(sqltypes.TypeEngine): + __visit_name__: str + +class SMALLMONEY(sqltypes.TypeEngine): + __visit_name__: str + +class UNIQUEIDENTIFIER(sqltypes.TypeEngine): + __visit_name__: str + +class IMAGE(sqltypes.LargeBinary): + __visit_name__: str + +class SybaseTypeCompiler(compiler.GenericTypeCompiler): + def visit_large_binary(self, type_, **kw): ... + def visit_boolean(self, type_, **kw): ... + def visit_unicode(self, type_, **kw): ... + def visit_UNICHAR(self, type_, **kw): ... + def visit_UNIVARCHAR(self, type_, **kw): ... + def visit_UNITEXT(self, type_, **kw): ... 
+ def visit_TINYINT(self, type_, **kw): ... + def visit_IMAGE(self, type_, **kw): ... + def visit_BIT(self, type_, **kw): ... + def visit_MONEY(self, type_, **kw): ... + def visit_SMALLMONEY(self, type_, **kw): ... + def visit_UNIQUEIDENTIFIER(self, type_, **kw): ... + +ischema_names: Any + +class SybaseInspector(reflection.Inspector): + def __init__(self, conn) -> None: ... + def get_table_id(self, table_name, schema: Incomplete | None = ...): ... + +class SybaseExecutionContext(default.DefaultExecutionContext): + def set_ddl_autocommit(self, connection, value) -> None: ... + def pre_exec(self) -> None: ... + def post_exec(self) -> None: ... + def get_lastrowid(self): ... + +class SybaseSQLCompiler(compiler.SQLCompiler): + ansi_bind_rules: bool + extract_map: Any + def get_from_hint_text(self, table, text): ... + def limit_clause(self, select, **kw): ... + def visit_extract(self, extract, **kw): ... + def visit_now_func(self, fn, **kw): ... + def for_update_clause(self, select): ... + def order_by_clause(self, select, **kw): ... + def delete_table_clause(self, delete_stmt, from_table, extra_froms): ... + def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ... + +class SybaseDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): ... + def visit_drop_index(self, drop): ... + +class SybaseIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + +class SybaseDialect(default.DefaultDialect): + name: str + supports_unicode_statements: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_statement_cache: bool + supports_native_boolean: bool + supports_unicode_binds: bool + postfetch_lastrowid: bool + colspecs: Any + ischema_names: Any + type_compiler: Any + statement_compiler: Any + ddl_compiler: Any + preparer: Any + inspector: Any + construct_arguments: Any + def __init__(self, *args, **kwargs) -> None: ... + max_identifier_length: int + def initialize(self, connection) -> None: ... + def get_table_id(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_indexes(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = ..., **kw): ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Incomplete | None = ..., **kw): ... + def get_view_names(self, connection, schema: Incomplete | None = ..., **kw): ... + def has_table(self, connection, table_name, schema: Incomplete | None = ...): ... 
# type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/mxodbc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/mxodbc.pyi new file mode 100644 index 00000000..596496ea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/mxodbc.pyi @@ -0,0 +1,9 @@ +from sqlalchemy.connectors.mxodbc import MxODBCConnector +from sqlalchemy.dialects.sybase.base import SybaseDialect, SybaseExecutionContext + +class SybaseExecutionContext_mxodbc(SybaseExecutionContext): ... + +class SybaseDialect_mxodbc(MxODBCConnector, SybaseDialect): + supports_statement_cache: bool + +dialect = SybaseDialect_mxodbc diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pyodbc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pyodbc.pyi new file mode 100644 index 00000000..3940f1f6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pyodbc.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from sqlalchemy import types as sqltypes +from sqlalchemy.connectors.pyodbc import PyODBCConnector +from sqlalchemy.dialects.sybase.base import SybaseDialect, SybaseExecutionContext + +class _SybNumeric_pyodbc(sqltypes.Numeric): + def bind_processor(self, dialect): ... + +class SybaseExecutionContext_pyodbc(SybaseExecutionContext): + def set_ddl_autocommit(self, connection, value) -> None: ... + +class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect): + supports_statement_cache: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + +dialect = SybaseDialect_pyodbc diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pysybase.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pysybase.pyi new file mode 100644 index 00000000..d0a7a81a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pysybase.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete +from typing import Any + +from sqlalchemy import types as sqltypes +from sqlalchemy.dialects.sybase.base import SybaseDialect, SybaseExecutionContext, SybaseSQLCompiler + +class _SybNumeric(sqltypes.Numeric): + def result_processor(self, dialect, type_): ... + +class SybaseExecutionContext_pysybase(SybaseExecutionContext): + def set_ddl_autocommit(self, dbapi_connection, value) -> None: ... + def pre_exec(self) -> None: ... + +class SybaseSQLCompiler_pysybase(SybaseSQLCompiler): + def bindparam_string(self, name, **kw): ... + +class SybaseDialect_pysybase(SybaseDialect): + driver: str + statement_compiler: Any + supports_statement_cache: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = SybaseDialect_pysybase diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/__init__.pyi new file mode 100644 index 00000000..48d019b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/__init__.pyi @@ -0,0 +1,46 @@ +from ..sql import ddl as ddl +from . import events as events, util as util +from .base import ( + Connection as Connection, + Engine as Engine, + NestedTransaction as NestedTransaction, + RootTransaction as RootTransaction, + Transaction as Transaction, + TwoPhaseTransaction as TwoPhaseTransaction, +) +from .create import create_engine as create_engine, engine_from_config as engine_from_config +from .cursor import ( + BaseCursorResult as BaseCursorResult, + BufferedColumnResultProxy as BufferedColumnResultProxy, + BufferedColumnRow as BufferedColumnRow, + BufferedRowResultProxy as BufferedRowResultProxy, + CursorResult as CursorResult, + FullyBufferedResultProxy as FullyBufferedResultProxy, + LegacyCursorResult as LegacyCursorResult, + ResultProxy as ResultProxy, +) +from .interfaces import ( + AdaptedConnection as AdaptedConnection, + Compiled as Compiled, + Connectable as Connectable, + CreateEnginePlugin as CreateEnginePlugin, + Dialect as Dialect, + ExceptionContext as ExceptionContext, + ExecutionContext as ExecutionContext, + TypeCompiler as TypeCompiler, +) +from .mock import create_mock_engine as create_mock_engine +from .reflection import Inspector as Inspector +from .result import ( + ChunkedIteratorResult as ChunkedIteratorResult, + FrozenResult as FrozenResult, + IteratorResult as IteratorResult, + MappingResult as MappingResult, + MergedResult as MergedResult, + Result as Result, + ScalarResult as ScalarResult, + result_tuple as result_tuple, +) +from .row import BaseRow as BaseRow, LegacyRow as LegacyRow, Row as Row, RowMapping as RowMapping +from .url import URL as URL, make_url as make_url +from .util import connection_memoize as connection_memoize diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/base.pyi new file mode 100644 index 00000000..9d737652 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/base.pyi @@ -0,0 +1,215 @@ +from _typeshed import Incomplete +from _typeshed.dbapi import DBAPIConnection +from abc import abstractmethod +from collections.abc import Callable, Mapping +from types import TracebackType +from typing import Any, TypeVar, overload +from typing_extensions import Concatenate, ParamSpec, Self, TypeAlias + +from ..log import Identified, _EchoFlag, echo_property +from ..pool import Pool +from ..sql.compiler import Compiled +from ..sql.ddl import DDLElement +from ..sql.elements import ClauseElement +from ..sql.functions import FunctionElement +from ..sql.schema import DefaultGenerator +from .cursor import CursorResult +from .interfaces import Connectable as Connectable, Dialect, ExceptionContext +from .url import URL +from .util import TransactionalContext + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +_Executable: TypeAlias = ClauseElement | FunctionElement | DDLElement | DefaultGenerator | Compiled + +class 
Connection(Connectable): + engine: Engine + dialect: Dialect + should_close_with_result: bool + dispatch: Any + def __init__( + self, + engine: Engine, + connection: DBAPIConnection | None = ..., + close_with_result: bool = ..., + _branch_from: Incomplete | None = ..., + _execution_options: Incomplete | None = ..., + _dispatch: Incomplete | None = ..., + _has_events: Incomplete | None = ..., + _allow_revalidate: bool = ..., + ) -> None: ... + def schema_for_object(self, obj) -> str | None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, type_: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def execution_options(self, **opt): ... + def get_execution_options(self): ... + @property + def closed(self) -> bool: ... + @property + def invalidated(self) -> bool: ... + @property + def connection(self) -> DBAPIConnection: ... + def get_isolation_level(self): ... + @property + def default_isolation_level(self): ... + @property + def info(self): ... + def connect(self, close_with_result: bool = ...): ... # type: ignore[override] + def invalidate(self, exception: Exception | None = ...) -> None: ... + def detach(self) -> None: ... + def begin(self) -> Transaction: ... + def begin_nested(self) -> Transaction | None: ... + def begin_twophase(self, xid: Incomplete | None = ...) -> TwoPhaseTransaction: ... + def recover_twophase(self): ... + def rollback_prepared(self, xid, recover: bool = ...) -> None: ... + def commit_prepared(self, xid, recover: bool = ...) -> None: ... + def in_transaction(self) -> bool: ... + def in_nested_transaction(self) -> bool: ... + def get_transaction(self) -> Transaction | None: ... + def get_nested_transaction(self) -> Transaction | None: ... + def close(self) -> None: ... + @overload + def scalar(self, object_: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> Any: ... + @overload + def scalar(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> Any: ... + def scalars(self, object_, *multiparams, **params): ... + @overload # type: ignore[override] + def execute(self, statement: _Executable, *multiparams: Mapping[str, Any], **params) -> CursorResult: ... + @overload + def execute(self, statement: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params) -> CursorResult: ... + def exec_driver_sql( + self, statement: str, parameters: Incomplete | None = ..., execution_options: Incomplete | None = ... + ): ... + def transaction(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + def run_callable(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + +class ExceptionContextImpl(ExceptionContext): + engine: Any + connection: Any + sqlalchemy_exception: Any + original_exception: Any + execution_context: Any + statement: Any + parameters: Any + is_disconnect: Any + invalidate_pool_on_disconnect: Any + def __init__( + self, + exception, + sqlalchemy_exception, + engine, + connection, + cursor, + statement, + parameters, + context, + is_disconnect, + invalidate_pool_on_disconnect, + ) -> None: ... + +class Transaction(TransactionalContext): + def __init__(self, connection: Connection) -> None: ... + @property + def is_valid(self) -> bool: ... + def close(self) -> None: ... + def rollback(self) -> None: ... + def commit(self) -> None: ... 
+ # The following field are technically not defined on Transaction, but on + # all sub-classes. + @property + @abstractmethod + def connection(self) -> Connection: ... + @property + @abstractmethod + def is_active(self) -> bool: ... + +class MarkerTransaction(Transaction): + connection: Connection + @property + def is_active(self) -> bool: ... + +class RootTransaction(Transaction): + connection: Connection + is_active: bool + +class NestedTransaction(Transaction): + connection: Connection + is_active: bool + +class TwoPhaseTransaction(RootTransaction): + xid: Any + def __init__(self, connection: Connection, xid) -> None: ... + def prepare(self) -> None: ... + +class Engine(Connectable, Identified): + pool: Pool + url: URL + dialect: Dialect + logging_name: str # only exists if not None during initialization + echo: echo_property + hide_parameters: bool + def __init__( + self, + pool: Pool, + dialect: Dialect, + url: str | URL, + logging_name: str | None = ..., + echo: _EchoFlag = ..., + query_cache_size: int = ..., + execution_options: Mapping[str, Any] | None = ..., + hide_parameters: bool = ..., + ) -> None: ... + @property + def engine(self) -> Engine: ... + def clear_compiled_cache(self) -> None: ... + def update_execution_options(self, **opt) -> None: ... + def execution_options(self, **opt): ... + def get_execution_options(self): ... + @property + def name(self) -> str: ... + @property + def driver(self): ... + def dispose(self, close: bool = ...) -> None: ... + + class _trans_ctx: + conn: Connection + transaction: Transaction + close_with_result: bool + def __init__(self, conn: Connection, transaction: Transaction, close_with_result: bool) -> None: ... + def __enter__(self) -> Connection: ... + def __exit__( + self, type_: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + + def begin(self, close_with_result: bool = ...) -> _trans_ctx: ... + def transaction( + self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs + ) -> _T | None: ... + def run_callable(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + @overload # type: ignore[override] + def execute(self, statement: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> CursorResult: ... + @overload + def execute(self, statement: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> CursorResult: ... + @overload # type: ignore[override] + def scalar(self, statement: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> Any: ... + @overload + def scalar(self, statement: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> Any: ... + def connect(self, close_with_result: bool = ...) -> Connection: ... # type: ignore[override] + def table_names(self, schema: Incomplete | None = ..., connection: Connection | None = ...): ... + def has_table(self, table_name: str, schema: Incomplete | None = ...) -> bool: ... + def raw_connection(self, _connection: Connection | None = ...) -> DBAPIConnection: ... + +class OptionEngineMixin: + url: URL + dialect: Dialect + logging_name: str + echo: bool + hide_parameters: bool + dispatch: Any + def __init__(self, proxied, execution_options) -> None: ... + pool: Pool + +class OptionEngine(OptionEngineMixin, Engine): ... 
# type: ignore[misc] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/characteristics.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/characteristics.pyi new file mode 100644 index 00000000..ab5b5f63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/characteristics.pyi @@ -0,0 +1,18 @@ +import abc + +from ..util import ABC + +class ConnectionCharacteristic(ABC, metaclass=abc.ABCMeta): + transactional: bool + @abc.abstractmethod + def reset_characteristic(self, dialect, dbapi_conn): ... + @abc.abstractmethod + def set_characteristic(self, dialect, dbapi_conn, value): ... + @abc.abstractmethod + def get_characteristic(self, dialect, dbapi_conn): ... + +class IsolationLevelCharacteristic(ConnectionCharacteristic): + transactional: bool + def reset_characteristic(self, dialect, dbapi_conn) -> None: ... + def set_characteristic(self, dialect, dbapi_conn, value) -> None: ... + def get_characteristic(self, dialect, dbapi_conn): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/create.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/create.pyi new file mode 100644 index 00000000..97070a37 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/create.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any, overload +from typing_extensions import Literal + +from ..future.engine import Engine as FutureEngine +from .base import Engine +from .mock import MockConnection +from .url import URL + +# Further kwargs are forwarded to the engine, dialect, or pool. +@overload +def create_engine(url: URL | str, *, strategy: Literal["mock"], **kwargs) -> MockConnection: ... # type: ignore[misc] +@overload +def create_engine( + url: URL | str, *, module: Incomplete | None = ..., enable_from_linting: bool = ..., future: Literal[True], **kwargs +) -> FutureEngine: ... +@overload +def create_engine( + url: URL | str, *, module: Incomplete | None = ..., enable_from_linting: bool = ..., future: Literal[False] = ..., **kwargs +) -> Engine: ... +def engine_from_config(configuration: Mapping[str, Any], prefix: str = ..., **kwargs) -> Engine: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/cursor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/cursor.pyi new file mode 100644 index 00000000..76d0d369 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/cursor.pyi @@ -0,0 +1,132 @@ +import abc +from _typeshed import Incomplete +from typing import Any + +from ..util import memoized_property +from .result import Result, ResultMetaData +from .row import LegacyRow + +MD_INDEX: int +MD_RESULT_MAP_INDEX: int +MD_OBJECTS: int +MD_LOOKUP_KEY: int +MD_RENDERED_NAME: int +MD_PROCESSOR: int +MD_UNTRANSLATED: int + +class CursorResultMetaData(ResultMetaData): + returns_rows: bool + case_sensitive: Any + def __init__(self, parent, cursor_description) -> None: ... + +class LegacyCursorResultMetaData(CursorResultMetaData): ... 
+ +class ResultFetchStrategy: + alternate_cursor_description: Any + def soft_close(self, result, dbapi_cursor) -> None: ... + def hard_close(self, result, dbapi_cursor) -> None: ... + def yield_per(self, result, dbapi_cursor, num) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...) -> None: ... + def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = ...) -> None: ... + def fetchall(self, result) -> None: ... + def handle_exception(self, result, dbapi_cursor, err) -> None: ... + +class NoCursorFetchStrategy(ResultFetchStrategy): + def soft_close(self, result, dbapi_cursor) -> None: ... + def hard_close(self, result, dbapi_cursor) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...): ... + def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = ...): ... + def fetchall(self, result, dbapi_cursor): ... + +class NoCursorDQLFetchStrategy(NoCursorFetchStrategy): ... +class NoCursorDMLFetchStrategy(NoCursorFetchStrategy): ... + +class CursorFetchStrategy(ResultFetchStrategy): + def soft_close(self, result, dbapi_cursor) -> None: ... + def hard_close(self, result, dbapi_cursor) -> None: ... + def handle_exception(self, result, dbapi_cursor, err) -> None: ... + def yield_per(self, result, dbapi_cursor, num) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...): ... + def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = ...): ... + def fetchall(self, result, dbapi_cursor): ... + +class BufferedRowCursorFetchStrategy(CursorFetchStrategy): + def __init__( + self, dbapi_cursor, execution_options, growth_factor: int = ..., initial_buffer: Incomplete | None = ... + ) -> None: ... + @classmethod + def create(cls, result): ... + def yield_per(self, result, dbapi_cursor, num) -> None: ... + def soft_close(self, result, dbapi_cursor) -> None: ... + def hard_close(self, result, dbapi_cursor) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...): ... + def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = ...): ... + def fetchall(self, result, dbapi_cursor): ... + +class FullyBufferedCursorFetchStrategy(CursorFetchStrategy): + alternate_cursor_description: Any + def __init__( + self, dbapi_cursor, alternate_description: Incomplete | None = ..., initial_buffer: Incomplete | None = ... + ) -> None: ... + def yield_per(self, result, dbapi_cursor, num) -> None: ... + def soft_close(self, result, dbapi_cursor) -> None: ... + def hard_close(self, result, dbapi_cursor) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...): ... + def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = ...): ... + def fetchall(self, result, dbapi_cursor): ... + +class _NoResultMetaData(ResultMetaData): + returns_rows: bool + @property + def keys(self) -> None: ... + +class _LegacyNoResultMetaData(_NoResultMetaData): + @property + def keys(self): ... + +class BaseCursorResult: + out_parameters: Any + closed: bool + context: Any + dialect: Any + cursor: Any + cursor_strategy: Any + connection: Any + def __init__(self, context, cursor_strategy, cursor_description): ... + @property + def inserted_primary_key_rows(self): ... + @property + def inserted_primary_key(self): ... + def last_updated_params(self): ... + def last_inserted_params(self): ... + @property + def returned_defaults_rows(self): ... + @property + def returned_defaults(self): ... + def lastrow_has_defaults(self): ... + def postfetch_cols(self): ... + def prefetch_cols(self): ... 
+ def supports_sane_rowcount(self): ... + def supports_sane_multi_rowcount(self): ... + @memoized_property + def rowcount(self): ... + @property + def lastrowid(self): ... + @property + def returns_rows(self): ... + @property + def is_insert(self): ... + +class CursorResult(BaseCursorResult, Result): + def merge(self, *others): ... + def close(self) -> None: ... + +class LegacyCursorResult(CursorResult): + def close(self) -> None: ... + +ResultProxy = LegacyCursorResult + +class BufferedRowResultProxy(ResultProxy): ... +class FullyBufferedResultProxy(ResultProxy): ... +class BufferedColumnRow(LegacyRow, metaclass=abc.ABCMeta): ... +class BufferedColumnResultProxy(ResultProxy): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/default.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/default.pyi new file mode 100644 index 00000000..fa725cb7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/default.pyi @@ -0,0 +1,221 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar + +from .. import types as sqltypes +from ..util import memoized_property +from . import interfaces + +AUTOCOMMIT_REGEXP: Any +SERVER_SIDE_CURSOR_RE: Any +CACHE_HIT: Any +CACHE_MISS: Any +CACHING_DISABLED: Any +NO_CACHE_KEY: Any +NO_DIALECT_SUPPORT: Any + +class DefaultDialect(interfaces.Dialect): # type: ignore[misc] + execution_ctx_cls: ClassVar[type[interfaces.ExecutionContext]] + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + supports_alter: bool + supports_comments: bool + inline_comments: bool + use_setinputsizes: bool + supports_statement_cache: bool + default_sequence_base: int + execute_sequence_format: Any + supports_schemas: bool + supports_views: bool + supports_sequences: bool + sequences_optional: bool + preexecute_autoincrement_sequences: bool + supports_identity_columns: bool + postfetch_lastrowid: bool + implicit_returning: bool + full_returning: bool + insert_executemany_returning: bool + cte_follows_insert: bool + supports_native_enum: bool + supports_native_boolean: bool + non_native_boolean_check_constraint: bool + supports_simple_order_by_label: bool + tuple_in_values: bool + connection_characteristics: Any + engine_config_types: Any + supports_native_decimal: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + returns_unicode_strings: Any + description_encoding: Any + name: str + max_identifier_length: int + isolation_level: Any + max_index_name_length: Any + max_constraint_name_length: Any + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + colspecs: Any + default_paramstyle: str + supports_default_values: bool + supports_default_metavalue: bool + supports_empty_insert: bool + supports_multivalues_insert: bool + supports_is_distinct_from: bool + supports_server_side_cursors: bool + server_side_cursors: bool + supports_for_update_of: bool + server_version_info: Any + default_schema_name: Any + construct_arguments: Any + requires_name_normalize: bool + reflection_options: Any + dbapi_exception_translation_map: Any + is_async: bool + CACHE_HIT: Any + CACHE_MISS: Any + CACHING_DISABLED: Any + NO_CACHE_KEY: Any + NO_DIALECT_SUPPORT: Any + convert_unicode: Any + encoding: Any + positional: bool + dbapi: Any + paramstyle: Any + identifier_preparer: Any + case_sensitive: Any + label_length: Any + compiler_linting: Any + 
def __init__( + self, + convert_unicode: bool = ..., + encoding: str = ..., + paramstyle: Incomplete | None = ..., + dbapi: Incomplete | None = ..., + implicit_returning: Incomplete | None = ..., + case_sensitive: bool = ..., + supports_native_boolean: Incomplete | None = ..., + max_identifier_length: Incomplete | None = ..., + label_length: Incomplete | None = ..., + compiler_linting=..., + server_side_cursors: bool = ..., + **kwargs, + ) -> None: ... + @property + def dialect_description(self): ... + @property + def supports_sane_rowcount_returning(self): ... + @classmethod + def get_pool_class(cls, url): ... + def get_dialect_pool_class(self, url): ... + @classmethod + def load_provisioning(cls) -> None: ... + default_isolation_level: Any + def initialize(self, connection) -> None: ... + def on_connect(self) -> None: ... + def get_default_isolation_level(self, dbapi_conn): ... + def type_descriptor(self, typeobj): ... + def has_index(self, connection, table_name, index_name, schema: Incomplete | None = ...): ... + def validate_identifier(self, ident) -> None: ... + def connect(self, *cargs, **cparams): ... + def create_connect_args(self, url): ... + def set_engine_execution_options(self, engine, opts) -> None: ... + def set_connection_execution_options(self, connection, opts) -> None: ... + def do_begin(self, dbapi_connection) -> None: ... + def do_rollback(self, dbapi_connection) -> None: ... + def do_commit(self, dbapi_connection) -> None: ... + def do_close(self, dbapi_connection) -> None: ... + def do_ping(self, dbapi_connection): ... + def create_xid(self): ... + def do_savepoint(self, connection, name) -> None: ... + def do_rollback_to_savepoint(self, connection, name) -> None: ... + def do_release_savepoint(self, connection, name) -> None: ... + def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def do_execute(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def do_execute_no_params(self, cursor, statement, context: Incomplete | None = ...) -> None: ... # type: ignore[override] + def is_disconnect(self, e, connection, cursor): ... + def reset_isolation_level(self, dbapi_conn) -> None: ... + def normalize_name(self, name): ... + def denormalize_name(self, name): ... + def get_driver_connection(self, connection): ... + +class _RendersLiteral: + def literal_processor(self, dialect): ... + +class _StrDateTime(_RendersLiteral, sqltypes.DateTime): ... +class _StrDate(_RendersLiteral, sqltypes.Date): ... +class _StrTime(_RendersLiteral, sqltypes.Time): ... 
+ +class StrCompileDialect(DefaultDialect): # type: ignore[misc] + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + supports_statement_cache: bool + supports_identity_columns: bool + supports_sequences: bool + sequences_optional: bool + preexecute_autoincrement_sequences: bool + implicit_returning: bool + supports_native_boolean: bool + supports_multivalues_insert: bool + supports_simple_order_by_label: bool + colspecs: Any + +class DefaultExecutionContext(interfaces.ExecutionContext): + isinsert: bool + isupdate: bool + isdelete: bool + is_crud: bool + is_text: bool + isddl: bool + executemany: bool + compiled: Any + statement: Any + result_column_struct: Any + returned_default_rows: Any + execution_options: Any + include_set_input_sizes: Any + exclude_set_input_sizes: Any + cursor_fetch_strategy: Any + cache_stats: Any + invoked_statement: Any + cache_hit: Any + @memoized_property + def identifier_preparer(self): ... + @memoized_property + def engine(self): ... + @memoized_property + def postfetch_cols(self): ... + @memoized_property + def prefetch_cols(self): ... + @memoized_property + def returning_cols(self) -> None: ... + @memoized_property + def no_parameters(self): ... + @memoized_property + def should_autocommit(self): ... + @property + def connection(self): ... + def should_autocommit_text(self, statement): ... + def create_cursor(self): ... + def create_default_cursor(self): ... + def create_server_side_cursor(self) -> None: ... + def pre_exec(self) -> None: ... + def get_out_parameter_values(self, names) -> None: ... + def post_exec(self) -> None: ... + def get_result_processor(self, type_, colname, coltype): ... + def get_lastrowid(self): ... + def handle_dbapi_exception(self, e) -> None: ... + @property + def rowcount(self): ... + def supports_sane_rowcount(self): ... + def supports_sane_multi_rowcount(self): ... + @memoized_property + def inserted_primary_key_rows(self): ... + def lastrow_has_defaults(self): ... + current_parameters: Any + def get_current_parameters(self, isolate_multiinsert_groups: bool = ...): ... + def get_insert_default(self, column): ... + def get_update_default(self, column): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/events.pyi new file mode 100644 index 00000000..7cca8b27 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/events.pyi @@ -0,0 +1,29 @@ +from .. import event as event + +class ConnectionEvents(event.Events): + def before_execute(self, conn, clauseelement, multiparams, params, execution_options) -> None: ... + def after_execute(self, conn, clauseelement, multiparams, params, execution_options, result) -> None: ... + def before_cursor_execute(self, conn, cursor, statement, parameters, context, executemany) -> None: ... + def after_cursor_execute(self, conn, cursor, statement, parameters, context, executemany) -> None: ... + def handle_error(self, exception_context) -> None: ... + def engine_connect(self, conn, branch) -> None: ... + def set_connection_execution_options(self, conn, opts) -> None: ... + def set_engine_execution_options(self, engine, opts) -> None: ... + def engine_disposed(self, engine) -> None: ... + def begin(self, conn) -> None: ... + def rollback(self, conn) -> None: ... + def commit(self, conn) -> None: ... 
+ def savepoint(self, conn, name) -> None: ... + def rollback_savepoint(self, conn, name, context) -> None: ... + def release_savepoint(self, conn, name, context) -> None: ... + def begin_twophase(self, conn, xid) -> None: ... + def prepare_twophase(self, conn, xid) -> None: ... + def rollback_twophase(self, conn, xid, is_prepared) -> None: ... + def commit_twophase(self, conn, xid, is_prepared) -> None: ... + +class DialectEvents(event.Events): + def do_connect(self, dialect, conn_rec, cargs, cparams) -> None: ... + def do_executemany(self, cursor, statement, parameters, context) -> None: ... + def do_execute_no_params(self, cursor, statement, context) -> None: ... + def do_execute(self, cursor, statement, parameters, context) -> None: ... + def do_setinputsizes(self, inputsizes, cursor, statement, parameters, context) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/interfaces.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/interfaces.pyi new file mode 100644 index 00000000..7b6eac7d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/interfaces.pyi @@ -0,0 +1,168 @@ +from _typeshed import Incomplete +from _typeshed.dbapi import DBAPIConnection, DBAPICursor +from abc import abstractmethod +from collections.abc import Callable, Collection, Mapping +from typing import Any, ClassVar, overload + +from ..exc import StatementError +from ..sql.compiler import Compiled as Compiled, IdentifierPreparer, TypeCompiler as TypeCompiler +from ..sql.ddl import DDLElement +from ..sql.elements import ClauseElement +from ..sql.functions import FunctionElement +from ..sql.schema import DefaultGenerator +from .base import Connection, Engine +from .cursor import CursorResult +from .url import URL + +class Dialect: + # Sub-classes are required to have the following attributes: + name: str + driver: str + positional: bool + paramstyle: str + encoding: str + statement_compiler: Compiled + ddl_compiler: Compiled + server_version_info: tuple[Any, ...] + # Only available on supporting dialects: + # default_schema_name: str + execution_ctx_cls: ClassVar[type[ExecutionContext]] + execute_sequence_format: type[tuple[Any] | list[Any]] + preparer: IdentifierPreparer + supports_alter: bool + max_identifier_length: int + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + preexecute_autoincrement_sequences: bool + implicit_returning: bool + colspecs: dict[Any, Any] + supports_default_values: bool + supports_sequences: bool + sequences_optional: bool + supports_native_enum: bool + supports_native_boolean: bool + dbapi_exception_translation_map: dict[Any, Any] + + supports_statement_cache: bool + @abstractmethod + def create_connect_args(self, url: URL) -> None: ... + def initialize(self, connection) -> None: ... + def on_connect_url(self, url) -> Callable[[DBAPIConnection], object] | None: ... + def on_connect(self) -> Callable[[DBAPIConnection], object] | None: ... + # The following methods all raise NotImplementedError, but not all + # dialects implement all methods, which is why they can't be marked + # as abstract. + @classmethod + def type_descriptor(cls, typeobj) -> None: ... + def get_columns(self, connection, table_name, schema: Incomplete | None = ..., **kw) -> None: ... + def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = ..., **kw) -> None: ... 
+ def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = ..., **kw) -> None: ... + def get_table_names(self, connection, schema: Incomplete | None = ..., **kw) -> None: ... + def get_temp_table_names(self, connection, schema: Incomplete | None = ..., **kw) -> None: ... + def get_view_names(self, connection, schema: Incomplete | None = ..., **kw) -> None: ... + def get_sequence_names(self, connection, schema: Incomplete | None = ..., **kw) -> None: ... + def get_temp_view_names(self, connection, schema: Incomplete | None = ..., **kw) -> None: ... + def get_view_definition(self, connection, view_name, schema: Incomplete | None = ..., **kw) -> None: ... + def get_indexes(self, connection, table_name, schema: Incomplete | None = ..., **kw) -> None: ... + def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = ..., **kw) -> None: ... + def get_check_constraints(self, connection, table_name, schema: Incomplete | None = ..., **kw) -> None: ... + def get_table_comment(self, connection, table_name, schema: Incomplete | None = ..., **kw) -> None: ... + def normalize_name(self, name) -> None: ... + def denormalize_name(self, name) -> None: ... + def has_table(self, connection, table_name, schema: Incomplete | None = ..., **kw) -> None: ... + def has_index(self, connection, table_name, index_name, schema: Incomplete | None = ...) -> None: ... + def has_sequence(self, connection, sequence_name, schema: Incomplete | None = ..., **kw) -> None: ... + def do_begin(self, dbapi_connection) -> None: ... + def do_rollback(self, dbapi_connection) -> None: ... + def do_commit(self, dbapi_connection) -> None: ... + def do_close(self, dbapi_connection) -> None: ... + def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ... + def create_xid(self) -> None: ... + def do_savepoint(self, connection, name) -> None: ... + def do_rollback_to_savepoint(self, connection, name) -> None: ... + def do_release_savepoint(self, connection, name) -> None: ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_recover_twophase(self, connection) -> None: ... + def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def do_execute(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def do_execute_no_params(self, cursor, statement, parameters, context: Incomplete | None = ...) -> None: ... + def is_disconnect(self, e, connection, cursor) -> None: ... + def connect(self, *cargs, **cparams) -> DBAPIConnection: ... + def reset_isolation_level(self, dbapi_conn) -> None: ... + def set_isolation_level(self, dbapi_conn, level) -> None: ... + def get_isolation_level(self, dbapi_conn) -> None: ... + def get_default_isolation_level(self, dbapi_conn) -> None: ... + @classmethod + def get_dialect_cls(cls, url): ... + @classmethod + def load_provisioning(cls) -> None: ... + @classmethod + def engine_created(cls, engine) -> None: ... + def get_driver_connection(self, connection) -> None: ... + +class CreateEnginePlugin: + url: URL + def __init__(self, url: URL, kwargs) -> None: ... + def update_url(self, url) -> None: ... + def handle_dialect_kwargs(self, dialect_cls, dialect_args) -> None: ... 
+ def handle_pool_kwargs(self, pool_cls, pool_args) -> None: ... + def engine_created(self, engine) -> None: ... + +class ExecutionContext: + def create_cursor(self) -> None: ... + def pre_exec(self) -> None: ... + def get_out_parameter_values(self, out_param_names) -> None: ... + def post_exec(self) -> None: ... + def get_result_cursor_strategy(self, result) -> None: ... + def handle_dbapi_exception(self, e) -> None: ... + def should_autocommit_text(self, statement) -> None: ... + def lastrow_has_defaults(self) -> None: ... + def get_rowcount(self) -> None: ... + +class Connectable: + @abstractmethod + def connect(self, **kwargs) -> Connection: ... + @property + def engine(self) -> Engine | None: ... + @abstractmethod + @overload + def execute( + self, + object_: ClauseElement | FunctionElement | DDLElement | DefaultGenerator | Compiled, + *multiparams: Mapping[str, Any], + **params: Any, + ) -> CursorResult: ... + @abstractmethod + @overload + def execute(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> CursorResult: ... + @abstractmethod + @overload + def scalar( + self, + object_: ClauseElement | FunctionElement | DDLElement | DefaultGenerator | Compiled, + *multiparams: Mapping[str, Any], + **params: Any, + ) -> Any: ... + @abstractmethod + @overload + def scalar(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> Any: ... + +class ExceptionContext: + connection: Connection | None + engine: Engine | None + cursor: DBAPICursor | None + statement: str | None + parameters: Collection[Any] | None + original_exception: BaseException | None + sqlalchemy_exception: StatementError | None + chained_exception: BaseException | None + execution_context: ExecutionContext | None + is_disconnect: bool | None + invalidate_pool_on_disconnect: bool + +class AdaptedConnection: + @property + def driver_connection(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/mock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/mock.pyi new file mode 100644 index 00000000..bfd80072 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/mock.pyi @@ -0,0 +1,32 @@ +from abc import abstractmethod +from collections.abc import Mapping +from typing import Any, overload +from typing_extensions import Self + +from .base import _Executable +from .cursor import CursorResult +from .interfaces import Connectable, Dialect +from .url import URL + +class MockConnection(Connectable): + def __init__(self, dialect: Dialect, execute) -> None: ... + @property + def engine(self) -> Self: ... # type: ignore[override] + @property + def dialect(self) -> Dialect: ... + @property + def name(self) -> str: ... + def schema_for_object(self, obj): ... + def connect(self, **kwargs): ... + def execution_options(self, **kw): ... + def compiler(self, statement, parameters, **kwargs): ... + def create(self, entity, **kwargs) -> None: ... + def drop(self, entity, **kwargs) -> None: ... + @abstractmethod + @overload + def execute(self, object_: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> CursorResult: ... + @abstractmethod + @overload + def execute(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> CursorResult: ... + +def create_mock_engine(url: URL | str, executor, **kw) -> MockConnection: ... 
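
mock.pyi above types `create_mock_engine(url, executor, **kw)`, and create.pyi earlier in this diff routes `create_engine(..., strategy="mock")` to the same `MockConnection`. A hedged sketch of the common "dump DDL without a database" pattern (the table is invented for illustration):

# Illustrative sketch only -- uses the create_mock_engine signature stubbed above.
from sqlalchemy import Column, Integer, MetaData, Table, create_mock_engine

metadata = MetaData()
Table("example", metadata, Column("id", Integer, primary_key=True))

def dump(sql, *multiparams, **params):
    # The executor receives each DDL construct; render it against the mock dialect.
    print(sql.compile(dialect=engine.dialect))

engine = create_mock_engine("sqlite://", dump)
metadata.create_all(engine, checkfirst=False)  # prints CREATE TABLE instead of executing it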
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/reflection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/reflection.pyi new file mode 100644 index 00000000..350ba42e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/reflection.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete + +def cache(fn, self, con, *args, **kw): ... + +class Inspector: + def __init__(self, bind): ... + @classmethod + def from_engine(cls, bind): ... + @property + def default_schema_name(self): ... + def get_schema_names(self): ... + def get_table_names(self, schema: Incomplete | None = ...): ... + def has_table(self, table_name, schema: Incomplete | None = ...): ... + def has_sequence(self, sequence_name, schema: Incomplete | None = ...): ... + def get_sorted_table_and_fkc_names(self, schema: Incomplete | None = ...): ... + def get_temp_table_names(self): ... + def get_temp_view_names(self): ... + def get_table_options(self, table_name, schema: Incomplete | None = ..., **kw): ... + def get_view_names(self, schema: Incomplete | None = ...): ... + def get_sequence_names(self, schema: Incomplete | None = ...): ... + def get_view_definition(self, view_name, schema: Incomplete | None = ...): ... + def get_columns(self, table_name, schema: Incomplete | None = ..., **kw): ... + def get_pk_constraint(self, table_name, schema: Incomplete | None = ..., **kw): ... + def get_foreign_keys(self, table_name, schema: Incomplete | None = ..., **kw): ... + def get_indexes(self, table_name, schema: Incomplete | None = ..., **kw): ... + def get_unique_constraints(self, table_name, schema: Incomplete | None = ..., **kw): ... + def get_table_comment(self, table_name, schema: Incomplete | None = ..., **kw): ... + def get_check_constraints(self, table_name, schema: Incomplete | None = ..., **kw): ... + def reflecttable(self, *args, **kwargs): ... + def reflect_table( + self, table, include_columns, exclude_columns=..., resolve_fks: bool = ..., _extend_on: Incomplete | None = ... + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/result.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/result.pyi new file mode 100644 index 00000000..7328eb5e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/result.pyi @@ -0,0 +1,124 @@ +from _typeshed import Incomplete +from collections.abc import Generator, KeysView +from typing import Any +from typing_extensions import Self + +from ..sql.base import InPlaceGenerative +from .row import Row + +class ResultMetaData: + @property + def keys(self): ... + +class RMKeyView(KeysView[Any]): + def __init__(self, parent) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __contains__(self, item): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + +class SimpleResultMetaData(ResultMetaData): + def __init__( + self, + keys, + extra: Incomplete | None = ..., + _processors: Incomplete | None = ..., + _tuplefilter: Incomplete | None = ..., + _translated_indexes: Incomplete | None = ..., + _unique_filters: Incomplete | None = ..., + ) -> None: ... + +def result_tuple(fields, extra: Incomplete | None = ...): ... + +class ResultInternal(InPlaceGenerative): ... 
+ +class _WithKeys: + def keys(self): ... + +class Result(_WithKeys, ResultInternal): + def __init__(self, cursor_metadata) -> None: ... + def close(self) -> None: ... + def yield_per(self, num: int) -> Self: ... + def unique(self, strategy: Incomplete | None = ...) -> Self: ... + def columns(self, *col_expressions): ... + def scalars(self, index: int = ...) -> ScalarResult: ... + def mappings(self) -> MappingResult: ... + def __iter__(self): ... + def __next__(self): ... + def partitions(self, size: int | None = ...) -> Generator[list[Row], None, None]: ... + def fetchall(self) -> list[Row]: ... + def fetchone(self) -> Row | None: ... + def fetchmany(self, size: int | None = ...) -> list[Row]: ... + def all(self) -> list[Row]: ... + def first(self) -> Row | None: ... + def one_or_none(self) -> Row | None: ... + def scalar_one(self) -> Any: ... + def scalar_one_or_none(self) -> Any | None: ... + def one(self) -> Row: ... + def scalar(self) -> Any | None: ... + def freeze(self) -> FrozenResult: ... + def merge(self, *others) -> MergedResult: ... + +class FilterResult(ResultInternal): ... + +class ScalarResult(FilterResult): + def __init__(self, real_result, index) -> None: ... + def unique(self, strategy: Incomplete | None = ...): ... + def partitions(self, size: Incomplete | None = ...) -> None: ... + def fetchall(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def all(self): ... + def __iter__(self): ... + def __next__(self): ... + def first(self): ... + def one_or_none(self): ... + def one(self): ... + +class MappingResult(_WithKeys, FilterResult): + def __init__(self, result) -> None: ... + def unique(self, strategy: Incomplete | None = ...): ... + def columns(self, *col_expressions): ... + def partitions(self, size: Incomplete | None = ...) -> None: ... + def fetchall(self): ... + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def all(self): ... + def __iter__(self): ... + def __next__(self): ... + def first(self): ... + def one_or_none(self): ... + def one(self): ... + +class FrozenResult: + metadata: Any + data: Any + def __init__(self, result) -> None: ... + def rewrite_rows(self): ... + def with_new_rows(self, tuple_data): ... + def __call__(self): ... + +class IteratorResult(Result): + iterator: Any + raw: Any + def __init__(self, cursor_metadata, iterator, raw: Incomplete | None = ..., _source_supports_scalars: bool = ...) -> None: ... + +def null_result() -> IteratorResult: ... + +class ChunkedIteratorResult(IteratorResult): + chunks: Any + raw: Any + iterator: Any + dynamic_yield_per: Any + def __init__( + self, + cursor_metadata, + chunks, + source_supports_scalars: bool = ..., + raw: Incomplete | None = ..., + dynamic_yield_per: bool = ..., + ) -> None: ... + +class MergedResult(IteratorResult): + closed: bool + def __init__(self, cursor_metadata, results) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/row.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/row.pyi new file mode 100644 index 00000000..b5098c78 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/row.pyi @@ -0,0 +1,57 @@ +import abc +from collections.abc import ItemsView, KeysView, Mapping, Sequence, ValuesView +from typing import Any + +from ..cresultproxy import BaseRow as BaseRow + +MD_INDEX: int + +def rowproxy_reconstructor(cls, state): ... 
+ +KEY_INTEGER_ONLY: int +KEY_OBJECTS_ONLY: int +KEY_OBJECTS_BUT_WARN: int +KEY_OBJECTS_NO_WARN: int + +class Row(BaseRow, Sequence[Any], metaclass=abc.ABCMeta): + @property + def count(self): ... + @property + def index(self): ... + def __contains__(self, key): ... + __hash__ = BaseRow.__hash__ + def __lt__(self, other): ... + def __le__(self, other): ... + def __ge__(self, other): ... + def __gt__(self, other): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def keys(self): ... + +class LegacyRow(Row, metaclass=abc.ABCMeta): + def __contains__(self, key): ... + def has_key(self, key): ... + def items(self): ... + def iterkeys(self): ... + def itervalues(self): ... + def values(self): ... + +BaseRowProxy = BaseRow +RowProxy = Row + +class ROMappingView(KeysView[Any], ValuesView[Any], ItemsView[Any, Any]): + def __init__(self, mapping, items) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __contains__(self, item): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + +class RowMapping(BaseRow, Mapping[Any, Any]): + __getitem__: Any + def __iter__(self): ... + def __len__(self) -> int: ... + def __contains__(self, key): ... + def items(self): ... + def keys(self): ... + def values(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/strategies.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/strategies.pyi new file mode 100644 index 00000000..25239d17 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/strategies.pyi @@ -0,0 +1,4 @@ +from typing import Any + +class MockEngineStrategy: + MockConnection: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/url.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/url.pyi new file mode 100644 index 00000000..6d428111 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/url.pyi @@ -0,0 +1,64 @@ +from _typeshed import SupportsItems, Unused +from collections.abc import Iterable, Mapping, Sequence +from typing import Any, NamedTuple +from typing_extensions import Self, TypeAlias + +from ..util import immutabledict +from .interfaces import Dialect + +# object that produces a password when called with str() +_PasswordObject: TypeAlias = object + +# stub-only helper class +class _URLTuple(NamedTuple): + drivername: str + username: str | None + password: str | _PasswordObject | None + host: str | None + port: int | None + database: str | None + query: immutabledict[str, str | tuple[str, ...]] + +_Query: TypeAlias = Mapping[str, str | Sequence[str]] | Sequence[tuple[str, str | Sequence[str]]] + +class URL(_URLTuple): + @classmethod + def create( + cls, + drivername: str, + username: str | None = ..., + password: str | _PasswordObject | None = None, + host: str | None = ..., + port: int | None = ..., + database: str | None = ..., + query: _Query | None = ..., + ) -> URL: ... + def set( + self, + drivername: str | None = ..., + username: str | None = ..., + password: str | _PasswordObject | None = None, + host: str | None = ..., + port: int | None = ..., + database: str | None = ..., + query: _Query | None = ..., + ) -> Self: ... + def update_query_string(self, query_string: str, append: bool = ...) 
-> Self: ... + def update_query_pairs(self, key_value_pairs: Iterable[tuple[str, str]], append: bool = ...) -> Self: ... + def update_query_dict(self, query_parameters: SupportsItems[str, str | Sequence[str]], append: bool = ...) -> Self: ... + def difference_update_query(self, names: Iterable[str]) -> URL: ... + @property + def normalized_query(self) -> immutabledict[str, tuple[str, ...]]: ... + def __to_string__(self, hide_password: bool = ...) -> str: ... + def render_as_string(self, hide_password: bool = ...) -> str: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Unused) -> Self: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def get_backend_name(self) -> str: ... + def get_driver_name(self) -> str: ... + def get_dialect(self) -> type[Dialect]: ... + def translate_connect_args(self, names: list[str] | None = ..., **kw: str) -> dict[str, Any]: ... + +def make_url(name_or_url: str | URL) -> URL: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/util.pyi new file mode 100644 index 00000000..0fdef743 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/engine/util.pyi @@ -0,0 +1,12 @@ +from collections.abc import Callable +from types import TracebackType +from typing import Any +from typing_extensions import Self + +def connection_memoize(key: str) -> Callable[..., Any]: ... + +class TransactionalContext: + def __enter__(self) -> Self: ... + def __exit__( + self, type_: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/__init__.pyi new file mode 100644 index 00000000..a0b0fcea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/__init__.pyi @@ -0,0 +1,10 @@ +from .api import ( + CANCEL as CANCEL, + NO_RETVAL as NO_RETVAL, + contains as contains, + listen as listen, + listens_for as listens_for, + remove as remove, +) +from .attr import RefCollection as RefCollection +from .base import Events as Events, dispatcher as dispatcher diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/api.pyi new file mode 100644 index 00000000..bea12af0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/api.pyi @@ -0,0 +1,9 @@ +from typing import Any + +CANCEL: Any +NO_RETVAL: Any + +def listen(target, identifier, fn, *args, **kw) -> None: ... +def listens_for(target, identifier, *args, **kw): ... +def remove(target, identifier, fn) -> None: ... +def contains(target, identifier, fn): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/attr.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/attr.pyi new file mode 100644 index 00000000..5f03e16b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/attr.pyi @@ -0,0 +1,85 @@ +from typing import Any + +from .. import util + +class RefCollection(util.MemoizedSlots): + ref: Any + +class _empty_collection: + def append(self, element) -> None: ... + def extend(self, other) -> None: ... + def remove(self, element) -> None: ... + def __iter__(self): ... + def clear(self) -> None: ... + +class _ClsLevelDispatch(RefCollection): + name: Any + clsname: Any + arg_names: Any + has_kw: Any + legacy_signatures: Any + def __init__(self, parent_dispatch_cls, fn): ... + def insert(self, event_key, propagate) -> None: ... + def append(self, event_key, propagate) -> None: ... + def update_subclass(self, target) -> None: ... + def remove(self, event_key) -> None: ... + def clear(self) -> None: ... + def for_modify(self, obj): ... + +class _InstanceLevelDispatch(RefCollection): ... + +class _EmptyListener(_InstanceLevelDispatch): + propagate: Any + listeners: Any + parent: Any + parent_listeners: Any + name: Any + def __init__(self, parent, target_cls) -> None: ... + def for_modify(self, obj): ... + exec_once: Any + exec_once_unless_exception: Any + insert: Any + append: Any + remove: Any + clear: Any + def __call__(self, *args, **kw) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + +class _CompoundListener(_InstanceLevelDispatch): + def exec_once(self, *args, **kw) -> None: ... + def exec_once_unless_exception(self, *args, **kw) -> None: ... + def __call__(self, *args, **kw) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + +class _ListenerCollection(_CompoundListener): + parent_listeners: Any + parent: Any + name: Any + listeners: Any + propagate: Any + def __init__(self, parent, target_cls) -> None: ... + def for_modify(self, obj): ... + def insert(self, event_key, propagate) -> None: ... + def append(self, event_key, propagate) -> None: ... + def remove(self, event_key) -> None: ... + def clear(self) -> None: ... + +class _JoinedListener(_CompoundListener): + parent: Any + name: Any + local: Any + parent_listeners: Any + def __init__(self, parent, name, local) -> None: ... + @property + def listeners(self): ... + def for_modify(self, obj): ... + def insert(self, event_key, propagate) -> None: ... + def append(self, event_key, propagate) -> None: ... + def remove(self, event_key) -> None: ... + def clear(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/base.pyi new file mode 100644 index 00000000..9b1341c1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/base.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete +from typing import Any + +class _UnpickleDispatch: + def __call__(self, _instance_cls): ... + +class _Dispatch: + def __init__(self, parent, instance_cls: Incomplete | None = ...) -> None: ... 
+ def __getattr__(self, name: str): ... + def __reduce__(self): ... + +class _EventMeta(type): + def __init__(cls, classname, bases, dict_) -> None: ... + +class Events: + dispatch: Any + +class _JoinedDispatcher: + local: Any + parent: Any + def __init__(self, local, parent) -> None: ... + def __getattr__(self, name: str): ... + +class dispatcher: + dispatch: Any + events: Any + def __init__(self, events) -> None: ... + def __get__(self, obj, cls): ... + +class slots_dispatcher(dispatcher): + def __get__(self, obj, cls): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/legacy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/legacy.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/registry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/registry.pyi new file mode 100644 index 00000000..f513b966 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/event/registry.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from typing import Any + +class _EventKey: + target: Any + identifier: Any + fn: Any + fn_key: Any + fn_wrap: Any + dispatch_target: Any + def __init__(self, target, identifier, fn, dispatch_target, _fn_wrap: Incomplete | None = ...) -> None: ... + def with_wrapper(self, fn_wrap): ... + def with_dispatch_target(self, dispatch_target): ... + def listen(self, *args, **kw) -> None: ... + def remove(self) -> None: ... + def contains(self): ... + def base_listen( + self, propagate: bool = ..., insert: bool = ..., named: bool = ..., retval: Incomplete | None = ..., asyncio: bool = ... + ) -> None: ... + def append_to_list(self, owner, list_): ... + def remove_from_list(self, owner, list_) -> None: ... + def prepend_to_list(self, owner, list_): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/events.pyi new file mode 100644 index 00000000..8bbbfa58 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/events.pyi @@ -0,0 +1,4 @@ +from .engine.events import ConnectionEvents as ConnectionEvents, DialectEvents as DialectEvents +from .pool.events import PoolEvents as PoolEvents +from .sql.base import SchemaEventTarget as SchemaEventTarget +from .sql.events import DDLEvents as DDLEvents diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/exc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/exc.pyi new file mode 100644 index 00000000..30dba09f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/exc.pyi @@ -0,0 +1,147 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar + +class HasDescriptionCode: + code: str | None + def __init__(self, *arg: Any, code: str | None = ..., **kw: Any) -> None: ... + +class SQLAlchemyError(HasDescriptionCode, Exception): + def __unicode__(self) -> str: ... + +class ArgumentError(SQLAlchemyError): ... 
+ +class ObjectNotExecutableError(ArgumentError): + target: Any + def __init__(self, target) -> None: ... + def __reduce__(self): ... + +class NoSuchModuleError(ArgumentError): ... +class NoForeignKeysError(ArgumentError): ... +class AmbiguousForeignKeysError(ArgumentError): ... + +class CircularDependencyError(SQLAlchemyError): + cycles: Any + edges: Any + def __init__(self, message, cycles, edges, msg: Incomplete | None = ..., code: Incomplete | None = ...) -> None: ... + def __reduce__(self): ... + +class CompileError(SQLAlchemyError): ... + +class UnsupportedCompilationError(CompileError): + code: str + compiler: Any + element_type: Any + message: str | None + def __init__(self, compiler, element_type, message: str | None = ...) -> None: ... + def __reduce__(self): ... + +class IdentifierError(SQLAlchemyError): ... + +class DisconnectionError(SQLAlchemyError): + invalidate_pool: bool + +class InvalidatePoolError(DisconnectionError): + invalidate_pool: bool + +class TimeoutError(SQLAlchemyError): ... +class InvalidRequestError(SQLAlchemyError): ... +class NoInspectionAvailable(InvalidRequestError): ... +class PendingRollbackError(InvalidRequestError): ... +class ResourceClosedError(InvalidRequestError): ... +class NoSuchColumnError(InvalidRequestError, KeyError): ... +class NoResultFound(InvalidRequestError): ... +class MultipleResultsFound(InvalidRequestError): ... +class NoReferenceError(InvalidRequestError): ... + +class AwaitRequired(InvalidRequestError): + code: str + +class MissingGreenlet(InvalidRequestError): + code: str + +class NoReferencedTableError(NoReferenceError): + table_name: Any + def __init__(self, message, tname) -> None: ... + def __reduce__(self): ... + +class NoReferencedColumnError(NoReferenceError): + table_name: Any + column_name: Any + def __init__(self, message, tname, cname) -> None: ... + def __reduce__(self): ... + +class NoSuchTableError(InvalidRequestError): ... +class UnreflectableTableError(InvalidRequestError): ... +class UnboundExecutionError(InvalidRequestError): ... +class DontWrapMixin: ... + +class StatementError(SQLAlchemyError): + statement: Any + params: Any + orig: Any + ismulti: Any + hide_parameters: Any + detail: Any + def __init__( + self, + message, + statement, + params, + orig, + hide_parameters: bool = ..., + code: Incomplete | None = ..., + ismulti: Incomplete | None = ..., + ) -> None: ... + def add_detail(self, msg) -> None: ... + def __reduce__(self): ... + +class DBAPIError(StatementError): + code: str + @classmethod + def instance( + cls, + statement, + params, + orig, + dbapi_base_err, + hide_parameters: bool = ..., + connection_invalidated: bool = ..., + dialect: Incomplete | None = ..., + ismulti: Incomplete | None = ..., + ): ... + def __reduce__(self): ... + connection_invalidated: Any + def __init__( + self, + statement, + params, + orig, + hide_parameters: bool = ..., + connection_invalidated: bool = ..., + code: Incomplete | None = ..., + ismulti: Incomplete | None = ..., + ) -> None: ... + +class InterfaceError(DBAPIError): ... +class DatabaseError(DBAPIError): ... +class DataError(DatabaseError): ... +class OperationalError(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InternalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... +class NotSupportedError(DatabaseError): ... 
+ +class SADeprecationWarning(HasDescriptionCode, DeprecationWarning): + deprecated_since: ClassVar[str | None] + +class Base20DeprecationWarning(SADeprecationWarning): + deprecated_since: ClassVar[str] + +class LegacyAPIWarning(Base20DeprecationWarning): ... +class RemovedIn20Warning(Base20DeprecationWarning): ... +class MovedIn20Warning(RemovedIn20Warning): ... + +class SAPendingDeprecationWarning(PendingDeprecationWarning): + deprecated_since: ClassVar[str | None] + +class SAWarning(HasDescriptionCode, RuntimeWarning): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/associationproxy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/associationproxy.pyi new file mode 100644 index 00000000..d72c6d7e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/associationproxy.pyi @@ -0,0 +1,200 @@ +from _typeshed import Incomplete +from typing import Any + +from ..orm import interfaces +from ..sql.operators import ColumnOperators +from ..util import memoized_property + +def association_proxy(target_collection, attr, **kw): ... + +ASSOCIATION_PROXY: Any + +class AssociationProxy(interfaces.InspectionAttrInfo): + is_attribute: bool + extension_type: Any + target_collection: Any + value_attr: Any + creator: Any + getset_factory: Any + proxy_factory: Any + proxy_bulk_set: Any + cascade_scalar_deletes: Any + key: Any + info: Any + def __init__( + self, + target_collection, + attr, + creator: Incomplete | None = ..., + getset_factory: Incomplete | None = ..., + proxy_factory: Incomplete | None = ..., + proxy_bulk_set: Incomplete | None = ..., + info: Incomplete | None = ..., + cascade_scalar_deletes: bool = ..., + ) -> None: ... + def __get__(self, obj, class_): ... + def __set__(self, obj, values) -> None: ... + def __delete__(self, obj) -> None: ... + def for_class(self, class_, obj: Incomplete | None = ...): ... + +class AssociationProxyInstance: + parent: Any + key: Any + owning_class: Any + target_collection: Any + collection_class: Any + target_class: Any + value_attr: Any + def __init__(self, parent, owning_class, target_class, value_attr) -> None: ... + @classmethod + def for_proxy(cls, parent, owning_class, parent_instance): ... + def __clause_element__(self) -> None: ... + @property + def remote_attr(self): ... + @property + def local_attr(self): ... + @property + def attr(self): ... + @memoized_property + def scalar(self): ... + @property + def info(self): ... + def get(self, obj): ... + def set(self, obj, values) -> None: ... + def delete(self, obj) -> None: ... + def any(self, criterion: Incomplete | None = ..., **kwargs): ... + def has(self, criterion: Incomplete | None = ..., **kwargs): ... + +class AmbiguousAssociationProxyInstance(AssociationProxyInstance): + def get(self, obj): ... + def __eq__(self, obj): ... + def __ne__(self, obj): ... + def any(self, criterion: Incomplete | None = ..., **kwargs) -> None: ... + def has(self, criterion: Incomplete | None = ..., **kwargs) -> None: ... + +class ObjectAssociationProxyInstance(AssociationProxyInstance): + def contains(self, obj): ... + def __eq__(self, obj): ... 
+ def __ne__(self, obj): ... + +class ColumnAssociationProxyInstance(ColumnOperators[Any], AssociationProxyInstance): + def __eq__(self, other) -> ColumnOperators[Any]: ... # type: ignore[override] + def operate(self, op, *other, **kwargs): ... + +class _lazy_collection: + parent: Any + target: Any + def __init__(self, obj, target) -> None: ... + def __call__(self): ... + +class _AssociationCollection: + lazy_collection: Any + creator: Any + getter: Any + setter: Any + parent: Any + def __init__(self, lazy_collection, creator, getter, setter, parent) -> None: ... + @property + def col(self): ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + +class _AssociationList(_AssociationCollection): + def __getitem__(self, index): ... + def __setitem__(self, index, value) -> None: ... + def __delitem__(self, index) -> None: ... + def __contains__(self, value): ... + def __getslice__(self, start, end): ... + def __setslice__(self, start, end, values) -> None: ... + def __delslice__(self, start, end) -> None: ... + def __iter__(self): ... + def append(self, value) -> None: ... + def count(self, value): ... + def extend(self, values) -> None: ... + def insert(self, index, value) -> None: ... + def pop(self, index: int = ...): ... + def remove(self, value) -> None: ... + def reverse(self) -> None: ... + def sort(self) -> None: ... + def clear(self) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __cmp__(self, other): ... + def __add__(self, iterable): ... + def __radd__(self, iterable): ... + def __mul__(self, n): ... + __rmul__: Any + def __iadd__(self, iterable): ... + def __imul__(self, n): ... + def index(self, item, *args): ... + def copy(self): ... + def __hash__(self) -> int: ... + +class _AssociationDict(_AssociationCollection): + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def __contains__(self, key): ... + def has_key(self, key): ... + def __iter__(self): ... + def clear(self) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __cmp__(self, other): ... + def get(self, key, default: Incomplete | None = ...): ... + def setdefault(self, key, default: Incomplete | None = ...): ... + def keys(self): ... + def items(self): ... + def values(self): ... + def pop(self, key, default=...): ... + def popitem(self): ... + def update(self, *a, **kw) -> None: ... + def copy(self): ... + def __hash__(self) -> int: ... + +class _AssociationSet(_AssociationCollection): + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def __contains__(self, value): ... + def __iter__(self): ... + def add(self, value) -> None: ... + def discard(self, value) -> None: ... + def remove(self, value) -> None: ... + def pop(self): ... + def update(self, other) -> None: ... + def __ior__(self, other): ... # type: ignore[misc] + def union(self, other): ... + __or__: Any + def difference(self, other): ... + __sub__: Any + def difference_update(self, other) -> None: ... + def __isub__(self, other): ... # type: ignore[misc] + def intersection(self, other): ... + __and__: Any + def intersection_update(self, other) -> None: ... 
+ def __iand__(self, other): ... # type: ignore[misc] + def symmetric_difference(self, other): ... + __xor__: Any + def symmetric_difference_update(self, other) -> None: ... + def __ixor__(self, other): ... # type: ignore[misc] + def issubset(self, other): ... + def issuperset(self, other): ... + def clear(self) -> None: ... + def copy(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __hash__(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/__init__.pyi new file mode 100644 index 00000000..e065d748 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/__init__.pyi @@ -0,0 +1,15 @@ +from .engine import ( + AsyncConnection as AsyncConnection, + AsyncEngine as AsyncEngine, + AsyncTransaction as AsyncTransaction, + create_async_engine as create_async_engine, +) +from .events import AsyncConnectionEvents as AsyncConnectionEvents, AsyncSessionEvents as AsyncSessionEvents +from .result import AsyncMappingResult as AsyncMappingResult, AsyncResult as AsyncResult, AsyncScalarResult as AsyncScalarResult +from .scoping import async_scoped_session as async_scoped_session +from .session import ( + AsyncSession as AsyncSession, + AsyncSessionTransaction as AsyncSessionTransaction, + async_object_session as async_object_session, + async_session as async_session, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/base.pyi new file mode 100644 index 00000000..fb8ba65e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/base.pyi @@ -0,0 +1,16 @@ +import abc + +class ReversibleProxy: ... + +class StartableContext(abc.ABC, metaclass=abc.ABCMeta): + @abc.abstractmethod + async def start(self, is_ctxmanager: bool = ...): ... + def __await__(self): ... + async def __aenter__(self): ... + @abc.abstractmethod + async def __aexit__(self, type_, value, traceback): ... + +class ProxyComparable(ReversibleProxy): + def __hash__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/engine.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/engine.pyi new file mode 100644 index 00000000..37258ead --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/engine.pyi @@ -0,0 +1,94 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import ProxyComparable, StartableContext + +def create_async_engine(*arg, **kw) -> AsyncEngine: ... + +class AsyncConnectable: ... + +class AsyncConnection(ProxyComparable, StartableContext, AsyncConnectable): + engine: Any + sync_engine: Any + sync_connection: Any + def __init__(self, async_engine, sync_connection: Incomplete | None = ...) -> None: ... 
+ async def start(self, is_ctxmanager: bool = ...): ... + @property + def connection(self) -> None: ... + async def get_raw_connection(self): ... + @property + def info(self): ... + def begin(self): ... + def begin_nested(self): ... + async def invalidate(self, exception: Incomplete | None = ...): ... + async def get_isolation_level(self): ... + async def set_isolation_level(self): ... + def in_transaction(self): ... + def in_nested_transaction(self): ... + def get_transaction(self): ... + def get_nested_transaction(self): ... + async def execution_options(self, **opt): ... + async def commit(self) -> None: ... + async def rollback(self) -> None: ... + async def close(self) -> None: ... + async def exec_driver_sql(self, statement, parameters: Incomplete | None = ..., execution_options=...): ... + async def stream(self, statement, parameters: Incomplete | None = ..., execution_options=...): ... + async def execute(self, statement, parameters: Incomplete | None = ..., execution_options=...): ... + async def scalar(self, statement, parameters: Incomplete | None = ..., execution_options=...): ... + async def scalars(self, statement, parameters: Incomplete | None = ..., execution_options=...): ... + async def stream_scalars(self, statement, parameters: Incomplete | None = ..., execution_options=...): ... + async def run_sync(self, fn, *arg, **kw): ... + def __await__(self): ... + async def __aexit__(self, type_, value, traceback) -> None: ... + # proxied from Connection + dialect: Any + @property + def closed(self): ... + @property + def invalidated(self): ... + @property + def default_isolation_level(self): ... + +class AsyncEngine(ProxyComparable, AsyncConnectable): + class _trans_ctx(StartableContext): + conn: Any + def __init__(self, conn) -> None: ... + transaction: Any + async def start(self, is_ctxmanager: bool = ...): ... + async def __aexit__(self, type_, value, traceback) -> None: ... + sync_engine: Any + def __init__(self, sync_engine) -> None: ... + def begin(self): ... + def connect(self): ... + async def raw_connection(self): ... + def execution_options(self, **opt): ... + async def dispose(self): ... + # proxied from Engine + url: Any + pool: Any + dialect: Any + echo: Any + @property + def engine(self): ... + @property + def name(self): ... + @property + def driver(self): ... + def clear_compiled_cache(self) -> None: ... + def update_execution_options(self, **opt) -> None: ... + def get_execution_options(self): ... + +class AsyncTransaction(ProxyComparable, StartableContext): + connection: Any + sync_transaction: Any + nested: Any + def __init__(self, connection, nested: bool = ...) -> None: ... + @property + def is_valid(self): ... + @property + def is_active(self): ... + async def close(self) -> None: ... + async def rollback(self) -> None: ... + async def commit(self) -> None: ... + async def start(self, is_ctxmanager: bool = ...): ... + async def __aexit__(self, type_, value, traceback) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/events.pyi new file mode 100644 index 00000000..e9a8bf1a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/events.pyi @@ -0,0 +1,5 @@ +from ...engine import events as engine_event +from ...orm import events as orm_event + +class AsyncConnectionEvents(engine_event.ConnectionEvents): ... +class AsyncSessionEvents(orm_event.SessionEvents): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/exc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/exc.pyi new file mode 100644 index 00000000..56f3b638 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/exc.pyi @@ -0,0 +1,5 @@ +from ...exc import InvalidRequestError + +class AsyncMethodRequired(InvalidRequestError): ... +class AsyncContextNotStarted(InvalidRequestError): ... +class AsyncContextAlreadyStarted(InvalidRequestError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/result.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/result.pyi new file mode 100644 index 00000000..43a2a3b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/result.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete + +from ...engine.result import FilterResult + +class AsyncCommon(FilterResult): + async def close(self) -> None: ... + +class AsyncResult(AsyncCommon): + def __init__(self, real_result) -> None: ... + def keys(self): ... + def unique(self, strategy: Incomplete | None = ...): ... + def columns(self, *col_expressions): ... + async def partitions(self, size: Incomplete | None = ...) -> None: ... + async def fetchone(self): ... + async def fetchmany(self, size: Incomplete | None = ...): ... + async def all(self): ... + def __aiter__(self): ... + async def __anext__(self): ... + async def first(self): ... + async def one_or_none(self): ... + async def scalar_one(self): ... + async def scalar_one_or_none(self): ... + async def one(self): ... + async def scalar(self): ... + async def freeze(self): ... + def scalars(self, index: int = ...): ... + def mappings(self): ... + +class AsyncScalarResult(AsyncCommon): + def __init__(self, real_result, index) -> None: ... + def unique(self, strategy: Incomplete | None = ...): ... + async def partitions(self, size: Incomplete | None = ...) -> None: ... + async def fetchall(self): ... + async def fetchmany(self, size: Incomplete | None = ...): ... + async def all(self): ... + def __aiter__(self): ... + async def __anext__(self): ... + async def first(self): ... + async def one_or_none(self): ... + async def one(self): ... + +class AsyncMappingResult(AsyncCommon): + def __init__(self, result) -> None: ... + def keys(self): ... + def unique(self, strategy: Incomplete | None = ...): ... + def columns(self, *col_expressions): ... + async def partitions(self, size: Incomplete | None = ...) -> None: ... + async def fetchall(self): ... + async def fetchone(self): ... 
+ async def fetchmany(self, size: Incomplete | None = ...): ... + async def all(self): ... + def __aiter__(self): ... + async def __anext__(self): ... + async def first(self): ... + async def one_or_none(self): ... + async def one(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/scoping.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/scoping.pyi new file mode 100644 index 00000000..a71390ce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/scoping.pyi @@ -0,0 +1,77 @@ +from _typeshed import Incomplete +from typing import Any + +from ...orm.scoping import ScopedSessionMixin +from ...util import memoized_property + +class async_scoped_session(ScopedSessionMixin): + session_factory: Any + registry: Any + def __init__(self, session_factory, scopefunc) -> None: ... + async def remove(self) -> None: ... + # proxied from Session + @classmethod + async def close_all(cls): ... + @classmethod + def identity_key(cls, *args, **kwargs): ... + @classmethod + def object_session(cls, instance): ... + bind: Any + identity_map: Any + autoflush: Any + def __contains__(self, instance): ... + def __iter__(self): ... + def add(self, instance, _warn: bool = ...) -> None: ... + def add_all(self, instances) -> None: ... + def begin(self, **kw): ... + def begin_nested(self, **kw): ... + async def close(self): ... + async def commit(self): ... + async def connection(self, **kw): ... + async def delete(self, instance): ... + async def execute( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + def expire(self, instance, attribute_names: Incomplete | None = ...) -> None: ... + def expire_all(self) -> None: ... + def expunge(self, instance) -> None: ... + def expunge_all(self) -> None: ... + async def flush(self, objects: Incomplete | None = ...) -> None: ... + async def get( + self, + entity, + ident, + options: Incomplete | None = ..., + populate_existing: bool = ..., + with_for_update: Incomplete | None = ..., + identity_token: Incomplete | None = ..., + ): ... + def get_bind(self, mapper: Incomplete | None = ..., clause: Incomplete | None = ..., bind: Incomplete | None = ..., **kw): ... + def is_modified(self, instance, include_collections: bool = ...): ... + async def merge(self, instance, load: bool = ..., options: Incomplete | None = ...): ... + async def refresh(self, instance, attribute_names: Incomplete | None = ..., with_for_update: Incomplete | None = ...): ... + async def rollback(self): ... + async def scalar( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + async def scalars( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + async def stream( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + async def stream_scalars( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + @property + def dirty(self): ... + @property + def deleted(self): ... + @property + def new(self): ... + @property + def is_active(self): ... + @property + def no_autoflush(self) -> None: ... 
+ @memoized_property + def info(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/session.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/session.pyi new file mode 100644 index 00000000..a7353820 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/session.pyi @@ -0,0 +1,110 @@ +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Self + +from ...util import memoized_property +from .base import ReversibleProxy, StartableContext + +class AsyncSession(ReversibleProxy): + dispatch: Any + bind: Any + binds: Any + sync_session_class: Any + sync_session: Any + def __init__( + self, bind: Incomplete | None = ..., binds: Incomplete | None = ..., sync_session_class: Incomplete | None = ..., **kw + ) -> None: ... + async def refresh(self, instance, attribute_names: Incomplete | None = ..., with_for_update: Incomplete | None = ...): ... + async def run_sync(self, fn, *arg, **kw): ... + async def execute( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + async def scalar( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + async def scalars( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + async def get( + self, + entity, + ident, + options: Incomplete | None = ..., + populate_existing: bool = ..., + with_for_update: Incomplete | None = ..., + identity_token: Incomplete | None = ..., + ): ... + async def stream( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + async def stream_scalars( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + async def delete(self, instance): ... + async def merge(self, instance, load: bool = ..., options: Incomplete | None = ...): ... + async def flush(self, objects: Incomplete | None = ...) -> None: ... + def get_transaction(self): ... + def get_nested_transaction(self): ... + def get_bind(self, mapper: Incomplete | None = ..., clause: Incomplete | None = ..., bind: Incomplete | None = ..., **kw): ... + async def connection(self, **kw): ... + def begin(self, **kw): ... + def begin_nested(self, **kw): ... + async def rollback(self): ... + async def commit(self): ... + async def close(self): ... + @classmethod + async def close_all(cls): ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, type_, value, traceback) -> None: ... + # proxied from Session + identity_map: Any + autoflush: Any + @classmethod + def identity_key(cls, *args, **kwargs): ... + @classmethod + def object_session(cls, instance): ... + def __contains__(self, instance): ... + def __iter__(self): ... + def add(self, instance, _warn: bool = ...) -> None: ... + def add_all(self, instances) -> None: ... + def expire(self, instance, attribute_names: Incomplete | None = ...) -> None: ... + def expire_all(self) -> None: ... + def expunge(self, instance) -> None: ... + def expunge_all(self) -> None: ... + def is_modified(self, instance, include_collections: bool = ...): ... + def in_transaction(self): ... 
+ def in_nested_transaction(self): ... + @property + def no_autoflush(self) -> None: ... + @property + def is_active(self): ... + @property + def dirty(self): ... + @property + def deleted(self): ... + @property + def new(self): ... + @memoized_property + def info(self): ... + +class _AsyncSessionContextManager: + async_session: Any + def __init__(self, async_session) -> None: ... + trans: Any + async def __aenter__(self): ... + async def __aexit__(self, type_, value, traceback) -> None: ... + +class AsyncSessionTransaction(ReversibleProxy, StartableContext): + session: Any + nested: Any + sync_transaction: Any + def __init__(self, session, nested: bool = ...) -> None: ... + @property + def is_active(self): ... + async def rollback(self) -> None: ... + async def commit(self) -> None: ... + async def start(self, is_ctxmanager: bool = ...): ... + async def __aexit__(self, type_, value, traceback) -> None: ... + +def async_object_session(instance): ... +def async_session(session): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/automap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/automap.pyi new file mode 100644 index 00000000..f17b2d3b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/automap.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete +from typing import Any + +def classname_for_table(base, tablename, table): ... +def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): ... +def name_for_collection_relationship(base, local_cls, referred_cls, constraint): ... +def generate_relationship(base, direction, return_fn, attrname, local_cls, referred_cls, **kw): ... + +class AutomapBase: + __abstract__: bool + classes: Any + @classmethod + def prepare( + cls, + autoload_with: Incomplete | None = ..., + engine: Incomplete | None = ..., + reflect: bool = ..., + schema: Incomplete | None = ..., + classname_for_table: Incomplete | None = ..., + collection_class: Incomplete | None = ..., + name_for_scalar_relationship: Incomplete | None = ..., + name_for_collection_relationship: Incomplete | None = ..., + generate_relationship: Incomplete | None = ..., + reflection_options=..., + ) -> None: ... + +def automap_base(declarative_base: Incomplete | None = ..., **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/baked.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/baked.pyi new file mode 100644 index 00000000..2cf3c325 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/baked.pyi @@ -0,0 +1,46 @@ +from _typeshed import Incomplete +from typing import Any + +log: Any + +class Bakery: + cls: Any + cache: Any + def __init__(self, cls_, cache) -> None: ... + def __call__(self, initial_fn, *args): ... + +class BakedQuery: + steps: Any + def __init__(self, bakery, initial_fn, args=...) -> None: ... + @classmethod + def bakery(cls, size: int = ..., _size_alert: Incomplete | None = ...): ... + def __iadd__(self, other): ... + def __add__(self, other): ... + def add_criteria(self, fn, *args): ... + def with_criteria(self, fn, *args): ... + def for_session(self, session): ... + def __call__(self, session): ... + def spoil(self, full: bool = ...): ... + def to_query(self, query_or_session): ... 
+ +class Result: + bq: Any + session: Any + def __init__(self, bq, session) -> None: ... + def params(self, *args, **kw): ... + def with_post_criteria(self, fn): ... + def __iter__(self): ... + def count(self): ... + def scalar(self): ... + def first(self): ... + def one(self): ... + def one_or_none(self): ... + def all(self): ... + def get(self, ident): ... + +def bake_lazy_loaders() -> None: ... +def unbake_lazy_loaders() -> None: ... + +baked_lazyload: Any +baked_lazyload_all: Any +bakery: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/compiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/compiler.pyi new file mode 100644 index 00000000..79b29eea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/compiler.pyi @@ -0,0 +1,8 @@ +from typing import Any + +def compiles(class_, *specs): ... +def deregister(class_) -> None: ... + +class _dispatcher: + specs: Any + def __call__(self, element, compiler, **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/declarative/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/declarative/__init__.pyi new file mode 100644 index 00000000..7eddf860 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/declarative/__init__.pyi @@ -0,0 +1,27 @@ +from ...orm.decl_api import ( + DeclarativeMeta as DeclarativeMeta, + as_declarative as as_declarative, + declarative_base as declarative_base, + declared_attr as declared_attr, + has_inherited_table as has_inherited_table, + synonym_for as synonym_for, +) +from .extensions import ( + AbstractConcreteBase as AbstractConcreteBase, + ConcreteBase as ConcreteBase, + DeferredReflection as DeferredReflection, + instrument_declarative as instrument_declarative, +) + +__all__ = [ + "declarative_base", + "synonym_for", + "has_inherited_table", + "instrument_declarative", + "declared_attr", + "as_declarative", + "ConcreteBase", + "AbstractConcreteBase", + "DeclarativeMeta", + "DeferredReflection", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/declarative/extensions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/declarative/extensions.pyi new file mode 100644 index 00000000..c4fbf0ff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/declarative/extensions.pyi @@ -0,0 +1,14 @@ +def instrument_declarative(cls, cls_registry, metadata) -> None: ... + +class ConcreteBase: + @classmethod + def __declare_first__(cls) -> None: ... + +class AbstractConcreteBase(ConcreteBase): + __no_table__: bool + @classmethod + def __declare_first__(cls) -> None: ... + +class DeferredReflection: + @classmethod + def prepare(cls, engine) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/horizontal_shard.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/horizontal_shard.pyi new file mode 100644 index 00000000..08a80eb0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/horizontal_shard.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete +from typing import Any, Generic, TypeVar + +from ..orm.query import Query +from ..orm.session import Session + +_T = TypeVar("_T") + +class ShardedQuery(Query[_T], Generic[_T]): + id_chooser: Any + query_chooser: Any + execute_chooser: Any + def __init__(self, *args, **kwargs) -> None: ... + def set_shard(self, shard_id): ... + +class ShardedSession(Session): + shard_chooser: Any + id_chooser: Any + execute_chooser: Any + query_chooser: Any + def __init__( + self, + shard_chooser, + id_chooser, + execute_chooser: Incomplete | None = ..., + shards: Incomplete | None = ..., + query_cls=..., + **kwargs, + ): ... + def connection_callable( + self, mapper: Incomplete | None = ..., instance: Incomplete | None = ..., shard_id: Incomplete | None = ..., **kwargs + ): ... + def get_bind(self, mapper: Incomplete | None = ..., shard_id: Incomplete | None = ..., instance: Incomplete | None = ..., clause: Incomplete | None = ..., **kw): ... # type: ignore[override] + def bind_shard(self, shard_id, bind) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/hybrid.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/hybrid.pyi new file mode 100644 index 00000000..eda35d68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/hybrid.pyi @@ -0,0 +1,68 @@ +from _typeshed import Incomplete +from typing import Any + +from ..orm import interfaces + +HYBRID_METHOD: Any +HYBRID_PROPERTY: Any + +class hybrid_method(interfaces.InspectionAttrInfo): + is_attribute: bool + extension_type: Any + func: Any + def __init__(self, func, expr: Incomplete | None = ...) -> None: ... + def __get__(self, instance, owner): ... + expr: Any + def expression(self, expr): ... + +class hybrid_property(interfaces.InspectionAttrInfo): + is_attribute: bool + extension_type: Any + fget: Any + fset: Any + fdel: Any + expr: Any + custom_comparator: Any + update_expr: Any + def __init__( + self, + fget, + fset: Incomplete | None = ..., + fdel: Incomplete | None = ..., + expr: Incomplete | None = ..., + custom_comparator: Incomplete | None = ..., + update_expr: Incomplete | None = ..., + ) -> None: ... + def __get__(self, instance, owner): ... + def __set__(self, instance, value) -> None: ... + def __delete__(self, instance) -> None: ... + @property + def overrides(self): ... + def getter(self, fget): ... + def setter(self, fset): ... + def deleter(self, fdel): ... + def expression(self, expr): ... + def comparator(self, comparator): ... + def update_expression(self, meth): ... + +class Comparator(interfaces.PropComparator[Any]): + property: Any + expression: Any + def __init__(self, expression) -> None: ... + def __clause_element__(self): ... + def adapt_to_entity(self, adapt_to_entity): ... + +_property = property + +class ExprComparator(Comparator): + cls: Any + expression: Any + hybrid: Any + def __init__(self, cls, expression, hybrid) -> None: ... 
+ def __getattr__(self, key: str): ... + @_property + def info(self): ... + @_property + def property(self): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/indexable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/indexable.pyi new file mode 100644 index 00000000..1178ebd4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/indexable.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +from ..ext.hybrid import hybrid_property + +class index_property(hybrid_property): + attr_name: Any + index: Any + default: Any + datatype: Any + onebased: Any + def __init__( + self, attr_name, index, default=..., datatype: Incomplete | None = ..., mutable: bool = ..., onebased: bool = ... + ): ... + def fget(self, instance): ... + def fset(self, instance, value) -> None: ... + def fdel(self, instance) -> None: ... + def expr(self, model): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/instrumentation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/instrumentation.pyi new file mode 100644 index 00000000..d054f2de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/instrumentation.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete +from typing import Any + +from ..orm.instrumentation import ClassManager, InstrumentationFactory + +INSTRUMENTATION_MANAGER: str + +def find_native_user_instrumentation_hook(cls): ... + +instrumentation_finders: Any + +class ExtendedInstrumentationRegistry(InstrumentationFactory): + def unregister(self, class_) -> None: ... + def manager_of_class(self, cls): ... + def state_of(self, instance): ... + def dict_of(self, instance): ... + +class InstrumentationManager: + def __init__(self, class_) -> None: ... + def manage(self, class_, manager) -> None: ... + def unregister(self, class_, manager) -> None: ... + def manager_getter(self, class_): ... + def instrument_attribute(self, class_, key, inst) -> None: ... + def post_configure_attribute(self, class_, key, inst) -> None: ... + def install_descriptor(self, class_, key, inst) -> None: ... + def uninstall_descriptor(self, class_, key) -> None: ... + def install_member(self, class_, key, implementation) -> None: ... + def uninstall_member(self, class_, key) -> None: ... + def instrument_collection_class(self, class_, key, collection_class): ... + def get_instance_dict(self, class_, instance): ... + def initialize_instance_dict(self, class_, instance) -> None: ... + def install_state(self, class_, instance, state) -> None: ... + def remove_state(self, class_, instance) -> None: ... + def state_getter(self, class_): ... + def dict_getter(self, class_): ... + +class _ClassInstrumentationAdapter(ClassManager): + def __init__(self, class_, override) -> None: ... + def manage(self) -> None: ... + def unregister(self) -> None: ... + def manager_getter(self): ... + def instrument_attribute(self, key, inst, propagated: bool = ...) -> None: ... + def post_configure_attribute(self, key) -> None: ... + def install_descriptor(self, key, inst) -> None: ... + def uninstall_descriptor(self, key) -> None: ... 
+ def install_member(self, key, implementation) -> None: ... + def uninstall_member(self, key) -> None: ... + def instrument_collection_class(self, key, collection_class): ... + def initialize_collection(self, key, state, factory): ... + def new_instance(self, state: Incomplete | None = ...): ... + def setup_instance(self, instance, state: Incomplete | None = ...): ... + def teardown_instance(self, instance) -> None: ... + def has_state(self, instance): ... + def state_getter(self): ... + def dict_getter(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/mutable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/mutable.pyi new file mode 100644 index 00000000..c1e4be44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/mutable.pyi @@ -0,0 +1,64 @@ +from typing import Any + +class MutableBase: + @classmethod + def coerce(cls, key, value) -> None: ... + +class Mutable(MutableBase): + def changed(self) -> None: ... + @classmethod + def associate_with_attribute(cls, attribute) -> None: ... + @classmethod + def associate_with(cls, sqltype) -> None: ... + @classmethod + def as_mutable(cls, sqltype): ... + +class MutableComposite(MutableBase): + def changed(self) -> None: ... + +class MutableDict(Mutable, dict[Any, Any]): + def __setitem__(self, key, value) -> None: ... + def setdefault(self, key, value): ... + def __delitem__(self, key) -> None: ... + def update(self, *a, **kw) -> None: ... + def pop(self, *arg): ... + def popitem(self): ... + def clear(self) -> None: ... + @classmethod + def coerce(cls, key, value): ... + +class MutableList(Mutable, list[Any]): + def __reduce_ex__(self, proto): ... + def __setitem__(self, index, value) -> None: ... + def __setslice__(self, start, end, value) -> None: ... + def __delitem__(self, index) -> None: ... + def __delslice__(self, start, end) -> None: ... + def pop(self, *arg): ... + def append(self, x) -> None: ... + def extend(self, x) -> None: ... + def __iadd__(self, x): ... # type: ignore[misc] + def insert(self, i, x) -> None: ... + def remove(self, i) -> None: ... + def clear(self) -> None: ... + def sort(self, **kw) -> None: ... + def reverse(self) -> None: ... + @classmethod + def coerce(cls, index, value): ... + +class MutableSet(Mutable, set[Any]): + def update(self, *arg) -> None: ... + def intersection_update(self, *arg) -> None: ... + def difference_update(self, *arg) -> None: ... + def symmetric_difference_update(self, *arg) -> None: ... + def __ior__(self, other): ... # type: ignore[misc] + def __iand__(self, other): ... # type: ignore[misc] + def __ixor__(self, other): ... # type: ignore[misc] + def __isub__(self, other): ... # type: ignore[misc] + def add(self, elem) -> None: ... + def remove(self, elem) -> None: ... + def discard(self, elem) -> None: ... + def pop(self, *arg): ... + def clear(self) -> None: ... + @classmethod + def coerce(cls, index, value): ... + def __reduce_ex__(self, proto): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/orderinglist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/orderinglist.pyi new file mode 100644 index 00000000..7c86012b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/orderinglist.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from typing import Any + +def ordering_list(attr, count_from: Incomplete | None = ..., **kw): ... + +class OrderingList(list[Any]): + ordering_attr: Any + ordering_func: Any + reorder_on_append: Any + def __init__( + self, ordering_attr: Incomplete | None = ..., ordering_func: Incomplete | None = ..., reorder_on_append: bool = ... + ) -> None: ... + def reorder(self) -> None: ... + def append(self, entity) -> None: ... + def insert(self, index, entity) -> None: ... + def remove(self, entity) -> None: ... + def pop(self, index: int = ...): ... # type: ignore[override] + def __setitem__(self, index, entity) -> None: ... + def __delitem__(self, index) -> None: ... + def __setslice__(self, start, end, values) -> None: ... + def __delslice__(self, start, end) -> None: ... + def __reduce__(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/serializer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/serializer.pyi new file mode 100644 index 00000000..4177beb2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/ext/serializer.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +def Serializer(*args, **kw): ... +def Deserializer( + file, metadata: Incomplete | None = ..., scoped_session: Incomplete | None = ..., engine: Incomplete | None = ... +): ... +def dumps(obj, protocol=...): ... +def loads(data, metadata: Incomplete | None = ..., scoped_session: Incomplete | None = ..., engine: Incomplete | None = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/future/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/future/__init__.pyi new file mode 100644 index 00000000..00f3a300 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/future/__init__.pyi @@ -0,0 +1,5 @@ +from typing import Any + +from .engine import Connection as Connection, Engine as Engine, create_engine as create_engine + +select: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/future/engine.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/future/engine.pyi new file mode 100644 index 00000000..f1b3362d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/future/engine.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete +from typing import Any, overload +from typing_extensions import Literal + +from ..engine import Connection as _LegacyConnection, Engine as _LegacyEngine +from ..engine.base import OptionEngineMixin +from ..engine.mock import MockConnection +from ..engine.url import URL + +NO_OPTIONS: Any + +@overload +def create_engine(url: URL | str, *, strategy: Literal["mock"], **kwargs) -> MockConnection: ... # type: ignore[misc] +@overload +def create_engine( + url: URL | str, *, module: Incomplete | None = ..., enable_from_linting: bool = ..., future: bool = ..., **kwargs +) -> Engine: ... + +class Connection(_LegacyConnection): + def begin(self): ... + def begin_nested(self): ... + def commit(self) -> None: ... + def rollback(self) -> None: ... + def close(self) -> None: ... + def execute(self, statement, parameters: Incomplete | None = ..., execution_options: Incomplete | None = ...): ... # type: ignore[override] + def scalar(self, statement, parameters: Incomplete | None = ..., execution_options: Incomplete | None = ...): ... # type: ignore[override] + +class Engine(_LegacyEngine): + transaction: Any + run_callable: Any + execute: Any + scalar: Any + table_names: Any + has_table: Any + def begin(self) -> None: ... # type: ignore[override] + def connect(self): ... + +class OptionEngine(OptionEngineMixin, Engine): ... # type: ignore[misc] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/future/orm/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/future/orm/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/inspection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/inspection.pyi new file mode 100644 index 00000000..d758818c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/inspection.pyi @@ -0,0 +1 @@ +def inspect(subject, raiseerr: bool = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/log.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/log.pyi new file mode 100644 index 00000000..98b20ef9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/log.pyi @@ -0,0 +1,39 @@ +from _typeshed import Unused +from logging import Logger +from typing import Any, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +_ClsT = TypeVar("_ClsT", bound=type) +_EchoFlag: TypeAlias = bool | Literal["debug"] | None + +rootlogger: Any + +def class_logger(cls: _ClsT) -> _ClsT: ... + +class Identified: + logging_name: str | None + +class InstanceLogger: + echo: _EchoFlag + logger: Logger + def __init__(self, echo: _EchoFlag, name: str | None) -> None: ... + def debug(self, msg, *args, **kwargs) -> None: ... + def info(self, msg, *args, **kwargs) -> None: ... + def warning(self, msg, *args, **kwargs) -> None: ... + warn = warning + def error(self, msg, *args, **kwargs) -> None: ... + def exception(self, msg, *args, **kwargs) -> None: ... + def critical(self, msg, *args, **kwargs) -> None: ... + def log(self, level, msg, *args, **kwargs) -> None: ... + def isEnabledFor(self, level): ... + def getEffectiveLevel(self): ... + +def instance_logger(instance: Identified, echoflag: _EchoFlag = ...) -> None: ... + +class echo_property: + __doc__: str + @overload + def __get__(self, instance: None, owner: Unused) -> Self: ... + @overload + def __get__(self, instance: Identified, owner: Unused) -> _EchoFlag: ... + def __set__(self, instance: Identified, value: _EchoFlag) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/__init__.pyi new file mode 100644 index 00000000..6e32aa68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/__init__.pyi @@ -0,0 +1,126 @@ +from _typeshed import Incomplete +from typing import Any + +from ..util.langhelpers import public_factory as public_factory +from . 
import exc as exc, strategy_options as strategy_options +from .attributes import ( + AttributeEvent as AttributeEvent, + InstrumentedAttribute as InstrumentedAttribute, + Mapped as Mapped, + QueryableAttribute as QueryableAttribute, +) +from .context import QueryContext as QueryContext +from .decl_api import ( + DeclarativeMeta as DeclarativeMeta, + as_declarative as as_declarative, + declarative_base as declarative_base, + declarative_mixin as declarative_mixin, + declared_attr as declared_attr, + has_inherited_table as has_inherited_table, + registry as registry, + synonym_for as synonym_for, +) +from .descriptor_props import CompositeProperty as CompositeProperty, SynonymProperty as SynonymProperty +from .dynamic import AppenderQuery as AppenderQuery +from .events import ( + AttributeEvents as AttributeEvents, + InstanceEvents as InstanceEvents, + InstrumentationEvents as InstrumentationEvents, + MapperEvents as MapperEvents, + QueryEvents as QueryEvents, + SessionEvents as SessionEvents, +) +from .identity import IdentityMap as IdentityMap +from .instrumentation import ClassManager as ClassManager +from .interfaces import ( + EXT_CONTINUE as EXT_CONTINUE, + EXT_SKIP as EXT_SKIP, + EXT_STOP as EXT_STOP, + MANYTOMANY as MANYTOMANY, + MANYTOONE as MANYTOONE, + NOT_EXTENSION as NOT_EXTENSION, + ONETOMANY as ONETOMANY, + InspectionAttr as InspectionAttr, + InspectionAttrInfo as InspectionAttrInfo, + MapperProperty as MapperProperty, + PropComparator as PropComparator, + UserDefinedOption as UserDefinedOption, +) +from .loading import merge_frozen_result as merge_frozen_result, merge_result as merge_result +from .mapper import ( + Mapper as Mapper, + class_mapper as class_mapper, + configure_mappers as configure_mappers, + reconstructor as reconstructor, + validates as validates, +) +from .properties import ColumnProperty as ColumnProperty +from .query import AliasOption as AliasOption, FromStatement as FromStatement, Query as Query +from .relationships import RelationshipProperty as RelationshipProperty, foreign as foreign, remote as remote +from .scoping import scoped_session as scoped_session +from .session import ( + ORMExecuteState as ORMExecuteState, + Session as Session, + SessionTransaction as SessionTransaction, + close_all_sessions as close_all_sessions, + make_transient as make_transient, + make_transient_to_detached as make_transient_to_detached, + object_session as object_session, + sessionmaker as sessionmaker, +) +from .state import AttributeState as AttributeState, InstanceState as InstanceState +from .strategy_options import Load as Load +from .unitofwork import UOWTransaction as UOWTransaction +from .util import ( + Bundle as Bundle, + CascadeOptions as CascadeOptions, + LoaderCriteriaOption as LoaderCriteriaOption, + aliased as aliased, + join as join, + object_mapper as object_mapper, + outerjoin as outerjoin, + polymorphic_union as polymorphic_union, + was_deleted as was_deleted, + with_parent as with_parent, + with_polymorphic as with_polymorphic, +) + +def create_session(bind: Incomplete | None = ..., **kwargs): ... + +with_loader_criteria: Any +relationship: Any + +def relation(*arg, **kw): ... +def dynamic_loader(argument, **kw): ... + +column_property: Any +composite: Any + +def backref(name, **kwargs): ... +def deferred(*columns, **kw): ... +def query_expression(default_expr=...): ... + +mapper: Any +synonym: Any + +def clear_mappers() -> None: ... 
+ +joinedload: Any +contains_eager: Any +defer: Any +undefer: Any +undefer_group: Any +with_expression: Any +load_only: Any +lazyload: Any +subqueryload: Any +selectinload: Any +immediateload: Any +noload: Any +raiseload: Any +defaultload: Any +selectin_polymorphic: Any + +def eagerload(*args, **kwargs): ... + +contains_alias: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/attributes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/attributes.pyi new file mode 100644 index 00000000..c054153e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/attributes.pyi @@ -0,0 +1,253 @@ +from _typeshed import Incomplete +from typing import Any, Generic, NamedTuple, TypeVar + +from ..sql import base as sql_base, roles, traversals +from ..util import memoized_property +from . import interfaces + +_T = TypeVar("_T") + +class NoKey(str): ... + +NO_KEY: Any + +class QueryableAttribute( + interfaces._MappedAttribute, + interfaces.InspectionAttr, + interfaces.PropComparator[Any], + traversals.HasCopyInternals, + roles.JoinTargetRole, + roles.OnClauseRole, + sql_base.Immutable, + sql_base.MemoizedHasCacheKey, +): + is_attribute: bool + __visit_name__: str + class_: Any + key: Any + impl: Any + comparator: Any + def __init__( + self, + class_, + key, + parententity, + impl: Incomplete | None = ..., + comparator: Incomplete | None = ..., + of_type: Incomplete | None = ..., + extra_criteria=..., + ) -> None: ... + def __reduce__(self): ... + def get_history(self, instance, passive=...): ... + @memoized_property + def info(self): ... + @memoized_property + def parent(self): ... + @memoized_property + def expression(self): ... + def __clause_element__(self): ... + def adapt_to_entity(self, adapt_to_entity): ... + def of_type(self, entity): ... + def and_(self, *other): ... + def label(self, name): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... + def hasparent(self, state, optimistic: bool = ...): ... + def __getattr__(self, key: str): ... + @memoized_property + def property(self): ... + +class Mapped(QueryableAttribute, Generic[_T]): + def __get__(self, instance, owner) -> None: ... + def __set__(self, instance, value) -> None: ... + def __delete__(self, instance) -> None: ... + +class InstrumentedAttribute(Mapped[Any]): + inherit_cache: bool + def __set__(self, instance, value) -> None: ... + def __delete__(self, instance) -> None: ... + def __get__(self, instance, owner): ... + +class _HasEntityNamespace(NamedTuple): + entity_namespace: Any + +class HasEntityNamespace(_HasEntityNamespace): + is_mapper: bool + is_aliased_class: bool + +def create_proxied_attribute(descriptor): ... + +OP_REMOVE: Any +OP_APPEND: Any +OP_REPLACE: Any +OP_BULK_REPLACE: Any +OP_MODIFIED: Any + +class AttributeEvent: + impl: Any + op: Any + parent_token: Any + def __init__(self, attribute_impl, op) -> None: ... + def __eq__(self, other): ... + @property + def key(self): ... + def hasparent(self, state): ... 
+ +Event = AttributeEvent + +class AttributeImpl: + class_: Any + key: Any + callable_: Any + dispatch: Any + trackparent: Any + parent_token: Any + send_modified_events: Any + is_equal: Any + accepts_scalar_loader: Any + load_on_unexpire: Any + def __init__( + self, + class_, + key, + callable_, + dispatch, + trackparent: bool = ..., + compare_function: Incomplete | None = ..., + active_history: bool = ..., + parent_token: Incomplete | None = ..., + load_on_unexpire: bool = ..., + send_modified_events: bool = ..., + accepts_scalar_loader: Incomplete | None = ..., + **kwargs, + ) -> None: ... + active_history: Any + def hasparent(self, state, optimistic: bool = ...): ... + def sethasparent(self, state, parent_state, value) -> None: ... + def get_history(self, state, dict_, passive=...) -> None: ... + def get_all_pending(self, state, dict_, passive=...) -> None: ... + def get(self, state, dict_, passive=...): ... + def append(self, state, dict_, value, initiator, passive=...) -> None: ... + def remove(self, state, dict_, value, initiator, passive=...) -> None: ... + def pop(self, state, dict_, value, initiator, passive=...) -> None: ... + def set(self, state, dict_, value, initiator, passive=..., check_old: Incomplete | None = ..., pop: bool = ...) -> None: ... + def get_committed_value(self, state, dict_, passive=...): ... + def set_committed_value(self, state, dict_, value): ... + +class ScalarAttributeImpl(AttributeImpl): + default_accepts_scalar_loader: bool + uses_objects: bool + supports_population: bool + collection: bool + dynamic: bool + def __init__(self, *arg, **kw) -> None: ... + def delete(self, state, dict_) -> None: ... + def get_history(self, state, dict_, passive=...): ... + def set(self, state, dict_, value, initiator, passive=..., check_old: Incomplete | None = ..., pop: bool = ...) -> None: ... + def fire_replace_event(self, state, dict_, value, previous, initiator): ... + def fire_remove_event(self, state, dict_, value, initiator) -> None: ... + @property + def type(self) -> None: ... + +class ScalarObjectAttributeImpl(ScalarAttributeImpl): + default_accepts_scalar_loader: bool + uses_objects: bool + supports_population: bool + collection: bool + def delete(self, state, dict_) -> None: ... + def get_history(self, state, dict_, passive=...): ... + def get_all_pending(self, state, dict_, passive=...): ... + def set(self, state, dict_, value, initiator, passive=..., check_old: Incomplete | None = ..., pop: bool = ...) -> None: ... + def fire_remove_event(self, state, dict_, value, initiator) -> None: ... + def fire_replace_event(self, state, dict_, value, previous, initiator): ... + +class CollectionAttributeImpl(AttributeImpl): + default_accepts_scalar_loader: bool + uses_objects: bool + supports_population: bool + collection: bool + dynamic: bool + copy: Any + collection_factory: Any + def __init__( + self, + class_, + key, + callable_, + dispatch, + typecallable: Incomplete | None = ..., + trackparent: bool = ..., + copy_function: Incomplete | None = ..., + compare_function: Incomplete | None = ..., + **kwargs, + ) -> None: ... + def get_history(self, state, dict_, passive=...): ... + def get_all_pending(self, state, dict_, passive=...): ... + def fire_append_event(self, state, dict_, value, initiator): ... + def fire_append_wo_mutation_event(self, state, dict_, value, initiator): ... + def fire_pre_remove_event(self, state, dict_, initiator) -> None: ... + def fire_remove_event(self, state, dict_, value, initiator) -> None: ... 
+ def delete(self, state, dict_) -> None: ... + def append(self, state, dict_, value, initiator, passive=...) -> None: ... + def remove(self, state, dict_, value, initiator, passive=...) -> None: ... + def pop(self, state, dict_, value, initiator, passive=...) -> None: ... + def set( + self, + state, + dict_, + value, + initiator: Incomplete | None = ..., + passive=..., + check_old: Incomplete | None = ..., + pop: bool = ..., + _adapt: bool = ..., + ) -> None: ... + def set_committed_value(self, state, dict_, value): ... + def get_collection(self, state, dict_, user_data: Incomplete | None = ..., passive=...): ... + +def backref_listeners(attribute, key, uselist): ... + +class History: + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def empty(self): ... + def sum(self): ... + def non_deleted(self): ... + def non_added(self): ... + def has_changes(self): ... + def as_state(self): ... + @classmethod + def from_scalar_attribute(cls, attribute, state, current): ... + @classmethod + def from_object_attribute(cls, attribute, state, current, original=...): ... + @classmethod + def from_collection(cls, attribute, state, current): ... + +HISTORY_BLANK: Any + +def get_history(obj, key, passive=...): ... +def get_state_history(state, key, passive=...): ... +def has_parent(cls, obj, key, optimistic: bool = ...): ... +def register_attribute(class_, key, **kw): ... +def register_attribute_impl( + class_, + key, + uselist: bool = ..., + callable_: Incomplete | None = ..., + useobject: bool = ..., + impl_class: Incomplete | None = ..., + backref: Incomplete | None = ..., + **kw, +): ... +def register_descriptor( + class_, key, comparator: Incomplete | None = ..., parententity: Incomplete | None = ..., doc: Incomplete | None = ... +): ... +def unregister_attribute(class_, key) -> None: ... +def init_collection(obj, key): ... +def init_state_collection(state, dict_, key): ... +def set_committed_value(instance, key, value) -> None: ... +def set_attribute(instance, key, value, initiator: Incomplete | None = ...) -> None: ... +def get_attribute(instance, key): ... +def del_attribute(instance, key) -> None: ... +def flag_modified(instance, key) -> None: ... +def flag_dirty(instance) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/base.pyi new file mode 100644 index 00000000..4d247f53 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/base.pyi @@ -0,0 +1,75 @@ +from typing import Any + +from ..util import memoized_property + +PASSIVE_NO_RESULT: Any +PASSIVE_CLASS_MISMATCH: Any +ATTR_WAS_SET: Any +ATTR_EMPTY: Any +NO_VALUE: Any +NEVER_SET: Any +NO_CHANGE: Any +CALLABLES_OK: Any +SQL_OK: Any +RELATED_OBJECT_OK: Any +INIT_OK: Any +NON_PERSISTENT_OK: Any +LOAD_AGAINST_COMMITTED: Any +NO_AUTOFLUSH: Any +NO_RAISE: Any +DEFERRED_HISTORY_LOAD: Any +PASSIVE_OFF: Any +PASSIVE_RETURN_NO_VALUE: Any +PASSIVE_NO_INITIALIZE: Any +PASSIVE_NO_FETCH: Any +PASSIVE_NO_FETCH_RELATED: Any +PASSIVE_ONLY_PERSISTENT: Any +DEFAULT_MANAGER_ATTR: str +DEFAULT_STATE_ATTR: str +EXT_CONTINUE: Any +EXT_STOP: Any +EXT_SKIP: Any +ONETOMANY: Any +MANYTOONE: Any +MANYTOMANY: Any +NOT_EXTENSION: Any + +_never_set: frozenset[Any] +_none_set: frozenset[Any] + +def manager_of_class(cls): ... + +instance_state: Any +instance_dict: Any + +def instance_str(instance): ... 
+def state_str(state): ... +def state_class_str(state): ... +def attribute_str(instance, attribute): ... +def state_attribute_str(state, attribute): ... +def object_mapper(instance): ... +def object_state(instance): ... +def _class_to_mapper(class_or_mapper): ... +def _mapper_or_none(entity): ... +def _is_mapped_class(entity): ... + +_state_mapper: Any + +def class_mapper(class_, configure: bool = ...): ... + +class InspectionAttr: + is_selectable: bool + is_aliased_class: bool + is_instance: bool + is_mapper: bool + is_bundle: bool + is_property: bool + is_attribute: bool + is_clause_element: bool + extension_type: Any + +class InspectionAttrInfo(InspectionAttr): + @memoized_property + def info(self): ... + +class _MappedAttribute: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/clsregistry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/clsregistry.pyi new file mode 100644 index 00000000..b21f02a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/clsregistry.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete +from typing import Any + +def add_class(classname, cls, decl_class_registry) -> None: ... +def remove_class(classname, cls, decl_class_registry) -> None: ... + +class _MultipleClassMarker: + on_remove: Any + contents: Any + def __init__(self, classes, on_remove: Incomplete | None = ...) -> None: ... + def remove_item(self, cls) -> None: ... + def __iter__(self): ... + def attempt_get(self, path, key): ... + def add_item(self, item) -> None: ... + +class _ModuleMarker: + parent: Any + name: Any + contents: Any + mod_ns: Any + path: Any + def __init__(self, name, parent) -> None: ... + def __contains__(self, name): ... + def __getitem__(self, name): ... + def resolve_attr(self, key): ... + def get_module(self, name): ... + def add_class(self, name, cls): ... + def remove_class(self, name, cls) -> None: ... + +class _ModNS: + def __init__(self, parent) -> None: ... + def __getattr__(self, key: str): ... + +class _GetColumns: + cls: Any + def __init__(self, cls) -> None: ... + def __getattr__(self, key: str): ... + +class _GetTable: + key: Any + metadata: Any + def __init__(self, key, metadata) -> None: ... + def __getattr__(self, key: str): ... + +class _class_resolver: + cls: Any + prop: Any + arg: Any + fallback: Any + favor_tables: Any + def __init__(self, cls, prop, fallback, arg, favor_tables: bool = ...) -> None: ... + def __call__(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/collections.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/collections.pyi new file mode 100644 index 00000000..0160b491 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/collections.pyi @@ -0,0 +1,92 @@ +from _typeshed import Incomplete +from typing import Any + +class _PlainColumnGetter: + cols: Any + composite: Any + def __init__(self, cols) -> None: ... + def __reduce__(self): ... + def __call__(self, value): ... + +class _SerializableColumnGetter: + colkeys: Any + composite: Any + def __init__(self, colkeys) -> None: ... + def __reduce__(self): ... + def __call__(self, value): ... 
+ +class _SerializableColumnGetterV2(_PlainColumnGetter): + colkeys: Any + composite: Any + def __init__(self, colkeys) -> None: ... + def __reduce__(self): ... + +def column_mapped_collection(mapping_spec): ... + +class _SerializableAttrGetter: + name: Any + getter: Any + def __init__(self, name) -> None: ... + def __call__(self, target): ... + def __reduce__(self): ... + +def attribute_mapped_collection(attr_name): ... +def mapped_collection(keyfunc): ... + +class collection: + @staticmethod + def appender(fn): ... + @staticmethod + def remover(fn): ... + @staticmethod + def iterator(fn): ... + @staticmethod + def internally_instrumented(fn): ... + @staticmethod + def converter(fn): ... + @staticmethod + def adds(arg): ... + @staticmethod + def replaces(arg): ... + @staticmethod + def removes(arg): ... + @staticmethod + def removes_return(): ... + +collection_adapter: Any + +class CollectionAdapter: + attr: Any + owner_state: Any + invalidated: bool + empty: bool + def __init__(self, attr, owner_state, data) -> None: ... + @property + def data(self): ... + def bulk_appender(self): ... + def append_with_event(self, item, initiator: Incomplete | None = ...) -> None: ... + def append_without_event(self, item) -> None: ... + def append_multiple_without_event(self, items) -> None: ... + def bulk_remover(self): ... + def remove_with_event(self, item, initiator: Incomplete | None = ...) -> None: ... + def remove_without_event(self, item) -> None: ... + def clear_with_event(self, initiator: Incomplete | None = ...) -> None: ... + def clear_without_event(self) -> None: ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def fire_append_wo_mutation_event(self, item, initiator: Incomplete | None = ...): ... + def fire_append_event(self, item, initiator: Incomplete | None = ...): ... + def fire_remove_event(self, item, initiator: Incomplete | None = ...) -> None: ... + def fire_pre_remove_event(self, initiator: Incomplete | None = ...) -> None: ... + +class InstrumentedList(list[Any]): ... +class InstrumentedSet(set[Any]): ... +class InstrumentedDict(dict[Any, Any]): ... + +class MappedCollection(dict[Any, Any]): + keyfunc: Any + def __init__(self, keyfunc) -> None: ... + def set(self, value, _sa_initiator: Incomplete | None = ...) -> None: ... + def remove(self, value, _sa_initiator: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/context.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/context.pyi new file mode 100644 index 00000000..423eeb98 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/context.pyi @@ -0,0 +1,211 @@ +from _typeshed import Incomplete +from typing import Any + +from ..sql.base import CacheableOptions, CompileState, Options +from ..sql.selectable import SelectState + +LABEL_STYLE_LEGACY_ORM: Any + +class QueryContext: + class default_load_options(Options): ... 
+ load_options: Any + execution_options: Any + bind_arguments: Any + compile_state: Any + query: Any + session: Any + loaders_require_buffering: bool + loaders_require_uniquing: bool + params: Any + create_eager_joins: Any + propagated_loader_options: Any + attributes: Any + runid: Any + partials: Any + post_load_paths: Any + autoflush: Any + populate_existing: Any + invoke_all_eagers: Any + version_check: Any + refresh_state: Any + yield_per: Any + identity_token: Any + def __init__( + self, + compile_state, + statement, + params, + session, + load_options, + execution_options: Incomplete | None = ..., + bind_arguments: Incomplete | None = ..., + ) -> None: ... + +class ORMCompileState(CompileState): + class default_compile_options(CacheableOptions): ... + current_path: Any + def __init__(self, *arg, **kw) -> None: ... + @classmethod + def create_for_statement(cls, statement_container, compiler, **kw) -> None: ... # type: ignore[override] + @classmethod + def get_column_descriptions(cls, statement): ... + @classmethod + def orm_pre_session_exec(cls, session, statement, params, execution_options, bind_arguments, is_reentrant_invoke): ... + @classmethod + def orm_setup_cursor_result(cls, session, statement, params, execution_options, bind_arguments, result): ... + +class ORMFromStatementCompileState(ORMCompileState): + multi_row_eager_loaders: bool + compound_eager_adapter: Any + extra_criteria_entities: Any + eager_joins: Any + use_legacy_query_style: Any + statement_container: Any + requested_statement: Any + dml_table: Any + compile_options: Any + statement: Any + current_path: Any + attributes: Any + global_attributes: Any + primary_columns: Any + secondary_columns: Any + dedupe_columns: Any + create_eager_joins: Any + order_by: Any + @classmethod + def create_for_statement(cls, statement_container, compiler, **kw): ... + +class ORMSelectCompileState(ORMCompileState, SelectState): + multi_row_eager_loaders: bool + compound_eager_adapter: Any + correlate: Any + correlate_except: Any + global_attributes: Any + select_statement: Any + for_statement: Any + use_legacy_query_style: Any + compile_options: Any + label_style: Any + current_path: Any + eager_order_by: Any + attributes: Any + primary_columns: Any + secondary_columns: Any + dedupe_columns: Any + eager_joins: Any + extra_criteria_entities: Any + create_eager_joins: Any + from_clauses: Any + @classmethod + def create_for_statement(cls, statement, compiler, **kw): ... + @classmethod + def determine_last_joined_entity(cls, statement): ... + @classmethod + def all_selected_columns(cls, statement) -> None: ... + @classmethod + def get_columns_clause_froms(cls, statement): ... + @classmethod + def from_statement(cls, statement, from_statement): ... + +class _QueryEntity: + use_id_for_hash: bool + @classmethod + def to_compile_state(cls, compile_state, entities, entities_collection, is_current_entities): ... + +class _MapperEntity(_QueryEntity): + expr: Any + mapper: Any + entity_zero: Any + is_aliased_class: Any + path: Any + selectable: Any + def __init__(self, compile_state, entity, entities_collection, is_current_entities) -> None: ... + supports_single_entity: bool + use_id_for_hash: bool + @property + def type(self): ... + @property + def entity_zero_or_selectable(self): ... + def corresponds_to(self, entity): ... + def row_processor(self, context, result): ... + def setup_compile_state(self, compile_state) -> None: ... 
+ +class _BundleEntity(_QueryEntity): + bundle: Any + expr: Any + type: Any + supports_single_entity: Any + def __init__( + self, + compile_state, + expr, + entities_collection, + is_current_entities: bool, + setup_entities: bool = ..., + parent_bundle: Incomplete | None = ..., + ) -> None: ... + @property + def mapper(self): ... + @property + def entity_zero(self): ... + def corresponds_to(self, entity): ... + @property + def entity_zero_or_selectable(self): ... + def setup_compile_state(self, compile_state) -> None: ... + def row_processor(self, context, result): ... + +class _ColumnEntity(_QueryEntity): + raw_column_index: Any + translate_raw_column: Any + @property + def type(self): ... + def row_processor(self, context, result): ... + +class _RawColumnEntity(_ColumnEntity): + entity_zero: Any + mapper: Any + supports_single_entity: bool + expr: Any + raw_column_index: Any + translate_raw_column: Any + column: Any + entity_zero_or_selectable: Any + def __init__( + self, + compile_state, + column, + entities_collection, + raw_column_index, + is_current_entities: bool, + parent_bundle: Incomplete | None = ..., + ) -> None: ... + def corresponds_to(self, entity): ... + def setup_compile_state(self, compile_state) -> None: ... + +class _ORMColumnEntity(_ColumnEntity): + supports_single_entity: bool + expr: Any + translate_raw_column: bool + raw_column_index: Any + entity_zero_or_selectable: Any + entity_zero: Any + mapper: Any + column: Any + def __init__( + self, + compile_state, + column, + entities_collection, + parententity, + raw_column_index, + is_current_entities: bool, + parent_bundle: Incomplete | None = ..., + ) -> None: ... + def corresponds_to(self, entity): ... + def setup_compile_state(self, compile_state) -> None: ... + +class _IdentityTokenEntity(_ORMColumnEntity): + translate_raw_column: bool + def setup_compile_state(self, compile_state) -> None: ... + def row_processor(self, context, result): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/decl_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/decl_api.pyi new file mode 100644 index 00000000..dc181862 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/decl_api.pyi @@ -0,0 +1,146 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from typing import Any, ClassVar, TypeVar, overload +from typing_extensions import TypeAlias + +from ..engine.interfaces import Connectable +from ..sql.schema import MetaData +from ..util import hybridproperty +from . import interfaces + +_ClsT = TypeVar("_ClsT", bound=type[Any]) +_DeclT = TypeVar("_DeclT", bound=type[_DeclarativeBase]) + +# Dynamic class as created by registry.generate_base() via DeclarativeMeta +# or another metaclass. This class does not exist at runtime. +class _DeclarativeBase(Any): # super classes are dynamic + registry: ClassVar[registry] + metadata: ClassVar[MetaData] + __abstract__: ClassVar[bool] + # not always existing: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + __mapper_cls__: ClassVar[Any] + __class_getitem__: ClassVar[Any] + +# Meta class (or function) that creates a _DeclarativeBase class. +_DeclarativeBaseMeta: TypeAlias = Callable[[str, tuple[type[Any], ...], dict[str, Any]], _DeclT] + +def has_inherited_table(cls: type[Any]) -> bool: ... 
+ +class DeclarativeMeta(type): + def __init__(cls, classname: str, bases: tuple[type[Any], ...], dict_: dict[str, Any], **kw: Unused) -> None: ... + def __setattr__(cls, key: str, value: Any) -> None: ... + def __delattr__(cls, key: str) -> None: ... + +def synonym_for(name, map_column: bool = ...): ... + +class declared_attr(interfaces._MappedAttribute, property): + def __init__(self, fget, cascading: bool = ...) -> None: ... + def __get__(self, self_, cls): ... + @hybridproperty + def cascading(self): ... + +class _stateful_declared_attr(declared_attr): + kw: Any + def __init__(self, **kw) -> None: ... + def __call__(self, fn): ... + +def declarative_mixin(cls: _ClsT) -> _ClsT: ... +@overload +def declarative_base( + bind: Connectable | None = ..., + metadata: MetaData | None = ..., + mapper: Incomplete | None = ..., + cls: type[Any] | tuple[type[Any], ...] = ..., + name: str = ..., + constructor: Callable[..., None] = ..., + class_registry: dict[str, type[Any]] | None = ..., +) -> type[_DeclarativeBase]: ... +@overload +def declarative_base( + bind: Connectable | None = ..., + metadata: MetaData | None = ..., + mapper: Incomplete | None = ..., + cls: type[Any] | tuple[type[Any], ...] = ..., + name: str = ..., + constructor: Callable[..., None] = ..., + class_registry: dict[str, type[Any]] | None = ..., + *, + metaclass: _DeclarativeBaseMeta[_DeclT], +) -> _DeclT: ... +@overload +def declarative_base( + bind: Connectable | None, + metadata: MetaData | None, + mapper: Incomplete | None, + cls: type[Any] | tuple[type[Any], ...], + name: str, + constructor: Callable[..., None], + class_registry: dict[str, type[Any]] | None, + metaclass: _DeclarativeBaseMeta[_DeclT], +) -> _DeclT: ... + +class registry: + metadata: MetaData + constructor: Callable[..., None] + def __init__( + self, + metadata: MetaData | None = ..., + class_registry: dict[str, type[Any]] | None = ..., + constructor: Callable[..., None] = ..., + _bind: Connectable | None = ..., + ) -> None: ... + @property + def mappers(self) -> frozenset[Any]: ... + def configure(self, cascade: bool = ...) -> None: ... + def dispose(self, cascade: bool = ...) -> None: ... + @overload + def generate_base( + self, mapper: Incomplete | None = ..., cls: type[Any] | tuple[type[Any], ...] = ..., name: str = ... + ) -> type[_DeclarativeBase]: ... + @overload + def generate_base( + self, + mapper: Incomplete | None = ..., + cls: type[Any] | tuple[type[Any], ...] = ..., + name: str = ..., + *, + metaclass: _DeclarativeBaseMeta[_DeclT], + ) -> _DeclT: ... + @overload + def generate_base( + self, + mapper: Incomplete | None, + cls: type[Any] | tuple[type[Any], ...], + name: str, + metaclass: _DeclarativeBaseMeta[_DeclT], + ) -> type[_DeclarativeBase]: ... + def mapped(self, cls: _ClsT) -> _ClsT: ... + # Return type of the callable is a _DeclarativeBase class with the passed in class as base. + # This could be better approximated with Intersection[PassedInClass, _DeclarativeBase]. + @overload + def as_declarative_base(self, *, mapper: Incomplete | None = ...) -> Callable[[_ClsT], _ClsT | DeclarativeMeta | Any]: ... + @overload + def as_declarative_base( + self, *, mapper: Incomplete | None = ..., metaclass: _DeclarativeBaseMeta[_DeclT] + ) -> Callable[[_ClsT], _ClsT | _DeclT | Any]: ... + def map_declaratively(self, cls): ... + def map_imperatively(self, class_, local_table: Incomplete | None = ..., **kw): ... 
+ +@overload +def as_declarative( + *, + bind: Connectable | None = ..., + metadata: MetaData | None = ..., + class_registry: dict[str, type[Any]] | None = ..., + mapper: Incomplete | None = ..., +) -> Callable[[_ClsT], _ClsT | DeclarativeMeta | Any]: ... +@overload +def as_declarative( + *, + bind: Connectable | None = ..., + metadata: MetaData | None = ..., + class_registry: dict[str, type[Any]] | None = ..., + mapper: Incomplete | None = ..., + metaclass: _DeclarativeBaseMeta[_DeclT], +) -> Callable[[_ClsT], _ClsT | _DeclT | Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/decl_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/decl_base.pyi new file mode 100644 index 00000000..9d5dbf1c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/decl_base.pyi @@ -0,0 +1,47 @@ +from typing import Any + +class _MapperConfig: + @classmethod + def setup_mapping(cls, registry, cls_, dict_, table, mapper_kw): ... + cls: Any + classname: Any + properties: Any + declared_attr_reg: Any + def __init__(self, registry, cls_, mapper_kw) -> None: ... + def set_cls_attribute(self, attrname, value): ... + +class _ImperativeMapperConfig(_MapperConfig): + dict_: Any + local_table: Any + inherits: Any + def __init__(self, registry, cls_, table, mapper_kw) -> None: ... + def map(self, mapper_kw=...): ... + +class _ClassScanMapperConfig(_MapperConfig): + dict_: Any + local_table: Any + persist_selectable: Any + declared_columns: Any + column_copies: Any + table_args: Any + tablename: Any + mapper_args: Any + mapper_args_fn: Any + inherits: Any + def __init__(self, registry, cls_, dict_, table, mapper_kw) -> None: ... + def map(self, mapper_kw=...): ... + +class _DeferredMapperConfig(_ClassScanMapperConfig): + @property + def cls(self): ... + @cls.setter + def cls(self, class_) -> None: ... + @classmethod + def has_cls(cls, class_): ... + @classmethod + def raise_unmapped_for_cls(cls, class_) -> None: ... + @classmethod + def config_for_cls(cls, class_): ... + @classmethod + def classes_for_base(cls, base_cls, sort: bool = ...): ... + def map(self, mapper_kw=...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/dependency.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/dependency.pyi new file mode 100644 index 00000000..8fe92087 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/dependency.pyi @@ -0,0 +1,74 @@ +from typing import Any + +class DependencyProcessor: + prop: Any + cascade: Any + mapper: Any + parent: Any + secondary: Any + direction: Any + post_update: Any + passive_deletes: Any + passive_updates: Any + enable_typechecks: Any + sort_key: Any + key: Any + def __init__(self, prop) -> None: ... + @classmethod + def from_relationship(cls, prop): ... + def hasparent(self, state): ... + def per_property_preprocessors(self, uow) -> None: ... + def per_property_flush_actions(self, uow) -> None: ... + def per_state_flush_actions(self, uow, states, isdelete) -> None: ... + def presort_deletes(self, uowcommit, states): ... + def presort_saves(self, uowcommit, states): ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... 
+ def prop_has_changes(self, uowcommit, states, isdelete): ... + +class OneToManyDP(DependencyProcessor): + def per_property_dependencies( + self, uow, parent_saves, child_saves, parent_deletes, child_deletes, after_save, before_delete + ) -> None: ... + def per_state_dependencies( + self, uow, save_parent, delete_parent, child_action, after_save, before_delete, isdelete, childisdelete + ) -> None: ... + def presort_deletes(self, uowcommit, states) -> None: ... + def presort_saves(self, uowcommit, states) -> None: ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... + +class ManyToOneDP(DependencyProcessor): + def __init__(self, prop) -> None: ... + def per_property_dependencies( + self, uow, parent_saves, child_saves, parent_deletes, child_deletes, after_save, before_delete + ) -> None: ... + def per_state_dependencies( + self, uow, save_parent, delete_parent, child_action, after_save, before_delete, isdelete, childisdelete + ) -> None: ... + def presort_deletes(self, uowcommit, states) -> None: ... + def presort_saves(self, uowcommit, states) -> None: ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... + +class DetectKeySwitch(DependencyProcessor): + def per_property_preprocessors(self, uow) -> None: ... + def per_property_flush_actions(self, uow) -> None: ... + def per_state_flush_actions(self, uow, states, isdelete) -> None: ... + def presort_deletes(self, uowcommit, states) -> None: ... + def presort_saves(self, uow, states) -> None: ... + def prop_has_changes(self, uow, states, isdelete): ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... + +class ManyToManyDP(DependencyProcessor): + def per_property_dependencies( + self, uow, parent_saves, child_saves, parent_deletes, child_deletes, after_save, before_delete + ) -> None: ... + def per_state_dependencies( + self, uow, save_parent, delete_parent, child_action, after_save, before_delete, isdelete, childisdelete + ) -> None: ... + def presort_deletes(self, uowcommit, states) -> None: ... + def presort_saves(self, uowcommit, states) -> None: ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/descriptor_props.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/descriptor_props.pyi new file mode 100644 index 00000000..f4e8b793 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/descriptor_props.pyi @@ -0,0 +1,74 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar, Generic, TypeVar + +from ..sql.operators import ColumnOperators +from ..util import memoized_property +from . import util as orm_util +from .interfaces import MapperProperty, PropComparator + +_T = TypeVar("_T") + +class DescriptorProperty(MapperProperty): + doc: Any + uses_objects: bool + key: Any + descriptor: Any + def instrument_class(self, mapper): ... + +class CompositeProperty(DescriptorProperty): + attrs: Any + composite_class: Any + active_history: Any + deferred: Any + group: Any + comparator_factory: Any + info: Any + def __init__(self, class_, *attrs, **kwargs) -> None: ... + def instrument_class(self, mapper) -> None: ... 
+ def do_init(self) -> None: ... + @memoized_property + def props(self): ... + @property + def columns(self): ... + def get_history(self, state, dict_, passive=...): ... + + class CompositeBundle(orm_util.Bundle): + property: Any + def __init__(self, property_, expr) -> None: ... + def create_row_processor(self, query, procs, labels): ... + + class Comparator(PropComparator[_T], Generic[_T]): + __hash__: ClassVar[None] # type: ignore[assignment] + @memoized_property + def clauses(self): ... + def __clause_element__(self): ... + @memoized_property + def expression(self): ... + def __eq__(self, other) -> ColumnOperators[_T]: ... # type: ignore[override] + def __ne__(self, other) -> ColumnOperators[_T]: ... # type: ignore[override] + +class ConcreteInheritedProperty(DescriptorProperty): + descriptor: Any + def __init__(self): ... + +class SynonymProperty(DescriptorProperty): + name: Any + map_column: Any + descriptor: Any + comparator_factory: Any + doc: Any + info: Any + def __init__( + self, + name, + map_column: Incomplete | None = ..., + descriptor: Incomplete | None = ..., + comparator_factory: Incomplete | None = ..., + doc: Incomplete | None = ..., + info: Incomplete | None = ..., + ) -> None: ... + @property + def uses_objects(self): ... + def get_history(self, *arg, **kw): ... + parent: Any + def set_parent(self, parent, init) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/dynamic.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/dynamic.pyi new file mode 100644 index 00000000..40193a48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/dynamic.pyi @@ -0,0 +1,85 @@ +from _typeshed import Incomplete +from typing import Any, Generic, TypeVar + +from . import attributes, strategies +from .query import Query + +_T = TypeVar("_T") + +class DynaLoader(strategies.AbstractRelationshipLoader): + logger: Any + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + +class DynamicAttributeImpl(attributes.AttributeImpl): + uses_objects: bool + default_accepts_scalar_loader: bool + supports_population: bool + collection: bool + dynamic: bool + order_by: Any + target_mapper: Any + query_class: Any + def __init__( + self, class_, key, typecallable, dispatch, target_mapper, order_by, query_class: Incomplete | None = ..., **kw + ) -> None: ... + def get(self, state, dict_, passive=...): ... + def get_collection(self, state, dict_, user_data: Incomplete | None = ..., passive=...): ... + def fire_append_event(self, state, dict_, value, initiator, collection_history: Incomplete | None = ...) -> None: ... + def fire_remove_event(self, state, dict_, value, initiator, collection_history: Incomplete | None = ...) -> None: ... + def set( + self, + state, + dict_, + value, + initiator: Incomplete | None = ..., + passive=..., + check_old: Incomplete | None = ..., + pop: bool = ..., + _adapt: bool = ..., + ) -> None: ... + def delete(self, *args, **kwargs) -> None: ... + def set_committed_value(self, state, dict_, value) -> None: ... + def get_history(self, state, dict_, passive=...): ... + def get_all_pending(self, state, dict_, passive=...): ... + def append(self, state, dict_, value, initiator, passive=...) -> None: ... + def remove(self, state, dict_, value, initiator, passive=...) -> None: ... + def pop(self, state, dict_, value, initiator, passive=...) -> None: ... 
+ +class DynamicCollectionAdapter: + data: Any + def __init__(self, data) -> None: ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + +class AppenderMixin: + query_class: Any + instance: Any + attr: Any + def __init__(self, attr, state) -> None: ... + session: Any + def __getitem__(self, index): ... + def count(self): ... + def extend(self, iterator) -> None: ... + def append(self, item) -> None: ... + def remove(self, item) -> None: ... + +class AppenderQuery(AppenderMixin, Query[_T], Generic[_T]): ... + +def mixin_user_query(cls): ... + +class CollectionHistory: + unchanged_items: Any + added_items: Any + deleted_items: Any + def __init__(self, attr, state, apply_to: Incomplete | None = ...) -> None: ... + @property + def added_plus_unchanged(self): ... + @property + def all_items(self): ... + def as_history(self): ... + def indexed(self, index): ... + def add_added(self, value) -> None: ... + def add_removed(self, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/evaluator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/evaluator.pyi new file mode 100644 index 00000000..9091e969 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/evaluator.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete +from typing import Any + +from ..sql import operators + +class UnevaluatableError(Exception): ... + +class _NoObject(operators.ColumnOperators[Any]): + def operate(self, *arg, **kw) -> None: ... + def reverse_operate(self, *arg, **kw) -> None: ... + +class EvaluatorCompiler: + target_cls: Any + def __init__(self, target_cls: Incomplete | None = ...) -> None: ... + def process(self, *clauses): ... + def visit_grouping(self, clause): ... + def visit_null(self, clause): ... + def visit_false(self, clause): ... + def visit_true(self, clause): ... + def visit_column(self, clause): ... + def visit_tuple(self, clause): ... + def visit_clauselist(self, clause): ... + def visit_binary(self, clause): ... + def visit_unary(self, clause): ... + def visit_bindparam(self, clause): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/events.pyi new file mode 100644 index 00000000..36a6414f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/events.pyi @@ -0,0 +1,104 @@ +from typing import Any + +from .. import event + +class InstrumentationEvents(event.Events): + def class_instrument(self, cls) -> None: ... + def class_uninstrument(self, cls) -> None: ... + def attribute_instrument(self, cls, key, inst) -> None: ... + +class _InstrumentationEventsHold: + class_: Any + def __init__(self, class_) -> None: ... + dispatch: Any + +class InstanceEvents(event.Events): + def first_init(self, manager, cls) -> None: ... + def init(self, target, args, kwargs) -> None: ... + def init_failure(self, target, args, kwargs) -> None: ... + def load(self, target, context) -> None: ... + def refresh(self, target, context, attrs) -> None: ... + def refresh_flush(self, target, flush_context, attrs) -> None: ... + def expire(self, target, attrs) -> None: ... 
+ def pickle(self, target, state_dict) -> None: ... + def unpickle(self, target, state_dict) -> None: ... + +class _EventsHold(event.RefCollection): + class_: Any + def __init__(self, class_) -> None: ... + + class HoldEvents: ... + + def remove(self, event_key) -> None: ... + @classmethod + def populate(cls, class_, subject) -> None: ... + +class _InstanceEventsHold(_EventsHold): + all_holds: Any + def resolve(self, class_): ... + + class HoldInstanceEvents(_EventsHold.HoldEvents, InstanceEvents): ... + dispatch: Any + +class MapperEvents(event.Events): + def instrument_class(self, mapper, class_) -> None: ... + def before_mapper_configured(self, mapper, class_) -> None: ... + def mapper_configured(self, mapper, class_) -> None: ... + def before_configured(self) -> None: ... + def after_configured(self) -> None: ... + def before_insert(self, mapper, connection, target) -> None: ... + def after_insert(self, mapper, connection, target) -> None: ... + def before_update(self, mapper, connection, target) -> None: ... + def after_update(self, mapper, connection, target) -> None: ... + def before_delete(self, mapper, connection, target) -> None: ... + def after_delete(self, mapper, connection, target) -> None: ... + +class _MapperEventsHold(_EventsHold): + all_holds: Any + def resolve(self, class_): ... + + class HoldMapperEvents(_EventsHold.HoldEvents, MapperEvents): ... + dispatch: Any + +class SessionEvents(event.Events): + def do_orm_execute(self, orm_execute_state) -> None: ... + def after_transaction_create(self, session, transaction) -> None: ... + def after_transaction_end(self, session, transaction) -> None: ... + def before_commit(self, session) -> None: ... + def after_commit(self, session) -> None: ... + def after_rollback(self, session) -> None: ... + def after_soft_rollback(self, session, previous_transaction) -> None: ... + def before_flush(self, session, flush_context, instances) -> None: ... + def after_flush(self, session, flush_context) -> None: ... + def after_flush_postexec(self, session, flush_context) -> None: ... + def after_begin(self, session, transaction, connection) -> None: ... + def before_attach(self, session, instance) -> None: ... + def after_attach(self, session, instance) -> None: ... + def after_bulk_update(self, update_context) -> None: ... + def after_bulk_delete(self, delete_context) -> None: ... + def transient_to_pending(self, session, instance) -> None: ... + def pending_to_transient(self, session, instance) -> None: ... + def persistent_to_transient(self, session, instance) -> None: ... + def pending_to_persistent(self, session, instance) -> None: ... + def detached_to_persistent(self, session, instance) -> None: ... + def loaded_as_persistent(self, session, instance) -> None: ... + def persistent_to_deleted(self, session, instance) -> None: ... + def deleted_to_persistent(self, session, instance) -> None: ... + def deleted_to_detached(self, session, instance) -> None: ... + def persistent_to_detached(self, session, instance) -> None: ... + +class AttributeEvents(event.Events): + def append(self, target, value, initiator) -> None: ... + def append_wo_mutation(self, target, value, initiator) -> None: ... + def bulk_replace(self, target, values, initiator) -> None: ... + def remove(self, target, value, initiator) -> None: ... + def set(self, target, value, oldvalue, initiator) -> None: ... + def init_scalar(self, target, value, dict_) -> None: ... + def init_collection(self, target, collection, collection_adapter) -> None: ... 
+ def dispose_collection(self, target, collection, collection_adapter) -> None: ... + def modified(self, target, initiator) -> None: ... + +class QueryEvents(event.Events): + def before_compile(self, query) -> None: ... + def before_compile_update(self, query, update_context) -> None: ... + def before_compile_delete(self, query, delete_context) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/exc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/exc.pyi new file mode 100644 index 00000000..384ffdf1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/exc.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete +from typing import Any + +from .. import exc as sa_exc +from ..exc import NoResultFound as NoResultFound + +NO_STATE: Any + +class StaleDataError(sa_exc.SQLAlchemyError): ... + +ConcurrentModificationError = StaleDataError + +class FlushError(sa_exc.SQLAlchemyError): ... +class UnmappedError(sa_exc.InvalidRequestError): ... +class ObjectDereferencedError(sa_exc.SQLAlchemyError): ... + +class DetachedInstanceError(sa_exc.SQLAlchemyError): + code: str + +class UnmappedInstanceError(UnmappedError): + def __init__(self, obj, msg: Incomplete | None = ...) -> None: ... + def __reduce__(self): ... + +class UnmappedClassError(UnmappedError): + def __init__(self, cls, msg: Incomplete | None = ...) -> None: ... + def __reduce__(self): ... + +class ObjectDeletedError(sa_exc.InvalidRequestError): + def __init__(self, state, msg: Incomplete | None = ...) -> None: ... + def __reduce__(self): ... + +class UnmappedColumnError(sa_exc.InvalidRequestError): ... + +class LoaderStrategyException(sa_exc.InvalidRequestError): + def __init__(self, applied_to_property_type, requesting_property, applies_to, actual_strategy_type, strategy_key) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/identity.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/identity.pyi new file mode 100644 index 00000000..062e91c0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/identity.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete + +class IdentityMap: + def keys(self): ... + def replace(self, state) -> None: ... + def add(self, state) -> None: ... + def update(self, dict_) -> None: ... + def clear(self) -> None: ... + def check_modified(self): ... + def has_key(self, key): ... + def popitem(self) -> None: ... + def pop(self, key, *args) -> None: ... + def setdefault(self, key, default: Incomplete | None = ...) -> None: ... + def __len__(self) -> int: ... + def copy(self) -> None: ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + +class WeakInstanceDict(IdentityMap): + def __getitem__(self, key): ... + def __contains__(self, key): ... + def contains_state(self, state): ... + def replace(self, state): ... + def add(self, state): ... + def get(self, key, default: Incomplete | None = ...): ... + def items(self): ... + def values(self): ... + def __iter__(self): ... + def all_states(self): ... + def discard(self, state) -> None: ... + def safe_discard(self, state) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/instrumentation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/instrumentation.pyi new file mode 100644 index 00000000..6c972cef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/instrumentation.pyi @@ -0,0 +1,88 @@ +from _typeshed import Incomplete +from typing import Any + +from ..util import HasMemoized, hybridmethod +from . import base + +DEL_ATTR: Any + +class ClassManager(HasMemoized, dict[Any, Any]): + MANAGER_ATTR: Any + STATE_ATTR: Any + expired_attribute_loader: Any + init_method: Any + factory: Any + mapper: Any + declarative_scan: Any + registry: Any + @property + def deferred_scalar_loader(self): ... + @deferred_scalar_loader.setter + def deferred_scalar_loader(self, obj) -> None: ... + class_: Any + info: Any + new_init: Any + local_attrs: Any + originals: Any + def __init__(self, class_) -> None: ... + def __hash__(self) -> int: ... # type: ignore[override] + def __eq__(self, other): ... + @property + def is_mapped(self): ... + # Will be overwritten when mapped + # def mapper(self) -> None: ... + def manage(self) -> None: ... + @hybridmethod + def manager_getter(self): ... + @hybridmethod + def state_getter(self): ... + @hybridmethod + def dict_getter(self): ... + def instrument_attribute(self, key, inst, propagated: bool = ...) -> None: ... + def subclass_managers(self, recursive) -> None: ... + def post_configure_attribute(self, key) -> None: ... + def uninstrument_attribute(self, key, propagated: bool = ...) -> None: ... + def unregister(self) -> None: ... + def install_descriptor(self, key, inst) -> None: ... + def uninstall_descriptor(self, key) -> None: ... + def install_member(self, key, implementation) -> None: ... + def uninstall_member(self, key) -> None: ... + def instrument_collection_class(self, key, collection_class): ... + def initialize_collection(self, key, state, factory): ... + def is_instrumented(self, key, search: bool = ...): ... + def get_impl(self, key): ... + @property + def attributes(self): ... + def new_instance(self, state: Incomplete | None = ...): ... + def setup_instance(self, instance, state: Incomplete | None = ...) -> None: ... + def teardown_instance(self, instance) -> None: ... + def has_state(self, instance): ... + def has_parent(self, state, key, optimistic: bool = ...): ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + +class _SerializeManager: + class_: Any + def __init__(self, state, d) -> None: ... + def __call__(self, state, inst, state_dict) -> None: ... + +class InstrumentationFactory: + def create_manager_for_cls(self, class_): ... + def unregister(self, class_) -> None: ... + +instance_state: Any + +instance_dict: Any +manager_of_class = base.manager_of_class + +def register_class( + class_, + finalize: bool = ..., + mapper: Incomplete | None = ..., + registry: Incomplete | None = ..., + declarative_scan: Incomplete | None = ..., + expired_attribute_loader: Incomplete | None = ..., + init_method: Incomplete | None = ..., +): ... +def unregister_class(class_) -> None: ... +def is_instrumented(instance, key): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/interfaces.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/interfaces.pyi new file mode 100644 index 00000000..61b1c7f7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/interfaces.pyi @@ -0,0 +1,131 @@ +from _typeshed import Incomplete +from typing import Any, Generic, TypeVar + +from .. import util +from ..sql import operators, roles +from ..sql.base import ExecutableOption +from ..sql.traversals import HasCacheKey +from .base import ( + EXT_CONTINUE as EXT_CONTINUE, + EXT_SKIP as EXT_SKIP, + EXT_STOP as EXT_STOP, + MANYTOMANY as MANYTOMANY, + MANYTOONE as MANYTOONE, + NOT_EXTENSION as NOT_EXTENSION, + ONETOMANY as ONETOMANY, + InspectionAttr as InspectionAttr, + InspectionAttrInfo as InspectionAttrInfo, + _MappedAttribute as _MappedAttribute, +) + +_T = TypeVar("_T") + +__all__ = ( + "EXT_CONTINUE", + "EXT_STOP", + "EXT_SKIP", + "ONETOMANY", + "MANYTOMANY", + "MANYTOONE", + "NOT_EXTENSION", + "LoaderStrategy", + "MapperOption", + "LoaderOption", + "MapperProperty", + "PropComparator", + "StrategizedProperty", +) + +class ORMStatementRole(roles.StatementRole): ... +class ORMColumnsClauseRole(roles.ColumnsClauseRole): ... +class ORMEntityColumnsClauseRole(ORMColumnsClauseRole): ... +class ORMFromClauseRole(roles.StrictFromClauseRole): ... + +class MapperProperty(HasCacheKey, _MappedAttribute, InspectionAttr, util.MemoizedSlots): + cascade: Any + is_property: bool + def setup(self, context, query_entity, path, adapter, **kwargs) -> None: ... + def create_row_processor(self, context, query_entity, path, mapper, result, adapter, populators) -> None: ... + def cascade_iterator(self, type_, state, dict_, visited_states, halt_on: Incomplete | None = ...): ... + parent: Any + def set_parent(self, parent, init) -> None: ... + def instrument_class(self, mapper) -> None: ... + def __init__(self) -> None: ... + def init(self) -> None: ... + @property + def class_attribute(self): ... + def do_init(self) -> None: ... + def post_instrument_class(self, mapper) -> None: ... + def merge( + self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive, _resolve_conflict_map + ) -> None: ... + +class PropComparator(operators.ColumnOperators[_T], Generic[_T]): + __visit_name__: str + prop: Any + property: Any + def __init__(self, prop, parentmapper, adapt_to_entity: Incomplete | None = ...) -> None: ... + def __clause_element__(self) -> None: ... + def adapt_to_entity(self, adapt_to_entity): ... + @property + def adapter(self): ... + @property + def info(self): ... + @staticmethod + def any_op(a, b, **kwargs): ... + @staticmethod + def has_op(a, b, **kwargs): ... + @staticmethod + def of_type_op(a, class_): ... + def of_type(self, class_): ... + def and_(self, *criteria): ... + def any(self, criterion: Incomplete | None = ..., **kwargs): ... + def has(self, criterion: Incomplete | None = ..., **kwargs): ... + +class StrategizedProperty(MapperProperty): + inherit_cache: bool + strategy_wildcard_key: Any + def setup(self, context, query_entity, path, adapter, **kwargs) -> None: ... + def create_row_processor(self, context, query_entity, path, mapper, result, adapter, populators) -> None: ... + strategy: Any + def do_init(self) -> None: ... + def post_instrument_class(self, mapper) -> None: ... + @classmethod + def strategy_for(cls, **kw): ... 
+ +class ORMOption(ExecutableOption): + propagate_to_loaders: bool + +class CompileStateOption(HasCacheKey, ORMOption): + def process_compile_state(self, compile_state) -> None: ... + def process_compile_state_replaced_entities(self, compile_state, mapper_entities) -> None: ... + +class LoaderOption(CompileStateOption): + def process_compile_state_replaced_entities(self, compile_state, mapper_entities) -> None: ... + def process_compile_state(self, compile_state) -> None: ... + +class CriteriaOption(CompileStateOption): + def process_compile_state(self, compile_state) -> None: ... + def get_global_criteria(self, attributes) -> None: ... + +class UserDefinedOption(ORMOption): + propagate_to_loaders: bool + payload: Any + def __init__(self, payload: Incomplete | None = ...) -> None: ... + +class MapperOption(ORMOption): + propagate_to_loaders: bool + def process_query(self, query) -> None: ... + def process_query_conditionally(self, query) -> None: ... + +class LoaderStrategy: + parent_property: Any + is_class_level: bool + parent: Any + key: Any + strategy_key: Any + strategy_opts: Any + def __init__(self, parent, strategy_key) -> None: ... + def init_class_attribute(self, mapper) -> None: ... + def setup_query(self, compile_state, query_entity, path, loadopt, adapter, **kwargs) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/loading.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/loading.pyi new file mode 100644 index 00000000..83412459 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/loading.pyi @@ -0,0 +1,47 @@ +from _typeshed import Incomplete +from typing import Any + +def instances(cursor, context): ... +def merge_frozen_result(session, statement, frozen_result, load: bool = ...): ... +def merge_result(query, iterator, load: bool = ...): ... +def get_from_identity(session, mapper, key, passive): ... +def load_on_ident( + session, + statement, + key, + load_options: Incomplete | None = ..., + refresh_state: Incomplete | None = ..., + with_for_update: Incomplete | None = ..., + only_load_props: Incomplete | None = ..., + no_autoflush: bool = ..., + bind_arguments=..., + execution_options=..., +): ... +def load_on_pk_identity( + session, + statement, + primary_key_identity, + load_options: Incomplete | None = ..., + refresh_state: Incomplete | None = ..., + with_for_update: Incomplete | None = ..., + only_load_props: Incomplete | None = ..., + identity_token: Incomplete | None = ..., + no_autoflush: bool = ..., + bind_arguments=..., + execution_options=..., +): ... + +class PostLoad: + loaders: Any + states: Any + load_keys: Any + def add_state(self, state, overwrite) -> None: ... + def invoke(self, context, path) -> None: ... + @classmethod + def for_context(cls, context, path, only_load_props): ... + @classmethod + def path_exists(cls, context, path, key): ... + @classmethod + def callable_for_path(cls, context, path, limit_to_mapper, token, loader_callable, *arg, **kw) -> None: ... + +def load_scalar_attributes(mapper, state, attribute_names, passive) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/mapper.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/mapper.pyi new file mode 100644 index 00000000..999ad898 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/mapper.pyi @@ -0,0 +1,144 @@ +from _typeshed import Incomplete +from typing import Any + +from ..sql import base as sql_base +from ..util import HasMemoized, memoized_property +from .base import ( + _class_to_mapper as _class_to_mapper, + _state_mapper as _state_mapper, + class_mapper as class_mapper, + state_str as state_str, +) +from .interfaces import InspectionAttr, ORMEntityColumnsClauseRole, ORMFromClauseRole + +NO_ATTRIBUTE: Any + +class Mapper(ORMFromClauseRole, ORMEntityColumnsClauseRole, sql_base.MemoizedHasCacheKey, InspectionAttr): + logger: Any + class_: Any + class_manager: Any + non_primary: Any + always_refresh: Any + version_id_prop: Any + version_id_col: Any + version_id_generator: bool + concrete: Any + single: bool + inherits: Any + local_table: Any + inherit_condition: Any + inherit_foreign_keys: Any + batch: Any + eager_defaults: Any + column_prefix: Any + polymorphic_on: Any + validators: Any + passive_updates: Any + passive_deletes: Any + legacy_is_orphan: Any + allow_partial_pks: Any + confirm_deleted_rows: bool + polymorphic_load: Any + polymorphic_identity: Any + polymorphic_map: Any + include_properties: Any + exclude_properties: Any + def __init__( + self, + class_, + local_table: Incomplete | None = ..., + properties: Incomplete | None = ..., + primary_key: Incomplete | None = ..., + non_primary: bool = ..., + inherits: Incomplete | None = ..., + inherit_condition: Incomplete | None = ..., + inherit_foreign_keys: Incomplete | None = ..., + always_refresh: bool = ..., + version_id_col: Incomplete | None = ..., + version_id_generator: Incomplete | None = ..., + polymorphic_on: Incomplete | None = ..., + _polymorphic_map: Incomplete | None = ..., + polymorphic_identity: Incomplete | None = ..., + concrete: bool = ..., + with_polymorphic: Incomplete | None = ..., + polymorphic_load: Incomplete | None = ..., + allow_partial_pks: bool = ..., + batch: bool = ..., + column_prefix: Incomplete | None = ..., + include_properties: Incomplete | None = ..., + exclude_properties: Incomplete | None = ..., + passive_updates: bool = ..., + passive_deletes: bool = ..., + confirm_deleted_rows: bool = ..., + eager_defaults: bool = ..., + legacy_is_orphan: bool = ..., + _compiled_cache_size: int = ..., + ): ... + is_mapper: bool + represents_outer_join: bool + @property + def mapper(self): ... + @property + def entity(self): ... + persist_selectable: Any + configured: bool + tables: Any + primary_key: Any + base_mapper: Any + columns: Any + c: Any + @property + def mapped_table(self): ... + def add_properties(self, dict_of_properties) -> None: ... + def add_property(self, key, prop) -> None: ... + def has_property(self, key): ... + def get_property(self, key, _configure_mappers: bool = ...): ... + def get_property_by_column(self, column): ... + @property + def iterate_properties(self): ... + with_polymorphic_mappers: Any + def __clause_element__(self): ... + @memoized_property + def select_identity_token(self): ... + @property + def selectable(self): ... + @HasMemoized.memoized_attribute + def attrs(self): ... + @HasMemoized.memoized_attribute + def all_orm_descriptors(self): ... 
+ @HasMemoized.memoized_attribute + def synonyms(self): ... + @property + def entity_namespace(self): ... + @HasMemoized.memoized_attribute + def column_attrs(self): ... + @HasMemoized.memoized_attribute + def relationships(self): ... + @HasMemoized.memoized_attribute + def composites(self): ... + def common_parent(self, other): ... + def is_sibling(self, other): ... + def isa(self, other): ... + def iterate_to_root(self) -> None: ... + @HasMemoized.memoized_attribute + def self_and_descendants(self): ... + def polymorphic_iterator(self): ... + def primary_mapper(self): ... + @property + def primary_base_mapper(self): ... + def identity_key_from_row(self, row, identity_token: Incomplete | None = ..., adapter: Incomplete | None = ...): ... + def identity_key_from_primary_key(self, primary_key, identity_token: Incomplete | None = ...): ... + def identity_key_from_instance(self, instance): ... + def primary_key_from_instance(self, instance): ... + def cascade_iterator(self, type_, state, halt_on: Incomplete | None = ...) -> None: ... + +class _OptGetColumnsNotAvailable(Exception): ... + +def configure_mappers() -> None: ... +def reconstructor(fn): ... +def validates(*names, **kw): ... + +class _ColumnMapping(dict[Any, Any]): + mapper: Any + def __init__(self, mapper) -> None: ... + def __missing__(self, column) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/path_registry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/path_registry.pyi new file mode 100644 index 00000000..9da5d316 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/path_registry.pyi @@ -0,0 +1,107 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar + +from ..sql.traversals import HasCacheKey +from ..util import memoized_property +from . import base as orm_base + +log: Any + +class PathRegistry(HasCacheKey): + root: ClassVar[RootRegistry] + is_token: bool + is_root: bool + def __eq__(self, other): ... + def __ne__(self, other): ... + def set(self, attributes, key, value) -> None: ... + def setdefault(self, attributes, key, value) -> None: ... + def get(self, attributes, key, value: Incomplete | None = ...): ... + def __len__(self) -> int: ... + def __hash__(self) -> int: ... + @property + def length(self): ... + def pairs(self) -> None: ... + def contains_mapper(self, mapper): ... + def contains(self, attributes, key): ... + def __reduce__(self): ... + @classmethod + def serialize_context_dict(cls, dict_, tokens): ... + @classmethod + def deserialize_context_dict(cls, serialized): ... + def serialize(self): ... + @classmethod + def deserialize(cls, path): ... + @classmethod + def per_mapper(cls, mapper): ... + @classmethod + def coerce(cls, raw): ... + def token(self, token): ... + def __add__(self, other): ... + +class RootRegistry(PathRegistry): + inherit_cache: bool + path: Any + natural_path: Any + has_entity: bool + is_aliased_class: bool + is_root: bool + def __getitem__(self, entity): ... + +class PathToken(orm_base.InspectionAttr, HasCacheKey, str): + @classmethod + def intern(cls, strvalue): ... + +class TokenRegistry(PathRegistry): + inherit_cache: bool + token: Any + parent: Any + path: Any + natural_path: Any + def __init__(self, parent, token) -> None: ... + has_entity: bool + is_token: bool + def generate_for_superclasses(self) -> None: ... + def __getitem__(self, entity) -> None: ... 
+ +class PropRegistry(PathRegistry): + is_unnatural: bool + inherit_cache: bool + prop: Any + parent: Any + path: Any + natural_path: Any + def __init__(self, parent, prop) -> None: ... + @memoized_property + def has_entity(self): ... + @memoized_property + def entity(self): ... + @property + def mapper(self): ... + @property + def entity_path(self): ... + def __getitem__(self, entity): ... + +class AbstractEntityRegistry(PathRegistry): + has_entity: bool + key: Any + parent: Any + is_aliased_class: Any + entity: Any + path: Any + natural_path: Any + def __init__(self, parent, entity) -> None: ... + @property + def entity_path(self): ... + @property + def mapper(self): ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def __getitem__(self, entity): ... + +class SlotsEntityRegistry(AbstractEntityRegistry): + inherit_cache: bool + +class CachingEntityRegistry(AbstractEntityRegistry, dict[Incomplete, Incomplete]): # type: ignore[misc] + inherit_cache: bool + def __getitem__(self, entity): ... + def __missing__(self, key): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/persistence.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/persistence.pyi new file mode 100644 index 00000000..8d5d721e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/persistence.pyi @@ -0,0 +1,28 @@ +from typing import Any + +from ..sql.base import CompileState, Options +from ..sql.dml import DeleteDMLState, UpdateDMLState + +def save_obj(base_mapper, states, uowtransaction, single: bool = ...) -> None: ... +def post_update(base_mapper, states, uowtransaction, post_update_cols) -> None: ... +def delete_obj(base_mapper, states, uowtransaction) -> None: ... + +class BulkUDCompileState(CompileState): + class default_update_options(Options): ... + + @classmethod + def orm_pre_session_exec(cls, session, statement, params, execution_options, bind_arguments, is_reentrant_invoke): ... + @classmethod + def orm_setup_cursor_result(cls, session, statement, params, execution_options, bind_arguments, result): ... + +class BulkORMUpdate(UpdateDMLState, BulkUDCompileState): + mapper: Any + extra_criteria_entities: Any + @classmethod + def create_for_statement(cls, statement, compiler, **kw): ... + +class BulkORMDelete(DeleteDMLState, BulkUDCompileState): + mapper: Any + extra_criteria_entities: Any + @classmethod + def create_for_statement(cls, statement, compiler, **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/properties.pyi new file mode 100644 index 00000000..21dd0286 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/properties.pyi @@ -0,0 +1,45 @@ +from typing import Any + +from .. 
import util +from .descriptor_props import ( + CompositeProperty as CompositeProperty, + ConcreteInheritedProperty as ConcreteInheritedProperty, + SynonymProperty as SynonymProperty, +) +from .interfaces import PropComparator, StrategizedProperty +from .relationships import RelationshipProperty as RelationshipProperty + +__all__ = ["ColumnProperty", "CompositeProperty", "ConcreteInheritedProperty", "RelationshipProperty", "SynonymProperty"] + +class ColumnProperty(StrategizedProperty): + logger: Any + strategy_wildcard_key: str + inherit_cache: bool + columns: Any + group: Any + deferred: Any + raiseload: Any + instrument: Any + comparator_factory: Any + descriptor: Any + active_history: Any + expire_on_flush: Any + info: Any + doc: Any + strategy_key: Any + def __init__(self, *columns, **kwargs) -> None: ... + def __clause_element__(self): ... + @property + def expression(self): ... + def instrument_class(self, mapper) -> None: ... + def do_init(self) -> None: ... + def copy(self): ... + def merge( + self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive, _resolve_conflict_map + ) -> None: ... + + class Comparator(util.MemoizedSlots, PropComparator[Any]): + expressions: Any + def _memoized_method___clause_element__(self): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/query.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/query.pyi new file mode 100644 index 00000000..ab769f03 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/query.pyi @@ -0,0 +1,135 @@ +from _typeshed import Incomplete +from collections.abc import Iterator +from typing import Any, Generic, TypeVar +from typing_extensions import Literal, Self, TypeAlias + +from ..sql.annotation import SupportsCloneAnnotations +from ..sql.base import Executable +from ..sql.selectable import GroupedElement, HasHints, HasPrefixes, HasSuffixes, SelectBase, _SelectFromElements +from . import interfaces +from .context import QueryContext as QueryContext +from .util import aliased as aliased + +__all__ = ["Query", "QueryContext", "aliased"] + +_T = TypeVar("_T") +_SynchronizeSessionArgument: TypeAlias = Literal[False, "evaluate", "fetch"] + +class Query(_SelectFromElements, SupportsCloneAnnotations, HasPrefixes, HasSuffixes, HasHints, Executable, Generic[_T]): + logger: Any + load_options: Any + session: Any + def __init__(self, entities, session: Incomplete | None = ...) -> None: ... + @property + def statement(self): ... + def subquery(self, name: str | None = ..., with_labels: bool = ..., reduce_columns: bool = ...): ... + def cte(self, name: Incomplete | None = ..., recursive: bool = ..., nesting: bool = ...): ... + def label(self, name): ... + def as_scalar(self): ... + def scalar_subquery(self): ... + @property + def selectable(self): ... + def __clause_element__(self): ... + def only_return_tuples(self, value) -> Self: ... + @property + def is_single_entity(self): ... + def enable_eagerloads(self, value) -> Self: ... + def with_labels(self): ... + apply_labels: Any + @property + def get_label_style(self): ... + def set_label_style(self, style): ... + def enable_assertions(self, value) -> Self: ... + @property + def whereclause(self): ... 
+ def with_polymorphic( + self, cls_or_mappers, selectable: Incomplete | None = ..., polymorphic_on: Incomplete | None = ... + ) -> Self: ... + def yield_per(self, count) -> Self: ... + def get(self, ident): ... + @property + def lazy_loaded_from(self): ... + def correlate(self, *fromclauses) -> Self: ... + def autoflush(self, setting) -> Self: ... + def populate_existing(self) -> Self: ... + def with_parent(self, instance, property: Incomplete | None = ..., from_entity: Incomplete | None = ...): ... + def add_entity(self, entity, alias: Incomplete | None = ...) -> Self: ... + def with_session(self, session) -> Self: ... + def from_self(self, *entities): ... + def values(self, *columns): ... + def value(self, column): ... + def with_entities(self, *entities) -> Self: ... + def add_columns(self, *column) -> Self: ... + def add_column(self, column): ... + def options(self, *args) -> Self: ... + def with_transformation(self, fn): ... + def get_execution_options(self): ... + def execution_options(self, **kwargs) -> Self: ... + def with_for_update( + self, read: bool = ..., nowait: bool = ..., of: Incomplete | None = ..., skip_locked: bool = ..., key_share: bool = ... + ) -> Self: ... + def params(self, *args, **kwargs) -> Self: ... + def where(self, *criterion): ... + def filter(self, *criterion) -> Self: ... + def filter_by(self, **kwargs) -> Self: ... + def order_by(self, *clauses) -> Self: ... + def group_by(self, *clauses) -> Self: ... + def having(self, criterion) -> Self: ... + def union(self, *q): ... + def union_all(self, *q): ... + def intersect(self, *q): ... + def intersect_all(self, *q): ... + def except_(self, *q): ... + def except_all(self, *q): ... + def join(self, target, *props, **kwargs) -> Self: ... + def outerjoin(self, target, *props, **kwargs) -> Self: ... + def reset_joinpoint(self) -> Self: ... + def select_from(self, *from_obj) -> Self: ... + def select_entity_from(self, from_obj) -> Self: ... + def __getitem__(self, item): ... + def slice(self, start, stop) -> Self: ... + def limit(self, limit) -> Self: ... + def offset(self, offset) -> Self: ... + def distinct(self, *expr) -> Self: ... + def all(self) -> list[_T]: ... + def from_statement(self, statement) -> Self: ... + def first(self) -> _T | None: ... + def one_or_none(self): ... + def one(self): ... + def scalar(self) -> Any: ... # type: ignore[override] + def __iter__(self) -> Iterator[_T]: ... + @property + def column_descriptions(self): ... + def instances(self, result_proxy, context: Incomplete | None = ...): ... + def merge_result(self, iterator, load: bool = ...): ... + def exists(self): ... + def count(self) -> int: ... + def delete(self, synchronize_session: _SynchronizeSessionArgument = ...) -> int: ... + def update(self, values, synchronize_session: _SynchronizeSessionArgument = ..., update_args: Incomplete | None = ...): ... + +class FromStatement(GroupedElement, SelectBase, Executable): + __visit_name__: str + element: Any + def __init__(self, entities, element) -> None: ... + def get_label_style(self): ... + def set_label_style(self, label_style): ... + def get_children(self, **kw) -> None: ... # type: ignore[override] + +class AliasOption(interfaces.LoaderOption): + def __init__(self, alias) -> None: ... + inherit_cache: bool + def process_compile_state(self, compile_state) -> None: ... + +class BulkUD: + query: Any + mapper: Any + def __init__(self, query) -> None: ... + @property + def session(self): ... 
+ +class BulkUpdate(BulkUD): + values: Any + update_kwargs: Any + def __init__(self, query, values, update_kwargs) -> None: ... + +class BulkDelete(BulkUD): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/relationships.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/relationships.pyi new file mode 100644 index 00000000..e7382a51 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/relationships.pyi @@ -0,0 +1,187 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar, Generic, TypeVar + +from ..sql.operators import ColumnOperators +from ..util import memoized_property +from .interfaces import PropComparator, StrategizedProperty + +_T = TypeVar("_T") + +def remote(expr): ... +def foreign(expr): ... + +class RelationshipProperty(StrategizedProperty): + logger: Any + strategy_wildcard_key: str + inherit_cache: bool + uselist: Any + argument: Any + secondary: Any + primaryjoin: Any + secondaryjoin: Any + post_update: Any + direction: Any + viewonly: Any + sync_backref: Any + lazy: Any + single_parent: Any + collection_class: Any + passive_deletes: Any + cascade_backrefs: Any + passive_updates: Any + remote_side: Any + enable_typechecks: Any + query_class: Any + innerjoin: Any + distinct_target_key: Any + doc: Any + active_history: Any + join_depth: Any + omit_join: Any + local_remote_pairs: Any + bake_queries: Any + load_on_pending: Any + comparator_factory: Any + comparator: Any + info: Any + strategy_key: Any + order_by: Any + back_populates: Any + backref: Any + def __init__( + self, + argument, + secondary: Incomplete | None = ..., + primaryjoin: Incomplete | None = ..., + secondaryjoin: Incomplete | None = ..., + foreign_keys: Incomplete | None = ..., + uselist: Incomplete | None = ..., + order_by: bool = ..., + backref: Incomplete | None = ..., + back_populates: Incomplete | None = ..., + overlaps: Incomplete | None = ..., + post_update: bool = ..., + cascade: bool = ..., + viewonly: bool = ..., + lazy: str = ..., + collection_class: Incomplete | None = ..., + passive_deletes=..., + passive_updates=..., + remote_side: Incomplete | None = ..., + enable_typechecks=..., + join_depth: Incomplete | None = ..., + comparator_factory: Incomplete | None = ..., + single_parent: bool = ..., + innerjoin: bool = ..., + distinct_target_key: Incomplete | None = ..., + doc: Incomplete | None = ..., + active_history=..., + cascade_backrefs=..., + load_on_pending: bool = ..., + bake_queries: bool = ..., + _local_remote_pairs: Incomplete | None = ..., + query_class: Incomplete | None = ..., + info: Incomplete | None = ..., + omit_join: Incomplete | None = ..., + sync_backref: Incomplete | None = ..., + _legacy_inactive_history_style: bool = ..., + ) -> None: ... + def instrument_class(self, mapper) -> None: ... + + class Comparator(PropComparator[_T], Generic[_T]): + prop: Any + def __init__( + self, + prop, + parentmapper, + adapt_to_entity: Incomplete | None = ..., + of_type: Incomplete | None = ..., + extra_criteria=..., + ) -> None: ... + def adapt_to_entity(self, adapt_to_entity): ... + @memoized_property + def entity(self): ... + @memoized_property + def mapper(self): ... + def __clause_element__(self): ... + def of_type(self, cls): ... + def and_(self, *other): ... + def in_(self, other) -> ColumnOperators[_T]: ... 
+ __hash__: ClassVar[None] # type: ignore[assignment] + def __eq__(self, other): ... + def any(self, criterion: Incomplete | None = ..., **kwargs): ... + def has(self, criterion: Incomplete | None = ..., **kwargs): ... + def contains(self, other, **kwargs) -> ColumnOperators[_T]: ... + def __ne__(self, other) -> ColumnOperators[_T]: ... # type: ignore[override] + @memoized_property + def property(self): ... + + def merge( + self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive, _resolve_conflict_map + ) -> None: ... + def cascade_iterator(self, type_, state, dict_, visited_states, halt_on: Incomplete | None = ...) -> None: ... + @memoized_property + def entity(self): ... + @memoized_property + def mapper(self): ... + def do_init(self) -> None: ... + @property + def cascade(self): ... + @cascade.setter + def cascade(self, cascade) -> None: ... + +class JoinCondition: + parent_persist_selectable: Any + parent_local_selectable: Any + child_persist_selectable: Any + child_local_selectable: Any + parent_equivalents: Any + child_equivalents: Any + primaryjoin: Any + secondaryjoin: Any + secondary: Any + consider_as_foreign_keys: Any + prop: Any + self_referential: Any + support_sync: Any + can_be_synced_fn: Any + def __init__( + self, + parent_persist_selectable, + child_persist_selectable, + parent_local_selectable, + child_local_selectable, + primaryjoin: Incomplete | None = ..., + secondary: Incomplete | None = ..., + secondaryjoin: Incomplete | None = ..., + parent_equivalents: Incomplete | None = ..., + child_equivalents: Incomplete | None = ..., + consider_as_foreign_keys: Incomplete | None = ..., + local_remote_pairs: Incomplete | None = ..., + remote_side: Incomplete | None = ..., + self_referential: bool = ..., + prop: Incomplete | None = ..., + support_sync: bool = ..., + can_be_synced_fn=..., + ): ... + @property + def primaryjoin_minus_local(self): ... + @property + def secondaryjoin_minus_local(self): ... + @memoized_property + def primaryjoin_reverse_remote(self): ... + @memoized_property + def remote_columns(self): ... + @memoized_property + def local_columns(self): ... + @memoized_property + def foreign_key_columns(self): ... + def join_targets( + self, source_selectable, dest_selectable, aliased, single_crit: Incomplete | None = ..., extra_criteria=... + ): ... + def create_lazy_clause(self, reverse_direction: bool = ...): ... + +class _ColInAnnotations: + name: Any + def __init__(self, name) -> None: ... + def __call__(self, c): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/scoping.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/scoping.pyi new file mode 100644 index 00000000..0731e461 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/scoping.pyi @@ -0,0 +1,105 @@ +from _typeshed import Incomplete +from typing import Any + +from ..util import memoized_property + +class ScopedSessionMixin: + def __call__(self, **kw): ... + def configure(self, **kwargs) -> None: ... + +class scoped_session(ScopedSessionMixin): + session_factory: Any + registry: Any + def __init__(self, session_factory, scopefunc: Incomplete | None = ...) -> None: ... + def remove(self) -> None: ... + def query_property(self, query_cls: Incomplete | None = ...): ... 
+ # dynamically proxied from class Session + bind: Any + identity_map: Any + autoflush: Any + autocommit: bool + @property + def dirty(self): ... + @property + def deleted(self): ... + @property + def new(self): ... + @property + def is_active(self): ... + @property + def no_autoflush(self) -> None: ... + @memoized_property + def info(self): ... + @classmethod + def close_all(cls) -> None: ... + @classmethod + def identity_key(cls, *args, **kwargs): ... + @classmethod + def object_session(cls, instance): ... + def __contains__(self, instance): ... + def __iter__(self): ... + def add(self, instance, _warn: bool = ...) -> None: ... + def add_all(self, instances) -> None: ... + def begin(self, subtransactions: bool = ..., nested: bool = ..., _subtrans: bool = ...): ... + def begin_nested(self): ... + def close(self) -> None: ... + def commit(self) -> None: ... + def connection( + self, + bind_arguments: Incomplete | None = ..., + close_with_result: bool = ..., + execution_options: Incomplete | None = ..., + **kw, + ): ... + def delete(self, instance) -> None: ... + def execute( + self, + statement, + params: Incomplete | None = ..., + execution_options=..., + bind_arguments: Incomplete | None = ..., + _parent_execute_state: Incomplete | None = ..., + _add_event: Incomplete | None = ..., + **kw, + ): ... + def expire(self, instance, attribute_names: Incomplete | None = ...) -> None: ... + def expire_all(self) -> None: ... + def expunge(self, instance) -> None: ... + def expunge_all(self) -> None: ... + def flush(self, objects: Incomplete | None = ...) -> None: ... + def get( + self, + entity, + ident, + options: Incomplete | None = ..., + populate_existing: bool = ..., + with_for_update: Incomplete | None = ..., + identity_token: Incomplete | None = ..., + execution_options: Incomplete | None = ..., + ): ... + def get_bind( + self, + mapper: Incomplete | None = ..., + clause: Incomplete | None = ..., + bind: Incomplete | None = ..., + _sa_skip_events: Incomplete | None = ..., + _sa_skip_for_implicit_returning: bool = ..., + ): ... + def is_modified(self, instance, include_collections: bool = ...): ... + def bulk_save_objects( + self, objects, return_defaults: bool = ..., update_changed_only: bool = ..., preserve_order: bool = ... + ): ... + def bulk_insert_mappings(self, mapper, mappings, return_defaults: bool = ..., render_nulls: bool = ...) -> None: ... + def bulk_update_mappings(self, mapper, mappings) -> None: ... + def merge(self, instance, load: bool = ..., options: Incomplete | None = ...): ... + def query(self, *entities, **kwargs): ... + def refresh(self, instance, attribute_names: Incomplete | None = ..., with_for_update: Incomplete | None = ...) -> None: ... + def rollback(self) -> None: ... + def scalar( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + def scalars( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... 
+ +ScopedSession = scoped_session diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/session.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/session.pyi new file mode 100644 index 00000000..09e4acb2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/session.pyi @@ -0,0 +1,228 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any, TypeVar, overload +from typing_extensions import Self + +from ..engine.base import Connection +from ..engine.result import Result +from ..engine.util import TransactionalContext +from ..sql.elements import ColumnElement +from ..sql.schema import Table +from ..util import MemoizedSlots, memoized_property +from .query import Query + +_T = TypeVar("_T") + +class _SessionClassMethods: + @classmethod + def close_all(cls) -> None: ... + @classmethod + def identity_key(cls, *args, **kwargs): ... + @classmethod + def object_session(cls, instance): ... + +class ORMExecuteState(MemoizedSlots): + session: Any + statement: Any + parameters: Any + local_execution_options: Any + execution_options: Any + bind_arguments: Any + def __init__( + self, session, statement, parameters, execution_options, bind_arguments, compile_state_cls, events_todo + ) -> None: ... + def invoke_statement( + self, + statement: Incomplete | None = ..., + params: Incomplete | None = ..., + execution_options: Incomplete | None = ..., + bind_arguments: Incomplete | None = ..., + ): ... + @property + def bind_mapper(self): ... + @property + def all_mappers(self): ... + @property + def is_orm_statement(self): ... + @property + def is_select(self): ... + @property + def is_insert(self): ... + @property + def is_update(self): ... + @property + def is_delete(self): ... + def update_execution_options(self, **opts) -> None: ... + @property + def lazy_loaded_from(self): ... + @property + def loader_strategy_path(self): ... + @property + def is_column_load(self): ... + @property + def is_relationship_load(self): ... + @property + def load_options(self): ... + @property + def update_delete_options(self): ... + @property + def user_defined_options(self): ... + +class SessionTransaction(TransactionalContext): + session: Any + nested: Any + def __init__(self, session, parent: Incomplete | None = ..., nested: bool = ..., autobegin: bool = ...) -> None: ... + @property + def parent(self): ... + @property + def is_active(self): ... + def connection(self, bindkey, execution_options: Incomplete | None = ..., **kwargs): ... + def prepare(self) -> None: ... + def commit(self, _to_root: bool = ...): ... + def rollback(self, _capture_exception: bool = ..., _to_root: bool = ...): ... + def close(self, invalidate: bool = ...) -> None: ... + +class Session(_SessionClassMethods): + identity_map: Any + bind: Any + future: Any + hash_key: Any + autoflush: Any + expire_on_commit: Any + enable_baked_queries: Any + autocommit: bool + twophase: Any + def __init__( + self, + bind: Incomplete | None = ..., + autoflush: bool = ..., + future: bool = ..., + expire_on_commit: bool = ..., + autocommit: bool = ..., + twophase: bool = ..., + binds: Incomplete | None = ..., + enable_baked_queries: bool = ..., + info: Incomplete | None = ..., + query_cls: Incomplete | None = ..., + ) -> None: ... + connection_callable: Any + def __enter__(self) -> Self: ... + def __exit__(self, type_, value, traceback) -> None: ... 
+ @property + def transaction(self): ... + def in_transaction(self): ... + def in_nested_transaction(self): ... + def get_transaction(self): ... + def get_nested_transaction(self): ... + @memoized_property + def info(self): ... + def begin(self, subtransactions: bool = ..., nested: bool = ..., _subtrans: bool = ...): ... + def begin_nested(self): ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + def prepare(self) -> None: ... + # TODO: bind_arguments could use a TypedDict + def connection( + self, + bind_arguments: Mapping[str, Any] | None = ..., + close_with_result: bool = ..., + execution_options: Mapping[str, Any] | None = ..., + **kw: Any, + ) -> Connection: ... + def execute( + self, + statement, + params: Incomplete | None = ..., + execution_options=..., + bind_arguments: Incomplete | None = ..., + _parent_execute_state: Incomplete | None = ..., + _add_event: Incomplete | None = ..., + **kw, + ) -> Result: ... + def scalar( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + def scalars( + self, statement, params: Incomplete | None = ..., execution_options=..., bind_arguments: Incomplete | None = ..., **kw + ): ... + def close(self) -> None: ... + def invalidate(self) -> None: ... + def expunge_all(self) -> None: ... + def bind_mapper(self, mapper, bind) -> None: ... + def bind_table(self, table, bind) -> None: ... + def get_bind( + self, + mapper: Incomplete | None = ..., + clause: Incomplete | None = ..., + bind: Incomplete | None = ..., + _sa_skip_events: Incomplete | None = ..., + _sa_skip_for_implicit_returning: bool = ..., + ): ... + @overload + def query(self, entities: Table, **kwargs: Any) -> Query[Any]: ... + @overload + def query(self, entities: ColumnElement[_T], **kwargs: Any) -> Query[tuple[_T]]: ... # type: ignore[misc] + @overload + def query(self, *entities: ColumnElement[_T], **kwargs: Any) -> Query[tuple[_T, ...]]: ... + @overload + def query(self, *entities: type[_T], **kwargs: Any) -> Query[_T]: ... + @property + def no_autoflush(self) -> None: ... + def refresh(self, instance, attribute_names: Incomplete | None = ..., with_for_update: Incomplete | None = ...) -> None: ... + def expire_all(self) -> None: ... + def expire(self, instance, attribute_names: Incomplete | None = ...) -> None: ... + def expunge(self, instance) -> None: ... + def add(self, instance, _warn: bool = ...) -> None: ... + def add_all(self, instances) -> None: ... + def delete(self, instance) -> None: ... + def get( + self, + entity, + ident, + options: Incomplete | None = ..., + populate_existing: bool = ..., + with_for_update: Incomplete | None = ..., + identity_token: Incomplete | None = ..., + execution_options: Incomplete | None = ..., + ): ... + def merge(self, instance, load: bool = ..., options: Incomplete | None = ...): ... + def enable_relationship_loading(self, obj) -> None: ... + def __contains__(self, instance): ... + def __iter__(self): ... + def flush(self, objects: Incomplete | None = ...) -> None: ... + def bulk_save_objects( + self, objects, return_defaults: bool = ..., update_changed_only: bool = ..., preserve_order: bool = ... + ): ... + def bulk_insert_mappings(self, mapper, mappings, return_defaults: bool = ..., render_nulls: bool = ...) -> None: ... + def bulk_update_mappings(self, mapper, mappings) -> None: ... + def is_modified(self, instance, include_collections: bool = ...): ... + @property + def is_active(self): ... + @property + def dirty(self): ... 
+ @property + def deleted(self): ... + @property + def new(self): ... + +class sessionmaker(_SessionClassMethods): + kw: Any + class_: Any + def __init__( + self, + bind: Incomplete | None = ..., + class_=..., + autoflush: bool = ..., + autocommit: bool = ..., + expire_on_commit: bool = ..., + info: Incomplete | None = ..., + **kw, + ) -> None: ... + def begin(self): ... + def __call__(self, **local_kw) -> Session: ... + def configure(self, **new_kw) -> None: ... + +def close_all_sessions() -> None: ... +def make_transient(instance) -> None: ... +def make_transient_to_detached(instance) -> None: ... +def object_session(instance): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/state.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/state.pyi new file mode 100644 index 00000000..60621f0b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/state.pyi @@ -0,0 +1,82 @@ +from typing import Any + +from ..util import memoized_property +from . import interfaces + +class InstanceState(interfaces.InspectionAttrInfo): + session_id: Any + key: Any + runid: Any + load_options: Any + load_path: Any + insert_order: Any + modified: bool + expired: bool + is_instance: bool + identity_token: Any + callables: Any + class_: Any + manager: Any + committed_state: Any + expired_attributes: Any + def __init__(self, obj, manager) -> None: ... + @memoized_property + def attrs(self): ... + @property + def transient(self): ... + @property + def pending(self): ... + @property + def deleted(self): ... + @property + def was_deleted(self): ... + @property + def persistent(self): ... + @property + def detached(self): ... + @property + def session(self): ... + @property + def async_session(self): ... + @property + def object(self): ... + @property + def identity(self): ... + @property + def identity_key(self): ... + @memoized_property + def parents(self): ... + @memoized_property + def mapper(self): ... + @property + def has_identity(self): ... + def obj(self) -> None: ... + @property + def dict(self): ... + def get_history(self, key, passive): ... + def get_impl(self, key): ... + @property + def unmodified(self): ... + def unmodified_intersection(self, keys): ... + @property + def unloaded(self): ... + @property + def unloaded_expirable(self): ... + +class AttributeState: + state: Any + key: Any + def __init__(self, state, key) -> None: ... + @property + def loaded_value(self): ... + @property + def value(self): ... + @property + def history(self): ... + def load_history(self): ... + +class PendingCollection: + deleted_items: Any + added_items: Any + def append(self, value) -> None: ... + def remove(self, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/strategies.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/strategies.pyi new file mode 100644 index 00000000..f4b95081 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/strategies.pyi @@ -0,0 +1,142 @@ +from _typeshed import Incomplete +from typing import Any, NamedTuple + +from .. import util +from .interfaces import LoaderStrategy + +class UninstrumentedColumnLoader(LoaderStrategy): + columns: Any + def __init__(self, parent, strategy_key) -> None: ... 
+ def setup_query( + self, compile_state, query_entity, path, loadopt, adapter, column_collection: Incomplete | None = ..., **kwargs + ) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class ColumnLoader(LoaderStrategy): + logger: Any + columns: Any + is_composite: Any + def __init__(self, parent, strategy_key) -> None: ... + def setup_query(self, compile_state, query_entity, path, loadopt, adapter, column_collection, memoized_populators, check_for_adapt: bool = ..., **kwargs) -> None: ... # type: ignore[override] + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class ExpressionColumnLoader(ColumnLoader): + logger: Any + def __init__(self, parent, strategy_key) -> None: ... + def setup_query(self, compile_state, query_entity, path, loadopt, adapter, column_collection, memoized_populators, **kwargs) -> None: ... # type: ignore[override] + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + +class DeferredColumnLoader(LoaderStrategy): + logger: Any + raiseload: Any + columns: Any + group: Any + def __init__(self, parent, strategy_key) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + def setup_query(self, compile_state, query_entity, path, loadopt, adapter, column_collection, memoized_populators, only_load_props: Incomplete | None = ..., **kw) -> None: ... # type: ignore[override] + +class LoadDeferredColumns: + key: Any + raiseload: Any + def __init__(self, key, raiseload: bool = ...) -> None: ... + def __call__(self, state, passive=...): ... + +class AbstractRelationshipLoader(LoaderStrategy): + mapper: Any + entity: Any + target: Any + uselist: Any + def __init__(self, parent, strategy_key) -> None: ... + +class DoNothingLoader(LoaderStrategy): + logger: Any + +class NoLoader(AbstractRelationshipLoader): + logger: Any + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots): + logger: Any + is_aliased_class: Any + use_get: Any + def __init__(self, parent, strategy_key) -> None: ... + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class LoadLazyAttribute: + key: Any + strategy_key: Any + loadopt: Any + extra_criteria: Any + def __init__(self, key, initiating_strategy, loadopt, extra_criteria) -> None: ... + def __call__(self, state, passive=...): ... + +class PostLoader(AbstractRelationshipLoader): ... + +class ImmediateLoader(PostLoader): + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class SubqueryLoader(PostLoader): + logger: Any + join_depth: Any + def __init__(self, parent, strategy_key) -> None: ... + def init_class_attribute(self, mapper) -> None: ... 
+ + class _SubqCollections: + session: Any + execution_options: Any + load_options: Any + params: Any + subq: Any + def __init__(self, context, subq) -> None: ... + def get(self, key, default): ... + def loader(self, state, dict_, row) -> None: ... + + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators): ... + +class JoinedLoader(AbstractRelationshipLoader): + logger: Any + join_depth: Any + def __init__(self, parent, strategy_key) -> None: ... + def init_class_attribute(self, mapper) -> None: ... + def setup_query( + self, + compile_state, + query_entity, + path, + loadopt, + adapter, + column_collection: Incomplete | None = ..., + parentmapper: Incomplete | None = ..., + chained_from_outerjoin: bool = ..., + **kwargs, + ) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class SelectInLoader(PostLoader, util.MemoizedSlots): + logger: Any + + class query_info(NamedTuple): + load_only_child: Any + load_with_join: Any + in_expr: Any + pk_cols: Any + zero_idx: Any + child_lookup_cols: Any + join_depth: Any + omit_join: Any + def __init__(self, parent, strategy_key) -> None: ... + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators): ... + +def single_parent_validator(desc, prop): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/strategy_options.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/strategy_options.pyi new file mode 100644 index 00000000..5422cfff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/strategy_options.pyi @@ -0,0 +1,66 @@ +from _typeshed import Incomplete +from typing import Any + +from ..sql.base import Generative +from .interfaces import LoaderOption + +class Load(Generative, LoaderOption): + path: Any + context: Any + local_opts: Any + is_class_strategy: bool + def __init__(self, entity) -> None: ... + @classmethod + def for_existing_path(cls, path): ... + is_opts_only: bool + strategy: Any + propagate_to_loaders: bool + def process_compile_state_replaced_entities(self, compile_state, mapper_entities) -> None: ... + def process_compile_state(self, compile_state) -> None: ... + def options(self, *opts) -> None: ... + def set_relationship_strategy(self, attr, strategy, propagate_to_loaders: bool = ...) -> None: ... + def set_column_strategy(self, attrs, strategy, opts: Incomplete | None = ..., opts_only: bool = ...) -> None: ... + def set_generic_strategy(self, attrs, strategy) -> None: ... + def set_class_strategy(self, strategy, opts) -> None: ... + # added dynamically at runtime + def contains_eager(self, attr, alias: Incomplete | None = ...): ... + def load_only(self, *attrs): ... + def joinedload(self, attr, innerjoin: Incomplete | None = ...): ... + def subqueryload(self, attr): ... + def selectinload(self, attr): ... + def lazyload(self, attr): ... + def immediateload(self, attr): ... + def noload(self, attr): ... + def raiseload(self, attr, sql_only: bool = ...): ... + def defaultload(self, attr): ... + def defer(self, key, raiseload: bool = ...): ... + def undefer(self, key): ... + def undefer_group(self, name): ... + def with_expression(self, key, expression): ... + def selectin_polymorphic(self, classes): ... 
+ +class _UnboundLoad(Load): + path: Any + local_opts: Any + def __init__(self) -> None: ... + +class loader_option: + name: Any + fn: Any + def __call__(self, fn): ... + +def contains_eager(loadopt, attr, alias: Incomplete | None = ...): ... +def load_only(loadopt, *attrs): ... +def joinedload(loadopt, attr, innerjoin: Incomplete | None = ...): ... +def subqueryload(loadopt, attr): ... +def selectinload(loadopt, attr): ... +def lazyload(loadopt, attr): ... +def immediateload(loadopt, attr): ... +def noload(loadopt, attr): ... +def raiseload(loadopt, attr, sql_only: bool = ...): ... +def defaultload(loadopt, attr): ... +def defer(loadopt, key, raiseload: bool = ...): ... +def undefer(loadopt, key): ... +def undefer_group(loadopt, name): ... +def with_expression(loadopt, key, expression): ... +def selectin_polymorphic(loadopt, classes): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/sync.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/sync.pyi new file mode 100644 index 00000000..558c9c84 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/sync.pyi @@ -0,0 +1,6 @@ +def populate(source, source_mapper, dest, dest_mapper, synchronize_pairs, uowcommit, flag_cascaded_pks) -> None: ... +def bulk_populate_inherit_keys(source_dict, source_mapper, synchronize_pairs) -> None: ... +def clear(dest, dest_mapper, synchronize_pairs) -> None: ... +def update(source, source_mapper, dest, old_prefix, synchronize_pairs) -> None: ... +def populate_dict(source, source_mapper, dict_, synchronize_pairs) -> None: ... +def source_modified(uowcommit, source, source_mapper, synchronize_pairs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/unitofwork.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/unitofwork.pyi new file mode 100644 index 00000000..20de2df9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/unitofwork.pyi @@ -0,0 +1,109 @@ +from _typeshed import Incomplete +from typing import Any + +def track_cascade_events(descriptor, prop): ... + +class UOWTransaction: + session: Any + attributes: Any + deps: Any + mappers: Any + presort_actions: Any + postsort_actions: Any + dependencies: Any + states: Any + post_update_states: Any + def __init__(self, session): ... + @property + def has_work(self): ... + def was_already_deleted(self, state): ... + def is_deleted(self, state): ... + def memo(self, key, callable_): ... + def remove_state_actions(self, state) -> None: ... + def get_attribute_history(self, state, key, passive=...): ... + def has_dep(self, processor): ... + def register_preprocessor(self, processor, fromparent) -> None: ... + def register_object( + self, + state, + isdelete: bool = ..., + listonly: bool = ..., + cancel_delete: bool = ..., + operation: Incomplete | None = ..., + prop: Incomplete | None = ..., + ): ... + def register_post_update(self, state, post_update_cols) -> None: ... + def filter_states_for_dep(self, dep, states): ... + def states_for_mapper_hierarchy(self, mapper, isdelete, listonly) -> None: ... + def execute(self): ... + def finalize_flush_changes(self) -> None: ... + +class IterateMappersMixin: ... 
+ +class Preprocess(IterateMappersMixin): + dependency_processor: Any + fromparent: Any + processed: Any + setup_flush_actions: bool + def __init__(self, dependency_processor, fromparent) -> None: ... + def execute(self, uow): ... + +class PostSortRec: + disabled: Any + # At runtime __new__ is defined, not __init__, + # But if we define __new__ here then all subclasses are identified by pyright + # as having __init__ methods that are inconsistent with their __new__ methods + def __init__(self, uow, *args) -> None: ... + def execute_aggregate(self, uow, recs) -> None: ... + +class ProcessAll(IterateMappersMixin, PostSortRec): + dependency_processor: Any + sort_key: Any + isdelete: Any + fromparent: Any + def __init__(self, uow, dependency_processor, isdelete, fromparent) -> None: ... + def execute(self, uow) -> None: ... + def per_state_flush_actions(self, uow): ... + +class PostUpdateAll(PostSortRec): + mapper: Any + isdelete: Any + sort_key: Any + def __init__(self, uow, mapper, isdelete) -> None: ... + def execute(self, uow) -> None: ... + +class SaveUpdateAll(PostSortRec): + mapper: Any + sort_key: Any + def __init__(self, uow, mapper) -> None: ... + def execute(self, uow) -> None: ... + def per_state_flush_actions(self, uow) -> None: ... + +class DeleteAll(PostSortRec): + mapper: Any + sort_key: Any + def __init__(self, uow, mapper) -> None: ... + def execute(self, uow) -> None: ... + def per_state_flush_actions(self, uow) -> None: ... + +class ProcessState(PostSortRec): + dependency_processor: Any + sort_key: Any + isdelete: Any + state: Any + def __init__(self, uow, dependency_processor, isdelete, state) -> None: ... + def execute_aggregate(self, uow, recs) -> None: ... + +class SaveUpdateState(PostSortRec): + state: Any + mapper: Any + sort_key: Any + def __init__(self, uow, state) -> None: ... + def execute_aggregate(self, uow, recs) -> None: ... + +class DeleteState(PostSortRec): + state: Any + mapper: Any + sort_key: Any + def __init__(self, uow, state) -> None: ... + def execute_aggregate(self, uow, recs) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/util.pyi new file mode 100644 index 00000000..b7c6fd4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/orm/util.pyi @@ -0,0 +1,193 @@ +from _typeshed import Incomplete +from typing import Any + +from ..sql import base as sql_base, expression, util as sql_util +from ..sql.annotation import SupportsCloneAnnotations +from .base import ( + InspectionAttr as InspectionAttr, + _class_to_mapper as _class_to_mapper, + _never_set as _never_set, + _none_set as _none_set, + attribute_str as attribute_str, + class_mapper as class_mapper, + instance_str as instance_str, + object_mapper as object_mapper, + object_state as object_state, + state_attribute_str as state_attribute_str, + state_class_str as state_class_str, + state_str as state_str, +) +from .interfaces import CriteriaOption, ORMColumnsClauseRole, ORMEntityColumnsClauseRole, ORMFromClauseRole + +all_cascades: Any + +class CascadeOptions(frozenset[Any]): + save_update: Any + delete: Any + refresh_expire: Any + merge: Any + expunge: Any + delete_orphan: Any + def __new__(cls, value_list): ... + @classmethod + def from_string(cls, arg): ... 
+ +def polymorphic_union(table_map, typecolname, aliasname: str = ..., cast_nulls: bool = ...): ... +def identity_key(*args, **kwargs): ... + +class ORMAdapter(sql_util.ColumnAdapter): + mapper: Any + aliased_class: Any + def __init__( + self, + entity, + equivalents: Incomplete | None = ..., + adapt_required: bool = ..., + allow_label_resolve: bool = ..., + anonymize_labels: bool = ..., + ) -> None: ... + +class AliasedClass: + __name__: Any + def __init__( + self, + mapped_class_or_ac, + alias: Incomplete | None = ..., + name: Incomplete | None = ..., + flat: bool = ..., + adapt_on_names: bool = ..., + with_polymorphic_mappers=..., + with_polymorphic_discriminator: Incomplete | None = ..., + base_alias: Incomplete | None = ..., + use_mapper_path: bool = ..., + represents_outer_join: bool = ..., + ) -> None: ... + def __getattr__(self, key: str): ... + +class AliasedInsp(ORMEntityColumnsClauseRole, ORMFromClauseRole, sql_base.MemoizedHasCacheKey, InspectionAttr): + mapper: Any + selectable: Any + name: Any + polymorphic_on: Any + represents_outer_join: Any + with_polymorphic_mappers: Any + def __init__( + self, + entity, + inspected, + selectable, + name, + with_polymorphic_mappers, + polymorphic_on, + _base_alias, + _use_mapper_path, + adapt_on_names, + represents_outer_join, + nest_adapters: bool, # added in 1.4.30 + ) -> None: ... + @property + def entity(self): ... + is_aliased_class: bool + def __clause_element__(self): ... + @property + def entity_namespace(self): ... + @property + def class_(self): ... + +class _WrapUserEntity: + subject: Any + def __init__(self, subject) -> None: ... + def __getattribute__(self, name: str): ... + +class LoaderCriteriaOption(CriteriaOption): + root_entity: Any + entity: Any + deferred_where_criteria: bool + where_criteria: Any + include_aliases: Any + propagate_to_loaders: Any + def __init__( + self, + entity_or_base, + where_criteria, + loader_only: bool = ..., + include_aliases: bool = ..., + propagate_to_loaders: bool = ..., + track_closure_variables: bool = ..., + ) -> None: ... + def process_compile_state_replaced_entities(self, compile_state, mapper_entities): ... + def process_compile_state(self, compile_state) -> None: ... + def get_global_criteria(self, attributes) -> None: ... + +def aliased( + element, alias: Incomplete | None = ..., name: Incomplete | None = ..., flat: bool = ..., adapt_on_names: bool = ... +): ... +def with_polymorphic( + base, + classes, + selectable: bool = ..., + flat: bool = ..., + polymorphic_on: Incomplete | None = ..., + aliased: bool = ..., + adapt_on_names: bool = ..., + innerjoin: bool = ..., + _use_mapper_path: bool = ..., + _existing_alias: Incomplete | None = ..., +) -> AliasedClass: ... + +class Bundle(ORMColumnsClauseRole, SupportsCloneAnnotations, sql_base.MemoizedHasCacheKey, InspectionAttr): + single_entity: bool + is_clause_element: bool + is_mapper: bool + is_aliased_class: bool + is_bundle: bool + name: Any + exprs: Any + c: Any + def __init__(self, name, *exprs, **kw) -> None: ... + @property + def mapper(self): ... + @property + def entity(self): ... + @property + def entity_namespace(self): ... + columns: Any + def __clause_element__(self): ... + @property + def clauses(self): ... + def label(self, name): ... + def create_row_processor(self, query, procs, labels): ... 
+ +class _ORMJoin(expression.Join): + __visit_name__: Any + inherit_cache: bool + onclause: Any + def __init__( + self, + left, + right, + onclause: Incomplete | None = ..., + isouter: bool = ..., + full: bool = ..., + _left_memo: Incomplete | None = ..., + _right_memo: Incomplete | None = ..., + _extra_criteria=..., + ) -> None: ... + def join( + self, + right, + onclause: Incomplete | None = ..., + isouter: bool = ..., + full: bool = ..., + join_to_left: Incomplete | None = ..., + ): ... + def outerjoin(self, right, onclause: Incomplete | None = ..., full: bool = ..., join_to_left: Incomplete | None = ...): ... + +def join( + left, right, onclause: Incomplete | None = ..., isouter: bool = ..., full: bool = ..., join_to_left: Incomplete | None = ... +): ... +def outerjoin(left, right, onclause: Incomplete | None = ..., full: bool = ..., join_to_left: Incomplete | None = ...): ... +def with_parent(instance, prop, from_entity: Incomplete | None = ...): ... +def has_identity(object_): ... +def was_deleted(object_): ... +def randomize_unitofwork() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/__init__.pyi new file mode 100644 index 00000000..977c65ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/__init__.pyi @@ -0,0 +1,27 @@ +from .base import Pool as Pool, reset_commit as reset_commit, reset_none as reset_none, reset_rollback as reset_rollback +from .dbapi_proxy import clear_managers as clear_managers, manage as manage +from .impl import ( + AssertionPool as AssertionPool, + AsyncAdaptedQueuePool as AsyncAdaptedQueuePool, + FallbackAsyncAdaptedQueuePool as FallbackAsyncAdaptedQueuePool, + NullPool as NullPool, + QueuePool as QueuePool, + SingletonThreadPool as SingletonThreadPool, + StaticPool as StaticPool, +) + +__all__ = [ + "Pool", + "reset_commit", + "reset_none", + "reset_rollback", + "clear_managers", + "manage", + "AssertionPool", + "NullPool", + "QueuePool", + "AsyncAdaptedQueuePool", + "FallbackAsyncAdaptedQueuePool", + "SingletonThreadPool", + "StaticPool", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/base.pyi new file mode 100644 index 00000000..b169e6d8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/base.pyi @@ -0,0 +1,89 @@ +from _typeshed import Incomplete +from typing import Any + +from .. import log +from ..util import memoized_property + +reset_rollback: Any +reset_commit: Any +reset_none: Any + +class _ConnDialect: + is_async: bool + def do_rollback(self, dbapi_connection) -> None: ... + def do_commit(self, dbapi_connection) -> None: ... + def do_close(self, dbapi_connection) -> None: ... + def do_ping(self, dbapi_connection) -> None: ... + def get_driver_connection(self, connection): ... 
+ +class _AsyncConnDialect(_ConnDialect): + is_async: bool + +class Pool(log.Identified): + logging_name: Any + echo: Any + def __init__( + self, + creator, + recycle: int = ..., + echo: Incomplete | None = ..., + logging_name: Incomplete | None = ..., + reset_on_return: bool = ..., + events: Incomplete | None = ..., + dialect: Incomplete | None = ..., + pre_ping: bool = ..., + _dispatch: Incomplete | None = ..., + ) -> None: ... + def recreate(self) -> None: ... + def dispose(self) -> None: ... + def connect(self): ... + def status(self) -> None: ... + +class _ConnectionRecord: + finalize_callback: Any + def __init__(self, pool, connect: bool = ...) -> None: ... + fresh: bool + fairy_ref: Any + starttime: Any + dbapi_connection: Any + @property + def driver_connection(self): ... + @property + def connection(self): ... + @connection.setter + def connection(self, value) -> None: ... + @memoized_property + def info(self): ... + @memoized_property + def record_info(self): ... + @classmethod + def checkout(cls, pool): ... + def checkin(self, _fairy_was_created: bool = ...) -> None: ... + @property + def in_use(self): ... + @property + def last_connect_time(self): ... + def close(self) -> None: ... + def invalidate(self, e: Incomplete | None = ..., soft: bool = ...) -> None: ... + def get_connection(self): ... + +class _ConnectionFairy: + dbapi_connection: Any + def __init__(self, dbapi_connection, connection_record, echo) -> None: ... + @property + def driver_connection(self): ... + @property + def connection(self): ... + @connection.setter + def connection(self, value) -> None: ... + @property + def is_valid(self): ... + @memoized_property + def info(self): ... + @property + def record_info(self): ... + def invalidate(self, e: Incomplete | None = ..., soft: bool = ...) -> None: ... + def cursor(self, *args, **kwargs): ... + def __getattr__(self, key: str): ... + def detach(self) -> None: ... + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/dbapi_proxy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/dbapi_proxy.pyi new file mode 100644 index 00000000..6bfa8ae8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/dbapi_proxy.pyi @@ -0,0 +1,19 @@ +from typing import Any + +proxies: Any + +def manage(module, **params): ... +def clear_managers() -> None: ... + +class _DBProxy: + module: Any + kw: Any + poolclass: Any + pools: Any + def __init__(self, module, poolclass=..., **kw) -> None: ... + def close(self) -> None: ... + def __del__(self) -> None: ... + def __getattr__(self, key: str): ... + def get_pool(self, *args, **kw): ... + def connect(self, *args, **kw): ... + def dispose(self, *args, **kw) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/events.pyi new file mode 100644 index 00000000..8a5dde54 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/events.pyi @@ -0,0 +1,13 @@ +from .. import event + +class PoolEvents(event.Events): + def connect(self, dbapi_connection, connection_record) -> None: ... + def first_connect(self, dbapi_connection, connection_record) -> None: ... 
+ def checkout(self, dbapi_connection, connection_record, connection_proxy) -> None: ... + def checkin(self, dbapi_connection, connection_record) -> None: ... + def reset(self, dbapi_connection, connection_record) -> None: ... + def invalidate(self, dbapi_connection, connection_record, exception) -> None: ... + def soft_invalidate(self, dbapi_connection, connection_record, exception) -> None: ... + def close(self, dbapi_connection, connection_record) -> None: ... + def detach(self, dbapi_connection, connection_record) -> None: ... + def close_detached(self, dbapi_connection) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/impl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/impl.pyi new file mode 100644 index 00000000..2646cafd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/pool/impl.pyi @@ -0,0 +1,46 @@ +from typing import Any + +from ..util import memoized_property +from .base import Pool + +class QueuePool(Pool): + def __init__( + self, creator, pool_size: int = ..., max_overflow: int = ..., timeout: float = ..., use_lifo: bool = ..., **kw + ) -> None: ... + def recreate(self): ... + def dispose(self) -> None: ... + def status(self): ... + def size(self): ... + def timeout(self): ... + def checkedin(self): ... + def overflow(self): ... + def checkedout(self): ... + +class AsyncAdaptedQueuePool(QueuePool): ... +class FallbackAsyncAdaptedQueuePool(AsyncAdaptedQueuePool): ... + +class NullPool(Pool): + def status(self): ... + def recreate(self): ... + def dispose(self) -> None: ... + +class SingletonThreadPool(Pool): + size: Any + def __init__(self, creator, pool_size: int = ..., **kw) -> None: ... + def recreate(self): ... + def dispose(self) -> None: ... + def status(self): ... + def connect(self): ... + +class StaticPool(Pool): + @memoized_property + def connection(self): ... + def status(self): ... + def dispose(self) -> None: ... + def recreate(self): ... + +class AssertionPool(Pool): + def __init__(self, *args, **kw) -> None: ... + def status(self): ... + def dispose(self) -> None: ... + def recreate(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/processors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/processors.pyi new file mode 100644 index 00000000..2c74bf49 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/processors.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +def str_to_datetime_processor_factory(regexp, type_): ... +def py_fallback(): ... +def to_unicode_processor_factory(encoding, errors: Incomplete | None = ...): ... +def to_conditional_unicode_processor_factory(encoding, errors: Incomplete | None = ...): ... +def to_decimal_processor_factory(target_class, scale): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/schema.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/schema.pyi new file mode 100644 index 00000000..ef2cc1ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/schema.pyi @@ -0,0 +1,51 @@ +from .sql.base import SchemaVisitor as SchemaVisitor +from .sql.ddl import ( + DDL as DDL, + AddConstraint as AddConstraint, + CreateColumn as CreateColumn, + CreateIndex as CreateIndex, + CreateSchema as CreateSchema, + CreateSequence as CreateSequence, + CreateTable as CreateTable, + DDLBase as DDLBase, + DDLElement as DDLElement, + DropColumnComment as DropColumnComment, + DropConstraint as DropConstraint, + DropIndex as DropIndex, + DropSchema as DropSchema, + DropSequence as DropSequence, + DropTable as DropTable, + DropTableComment as DropTableComment, + SetColumnComment as SetColumnComment, + SetTableComment as SetTableComment, + _CreateDropBase as _CreateDropBase, + _DDLCompiles as _DDLCompiles, + _DropView as _DropView, + sort_tables as sort_tables, + sort_tables_and_constraints as sort_tables_and_constraints, +) +from .sql.naming import conv as conv +from .sql.schema import ( + BLANK_SCHEMA as BLANK_SCHEMA, + CheckConstraint as CheckConstraint, + Column as Column, + ColumnCollectionConstraint as ColumnCollectionConstraint, + ColumnCollectionMixin as ColumnCollectionMixin, + ColumnDefault as ColumnDefault, + Computed as Computed, + Constraint as Constraint, + DefaultClause as DefaultClause, + DefaultGenerator as DefaultGenerator, + FetchedValue as FetchedValue, + ForeignKey as ForeignKey, + ForeignKeyConstraint as ForeignKeyConstraint, + Identity as Identity, + Index as Index, + MetaData as MetaData, + PrimaryKeyConstraint as PrimaryKeyConstraint, + SchemaItem as SchemaItem, + Sequence as Sequence, + Table as Table, + ThreadLocalMetaData as ThreadLocalMetaData, + UniqueConstraint as UniqueConstraint, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/__init__.pyi new file mode 100644 index 00000000..625094b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/__init__.pyi @@ -0,0 +1,91 @@ +from . 
import sqltypes as sqltypes +from .base import Executable as Executable +from .compiler import ( + COLLECT_CARTESIAN_PRODUCTS as COLLECT_CARTESIAN_PRODUCTS, + FROM_LINTING as FROM_LINTING, + NO_LINTING as NO_LINTING, + WARN_LINTING as WARN_LINTING, +) +from .expression import ( + LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT, + LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY, + LABEL_STYLE_NONE as LABEL_STYLE_NONE, + LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL, + Alias as Alias, + ClauseElement as ClauseElement, + ColumnCollection as ColumnCollection, + ColumnElement as ColumnElement, + CompoundSelect as CompoundSelect, + Delete as Delete, + False_ as False_, + FromClause as FromClause, + Insert as Insert, + Join as Join, + LambdaElement as LambdaElement, + Select as Select, + Selectable as Selectable, + StatementLambdaElement as StatementLambdaElement, + Subquery as Subquery, + TableClause as TableClause, + TableSample as TableSample, + True_ as True_, + Update as Update, + Values as Values, + alias as alias, + all_ as all_, + and_ as and_, + any_ as any_, + asc as asc, + between as between, + bindparam as bindparam, + case as case, + cast as cast, + collate as collate, + column as column, + cte as cte, + delete as delete, + desc as desc, + distinct as distinct, + except_ as except_, + except_all as except_all, + exists as exists, + extract as extract, + false as false, + func as func, + funcfilter as funcfilter, + insert as insert, + intersect as intersect, + intersect_all as intersect_all, + join as join, + label as label, + lambda_stmt as lambda_stmt, + lateral as lateral, + literal as literal, + literal_column as literal_column, + modifier as modifier, + not_ as not_, + null as null, + nulls_first as nulls_first, + nulls_last as nulls_last, + nullsfirst as nullsfirst, + nullslast as nullslast, + or_ as or_, + outerjoin as outerjoin, + outparam as outparam, + over as over, + quoted_name as quoted_name, + select as select, + subquery as subquery, + table as table, + tablesample as tablesample, + text as text, + true as true, + tuple_ as tuple_, + type_coerce as type_coerce, + union as union, + union_all as union_all, + update as update, + values as values, + within_group as within_group, +) +from .visitors import ClauseVisitor as ClauseVisitor diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/annotation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/annotation.pyi new file mode 100644 index 00000000..e631cb48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/annotation.pyi @@ -0,0 +1,18 @@ +from typing import Any + +EMPTY_ANNOTATIONS: Any + +class SupportsAnnotations: ... +class SupportsCloneAnnotations(SupportsAnnotations): ... +class SupportsWrappingAnnotations(SupportsAnnotations): ... + +class Annotated: + __dict__: Any + def __init__(self, element, values) -> None: ... + def __reduce__(self): ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... + @property + def entity_namespace(self): ... 
+ +annotated_classes: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/base.pyi new file mode 100644 index 00000000..885aac1d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/base.pyi @@ -0,0 +1,161 @@ +from _typeshed import Incomplete +from collections.abc import MutableMapping +from typing import Any, ClassVar +from typing_extensions import Self + +from .. import util +from ..util import HasMemoized, hybridmethod, memoized_property +from . import roles +from .elements import ColumnElement +from .traversals import ( + HasCacheKey as HasCacheKey, + HasCopyInternals as HasCopyInternals, + MemoizedHasCacheKey as MemoizedHasCacheKey, +) +from .visitors import ClauseVisitor + +coercions: Any +elements: Any +type_api: Any +PARSE_AUTOCOMMIT: Any +NO_ARG: Any + +class Immutable: + def unique_params(self, *optionaldict, **kwargs) -> None: ... + def params(self, *optionaldict, **kwargs) -> None: ... + +class SingletonConstant(Immutable): + def __new__(cls, *arg, **kw): ... + +class _DialectArgView(MutableMapping[Any, Any]): + obj: Any + def __init__(self, obj) -> None: ... + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + +class _DialectArgDict(MutableMapping[Any, Any]): + def __init__(self) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + +class DialectKWArgs: + @classmethod + def argument_for(cls, dialect_name, argument_name, default) -> None: ... + @memoized_property + def dialect_kwargs(self): ... + @property + def kwargs(self): ... + @memoized_property + def dialect_options(self): ... + +class CompileState: + plugins: Any + @classmethod + def create_for_statement(cls, statement, compiler, **kw): ... + statement: Any + def __init__(self, statement, compiler, **kw) -> None: ... + @classmethod + def get_plugin_class(cls, statement): ... + @classmethod + def plugin_for(cls, plugin_name, visit_name): ... + +class Generative(HasMemoized): ... +class InPlaceGenerative(HasMemoized): ... +class HasCompileState(Generative): ... + +class _MetaOptions(type): + def __init__(cls, classname, bases, dict_) -> None: ... + def __add__(self, other): ... + +class Options: + def __init__(self, **kw) -> None: ... + def __add__(self, other): ... + def __eq__(self, other): ... + @classmethod + def isinstance(cls, klass): ... + @hybridmethod + def add_to_element(self, name, value): ... + @classmethod + def safe_merge(cls, other): ... + @classmethod + def from_execution_options(cls, key, attrs, exec_options, statement_exec_options): ... + +class CacheableOptions(Options, HasCacheKey): ... + +class ExecutableOption(HasCopyInternals): + __visit_name__: str + +class Executable(roles.StatementRole, Generative): + supports_execution: bool + is_select: bool + is_update: bool + is_insert: bool + is_text: bool + is_delete: bool + is_dml: bool + def options(self, *options) -> Self: ... + def execution_options(self, **kw) -> Self: ... + def get_execution_options(self): ... + def execute(self, *multiparams, **params): ... + def scalar(self, *multiparams, **params): ... + @property + def bind(self): ... 
+ +class prefix_anon_map(dict[Any, Any]): + def __missing__(self, key): ... + +class SchemaEventTarget: ... + +class SchemaVisitor(ClauseVisitor): + __traverse_options__: Any + +class ColumnCollection: + def __init__(self, columns: Incomplete | None = ...) -> None: ... + def keys(self): ... + def values(self): ... + def items(self): ... + def __bool__(self) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __getitem__(self, key): ... + def __getattr__(self, key: str): ... + def __contains__(self, key): ... + def compare(self, other): ... + def __eq__(self, other): ... + def get(self, key, default: Incomplete | None = ...): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def __setattr__(self, key: str, obj) -> None: ... + def clear(self) -> None: ... + def remove(self, column) -> None: ... + def update(self, iter_) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def add(self, column, key: Incomplete | None = ...) -> None: ... + def contains_column(self, col): ... + def as_immutable(self): ... + def corresponding_column(self, column, require_embedded: bool = ...): ... + +class DedupeColumnCollection(ColumnCollection): + def add(self, column, key: Incomplete | None = ...) -> None: ... + def extend(self, iter_) -> None: ... + def remove(self, column) -> None: ... + def replace(self, column) -> None: ... + +class ImmutableColumnCollection(util.ImmutableContainer, ColumnCollection): + def __init__(self, collection) -> None: ... + add: Any + extend: Any + remove: Any + +class ColumnSet(util.ordered_column_set[ColumnElement[Any]]): + def contains_column(self, col): ... + def extend(self, cols) -> None: ... + def __add__(self, other): ... + def __eq__(self, other): ... + def __hash__(self) -> int: ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/coercions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/coercions.pyi new file mode 100644 index 00000000..03425225 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/coercions.pyi @@ -0,0 +1,74 @@ +from _typeshed import Incomplete +from typing import Any + +from . import roles + +elements: Any +lambdas: Any +schema: Any +selectable: Any +sqltypes: Any +traversals: Any + +def expect( + role, + element, + apply_propagate_attrs: Incomplete | None = ..., + argname: Incomplete | None = ..., + post_inspect: bool = ..., + **kw, +): ... +def expect_as_key(role, element, **kw): ... +def expect_col_expression_collection(role, expressions) -> None: ... + +class RoleImpl: + name: Any + def __init__(self, role_class) -> None: ... + +class _Deannotate: ... +class _StringOnly: ... +class _ReturnsStringKey: ... +class _ColumnCoercions: ... +class _NoTextCoercion: ... +class _CoerceLiterals: ... +class LiteralValueImpl(RoleImpl): ... +class _SelectIsNotFrom: ... +class HasCacheKeyImpl(RoleImpl): ... +class ExecutableOptionImpl(RoleImpl): ... +class ExpressionElementImpl(_ColumnCoercions, RoleImpl): ... +class BinaryElementImpl(ExpressionElementImpl, RoleImpl): ... +class InElementImpl(RoleImpl): ... +class OnClauseImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl): ... +class WhereHavingImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl): ... +class StatementOptionImpl(_CoerceLiterals, RoleImpl): ... +class ColumnArgumentImpl(_NoTextCoercion, RoleImpl): ... 
+class ColumnArgumentOrKeyImpl(_ReturnsStringKey, RoleImpl): ... +class StrAsPlainColumnImpl(_CoerceLiterals, RoleImpl): ... +class ByOfImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl, roles.ByOfRole): ... +class OrderByImpl(ByOfImpl, RoleImpl): ... +class GroupByImpl(ByOfImpl, RoleImpl): ... +class DMLColumnImpl(_ReturnsStringKey, RoleImpl): ... +class ConstExprImpl(RoleImpl): ... +class TruncatedLabelImpl(_StringOnly, RoleImpl): ... +class DDLExpressionImpl(_Deannotate, _CoerceLiterals, RoleImpl): ... +class DDLConstraintColumnImpl(_Deannotate, _ReturnsStringKey, RoleImpl): ... +class DDLReferredColumnImpl(DDLConstraintColumnImpl): ... +class LimitOffsetImpl(RoleImpl): ... +class LabeledColumnExprImpl(ExpressionElementImpl): ... +class ColumnsClauseImpl(_SelectIsNotFrom, _CoerceLiterals, RoleImpl): ... +class ReturnsRowsImpl(RoleImpl): ... +class StatementImpl(_CoerceLiterals, RoleImpl): ... +class SelectStatementImpl(_NoTextCoercion, RoleImpl): ... +class HasCTEImpl(ReturnsRowsImpl): ... +class IsCTEImpl(RoleImpl): ... +class JoinTargetImpl(RoleImpl): ... +class FromClauseImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl): ... +class StrictFromClauseImpl(FromClauseImpl): ... +class AnonymizedFromClauseImpl(StrictFromClauseImpl): ... +class DMLTableImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl): ... +class DMLSelectImpl(_NoTextCoercion, RoleImpl): ... +class CompoundElementImpl(_NoTextCoercion, RoleImpl): ... + +cls: Any +name: Any +impl: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/compiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/compiler.pyi new file mode 100644 index 00000000..44178d7f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/compiler.pyi @@ -0,0 +1,484 @@ +from _typeshed import Incomplete +from typing import NamedTuple + +from ..util import memoized_property +from . import elements + +RESERVED_WORDS: Incomplete +LEGAL_CHARACTERS: Incomplete +LEGAL_CHARACTERS_PLUS_SPACE: Incomplete +ILLEGAL_INITIAL_CHARACTERS: Incomplete +FK_ON_DELETE: Incomplete +FK_ON_UPDATE: Incomplete +FK_INITIALLY: Incomplete +BIND_PARAMS: Incomplete +BIND_PARAMS_ESC: Incomplete +BIND_TEMPLATES: Incomplete +OPERATORS: Incomplete +FUNCTIONS: Incomplete +EXTRACT_MAP: Incomplete +COMPOUND_KEYWORDS: Incomplete +RM_RENDERED_NAME: int +RM_NAME: int +RM_OBJECTS: int +RM_TYPE: int + +class ExpandedState(NamedTuple): + statement: Incomplete + additional_parameters: Incomplete + processors: Incomplete + positiontup: Incomplete + parameter_expansion: Incomplete + +NO_LINTING: Incomplete +COLLECT_CARTESIAN_PRODUCTS: Incomplete +WARN_LINTING: Incomplete +FROM_LINTING: Incomplete + +class FromLinter: + def lint(self, start: Incomplete | None = ...): ... + def warn(self) -> None: ... + +class Compiled: + schema_translate_map: Incomplete + execution_options: Incomplete + compile_state: Incomplete + cache_key: Incomplete + dialect: Incomplete + preparer: Incomplete + statement: Incomplete + can_execute: Incomplete + string: Incomplete + def __init__( + self, + dialect, + statement, + schema_translate_map: Incomplete | None = ..., + render_schema_translate: bool = ..., + compile_kwargs=..., + ) -> None: ... + def visit_unsupported_compilation(self, element, err) -> None: ... + @property + def sql_compiler(self) -> None: ... + def process(self, obj, **kwargs): ... 
+ def construct_params( + self, params: Incomplete | None = ..., extracted_parameters: Incomplete | None = ..., escape_names: bool = ... + ) -> None: ... + @property + def params(self): ... + +class TypeCompiler: + ensure_kwarg: str + dialect: Incomplete + def __init__(self, dialect) -> None: ... + def process(self, type_, **kw): ... + def visit_unsupported_compilation(self, element, err, **kw) -> None: ... + +class _CompileLabel(elements.ColumnElement[Incomplete]): + __visit_name__: str + element: Incomplete + name: Incomplete + def __init__(self, col, name, alt_names=...) -> None: ... + @property + def proxy_set(self): ... + @property + def type(self): ... + def self_group(self, **kw): ... + +class SQLCompiler(Compiled): + extract_map: Incomplete + compound_keywords: Incomplete + isdelete: bool + isinsert: bool + isupdate: bool + isplaintext: bool + returning: Incomplete + returning_precedes_values: bool + render_table_with_column_in_update_from: bool + ansi_bind_rules: bool + insert_single_values_expr: Incomplete + literal_execute_params: Incomplete + post_compile_params: Incomplete + escaped_bind_names: Incomplete + has_out_parameters: bool + insert_prefetch: Incomplete + update_prefetch: Incomplete + postfetch_lastrowid: bool + positiontup: Incomplete + inline: bool + column_keys: Incomplete + cache_key: Incomplete + for_executemany: Incomplete + linting: Incomplete + binds: Incomplete + bind_names: Incomplete + stack: Incomplete + positional: Incomplete + bindtemplate: Incomplete + ctes: Incomplete + label_length: Incomplete + anon_map: Incomplete + truncated_names: Incomplete + def __init__( + self, + dialect, + statement, + cache_key: Incomplete | None = ..., + column_keys: Incomplete | None = ..., + for_executemany: bool = ..., + linting=..., + **kwargs, + ) -> None: ... + @property + def current_executable(self): ... + @property + def prefetch(self): ... + def is_subquery(self): ... + @property + def sql_compiler(self): ... + def construct_params(self, params: Incomplete | None = ..., _group_number: Incomplete | None = ..., _check: bool = ..., extracted_parameters: Incomplete | None = ..., escape_names: bool = ...): ... # type: ignore[override] + @property + def params(self): ... + def default_from(self): ... + def visit_grouping(self, grouping, asfrom: bool = ..., **kwargs): ... + def visit_select_statement_grouping(self, grouping, **kwargs): ... + def visit_label_reference(self, element, within_columns_clause: bool = ..., **kwargs): ... + def visit_textual_label_reference(self, element, within_columns_clause: bool = ..., **kwargs): ... + def visit_label( + self, + label, + add_to_result_map: Incomplete | None = ..., + within_label_clause: bool = ..., + within_columns_clause: bool = ..., + render_label_as_label: Incomplete | None = ..., + result_map_targets=..., + **kw, + ): ... + def visit_lambda_element(self, element, **kw): ... + def visit_column( + self, column, add_to_result_map: Incomplete | None = ..., include_table: bool = ..., result_map_targets=..., **kwargs + ): ... + def visit_collation(self, element, **kw): ... + def visit_fromclause(self, fromclause, **kwargs): ... + def visit_index(self, index, **kwargs): ... + def visit_typeclause(self, typeclause, **kw): ... + def post_process_text(self, text): ... + def escape_literal_column(self, text): ... + def visit_textclause(self, textclause, add_to_result_map: Incomplete | None = ..., **kw): ... + def visit_textual_select(self, taf, compound_index: Incomplete | None = ..., asfrom: bool = ..., **kw): ... 
+ def visit_null(self, expr, **kw): ... + def visit_true(self, expr, **kw): ... + def visit_false(self, expr, **kw): ... + def visit_tuple(self, clauselist, **kw): ... + def visit_clauselist(self, clauselist, **kw): ... + def visit_case(self, clause, **kwargs): ... + def visit_type_coerce(self, type_coerce, **kw): ... + def visit_cast(self, cast, **kwargs): ... + def visit_over(self, over, **kwargs): ... + def visit_withingroup(self, withingroup, **kwargs): ... + def visit_funcfilter(self, funcfilter, **kwargs): ... + def visit_extract(self, extract, **kwargs): ... + def visit_scalar_function_column(self, element, **kw): ... + def visit_function(self, func, add_to_result_map: Incomplete | None = ..., **kwargs): ... + def visit_next_value_func(self, next_value, **kw): ... + def visit_sequence(self, sequence, **kw) -> None: ... + def function_argspec(self, func, **kwargs): ... + compile_state: Incomplete + def visit_compound_select(self, cs, asfrom: bool = ..., compound_index: Incomplete | None = ..., **kwargs): ... + def visit_unary(self, unary, add_to_result_map: Incomplete | None = ..., result_map_targets=..., **kw): ... + def visit_is_true_unary_operator(self, element, operator, **kw): ... + def visit_is_false_unary_operator(self, element, operator, **kw): ... + def visit_not_match_op_binary(self, binary, operator, **kw): ... + def visit_not_in_op_binary(self, binary, operator, **kw): ... + def visit_empty_set_op_expr(self, type_, expand_op): ... + def visit_empty_set_expr(self, element_types) -> None: ... + def visit_binary( + self, + binary, + override_operator: Incomplete | None = ..., + eager_grouping: bool = ..., + from_linter: Incomplete | None = ..., + lateral_from_linter: Incomplete | None = ..., + **kw, + ): ... + def visit_function_as_comparison_op_binary(self, element, operator, **kw): ... + def visit_mod_binary(self, binary, operator, **kw): ... + def visit_custom_op_binary(self, element, operator, **kw): ... + def visit_custom_op_unary_operator(self, element, operator, **kw): ... + def visit_custom_op_unary_modifier(self, element, operator, **kw): ... + def visit_contains_op_binary(self, binary, operator, **kw): ... + def visit_not_contains_op_binary(self, binary, operator, **kw): ... + def visit_startswith_op_binary(self, binary, operator, **kw): ... + def visit_not_startswith_op_binary(self, binary, operator, **kw): ... + def visit_endswith_op_binary(self, binary, operator, **kw): ... + def visit_not_endswith_op_binary(self, binary, operator, **kw): ... + def visit_like_op_binary(self, binary, operator, **kw): ... + def visit_not_like_op_binary(self, binary, operator, **kw): ... + def visit_ilike_op_binary(self, binary, operator, **kw): ... + def visit_not_ilike_op_binary(self, binary, operator, **kw): ... + def visit_between_op_binary(self, binary, operator, **kw): ... + def visit_not_between_op_binary(self, binary, operator, **kw): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw) -> None: ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw) -> None: ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw) -> None: ... + def visit_bindparam( + self, + bindparam, + within_columns_clause: bool = ..., + literal_binds: bool = ..., + skip_bind_expression: bool = ..., + literal_execute: bool = ..., + render_postcompile: bool = ..., + **kwargs, + ): ... + def render_literal_bindparam( + self, bindparam, render_literal_value=..., bind_expression_template: Incomplete | None = ..., **kw + ): ... 
+ def render_literal_value(self, value, type_): ... + def bindparam_string( + self, + name, + positional_names: Incomplete | None = ..., + post_compile: bool = ..., + expanding: bool = ..., + escaped_from: Incomplete | None = ..., + **kw, + ): ... + execution_options: Incomplete + ctes_recursive: bool + def visit_cte( + self, + cte, + asfrom: bool = ..., + ashint: bool = ..., + fromhints: Incomplete | None = ..., + visiting_cte: Incomplete | None = ..., + from_linter: Incomplete | None = ..., + **kwargs, + ): ... + def visit_table_valued_alias(self, element, **kw): ... + def visit_table_valued_column(self, element, **kw): ... + def visit_alias( + self, + alias, + asfrom: bool = ..., + ashint: bool = ..., + iscrud: bool = ..., + fromhints: Incomplete | None = ..., + subquery: bool = ..., + lateral: bool = ..., + enclosing_alias: Incomplete | None = ..., + from_linter: Incomplete | None = ..., + **kwargs, + ): ... + def visit_subquery(self, subquery, **kw): ... + def visit_lateral(self, lateral_, **kw): ... + def visit_tablesample(self, tablesample, asfrom: bool = ..., **kw): ... + def visit_values(self, element, asfrom: bool = ..., from_linter: Incomplete | None = ..., **kw): ... + def get_render_as_alias_suffix(self, alias_name_text): ... + def format_from_hint_text(self, sqltext, table, hint, iscrud): ... + def get_select_hint_text(self, byfroms) -> None: ... + def get_from_hint_text(self, table, text) -> None: ... + def get_crud_hint_text(self, table, text) -> None: ... + def get_statement_hint_text(self, hint_texts): ... + translate_select_structure: Incomplete + def visit_select( + self, + select_stmt, + asfrom: bool = ..., + insert_into: bool = ..., + fromhints: Incomplete | None = ..., + compound_index: Incomplete | None = ..., + select_wraps_for: Incomplete | None = ..., + lateral: bool = ..., + from_linter: Incomplete | None = ..., + **kwargs, + ): ... + def get_cte_preamble(self, recursive): ... + def get_select_precolumns(self, select, **kw): ... + def group_by_clause(self, select, **kw): ... + def order_by_clause(self, select, **kw): ... + def for_update_clause(self, select, **kw): ... + def returning_clause(self, stmt, returning_cols) -> None: ... + def limit_clause(self, select, **kw): ... + def fetch_clause(self, select, **kw): ... + def visit_table( + self, + table, + asfrom: bool = ..., + iscrud: bool = ..., + ashint: bool = ..., + fromhints: Incomplete | None = ..., + use_schema: bool = ..., + from_linter: Incomplete | None = ..., + **kwargs, + ): ... + def visit_join(self, join, asfrom: bool = ..., from_linter: Incomplete | None = ..., **kwargs): ... + def visit_insert(self, insert_stmt, **kw): ... + def update_limit_clause(self, update_stmt) -> None: ... + def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw) -> None: ... + def visit_update(self, update_stmt, **kw): ... + def delete_extra_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw) -> None: ... + def delete_table_clause(self, delete_stmt, from_table, extra_froms): ... + def visit_delete(self, delete_stmt, **kw): ... + def visit_savepoint(self, savepoint_stmt): ... + def visit_rollback_to_savepoint(self, savepoint_stmt): ... + def visit_release_savepoint(self, savepoint_stmt): ... + +class StrSQLCompiler(SQLCompiler): + def visit_unsupported_compilation(self, element, err, **kw): ... + def visit_getitem_binary(self, binary, operator, **kw): ... 
+ def visit_json_getitem_op_binary(self, binary, operator, **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ... + def visit_sequence(self, seq, **kw): ... + def returning_clause(self, stmt, returning_cols): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ... + def delete_extra_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ... + def visit_empty_set_expr(self, type_): ... + def get_from_hint_text(self, table, text): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw): ... + +class DDLCompiler(Compiled): + @memoized_property + def sql_compiler(self): ... + @memoized_property + def type_compiler(self): ... + def construct_params( + self, params: Incomplete | None = ..., extracted_parameters: Incomplete | None = ..., escape_names: bool = ... + ) -> None: ... + def visit_ddl(self, ddl, **kwargs): ... + def visit_create_schema(self, create, **kw): ... + def visit_drop_schema(self, drop, **kw): ... + def visit_create_table(self, create, **kw): ... + def visit_create_column(self, create, first_pk: bool = ..., **kw): ... + def create_table_constraints(self, table, _include_foreign_key_constraints: Incomplete | None = ..., **kw): ... + def visit_drop_table(self, drop, **kw): ... + def visit_drop_view(self, drop, **kw): ... + def visit_create_index(self, create, include_schema: bool = ..., include_table_schema: bool = ..., **kw): ... + def visit_drop_index(self, drop, **kw): ... + def visit_add_constraint(self, create, **kw): ... + def visit_set_table_comment(self, create, **kw): ... + def visit_drop_table_comment(self, drop, **kw): ... + def visit_set_column_comment(self, create, **kw): ... + def visit_drop_column_comment(self, drop, **kw): ... + def get_identity_options(self, identity_options): ... + def visit_create_sequence(self, create, prefix: Incomplete | None = ..., **kw): ... + def visit_drop_sequence(self, drop, **kw): ... + def visit_drop_constraint(self, drop, **kw): ... + def get_column_specification(self, column, **kwargs): ... + def create_table_suffix(self, table): ... + def post_create_table(self, table): ... + def get_column_default_string(self, column): ... + def visit_table_or_column_check_constraint(self, constraint, **kw): ... + def visit_check_constraint(self, constraint, **kw): ... + def visit_column_check_constraint(self, constraint, **kw): ... + def visit_primary_key_constraint(self, constraint, **kw): ... + def visit_foreign_key_constraint(self, constraint, **kw): ... + def define_constraint_remote_table(self, constraint, table, preparer): ... + def visit_unique_constraint(self, constraint, **kw): ... + def define_constraint_cascades(self, constraint): ... + def define_constraint_deferrability(self, constraint): ... + def define_constraint_match(self, constraint): ... + def visit_computed_column(self, generated, **kw): ... + def visit_identity_column(self, identity, **kw): ... + +class GenericTypeCompiler(TypeCompiler): + def visit_FLOAT(self, type_, **kw): ... + def visit_REAL(self, type_, **kw): ... + def visit_NUMERIC(self, type_, **kw): ... + def visit_DECIMAL(self, type_, **kw): ... + def visit_INTEGER(self, type_, **kw): ... + def visit_SMALLINT(self, type_, **kw): ... + def visit_BIGINT(self, type_, **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_DATETIME(self, type_, **kw): ... 
+ def visit_DATE(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_CLOB(self, type_, **kw): ... + def visit_NCLOB(self, type_, **kw): ... + def visit_CHAR(self, type_, **kw): ... + def visit_NCHAR(self, type_, **kw): ... + def visit_VARCHAR(self, type_, **kw): ... + def visit_NVARCHAR(self, type_, **kw): ... + def visit_TEXT(self, type_, **kw): ... + def visit_BLOB(self, type_, **kw): ... + def visit_BINARY(self, type_, **kw): ... + def visit_VARBINARY(self, type_, **kw): ... + def visit_BOOLEAN(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_boolean(self, type_, **kw): ... + def visit_time(self, type_, **kw): ... + def visit_datetime(self, type_, **kw): ... + def visit_date(self, type_, **kw): ... + def visit_big_integer(self, type_, **kw): ... + def visit_small_integer(self, type_, **kw): ... + def visit_integer(self, type_, **kw): ... + def visit_real(self, type_, **kw): ... + def visit_float(self, type_, **kw): ... + def visit_numeric(self, type_, **kw): ... + def visit_string(self, type_, **kw): ... + def visit_unicode(self, type_, **kw): ... + def visit_text(self, type_, **kw): ... + def visit_unicode_text(self, type_, **kw): ... + def visit_enum(self, type_, **kw): ... + def visit_null(self, type_, **kw) -> None: ... + def visit_type_decorator(self, type_, **kw): ... + def visit_user_defined(self, type_, **kw): ... + +class StrSQLTypeCompiler(GenericTypeCompiler): + def process(self, type_, **kw): ... + def __getattr__(self, key: str): ... + def visit_null(self, type_, **kw): ... + def visit_user_defined(self, type_, **kw): ... + +class IdentifierPreparer: + reserved_words: Incomplete + legal_characters: Incomplete + illegal_initial_characters: Incomplete + schema_for_object: Incomplete + dialect: Incomplete + initial_quote: Incomplete + final_quote: Incomplete + escape_quote: Incomplete + escape_to_quote: Incomplete + omit_schema: Incomplete + quote_case_sensitive_collations: Incomplete + def __init__( + self, + dialect, + initial_quote: str = ..., + final_quote: Incomplete | None = ..., + escape_quote: str = ..., + quote_case_sensitive_collations: bool = ..., + omit_schema: bool = ..., + ) -> None: ... + def validate_sql_phrase(self, element, reg): ... + def quote_identifier(self, value): ... + def quote_schema(self, schema, force: Incomplete | None = ...): ... + def quote(self, ident, force: Incomplete | None = ...): ... + def format_collation(self, collation_name): ... + def format_sequence(self, sequence, use_schema: bool = ...): ... + def format_label(self, label, name: Incomplete | None = ...): ... + def format_alias(self, alias, name: Incomplete | None = ...): ... + def format_savepoint(self, savepoint, name: Incomplete | None = ...): ... + def format_constraint(self, constraint, _alembic_quote: bool = ...): ... + def truncate_and_render_index_name(self, name, _alembic_quote: bool = ...): ... + def truncate_and_render_constraint_name(self, name, _alembic_quote: bool = ...): ... + def format_index(self, index): ... + def format_table(self, table, use_schema: bool = ..., name: Incomplete | None = ...): ... + def format_schema(self, name): ... + def format_label_name(self, name, anon_map: Incomplete | None = ...): ... + def format_column( + self, + column, + use_table: bool = ..., + name: Incomplete | None = ..., + table_name: Incomplete | None = ..., + use_schema: bool = ..., + anon_map: Incomplete | None = ..., + ): ... + def format_table_seq(self, table, use_schema: bool = ...): ... 
+ def unformat_identifiers(self, identifiers): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/crud.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/crud.pyi new file mode 100644 index 00000000..a13be2f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/crud.pyi @@ -0,0 +1,18 @@ +from typing import Any, Generic, TypeVar + +from . import elements +from .operators import ColumnOperators + +_T = TypeVar("_T") + +REQUIRED: Any + +class _multiparam_column(elements.ColumnElement[_T], Generic[_T]): + index: Any + key: Any + original: Any + default: Any + type: Any + def __init__(self, original, index) -> None: ... + def compare(self, other, **kw) -> None: ... + def __eq__(self, other) -> ColumnOperators[_T]: ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/ddl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/ddl.pyi new file mode 100644 index 00000000..5d34ce02 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/ddl.pyi @@ -0,0 +1,155 @@ +from _typeshed import Incomplete +from typing import Any + +from . import roles +from .base import Executable, SchemaVisitor +from .elements import ClauseElement + +class _DDLCompiles(ClauseElement): ... + +class DDLElement(roles.DDLRole, Executable, _DDLCompiles): + target: Any + on: Any + dialect: Any + callable_: Any + def execute(self, bind: Incomplete | None = ..., target: Incomplete | None = ...): ... # type: ignore[override] + def against(self, target) -> None: ... + state: Any + def execute_if( + self, dialect: Incomplete | None = ..., callable_: Incomplete | None = ..., state: Incomplete | None = ... + ) -> None: ... + def __call__(self, target, bind, **kw): ... + bind: Any + +class DDL(DDLElement): + __visit_name__: str + statement: Any + context: Any + def __init__(self, statement, context: Incomplete | None = ..., bind: Incomplete | None = ...) -> None: ... + +class _CreateDropBase(DDLElement): + element: Any + bind: Any + if_exists: Any + if_not_exists: Any + def __init__( + self, + element, + bind: Incomplete | None = ..., + if_exists: bool = ..., + if_not_exists: bool = ..., + _legacy_bind: Incomplete | None = ..., + ) -> None: ... + @property + def stringify_dialect(self): ... + +class CreateSchema(_CreateDropBase): + __visit_name__: str + quote: Any + def __init__(self, name, quote: Incomplete | None = ..., **kw) -> None: ... + +class DropSchema(_CreateDropBase): + __visit_name__: str + quote: Any + cascade: Any + def __init__(self, name, quote: Incomplete | None = ..., cascade: bool = ..., **kw) -> None: ... + +class CreateTable(_CreateDropBase): + __visit_name__: str + columns: Any + include_foreign_key_constraints: Any + def __init__( + self, + element, + bind: Incomplete | None = ..., + include_foreign_key_constraints: Incomplete | None = ..., + if_not_exists: bool = ..., + ) -> None: ... + +class _DropView(_CreateDropBase): + __visit_name__: str + +class CreateColumn(_DDLCompiles): + __visit_name__: str + element: Any + def __init__(self, element) -> None: ... + +class DropTable(_CreateDropBase): + __visit_name__: str + def __init__(self, element, bind: Incomplete | None = ..., if_exists: bool = ...) -> None: ... 
+ +class CreateSequence(_CreateDropBase): + __visit_name__: str + +class DropSequence(_CreateDropBase): + __visit_name__: str + +class CreateIndex(_CreateDropBase): + __visit_name__: str + def __init__(self, element, bind: Incomplete | None = ..., if_not_exists: bool = ...) -> None: ... + +class DropIndex(_CreateDropBase): + __visit_name__: str + def __init__(self, element, bind: Incomplete | None = ..., if_exists: bool = ...) -> None: ... + +class AddConstraint(_CreateDropBase): + __visit_name__: str + def __init__(self, element, *args, **kw) -> None: ... + +class DropConstraint(_CreateDropBase): + __visit_name__: str + cascade: Any + def __init__(self, element, cascade: bool = ..., **kw) -> None: ... + +class SetTableComment(_CreateDropBase): + __visit_name__: str + +class DropTableComment(_CreateDropBase): + __visit_name__: str + +class SetColumnComment(_CreateDropBase): + __visit_name__: str + +class DropColumnComment(_CreateDropBase): + __visit_name__: str + +class DDLBase(SchemaVisitor): + connection: Any + def __init__(self, connection) -> None: ... + +class SchemaGenerator(DDLBase): + checkfirst: Any + tables: Any + preparer: Any + dialect: Any + memo: Any + def __init__(self, dialect, connection, checkfirst: bool = ..., tables: Incomplete | None = ..., **kwargs) -> None: ... + def visit_metadata(self, metadata) -> None: ... + def visit_table( + self, + table, + create_ok: bool = ..., + include_foreign_key_constraints: Incomplete | None = ..., + _is_metadata_operation: bool = ..., + ) -> None: ... + def visit_foreign_key_constraint(self, constraint) -> None: ... + def visit_sequence(self, sequence, create_ok: bool = ...) -> None: ... + def visit_index(self, index, create_ok: bool = ...) -> None: ... + +class SchemaDropper(DDLBase): + checkfirst: Any + tables: Any + preparer: Any + dialect: Any + memo: Any + def __init__(self, dialect, connection, checkfirst: bool = ..., tables: Incomplete | None = ..., **kwargs) -> None: ... + def visit_metadata(self, metadata): ... + def visit_index(self, index, drop_ok: bool = ...) -> None: ... + def visit_table(self, table, drop_ok: bool = ..., _is_metadata_operation: bool = ..., _ignore_sequences=...) -> None: ... + def visit_foreign_key_constraint(self, constraint) -> None: ... + def visit_sequence(self, sequence, drop_ok: bool = ...) -> None: ... + +def sort_tables(tables, skip_fn: Incomplete | None = ..., extra_dependencies: Incomplete | None = ...): ... +def sort_tables_and_constraints( + tables, filter_fn: Incomplete | None = ..., extra_dependencies: Incomplete | None = ..., _warn_for_cycles: bool = ... +): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/default_comparator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/default_comparator.pyi new file mode 100644 index 00000000..ac514fdf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/default_comparator.pyi @@ -0,0 +1,3 @@ +from typing import Any + +operator_lookup: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/dml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/dml.pyi new file mode 100644 index 00000000..8dbc7c04 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/dml.pyi @@ -0,0 +1,112 @@ +from _typeshed import Incomplete +from typing import Any + +from . import roles +from .base import CompileState, DialectKWArgs, Executable, HasCompileState +from .elements import ClauseElement +from .selectable import HasCTE, HasPrefixes, ReturnsRows + +class DMLState(CompileState): + isupdate: bool + isdelete: bool + isinsert: bool + def __init__(self, statement, compiler, **kw) -> None: ... + @property + def dml_table(self): ... + +class InsertDMLState(DMLState): + isinsert: bool + include_table_with_column_exprs: bool + statement: Any + def __init__(self, statement, compiler, **kw) -> None: ... + +class UpdateDMLState(DMLState): + isupdate: bool + include_table_with_column_exprs: bool + statement: Any + is_multitable: Any + def __init__(self, statement, compiler, **kw) -> None: ... + +class DeleteDMLState(DMLState): + isdelete: bool + statement: Any + def __init__(self, statement, compiler, **kw) -> None: ... + +class UpdateBase(roles.DMLRole, HasCTE, HasCompileState, DialectKWArgs, HasPrefixes, ReturnsRows, Executable, ClauseElement): + __visit_name__: str + named_with_column: bool + is_dml: bool + def params(self, *arg, **kw) -> None: ... + def with_dialect_options(self, **opt) -> None: ... + bind: Any + def returning(self, *cols) -> None: ... + @property + def exported_columns(self): ... + def with_hint(self, text, selectable: Incomplete | None = ..., dialect_name: str = ...) -> None: ... + +class ValuesBase(UpdateBase): + __visit_name__: str + select: Any + table: Any + def __init__(self, table, values, prefixes) -> None: ... + def values(self, *args, **kwargs) -> None: ... + def return_defaults(self, *cols) -> None: ... + +class Insert(ValuesBase): + __visit_name__: str + select: Any + include_insert_from_select_defaults: bool + is_insert: bool + def __init__( + self, + table, + values: Incomplete | None = ..., + inline: bool = ..., + bind: Incomplete | None = ..., + prefixes: Incomplete | None = ..., + returning: Incomplete | None = ..., + return_defaults: bool = ..., + **dialect_kw, + ) -> None: ... + def inline(self) -> None: ... + def from_select(self, names, select, include_defaults: bool = ...) -> None: ... + +class DMLWhereBase: + def where(self, *whereclause) -> None: ... + def filter(self, *criteria): ... + def filter_by(self, **kwargs): ... + @property + def whereclause(self): ... 
+ +class Update(DMLWhereBase, ValuesBase): + __visit_name__: str + is_update: bool + def __init__( + self, + table, + whereclause: Incomplete | None = ..., + values: Incomplete | None = ..., + inline: bool = ..., + bind: Incomplete | None = ..., + prefixes: Incomplete | None = ..., + returning: Incomplete | None = ..., + return_defaults: bool = ..., + preserve_parameter_order: bool = ..., + **dialect_kw, + ) -> None: ... + def ordered_values(self, *args) -> None: ... + def inline(self) -> None: ... + +class Delete(DMLWhereBase, UpdateBase): + __visit_name__: str + is_delete: bool + table: Any + def __init__( + self, + table, + whereclause: Incomplete | None = ..., + bind: Incomplete | None = ..., + returning: Incomplete | None = ..., + prefixes: Incomplete | None = ..., + **dialect_kw, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/elements.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/elements.pyi new file mode 100644 index 00000000..93c53c0c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/elements.pyi @@ -0,0 +1,473 @@ +from _typeshed import Incomplete +from typing import Any, Generic, TypeVar +from typing_extensions import Literal, Self + +from .. import util +from ..util import HasMemoized, memoized_property +from . import operators, roles +from .annotation import Annotated, SupportsWrappingAnnotations +from .base import Executable, Immutable, SingletonConstant +from .traversals import HasCopyInternals, MemoizedHasCacheKey +from .visitors import Traversible + +_T = TypeVar("_T") + +def collate(expression, collation): ... +def between(expr, lower_bound, upper_bound, symmetric: bool = ...): ... +def literal(value, type_: Incomplete | None = ...): ... +def outparam(key, type_: Incomplete | None = ...): ... +def not_(clause): ... + +class ClauseElement(roles.SQLRole, SupportsWrappingAnnotations, MemoizedHasCacheKey, HasCopyInternals, Traversible): + __visit_name__: str + supports_execution: bool + stringify_dialect: str + bind: Any + description: Any + is_clause_element: bool + is_selectable: bool + @property + def entity_namespace(self) -> None: ... + def unique_params(self, *optionaldict, **kwargs): ... + def params(self, *optionaldict, **kwargs): ... + def compare(self, other, **kw): ... + def self_group(self, against: Incomplete | None = ...): ... + def compile(self, bind: Incomplete | None = ..., dialect: Incomplete | None = ..., **kw): ... + def __invert__(self): ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + +class ColumnElement( + roles.ColumnArgumentOrKeyRole, + roles.StatementOptionRole, + roles.WhereHavingRole, + roles.BinaryElementRole, + roles.OrderByRole, + roles.ColumnsClauseRole, + roles.LimitOffsetRole, + roles.DMLColumnRole, + roles.DDLConstraintColumnRole, + roles.DDLExpressionRole, + operators.ColumnOperators[_T], + ClauseElement, + Generic[_T], +): + __visit_name__: str + primary_key: bool + foreign_keys: Any + key: Any + def self_group(self, against: Incomplete | None = ...): ... + @memoized_property + def type(self): ... + @HasMemoized.memoized_attribute + def comparator(self): ... + def __getattr__(self, key: str): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... + @property + def expression(self): ... + @memoized_property + def base_columns(self): ... 
+ @memoized_property + def proxy_set(self): ... + def shares_lineage(self, othercolumn): ... + def cast(self, type_): ... + def label(self, name): ... + @property + def anon_label(self): ... + @property + def anon_key_label(self): ... + +class WrapsColumnExpression: + @property + def wrapped_column_expression(self) -> None: ... + +class BindParameter(roles.InElementRole, ColumnElement[_T], Generic[_T]): + __visit_name__: str + inherit_cache: bool + key: Any + unique: Any + value: Any + callable: Any + isoutparam: Any + required: Any + expanding: Any + expand_op: Any + literal_execute: Any + type: Any + def __init__( + self, + key, + value=..., + type_: Incomplete | None = ..., + unique: bool = ..., + required=..., + quote: Incomplete | None = ..., + callable_: Incomplete | None = ..., + expanding: bool = ..., + isoutparam: bool = ..., + literal_execute: bool = ..., + _compared_to_operator: Incomplete | None = ..., + _compared_to_type: Incomplete | None = ..., + _is_crud: bool = ..., + ) -> None: ... + @property + def effective_value(self): ... + def render_literal_execute(self): ... + +class TypeClause(ClauseElement): + __visit_name__: str + type: Any + def __init__(self, type_) -> None: ... + +class TextClause( + roles.DDLConstraintColumnRole, + roles.DDLExpressionRole, + roles.StatementOptionRole, + roles.WhereHavingRole, + roles.OrderByRole, + roles.FromClauseRole, + roles.SelectStatementRole, + roles.BinaryElementRole, + roles.InElementRole, + Executable, + ClauseElement, +): + __visit_name__: str + def __and__(self, other): ... + key: Any + text: Any + def __init__(self, text: str, bind: Incomplete | None = None) -> None: ... + def bindparams(self, *binds, **names_to_values) -> Self: ... + def columns(self, *cols, **types): ... + @property + def type(self): ... + @property + def comparator(self): ... + def self_group(self, against: Incomplete | None = ...): ... + +class Null(SingletonConstant, roles.ConstExprRole, ColumnElement[None]): + __visit_name__: str + @memoized_property + def type(self): ... + +class False_(SingletonConstant, roles.ConstExprRole, ColumnElement[Literal[False]]): + __visit_name__: str + @memoized_property + def type(self): ... + +class True_(SingletonConstant, roles.ConstExprRole, ColumnElement[Literal[True]]): + __visit_name__: str + @memoized_property + def type(self): ... + +class ClauseList(roles.InElementRole, roles.OrderByRole, roles.ColumnsClauseRole, roles.DMLColumnRole, ClauseElement): + __visit_name__: str + operator: Any + group: Any + group_contents: Any + clauses: Any + def __init__(self, *clauses, **kwargs) -> None: ... + def __iter__(self): ... + def __len__(self) -> int: ... + def append(self, clause) -> None: ... + def self_group(self, against: Incomplete | None = ...): ... + +class BooleanClauseList(ClauseList, ColumnElement[Any]): + __visit_name__: str + inherit_cache: bool + def __init__(self, *arg, **kw) -> None: ... + @classmethod + def and_(cls, *clauses): ... + @classmethod + def or_(cls, *clauses): ... + def self_group(self, against: Incomplete | None = ...): ... + +and_: Any +or_: Any + +class Tuple(ClauseList, ColumnElement[Any]): + __visit_name__: str + type: Any + def __init__(self, *clauses, **kw) -> None: ... + def self_group(self, against: Incomplete | None = ...): ... + +class Case(ColumnElement[Any]): + __visit_name__: str + value: Any + type: Any + whens: Any + else_: Any + def __init__(self, *whens, **kw) -> None: ... + +def literal_column(text, type_: Incomplete | None = ...): ... 
+ +class Cast(WrapsColumnExpression, ColumnElement[Any]): + __visit_name__: str + type: Any + clause: Any + typeclause: Any + def __init__(self, expression, type_) -> None: ... + @property + def wrapped_column_expression(self): ... + +class TypeCoerce(WrapsColumnExpression, ColumnElement[Any]): + __visit_name__: str + type: Any + clause: Any + def __init__(self, expression, type_) -> None: ... + @HasMemoized.memoized_attribute + def typed_expression(self): ... + @property + def wrapped_column_expression(self): ... + def self_group(self, against: Incomplete | None = ...): ... + +class Extract(ColumnElement[Any]): + __visit_name__: str + type: Any + field: Any + expr: Any + def __init__(self, field, expr, **kwargs) -> None: ... + +class _label_reference(ColumnElement[Any]): + __visit_name__: str + element: Any + def __init__(self, element) -> None: ... + +class _textual_label_reference(ColumnElement[Any]): + __visit_name__: str + element: Any + def __init__(self, element) -> None: ... + +class UnaryExpression(ColumnElement[Any]): + __visit_name__: str + operator: Any + modifier: Any + element: Any + type: Any + wraps_column_expression: Any + def __init__( + self, + element, + operator: Incomplete | None = ..., + modifier: Incomplete | None = ..., + type_: Incomplete | None = ..., + wraps_column_expression: bool = ..., + ) -> None: ... + def self_group(self, against: Incomplete | None = ...): ... + +class CollectionAggregate(UnaryExpression): + inherit_cache: bool + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs) -> None: ... + +class AsBoolean(WrapsColumnExpression, UnaryExpression): + inherit_cache: bool + element: Any + type: Any + operator: Any + negate: Any + modifier: Any + wraps_column_expression: bool + def __init__(self, element, operator, negate) -> None: ... + @property + def wrapped_column_expression(self): ... + def self_group(self, against: Incomplete | None = ...): ... + +class BinaryExpression(ColumnElement[Any]): + __visit_name__: str + left: Any + right: Any + operator: Any + type: Any + negate: Any + modifiers: Any + def __init__( + self, + left, + right, + operator, + type_: Incomplete | None = ..., + negate: Incomplete | None = ..., + modifiers: Incomplete | None = ..., + ) -> None: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + @property + def is_comparison(self): ... + def self_group(self, against: Incomplete | None = ...): ... + +class Slice(ColumnElement[Any]): + __visit_name__: str + start: Any + stop: Any + step: Any + type: Any + def __init__(self, start, stop, step, _name: Incomplete | None = ...) -> None: ... + def self_group(self, against: Incomplete | None = ...): ... + +class IndexExpression(BinaryExpression): + inherit_cache: bool + +class GroupedElement(ClauseElement): + __visit_name__: str + def self_group(self, against: Incomplete | None = ...): ... + +class Grouping(GroupedElement, ColumnElement[Any]): + element: Any + type: Any + def __init__(self, element) -> None: ... + def __getattr__(self, attr: str): ... + +RANGE_UNBOUNDED: Any +RANGE_CURRENT: Any + +class Over(ColumnElement[Any]): + __visit_name__: str + order_by: Any + partition_by: Any + element: Any + range_: Any + rows: Any + def __init__( + self, + element, + partition_by: Incomplete | None = ..., + order_by: Incomplete | None = ..., + range_: Incomplete | None = ..., + rows: Incomplete | None = ..., + ) -> None: ... + def __reduce__(self): ... + @memoized_property + def type(self): ... 
+ +class WithinGroup(ColumnElement[Any]): + __visit_name__: str + order_by: Any + element: Any + def __init__(self, element, *order_by) -> None: ... + def __reduce__(self): ... + def over( + self, + partition_by: Incomplete | None = ..., + order_by: Incomplete | None = ..., + range_: Incomplete | None = ..., + rows: Incomplete | None = ..., + ): ... + @memoized_property + def type(self): ... + +class FunctionFilter(ColumnElement[Any]): + __visit_name__: str + criterion: Any + func: Any + def __init__(self, func, *criterion) -> None: ... + def filter(self, *criterion): ... + def over( + self, + partition_by: Incomplete | None = ..., + order_by: Incomplete | None = ..., + range_: Incomplete | None = ..., + rows: Incomplete | None = ..., + ): ... + def self_group(self, against: Incomplete | None = ...): ... + @memoized_property + def type(self): ... + +class Label(roles.LabeledColumnExprRole, ColumnElement[Any]): + __visit_name__: str + name: Any + key: Any + def __init__(self, name, element, type_: Incomplete | None = ...) -> None: ... + def __reduce__(self): ... + @memoized_property + def type(self): ... + @HasMemoized.memoized_attribute + def element(self): ... + def self_group(self, against: Incomplete | None = ...): ... + @property + def primary_key(self): ... + @property + def foreign_keys(self): ... + +class NamedColumn(ColumnElement[Any]): + is_literal: bool + table: Any + @memoized_property + def description(self): ... + +class ColumnClause(roles.DDLReferredColumnRole, roles.LabeledColumnExprRole, roles.StrAsPlainColumnRole, Immutable, NamedColumn): + table: Any + is_literal: bool + __visit_name__: str + onupdate: Any + default: Any + server_default: Any + server_onupdate: Any + key: Any + type: Any + def __init__( + self, text, type_: Incomplete | None = ..., is_literal: bool = ..., _selectable: Incomplete | None = ... + ) -> None: ... + def get_children(self, column_tables: bool = ..., **kw): ... # type: ignore[override] + @property + def entity_namespace(self): ... + +class TableValuedColumn(NamedColumn): + __visit_name__: str + scalar_alias: Any + key: Any + type: Any + def __init__(self, scalar_alias, type_) -> None: ... + +class CollationClause(ColumnElement[Any]): + __visit_name__: str + collation: Any + def __init__(self, collation) -> None: ... + +class _IdentifiedClause(Executable, ClauseElement): + __visit_name__: str + ident: Any + def __init__(self, ident) -> None: ... + +class SavepointClause(_IdentifiedClause): + __visit_name__: str + inherit_cache: bool + +class RollbackToSavepointClause(_IdentifiedClause): + __visit_name__: str + inherit_cache: bool + +class ReleaseSavepointClause(_IdentifiedClause): + __visit_name__: str + inherit_cache: bool + +class quoted_name(util.MemoizedSlots, util.text_type): + quote: Any + def __new__(cls, value, quote): ... + def __reduce__(self): ... + +class AnnotatedColumnElement(Annotated): + def __init__(self, element, values) -> None: ... + @memoized_property + def name(self): ... + @memoized_property + def table(self): ... + @memoized_property + def key(self): ... + @memoized_property + def info(self): ... + +class _truncated_label(quoted_name): + def __new__(cls, value, quote: Incomplete | None = ...): ... + def __reduce__(self): ... + def apply_map(self, map_): ... + +class conv(_truncated_label): ... + +class _anonymous_label(_truncated_label): + @classmethod + def safe_construct(cls, seed, body, enclosing_label: Incomplete | None = ..., sanitize_key: bool = ...): ... + def __add__(self, other): ... 
+ def __radd__(self, other): ... + def apply_map(self, map_): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/events.pyi new file mode 100644 index 00000000..11765c6a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/events.pyi @@ -0,0 +1,10 @@ +from .. import event + +class DDLEvents(event.Events): + def before_create(self, target, connection, **kw) -> None: ... + def after_create(self, target, connection, **kw) -> None: ... + def before_drop(self, target, connection, **kw) -> None: ... + def after_drop(self, target, connection, **kw) -> None: ... + def before_parent_attach(self, target, parent) -> None: ... + def after_parent_attach(self, target, parent) -> None: ... + def column_reflect(self, inspector, table, column_info) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/expression.pyi new file mode 100644 index 00000000..44ca4b6e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/expression.pyi @@ -0,0 +1,203 @@ +from _typeshed import Incomplete + +from .base import PARSE_AUTOCOMMIT as PARSE_AUTOCOMMIT, ColumnCollection as ColumnCollection, Executable as Executable +from .dml import Delete as Delete, Insert as Insert, Update as Update, UpdateBase as UpdateBase, ValuesBase as ValuesBase +from .elements import ( + BinaryExpression as BinaryExpression, + BindParameter as BindParameter, + BooleanClauseList as BooleanClauseList, + Case as Case, + Cast as Cast, + ClauseElement as ClauseElement, + ClauseList as ClauseList, + CollectionAggregate as CollectionAggregate, + ColumnClause as ColumnClause, + ColumnElement as ColumnElement, + Extract as Extract, + False_ as False_, + FunctionFilter as FunctionFilter, + Grouping as Grouping, + Label as Label, + Null as Null, + Over as Over, + ReleaseSavepointClause as ReleaseSavepointClause, + RollbackToSavepointClause as RollbackToSavepointClause, + SavepointClause as SavepointClause, + TextClause as TextClause, + True_ as True_, + Tuple as Tuple, + TypeClause as TypeClause, + TypeCoerce as TypeCoerce, + UnaryExpression as UnaryExpression, + WithinGroup as WithinGroup, + _truncated_label as _truncated_label, + between as between, + collate as collate, + literal as literal, + literal_column as literal_column, + not_ as not_, + outparam as outparam, + quoted_name as quoted_name, +) +from .functions import Function as Function, FunctionElement as FunctionElement, func as func, modifier as modifier +from .lambdas import LambdaElement as LambdaElement, StatementLambdaElement as StatementLambdaElement, lambda_stmt as lambda_stmt +from .operators import ColumnOperators as ColumnOperators, Operators as Operators, custom_op as custom_op +from .selectable import ( + CTE as CTE, + LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT, + LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY, + LABEL_STYLE_NONE as LABEL_STYLE_NONE, + LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL, + Alias as Alias, + AliasedReturnsRows as AliasedReturnsRows, + CompoundSelect as CompoundSelect, + Exists as Exists, + FromClause as FromClause, + FromGrouping as 
FromGrouping, + GenerativeSelect as GenerativeSelect, + HasCTE as HasCTE, + HasPrefixes as HasPrefixes, + HasSuffixes as HasSuffixes, + Join as Join, + Lateral as Lateral, + ReturnsRows as ReturnsRows, + ScalarSelect as ScalarSelect, + Select as Select, + Selectable as Selectable, + SelectBase as SelectBase, + Subquery as Subquery, + TableClause as TableClause, + TableSample as TableSample, + TableValuedAlias as TableValuedAlias, + TextAsFrom as TextAsFrom, + TextualSelect as TextualSelect, + Values as Values, + subquery as subquery, +) +from .traversals import CacheKey as CacheKey +from .visitors import Visitable as Visitable + +__all__ = [ + "Alias", + "AliasedReturnsRows", + "any_", + "all_", + "CacheKey", + "ClauseElement", + "ColumnCollection", + "ColumnElement", + "CompoundSelect", + "Delete", + "FromClause", + "Insert", + "Join", + "Lateral", + "LambdaElement", + "StatementLambdaElement", + "Select", + "Selectable", + "TableClause", + "TableValuedAlias", + "Update", + "Values", + "alias", + "and_", + "asc", + "between", + "bindparam", + "case", + "cast", + "column", + "custom_op", + "cte", + "delete", + "desc", + "distinct", + "except_", + "except_all", + "exists", + "extract", + "func", + "modifier", + "collate", + "insert", + "intersect", + "intersect_all", + "join", + "label", + "lateral", + "lambda_stmt", + "literal", + "literal_column", + "not_", + "null", + "nulls_first", + "nulls_last", + "or_", + "outparam", + "outerjoin", + "over", + "select", + "table", + "text", + "tuple_", + "type_coerce", + "quoted_name", + "union", + "union_all", + "update", + "quoted_name", + "within_group", + "Subquery", + "TableSample", + "tablesample", + "values", +] + +all_: Incomplete +any_: Incomplete +and_: Incomplete +alias: Incomplete +tablesample: Incomplete +lateral: Incomplete +or_: Incomplete +bindparam: Incomplete +select: Incomplete + +def text(text: str, bind: Incomplete | None = None) -> TextClause: ... + +table: Incomplete +column: Incomplete +over: Incomplete +within_group: Incomplete +label: Incomplete +case: Incomplete +cast: Incomplete +cte: Incomplete +values: Incomplete +extract: Incomplete +tuple_: Incomplete +except_: Incomplete +except_all: Incomplete +intersect: Incomplete +intersect_all: Incomplete +union: Incomplete +union_all: Incomplete +exists: Incomplete +nulls_first: Incomplete +nullsfirst: Incomplete +nulls_last: Incomplete +nullslast: Incomplete +asc: Incomplete +desc: Incomplete +distinct: Incomplete +type_coerce: Incomplete +true: Incomplete +false: Incomplete +null: Incomplete +join: Incomplete +outerjoin: Incomplete +insert: Incomplete +update: Incomplete +delete: Incomplete +funcfilter: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/functions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/functions.pyi new file mode 100644 index 00000000..c1a0d1ec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/functions.pyi @@ -0,0 +1,228 @@ +from _typeshed import Incomplete +from typing import Any + +from ..util import HasMemoized +from .base import Executable, Generative +from .elements import BinaryExpression, ColumnElement, NamedColumn +from .selectable import FromClause, TableValuedAlias +from .visitors import TraversibleType + +def register_function(identifier, fn, package: str = ...) -> None: ... 
+ +class FunctionElement(Executable, ColumnElement[Any], FromClause, Generative): # type: ignore[misc] + packagenames: Incomplete + clause_expr: Incomplete + def __init__(self, *clauses, **kwargs) -> None: ... + def scalar_table_valued(self, name, type_: Incomplete | None = ...): ... + def table_valued(self, *expr, **kw): ... + def column_valued(self, name: str | None = ..., joins_implicitly: bool = ...): ... + @property + def columns(self): ... + @property + def exported_columns(self): ... + @HasMemoized.memoized_attribute + def clauses(self): ... + def over( + self, + partition_by: Incomplete | None = ..., + order_by: Incomplete | None = ..., + rows: Incomplete | None = ..., + range_: Incomplete | None = ..., + ): ... + def within_group(self, *order_by): ... + def filter(self, *criterion): ... + def as_comparison(self, left_index, right_index): ... + def within_group_type(self, within_group) -> None: ... + def alias(self, name: str | None = ..., joins_implicitly: bool = ...) -> TableValuedAlias: ... # type: ignore[override] + def select(self): ... + def scalar(self): ... + def execute(self): ... + def self_group(self, against: Incomplete | None = ...): ... + @property + def entity_namespace(self): ... + +class FunctionAsBinary(BinaryExpression): + sql_function: Incomplete + left_index: Incomplete + right_index: Incomplete + operator: Incomplete + type: Incomplete + negate: Incomplete + modifiers: Incomplete + def __init__(self, fn, left_index, right_index) -> None: ... + @property + def left(self): ... + @left.setter + def left(self, value) -> None: ... + @property + def right(self): ... + @right.setter + def right(self, value) -> None: ... + +class ScalarFunctionColumn(NamedColumn): + __visit_name__: str + is_literal: bool + table: Incomplete + fn: Incomplete + name: Incomplete + type: Incomplete + def __init__(self, fn, name, type_: Incomplete | None = ...) -> None: ... + +class _FunctionGenerator: + opts: Incomplete + def __init__(self, **opts) -> None: ... + def __getattr__(self, name: str): ... + def __call__(self, *c, **kwargs): ... + +func: Incomplete +modifier: Incomplete + +class Function(FunctionElement): + __visit_name__: str + type: Incomplete + packagenames: Incomplete + name: Incomplete + def __init__(self, name, *clauses, **kw) -> None: ... + +class _GenericMeta(TraversibleType): + def __init__(cls, clsname, bases, clsdict) -> None: ... + +class GenericFunction: + name: Incomplete + identifier: Incomplete + coerce_arguments: bool + inherit_cache: bool + packagenames: Incomplete + clause_expr: Incomplete + type: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + +class next_value(GenericFunction): + type: Incomplete + name: str + sequence: Incomplete + def __init__(self, seq, **kw) -> None: ... + def compare(self, other, **kw): ... + +class AnsiFunction(GenericFunction): + inherit_cache: bool + def __init__(self, *args, **kwargs) -> None: ... + +class ReturnTypeFromArgs(GenericFunction): + inherit_cache: bool + def __init__(self, *args, **kwargs) -> None: ... + +class coalesce(ReturnTypeFromArgs): + inherit_cache: bool + +class max(ReturnTypeFromArgs): + inherit_cache: bool + +class min(ReturnTypeFromArgs): + inherit_cache: bool + +class sum(ReturnTypeFromArgs): + inherit_cache: bool + +class now(GenericFunction): + type: Incomplete + inherit_cache: bool + +class concat(GenericFunction): + type: Incomplete + inherit_cache: bool + +class char_length(GenericFunction): + type: Incomplete + inherit_cache: bool + def __init__(self, arg, **kwargs) -> None: ... 
+ +class random(GenericFunction): + inherit_cache: bool + +class count(GenericFunction): + type: Incomplete + inherit_cache: bool + def __init__(self, expression: Incomplete | None = ..., **kwargs) -> None: ... + +class current_date(AnsiFunction): + type: Incomplete + inherit_cache: bool + +class current_time(AnsiFunction): + type: Incomplete + inherit_cache: bool + +class current_timestamp(AnsiFunction): + type: Incomplete + inherit_cache: bool + +class current_user(AnsiFunction): + type: Incomplete + inherit_cache: bool + +class localtime(AnsiFunction): + type: Incomplete + inherit_cache: bool + +class localtimestamp(AnsiFunction): + type: Incomplete + inherit_cache: bool + +class session_user(AnsiFunction): + type: Incomplete + inherit_cache: bool + +class sysdate(AnsiFunction): + type: Incomplete + inherit_cache: bool + +class user(AnsiFunction): + type: Incomplete + inherit_cache: bool + +class array_agg(GenericFunction): + type: Incomplete + inherit_cache: bool + def __init__(self, *args, **kwargs) -> None: ... + +class OrderedSetAgg(GenericFunction): + array_for_multi_clause: bool + inherit_cache: bool + def within_group_type(self, within_group): ... + +class mode(OrderedSetAgg): + inherit_cache: bool + +class percentile_cont(OrderedSetAgg): + array_for_multi_clause: bool + inherit_cache: bool + +class percentile_disc(OrderedSetAgg): + array_for_multi_clause: bool + inherit_cache: bool + +class rank(GenericFunction): + type: Incomplete + inherit_cache: bool + +class dense_rank(GenericFunction): + type: Incomplete + inherit_cache: bool + +class percent_rank(GenericFunction): + type: Incomplete + inherit_cache: bool + +class cume_dist(GenericFunction): + type: Incomplete + inherit_cache: bool + +class cube(GenericFunction): + inherit_cache: bool + +class rollup(GenericFunction): + inherit_cache: bool + +class grouping_sets(GenericFunction): + inherit_cache: bool diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/lambdas.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/lambdas.pyi new file mode 100644 index 00000000..b6ada6db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/lambdas.pyi @@ -0,0 +1,122 @@ +from _typeshed import Incomplete +from typing import Generic, TypeVar + +from . import elements, roles +from .base import Options +from .operators import ColumnOperators + +_T = TypeVar("_T") + +class LambdaOptions(Options): + enable_tracking: bool + track_closure_variables: bool + track_on: Incomplete + global_track_bound_values: bool + track_bound_values: bool + lambda_cache: Incomplete + +def lambda_stmt( + lmb, + enable_tracking: bool = ..., + track_closure_variables: bool = ..., + track_on: Incomplete | None = ..., + global_track_bound_values: bool = ..., + track_bound_values: bool = ..., + lambda_cache: Incomplete | None = ..., +): ... + +class LambdaElement(elements.ClauseElement): + __visit_name__: str + parent_lambda: Incomplete + fn: Incomplete + role: Incomplete + tracker_key: Incomplete + opts: Incomplete + def __init__(self, fn, role, opts=..., apply_propagate_attrs: Incomplete | None = ...) -> None: ... + def __getattr__(self, key: str): ... + +class DeferredLambdaElement(LambdaElement): + lambda_args: Incomplete + def __init__(self, fn, role, opts=..., lambda_args=...) -> None: ... + +class StatementLambdaElement(roles.AllowsLambdaRole, LambdaElement): + def __add__(self, other): ... 
+ def add_criteria( + self, + other, + enable_tracking: bool = ..., + track_on: Incomplete | None = ..., + track_closure_variables: bool = ..., + track_bound_values: bool = ..., + ): ... + def spoil(self): ... + +class NullLambdaStatement(roles.AllowsLambdaRole, elements.ClauseElement): + __visit_name__: str + def __init__(self, statement) -> None: ... + def __getattr__(self, key: str): ... + def __add__(self, other): ... + def add_criteria(self, other, **kw): ... + +class LinkedLambdaElement(StatementLambdaElement): + role: Incomplete + opts: Incomplete + fn: Incomplete + parent_lambda: Incomplete + tracker_key: Incomplete + def __init__(self, fn, parent_lambda, opts) -> None: ... + +class AnalyzedCode: + @classmethod + def get(cls, fn, lambda_element, lambda_kw, **kw): ... + track_bound_values: Incomplete + track_closure_variables: Incomplete + bindparam_trackers: Incomplete + closure_trackers: Incomplete + build_py_wrappers: Incomplete + def __init__(self, fn, lambda_element, opts) -> None: ... + +class NonAnalyzedFunction: + closure_bindparams: Incomplete + bindparam_trackers: Incomplete + expr: Incomplete + def __init__(self, expr) -> None: ... + @property + def expected_expr(self): ... + +class AnalyzedFunction: + analyzed_code: Incomplete + fn: Incomplete + closure_pywrappers: Incomplete + tracker_instrumented_fn: Incomplete + expr: Incomplete + bindparam_trackers: Incomplete + expected_expr: Incomplete + is_sequence: Incomplete + propagate_attrs: Incomplete + closure_bindparams: Incomplete + def __init__(self, analyzed_code, lambda_element, apply_propagate_attrs, fn) -> None: ... + +class PyWrapper(ColumnOperators[_T], Generic[_T]): + fn: Incomplete + track_bound_values: Incomplete + def __init__( + self, + fn, + name, + to_evaluate, + closure_index: Incomplete | None = ..., + getter: Incomplete | None = ..., + track_bound_values: bool = ..., + ) -> None: ... + def __call__(self, *arg, **kw): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... + def __clause_element__(self): ... # Field not always present. + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def __getattribute__(self, key: str): ... + def __iter__(self): ... + def __getitem__(self, key) -> ColumnOperators[_T]: ... + +def insp(lmb): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/naming.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/naming.pyi new file mode 100644 index 00000000..50bdacf7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/naming.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from .elements import conv as conv + +class ConventionDict: + const: Any + table: Any + convention: Any + def __init__(self, const, table, convention) -> None: ... + def __getitem__(self, key): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/operators.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/operators.pyi new file mode 100644 index 00000000..abfafe7c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/operators.pyi @@ -0,0 +1,194 @@ +from _typeshed import Incomplete +from collections.abc import Container, Iterable +from operator import truediv +from typing import Any, Generic, TypeVar + +_T = TypeVar("_T") + +div = truediv + +class Operators: + def __and__(self, other): ... + def __or__(self, other): ... + def __invert__(self): ... + def op(self, opstring, precedence: int = ..., is_comparison: bool = ..., return_type: Incomplete | None = ...): ... + def bool_op(self, opstring, precedence: int = ...): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... + +class custom_op: + __name__: str + opstring: Any + precedence: Any + is_comparison: Any + natural_self_precedent: Any + eager_grouping: Any + return_type: Any + def __init__( + self, + opstring, + precedence: int = ..., + is_comparison: bool = ..., + return_type: Incomplete | None = ..., + natural_self_precedent: bool = ..., + eager_grouping: bool = ..., + ) -> None: ... + def __eq__(self, other): ... + def __hash__(self) -> int: ... + def __call__(self, left, right, **kw): ... + +class ColumnOperators(Operators, Generic[_T]): + timetuple: Any + def __lt__(self, other: _T | ColumnOperators[_T] | None) -> ColumnOperators[_T]: ... + def __le__(self, other: _T | ColumnOperators[_T] | None) -> ColumnOperators[_T]: ... + def __hash__(self) -> int: ... + def __eq__(self, other: _T | ColumnOperators[_T] | None) -> ColumnOperators[_T]: ... # type: ignore[override] + def __ne__(self, other: _T | ColumnOperators[_T] | None) -> ColumnOperators[_T]: ... # type: ignore[override] + def is_distinct_from(self, other) -> ColumnOperators[_T]: ... + def is_not_distinct_from(self, other) -> ColumnOperators[_T]: ... + def isnot_distinct_from(self, other) -> ColumnOperators[_T]: ... + def __gt__(self, other: _T | ColumnOperators[_T] | None) -> ColumnOperators[_T]: ... + def __ge__(self, other: _T | ColumnOperators[_T] | None) -> ColumnOperators[_T]: ... + def __neg__(self) -> ColumnOperators[_T]: ... + def __contains__(self, other) -> ColumnOperators[_T]: ... + def __getitem__(self, index: int) -> ColumnOperators[_T]: ... + def __lshift__(self, other) -> ColumnOperators[_T]: ... + def __rshift__(self, other) -> ColumnOperators[_T]: ... + def concat(self, other: _T | ColumnOperators[_T] | None) -> ColumnOperators[_T]: ... + def like(self, other: _T, escape: str | None = ...) -> ColumnOperators[_T]: ... + def ilike(self, other: _T, escape: str | None = ...) -> ColumnOperators[_T]: ... + def in_(self, other: Container[_T] | Iterable[_T]) -> ColumnOperators[_T]: ... + def not_in(self, other: Container[_T] | Iterable[_T]) -> ColumnOperators[_T]: ... + def notin_(self, other: Container[_T] | Iterable[_T]) -> ColumnOperators[_T]: ... + def not_like(self, other: _T, escape: str | None = ...) -> ColumnOperators[_T]: ... + def notlike(self, other: _T, escape: str | None = ...) -> ColumnOperators[_T]: ... + def not_ilike(self, other: _T, escape: str | None = ...) -> ColumnOperators[_T]: ... + def notilike(self, other: _T, escape: str | None = ...) -> ColumnOperators[_T]: ... 
+ def is_(self, other: _T) -> ColumnOperators[_T]: ... + def is_not(self, other: _T) -> ColumnOperators[_T]: ... + def isnot(self, other: _T) -> ColumnOperators[_T]: ... + def startswith(self, other: str, **kwargs) -> ColumnOperators[_T]: ... + def endswith(self, other: str, **kwargs) -> ColumnOperators[_T]: ... + def contains(self, other: str, **kwargs) -> ColumnOperators[_T]: ... + def match(self, other: str, **kwargs) -> ColumnOperators[_T]: ... + def regexp_match(self, pattern, flags: Incomplete | None = ...) -> ColumnOperators[_T]: ... + def regexp_replace(self, pattern, replacement, flags: Incomplete | None = ...) -> ColumnOperators[_T]: ... + def desc(self) -> ColumnOperators[_T]: ... + def asc(self) -> ColumnOperators[_T]: ... + def nulls_first(self) -> ColumnOperators[_T]: ... + def nullsfirst(self) -> ColumnOperators[_T]: ... + def nulls_last(self) -> ColumnOperators[_T]: ... + def nullslast(self) -> ColumnOperators[_T]: ... + def collate(self, collation) -> ColumnOperators[_T]: ... + def __radd__(self, other) -> ColumnOperators[_T]: ... + def __rsub__(self, other) -> ColumnOperators[_T]: ... + def __rmul__(self, other) -> ColumnOperators[_T]: ... + def __rdiv__(self, other) -> ColumnOperators[_T]: ... + def __rmod__(self, other) -> ColumnOperators[_T]: ... + def between(self, cleft, cright, symmetric: bool = ...) -> ColumnOperators[_T]: ... + def distinct(self) -> ColumnOperators[_T]: ... + def any_(self) -> ColumnOperators[_T]: ... + def all_(self) -> ColumnOperators[_T]: ... + def __add__(self, other) -> ColumnOperators[_T]: ... + def __sub__(self, other) -> ColumnOperators[_T]: ... + def __mul__(self, other) -> ColumnOperators[_T]: ... + def __div__(self, other) -> ColumnOperators[_T]: ... + def __mod__(self, other) -> ColumnOperators[_T]: ... + def __truediv__(self, other) -> ColumnOperators[_T]: ... + def __rtruediv__(self, other) -> ColumnOperators[_T]: ... + +def commutative_op(fn): ... +def comparison_op(fn): ... +def from_() -> None: ... +def function_as_comparison_op() -> None: ... +def as_() -> None: ... +def exists() -> None: ... +def is_true(a) -> None: ... + +istrue = is_true + +def is_false(a) -> None: ... + +isfalse = is_false + +def is_distinct_from(a, b): ... +def is_not_distinct_from(a, b): ... + +isnot_distinct_from = is_not_distinct_from + +def is_(a, b): ... +def is_not(a, b): ... + +isnot = is_not + +def collate(a, b): ... +def op(a, opstring, b): ... +def like_op(a, b, escape: Incomplete | None = ...): ... +def not_like_op(a, b, escape: Incomplete | None = ...): ... + +notlike_op = not_like_op + +def ilike_op(a, b, escape: Incomplete | None = ...): ... +def not_ilike_op(a, b, escape: Incomplete | None = ...): ... + +notilike_op = not_ilike_op + +def between_op(a, b, c, symmetric: bool = ...): ... +def not_between_op(a, b, c, symmetric: bool = ...): ... + +notbetween_op = not_between_op + +def in_op(a, b): ... +def not_in_op(a, b): ... + +notin_op = not_in_op + +def distinct_op(a): ... +def any_op(a): ... +def all_op(a): ... +def startswith_op(a, b, escape: Incomplete | None = ..., autoescape: bool = ...): ... +def not_startswith_op(a, b, escape: Incomplete | None = ..., autoescape: bool = ...): ... + +notstartswith_op = not_startswith_op + +def endswith_op(a, b, escape: Incomplete | None = ..., autoescape: bool = ...): ... +def not_endswith_op(a, b, escape: Incomplete | None = ..., autoescape: bool = ...): ... + +notendswith_op = not_endswith_op + +def contains_op(a, b, escape: Incomplete | None = ..., autoescape: bool = ...): ... 
+def not_contains_op(a, b, escape: Incomplete | None = ..., autoescape: bool = ...): ... + +notcontains_op = not_contains_op + +def match_op(a, b, **kw): ... +def regexp_match_op(a, b, flags: Incomplete | None = ...): ... +def not_regexp_match_op(a, b, flags: Incomplete | None = ...): ... +def regexp_replace_op(a, b, replacement, flags: Incomplete | None = ...): ... +def not_match_op(a, b, **kw): ... + +notmatch_op = not_match_op + +def comma_op(a, b) -> None: ... +def filter_op(a, b) -> None: ... +def concat_op(a, b): ... +def desc_op(a): ... +def asc_op(a): ... +def nulls_first_op(a): ... + +nullsfirst_op = nulls_first_op + +def nulls_last_op(a): ... + +nullslast_op = nulls_last_op + +def json_getitem_op(a, b) -> None: ... +def json_path_getitem_op(a, b) -> None: ... +def is_comparison(op) -> bool: ... +def is_commutative(op) -> bool: ... +def is_ordering_modifier(op) -> bool: ... +def is_natural_self_precedent(op) -> bool: ... +def is_boolean(op) -> bool: ... +def mirror(op): ... +def is_associative(op) -> bool: ... +def is_precedent(operator, against) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/roles.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/roles.pyi new file mode 100644 index 00000000..e7c290b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/roles.pyi @@ -0,0 +1,57 @@ +class SQLRole: + allows_lambda: bool + uses_inspection: bool + +class UsesInspection: + uses_inspection: bool + +class AllowsLambdaRole: + allows_lambda: bool + +class HasCacheKeyRole(SQLRole): ... +class ExecutableOptionRole(SQLRole): ... +class LiteralValueRole(SQLRole): ... +class ColumnArgumentRole(SQLRole): ... +class ColumnArgumentOrKeyRole(ColumnArgumentRole): ... +class StrAsPlainColumnRole(ColumnArgumentRole): ... +class ColumnListRole(SQLRole): ... +class TruncatedLabelRole(SQLRole): ... +class ColumnsClauseRole(AllowsLambdaRole, UsesInspection, ColumnListRole): ... +class LimitOffsetRole(SQLRole): ... +class ByOfRole(ColumnListRole): ... +class GroupByRole(AllowsLambdaRole, UsesInspection, ByOfRole): ... +class OrderByRole(AllowsLambdaRole, ByOfRole): ... +class StructuralRole(SQLRole): ... +class StatementOptionRole(StructuralRole): ... +class OnClauseRole(AllowsLambdaRole, StructuralRole): ... +class WhereHavingRole(OnClauseRole): ... +class ExpressionElementRole(SQLRole): ... +class ConstExprRole(ExpressionElementRole): ... +class LabeledColumnExprRole(ExpressionElementRole): ... +class BinaryElementRole(ExpressionElementRole): ... +class InElementRole(SQLRole): ... +class JoinTargetRole(AllowsLambdaRole, UsesInspection, StructuralRole): ... +class FromClauseRole(ColumnsClauseRole, JoinTargetRole): ... + +class StrictFromClauseRole(FromClauseRole): + @property + def description(self) -> None: ... + +class AnonymizedFromClauseRole(StrictFromClauseRole): ... +class ReturnsRowsRole(SQLRole): ... +class StatementRole(SQLRole): ... + +class SelectStatementRole(StatementRole, ReturnsRowsRole): + def subquery(self) -> None: ... + +class HasCTERole(ReturnsRowsRole): ... +class IsCTERole(SQLRole): ... +class CompoundElementRole(AllowsLambdaRole, SQLRole): ... +class DMLRole(StatementRole): ... +class DMLTableRole(FromClauseRole): ... +class DMLColumnRole(SQLRole): ... +class DMLSelectRole(SQLRole): ... +class DDLRole(StatementRole): ... +class DDLExpressionRole(StructuralRole): ... 
+class DDLConstraintColumnRole(SQLRole): ... +class DDLReferredColumnRole(DDLConstraintColumnRole): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/schema.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/schema.pyi new file mode 100644 index 00000000..21656bee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/schema.pyi @@ -0,0 +1,378 @@ +from _typeshed import Incomplete +from typing import Any + +from ..util import memoized_property +from . import visitors +from .base import DialectKWArgs, Executable, SchemaEventTarget +from .elements import ColumnClause +from .selectable import TableClause + +RETAIN_SCHEMA: Any +BLANK_SCHEMA: Any +NULL_UNSPECIFIED: Any + +class SchemaItem(SchemaEventTarget, visitors.Visitable): + __visit_name__: str + create_drop_stringify_dialect: str + @memoized_property + def info(self): ... + +class Table(DialectKWArgs, SchemaItem, TableClause): + __visit_name__: str + constraints: Any + indexes: Any + def __new__(cls, *args, **kw): ... + def __init__(self, *args, **kw) -> None: ... + @property + def foreign_key_constraints(self): ... + @property + def key(self): ... + @property + def bind(self): ... + def add_is_dependent_on(self, table) -> None: ... + def append_column(self, column, replace_existing: bool = ...) -> None: ... # type: ignore[override] + def append_constraint(self, constraint) -> None: ... + def exists(self, bind: Incomplete | None = ...): ... + def create(self, bind: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + def drop(self, bind: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + def tometadata(self, metadata, schema=..., referred_schema_fn: Incomplete | None = ..., name: Incomplete | None = ...): ... + def to_metadata(self, metadata, schema=..., referred_schema_fn: Incomplete | None = ..., name: Incomplete | None = ...): ... + +class Column(DialectKWArgs, SchemaItem, ColumnClause): + __visit_name__: str + inherit_cache: bool + key: Any + primary_key: Any + nullable: Any + default: Any + server_default: Any + server_onupdate: Any + index: Any + unique: Any + system: Any + doc: Any + onupdate: Any + autoincrement: Any + constraints: Any + foreign_keys: Any + comment: Any + computed: Any + identity: Any + info: Any + def __init__(self, *args, **kwargs) -> None: ... + def references(self, column): ... + def append_foreign_key(self, fk) -> None: ... + def copy(self, **kw): ... + +class ForeignKey(DialectKWArgs, SchemaItem): + __visit_name__: str + constraint: Any + parent: Any + use_alter: Any + name: Any + onupdate: Any + ondelete: Any + deferrable: Any + initially: Any + link_to_name: Any + match: Any + info: Any + def __init__( + self, + column, + _constraint: Incomplete | None = ..., + use_alter: bool = ..., + name: Incomplete | None = ..., + onupdate: Incomplete | None = ..., + ondelete: Incomplete | None = ..., + deferrable: Incomplete | None = ..., + initially: Incomplete | None = ..., + link_to_name: bool = ..., + match: Incomplete | None = ..., + info: Incomplete | None = ..., + _unresolvable: bool = ..., + **dialect_kw, + ) -> None: ... + def copy(self, schema: Incomplete | None = ..., **kw): ... + @property + def target_fullname(self): ... + def references(self, table): ... + def get_referent(self, table): ... + @memoized_property + def column(self): ... 
+ +class DefaultGenerator(Executable, SchemaItem): + __visit_name__: str + is_sequence: bool + is_server_default: bool + column: Any + for_update: Any + def __init__(self, for_update: bool = ...) -> None: ... + def execute(self, bind: Incomplete | None = ...): ... # type: ignore[override] + @property + def bind(self): ... + +class ColumnDefault(DefaultGenerator): + arg: Any + def __init__(self, arg, **kwargs) -> None: ... + @memoized_property + def is_callable(self): ... + @memoized_property + def is_clause_element(self): ... + @memoized_property + def is_scalar(self): ... + +class IdentityOptions: + start: Any + increment: Any + minvalue: Any + maxvalue: Any + nominvalue: Any + nomaxvalue: Any + cycle: Any + cache: Any + order: Any + def __init__( + self, + start: Incomplete | None = ..., + increment: Incomplete | None = ..., + minvalue: Incomplete | None = ..., + maxvalue: Incomplete | None = ..., + nominvalue: Incomplete | None = ..., + nomaxvalue: Incomplete | None = ..., + cycle: Incomplete | None = ..., + cache: Incomplete | None = ..., + order: Incomplete | None = ..., + ) -> None: ... + +class Sequence(IdentityOptions, DefaultGenerator): + __visit_name__: str + is_sequence: bool + name: Any + optional: Any + schema: Any + metadata: Any + data_type: Any + def __init__( + self, + name, + start: Incomplete | None = ..., + increment: Incomplete | None = ..., + minvalue: Incomplete | None = ..., + maxvalue: Incomplete | None = ..., + nominvalue: Incomplete | None = ..., + nomaxvalue: Incomplete | None = ..., + cycle: Incomplete | None = ..., + schema: Incomplete | None = ..., + cache: Incomplete | None = ..., + order: Incomplete | None = ..., + data_type: Incomplete | None = ..., + optional: bool = ..., + quote: Incomplete | None = ..., + metadata: Incomplete | None = ..., + quote_schema: Incomplete | None = ..., + for_update: bool = ..., + ) -> None: ... + @memoized_property + def is_callable(self): ... + @memoized_property + def is_clause_element(self): ... + def next_value(self): ... + @property + def bind(self): ... + def create(self, bind: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + def drop(self, bind: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + +class FetchedValue(SchemaEventTarget): + is_server_default: bool + reflected: bool + has_argument: bool + is_clause_element: bool + for_update: Any + def __init__(self, for_update: bool = ...) -> None: ... + +class DefaultClause(FetchedValue): + has_argument: bool + arg: Any + reflected: Any + def __init__(self, arg, for_update: bool = ..., _reflected: bool = ...) -> None: ... + +class Constraint(DialectKWArgs, SchemaItem): + __visit_name__: str + name: Any + deferrable: Any + initially: Any + info: Any + def __init__( + self, + name: Incomplete | None = ..., + deferrable: Incomplete | None = ..., + initially: Incomplete | None = ..., + _create_rule: Incomplete | None = ..., + info: Incomplete | None = ..., + _type_bound: bool = ..., + **dialect_kw, + ) -> None: ... + @property + def table(self): ... + def copy(self, **kw): ... + +class ColumnCollectionMixin: + columns: Any + def __init__(self, *columns, **kw) -> None: ... + +class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint): + def __init__(self, *columns, **kw) -> None: ... + columns: Any + def __contains__(self, x): ... + def copy(self, target_table: Incomplete | None = ..., **kw): ... + def contains_column(self, col): ... + def __iter__(self): ... + def __len__(self) -> int: ... 
+ +class CheckConstraint(ColumnCollectionConstraint): + __visit_name__: str + sqltext: Any + def __init__( + self, + sqltext, + name: Incomplete | None = ..., + deferrable: Incomplete | None = ..., + initially: Incomplete | None = ..., + table: Incomplete | None = ..., + info: Incomplete | None = ..., + _create_rule: Incomplete | None = ..., + _autoattach: bool = ..., + _type_bound: bool = ..., + **kw, + ) -> None: ... + @property + def is_column_level(self): ... + def copy(self, target_table: Incomplete | None = ..., **kw): ... + +class ForeignKeyConstraint(ColumnCollectionConstraint): + __visit_name__: str + onupdate: Any + ondelete: Any + link_to_name: Any + use_alter: Any + match: Any + elements: Any + def __init__( + self, + columns, + refcolumns, + name: Incomplete | None = ..., + onupdate: Incomplete | None = ..., + ondelete: Incomplete | None = ..., + deferrable: Incomplete | None = ..., + initially: Incomplete | None = ..., + use_alter: bool = ..., + link_to_name: bool = ..., + match: Incomplete | None = ..., + table: Incomplete | None = ..., + info: Incomplete | None = ..., + **dialect_kw, + ) -> None: ... + columns: Any + @property + def referred_table(self): ... + @property + def column_keys(self): ... + def copy(self, schema: Incomplete | None = ..., target_table: Incomplete | None = ..., **kw): ... # type: ignore[override] + +class PrimaryKeyConstraint(ColumnCollectionConstraint): + __visit_name__: str + def __init__(self, *columns, **kw) -> None: ... + @property + def columns_autoinc_first(self): ... + +class UniqueConstraint(ColumnCollectionConstraint): + __visit_name__: str + +class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem): + __visit_name__: str + table: Any + name: Any + unique: Any + info: Any + expressions: Any + def __init__(self, name, *expressions, **kw) -> None: ... + @property + def bind(self): ... + def create(self, bind: Incomplete | None = ..., checkfirst: bool = ...): ... + def drop(self, bind: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + +DEFAULT_NAMING_CONVENTION: Any + +class MetaData(SchemaItem): + __visit_name__: str + tables: Any + schema: Any + naming_convention: Any + info: Any + def __init__( + self, + bind: Incomplete | None = ..., + schema: Incomplete | None = ..., + quote_schema: Incomplete | None = ..., + naming_convention: Incomplete | None = ..., + info: Incomplete | None = ..., + ) -> None: ... + def __contains__(self, table_or_key) -> bool: ... + def is_bound(self): ... + bind: Any + def clear(self) -> None: ... + def remove(self, table) -> None: ... + @property + def sorted_tables(self): ... + def reflect( + self, + bind: Incomplete | None = ..., + schema: Incomplete | None = ..., + views: bool = ..., + only: Incomplete | None = ..., + extend_existing: bool = ..., + autoload_replace: bool = ..., + resolve_fks: bool = ..., + **dialect_kwargs, + ) -> None: ... + def create_all(self, bind: Incomplete | None = ..., tables: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + def drop_all(self, bind: Incomplete | None = ..., tables: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + +class ThreadLocalMetaData(MetaData): + __visit_name__: str + context: Any + def __init__(self) -> None: ... + bind: Any + def is_bound(self): ... + def dispose(self) -> None: ... + +class Computed(FetchedValue, SchemaItem): + __visit_name__: str + sqltext: Any + persisted: Any + column: Any + def __init__(self, sqltext, persisted: Incomplete | None = ...) -> None: ... 
+ def copy(self, target_table: Incomplete | None = ..., **kw): ... + +class Identity(IdentityOptions, FetchedValue, SchemaItem): + __visit_name__: str + always: Any + on_null: Any + column: Any + def __init__( + self, + always: bool = ..., + on_null: Incomplete | None = ..., + start: Incomplete | None = ..., + increment: Incomplete | None = ..., + minvalue: Incomplete | None = ..., + maxvalue: Incomplete | None = ..., + nominvalue: Incomplete | None = ..., + nomaxvalue: Incomplete | None = ..., + cycle: Incomplete | None = ..., + cache: Incomplete | None = ..., + order: Incomplete | None = ..., + ) -> None: ... + def copy(self, **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/selectable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/selectable.pyi new file mode 100644 index 00000000..eb24ab00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/selectable.pyi @@ -0,0 +1,420 @@ +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Self + +from .. import util +from ..util import HasMemoized, memoized_property +from . import roles, traversals, visitors +from .annotation import Annotated, SupportsCloneAnnotations +from .base import CacheableOptions, CompileState, Executable, Generative, HasCompileState, Immutable +from .elements import ( + BindParameter as BindParameter, + BooleanClauseList as BooleanClauseList, + ClauseElement as ClauseElement, + ClauseList as ClauseList, + ColumnClause as ColumnClause, + GroupedElement as GroupedElement, + Grouping as Grouping, + TableValuedColumn as TableValuedColumn, + UnaryExpression as UnaryExpression, + literal_column as literal_column, +) + +class _OffsetLimitParam(BindParameter[Any]): + inherit_cache: bool + +def subquery(alias, *args, **kwargs): ... + +class ReturnsRows(roles.ReturnsRowsRole, ClauseElement): + @property + def selectable(self): ... + @property + def exported_columns(self) -> None: ... + +class Selectable(ReturnsRows): + __visit_name__: str + is_selectable: bool + def lateral(self, name: Incomplete | None = ...): ... + def replace_selectable(self, old, alias): ... + def corresponding_column(self, column, require_embedded: bool = ...): ... + +class HasPrefixes: + def prefix_with(self, *expr, **kw) -> Self: ... + +class HasSuffixes: + def suffix_with(self, *expr, **kw) -> Self: ... + +class HasHints: + def with_statement_hint(self, text, dialect_name: str = ...): ... + def with_hint(self, selectable, text: str, dialect_name: str = ...) -> Self: ... + +class FromClause(roles.AnonymizedFromClauseRole, Selectable): + __visit_name__: str + named_with_column: bool + schema: Any + is_selectable: bool + def select(self, whereclause: Incomplete | None = ..., **kwargs): ... + def join(self, right, onclause: Incomplete | None = ..., isouter: bool = ..., full: bool = ...): ... + def outerjoin(self, right, onclause: Incomplete | None = ..., full: bool = ...): ... + def alias(self, name: Incomplete | None = ..., flat: bool = ...): ... + def table_valued(self): ... + def tablesample(self, sampling, name: Incomplete | None = ..., seed: Incomplete | None = ...): ... + def is_derived_from(self, fromclause): ... + @property + def description(self): ... + @property + def exported_columns(self): ... + @memoized_property + def columns(self): ... + @property + def entity_namespace(self): ... 
+ @memoized_property + def primary_key(self): ... + @memoized_property + def foreign_keys(self): ... + @property + def c(self): ... + +LABEL_STYLE_NONE: Any +LABEL_STYLE_TABLENAME_PLUS_COL: Any +LABEL_STYLE_DISAMBIGUATE_ONLY: Any +LABEL_STYLE_DEFAULT: Any + +class Join(roles.DMLTableRole, FromClause): + __visit_name__: str + left: Any + right: Any + onclause: Any + isouter: Any + full: Any + def __init__(self, left, right, onclause: Incomplete | None = ..., isouter: bool = ..., full: bool = ...) -> None: ... + @property + def description(self): ... + def is_derived_from(self, fromclause): ... + def self_group(self, against: Incomplete | None = ...): ... + def select(self, whereclause: Incomplete | None = ..., **kwargs): ... + @property + def bind(self): ... + def alias(self, name: Incomplete | None = ..., flat: bool = ...): ... + +class NoInit: + def __init__(self, *arg, **kw) -> None: ... + +class AliasedReturnsRows(NoInit, FromClause): + named_with_column: bool + @property + def description(self): ... + @property + def original(self): ... + def is_derived_from(self, fromclause): ... + @property + def bind(self): ... + +class Alias(roles.DMLTableRole, AliasedReturnsRows): + __visit_name__: str + inherit_cache: bool + +class TableValuedAlias(Alias): + __visit_name__: str + @HasMemoized.memoized_attribute + def column(self): ... + def alias(self, name: Incomplete | None = ...): ... # type: ignore[override] + def lateral(self, name: Incomplete | None = ...): ... + def render_derived(self, name: Incomplete | None = ..., with_types: bool = ...): ... + +class Lateral(AliasedReturnsRows): + __visit_name__: str + inherit_cache: bool + +class TableSample(AliasedReturnsRows): + __visit_name__: str + +class CTE(roles.DMLTableRole, roles.IsCTERole, Generative, HasPrefixes, HasSuffixes, AliasedReturnsRows): + __visit_name__: str + def alias(self, name: Incomplete | None = ..., flat: bool = ...): ... + def union(self, *other): ... + def union_all(self, *other): ... + +class HasCTE(roles.HasCTERole): + def add_cte(self, cte) -> None: ... + def cte(self, name: Incomplete | None = ..., recursive: bool = ..., nesting: bool = ...): ... + +class Subquery(AliasedReturnsRows): + __visit_name__: str + inherit_cache: bool + def as_scalar(self): ... + +class FromGrouping(GroupedElement, FromClause): + element: Any + def __init__(self, element) -> None: ... + @property + def columns(self): ... + @property + def primary_key(self): ... + @property + def foreign_keys(self): ... + def is_derived_from(self, element): ... + def alias(self, **kw): ... + +class TableClause(roles.DMLTableRole, Immutable, FromClause): + __visit_name__: str + named_with_column: bool + implicit_returning: bool + name: Any + primary_key: Any + foreign_keys: Any + schema: Any + fullname: Any + def __init__(self, name, *columns, **kw) -> None: ... + @memoized_property + def description(self): ... + def append_column(self, c, **kw) -> None: ... + def insert(self, values: Incomplete | None = ..., inline: bool = ..., **kwargs): ... + def update(self, whereclause: Incomplete | None = ..., values: Incomplete | None = ..., inline: bool = ..., **kwargs): ... + def delete(self, whereclause: Incomplete | None = ..., **kwargs): ... + +class ForUpdateArg(ClauseElement): + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... 
+ nowait: Any + read: Any + skip_locked: Any + key_share: Any + of: Any + def __init__( + self, nowait: bool = ..., read: bool = ..., of: Incomplete | None = ..., skip_locked: bool = ..., key_share: bool = ... + ) -> None: ... + +class Values(Generative, FromClause): + named_with_column: bool + __visit_name__: str + name: Any + literal_binds: Any + def __init__(self, *columns, **kw) -> None: ... + def alias(self, name: Incomplete | None, **kw) -> Self: ... # type: ignore[override] + def lateral(self, name: Incomplete | None = ...) -> Self: ... + def data(self, values) -> Self: ... + +class SelectBase( + roles.SelectStatementRole, + roles.DMLSelectRole, + roles.CompoundElementRole, + roles.InElementRole, + HasCTE, + Executable, + SupportsCloneAnnotations, + Selectable, +): + is_select: bool + @property + def selected_columns(self) -> None: ... + @property + def exported_columns(self): ... + @property + def c(self): ... + @property + def columns(self): ... + def select(self, *arg, **kw): ... + def as_scalar(self): ... + def exists(self): ... + def scalar_subquery(self): ... + def label(self, name): ... + def lateral(self, name: Incomplete | None = ...): ... + def subquery(self, name: Incomplete | None = ...): ... + def alias(self, name: Incomplete | None = ..., flat: bool = ...): ... + +class SelectStatementGrouping(GroupedElement, SelectBase): + __visit_name__: str + element: Any + def __init__(self, element) -> None: ... + def get_label_style(self): ... + def set_label_style(self, label_style): ... + @property + def select_statement(self): ... + def self_group(self, against: Incomplete | None = ...): ... + @property + def selected_columns(self): ... + +class DeprecatedSelectBaseGenerations: + def append_order_by(self, *clauses) -> None: ... + def append_group_by(self, *clauses) -> None: ... + +class GenerativeSelect(DeprecatedSelectBaseGenerations, SelectBase): + def __init__( + self, + _label_style=..., + use_labels: bool = ..., + limit: Incomplete | None = ..., + offset: Incomplete | None = ..., + order_by: Incomplete | None = ..., + group_by: Incomplete | None = ..., + bind: Incomplete | None = ..., + ) -> None: ... + def with_for_update( + self, nowait: bool = ..., read: bool = ..., of: Incomplete | None = ..., skip_locked: bool = ..., key_share: bool = ... + ) -> Self: ... + def get_label_style(self): ... + def set_label_style(self, style): ... + def apply_labels(self): ... + def limit(self, limit: Incomplete | None) -> Self: ... + def fetch(self, count: Incomplete | None, with_ties: bool = ..., percent: bool = ...) -> Self: ... + def offset(self, offset: Incomplete | None) -> Self: ... + def slice(self, start: Incomplete | None, stop: Incomplete | None) -> Self: ... + def order_by(self, *clauses) -> Self: ... + def group_by(self, *clauses) -> Self: ... + +class CompoundSelectState(CompileState): ... + +class CompoundSelect(HasCompileState, GenerativeSelect): + __visit_name__: str + UNION: Any + UNION_ALL: Any + EXCEPT: Any + EXCEPT_ALL: Any + INTERSECT: Any + INTERSECT_ALL: Any + keyword: Any + selects: Any + def __init__(self, keyword, *selects, **kwargs) -> None: ... + def self_group(self, against: Incomplete | None = ...): ... + def is_derived_from(self, fromclause): ... + @property + def selected_columns(self): ... + @property + def bind(self): ... + @bind.setter + def bind(self, bind) -> None: ... + +class DeprecatedSelectGenerations: + def append_correlation(self, fromclause) -> None: ... + def append_column(self, column) -> None: ... 
+ def append_prefix(self, clause) -> None: ... + def append_whereclause(self, whereclause) -> None: ... + def append_having(self, having) -> None: ... + def append_from(self, fromclause) -> None: ... + +class SelectState(util.MemoizedSlots, CompileState): + class default_select_compile_options(CacheableOptions): ... + statement: Any + from_clauses: Any + froms: Any + columns_plus_names: Any + def __init__(self, statement, compiler, **kw) -> None: ... + @classmethod + def get_column_descriptions(cls, statement) -> None: ... + @classmethod + def from_statement(cls, statement, from_statement) -> None: ... + @classmethod + def get_columns_clause_froms(cls, statement): ... + @classmethod + def determine_last_joined_entity(cls, stmt): ... + @classmethod + def all_selected_columns(cls, statement): ... + +class _SelectFromElements: ... + +class _MemoizedSelectEntities(traversals.HasCacheKey, traversals.HasCopyInternals, visitors.Traversible): + __visit_name__: str + +class Select( + HasPrefixes, HasSuffixes, HasHints, HasCompileState, DeprecatedSelectGenerations, _SelectFromElements, GenerativeSelect +): + __visit_name__: str + @classmethod + def create_legacy_select( + cls, + columns: Incomplete | None = ..., + whereclause: Incomplete | None = ..., + from_obj: Incomplete | None = ..., + distinct: bool = ..., + having: Incomplete | None = ..., + correlate: bool = ..., + prefixes: Incomplete | None = ..., + suffixes: Incomplete | None = ..., + **kwargs, + ): ... + def __init__(self) -> None: ... + def filter(self, *criteria): ... + def filter_by(self, **kwargs): ... + @property + def column_descriptions(self): ... + def from_statement(self, statement): ... + def join(self, target, onclause: Incomplete | None = ..., isouter: bool = ..., full: bool = ...) -> Self: ... + def outerjoin_from(self, from_, target, onclause: Incomplete | None = ..., full: bool = ...): ... + def join_from(self, from_, target, onclause: Incomplete | None = ..., isouter: bool = ..., full: bool = ...) -> Self: ... + def outerjoin(self, target, onclause: Incomplete | None = ..., full: bool = ...): ... + def get_final_froms(self): ... + @property + def froms(self): ... + @property + def columns_clause_froms(self): ... + @property + def inner_columns(self): ... + def is_derived_from(self, fromclause): ... + def get_children(self, **kwargs): ... + def add_columns(self, *columns) -> Self: ... + def column(self, column): ... + def reduce_columns(self, only_synonyms: bool = ...): ... + def with_only_columns(self, *columns, **kw) -> Self: ... + @property + def whereclause(self): ... + def where(self, *whereclause) -> Self: ... + def having(self, having) -> Self: ... + def distinct(self, *expr) -> Self: ... + def select_from(self, *froms) -> Self: ... + def correlate(self, *fromclauses) -> Self: ... + def correlate_except(self, *fromclauses) -> Self: ... + @HasMemoized.memoized_attribute + def selected_columns(self): ... + def self_group(self, against: Incomplete | None = ...): ... + def union(self, *other, **kwargs): ... + def union_all(self, *other, **kwargs): ... + def except_(self, *other, **kwargs): ... + def except_all(self, *other, **kwargs): ... + def intersect(self, *other, **kwargs): ... + def intersect_all(self, *other, **kwargs): ... + @property + def bind(self): ... + @bind.setter + def bind(self, bind) -> None: ... + +class ScalarSelect(roles.InElementRole, Generative, Grouping): + inherit_cache: bool + element: Any + type: Any + def __init__(self, element) -> None: ... + @property + def columns(self) -> None: ... 
+ @property + def c(self): ... + def where(self, crit) -> Self: ... + def self_group(self, **kwargs): ... + def correlate(self, *fromclauses) -> Self: ... + def correlate_except(self, *fromclauses) -> Self: ... + +class Exists(UnaryExpression): + inherit_cache: bool + def __init__(self, *args, **kwargs) -> None: ... + def select(self, whereclause: Incomplete | None = ..., **kwargs): ... + def correlate(self, *fromclause): ... + def correlate_except(self, *fromclause): ... + def select_from(self, *froms): ... + def where(self, *clause): ... + +class TextualSelect(SelectBase): + __visit_name__: str + is_text: bool + is_select: bool + element: Any + column_args: Any + positional: Any + def __init__(self, text, columns, positional: bool = ...) -> None: ... + @HasMemoized.memoized_attribute + def selected_columns(self): ... + def bindparams(self, *binds, **bind_as_values) -> Self: ... + +TextAsFrom = TextualSelect + +class AnnotatedFromClause(Annotated): + def __init__(self, element, values) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/sqltypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/sqltypes.pyi new file mode 100644 index 00000000..eef39d63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/sqltypes.pyi @@ -0,0 +1,378 @@ +from _typeshed import Incomplete +from typing import Any, Generic, TypeVar + +from .base import SchemaEventTarget +from .operators import ColumnOperators +from .traversals import HasCacheKey +from .type_api import ( + Emulated as Emulated, + NativeForEmulated as NativeForEmulated, + TypeDecorator as TypeDecorator, + TypeEngine as TypeEngine, + Variant as Variant, + to_instance as to_instance, +) + +_T = TypeVar("_T") + +class _LookupExpressionAdapter: + class Comparator(TypeEngine.Comparator[Any]): ... + comparator_factory: Any + +class Concatenable: + class Comparator(TypeEngine.Comparator[_T], Generic[_T]): ... + comparator_factory: Any + +class Indexable: + class Comparator(TypeEngine.Comparator[_T], Generic[_T]): + def __getitem__(self, index) -> ColumnOperators[_T]: ... + comparator_factory: Any + +class String(Concatenable, TypeEngine): + __visit_name__: str + RETURNS_UNICODE: Any + RETURNS_BYTES: Any + RETURNS_CONDITIONAL: Any + RETURNS_UNKNOWN: Any + length: Any + collation: Any + def __init__( + self, + length: Incomplete | None = ..., + collation: Incomplete | None = ..., + convert_unicode: bool = ..., + unicode_error: Incomplete | None = ..., + _warn_on_bytestring: bool = ..., + _expect_unicode: bool = ..., + ) -> None: ... + def literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + @property + def python_type(self): ... + def get_dbapi_type(self, dbapi): ... + +class Text(String): + __visit_name__: str + +class Unicode(String): + __visit_name__: str + def __init__(self, length: Incomplete | None = ..., **kwargs) -> None: ... + +class UnicodeText(Text): + __visit_name__: str + def __init__(self, length: Incomplete | None = ..., **kwargs) -> None: ... + +class Integer(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + def get_dbapi_type(self, dbapi): ... + @property + def python_type(self): ... + def literal_processor(self, dialect): ... 
+ +class SmallInteger(Integer): + __visit_name__: str + +class BigInteger(Integer): + __visit_name__: str + +class Numeric(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + precision: Any + scale: Any + decimal_return_scale: Any + asdecimal: Any + def __init__( + self, + precision: Incomplete | None = ..., + scale: Incomplete | None = ..., + decimal_return_scale: Incomplete | None = ..., + asdecimal: bool = ..., + ) -> None: ... + def get_dbapi_type(self, dbapi): ... + def literal_processor(self, dialect): ... + @property + def python_type(self): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class Float(Numeric): + __visit_name__: str + scale: Any + precision: Any + asdecimal: Any + decimal_return_scale: Any + def __init__( + self, precision: Incomplete | None = ..., asdecimal: bool = ..., decimal_return_scale: Incomplete | None = ... + ) -> None: ... + def result_processor(self, dialect, coltype): ... + +class DateTime(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + timezone: Any + def __init__(self, timezone: bool = ...) -> None: ... + def get_dbapi_type(self, dbapi): ... + @property + def python_type(self): ... + +class Date(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + def get_dbapi_type(self, dbapi): ... + @property + def python_type(self): ... + +class Time(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + timezone: Any + def __init__(self, timezone: bool = ...) -> None: ... + def get_dbapi_type(self, dbapi): ... + @property + def python_type(self): ... + +class _Binary(TypeEngine): + length: Any + def __init__(self, length: Incomplete | None = ...) -> None: ... + def literal_processor(self, dialect): ... + @property + def python_type(self): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + def coerce_compared_value(self, op, value): ... + def get_dbapi_type(self, dbapi): ... + +class LargeBinary(_Binary): + __visit_name__: str + def __init__(self, length: Incomplete | None = ...) -> None: ... + +class SchemaType(SchemaEventTarget): + name: Any + schema: Any + metadata: Any + inherit_schema: Any + def __init__( + self, + name: Incomplete | None = ..., + schema: Incomplete | None = ..., + metadata: Incomplete | None = ..., + inherit_schema: bool = ..., + quote: Incomplete | None = ..., + _create_events: bool = ..., + ) -> None: ... + def copy(self, **kw): ... + def adapt(self, impltype, **kw): ... + @property + def bind(self): ... + def create(self, bind: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + def drop(self, bind: Incomplete | None = ..., checkfirst: bool = ...) -> None: ... + +class Enum(Emulated, String, SchemaType): + __visit_name__: str + def __init__(self, *enums, **kw) -> None: ... + @property + def sort_key_function(self): ... + @property + def native(self): ... + + class Comparator(Concatenable.Comparator[Any]): ... + comparator_factory: Any + def as_generic(self, allow_nulltype: bool = ...): ... + def adapt_to_emulated(self, impltype, **kw): ... + def adapt(self, impltype, **kw): ... + def literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + def copy(self, **kw): ... + @property + def python_type(self): ... 
+ +class PickleType(TypeDecorator): + impl: Any + cache_ok: bool + protocol: Any + pickler: Any + comparator: Any + def __init__( + self, protocol=..., pickler: Incomplete | None = ..., comparator: Incomplete | None = ..., impl: Incomplete | None = ... + ) -> None: ... + def __reduce__(self): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + def compare_values(self, x, y): ... + +class Boolean(Emulated, TypeEngine, SchemaType): # type: ignore[misc] + __visit_name__: str + native: bool + create_constraint: Any + name: Any + def __init__(self, create_constraint: bool = ..., name: Incomplete | None = ..., _create_events: bool = ...) -> None: ... + @property + def python_type(self): ... + def literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _AbstractInterval(_LookupExpressionAdapter, TypeEngine): + def coerce_compared_value(self, op, value): ... + +class Interval(Emulated, _AbstractInterval, TypeDecorator): # type: ignore[misc] + impl: Any + epoch: Any + cache_ok: bool + native: Any + second_precision: Any + day_precision: Any + def __init__( + self, native: bool = ..., second_precision: Incomplete | None = ..., day_precision: Incomplete | None = ... + ) -> None: ... + @property + def python_type(self): ... + def adapt_to_emulated(self, impltype, **kw): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class JSON(Indexable, TypeEngine): + __visit_name__: str + hashable: bool + NULL: Any + none_as_null: Any + def __init__(self, none_as_null: bool = ...) -> None: ... + + class JSONElementType(TypeEngine): + def string_bind_processor(self, dialect): ... + def string_literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + + class JSONIndexType(JSONElementType): ... + class JSONIntIndexType(JSONIndexType): ... + class JSONStrIndexType(JSONIndexType): ... + class JSONPathType(JSONElementType): ... + + class Comparator(Indexable.Comparator[Any], Concatenable.Comparator[Any]): + def as_boolean(self): ... + def as_string(self): ... + def as_integer(self): ... + def as_float(self): ... + def as_numeric(self, precision, scale, asdecimal: bool = ...): ... + def as_json(self): ... + comparator_factory: Any + @property + def python_type(self): ... + @property # type: ignore[override] + def should_evaluate_none(self): ... + @should_evaluate_none.setter + def should_evaluate_none(self, value) -> None: ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class ARRAY(SchemaEventTarget, Indexable, Concatenable, TypeEngine): + __visit_name__: str + zero_indexes: bool + + class Comparator(Indexable.Comparator[_T], Concatenable.Comparator[_T], Generic[_T]): + def contains(self, *arg, **kw) -> ColumnOperators[_T]: ... + def any(self, other, operator: Incomplete | None = ...): ... + def all(self, other, operator: Incomplete | None = ...): ... + comparator_factory: Any + item_type: Any + as_tuple: Any + dimensions: Any + def __init__( + self, item_type, as_tuple: bool = ..., dimensions: Incomplete | None = ..., zero_indexes: bool = ... + ) -> None: ... + @property + def hashable(self): ... + @property + def python_type(self): ... + def compare_values(self, x, y): ... + +class TupleType(TypeEngine): + types: Any + def __init__(self, *types) -> None: ... + def result_processor(self, dialect, coltype) -> None: ... 
+ +class REAL(Float): + __visit_name__: str + +class FLOAT(Float): + __visit_name__: str + +class NUMERIC(Numeric): + __visit_name__: str + +class DECIMAL(Numeric): + __visit_name__: str + +class INTEGER(Integer): + __visit_name__: str + +INT = INTEGER + +class SMALLINT(SmallInteger): + __visit_name__: str + +class BIGINT(BigInteger): + __visit_name__: str + +class TIMESTAMP(DateTime): + __visit_name__: str + def __init__(self, timezone: bool = ...) -> None: ... + def get_dbapi_type(self, dbapi): ... + +class DATETIME(DateTime): + __visit_name__: str + +class DATE(Date): + __visit_name__: str + +class TIME(Time): + __visit_name__: str + +class TEXT(Text): + __visit_name__: str + +class CLOB(Text): + __visit_name__: str + +class VARCHAR(String): + __visit_name__: str + +class NVARCHAR(Unicode): + __visit_name__: str + +class CHAR(String): + __visit_name__: str + +class NCHAR(Unicode): + __visit_name__: str + +class BLOB(LargeBinary): + __visit_name__: str + +class BINARY(_Binary): + __visit_name__: str + +class VARBINARY(_Binary): + __visit_name__: str + +class BOOLEAN(Boolean): + __visit_name__: str + +class NullType(TypeEngine): + __visit_name__: str + def literal_processor(self, dialect): ... + + class Comparator(TypeEngine.Comparator[Any]): ... + comparator_factory: Any + +class TableValueType(HasCacheKey, TypeEngine): + def __init__(self, *elements) -> None: ... + +class MatchType(Boolean): ... + +NULLTYPE: Any +BOOLEANTYPE: Any +STRINGTYPE: Any +INTEGERTYPE: Any +MATCHTYPE: Any +TABLEVALUE: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/traversals.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/traversals.pyi new file mode 100644 index 00000000..0bd17128 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/traversals.pyi @@ -0,0 +1,158 @@ +from typing import Any + +from .. import util +from ..util import HasMemoized +from .visitors import ExtendedInternalTraversal, InternalTraversal + +SKIP_TRAVERSE: Any +COMPARE_FAILED: bool +COMPARE_SUCCEEDED: bool +NO_CACHE: Any +CACHE_IN_PLACE: Any +CALL_GEN_CACHE_KEY: Any +STATIC_CACHE_KEY: Any +PROPAGATE_ATTRS: Any +ANON_NAME: Any + +def compare(obj1, obj2, **kw): ... + +class HasCacheKey: + inherit_cache: Any + +class MemoizedHasCacheKey(HasCacheKey, HasMemoized): ... + +class CacheKey: + def __hash__(self) -> int: ... + def to_offline_string(self, statement_cache, statement, parameters): ... + def __eq__(self, other): ... + +class _CacheKey(ExtendedInternalTraversal): + visit_has_cache_key: Any + visit_clauseelement: Any + visit_clauseelement_list: Any + visit_annotations_key: Any + visit_clauseelement_tuple: Any + visit_memoized_select_entities: Any + visit_string: Any + visit_boolean: Any + visit_operator: Any + visit_plain_obj: Any + visit_statement_hint_list: Any + visit_type: Any + visit_anon_name: Any + visit_propagate_attrs: Any + def visit_with_context_options(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_inspectable(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_string_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_multi(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_multi_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_has_cache_key_tuples(self, attrname, obj, parent, anon_map, bindparams): ... 
+ def visit_has_cache_key_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_executable_options(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_inspectable_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_clauseelement_tuples(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_fromclause_ordered_set(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_clauseelement_unordered_set(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_named_ddl_element(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_prefix_sequence(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_setup_join_tuple(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_table_hint_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_plain_dict(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_dialect_options(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_string_clauseelement_dict(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_string_multi_dict(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_fromclause_canonical_column_collection(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_unknown_structure(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_dml_ordered_values(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_dml_values(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_dml_multi_values(self, attrname, obj, parent, anon_map, bindparams): ... + +class HasCopyInternals: ... + +class _CopyInternals(InternalTraversal): + def visit_clauseelement(self, attrname, parent, element, clone=..., **kw): ... + def visit_clauseelement_list(self, attrname, parent, element, clone=..., **kw): ... + def visit_clauseelement_tuple(self, attrname, parent, element, clone=..., **kw): ... + def visit_executable_options(self, attrname, parent, element, clone=..., **kw): ... + def visit_clauseelement_unordered_set(self, attrname, parent, element, clone=..., **kw): ... + def visit_clauseelement_tuples(self, attrname, parent, element, clone=..., **kw): ... + def visit_string_clauseelement_dict(self, attrname, parent, element, clone=..., **kw): ... + def visit_setup_join_tuple(self, attrname, parent, element, clone=..., **kw): ... + def visit_memoized_select_entities(self, attrname, parent, element, **kw): ... + def visit_dml_ordered_values(self, attrname, parent, element, clone=..., **kw): ... + def visit_dml_values(self, attrname, parent, element, clone=..., **kw): ... + def visit_dml_multi_values(self, attrname, parent, element, clone=..., **kw): ... + def visit_propagate_attrs(self, attrname, parent, element, clone=..., **kw): ... + +class _GetChildren(InternalTraversal): + def visit_has_cache_key(self, element, **kw): ... + def visit_clauseelement(self, element, **kw): ... + def visit_clauseelement_list(self, element, **kw): ... + def visit_clauseelement_tuple(self, element, **kw): ... + def visit_clauseelement_tuples(self, element, **kw): ... + def visit_fromclause_canonical_column_collection(self, element, **kw): ... + def visit_string_clauseelement_dict(self, element, **kw): ... + def visit_fromclause_ordered_set(self, element, **kw): ... + def visit_clauseelement_unordered_set(self, element, **kw): ... + def visit_setup_join_tuple(self, element, **kw) -> None: ... 
+ def visit_memoized_select_entities(self, element, **kw): ... + def visit_dml_ordered_values(self, element, **kw) -> None: ... + def visit_dml_values(self, element, **kw) -> None: ... + def visit_dml_multi_values(self, element, **kw): ... + def visit_propagate_attrs(self, element, **kw): ... + +class anon_map(dict[Any, Any]): + index: int + def __init__(self) -> None: ... + def __missing__(self, key): ... + +class TraversalComparatorStrategy(InternalTraversal, util.MemoizedSlots): + stack: Any + cache: Any + anon_map: Any + def __init__(self) -> None: ... + def compare(self, obj1, obj2, **kw): ... + def compare_inner(self, obj1, obj2, **kw): ... + def visit_has_cache_key(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_propagate_attrs(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_has_cache_key_list(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_executable_options(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_clauseelement(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_fromclause_canonical_column_collection(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_fromclause_derived_column_collection(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_string_clauseelement_dict(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_clauseelement_tuples(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_clauseelement_list(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_clauseelement_tuple(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_clauseelement_unordered_set(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_fromclause_ordered_set(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_string(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_string_list(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_anon_name(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_boolean(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_operator(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_type(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_plain_dict(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_dialect_options(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_annotations_key(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_with_context_options(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_plain_obj(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_named_ddl_element(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_prefix_sequence(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_setup_join_tuple(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_memoized_select_entities(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_table_hint_list(self, attrname, left_parent, left, right_parent, right, **kw): ... 
+ def visit_statement_hint_list(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_unknown_structure(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_dml_ordered_values(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_dml_values(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_dml_multi_values(self, attrname, left_parent, left, right_parent, right, **kw): ... + def compare_clauselist(self, left, right, **kw): ... + def compare_binary(self, left, right, **kw): ... + def compare_bindparam(self, left, right, **kw): ... + +class ColIdentityComparatorStrategy(TraversalComparatorStrategy): + def compare_column_element(self, left, right, use_proxies: bool = ..., equivalents=..., **kw): ... + def compare_column(self, left, right, **kw): ... + def compare_label(self, left, right, **kw): ... + def compare_table(self, left, right, **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/type_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/type_api.pyi new file mode 100644 index 00000000..dd3f8492 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/type_api.pyi @@ -0,0 +1,115 @@ +from _typeshed import Incomplete +from typing import Any, Generic, TypeVar + +from .. import util +from . import operators +from .base import SchemaEventTarget +from .visitors import Traversible, TraversibleType + +_T = TypeVar("_T") + +BOOLEANTYPE: Any +INTEGERTYPE: Any +NULLTYPE: Any +STRINGTYPE: Any +MATCHTYPE: Any +INDEXABLE: Any +TABLEVALUE: Any + +class TypeEngine(Traversible): + class Comparator(operators.ColumnOperators[_T], Generic[_T]): + default_comparator: Any + def __clause_element__(self): ... + expr: Any + type: Any + def __init__(self, expr) -> None: ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... + def __reduce__(self): ... + hashable: bool + comparator_factory: Any + sort_key_function: Any + should_evaluate_none: bool + def evaluates_none(self): ... + def copy(self, **kw): ... + def compare_against_backend(self, dialect, conn_type) -> None: ... + def copy_value(self, value): ... + def literal_processor(self, dialect) -> None: ... + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype) -> None: ... + def column_expression(self, colexpr) -> None: ... + def bind_expression(self, bindvalue) -> None: ... + def compare_values(self, x, y): ... + def get_dbapi_type(self, dbapi) -> None: ... + @property + def python_type(self) -> None: ... + def with_variant(self, type_, dialect_name): ... + def as_generic(self, allow_nulltype: bool = ...): ... + def dialect_impl(self, dialect): ... + def adapt(self, cls, **kw): ... + def coerce_compared_value(self, op, value): ... + def compile(self, dialect: Incomplete | None = ...): ... + +class VisitableCheckKWArg(util.EnsureKWArgType, TraversibleType): ... + +class ExternalType: + cache_ok: Any + +class UserDefinedType: + __visit_name__: str + ensure_kwarg: str + def coerce_compared_value(self, op, value): ... + +class Emulated: + def adapt_to_emulated(self, impltype, **kw): ... + def adapt(self, impltype, **kw): ... + +class NativeForEmulated: + @classmethod + def adapt_native_to_emulated(cls, impl, **kw): ... 
+ @classmethod + def adapt_emulated_to_native(cls, impl, **kw): ... + +class TypeDecorator(ExternalType, SchemaEventTarget, TypeEngine): + __visit_name__: str + impl: Any + def __init__(self, *args, **kwargs) -> None: ... + coerce_to_is_types: Any + + class Comparator(TypeEngine.Comparator[Any]): + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... + + @property + def comparator_factory(self): ... + def type_engine(self, dialect): ... + def load_dialect_impl(self, dialect): ... + def __getattr__(self, key: str): ... + def process_literal_param(self, value, dialect) -> None: ... + def process_bind_param(self, value, dialect) -> None: ... + def process_result_value(self, value, dialect) -> None: ... + def literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + def bind_expression(self, bindparam): ... + def column_expression(self, column): ... + def coerce_compared_value(self, op, value): ... + def copy(self, **kw): ... + def get_dbapi_type(self, dbapi): ... + def compare_values(self, x, y): ... + @property + def sort_key_function(self): ... + +class Variant(TypeDecorator): + cache_ok: bool + impl: Any + mapping: Any + def __init__(self, base, mapping) -> None: ... + def coerce_compared_value(self, operator, value): ... + def load_dialect_impl(self, dialect): ... + def with_variant(self, type_, dialect_name): ... + @property + def comparator_factory(self): ... + +def to_instance(typeobj, *arg, **kw): ... +def adapt_type(typeobj, colspecs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/util.pyi new file mode 100644 index 00000000..8bf4c57f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/util.pyi @@ -0,0 +1,104 @@ +from _typeshed import Incomplete +from typing import Any + +from . import visitors + +join_condition: Any + +def find_join_source(clauses, join_to): ... +def find_left_clause_that_matches_given(clauses, join_from): ... +def find_left_clause_to_join_from(clauses, join_to, onclause): ... +def visit_binary_product(fn, expr) -> None: ... +def find_tables( + clause, + check_columns: bool = ..., + include_aliases: bool = ..., + include_joins: bool = ..., + include_selects: bool = ..., + include_crud: bool = ..., +): ... +def unwrap_order_by(clause): ... +def unwrap_label_reference(element): ... +def expand_column_list_from_order_by(collist, order_by): ... +def clause_is_present(clause, search): ... +def tables_from_leftmost(clause) -> None: ... +def surface_selectables(clause) -> None: ... +def surface_selectables_only(clause) -> None: ... +def extract_first_column_annotation(column, annotation_name): ... +def selectables_overlap(left, right): ... +def bind_values(clause): ... + +class _repr_base: + max_chars: Any + def trunc(self, value): ... + +class _repr_row(_repr_base): + row: Any + max_chars: Any + def __init__(self, row, max_chars: int = ...) -> None: ... + +class _repr_params(_repr_base): + params: Any + ismulti: Any + batches: Any + max_chars: Any + def __init__(self, params, batches, max_chars: int = ..., ismulti: Incomplete | None = ...) -> None: ... + +def adapt_criterion_to_null(crit, nulls): ... +def splice_joins(left, right, stop_on: Incomplete | None = ...): ... 
+def reduce_columns(columns, *clauses, **kw): ... +def criterion_as_pairs( + expression, + consider_as_foreign_keys: Incomplete | None = ..., + consider_as_referenced_keys: Incomplete | None = ..., + any_operator: bool = ..., +): ... + +class ClauseAdapter(visitors.ReplacingExternalTraversal): + __traverse_options__: Any + selectable: Any + include_fn: Any + exclude_fn: Any + equivalents: Any + adapt_on_names: Any + adapt_from_selectables: Any + def __init__( + self, + selectable, + equivalents: Incomplete | None = ..., + include_fn: Incomplete | None = ..., + exclude_fn: Incomplete | None = ..., + adapt_on_names: bool = ..., + anonymize_labels: bool = ..., + adapt_from_selectables: Incomplete | None = ..., + ) -> None: ... + def replace(self, col, _include_singleton_constants: bool = ...): ... + +class ColumnAdapter(ClauseAdapter): + columns: Any + adapt_required: Any + allow_label_resolve: Any + def __init__( + self, + selectable, + equivalents: Incomplete | None = ..., + adapt_required: bool = ..., + include_fn: Incomplete | None = ..., + exclude_fn: Incomplete | None = ..., + adapt_on_names: bool = ..., + allow_label_resolve: bool = ..., + anonymize_labels: bool = ..., + adapt_from_selectables: Incomplete | None = ..., + ) -> None: ... + + class _IncludeExcludeMapping: + parent: Any + columns: Any + def __init__(self, parent, columns) -> None: ... + def __getitem__(self, key): ... + + def wrap(self, adapter): ... + def traverse(self, obj): ... + adapt_clause: Any + adapt_list: Any + def adapt_check_present(self, col): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/visitors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/visitors.pyi new file mode 100644 index 00000000..c3dd4479 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/sql/visitors.pyi @@ -0,0 +1,86 @@ +from typing import Any + +class TraversibleType(type): + def __init__(cls, clsname, bases, clsdict) -> None: ... + +class Traversible: + def __class_getitem__(cls, key): ... + def get_children(self, omit_attrs=..., **kw): ... + +class _InternalTraversalType(type): + def __init__(cls, clsname, bases, clsdict) -> None: ... + +class InternalTraversal: + def dispatch(self, visit_symbol): ... + def run_generated_dispatch(self, target, internal_dispatch, generate_dispatcher_name): ... + def generate_dispatch(self, target_cls, internal_dispatch, generate_dispatcher_name): ... 
+ dp_has_cache_key: Any + dp_has_cache_key_list: Any + dp_clauseelement: Any + dp_fromclause_canonical_column_collection: Any + dp_clauseelement_tuples: Any + dp_clauseelement_list: Any + dp_clauseelement_tuple: Any + dp_executable_options: Any + dp_with_context_options: Any + dp_fromclause_ordered_set: Any + dp_string: Any + dp_string_list: Any + dp_anon_name: Any + dp_boolean: Any + dp_operator: Any + dp_type: Any + dp_plain_dict: Any + dp_dialect_options: Any + dp_string_clauseelement_dict: Any + dp_string_multi_dict: Any + dp_annotations_key: Any + dp_plain_obj: Any + dp_named_ddl_element: Any + dp_prefix_sequence: Any + dp_table_hint_list: Any + dp_setup_join_tuple: Any + dp_memoized_select_entities: Any + dp_statement_hint_list: Any + dp_unknown_structure: Any + dp_dml_ordered_values: Any + dp_dml_values: Any + dp_dml_multi_values: Any + dp_propagate_attrs: Any + +class ExtendedInternalTraversal(InternalTraversal): + dp_ignore: Any + dp_inspectable: Any + dp_multi: Any + dp_multi_list: Any + dp_has_cache_key_tuples: Any + dp_inspectable_list: Any + +class ExternalTraversal: + __traverse_options__: Any + def traverse_single(self, obj, **kw): ... + def iterate(self, obj): ... + def traverse(self, obj): ... + @property + def visitor_iterator(self) -> None: ... + def chain(self, visitor): ... + +class CloningExternalTraversal(ExternalTraversal): + def copy_and_process(self, list_): ... + def traverse(self, obj): ... + +class ReplacingExternalTraversal(CloningExternalTraversal): + def replace(self, elem) -> None: ... + def traverse(self, obj): ... + +Visitable = Traversible +VisitableType = TraversibleType +ClauseVisitor = ExternalTraversal +CloningVisitor = CloningExternalTraversal +ReplacingCloningVisitor = ReplacingExternalTraversal + +def iterate(obj, opts=...) -> None: ... +def traverse_using(iterator, obj, visitors): ... +def traverse(obj, opts, visitors): ... +def cloned_traverse(obj, opts, visitors): ... +def replacement_traverse(obj, opts, replace): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/__init__.pyi new file mode 100644 index 00000000..a27e338d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/__init__.pyi @@ -0,0 +1,74 @@ +from . 
import config as config, mock as mock +from .assertions import ( + AssertsCompiledSQL as AssertsCompiledSQL, + AssertsExecutionResults as AssertsExecutionResults, + ComparesTables as ComparesTables, + assert_raises as assert_raises, + assert_raises_context_ok as assert_raises_context_ok, + assert_raises_message as assert_raises_message, + assert_raises_message_context_ok as assert_raises_message_context_ok, + emits_warning as emits_warning, + emits_warning_on as emits_warning_on, + eq_ as eq_, + eq_ignore_whitespace as eq_ignore_whitespace, + eq_regex as eq_regex, + expect_deprecated as expect_deprecated, + expect_deprecated_20 as expect_deprecated_20, + expect_raises as expect_raises, + expect_raises_message as expect_raises_message, + expect_warnings as expect_warnings, + in_ as in_, + is_ as is_, + is_false as is_false, + is_instance_of as is_instance_of, + is_none as is_none, + is_not as is_not, + is_not_ as is_not_, + is_not_none as is_not_none, + is_true as is_true, + le_ as le_, + ne_ as ne_, + not_in as not_in, + not_in_ as not_in_, + startswith_ as startswith_, + uses_deprecated as uses_deprecated, +) +from .config import ( + async_test as async_test, + combinations as combinations, + combinations_list as combinations_list, + db as db, + fixture as fixture, +) +from .exclusions import ( + db_spec as db_spec, + exclude as exclude, + fails as fails, + fails_if as fails_if, + fails_on as fails_on, + fails_on_everything_except as fails_on_everything_except, + future as future, + only_if as only_if, + only_on as only_on, + skip as skip, + skip_if as skip_if, +) +from .schema import eq_clause_element as eq_clause_element, eq_type_affinity as eq_type_affinity +from .util import ( + adict as adict, + fail as fail, + flag_combinations as flag_combinations, + force_drop_names as force_drop_names, + lambda_combinations as lambda_combinations, + metadata_fixture as metadata_fixture, + provide_metadata as provide_metadata, + resolve_lambda as resolve_lambda, + rowset as rowset, + run_as_contextmanager as run_as_contextmanager, + teardown_events as teardown_events, +) +from .warnings import assert_warnings as assert_warnings, warn_test_suite as warn_test_suite + +def against(*queries): ... + +crashes = skip diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/assertions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/assertions.pyi new file mode 100644 index 00000000..e8366e90 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/assertions.pyi @@ -0,0 +1,86 @@ +from _typeshed import Incomplete + +def expect_warnings(*messages, **kw): ... +def expect_warnings_on(db, *messages, **kw) -> None: ... +def emits_warning(*messages): ... +def expect_deprecated(*messages, **kw): ... +def expect_deprecated_20(*messages, **kw): ... +def emits_warning_on(db, *messages): ... +def uses_deprecated(*messages): ... +def global_cleanup_assertions() -> None: ... +def eq_regex(a, b, msg: Incomplete | None = ...) -> None: ... +def eq_(a, b, msg: Incomplete | None = ...) -> None: ... +def ne_(a, b, msg: Incomplete | None = ...) -> None: ... +def le_(a, b, msg: Incomplete | None = ...) -> None: ... +def is_instance_of(a, b, msg: Incomplete | None = ...) -> None: ... +def is_none(a, msg: Incomplete | None = ...) -> None: ... +def is_not_none(a, msg: Incomplete | None = ...) -> None: ... 
+def is_true(a, msg: Incomplete | None = ...) -> None: ... +def is_false(a, msg: Incomplete | None = ...) -> None: ... +def is_(a, b, msg: Incomplete | None = ...) -> None: ... +def is_not(a, b, msg: Incomplete | None = ...) -> None: ... + +is_not_ = is_not + +def in_(a, b, msg: Incomplete | None = ...) -> None: ... +def not_in(a, b, msg: Incomplete | None = ...) -> None: ... + +not_in_ = not_in + +def startswith_(a, fragment, msg: Incomplete | None = ...) -> None: ... +def eq_ignore_whitespace(a, b, msg: Incomplete | None = ...) -> None: ... +def assert_raises(except_cls, callable_, *args, **kw): ... +def assert_raises_context_ok(except_cls, callable_, *args, **kw): ... +def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): ... +def assert_raises_message_context_ok(except_cls, msg, callable_, *args, **kwargs): ... + +class _ErrorContainer: + error: Incomplete + +def expect_raises(except_cls, check_context: bool = ...): ... +def expect_raises_message(except_cls, msg, check_context: bool = ...): ... + +class AssertsCompiledSQL: + test_statement: Incomplete + supports_execution: Incomplete + def assert_compile( + self, + clause, + result, + params: Incomplete | None = ..., + checkparams: Incomplete | None = ..., + for_executemany: bool = ..., + check_literal_execute: Incomplete | None = ..., + check_post_param: Incomplete | None = ..., + dialect: Incomplete | None = ..., + checkpositional: Incomplete | None = ..., + check_prefetch: Incomplete | None = ..., + use_default_dialect: bool = ..., + allow_dialect_select: bool = ..., + supports_default_values: bool = ..., + supports_default_metavalue: bool = ..., + literal_binds: bool = ..., + render_postcompile: bool = ..., + schema_translate_map: Incomplete | None = ..., + render_schema_translate: bool = ..., + default_schema_name: Incomplete | None = ..., + from_linting: bool = ..., + check_param_order: bool = ..., + ) -> None: ... + +class ComparesTables: + def assert_tables_equal(self, table, reflected_table, strict_types: bool = ...) -> None: ... + def assert_types_base(self, c1, c2) -> None: ... + +class AssertsExecutionResults: + def assert_result(self, result, class_, *objects) -> None: ... + def assert_list(self, result, class_, list_) -> None: ... + def assert_row(self, class_, rowobj, desc) -> None: ... + def assert_unordered_result(self, result, cls, *expected): ... + def sql_execution_asserter(self, db: Incomplete | None = ...): ... + def assert_sql_execution(self, db, callable_, *rules): ... + def assert_sql(self, db, callable_, rules): ... + def assert_sql_count(self, db, callable_, count) -> None: ... + def assert_multiple_sql_count(self, dbs, callable_, counts): ... + def assert_execution(self, db, *rules) -> None: ... + def assert_statement_count(self, db, count): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/assertsql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/assertsql.pyi new file mode 100644 index 00000000..c0239180 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/assertsql.pyi @@ -0,0 +1,82 @@ +from _typeshed import Incomplete +from typing import Any + +class AssertRule: + is_consumed: bool + errormessage: Any + consume_statement: bool + def process_statement(self, execute_observed) -> None: ... + def no_more_statements(self) -> None: ... + +class SQLMatchRule(AssertRule): ... 
+ +class CursorSQL(SQLMatchRule): + statement: Any + params: Any + consume_statement: Any + def __init__(self, statement, params: Incomplete | None = ..., consume_statement: bool = ...) -> None: ... + errormessage: Any + is_consumed: bool + def process_statement(self, execute_observed) -> None: ... + +class CompiledSQL(SQLMatchRule): + statement: Any + params: Any + dialect: Any + def __init__(self, statement, params: Incomplete | None = ..., dialect: str = ...) -> None: ... + is_consumed: bool + errormessage: Any + def process_statement(self, execute_observed) -> None: ... + +class RegexSQL(CompiledSQL): + regex: Any + orig_regex: Any + params: Any + dialect: Any + def __init__(self, regex, params: Incomplete | None = ..., dialect: str = ...) -> None: ... + +class DialectSQL(CompiledSQL): ... + +class CountStatements(AssertRule): + count: Any + def __init__(self, count) -> None: ... + def process_statement(self, execute_observed) -> None: ... + def no_more_statements(self) -> None: ... + +class AllOf(AssertRule): + rules: Any + def __init__(self, *rules) -> None: ... + is_consumed: bool + errormessage: Any + def process_statement(self, execute_observed) -> None: ... + +class EachOf(AssertRule): + rules: Any + def __init__(self, *rules) -> None: ... + errormessage: Any + is_consumed: bool + def process_statement(self, execute_observed) -> None: ... + def no_more_statements(self) -> None: ... + +class Conditional(EachOf): + def __init__(self, condition, rules, else_rules) -> None: ... + +class Or(AllOf): + is_consumed: bool + errormessage: Any + def process_statement(self, execute_observed) -> None: ... + +class SQLExecuteObserved: + context: Any + clauseelement: Any + parameters: Any + statements: Any + def __init__(self, context, clauseelement, multiparams, params) -> None: ... + +class SQLCursorExecuteObserved: ... + +class SQLAsserter: + accumulated: Any + def assert_(self, *rules) -> None: ... + +def assert_engine(engine) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/asyncio.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/asyncio.pyi new file mode 100644 index 00000000..7455e47b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/asyncio.pyi @@ -0,0 +1 @@ +ENABLE_ASYNCIO: bool diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/config.pyi new file mode 100644 index 00000000..db67aa9c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/config.pyi @@ -0,0 +1,47 @@ +from typing import Any + +requirements: Any +db: Any +db_url: Any +db_opts: Any +file_config: Any +test_schema: Any +test_schema_2: Any +any_async: bool +ident: str + +def combinations(*comb, **kw): ... +def combinations_list(arg_iterable, **kw): ... +def fixture(*arg, **kw): ... +def get_current_test_name(): ... +def mark_base_test_class(): ... + +class Config: + db: Any + db_opts: Any + options: Any + file_config: Any + test_schema: str + test_schema_2: str + is_async: Any + def __init__(self, db, db_opts, options, file_config) -> None: ... + @classmethod + def register(cls, db, db_opts, options, file_config): ... 
+ @classmethod + def set_as_current(cls, config, namespace) -> None: ... + @classmethod + def push_engine(cls, db, namespace) -> None: ... + @classmethod + def push(cls, config, namespace) -> None: ... + @classmethod + def pop(cls, namespace) -> None: ... + @classmethod + def reset(cls, namespace) -> None: ... + @classmethod + def all_configs(cls): ... + @classmethod + def all_dbs(cls) -> None: ... + def skip_test(self, msg) -> None: ... + +def skip_test(msg) -> None: ... +def async_test(fn): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/engines.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/engines.pyi new file mode 100644 index 00000000..4f0abf91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/engines.pyi @@ -0,0 +1,83 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any, overload +from typing_extensions import Literal + +from ..engine import Engine +from ..engine.url import URL +from ..ext.asyncio import AsyncEngine + +class ConnectionKiller: + proxy_refs: Any + testing_engines: Any + dbapi_connections: Any + def add_pool(self, pool) -> None: ... + def add_engine(self, engine, scope) -> None: ... + def rollback_all(self) -> None: ... + def checkin_all(self) -> None: ... + def close_all(self) -> None: ... + def prepare_for_drop_tables(self, connection) -> None: ... + def after_test(self) -> None: ... + def after_test_outside_fixtures(self, test) -> None: ... + def stop_test_class_inside_fixtures(self) -> None: ... + def stop_test_class_outside_fixtures(self) -> None: ... + def final_cleanup(self) -> None: ... + def assert_all_closed(self) -> None: ... + +testing_reaper: Any + +def assert_conns_closed(fn, *args, **kw) -> None: ... +def rollback_open_connections(fn, *args, **kw) -> None: ... +def close_first(fn, *args, **kw) -> None: ... +def close_open_connections(fn, *args, **kw) -> None: ... +def all_dialects(exclude: Incomplete | None = ...) -> None: ... + +class ReconnectFixture: + dbapi: Any + connections: Any + is_stopped: bool + def __init__(self, dbapi) -> None: ... + def __getattr__(self, key: str): ... + def connect(self, *args, **kwargs): ... + def shutdown(self, stop: bool = ...) -> None: ... + def restart(self) -> None: ... + +def reconnecting_engine(url: Incomplete | None = ..., options: Incomplete | None = ...): ... +@overload +def testing_engine( # type: ignore[misc] + url: URL | str | None = ..., + options: Mapping[str, Any] | None = ..., + future: bool | None = ..., + asyncio: Literal[False] = ..., + transfer_staticpool: bool = ..., +) -> Engine: ... +@overload +def testing_engine( + url: URL | str | None = ..., + options: Mapping[str, Any] | None = ..., + future: bool | None = ..., + asyncio: Literal[True] = ..., + transfer_staticpool: bool = ..., +) -> AsyncEngine: ... +def mock_engine(dialect_name: Incomplete | None = ...): ... + +class DBAPIProxyCursor: + engine: Any + connection: Any + cursor: Any + def __init__(self, engine, conn, *args, **kwargs) -> None: ... + def execute(self, stmt, parameters: Incomplete | None = ..., **kw): ... + def executemany(self, stmt, params, **kw): ... + def __iter__(self): ... + def __getattr__(self, key: str): ... + +class DBAPIProxyConnection: + conn: Any + engine: Any + cursor_cls: Any + def __init__(self, engine, cursor_cls) -> None: ... + def cursor(self, *args, **kwargs): ... 
+ def close(self) -> None: ... + def __getattr__(self, key: str): ... + +def proxying_engine(conn_cls=..., cursor_cls=...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/entities.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/entities.pyi new file mode 100644 index 00000000..51428895 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/entities.pyi @@ -0,0 +1,9 @@ +class BasicEntity: + def __init__(self, **kw) -> None: ... + +class ComparableMixin: + def __ne__(self, other): ... + def __eq__(self, other): ... + +class ComparableEntity(ComparableMixin, BasicEntity): + def __hash__(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/exclusions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/exclusions.pyi new file mode 100644 index 00000000..7041a026 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/exclusions.pyi @@ -0,0 +1,80 @@ +from _typeshed import Incomplete +from typing import Any + +def skip_if(predicate, reason: Incomplete | None = ...): ... +def fails_if(predicate, reason: Incomplete | None = ...): ... + +class compound: + fails: Any + skips: Any + tags: Any + def __add__(self, other): ... + def as_skips(self): ... + def add(self, *others): ... + def not_(self): ... + @property + def enabled(self): ... + def enabled_for_config(self, config): ... + def matching_config_reasons(self, config): ... + def include_test(self, include_tags, exclude_tags): ... + def __call__(self, fn): ... + def fail_if(self) -> None: ... + +def requires_tag(tagname): ... +def tags(tagnames): ... +def only_if(predicate, reason: Incomplete | None = ...): ... +def succeeds_if(predicate, reason: Incomplete | None = ...): ... + +class Predicate: + @classmethod + def as_predicate(cls, predicate, description: Incomplete | None = ...): ... + +class BooleanPredicate(Predicate): + value: Any + description: Any + def __init__(self, value, description: Incomplete | None = ...) -> None: ... + def __call__(self, config): ... + +class SpecPredicate(Predicate): + db: Any + op: Any + spec: Any + description: Any + def __init__( + self, db, op: Incomplete | None = ..., spec: Incomplete | None = ..., description: Incomplete | None = ... + ) -> None: ... + def __call__(self, config): ... + +class LambdaPredicate(Predicate): + lambda_: Any + args: Any + kw: Any + description: Any + def __init__( + self, lambda_, description: Incomplete | None = ..., args: Incomplete | None = ..., kw: Incomplete | None = ... + ): ... + def __call__(self, config): ... + +class NotPredicate(Predicate): + predicate: Any + description: Any + def __init__(self, predicate, description: Incomplete | None = ...) -> None: ... + def __call__(self, config): ... + +class OrPredicate(Predicate): + predicates: Any + description: Any + def __init__(self, predicates, description: Incomplete | None = ...) -> None: ... + def __call__(self, config): ... + +def db_spec(*dbs): ... +def open(): ... +def closed(): ... +def fails(reason: Incomplete | None = ...): ... +def future(fn, *arg): ... +def fails_on(db, reason: Incomplete | None = ...): ... +def fails_on_everything_except(*dbs): ... +def skip(db, reason: Incomplete | None = ...): ... 
+def only_on(dbs, reason: Incomplete | None = ...): ... +def exclude(db, op, spec, reason: Incomplete | None = ...): ... +def against(config, *queries): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/fixtures.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/fixtures.pyi new file mode 100644 index 00000000..817046c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/fixtures.pyi @@ -0,0 +1,84 @@ +from _typeshed import Incomplete +from typing import Any + +from . import assertions +from .entities import ComparableEntity as ComparableEntity + +class TestBase: + __requires__: Any + __unsupported_on__: Any + __only_on__: Any + __skip_if__: Any + __leave_connections_for_teardown__: bool + def assert_(self, val, msg: Incomplete | None = ...) -> None: ... + def connection_no_trans(self) -> None: ... + def connection(self) -> None: ... + def registry(self, metadata) -> None: ... + def future_connection(self, future_engine, connection) -> None: ... + def future_engine(self) -> None: ... + def testing_engine(self): ... + def async_testing_engine(self, testing_engine): ... + def metadata(self, request) -> None: ... + def trans_ctx_manager_fixture(self, request, metadata): ... + +class FutureEngineMixin: ... + +class TablesTest(TestBase): + run_setup_bind: str + run_define_tables: str + run_create_tables: str + run_inserts: str + run_deletes: str + run_dispose_bind: Any + bind: Any + tables: Any + other: Any + sequences: Any + @property + def tables_test_metadata(self): ... + @classmethod + def setup_bind(cls): ... + @classmethod + def dispose_bind(cls, bind) -> None: ... + @classmethod + def define_tables(cls, metadata) -> None: ... + @classmethod + def fixtures(cls): ... + @classmethod + def insert_data(cls, connection) -> None: ... + def sql_count_(self, count, fn) -> None: ... + def sql_eq_(self, callable_, statements) -> None: ... + +class NoCache: ... + +class RemovesEvents: + def event_listen(self, target, name, fn, **kw) -> None: ... + +def fixture_session(**kw): ... +def stop_test_class_inside_fixtures(cls) -> None: ... +def after_test() -> None: ... + +class ORMTest(TestBase): ... + +class MappedTest(TablesTest, assertions.AssertsExecutionResults): + run_setup_classes: str + run_setup_mappers: str + classes: Any + @classmethod + def setup_classes(cls) -> None: ... + @classmethod + def setup_mappers(cls) -> None: ... + +class DeclarativeMappedTest(MappedTest): + run_setup_classes: str + run_setup_mappers: str + +class ComputedReflectionFixtureTest(TablesTest): + run_inserts: Any + run_deletes: Any + __backend__: bool + __requires__: Any + regexp: Any + def normalize(self, text): ... + @classmethod + def define_tables(cls, metadata) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/mock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/mock.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/pickleable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/pickleable.pyi new file mode 100644 index 00000000..a66684a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/pickleable.pyi @@ -0,0 +1,63 @@ +from _typeshed import Incomplete +from typing import Any + +from . import fixtures + +class User(fixtures.ComparableEntity): ... +class Order(fixtures.ComparableEntity): ... +class Dingaling(fixtures.ComparableEntity): ... +class EmailUser(User): ... +class Address(fixtures.ComparableEntity): ... +class Child1(fixtures.ComparableEntity): ... +class Child2(fixtures.ComparableEntity): ... +class Parent(fixtures.ComparableEntity): ... + +class Screen: + obj: Any + parent: Any + def __init__(self, obj, parent: Incomplete | None = ...) -> None: ... + +class Foo: + data: str + stuff: Any + moredata: Any + def __init__(self, moredata, stuff: str = ...) -> None: ... + __hash__ = object.__hash__ + def __eq__(self, other): ... + +class Bar: + x: Any + y: Any + def __init__(self, x, y) -> None: ... + __hash__ = object.__hash__ + def __eq__(self, other): ... + +class OldSchool: + x: Any + y: Any + def __init__(self, x, y) -> None: ... + def __eq__(self, other): ... + +class OldSchoolWithoutCompare: + x: Any + y: Any + def __init__(self, x, y) -> None: ... + +class BarWithoutCompare: + x: Any + y: Any + def __init__(self, x, y) -> None: ... + +class NotComparable: + data: Any + def __init__(self, data) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + +class BrokenComparable: + data: Any + def __init__(self, data) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/bootstrap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/bootstrap.pyi new file mode 100644 index 00000000..5c554d0d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/bootstrap.pyi @@ -0,0 +1,6 @@ +from typing import Any + +bootstrap_file: Any +to_bootstrap: Any + +def load_file_as_module(name): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/plugin_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/plugin_base.pyi new file mode 100644 index 00000000..e0cdfbe5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/plugin_base.pyi @@ -0,0 +1,63 @@ +import abc +from typing import Any + +bootstrapped_as_sqlalchemy: bool +log: Any +py3k: Any +ABC = abc.ABC + +fixtures: Any +engines: Any +exclusions: Any +warnings: Any +profiling: Any +provision: Any +assertions: Any +requirements: Any +config: Any +testing: Any +util: Any +file_config: Any +include_tags: Any +exclude_tags: Any +options: Any + +def setup_options(make_option) -> None: ... +def configure_follower(follower_ident) -> None: ... +def memoize_important_follower_config(dict_) -> None: ... +def restore_important_follower_config(dict_) -> None: ... +def read_config() -> None: ... +def pre_begin(opt) -> None: ... +def set_coverage_flag(value) -> None: ... +def post_begin() -> None: ... + +pre_configure: Any +post_configure: Any + +def pre(fn): ... +def post(fn): ... +def want_class(name, cls): ... +def want_method(cls, fn): ... +def generate_sub_tests(cls, module) -> None: ... +def start_test_class_outside_fixtures(cls) -> None: ... +def stop_test_class(cls) -> None: ... +def stop_test_class_outside_fixtures(cls) -> None: ... +def final_process_cleanup() -> None: ... +def before_test(test, test_module_name, test_class, test_name) -> None: ... +def after_test(test) -> None: ... +def after_test_fixtures(test) -> None: ... + +class FixtureFunctions(ABC, metaclass=abc.ABCMeta): + @abc.abstractmethod + def skip_test_exception(self, *arg, **kw): ... + @abc.abstractmethod + def combinations(self, *args, **kw): ... + @abc.abstractmethod + def param_ident(self, *args, **kw): ... + @abc.abstractmethod + def fixture(self, *arg, **kw): ... + def get_current_test_name(self) -> None: ... + @abc.abstractmethod + def mark_base_test_class(self): ... + +def set_fixture_functions(fixture_fn_class) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/pytestplugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/pytestplugin.pyi new file mode 100644 index 00000000..bfdd0245 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/pytestplugin.pyi @@ -0,0 +1,33 @@ +from typing_extensions import Final + +from . import plugin_base + +py2k: Final = False + +def pytest_addoption(parser) -> None: ... +def pytest_configure(config) -> None: ... + +DUMP_PYANNOTATE: bool + +def collect_types_fixture() -> None: ... +def pytest_sessionstart(session) -> None: ... +def pytest_sessionfinish(session) -> None: ... +def pytest_collection_finish(session): ... +def pytest_collection_modifyitems(session, config, items): ... +def pytest_pycollect_makeitem(collector, name, obj): ... +def pytest_runtest_setup(item) -> None: ... +def pytest_runtest_teardown(item, nextitem) -> None: ... +def pytest_runtest_call(item) -> None: ... +def pytest_runtest_logreport(report) -> None: ... +def setup_class_methods(request) -> None: ... +def setup_test_methods(request) -> None: ... +def getargspec(fn): ... 
+ +class PytestFixtureFunctions(plugin_base.FixtureFunctions): + def skip_test_exception(self, *arg, **kw): ... + def mark_base_test_class(self): ... + def combinations(self, *arg_sets, **kw): ... + def param_ident(self, *parameters): ... + def fixture(self, *arg, **kw): ... + def get_current_test_name(self): ... + def async_test(self, fn): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/reinvent_fixtures_py2k.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/reinvent_fixtures_py2k.pyi new file mode 100644 index 00000000..83f3da86 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/plugin/reinvent_fixtures_py2k.pyi @@ -0,0 +1,6 @@ +def add_fixture(fn, fixture) -> None: ... +def scan_for_fixtures_to_use_for_class(item) -> None: ... +def run_class_fixture_setup(request) -> None: ... +def run_class_fixture_teardown(request) -> None: ... +def run_fn_fixture_setup(request) -> None: ... +def run_fn_fixture_teardown(request) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/profiling.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/profiling.pyi new file mode 100644 index 00000000..5d3f8927 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/profiling.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from typing import Any + +class ProfileStatsFile: + force_write: Any + write: Any + fname: Any + short_fname: Any + data: Any + dump: Any + sort: Any + def __init__(self, filename, sort: str = ..., dump: Incomplete | None = ...): ... + @property + def platform_key(self): ... + def has_stats(self): ... + def result(self, callcount): ... + def reset_count(self) -> None: ... + def replace(self, callcount) -> None: ... + +def function_call_count(variance: float = ..., times: int = ..., warmup: int = ...): ... +def count_functions(variance: float = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/provision.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/provision.pyi new file mode 100644 index 00000000..a6c48328 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/provision.pyi @@ -0,0 +1,33 @@ +from typing import Any + +log: Any +FOLLOWER_IDENT: Any + +class register: + fns: Any + @classmethod + def init(cls, fn): ... + def for_db(self, *dbnames): ... + def __call__(self, cfg, *arg): ... + +def create_follower_db(follower_ident) -> None: ... +def setup_config(db_url, options, file_config, follower_ident): ... +def drop_follower_db(follower_ident) -> None: ... +def generate_db_urls(db_urls, extra_drivers) -> None: ... +def generate_driver_url(url, driver, query_str): ... +def drop_all_schema_objects_pre_tables(cfg, eng) -> None: ... +def drop_all_schema_objects_post_tables(cfg, eng) -> None: ... +def drop_all_schema_objects(cfg, eng) -> None: ... +def create_db(cfg, eng, ident) -> None: ... +def drop_db(cfg, eng, ident) -> None: ... +def update_db_opts(cfg, db_opts) -> None: ... +def post_configure_engine(url, engine, follower_ident) -> None: ... 
+def follower_url_from_main(url, ident): ... +def configure_follower(cfg, ident) -> None: ... +def run_reap_dbs(url, ident) -> None: ... +def reap_dbs(idents_file) -> None: ... +def temp_table_keyword_args(cfg, eng) -> None: ... +def prepare_for_drop_tables(config, connection) -> None: ... +def stop_test_class_outside_fixtures(config, db, testcls) -> None: ... +def get_temp_table_name(cfg, eng, base_name): ... +def set_default_schema_on_connection(cfg, dbapi_connection, schema_name) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/requirements.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/requirements.pyi new file mode 100644 index 00000000..5f0a97c1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/requirements.pyi @@ -0,0 +1,381 @@ +class Requirements: ... + +class SuiteRequirements(Requirements): + @property + def create_table(self): ... + @property + def drop_table(self): ... + @property + def table_ddl_if_exists(self): ... + @property + def index_ddl_if_exists(self): ... + @property + def foreign_keys(self): ... + @property + def table_value_constructor(self): ... + @property + def standard_cursor_sql(self): ... + @property + def on_update_cascade(self): ... + @property + def non_updating_cascade(self): ... + @property + def deferrable_fks(self): ... + @property + def on_update_or_deferrable_fks(self): ... + @property + def queue_pool(self): ... + @property + def self_referential_foreign_keys(self): ... + @property + def foreign_key_ddl(self): ... + @property + def named_constraints(self): ... + @property + def implicitly_named_constraints(self): ... + @property + def subqueries(self): ... + @property + def offset(self): ... + @property + def bound_limit_offset(self): ... + @property + def sql_expression_limit_offset(self): ... + @property + def parens_in_union_contained_select_w_limit_offset(self): ... + @property + def parens_in_union_contained_select_wo_limit_offset(self): ... + @property + def boolean_col_expressions(self): ... + @property + def nullable_booleans(self): ... + @property + def nullsordering(self): ... + @property + def standalone_binds(self): ... + @property + def standalone_null_binds_whereclause(self): ... + @property + def intersect(self): ... + @property + def except_(self): ... + @property + def window_functions(self): ... + @property + def ctes(self): ... + @property + def ctes_with_update_delete(self): ... + @property + def ctes_on_dml(self): ... + @property + def autoincrement_insert(self): ... + @property + def fetch_rows_post_commit(self): ... + @property + def group_by_complex_expression(self): ... + @property + def sane_rowcount(self): ... + @property + def sane_multi_rowcount(self): ... + @property + def sane_rowcount_w_returning(self): ... + @property + def empty_inserts(self): ... + @property + def empty_inserts_executemany(self): ... + @property + def insert_from_select(self): ... + @property + def full_returning(self): ... + @property + def insert_executemany_returning(self): ... + @property + def returning(self): ... + @property + def tuple_in(self): ... + @property + def tuple_in_w_empty(self): ... + @property + def duplicate_names_in_cursor_description(self): ... + @property + def denormalized_names(self): ... + @property + def multivalues_inserts(self): ... + @property + def implements_get_lastrowid(self): ... 
+ @property + def emulated_lastrowid(self): ... + @property + def emulated_lastrowid_even_with_sequences(self): ... + @property + def dbapi_lastrowid(self): ... + @property + def views(self): ... + @property + def schemas(self): ... + @property + def cross_schema_fk_reflection(self): ... + @property + def foreign_key_constraint_name_reflection(self): ... + @property + def implicit_default_schema(self): ... + @property + def default_schema_name_switch(self): ... + @property + def server_side_cursors(self): ... + @property + def sequences(self): ... + @property + def no_sequences(self): ... + @property + def sequences_optional(self): ... + @property + def supports_lastrowid(self): ... + @property + def no_lastrowid_support(self): ... + @property + def reflects_pk_names(self): ... + @property + def table_reflection(self): ... + @property + def reflect_tables_no_columns(self): ... + @property + def comment_reflection(self): ... + @property + def view_column_reflection(self): ... + @property + def view_reflection(self): ... + @property + def schema_reflection(self): ... + @property + def primary_key_constraint_reflection(self): ... + @property + def foreign_key_constraint_reflection(self): ... + @property + def foreign_key_constraint_option_reflection_ondelete(self): ... + @property + def fk_constraint_option_reflection_ondelete_restrict(self): ... + @property + def fk_constraint_option_reflection_ondelete_noaction(self): ... + @property + def foreign_key_constraint_option_reflection_onupdate(self): ... + @property + def fk_constraint_option_reflection_onupdate_restrict(self): ... + @property + def temp_table_reflection(self): ... + @property + def temp_table_reflect_indexes(self): ... + @property + def temp_table_names(self): ... + @property + def temporary_tables(self): ... + @property + def temporary_views(self): ... + @property + def index_reflection(self): ... + @property + def index_reflects_included_columns(self): ... + @property + def indexes_with_ascdesc(self): ... + @property + def indexes_with_expressions(self): ... + @property + def unique_constraint_reflection(self): ... + @property + def check_constraint_reflection(self): ... + @property + def duplicate_key_raises_integrity_error(self): ... + @property + def unbounded_varchar(self): ... + @property + def unicode_data(self): ... + @property + def unicode_ddl(self): ... + @property + def symbol_names_w_double_quote(self): ... + @property + def datetime_literals(self): ... + @property + def datetime(self): ... + @property + def datetime_microseconds(self): ... + @property + def timestamp_microseconds(self): ... + @property + def datetime_historic(self): ... + @property + def date(self): ... + @property + def date_coerces_from_datetime(self): ... + @property + def date_historic(self): ... + @property + def time(self): ... + @property + def time_microseconds(self): ... + @property + def binary_comparisons(self): ... + @property + def binary_literals(self): ... + @property + def autocommit(self): ... + @property + def isolation_level(self): ... + def get_isolation_levels(self, config) -> None: ... + @property + def json_type(self): ... + @property + def json_array_indexes(self): ... + @property + def json_index_supplementary_unicode_element(self): ... + @property + def legacy_unconditional_json_extract(self): ... + @property + def precision_numerics_general(self): ... + @property + def precision_numerics_enotation_small(self): ... + @property + def precision_numerics_enotation_large(self): ... 
+ @property + def precision_numerics_many_significant_digits(self): ... + @property + def cast_precision_numerics_many_significant_digits(self): ... + @property + def implicit_decimal_binds(self): ... + @property + def nested_aggregates(self): ... + @property + def recursive_fk_cascade(self): ... + @property + def precision_numerics_retains_significant_digits(self): ... + @property + def infinity_floats(self): ... + @property + def precision_generic_float_type(self): ... + @property + def floats_to_four_decimals(self): ... + @property + def fetch_null_from_numeric(self): ... + @property + def text_type(self): ... + @property + def empty_strings_varchar(self): ... + @property + def empty_strings_text(self): ... + @property + def expressions_against_unbounded_text(self): ... + @property + def selectone(self): ... + @property + def savepoints(self): ... + @property + def two_phase_transactions(self): ... + @property + def update_from(self): ... + @property + def delete_from(self): ... + @property + def update_where_target_in_subquery(self): ... + @property + def mod_operator_as_percent_sign(self): ... + @property + def percent_schema_names(self): ... + @property + def order_by_col_from_union(self): ... + @property + def order_by_label_with_expression(self): ... + @property + def order_by_collation(self): ... + def get_order_by_collation(self, config) -> None: ... + @property + def unicode_connections(self): ... + @property + def graceful_disconnects(self): ... + @property + def independent_connections(self): ... + @property + def skip_mysql_on_windows(self): ... + @property + def ad_hoc_engines(self): ... + @property + def no_windows(self): ... + @property + def timing_intensive(self): ... + @property + def memory_intensive(self): ... + @property + def threading_with_mock(self): ... + @property + def sqlalchemy2_stubs(self): ... + @property + def python2(self): ... + @property + def python3(self): ... + @property + def pep520(self): ... + @property + def insert_order_dicts(self): ... + @property + def python36(self): ... + @property + def python37(self): ... + @property + def dataclasses(self): ... + @property + def python38(self): ... + @property + def cpython(self): ... + @property + def patch_library(self): ... + @property + def non_broken_pickle(self): ... + @property + def predictable_gc(self): ... + @property + def no_coverage(self): ... + @property + def sqlite(self): ... + @property + def cextensions(self): ... + @property + def async_dialect(self): ... + @property + def greenlet(self): ... + @property + def computed_columns(self): ... + @property + def computed_columns_stored(self): ... + @property + def computed_columns_virtual(self): ... + @property + def computed_columns_default_persisted(self): ... + @property + def computed_columns_reflect_persisted(self): ... + @property + def supports_distinct_on(self): ... + @property + def supports_is_distinct_from(self): ... + @property + def identity_columns(self): ... + @property + def identity_columns_standard(self): ... + @property + def regexp_match(self): ... + @property + def regexp_replace(self): ... + @property + def fetch_first(self): ... + @property + def fetch_percent(self): ... + @property + def fetch_ties(self): ... + @property + def fetch_no_order_by(self): ... + @property + def fetch_offset_with_options(self): ... + @property + def fetch_expression(self): ... + @property + def autoincrement_without_sequence(self): ... + @property + def generic_classes(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/schema.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/schema.pyi new file mode 100644 index 00000000..dc8b6219 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/schema.pyi @@ -0,0 +1,16 @@ +from typing import Any + +def Table(*args, **kw): ... +def Column(*args, **kw): ... + +class eq_type_affinity: + target: Any + def __init__(self, target) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + +class eq_clause_element: + target: Any + def __init__(self, target) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/util.pyi new file mode 100644 index 00000000..0da8a388 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/util.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete +from typing import Any + +def non_refcount_gc_collect(*args) -> None: ... # only present on Python implementations with non-refcount gc +def gc_collect(generation: int = ...) -> None: ... +def lazy_gc() -> None: ... +def picklers(): ... +def random_choices(population, k: int = ...): ... +def round_decimal(value, prec): ... + +class RandomSet(set[Any]): + def __iter__(self): ... + def pop(self): ... + def union(self, other): ... + def difference(self, other): ... + def intersection(self, other): ... + def copy(self): ... + +def conforms_partial_ordering(tuples, sorted_elements): ... +def all_partial_orderings(tuples, elements): ... +def function_named(fn, name): ... +def run_as_contextmanager(ctx, fn, *arg, **kw): ... +def rowset(results): ... +def fail(msg) -> None: ... +def provide_metadata(fn, *args, **kw): ... +def flag_combinations(*combinations): ... +def lambda_combinations(lambda_arg_sets, **kw): ... +def resolve_lambda(__fn, **kw): ... +def metadata_fixture(ddl: str = ...): ... +def force_drop_names(*names): ... + +class adict(dict[Any, Any]): + def __getattribute__(self, key: str): ... + def __call__(self, *keys): ... + get_all: Any + +def drop_all_tables_from_metadata(metadata, engine_or_connection) -> None: ... +def drop_all_tables(engine, inspector, schema: Incomplete | None = ..., include_names: Incomplete | None = ...) -> None: ... +def teardown_events(event_cls): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/warnings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/warnings.pyi new file mode 100644 index 00000000..9aa4255c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/testing/warnings.pyi @@ -0,0 +1,7 @@ +from .. import exc as sa_exc + +class SATestSuiteWarning(sa_exc.SAWarning): ... + +def warn_test_suite(message) -> None: ... +def setup_filters() -> None: ... +def assert_warnings(fn, warning_msgs, regex: bool = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/types.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/types.pyi new file mode 100644 index 00000000..ee455334 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/types.pyi @@ -0,0 +1,106 @@ +from .sql.sqltypes import ( + ARRAY as ARRAY, + BIGINT as BIGINT, + BINARY as BINARY, + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + CLOB as CLOB, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INT as INT, + INTEGER as INTEGER, + JSON as JSON, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + BigInteger as BigInteger, + Boolean as Boolean, + Concatenable as Concatenable, + Date as Date, + DateTime as DateTime, + Enum as Enum, + Float as Float, + Indexable as Indexable, + Integer as Integer, + Interval as Interval, + LargeBinary as LargeBinary, + MatchType as MatchType, + NullType as NullType, + Numeric as Numeric, + PickleType as PickleType, + SmallInteger as SmallInteger, + String as String, + Text as Text, + Time as Time, + TupleType as TupleType, + Unicode as Unicode, + UnicodeText as UnicodeText, + _Binary as _Binary, +) +from .sql.type_api import ( + ExternalType as ExternalType, + TypeDecorator as TypeDecorator, + TypeEngine as TypeEngine, + UserDefinedType as UserDefinedType, +) + +__all__ = [ + "TypeEngine", + "TypeDecorator", + "UserDefinedType", + "ExternalType", + "INT", + "CHAR", + "VARCHAR", + "NCHAR", + "NVARCHAR", + "TEXT", + "Text", + "FLOAT", + "NUMERIC", + "REAL", + "DECIMAL", + "TIMESTAMP", + "DATETIME", + "CLOB", + "BLOB", + "BINARY", + "VARBINARY", + "BOOLEAN", + "BIGINT", + "SMALLINT", + "INTEGER", + "DATE", + "TIME", + "TupleType", + "String", + "Integer", + "SmallInteger", + "BigInteger", + "Numeric", + "Float", + "DateTime", + "Date", + "Time", + "LargeBinary", + "Boolean", + "Unicode", + "Concatenable", + "UnicodeText", + "PickleType", + "Interval", + "Enum", + "Indexable", + "ARRAY", + "JSON", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/__init__.pyi new file mode 100644 index 00000000..d569a665 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/__init__.pyi @@ -0,0 +1,174 @@ +from collections import defaultdict as defaultdict +from contextlib import contextmanager as contextmanager +from functools import partial as partial, update_wrapper as update_wrapper + +from ._collections import ( + EMPTY_DICT as EMPTY_DICT, + EMPTY_SET as EMPTY_SET, + FacadeDict as FacadeDict, + IdentitySet as IdentitySet, + ImmutableContainer as ImmutableContainer, + ImmutableProperties as ImmutableProperties, + LRUCache as LRUCache, + OrderedDict as OrderedDict, + OrderedIdentitySet as OrderedIdentitySet, + OrderedProperties as OrderedProperties, + OrderedSet as OrderedSet, + PopulateDict as PopulateDict, + Properties as Properties, + ScopedRegistry as ScopedRegistry, + ThreadLocalRegistry as ThreadLocalRegistry, + UniqueAppender as UniqueAppender, + WeakPopulateDict as WeakPopulateDict, + WeakSequence as WeakSequence, + 
coerce_generator_arg as coerce_generator_arg, + coerce_to_immutabledict as coerce_to_immutabledict, + collections_abc as collections_abc, + column_dict as column_dict, + column_set as column_set, + flatten_iterator as flatten_iterator, + has_dupes as has_dupes, + has_intersection as has_intersection, + immutabledict as immutabledict, + ordered_column_set as ordered_column_set, + sort_dictionary as sort_dictionary, + to_column_set as to_column_set, + to_list as to_list, + to_set as to_set, + unique_list as unique_list, + update_copy as update_copy, +) +from ._preloaded import preload_module as preload_module, preloaded as preloaded +from .compat import ( + ABC as ABC, + TYPE_CHECKING as TYPE_CHECKING, + StringIO as StringIO, + arm as arm, + b as b, + b64decode as b64decode, + b64encode as b64encode, + binary_type as binary_type, + binary_types as binary_types, + byte_buffer as byte_buffer, + callable as callable, + cmp as cmp, + cpython as cpython, + dataclass_fields as dataclass_fields, + decode_backslashreplace as decode_backslashreplace, + dottedgetter as dottedgetter, + has_refcount_gc as has_refcount_gc, + inspect_getfullargspec as inspect_getfullargspec, + int_types as int_types, + iterbytes as iterbytes, + itertools_filter as itertools_filter, + itertools_filterfalse as itertools_filterfalse, + local_dataclass_fields as local_dataclass_fields, + namedtuple as namedtuple, + next as next, + nullcontext as nullcontext, + osx as osx, + parse_qsl as parse_qsl, + perf_counter as perf_counter, + pickle as pickle, + print_ as print_, + py2k as py2k, + py3k as py3k, + py37 as py37, + py38 as py38, + py39 as py39, + pypy as pypy, + quote_plus as quote_plus, + raise_ as raise_, + raise_from_cause as raise_from_cause, + reduce as reduce, + reraise as reraise, + string_types as string_types, + text_type as text_type, + threading as threading, + timezone as timezone, + u as u, + ue as ue, + unquote as unquote, + unquote_plus as unquote_plus, + win32 as win32, + with_metaclass as with_metaclass, + zip_longest as zip_longest, +) +from .concurrency import ( + asyncio as asyncio, + await_fallback as await_fallback, + await_only as await_only, + greenlet_spawn as greenlet_spawn, + is_exit_exception as is_exit_exception, +) +from .deprecations import ( + SQLALCHEMY_WARN_20 as SQLALCHEMY_WARN_20, + deprecated as deprecated, + deprecated_20 as deprecated_20, + deprecated_20_cls as deprecated_20_cls, + deprecated_cls as deprecated_cls, + deprecated_params as deprecated_params, + inject_docstring_text as inject_docstring_text, + moved_20 as moved_20, + warn_deprecated as warn_deprecated, + warn_deprecated_20 as warn_deprecated_20, +) +from .langhelpers import ( + EnsureKWArgType as EnsureKWArgType, + HasMemoized as HasMemoized, + MemoizedSlots as MemoizedSlots, + NoneType as NoneType, + PluginLoader as PluginLoader, + add_parameter_text as add_parameter_text, + as_interface as as_interface, + asbool as asbool, + asint as asint, + assert_arg_type as assert_arg_type, + attrsetter as attrsetter, + bool_or_str as bool_or_str, + chop_traceback as chop_traceback, + class_hierarchy as class_hierarchy, + classproperty as classproperty, + clsname_as_plain_name as clsname_as_plain_name, + coerce_kw_type as coerce_kw_type, + constructor_copy as constructor_copy, + constructor_key as constructor_key, + counter as counter, + create_proxy_methods as create_proxy_methods, + decode_slice as decode_slice, + decorator as decorator, + dictlike_iteritems as dictlike_iteritems, + duck_type_collection as duck_type_collection, + 
ellipses_string as ellipses_string, + format_argspec_init as format_argspec_init, + format_argspec_plus as format_argspec_plus, + generic_repr as generic_repr, + get_callable_argspec as get_callable_argspec, + get_cls_kwargs as get_cls_kwargs, + get_func_kwargs as get_func_kwargs, + getargspec_init as getargspec_init, + has_compiled_ext as has_compiled_ext, + hybridmethod as hybridmethod, + hybridproperty as hybridproperty, + iterate_attributes as iterate_attributes, + map_bits as map_bits, + md5_hex as md5_hex, + memoized_instancemethod as memoized_instancemethod, + memoized_property as memoized_property, + method_is_overridden as method_is_overridden, + methods_equivalent as methods_equivalent, + monkeypatch_proxied_specials as monkeypatch_proxied_specials, + only_once as only_once, + portable_instancemethod as portable_instancemethod, + quoted_token_parser as quoted_token_parser, + safe_reraise as safe_reraise, + set_creation_order as set_creation_order, + string_or_unprintable as string_or_unprintable, + symbol as symbol, + unbound_method_to_callable as unbound_method_to_callable, + walk_subclasses as walk_subclasses, + warn as warn, + warn_exception as warn_exception, + warn_limited as warn_limited, + wrap_callable as wrap_callable, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_collections.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_collections.pyi new file mode 100644 index 00000000..9e197be6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_collections.pyi @@ -0,0 +1,219 @@ +import collections.abc +from _typeshed import Incomplete, SupportsKeysAndGetItem, Unused +from collections.abc import Callable, Iterable, Iterator, Mapping +from typing import Any, Generic, NoReturn, TypeVar, overload +from typing_extensions import Self + +from ..cimmutabledict import immutabledict as immutabledict + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_S = TypeVar("_S") +_T = TypeVar("_T") + +collections_abc = collections.abc + +EMPTY_SET: frozenset[Any] + +class ImmutableContainer: + def __delitem__(self, *arg: Unused, **kw: Unused) -> NoReturn: ... + def __setitem__(self, *arg: Unused, **kw: Unused) -> NoReturn: ... + def __setattr__(self, *arg: Unused, **kw: Unused) -> NoReturn: ... + +@overload +def coerce_to_immutabledict(d: None) -> immutabledict[Any, Any]: ... +@overload +def coerce_to_immutabledict(d: Mapping[_KT, _VT]) -> immutabledict[_KT, _VT]: ... + +EMPTY_DICT: immutabledict[Any, Any] + +class FacadeDict(ImmutableContainer, dict[Any, Any]): + clear: Any + pop: Any + popitem: Any + setdefault: Any + update: Any + def __new__(cls, *args): ... + def copy(self) -> None: ... # type: ignore[override] + def __reduce__(self): ... + +class Properties(Generic[_T]): + def __init__(self, data: dict[str, _T]) -> None: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... + def __dir__(self) -> list[str]: ... + def __add__(self, other: Iterable[_S]) -> list[_S | _T]: ... + def __setitem__(self, key: str, obj: _T) -> None: ... + def __getitem__(self, key: str) -> _T: ... + def __delitem__(self, key: str) -> None: ... + def __setattr__(self, key: str, obj: _T) -> None: ... + def __getattr__(self, key: str) -> _T: ... + def __contains__(self, key: str) -> bool: ... + def as_immutable(self) -> ImmutableProperties[_T]: ... 
+ def update(self, value: Iterable[tuple[str, _T]] | SupportsKeysAndGetItem[str, _T]) -> None: ... + @overload + def get(self, key: str) -> _T | None: ... + @overload + def get(self, key: str, default: _S) -> _T | _S: ... + def keys(self) -> list[str]: ... + def values(self) -> list[_T]: ... + def items(self) -> list[tuple[str, _T]]: ... + def has_key(self, key: str) -> bool: ... + def clear(self) -> None: ... + +class OrderedProperties(Properties[_T], Generic[_T]): + def __init__(self) -> None: ... + +class ImmutableProperties(ImmutableContainer, Properties[_T], Generic[_T]): ... + +OrderedDict = dict + +def sort_dictionary(d, key: Incomplete | None = ...): ... + +class OrderedSet(set[_T], Generic[_T]): + @overload + def __init__(self, d: None = ...) -> None: ... + @overload + def __init__(self, d: Iterable[_T]) -> None: ... + def add(self, element: _T) -> None: ... + def remove(self, element: _T) -> None: ... + def insert(self, pos: int, element: _T) -> None: ... + def discard(self, element: _T) -> None: ... + def clear(self) -> None: ... + def __getitem__(self, key: int) -> _T: ... + def __iter__(self) -> Iterator[_T]: ... + def __add__(self, other: Iterable[_S]) -> OrderedSet[_S | _T]: ... + def update(self, iterable: Iterable[_T]) -> Self: ... # type: ignore[override] + __ior__ = update # type: ignore[assignment] + def union(self, other: Iterable[_S]) -> OrderedSet[_S | _T]: ... # type: ignore[override] + __or__ = union # type: ignore[assignment] # pyright: ignore[reportGeneralTypeIssues] + def intersection(self, other: Iterable[Any]) -> Self: ... # type: ignore[override] + __and__ = intersection + def symmetric_difference(self, other: Iterable[_S]) -> OrderedSet[_S | _T]: ... + __xor__ = symmetric_difference # type: ignore[assignment] # pyright: ignore[reportGeneralTypeIssues] + def difference(self, other: Iterable[Any]) -> Self: ... # type: ignore[override] + __sub__ = difference + def intersection_update(self, other: Iterable[Any]) -> Self: ... # type: ignore[override] + __iand__ = intersection_update # type: ignore[assignment] + def symmetric_difference_update(self, other: Iterable[_T]) -> Self: ... # type: ignore[override] + __ixor__ = symmetric_difference_update # type: ignore[assignment] + def difference_update(self, other: Iterable[Any]) -> Self: ... # type: ignore[override] + __isub__ = difference_update # type: ignore[assignment] + +class IdentitySet: + def __init__(self, iterable: Incomplete | None = ...) -> None: ... + def add(self, value) -> None: ... + def __contains__(self, value): ... + def remove(self, value) -> None: ... + def discard(self, value) -> None: ... + def pop(self): ... + def clear(self) -> None: ... + def __cmp__(self, other) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def issubset(self, iterable): ... + def __le__(self, other): ... + def __lt__(self, other): ... + def issuperset(self, iterable): ... + def __ge__(self, other): ... + def __gt__(self, other): ... + def union(self, iterable): ... + def __or__(self, other): ... + def update(self, iterable) -> None: ... + def __ior__(self, other): ... + def difference(self, iterable): ... + def __sub__(self, other): ... + def difference_update(self, iterable) -> None: ... + def __isub__(self, other): ... + def intersection(self, iterable): ... + def __and__(self, other): ... + def intersection_update(self, iterable) -> None: ... + def __iand__(self, other): ... + def symmetric_difference(self, iterable): ... + def __xor__(self, other): ... 
+ def symmetric_difference_update(self, iterable) -> None: ... + def __ixor__(self, other): ... + def copy(self): ... + __copy__: Any + def __len__(self) -> int: ... + def __iter__(self): ... + def __hash__(self) -> int: ... + +class WeakSequence: + def __init__(self, __elements=...) -> None: ... + def append(self, item) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __getitem__(self, index): ... + +class OrderedIdentitySet(IdentitySet): + def __init__(self, iterable: Incomplete | None = ...) -> None: ... + +class PopulateDict(dict[Any, Any]): + creator: Any + def __init__(self, creator) -> None: ... + def __missing__(self, key): ... + +class WeakPopulateDict(dict[Any, Any]): + creator: Any + weakself: Any + def __init__(self, creator_method) -> None: ... + def __missing__(self, key): ... + +column_set = set +column_dict = dict +# Ignore Y026, this isn't a type alias. +# We have to do `ordered_column_set = OrderedSet[_T] +# instead of `ordered_column_set = OrderedSet`, +# or pyright complains +ordered_column_set = OrderedSet[_T] # noqa: Y026 + +def unique_list(seq: Iterable[_T], hashfunc: Callable[[_T], Any] | None = ...) -> list[_T]: ... + +class UniqueAppender: + data: Any + def __init__(self, data, via: Incomplete | None = ...) -> None: ... + def append(self, item) -> None: ... + def __iter__(self): ... + +def coerce_generator_arg(arg): ... +def to_list(x, default: Incomplete | None = ...): ... +def has_intersection(set_, iterable): ... +def to_set(x): ... +def to_column_set(x): ... +def update_copy(d, _new: Incomplete | None = ..., **kw): ... +def flatten_iterator(x) -> None: ... + +class LRUCache(dict[Any, Any]): + capacity: Any + threshold: Any + size_alert: Any + def __init__(self, capacity: int = ..., threshold: float = ..., size_alert: Incomplete | None = ...) -> None: ... + def get(self, key, default: Incomplete | None = ...): ... + def __getitem__(self, key): ... + def values(self): ... + def setdefault(self, key, value): ... + def __setitem__(self, key, value) -> None: ... + @property + def size_threshold(self): ... + +class ScopedRegistry: + createfunc: Any + scopefunc: Any + registry: Any + def __init__(self, createfunc, scopefunc) -> None: ... + def __call__(self): ... + def has(self): ... + def set(self, obj) -> None: ... + def clear(self) -> None: ... + +class ThreadLocalRegistry(ScopedRegistry): + createfunc: Any + registry: Any + def __init__(self, createfunc) -> None: ... + def __call__(self): ... + def has(self): ... + def set(self, obj) -> None: ... + def clear(self) -> None: ... + +def has_dupes(sequence, target): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_compat_py3k.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_compat_py3k.pyi new file mode 100644 index 00000000..d23165bf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_compat_py3k.pyi @@ -0,0 +1,10 @@ +from typing import Any + +class _AsyncGeneratorContextManager: + gen: Any + __doc__: Any + def __init__(self, func, args, kwds) -> None: ... + async def __aenter__(self): ... + async def __aexit__(self, typ, value, traceback): ... + +def asynccontextmanager(func): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_concurrency_py3k.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_concurrency_py3k.pyi new file mode 100644 index 00000000..f2fea9f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_concurrency_py3k.pyi @@ -0,0 +1,18 @@ +import asyncio as asyncio +from collections.abc import Callable, Coroutine +from typing import Any + +from .langhelpers import memoized_property + +def is_exit_exception(e): ... +def await_only(awaitable: Coroutine[Any, Any, Any]) -> Any: ... +def await_fallback(awaitable: Coroutine[Any, Any, Any]) -> Any: ... +async def greenlet_spawn(fn: Callable[..., Any], *args, _require_await: bool = ..., **kwargs) -> Any: ... + +class AsyncAdaptedLock: + @memoized_property + def mutex(self): ... + def __enter__(self): ... + def __exit__(self, *arg, **kw) -> None: ... + +def get_event_loop(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_preloaded.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_preloaded.pyi new file mode 100644 index 00000000..eaabad39 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/_preloaded.pyi @@ -0,0 +1,11 @@ +from typing import Any + +class _ModuleRegistry: + module_registry: Any + prefix: Any + def __init__(self, prefix: str = ...) -> None: ... + def preload_module(self, *deps): ... + def import_prefix(self, path) -> None: ... + +preloaded: Any +preload_module: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/compat.pyi new file mode 100644 index 00000000..b329ffe4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/compat.pyi @@ -0,0 +1,108 @@ +import builtins +import collections +import contextlib +import itertools +import operator +import pickle as pickle +import threading as threading +from _typeshed import Incomplete +from abc import ABC as ABC +from datetime import timezone as timezone +from functools import reduce as reduce +from io import BytesIO as BytesIO, StringIO as StringIO +from itertools import zip_longest as zip_longest +from time import perf_counter as perf_counter +from typing import TYPE_CHECKING as TYPE_CHECKING, Any, NamedTuple +from typing_extensions import Literal +from urllib.parse import ( + parse_qsl as parse_qsl, + quote as quote, + quote_plus as quote_plus, + unquote as unquote, + unquote_plus as unquote_plus, +) + +byte_buffer = BytesIO + +py39: bool +py38: bool +py37: bool +py3k: Literal[True] +py2k: Literal[False] +pypy: bool +cpython: bool +win32: bool +osx: bool +arm: bool +has_refcount_gc: bool +contextmanager = contextlib.contextmanager +dottedgetter = operator.attrgetter +namedtuple = collections.namedtuple # noqa: Y024 +next = builtins.next + +class FullArgSpec(NamedTuple): + args: Any + varargs: Any + varkw: Any + defaults: Any + kwonlyargs: Any + kwonlydefaults: Any + annotations: Any + +class nullcontext: + enter_result: Any + def __init__(self, enter_result: Incomplete | None = ...) -> None: ... + def __enter__(self): ... 
+ def __exit__(self, *excinfo) -> None: ... + +def inspect_getfullargspec(func): ... +def importlib_metadata_get(group): ... + +string_types: tuple[type, ...] +binary_types: tuple[type, ...] +binary_type = bytes +text_type = str +int_types: tuple[type, ...] +iterbytes = iter +long_type = int +itertools_filterfalse = itertools.filterfalse +itertools_filter = filter +itertools_imap = map +exec_: Any +import_: Any +print_: Any + +def b(s): ... +def b64decode(x): ... +def b64encode(x): ... +def decode_backslashreplace(text, encoding): ... +def cmp(a, b): ... +def raise_( + exception, with_traceback: Incomplete | None = ..., replace_context: Incomplete | None = ..., from_: bool = ... +) -> None: ... +def u(s): ... +def ue(s): ... + +callable = builtins.callable + +def safe_bytestring(text): ... +def inspect_formatargspec( + args, + varargs: Incomplete | None = ..., + varkw: Incomplete | None = ..., + defaults: Incomplete | None = ..., + kwonlyargs=..., + kwonlydefaults=..., + annotations=..., + formatarg=..., + formatvarargs=..., + formatvarkw=..., + formatvalue=..., + formatreturns=..., + formatannotation=..., +): ... +def dataclass_fields(cls): ... +def local_dataclass_fields(cls): ... +def raise_from_cause(exception, exc_info: Incomplete | None = ...) -> None: ... +def reraise(tp, value, tb: Incomplete | None = ..., cause: Incomplete | None = ...) -> None: ... +def with_metaclass(meta, *bases, **kw): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/concurrency.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/concurrency.pyi new file mode 100644 index 00000000..7fe1879c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/concurrency.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +from ._compat_py3k import asynccontextmanager as asynccontextmanager +from ._concurrency_py3k import ( + AsyncAdaptedLock as AsyncAdaptedLock, + await_fallback as await_fallback, + await_only as await_only, + greenlet_spawn as greenlet_spawn, + is_exit_exception as is_exit_exception, +) + +have_greenlet: bool +asyncio: Incomplete | None diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/deprecations.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/deprecations.pyi new file mode 100644 index 00000000..ca733b24 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/deprecations.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from .langhelpers import ( + decorator as decorator, + inject_docstring_text as inject_docstring_text, + inject_param_text as inject_param_text, +) + +SQLALCHEMY_WARN_20: bool + +def warn_deprecated(msg, version, stacklevel: int = ..., code: Incomplete | None = ...) -> None: ... +def warn_deprecated_limited(msg, args, version, stacklevel: int = ..., code: Incomplete | None = ...) -> None: ... +def warn_deprecated_20(msg, stacklevel: int = ..., code: Incomplete | None = ...) -> None: ... +def deprecated_cls(version, message, constructor: str = ...): ... +def deprecated_20_cls(clsname, alternative: Incomplete | None = ..., constructor: str = ..., becomes_legacy: bool = ...): ... 
+def deprecated( + version, + message: Incomplete | None = ..., + add_deprecation_to_docstring: bool = ..., + warning: Incomplete | None = ..., + enable_warnings: bool = ..., +): ... +def moved_20(message, **kw): ... +def deprecated_20(api_name, alternative: Incomplete | None = ..., becomes_legacy: bool = ..., **kw): ... +def deprecated_params(**specs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/langhelpers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/langhelpers.pyi new file mode 100644 index 00000000..abc0bc2c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/langhelpers.pyi @@ -0,0 +1,169 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self + +from . import compat + +_R = TypeVar("_R") + +def md5_hex(x): ... + +class safe_reraise: + warn_only: Any + def __init__(self, warn_only: bool = ...) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, type_, value, traceback) -> None: ... + +def walk_subclasses(cls) -> None: ... +def string_or_unprintable(element): ... +def clsname_as_plain_name(cls): ... +def method_is_overridden(instance_or_cls, against_method): ... +def decode_slice(slc): ... +def map_bits(fn, n) -> None: ... +def decorator(target): ... +def public_factory(target, location, class_location: Incomplete | None = ...): ... + +class PluginLoader: + group: Any + impls: Any + auto_fn: Any + def __init__(self, group, auto_fn: Incomplete | None = ...) -> None: ... + def clear(self) -> None: ... + def load(self, name): ... + def register(self, name, modulepath, objname): ... + +def get_cls_kwargs(cls, _set: Incomplete | None = ...): ... +def get_func_kwargs(func): ... +def get_callable_argspec(fn, no_self: bool = ..., _is_init: bool = ...): ... +def format_argspec_plus(fn, grouped: bool = ...): ... +def format_argspec_init(method, grouped: bool = ...): ... +def create_proxy_methods( + target_cls, target_cls_sphinx_name, proxy_cls_sphinx_name, classmethods=..., methods=..., attributes=... +): ... +def getargspec_init(method): ... +def unbound_method_to_callable(func_or_cls): ... +def generic_repr(obj, additional_kw=..., to_inspect: Incomplete | None = ..., omit_kwarg=...): ... + +class portable_instancemethod: + target: Any + name: Any + kwargs: Any + def __init__(self, meth, kwargs=...) -> None: ... + def __call__(self, *arg, **kw): ... + +def class_hierarchy(cls): ... +def iterate_attributes(cls) -> None: ... +def monkeypatch_proxied_specials( + into_cls, + from_cls, + skip: Incomplete | None = ..., + only: Incomplete | None = ..., + name: str = ..., + from_instance: Incomplete | None = ..., +) -> None: ... +def methods_equivalent(meth1, meth2): ... +def as_interface(obj, cls: Incomplete | None = ..., methods: Incomplete | None = ..., required: Incomplete | None = ...): ... + +class memoized_property(Generic[_R]): + fget: Callable[..., _R] + __doc__: str + __name__: str + def __init__(self, fget: Callable[..., _R], doc: str | None = ...) -> None: ... + @overload + def __get__(self, obj: None, cls: Unused) -> Self: ... + @overload + def __get__(self, obj: object, cls: Unused) -> _R: ... + @classmethod + def reset(cls, obj: object, name: str) -> None: ... + +def memoized_instancemethod(fn): ... 
+ +class HasMemoized: + class memoized_attribute(Generic[_R]): + fget: Callable[..., _R] + __doc__: str + __name__: str + def __init__(self, fget: Callable[..., _R], doc: str | None = ...) -> None: ... + @overload + def __get__(self, obj: None, cls: Unused) -> Self: ... + @overload + def __get__(self, obj: object, cls: Unused) -> _R: ... + + @classmethod + def memoized_instancemethod(cls, fn): ... + +class MemoizedSlots: + def __getattr__(self, key: str): ... + +def asbool(obj): ... +def bool_or_str(*text): ... +def asint(value): ... +def coerce_kw_type(kw, key, type_, flexi_bool: bool = ..., dest: Incomplete | None = ...) -> None: ... +def constructor_key(obj, cls): ... +def constructor_copy(obj, cls, *args, **kw): ... +def counter(): ... +def duck_type_collection(specimen, default: Incomplete | None = ...): ... +def assert_arg_type(arg, argtype, name): ... +def dictlike_iteritems(dictlike): ... + +class classproperty(property): + __doc__: Any + def __init__(self, fget, *arg, **kw) -> None: ... + def __get__(self, self_, cls): ... + +class hybridproperty(Generic[_R]): + func: Callable[..., _R] + clslevel: Callable[..., _R] + def __init__(self, func: Callable[..., _R]) -> None: ... + @overload + def __get__(self, instance: None, owner: Any) -> _R: ... + @overload + def __get__(self, instance: object, owner: object) -> _R: ... + def classlevel(self, func: Callable[..., _R]) -> Self: ... + +class hybridmethod: + func: Any + clslevel: Any + def __init__(self, func) -> None: ... + def __get__(self, instance, owner): ... + def classlevel(self, func): ... + +class _symbol(int): + def __new__(cls, name, doc: Incomplete | None = ..., canonical: Incomplete | None = ...): ... + def __reduce__(self): ... + +class symbol: + symbols: Any + def __new__(cls, name, doc: Incomplete | None = ..., canonical: Incomplete | None = ...): ... + @classmethod + def parse_user_argument(cls, arg, choices, name, resolve_symbol_names: bool = ...): ... + +def set_creation_order(instance) -> None: ... +def warn_exception(func, *args, **kwargs): ... +def ellipses_string(value, len_: int = ...): ... + +class _hash_limit_string(compat.text_type): + def __new__(cls, value, num, args): ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... + +def warn(msg, code: Incomplete | None = ...) -> None: ... +def warn_limited(msg, args) -> None: ... +def only_once(fn, retry_on_exception): ... +def chop_traceback(tb, exclude_prefix=..., exclude_suffix=...): ... + +NoneType: Any + +def attrsetter(attrname): ... + +class EnsureKWArgType(type): + def __init__(cls, clsname, bases, clsdict) -> None: ... + +def wrap_callable(wrapper, fn): ... +def quoted_token_parser(value): ... +def add_parameter_text(params, text): ... +def inject_docstring_text(doctext, injecttext, pos): ... +def inject_param_text(doctext, inject_params): ... +def repr_tuple_names(names): ... +def has_compiled_ext(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/queue.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/queue.pyi new file mode 100644 index 00000000..7a26c25d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/queue.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete +from typing import Any + +class Empty(Exception): ... +class Full(Exception): ... 
+ +class Queue: + mutex: Any + not_empty: Any + not_full: Any + use_lifo: Any + def __init__(self, maxsize: int = ..., use_lifo: bool = ...) -> None: ... + def qsize(self): ... + def empty(self): ... + def full(self): ... + def put(self, item, block: bool = ..., timeout: Incomplete | None = ...) -> None: ... + def put_nowait(self, item): ... + def get(self, block: bool = ..., timeout: Incomplete | None = ...): ... + def get_nowait(self): ... + +class AsyncAdaptedQueue: + await_: Any + use_lifo: Any + maxsize: Any + def __init__(self, maxsize: int = ..., use_lifo: bool = ...) -> None: ... + def empty(self): ... + def full(self): ... + def qsize(self): ... + def put_nowait(self, item): ... + def put(self, item, block: bool = ..., timeout: Incomplete | None = ...): ... + def get_nowait(self): ... + def get(self, block: bool = ..., timeout: Incomplete | None = ...): ... + +class FallbackAsyncAdaptedQueue(AsyncAdaptedQueue): + await_: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/topological.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/topological.pyi new file mode 100644 index 00000000..04428e1b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/SQLAlchemy/sqlalchemy/util/topological.pyi @@ -0,0 +1,3 @@ +def sort_as_subsets(tuples, allitems) -> None: ... +def sort(tuples, allitems, deterministic_order: bool = ...) -> None: ... +def find_cycles(tuples, allitems): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/METADATA.toml new file mode 100644 index 00000000..ee7e83f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/METADATA.toml @@ -0,0 +1,4 @@ +version = "1.8.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/__init__.pyi new file mode 100644 index 00000000..9ff00ba7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/__init__.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete, StrOrBytesPath +from typing import Any + +from .exceptions import TrashPermissionError as TrashPermissionError + +# The list should be list[StrOrBytesPath] but that doesn't work because invariance +def send2trash(paths: list[Any] | StrOrBytesPath) -> None: ... + +# Marked as incomplete because there are platform-specific plat_foo modules +def __getattr__(name: str) -> Incomplete: ... 
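The send2trash stub above types the single public entry point as accepting either one path-like value or a list of paths (widened to list[Any] because of list invariance, as the stub's comment notes). A minimal usage sketch, assuming Send2Trash is installed; the file name is made up for the demo:

    import pathlib
    from send2trash import send2trash

    # Hypothetical throwaway file created only so there is something to trash.
    scratch = pathlib.Path("scratch.txt")
    scratch.write_text("safe to discard")

    # Accepts a single str/bytes/os.PathLike value or a list of such paths;
    # the file is moved to the platform trash/recycle bin rather than unlinked.
    send2trash(scratch)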
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/compat.pyi new file mode 100644 index 00000000..e148e352 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/compat.pyi @@ -0,0 +1,7 @@ +from typing import Any +from typing_extensions import Literal + +PY3: Literal[True] +text_type = str +binary_type = bytes +environb: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/exceptions.pyi new file mode 100644 index 00000000..a263f655 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/exceptions.pyi @@ -0,0 +1,5 @@ +from typing import Any + +class TrashPermissionError(PermissionError): + # Typed the same as `filename` in `PermissionError`: + def __init__(self, filename: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/util.pyi new file mode 100644 index 00000000..c89143c4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/Send2Trash/send2trash/util.pyi @@ -0,0 +1,5 @@ +from _typeshed import StrOrBytesPath +from typing import Any + +# Should be consistent with `__init__.py` +def preprocess_paths(paths: list[Any] | StrOrBytesPath) -> list[str | bytes]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..7640356f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/@tests/stubtest_allowlist.txt @@ -0,0 +1,65 @@ +# These all delegate using *args,**kwargs, but stubs use signature of +# method they are being delegated to. 
+aiofiles.threadpool.binary.AsyncBufferedIOBase.close +aiofiles.threadpool.binary.AsyncBufferedIOBase.detach +aiofiles.threadpool.binary.AsyncBufferedIOBase.fileno +aiofiles.threadpool.binary.AsyncBufferedIOBase.flush +aiofiles.threadpool.binary.AsyncBufferedIOBase.isatty +aiofiles.threadpool.binary.AsyncBufferedIOBase.readable +aiofiles.threadpool.binary.AsyncBufferedIOBase.seekable +aiofiles.threadpool.binary.AsyncBufferedIOBase.tell +aiofiles.threadpool.binary.AsyncBufferedIOBase.writable +aiofiles.threadpool.binary.AsyncFileIO.close +aiofiles.threadpool.binary.AsyncFileIO.fileno +aiofiles.threadpool.binary.AsyncFileIO.flush +aiofiles.threadpool.binary.AsyncFileIO.isatty +aiofiles.threadpool.binary.AsyncFileIO.readable +aiofiles.threadpool.binary.AsyncFileIO.readall +aiofiles.threadpool.binary.AsyncFileIO.seekable +aiofiles.threadpool.binary.AsyncFileIO.tell +aiofiles.threadpool.binary.AsyncFileIO.writable +aiofiles.threadpool.text.AsyncTextIOWrapper.close +aiofiles.threadpool.text.AsyncTextIOWrapper.detach +aiofiles.threadpool.text.AsyncTextIOWrapper.fileno +aiofiles.threadpool.text.AsyncTextIOWrapper.flush +aiofiles.threadpool.text.AsyncTextIOWrapper.isatty +aiofiles.threadpool.text.AsyncTextIOWrapper.readable +aiofiles.threadpool.text.AsyncTextIOWrapper.seekable +aiofiles.threadpool.text.AsyncTextIOWrapper.tell +aiofiles.threadpool.text.AsyncTextIOWrapper.writable + +# These functions get the wrong signature from functools.wraps() +aiofiles.os.stat +aiofiles.os.rename +aiofiles.os.replace +aiofiles.os.remove +aiofiles.os.mkdir +aiofiles.os.makedirs +aiofiles.os.rmdir +aiofiles.os.removedirs +aiofiles.os.scandir +aiofiles.ospath.exists +aiofiles.ospath.isfile +aiofiles.ospath.isdir +aiofiles.ospath.getsize +aiofiles.ospath.getmtime +aiofiles.ospath.getatime +aiofiles.ospath.getctime +aiofiles.ospath.samefile +aiofiles.ospath.sameopenfile + +# Same issues as above +aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.close +aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.fileno +aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.flush +aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.isatty +aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.rollover +aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.tell +aiofiles.tempfile.temptypes.AsyncTemporaryDirectory.cleanup + +# Metaclass differs: +aiofiles.base.AiofilesContextManager +aiofiles.tempfile.AiofilesContextManagerTempDir + +# Helper decorator, too complex to type +aiofiles.os.wrap diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/@tests/stubtest_allowlist_darwin.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/@tests/stubtest_allowlist_darwin.txt new file mode 100644 index 00000000..b0ea37bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/@tests/stubtest_allowlist_darwin.txt @@ -0,0 +1,2 @@ +# This function gets the wrong signature from functools.wraps() +aiofiles.os.sendfile diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..b0ea37bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,2 @@ +# This function gets the wrong signature from 
functools.wraps() +aiofiles.os.sendfile diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/METADATA.toml new file mode 100644 index 00000000..f5ebbca0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/METADATA.toml @@ -0,0 +1,5 @@ +version = "22.1.*" + +[tool.stubtest] +# linux and darwin are equivalent +platforms = ["linux", "win32"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/__init__.pyi new file mode 100644 index 00000000..468393cb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/__init__.pyi @@ -0,0 +1,2 @@ +from . import tempfile as tempfile +from .threadpool import open as open diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/base.pyi new file mode 100644 index 00000000..cc9baf4a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/base.pyi @@ -0,0 +1,35 @@ +from collections.abc import Coroutine, Generator, Iterator +from types import CodeType, FrameType, TracebackType, coroutine +from typing import Any, Generic, TypeVar +from typing_extensions import Self + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_V_co = TypeVar("_V_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) + +class AsyncBase(Generic[_T]): + def __init__(self, file: str, loop: Any, executor: Any) -> None: ... + def __aiter__(self) -> Self: ... + async def __anext__(self) -> _T: ... + +class AiofilesContextManager(Generic[_T_co, _T_contra, _V_co]): + def __init__(self, coro: Coroutine[_T_co, _T_contra, _V_co]) -> None: ... + def send(self, value: _T_contra) -> _T_co: ... + def throw(self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ...) -> _T_co: ... + def close(self) -> None: ... + @property + def gi_frame(self) -> FrameType: ... + @property + def gi_running(self) -> bool: ... + @property + def gi_code(self) -> CodeType: ... + def __next__(self) -> _T_co: ... + @coroutine + def __iter__(self) -> Iterator[Coroutine[_T_co, _T_contra, _V_co]]: ... + def __await__(self) -> Generator[Any, None, _V_co]: ... + async def __anext__(self) -> _V_co: ... + async def __aenter__(self) -> _V_co: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... 
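AiofilesContextManager above implements both __await__ and the async context-manager protocol, which is why aiofiles.open() can be consumed in either style. A short sketch of the two call sites, assuming aiofiles is installed; the file name is illustrative:

    import asyncio
    import aiofiles

    async def main() -> None:
        # Awaiting the manager returns the async file wrapper directly;
        # the caller must close it explicitly.
        f = await aiofiles.open("example.txt", mode="w")
        await f.write("written via await\n")
        await f.close()

        # "async with" closes the wrapper automatically on exit.
        async with aiofiles.open("example.txt", mode="r") as f:
            print(await f.read())

    asyncio.run(main())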
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/os.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/os.pyi new file mode 100644 index 00000000..e7d3edb4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/os.pyi @@ -0,0 +1,77 @@ +import sys +from _typeshed import FileDescriptorOrPath, GenericPath, StrOrBytesPath +from asyncio.events import AbstractEventLoop +from collections.abc import Sequence +from os import _ScandirIterator, stat_result +from typing import Any, AnyStr, overload + +from aiofiles import ospath + +path = ospath + +async def stat( + path: FileDescriptorOrPath, + *, + dir_fd: int | None = ..., + follow_symlinks: bool = ..., + loop: AbstractEventLoop | None = ..., + executor: Any = ..., +) -> stat_result: ... +async def rename( + src: StrOrBytesPath, + dst: StrOrBytesPath, + *, + src_dir_fd: int | None = ..., + dst_dir_fd: int | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Any = ..., +) -> None: ... +async def replace( + src: StrOrBytesPath, + dst: StrOrBytesPath, + *, + src_dir_fd: int | None = ..., + dst_dir_fd: int | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Any = ..., +) -> None: ... +async def remove( + path: StrOrBytesPath, *, dir_fd: int | None = ..., loop: AbstractEventLoop | None = ..., executor: Any = ... +) -> None: ... +async def mkdir( + path: StrOrBytesPath, mode: int = ..., *, dir_fd: int | None = ..., loop: AbstractEventLoop | None = ..., executor: Any = ... +) -> None: ... +async def makedirs( + name: StrOrBytesPath, mode: int = ..., exist_ok: bool = ..., *, loop: AbstractEventLoop | None = ..., executor: Any = ... +) -> None: ... +async def rmdir( + path: StrOrBytesPath, *, dir_fd: int | None = ..., loop: AbstractEventLoop | None = ..., executor: Any = ... +) -> None: ... +async def removedirs(name: StrOrBytesPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> None: ... +@overload +async def scandir(path: None = ..., *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> _ScandirIterator[str]: ... +@overload +async def scandir(path: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> _ScandirIterator[str]: ... +@overload +async def scandir( + path: GenericPath[AnyStr], *, loop: AbstractEventLoop | None = ..., executor: Any = ... +) -> _ScandirIterator[AnyStr]: ... + +if sys.platform != "win32": + @overload + async def sendfile( + out_fd: int, in_fd: int, offset: int | None, count: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ... + ) -> int: ... + @overload + async def sendfile( + out_fd: int, + in_fd: int, + offset: int, + count: int, + headers: Sequence[bytes] = ..., + trailers: Sequence[bytes] = ..., + flags: int = ..., + *, + loop: AbstractEventLoop | None = ..., + executor: Any = ..., + ) -> int: ... 
# FreeBSD and Mac OS X only diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/ospath.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/ospath.pyi new file mode 100644 index 00000000..e58e7289 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/ospath.pyi @@ -0,0 +1,15 @@ +from _typeshed import FileDescriptorOrPath +from asyncio.events import AbstractEventLoop +from typing import Any + +async def exists(path: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... +async def isfile(path: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... +async def isdir(s: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... +async def getsize(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> int: ... +async def getmtime(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ... +async def getatime(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ... +async def getctime(filename: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ... +async def samefile( + f1: FileDescriptorOrPath, f2: FileDescriptorOrPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ... +) -> bool: ... +async def sameopenfile(fp1: int, fp2: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/tempfile/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/tempfile/__init__.pyi new file mode 100644 index 00000000..95af3e1c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/tempfile/__init__.pyi @@ -0,0 +1,260 @@ +from _typeshed import ( + BytesPath, + Incomplete, + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + StrOrBytesPath, + StrPath, +) +from asyncio import AbstractEventLoop +from typing import AnyStr, TypeVar, overload +from typing_extensions import Literal + +from ..base import AiofilesContextManager +from ..threadpool.binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO +from ..threadpool.text import AsyncTextIOWrapper +from .temptypes import AsyncTemporaryDirectory + +_T_co = TypeVar("_T_co", covariant=True) +_V_co = TypeVar("_V_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) + +# Text mode: always returns AsyncTextIOWrapper +@overload +def NamedTemporaryFile( + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + delete: bool = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ... 
+ +# Unbuffered binary: returns a FileIO +@overload +def NamedTemporaryFile( + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + delete: bool = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncFileIO]: ... + +# Buffered binary reading/updating: AsyncBufferedReader +@overload +def NamedTemporaryFile( + mode: OpenBinaryModeReading | OpenBinaryModeUpdating = ..., + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + delete: bool = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncBufferedReader]: ... + +# Buffered binary writing: AsyncBufferedIOBase +@overload +def NamedTemporaryFile( + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + delete: bool = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ... + +# Text mode: always returns AsyncTextIOWrapper +@overload +def TemporaryFile( + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ... + +# Unbuffered binary: returns a FileIO +@overload +def TemporaryFile( + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncFileIO]: ... + +# Buffered binary reading/updating: AsyncBufferedReader +@overload +def TemporaryFile( + mode: OpenBinaryModeReading | OpenBinaryModeUpdating = ..., + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncBufferedReader]: ... + +# Buffered binary writing: AsyncBufferedIOBase +@overload +def TemporaryFile( + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ... 
+ +# Text mode: always returns AsyncTextIOWrapper +@overload +def SpooledTemporaryFile( + max_size: int = ..., + *, + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ... +@overload +def SpooledTemporaryFile( + max_size: int, + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ... + +# Unbuffered binary: returns a FileIO +@overload +def SpooledTemporaryFile( + max_size: int = ..., + mode: OpenBinaryMode = ..., + *, + buffering: Literal[0], + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncFileIO]: ... +@overload +def SpooledTemporaryFile( + max_size: int, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncFileIO]: ... + +# Buffered binary reading/updating: AsyncBufferedReader +@overload +def SpooledTemporaryFile( + max_size: int = ..., + mode: OpenBinaryModeReading | OpenBinaryModeUpdating = ..., + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncBufferedReader]: ... + +# Buffered binary writing: AsyncBufferedIOBase +@overload +def SpooledTemporaryFile( + max_size: int = ..., + *, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ... +@overload +def SpooledTemporaryFile( + max_size: int, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + newline: None = ..., + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: StrOrBytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ... +@overload +def TemporaryDirectory( + suffix: str | None = ..., + prefix: str | None = ..., + dir: StrPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManagerTempDir[None, None, AsyncTemporaryDirectory]: ... 
+@overload +def TemporaryDirectory( + suffix: bytes | None = ..., + prefix: bytes | None = ..., + dir: BytesPath | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManagerTempDir[None, None, AsyncTemporaryDirectory]: ... + +class AiofilesContextManagerTempDir(AiofilesContextManager[_T_co, _T_contra, _V_co]): + async def __aenter__(self) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/tempfile/temptypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/tempfile/temptypes.pyi new file mode 100644 index 00000000..6b66e27e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/tempfile/temptypes.pyi @@ -0,0 +1,54 @@ +from _typeshed import Incomplete +from asyncio import AbstractEventLoop +from collections.abc import Generator, Iterable +from tempfile import TemporaryDirectory, _BytesMode +from types import coroutine as coroutine +from typing import TypeVar + +from aiofiles.base import AsyncBase as AsyncBase +from aiofiles.threadpool.utils import ( + cond_delegate_to_executor as cond_delegate_to_executor, + delegate_to_executor as delegate_to_executor, + proxy_property_directly as proxy_property_directly, +) + +_T = TypeVar("_T") + +class AsyncSpooledTemporaryFile(AsyncBase[_T]): + def fileno(self) -> Generator[Incomplete, Incomplete, Incomplete]: ... + def rollover(self) -> Generator[Incomplete, Incomplete, Incomplete]: ... + async def close(self) -> None: ... + async def flush(self) -> None: ... + async def isatty(self) -> bool: ... + # All must return `AnyStr`: + async def read(self, __n: int = ...) -> Incomplete: ... + async def readline(self, __limit: int | None = ...) -> Incomplete: ... + async def readlines(self, __hint: int = ...) -> list[Incomplete]: ... + # --- + async def seek(self, offset: int, whence: int = ...) -> int: ... + async def tell(self) -> int: ... + async def truncate(self, size: int | None = ...) -> None: ... + @property + def closed(self) -> bool: ... + @property + def encoding(self) -> str: ... + @property + def mode(self) -> _BytesMode: ... + @property + def name(self) -> str: ... + @property + def newlines(self) -> str: ... + @property + def softspace(self) -> bool: ... + # Both should work with `AnyStr`, like in `tempfile`: + async def write(self, s: Incomplete) -> int: ... + async def writelines(self, iterable: Iterable[Incomplete]) -> None: ... + +class AsyncTemporaryDirectory: + async def cleanup(self) -> None: ... + @property + def name(self) -> Incomplete: ... # should be `AnyStr` + def __init__( + self, file: TemporaryDirectory[Incomplete], loop: AbstractEventLoop | None, executor: Incomplete | None + ) -> None: ... + async def close(self) -> None: ... 
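The overloads above mirror aiofiles.tempfile's helpers, whose wrapper type depends on the requested mode just as with the synchronous tempfile module. A brief sketch, assuming an aiofiles release that ships the tempfile subpackage and that TemporaryDirectory yields the directory path on entry:

    import asyncio
    import aiofiles.tempfile

    async def main() -> None:
        # A binary update mode ("wb+") resolves to the buffered binary overloads.
        async with aiofiles.tempfile.NamedTemporaryFile("wb+") as f:
            await f.write(b"scratch data")
            await f.seek(0)
            print(await f.read())

        # The directory and its contents are removed when the block exits.
        async with aiofiles.tempfile.TemporaryDirectory() as d:
            print("created", d)

    asyncio.run(main())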
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/__init__.pyi new file mode 100644 index 00000000..89dcc121 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/__init__.pyi @@ -0,0 +1,99 @@ +from _typeshed import ( + FileDescriptorOrPath, + Incomplete, + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, +) +from asyncio import AbstractEventLoop +from collections.abc import Callable +from typing import overload +from typing_extensions import Literal, TypeAlias + +from ..base import AiofilesContextManager +from .binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO, _UnknownAsyncBinaryIO +from .text import AsyncTextIOWrapper + +_Opener: TypeAlias = Callable[[str, int], int] + +# Text mode: always returns AsyncTextIOWrapper +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., + *, + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncTextIOWrapper]: ... + +# Unbuffered binary: returns a FileIO +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., + *, + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncFileIO]: ... + +# Buffered binary reading/updating: AsyncBufferedReader +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryModeReading | OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., + *, + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncBufferedReader]: ... + +# Buffered binary writing: AsyncBufferedIOBase +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., + *, + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, AsyncBufferedIOBase]: ... + +# Buffering cannot be determined: fall back to _UnknownAsyncBinaryIO +@overload +def open( + file: FileDescriptorOrPath, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., + *, + loop: AbstractEventLoop | None = ..., + executor: Incomplete | None = ..., +) -> AiofilesContextManager[None, None, _UnknownAsyncBinaryIO]: ... 
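These open() overloads pick the wrapper type from the mode and buffering arguments, mirroring the builtin open(). A sketch of the three common cases at a call site; the file name and payload are made up:

    import asyncio
    import aiofiles

    async def main() -> None:
        # Buffered binary writing -> AsyncBufferedIOBase
        async with aiofiles.open("payload.bin", mode="wb") as f:
            await f.write(b"\x00" * 16)

        # Unbuffered binary -> the FileIO-backed wrapper
        async with aiofiles.open("payload.bin", mode="rb", buffering=0) as f:
            raw = await f.read()

        # Text mode -> AsyncTextIOWrapper
        async with aiofiles.open("payload.bin", mode="r", encoding="utf-8") as f:
            text = await f.read()

        print(len(raw), len(text))

    asyncio.run(main())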
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/binary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/binary.pyi new file mode 100644 index 00000000..c6421325 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/binary.pyi @@ -0,0 +1,41 @@ +from _typeshed import FileDescriptorOrPath, ReadableBuffer, WriteableBuffer +from collections.abc import Iterable +from io import FileIO + +from ..base import AsyncBase + +class _UnknownAsyncBinaryIO(AsyncBase[bytes]): + async def close(self) -> None: ... + async def flush(self) -> None: ... + async def isatty(self) -> bool: ... + async def read(self, __size: int = ...) -> bytes: ... + async def readinto(self, __buffer: WriteableBuffer) -> int | None: ... + async def readline(self, __size: int | None = ...) -> bytes: ... + async def readlines(self, __hint: int = ...) -> list[bytes]: ... + async def seek(self, __offset: int, __whence: int = ...) -> int: ... + async def seekable(self) -> bool: ... + async def tell(self) -> int: ... + async def truncate(self, __size: int | None = ...) -> int: ... + async def writable(self) -> bool: ... + async def write(self, __b: ReadableBuffer) -> int: ... + async def writelines(self, __lines: Iterable[ReadableBuffer]) -> None: ... + def fileno(self) -> int: ... + def readable(self) -> bool: ... + @property + def closed(self) -> bool: ... + @property + def mode(self) -> str: ... + @property + def name(self) -> FileDescriptorOrPath: ... + +class AsyncBufferedIOBase(_UnknownAsyncBinaryIO): + async def read1(self, __size: int = ...) -> bytes: ... + def detach(self) -> FileIO: ... + @property + def raw(self) -> FileIO: ... + +class AsyncBufferedReader(AsyncBufferedIOBase): + async def peek(self, __size: int = ...) -> bytes: ... + +class AsyncFileIO(_UnknownAsyncBinaryIO): + async def readall(self) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/text.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/text.pyi new file mode 100644 index 00000000..b02cb83f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/text.pyi @@ -0,0 +1,39 @@ +from _typeshed import FileDescriptorOrPath +from collections.abc import Iterable +from typing import BinaryIO + +from ..base import AsyncBase + +class AsyncTextIOWrapper(AsyncBase[str]): + async def close(self) -> None: ... + async def flush(self) -> None: ... + async def isatty(self) -> bool: ... + async def read(self, __size: int | None = ...) -> str: ... + async def readline(self, __size: int = ...) -> str: ... + async def readlines(self, __hint: int = ...) -> list[str]: ... + async def seek(self, __offset: int, __whence: int = ...) -> int: ... + async def seekable(self) -> bool: ... + async def tell(self) -> int: ... + async def truncate(self, __size: int | None = ...) -> int: ... + async def writable(self) -> bool: ... + async def write(self, __b: str) -> int: ... + async def writelines(self, __lines: Iterable[str]) -> None: ... + def detach(self) -> BinaryIO: ... + def fileno(self) -> int: ... + def readable(self) -> bool: ... + @property + def buffer(self) -> BinaryIO: ... + @property + def closed(self) -> bool: ... + @property + def encoding(self) -> str: ... 
+ @property + def errors(self) -> str | None: ... + @property + def line_buffering(self) -> bool: ... + @property + def newlines(self) -> str | tuple[str, ...] | None: ... + @property + def name(self) -> FileDescriptorOrPath: ... + @property + def mode(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/utils.pyi new file mode 100644 index 00000000..afff76d0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aiofiles/aiofiles/threadpool/utils.pyi @@ -0,0 +1,11 @@ +from collections.abc import Callable +from types import coroutine as coroutine +from typing import TypeVar + +_T = TypeVar("_T", bound=type) + +# All these function actually mutate the given type: +def delegate_to_executor(*attrs: str) -> Callable[[_T], _T]: ... +def proxy_method_directly(*attrs: str) -> Callable[[_T], _T]: ... +def proxy_property_directly(*attrs: str) -> Callable[[_T], _T]: ... +def cond_delegate_to_executor(*attrs: str) -> Callable[[_T], _T]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/annoy/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/annoy/METADATA.toml new file mode 100644 index 00000000..74b2b8d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/annoy/METADATA.toml @@ -0,0 +1,2 @@ +version = "1.17.*" +requires = [] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/annoy/annoy/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/annoy/annoy/__init__.pyi new file mode 100644 index 00000000..8a1ced3e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/annoy/annoy/__init__.pyi @@ -0,0 +1,5 @@ +from typing_extensions import TypeAlias + +from .annoylib import Annoy + +AnnoyIndex: TypeAlias = Annoy diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/annoy/annoy/annoylib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/annoy/annoy/annoylib.pyi new file mode 100644 index 00000000..cae6dd24 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/annoy/annoy/annoylib.pyi @@ -0,0 +1,44 @@ +from _typeshed import SupportsLenAndGetItem +from typing import overload +from typing_extensions import Literal, TypeAlias + +_Vector: TypeAlias = SupportsLenAndGetItem[float] + +class Annoy: + f: int + def __init__(self, f: int, metric: Literal["angular", "euclidean", "manhattan", "hamming", "dot"]) -> None: ... + def load(self, fn: str, prefault: bool = ...) -> Literal[True]: ... + def save(self, fn: str, prefault: bool = ...) -> Literal[True]: ... + @overload + def get_nns_by_item(self, i: int, n: int, search_k: int = ..., include_distances: Literal[False] = ...) -> list[int]: ... + @overload + def get_nns_by_item( + self, i: int, n: int, search_k: int, include_distances: Literal[True] + ) -> tuple[list[int], list[float]]: ... + @overload + def get_nns_by_item( + self, i: int, n: int, search_k: int = ..., *, include_distances: Literal[True] + ) -> tuple[list[int], list[float]]: ... + @overload + def get_nns_by_vector( + self, vector: _Vector, n: int, search_k: int = ..., include_distances: Literal[False] = ... + ) -> list[int]: ... 
+ @overload + def get_nns_by_vector( + self, vector: _Vector, n: int, search_k: int, include_distances: Literal[True] + ) -> tuple[list[int], list[float]]: ... + @overload + def get_nns_by_vector( + self, vector: _Vector, n: int, search_k: int = ..., *, include_distances: Literal[True] + ) -> tuple[list[int], list[float]]: ... + def get_item_vector(self, __i: int) -> list[float]: ... + def add_item(self, i: int, vector: _Vector) -> None: ... + def on_disk_build(self, fn: str) -> Literal[True]: ... + def build(self, n_trees: int, n_jobs: int = ...) -> Literal[True]: ... + def unbuild(self) -> Literal[True]: ... + def unload(self) -> Literal[True]: ... + def get_distance(self, __i: int, __j: int) -> float: ... + def get_n_items(self) -> int: ... + def get_n_trees(self) -> int: ... + def verbose(self, __v: bool) -> Literal[True]: ... + def set_seed(self, __s: int) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/appdirs/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/appdirs/METADATA.toml new file mode 100644 index 00000000..6d3b9223 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/appdirs/METADATA.toml @@ -0,0 +1 @@ +version = "1.4.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/appdirs/appdirs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/appdirs/appdirs.pyi new file mode 100644 index 00000000..e31d66c2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/appdirs/appdirs.pyi @@ -0,0 +1,57 @@ +from typing_extensions import Literal + +__version_info__: tuple[int, int, int] +PY3: Literal[True] +unicode = str +system: str + +def user_data_dir( + appname: str | None = ..., appauthor: str | None = ..., version: str | None = ..., roaming: bool = ... +) -> str: ... +def site_data_dir( + appname: str | None = ..., appauthor: str | None = ..., version: str | None = ..., multipath: bool = ... +) -> str: ... +def user_config_dir( + appname: str | None = ..., appauthor: str | None = ..., version: str | None = ..., roaming: bool = ... +) -> str: ... +def site_config_dir( + appname: str | None = ..., appauthor: str | None = ..., version: str | None = ..., multipath: bool = ... +) -> str: ... +def user_cache_dir( + appname: str | None = ..., appauthor: str | None = ..., version: str | None = ..., opinion: bool = ... +) -> str: ... +def user_state_dir( + appname: str | None = ..., appauthor: str | None = ..., version: str | None = ..., roaming: bool = ... +) -> str: ... +def user_log_dir( + appname: str | None = ..., appauthor: str | None = ..., version: str | None = ..., opinion: bool = ... +) -> str: ... + +class AppDirs: + appname: str + appauthor: str + version: str + roaming: bool + multipath: bool + def __init__( + self, + appname: str | None = ..., + appauthor: str | None = ..., + version: str | None = ..., + roaming: bool = ..., + multipath: bool = ..., + ) -> None: ... + @property + def user_data_dir(self) -> str: ... + @property + def site_data_dir(self) -> str: ... + @property + def user_config_dir(self) -> str: ... + @property + def site_config_dir(self) -> str: ... + @property + def user_cache_dir(self) -> str: ... + @property + def user_state_dir(self) -> str: ... + @property + def user_log_dir(self) -> str: ... 
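The appdirs stub above covers a small, fully typed surface: module-level helpers plus the AppDirs convenience class. A quick sketch; the application name, author, and version are invented for the example:

    import appdirs

    print(appdirs.user_data_dir("ExampleApp", "ExampleCorp"))
    print(appdirs.user_cache_dir("ExampleApp", "ExampleCorp"))

    dirs = appdirs.AppDirs("ExampleApp", "ExampleCorp", version="1.0")
    print(dirs.user_config_dir)
    print(dirs.user_log_dir)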
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..c0f8fbaf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/@tests/stubtest_allowlist.txt @@ -0,0 +1,4 @@ +aws_xray_sdk.core.async_recorder.subsegment_decorator +aws_xray_sdk.core.models.subsegment.subsegment_decorator +aws_xray_sdk.core.sampling.connector.ServiceConnector.fetch_sampling_rules +aws_xray_sdk.core.sampling.sampler.ServiceConnector.fetch_sampling_rules diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/METADATA.toml new file mode 100644 index 00000000..d4db0c1a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/METADATA.toml @@ -0,0 +1,4 @@ +version = "2.11.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/__init__.pyi new file mode 100644 index 00000000..0db89337 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/__init__.pyi @@ -0,0 +1,3 @@ +from .sdk_config import SDKConfig + +global_sdk_config: SDKConfig diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/__init__.pyi new file mode 100644 index 00000000..bfaa6ea7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/__init__.pyi @@ -0,0 +1,4 @@ +from .patcher import patch as patch, patch_all as patch_all +from .recorder import AWSXRayRecorder as AWSXRayRecorder + +xray_recorder: AWSXRayRecorder diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi new file mode 100644 index 00000000..36c2d904 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/async_context.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +from .context import Context as _Context + +class AsyncContext(_Context): + def __init__(self, *args, loop: Incomplete | None = ..., use_task_factory: bool = ..., **kwargs) -> None: ... + def clear_trace_entities(self) -> None: ... + +class TaskLocalStorage: + def __init__(self, loop: Incomplete | None = ...) -> None: ... + def __setattr__(self, name: str, value) -> None: ... + def __getattribute__(self, item: str): ... + def clear(self) -> None: ... + +def task_factory(loop, coro): ... 
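The core package exposes a module-level xray_recorder alongside patch()/patch_all(). A minimal sketch of manual segment handling with that recorder; the segment name and annotation are arbitrary, and it assumes the SDK's fire-and-forget UDP emission is harmless when no local X-Ray daemon is listening:

    from aws_xray_sdk.core import xray_recorder, patch_all

    patch_all()  # instrument supported libraries so their calls appear as subsegments

    segment = xray_recorder.begin_segment("typeshed-stats-demo")
    segment.put_annotation("benchmark", "typeshed_stats")
    xray_recorder.end_segment()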
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi new file mode 100644 index 00000000..e059dde5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/async_recorder.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from .models.segment import SegmentContextManager as SegmentContextManager +from .models.subsegment import ( + SubsegmentContextManager as SubsegmentContextManager, + is_already_recording as is_already_recording, + subsegment_decorator as subsegment_decorator, +) +from .recorder import AWSXRayRecorder as AWSXRayRecorder +from .utils import stacktrace as stacktrace + +class AsyncSegmentContextManager(SegmentContextManager): + async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc_val, exc_tb): ... + +class AsyncSubsegmentContextManager(SubsegmentContextManager): + async def __call__(self, wrapped, instance, args, kwargs): ... + async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc_val, exc_tb): ... + +class AsyncAWSXRayRecorder(AWSXRayRecorder): + def capture_async(self, name: Incomplete | None = ...): ... + def in_segment_async(self, name: Incomplete | None = ..., **segment_kwargs): ... + def in_subsegment_async(self, name: Incomplete | None = ..., **subsegment_kwargs): ... + async def record_subsegment_async(self, wrapped, instance, args, kwargs, name, namespace, meta_processor): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/context.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/context.pyi new file mode 100644 index 00000000..f495ac64 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/context.pyi @@ -0,0 +1,30 @@ +import time +from logging import Logger +from typing import Any + +from .. import global_sdk_config as global_sdk_config +from .exceptions.exceptions import SegmentNotFoundException as SegmentNotFoundException +from .models.dummy_entities import DummySegment as DummySegment +from .models.entity import Entity +from .models.segment import Segment +from .models.subsegment import Subsegment + +log: Logger +SUPPORTED_CONTEXT_MISSING: Any +MISSING_SEGMENT_MSG: str +CXT_MISSING_STRATEGY_KEY: str + +class Context: + def __init__(self, context_missing: str = ...) -> None: ... + def put_segment(self, segment: Segment) -> None: ... + def end_segment(self, end_time: time.struct_time | None = ...) -> None: ... + def put_subsegment(self, subsegment: Subsegment) -> None: ... + def end_subsegment(self, end_time: time.struct_time | None = ...): ... + def get_trace_entity(self): ... + def set_trace_entity(self, trace_entity: Entity) -> None: ... + def clear_trace_entities(self) -> None: ... + def handle_context_missing(self) -> None: ... + @property + def context_missing(self): ... + @context_missing.setter + def context_missing(self, value: str) -> None: ... 
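Context above holds the per-thread entity stack and a configurable context_missing strategy. The strategy is normally set through the recorder; a one-line sketch, assuming the documented LOG_ERROR value:

    from aws_xray_sdk.core import xray_recorder

    # Log an error instead of raising when tracing code runs outside a segment.
    xray_recorder.configure(context_missing="LOG_ERROR")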
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/daemon_config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/daemon_config.pyi new file mode 100644 index 00000000..2cbec2ee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/daemon_config.pyi @@ -0,0 +1,15 @@ +from .exceptions.exceptions import InvalidDaemonAddressException as InvalidDaemonAddressException + +DAEMON_ADDRESS_KEY: str +DEFAULT_ADDRESS: str + +class DaemonConfig: + def __init__(self, daemon_address=...) -> None: ... + @property + def udp_ip(self): ... + @property + def udp_port(self): ... + @property + def tcp_ip(self): ... + @property + def tcp_port(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/emitters/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/emitters/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/emitters/udp_emitter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/emitters/udp_emitter.pyi new file mode 100644 index 00000000..4d5bfb3f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/emitters/udp_emitter.pyi @@ -0,0 +1,19 @@ +from logging import Logger + +from aws_xray_sdk.core.daemon_config import DaemonConfig as DaemonConfig + +from ..exceptions.exceptions import InvalidDaemonAddressException as InvalidDaemonAddressException + +log: Logger +PROTOCOL_HEADER: str +PROTOCOL_DELIMITER: str +DEFAULT_DAEMON_ADDRESS: str + +class UDPEmitter: + def __init__(self, daemon_address=...) -> None: ... + def send_entity(self, entity) -> None: ... + def set_daemon_address(self, address) -> None: ... + @property + def ip(self): ... + @property + def port(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/exceptions/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/exceptions/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/exceptions/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/exceptions/exceptions.pyi new file mode 100644 index 00000000..82d208b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/exceptions/exceptions.pyi @@ -0,0 +1,8 @@ +class InvalidSamplingManifestError(Exception): ... +class SegmentNotFoundException(Exception): ... +class InvalidDaemonAddressException(Exception): ... +class SegmentNameMissingException(Exception): ... +class SubsegmentNameMissingException(Exception): ... +class FacadeSegmentMutationException(Exception): ... +class MissingPluginNames(Exception): ... +class AlreadyEndedException(Exception): ... 
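DaemonConfig splits the daemon address into the UDP/TCP host-port pairs consumed by UDPEmitter and the sampling poller. A sketch using the SDK's default local address; the exact accepted address formats are an assumption here:

    from aws_xray_sdk.core.daemon_config import DaemonConfig

    cfg = DaemonConfig("127.0.0.1:2000")  # same shape as AWS_XRAY_DAEMON_ADDRESS
    print(cfg.udp_ip, cfg.udp_port)
    print(cfg.tcp_ip, cfg.tcp_port)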
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi new file mode 100644 index 00000000..0c254da6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/lambda_launcher.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete +from logging import Logger + +from aws_xray_sdk import global_sdk_config as global_sdk_config + +from .context import Context as Context +from .models.facade_segment import FacadeSegment as FacadeSegment +from .models.trace_header import TraceHeader as TraceHeader + +log: Logger +LAMBDA_TRACE_HEADER_KEY: str +LAMBDA_TASK_ROOT_KEY: str +TOUCH_FILE_DIR: str +TOUCH_FILE_PATH: str + +def check_in_lambda(): ... + +class LambdaContext(Context): + def __init__(self) -> None: ... + def put_segment(self, segment) -> None: ... + def end_segment(self, end_time: Incomplete | None = ...) -> None: ... + def put_subsegment(self, subsegment) -> None: ... + def get_trace_entity(self): ... + @property + def context_missing(self) -> None: ... + @context_missing.setter + def context_missing(self, value) -> None: ... + def handle_context_missing(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/default_dynamic_naming.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/default_dynamic_naming.pyi new file mode 100644 index 00000000..8103c32a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/default_dynamic_naming.pyi @@ -0,0 +1,5 @@ +from ..utils.search_pattern import wildcard_match as wildcard_match + +class DefaultDynamicNaming: + def __init__(self, pattern, fallback) -> None: ... + def get_name(self, host_name): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/dummy_entities.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/dummy_entities.pyi new file mode 100644 index 00000000..018d9f3d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/dummy_entities.pyi @@ -0,0 +1,29 @@ +from .noop_traceid import NoOpTraceId as NoOpTraceId +from .segment import Segment as Segment +from .subsegment import Subsegment as Subsegment +from .traceid import TraceId as TraceId + +class DummySegment(Segment): + sampled: bool + def __init__(self, name: str = ...) -> None: ... + def set_aws(self, aws_meta) -> None: ... + def put_http_meta(self, key, value) -> None: ... + def put_annotation(self, key, value) -> None: ... + def put_metadata(self, key, value, namespace: str = ...) -> None: ... + def set_user(self, user) -> None: ... + def set_service(self, service_info) -> None: ... + def apply_status_code(self, status_code) -> None: ... 
+ def add_exception(self, exception, stack, remote: bool = ...) -> None: ... + def serialize(self) -> None: ... + +class DummySubsegment(Subsegment): + sampled: bool + def __init__(self, segment, name: str = ...) -> None: ... + def set_aws(self, aws_meta) -> None: ... + def put_http_meta(self, key, value) -> None: ... + def put_annotation(self, key, value) -> None: ... + def put_metadata(self, key, value, namespace: str = ...) -> None: ... + def set_sql(self, sql) -> None: ... + def apply_status_code(self, status_code) -> None: ... + def add_exception(self, exception, stack, remote: bool = ...) -> None: ... + def serialize(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi new file mode 100644 index 00000000..8bba421c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/entity.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete +from logging import Logger +from traceback import StackSummary +from typing import Any + +from ..exceptions.exceptions import AlreadyEndedException as AlreadyEndedException +from ..utils.compat import annotation_value_types as annotation_value_types, string_types as string_types +from ..utils.conversion import metadata_to_dict as metadata_to_dict +from . import http as http +from .throwable import Throwable as Throwable + +log: Logger +ORIGIN_TRACE_HEADER_ATTR_KEY: str + +class Entity: + id: Any + name: Any + start_time: Any + parent_id: Any + sampled: bool + in_progress: bool + http: Any + annotations: Any + metadata: Any + aws: Any + cause: Any + subsegments: Any + end_time: Any + def __init__(self, name, entity_id: Incomplete | None = ...) -> None: ... + def close(self, end_time: Incomplete | None = ...) -> None: ... + def add_subsegment(self, subsegment) -> None: ... + def remove_subsegment(self, subsegment) -> None: ... + def put_http_meta(self, key, value) -> None: ... + def put_annotation(self, key, value) -> None: ... + def put_metadata(self, key, value, namespace: str = ...) -> None: ... + def set_aws(self, aws_meta) -> None: ... + throttle: bool + def add_throttle_flag(self) -> None: ... + fault: bool + def add_fault_flag(self) -> None: ... + error: bool + def add_error_flag(self) -> None: ... + def apply_status_code(self, status_code) -> None: ... + def add_exception(self, exception: Exception, stack: StackSummary, remote: bool = ...) -> None: ... + def save_origin_trace_header(self, trace_header) -> None: ... + def get_origin_trace_header(self): ... + def serialize(self): ... + def to_dict(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi new file mode 100644 index 00000000..e00b7edd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/facade_segment.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from typing import Any + +from ..exceptions.exceptions import FacadeSegmentMutationException as FacadeSegmentMutationException +from .segment import Segment as Segment + +MUTATION_UNSUPPORTED_MESSAGE: str + +class FacadeSegment(Segment): + initializing: Any + def __init__(self, name, entityid, traceid, sampled) -> None: ... + def close(self, end_time: Incomplete | None = ...) -> None: ... + def put_http_meta(self, key, value) -> None: ... + def put_annotation(self, key, value) -> None: ... + def put_metadata(self, key, value, namespace: str = ...) -> None: ... + def set_aws(self, aws_meta) -> None: ... + def set_user(self, user) -> None: ... + def add_throttle_flag(self) -> None: ... + def add_fault_flag(self) -> None: ... + def add_error_flag(self) -> None: ... + def add_exception(self, exception, stack, remote: bool = ...) -> None: ... + def apply_status_code(self, status_code) -> None: ... + def serialize(self) -> None: ... + def ready_to_send(self): ... + def increment(self) -> None: ... + def decrement_ref_counter(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/http.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/http.pyi new file mode 100644 index 00000000..c42504de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/http.pyi @@ -0,0 +1,13 @@ +from typing import Any + +URL: str +METHOD: str +USER_AGENT: str +CLIENT_IP: str +X_FORWARDED_FOR: str +STATUS: str +CONTENT_LENGTH: str +XRAY_HEADER: str +ALT_XRAY_HEADER: str +request_keys: Any +response_keys: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/noop_traceid.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/noop_traceid.pyi new file mode 100644 index 00000000..0185c6b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/noop_traceid.pyi @@ -0,0 +1,6 @@ +class NoOpTraceId: + VERSION: str + DELIMITER: str + start_time: str + def __init__(self) -> None: ... + def to_id(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/segment.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/segment.pyi new file mode 100644 index 00000000..50c3a859 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/segment.pyi @@ -0,0 +1,43 @@ +from typing import Any + +from ..exceptions.exceptions import SegmentNameMissingException as SegmentNameMissingException +from ..recorder import AWSXRayRecorder +from ..utils.atomic_counter import AtomicCounter as AtomicCounter +from .entity import Entity as Entity +from .subsegment import Subsegment +from .traceid import TraceId as TraceId + +ORIGIN_TRACE_HEADER_ATTR_KEY: str + +class SegmentContextManager: + name: str + segment_kwargs: dict[str, Any] + recorder: AWSXRayRecorder + segment: Segment + def __init__(self, recorder: AWSXRayRecorder, name: str | None = ..., **segment_kwargs) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + +class Segment(Entity): + trace_id: str | None + id: str | None + in_progress: bool + sampled: bool + user: str | None + ref_counter: AtomicCounter + parent_id: str | None + service: dict[str, str] + def __init__( + self, name, entityid: str | None = ..., traceid: str | None = ..., parent_id: str | None = ..., sampled: bool = ... + ) -> None: ... + def add_subsegment(self, subsegment: Subsegment) -> None: ... + def increment(self) -> None: ... + def decrement_ref_counter(self) -> None: ... + def ready_to_send(self): ... + def get_total_subsegments_size(self): ... + def decrement_subsegments_size(self): ... + def remove_subsegment(self, subsegment) -> None: ... + def set_user(self, user) -> None: ... + def set_service(self, service_info) -> None: ... + def set_rule_name(self, rule_name) -> None: ... + def to_dict(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi new file mode 100644 index 00000000..a7851f39 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/subsegment.pyi @@ -0,0 +1,37 @@ +import time +from _typeshed import Incomplete +from typing import Any + +from ...core import AWSXRayRecorder +from ..exceptions.exceptions import SegmentNotFoundException as SegmentNotFoundException +from .entity import Entity as Entity +from .segment import Segment + +SUBSEGMENT_RECORDING_ATTRIBUTE: str + +def set_as_recording(decorated_func, wrapped) -> None: ... +def is_already_recording(func): ... +def subsegment_decorator(wrapped, instance, args, kwargs): ... + +class SubsegmentContextManager: + name: str | None + subsegment_kwargs: dict[str, Any] | None + recorder: AWSXRayRecorder + subsegment: Subsegment + def __init__(self, recorder: AWSXRayRecorder, name: Incomplete | None = ..., **subsegment_kwargs) -> None: ... + def __call__(self, wrapped, instance, args: list[Any], kwargs: dict[str, Any]): ... + def __enter__(self) -> Subsegment: ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... 
+ +class Subsegment(Entity): + parent_segment: Segment + trace_id: str + type: str + namespace: str + sql: dict[str, Any] + def __init__(self, name: str, namespace: str, segment: Segment) -> None: ... + def add_subsegment(self, subsegment: Subsegment) -> None: ... + def remove_subsegment(self, subsegment: Subsegment) -> None: ... + def close(self, end_time: time.struct_time | None = ...) -> None: ... + def set_sql(self, sql: dict[str, Any]) -> None: ... + def to_dict(self) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/throwable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/throwable.pyi new file mode 100644 index 00000000..bc26c4c2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/throwable.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from ..utils.compat import string_types as string_types + +log: Any + +class Throwable: + id: Any + message: Any + type: Any + remote: Any + stack: Any + def __init__(self, exception, stack, remote: bool = ...) -> None: ... + def to_dict(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/trace_header.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/trace_header.pyi new file mode 100644 index 00000000..cb34fbaa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/trace_header.pyi @@ -0,0 +1,25 @@ +from typing import Any +from typing_extensions import Self + +log: Any +ROOT: str +PARENT: str +SAMPLE: str +SELF: str +HEADER_DELIMITER: str + +class TraceHeader: + def __init__( + self, root: str | None = ..., parent: str | None = ..., sampled: bool | None = ..., data: dict[str, Any] | None = ... + ) -> None: ... + @classmethod + def from_header_str(cls, header) -> Self: ... + def to_header_str(self): ... + @property + def root(self): ... + @property + def parent(self): ... + @property + def sampled(self): ... + @property + def data(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/traceid.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/traceid.pyi new file mode 100644 index 00000000..6c00ccd4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/models/traceid.pyi @@ -0,0 +1,8 @@ +from typing import Any + +class TraceId: + VERSION: str + DELIMITER: str + start_time: Any + def __init__(self) -> None: ... + def to_id(self): ... 
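The TraceHeader stub above describes the helper for the X-Amzn-Trace-Id propagation header. A short illustrative round-trip, assuming the real package is installed; the header value itself is made up for the example and is not taken from the stubs:

    from aws_xray_sdk.core.models.trace_header import TraceHeader

    # Parse an illustrative trace header value into its components.
    header = TraceHeader.from_header_str("Root=1-5759e988-bd862e3fe1be46a994272793;Sampled=1")
    print(header.root, header.sampled)

    # Serialise it back for propagation to a downstream call.
    print(header.to_header_str())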
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/patcher.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/patcher.pyi new file mode 100644 index 00000000..e0c09577 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/patcher.pyi @@ -0,0 +1,14 @@ +from collections.abc import Iterable +from logging import Logger +from typing import Any + +from aws_xray_sdk import global_sdk_config as global_sdk_config + +from .utils.compat import PY2 as PY2, is_classmethod as is_classmethod, is_instance_method as is_instance_method + +log: Logger +SUPPORTED_MODULES: Any +NO_DOUBLE_PATCH: Any + +def patch_all(double_patch: bool = ...) -> None: ... +def patch(modules_to_patch: Iterable[str], raise_errors: bool = ..., ignore_module_patterns: str | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi new file mode 100644 index 00000000..801eff41 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ec2_plugin.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import Any + +log: Any +SERVICE_NAME: str +ORIGIN: str +IMDS_URL: str + +def initialize() -> None: ... +def get_token(): ... +def get_metadata(token: Incomplete | None = ...): ... +def parse_metadata_json(json_str): ... +def do_request(url, headers: Incomplete | None = ..., method: str = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ecs_plugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ecs_plugin.pyi new file mode 100644 index 00000000..56a4a90b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/ecs_plugin.pyi @@ -0,0 +1,7 @@ +from typing import Any + +log: Any +SERVICE_NAME: str +ORIGIN: str + +def initialize() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.pyi new file mode 100644 index 00000000..8fd94a5e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.pyi @@ -0,0 +1,8 @@ +from typing import Any + +log: Any +CONF_PATH: str +SERVICE_NAME: str +ORIGIN: str + +def initialize() -> None: ... 
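patch_all() and patch() in the patcher stub above are the instrumentation entry points. A minimal sketch of how they are typically called; the module names are examples and assume those libraries are importable in the target environment:

    from aws_xray_sdk.core.patcher import patch, patch_all

    # Instrument every supported library the SDK can find.
    patch_all()

    # Or instrument a specific subset, tolerating modules that fail to patch.
    patch(["requests", "sqlite3"], raise_errors=False)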
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/utils.pyi new file mode 100644 index 00000000..9d521e65 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/plugins/utils.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from ..exceptions.exceptions import MissingPluginNames as MissingPluginNames + +module_prefix: str +PLUGIN_MAPPING: Any + +def get_plugin_modules(plugins): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/recorder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/recorder.pyi new file mode 100644 index 00000000..7cfbf5b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/recorder.pyi @@ -0,0 +1,125 @@ +import time +from collections.abc import Callable, Iterable +from logging import Logger +from typing import Any + +from aws_xray_sdk import global_sdk_config as global_sdk_config +from aws_xray_sdk.version import VERSION as VERSION + +from .context import Context as Context +from .daemon_config import DaemonConfig as DaemonConfig +from .emitters.udp_emitter import UDPEmitter as UDPEmitter +from .exceptions.exceptions import ( + SegmentNameMissingException as SegmentNameMissingException, + SegmentNotFoundException as SegmentNotFoundException, +) +from .lambda_launcher import check_in_lambda as check_in_lambda +from .models.default_dynamic_naming import DefaultDynamicNaming as DefaultDynamicNaming +from .models.dummy_entities import DummySegment as DummySegment, DummySubsegment as DummySubsegment +from .models.segment import Segment as Segment, SegmentContextManager as SegmentContextManager +from .models.subsegment import Subsegment as Subsegment, SubsegmentContextManager as SubsegmentContextManager +from .plugins.utils import get_plugin_modules as get_plugin_modules +from .sampling.local.sampler import LocalSampler +from .sampling.sampler import DefaultSampler +from .streaming.default_streaming import DefaultStreaming as DefaultStreaming +from .utils import stacktrace as stacktrace +from .utils.compat import string_types as string_types + +log: Logger +TRACING_NAME_KEY: str +DAEMON_ADDR_KEY: str +CONTEXT_MISSING_KEY: str +XRAY_META: Any +SERVICE_INFO: Any + +class AWSXRayRecorder: + def __init__(self) -> None: ... + def configure( + self, + sampling: bool | None = ..., + plugins: Iterable[str] | None = ..., + context_missing: str | None = ..., + sampling_rules: dict[str, Any] | str | None = ..., + daemon_address: str | None = ..., + service: str | None = ..., + context: Context | None = ..., + emitter: UDPEmitter | None = ..., + streaming: DefaultStreaming | None = ..., + dynamic_naming: DefaultDynamicNaming | None = ..., + streaming_threshold: int | None = ..., + max_trace_back: int | None = ..., + sampler: LocalSampler | DefaultSampler | None = ..., + stream_sql: bool | None = ..., + ) -> None: ... + def in_segment(self, name: str | None = ..., **segment_kwargs) -> SegmentContextManager: ... + def in_subsegment(self, name: str | None = ..., **subsegment_kwargs) -> SubsegmentContextManager: ... 
+ def begin_segment( + self, name: str | None = ..., traceid: str | None = ..., parent_id: str | None = ..., sampling: bool | None = ... + ) -> Segment | DummySegment: ... + def end_segment(self, end_time: time.struct_time | None = ...) -> None: ... + def current_segment(self) -> Segment: ... + def begin_subsegment(self, name: str, namespace: str = ...) -> DummySubsegment | Subsegment | None: ... + def current_subsegment(self) -> Subsegment | DummySubsegment | None: ... + def end_subsegment(self, end_time: time.struct_time | None = ...) -> None: ... + def put_annotation(self, key: str, value: Any) -> None: ... + def put_metadata(self, key: str, value: Any, namespace: str = ...) -> None: ... + def is_sampled(self) -> bool: ... + def get_trace_entity(self) -> Segment | Subsegment | DummySegment | DummySubsegment: ... + def set_trace_entity(self, trace_entity: Segment | Subsegment | DummySegment | DummySubsegment) -> None: ... + def clear_trace_entities(self) -> None: ... + def stream_subsegments(self) -> None: ... + def capture(self, name: str | None = ...) -> SubsegmentContextManager: ... + def record_subsegment( + self, + wrapped: Callable[..., Any], + instance: Any, + args: list[Any], + kwargs: dict[str, Any], + name: str, + namespace: str, + meta_processor: Callable[..., object], + ) -> Any: ... + @property + def enabled(self) -> bool: ... + @enabled.setter + def enabled(self, value: bool) -> None: ... + @property + def sampling(self) -> bool: ... + @sampling.setter + def sampling(self, value: bool) -> None: ... + @property + def sampler(self) -> LocalSampler | DefaultSampler: ... + @sampler.setter + def sampler(self, value: LocalSampler | DefaultSampler) -> None: ... + @property + def service(self) -> str: ... + @service.setter + def service(self, value: str) -> None: ... + @property + def dynamic_naming(self) -> Any | DefaultDynamicNaming: ... + @dynamic_naming.setter + def dynamic_naming(self, value: Any | DefaultDynamicNaming) -> None: ... + @property + def context(self) -> Context: ... + @context.setter + def context(self, cxt: Context) -> None: ... + @property + def emitter(self) -> UDPEmitter: ... + @emitter.setter + def emitter(self, value: UDPEmitter) -> None: ... + @property + def streaming(self) -> DefaultStreaming: ... + @streaming.setter + def streaming(self, value: DefaultStreaming) -> None: ... + @property + def streaming_threshold(self) -> int: ... + @streaming_threshold.setter + def streaming_threshold(self, value: int) -> None: ... + @property + def max_trace_back(self) -> int: ... + @max_trace_back.setter + def max_trace_back(self, value: int) -> None: ... + @property + def stream_sql(self) -> bool: ... + @stream_sql.setter + def stream_sql(self, value: bool) -> None: ... 
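AWSXRayRecorder above is the SDK's central object. The sketch below strings together a few of the typed entry points: configure(), the in_segment()/in_subsegment() context managers, and put_annotation()/put_metadata(). It is illustrative only and sticks to parameters that appear in the stubbed signatures:

    from aws_xray_sdk.core.recorder import AWSXRayRecorder

    recorder = AWSXRayRecorder()
    recorder.configure(service="example-service", sampling=False, context_missing="LOG_ERROR")

    # __enter__ is untyped in the stub; in the real SDK it yields the started
    # segment (assumption), so annotations can be attached to it directly.
    with recorder.in_segment("batch-job") as segment:
        segment.put_annotation("version", "1.0")
        with recorder.in_subsegment("load-config"):
            recorder.put_metadata("source", "local", namespace="default")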
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/connector.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/connector.pyi new file mode 100644 index 00000000..8edea3c5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/connector.pyi @@ -0,0 +1,17 @@ +from datetime import datetime as datetime + +from aws_xray_sdk.core.context import Context as Context +from aws_xray_sdk.core.models.dummy_entities import DummySegment as DummySegment +from aws_xray_sdk.core.utils.compat import PY2 as PY2 + +from .sampling_rule import SamplingRule as SamplingRule + +class ServiceConnector: + def __init__(self) -> None: ... + def fetch_sampling_rules(self): ... + def fetch_sampling_target(self, rules): ... + def setup_xray_client(self, ip, port, client) -> None: ... + @property + def context(self): ... + @context.setter + def context(self, v) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/reservoir.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/reservoir.pyi new file mode 100644 index 00000000..aa5836b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/reservoir.pyi @@ -0,0 +1,8 @@ +from typing import Any + +class Reservoir: + traces_per_sec: Any + used_this_sec: int + this_sec: Any + def __init__(self, traces_per_sec: int = ...) -> None: ... + def take(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampler.pyi new file mode 100644 index 00000000..76882c9f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampler.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import Any + +from ...exceptions.exceptions import InvalidSamplingManifestError as InvalidSamplingManifestError +from .sampling_rule import SamplingRule as SamplingRule + +local_sampling_rule: Any +SUPPORTED_RULE_VERSION: Any + +class LocalSampler: + def __init__(self, rules=...) -> None: ... + def should_trace(self, sampling_req: Incomplete | None = ...): ... + def load_local_rules(self, rules) -> None: ... 
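LocalSampler above accepts an optional local rule manifest. A hedged sketch follows; the manifest layout shown is an assumption based on the SDK's documented version-2 local rule format and is not spelled out anywhere in the stub:

    from aws_xray_sdk.core.sampling.local.sampler import LocalSampler

    # Hypothetical version-2 manifest: trace the first request each second,
    # then five percent of the remainder.
    local_rules = {
        "version": 2,
        "default": {"fixed_target": 1, "rate": 0.05},
        "rules": [],
    }

    sampler = LocalSampler(local_rules)
    # With no request details supplied, the decision falls back to the default rule.
    print(sampler.should_trace())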
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampling_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampling_rule.pyi new file mode 100644 index 00000000..ff8f23b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/local/sampling_rule.pyi @@ -0,0 +1,28 @@ +from aws_xray_sdk.core.utils.search_pattern import wildcard_match as wildcard_match + +from ...exceptions.exceptions import InvalidSamplingManifestError as InvalidSamplingManifestError +from .reservoir import Reservoir as Reservoir + +class SamplingRule: + FIXED_TARGET: str + RATE: str + HOST: str + METHOD: str + PATH: str + SERVICE_NAME: str + def __init__(self, rule_dict, version: int = ..., default: bool = ...) -> None: ... + def applies(self, host, method, path): ... + @property + def fixed_target(self): ... + @property + def rate(self): ... + @property + def host(self): ... + @property + def method(self): ... + @property + def path(self): ... + @property + def reservoir(self): ... + @property + def version(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/reservoir.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/reservoir.pyi new file mode 100644 index 00000000..322d1d38 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/reservoir.pyi @@ -0,0 +1,15 @@ +from enum import Enum + +class Reservoir: + def __init__(self) -> None: ... + def borrow_or_take(self, now, can_borrow): ... + def load_quota(self, quota, TTL, interval) -> None: ... + @property + def quota(self): ... + @property + def TTL(self): ... + +class ReservoirDecision(Enum): + TAKE: str + BORROW: str + NO: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_cache.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_cache.pyi new file mode 100644 index 00000000..cccd9105 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_cache.pyi @@ -0,0 +1,17 @@ +from typing import Any + +TTL: Any + +class RuleCache: + def __init__(self) -> None: ... + def get_matched_rule(self, sampling_req, now): ... + def load_rules(self, rules) -> None: ... + def load_targets(self, targets_dict) -> None: ... + @property + def rules(self): ... + @rules.setter + def rules(self, v) -> None: ... + @property + def last_updated(self): ... + @last_updated.setter + def last_updated(self, v) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_poller.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_poller.pyi new file mode 100644 index 00000000..dc4b3633 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/rule_poller.pyi @@ -0,0 +1,9 @@ +from typing import Any + +log: Any +DEFAULT_INTERVAL: Any + +class RulePoller: + def __init__(self, cache, connector) -> None: ... + def start(self) -> None: ... + def wake_up(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi new file mode 100644 index 00000000..eb5143d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampler.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from typing import Any + +from .connector import ServiceConnector as ServiceConnector +from .local.sampler import LocalSampler as LocalSampler +from .reservoir import ReservoirDecision as ReservoirDecision +from .rule_cache import RuleCache as RuleCache +from .rule_poller import RulePoller as RulePoller +from .target_poller import TargetPoller as TargetPoller + +log: Any + +class DefaultSampler: + def __init__(self) -> None: ... + def start(self) -> None: ... + def should_trace(self, sampling_req: Incomplete | None = ...): ... + def load_local_rules(self, rules) -> None: ... + def load_settings(self, daemon_config, context, origin: Incomplete | None = ...) -> None: ... + @property + def xray_client(self): ... + @xray_client.setter + def xray_client(self, v) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi new file mode 100644 index 00000000..0893db19 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/sampling_rule.pyi @@ -0,0 +1,47 @@ +from _typeshed import Incomplete + +from ..utils.search_pattern import wildcard_match as wildcard_match +from .reservoir import Reservoir as Reservoir + +class SamplingRule: + def __init__( + self, + name, + priority, + rate, + reservoir_size, + host: Incomplete | None = ..., + method: Incomplete | None = ..., + path: Incomplete | None = ..., + service: Incomplete | None = ..., + service_type: Incomplete | None = ..., + ) -> None: ... + def match(self, sampling_req): ... + def is_default(self): ... + def snapshot_statistics(self): ... + def merge(self, rule) -> None: ... + def ever_matched(self): ... + def time_to_report(self): ... + def increment_request_count(self) -> None: ... + def increment_borrow_count(self) -> None: ... + def increment_sampled_count(self) -> None: ... + @property + def rate(self): ... + @rate.setter + def rate(self, v) -> None: ... + @property + def name(self): ... + @property + def priority(self): ... + @property + def reservoir(self): ... + @reservoir.setter + def reservoir(self, v) -> None: ... + @property + def can_borrow(self): ... 
+ @property + def request_count(self): ... + @property + def borrow_count(self): ... + @property + def sampled_count(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/target_poller.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/target_poller.pyi new file mode 100644 index 00000000..4be740c6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/sampling/target_poller.pyi @@ -0,0 +1,7 @@ +from typing import Any + +log: Any + +class TargetPoller: + def __init__(self, cache, rule_poller, connector) -> None: ... + def start(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/streaming/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/streaming/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/streaming/default_streaming.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/streaming/default_streaming.pyi new file mode 100644 index 00000000..21309ebd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/streaming/default_streaming.pyi @@ -0,0 +1,8 @@ +class DefaultStreaming: + def __init__(self, streaming_threshold: int = ...) -> None: ... + def is_eligible(self, segment): ... + def stream(self, entity, callback) -> None: ... + @property + def streaming_threshold(self): ... + @streaming_threshold.setter + def streaming_threshold(self, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/atomic_counter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/atomic_counter.pyi new file mode 100644 index 00000000..9cf8ebcd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/atomic_counter.pyi @@ -0,0 +1,9 @@ +from typing import Any + +class AtomicCounter: + value: Any + def __init__(self, initial: int = ...) -> None: ... + def increment(self, num: int = ...): ... + def decrement(self, num: int = ...): ... + def get_current(self): ... + def reset(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/compat.pyi new file mode 100644 index 00000000..32562d54 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/compat.pyi @@ -0,0 +1,10 @@ +from typing import Any +from typing_extensions import Literal + +PY2: Literal[False] +PY35: Literal[True] +annotation_value_types: Any +string_types = str + +def is_classmethod(func): ... +def is_instance_method(parent_class, func_name, func): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/conversion.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/conversion.pyi new file mode 100644 index 00000000..f8c14141 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/conversion.pyi @@ -0,0 +1,5 @@ +from typing import Any + +log: Any + +def metadata_to_dict(obj): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/search_pattern.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/search_pattern.pyi new file mode 100644 index 00000000..5206e243 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/search_pattern.pyi @@ -0,0 +1 @@ +def wildcard_match(pattern, text, case_insensitive: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/stacktrace.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/stacktrace.pyi new file mode 100644 index 00000000..05d8dc2c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/core/utils/stacktrace.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def get_stacktrace(limit: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/sdk_config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/sdk_config.pyi new file mode 100644 index 00000000..c76b34d0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/sdk_config.pyi @@ -0,0 +1,11 @@ +from logging import Logger + +log: Logger + +class SDKConfig: + XRAY_ENABLED_KEY: str + DISABLED_ENTITY_NAME: str + @classmethod + def sdk_enabled(cls): ... + @classmethod + def set_sdk_enabled(cls, value) -> None: ... 
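wildcard_match() in the one-line search_pattern stub above is the glob-style matcher the sampling rules rely on. A small illustrative call; the '*' and '?' semantics are inferred from the SDK rather than stated in the stub:

    from aws_xray_sdk.core.utils.search_pattern import wildcard_match

    # '*' matches any run of characters and '?' matches exactly one character.
    print(wildcard_match("api.*.example.com", "api.eu-west-1.example.com"))  # expected: True
    print(wildcard_match("/users/?", "/users/42", case_insensitive=True))    # expected: False, '?' is one char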
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/version.pyi new file mode 100644 index 00000000..3acee936 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/aws-xray-sdk/aws_xray_sdk/version.pyi @@ -0,0 +1 @@ +VERSION: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/METADATA.toml new file mode 100644 index 00000000..855c0866 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/METADATA.toml @@ -0,0 +1,5 @@ +version = "2.11.*" +requires = ["types-pytz"] + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/__init__.pyi new file mode 100644 index 00000000..c046a8c0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/__init__.pyi @@ -0,0 +1,8 @@ +from babel.core import ( + Locale as Locale, + UnknownLocaleError as UnknownLocaleError, + default_locale as default_locale, + get_locale_identifier as get_locale_identifier, + negotiate_locale as negotiate_locale, + parse_locale as parse_locale, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/core.pyi new file mode 100644 index 00000000..c969492c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/core.pyi @@ -0,0 +1,149 @@ +from collections.abc import Iterable, Mapping +from typing import Any, overload +from typing_extensions import Literal, TypeAlias + +from babel.localedata import LocaleDataDict +from babel.plural import PluralRule + +class UnknownLocaleError(Exception): + identifier: str + def __init__(self, identifier: str) -> None: ... + +class Locale: + language: str + territory: str | None + script: str | None + variant: str | None + def __init__( + self, language: str, territory: str | None = ..., script: str | None = ..., variant: str | None = ... + ) -> None: ... + @classmethod + def default(cls, category: str | None = ..., aliases: Mapping[str, str] = ...) -> Locale: ... + @classmethod + def negotiate( + cls, preferred: Iterable[str], available: Iterable[str], sep: str = ..., aliases: Mapping[str, str] = ... + ) -> Locale | None: ... + @overload + @classmethod + def parse(cls, identifier: None, sep: str = ..., resolve_likely_subtags: bool = ...) -> None: ... + @overload + @classmethod + def parse(cls, identifier: str | Locale, sep: str = ..., resolve_likely_subtags: bool = ...) -> Locale: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def get_display_name(self, locale: Locale | str | None = ...) -> str | None: ... + @property + def display_name(self) -> str | None: ... + def get_language_name(self, locale: Locale | str | None = ...) -> str | None: ... + @property + def language_name(self) -> str | None: ... + def get_territory_name(self, locale: Locale | str | None = ...) -> str | None: ... 
+ @property + def territory_name(self) -> str | None: ... + def get_script_name(self, locale: Locale | str | None = ...) -> str | None: ... + @property + def script_name(self) -> str | None: ... + @property + def english_name(self) -> str | None: ... + @property + def languages(self) -> LocaleDataDict: ... + @property + def scripts(self) -> LocaleDataDict: ... + @property + def territories(self) -> LocaleDataDict: ... + @property + def variants(self) -> LocaleDataDict: ... + @property + def currencies(self) -> LocaleDataDict: ... + @property + def currency_symbols(self) -> LocaleDataDict: ... + @property + def number_symbols(self) -> LocaleDataDict: ... + @property + def decimal_formats(self) -> LocaleDataDict: ... + @property + def compact_decimal_formats(self) -> LocaleDataDict: ... + @property + def currency_formats(self) -> LocaleDataDict: ... + @property + def percent_formats(self) -> LocaleDataDict: ... + @property + def scientific_formats(self) -> LocaleDataDict: ... + @property + def periods(self) -> LocaleDataDict: ... + @property + def day_periods(self) -> LocaleDataDict: ... + @property + def day_period_rules(self) -> LocaleDataDict: ... + @property + def days(self) -> LocaleDataDict: ... + @property + def months(self) -> LocaleDataDict: ... + @property + def quarters(self) -> LocaleDataDict: ... + @property + def eras(self) -> LocaleDataDict: ... + @property + def time_zones(self) -> LocaleDataDict: ... + @property + def meta_zones(self) -> LocaleDataDict: ... + @property + def zone_formats(self) -> LocaleDataDict: ... + @property + def first_week_day(self) -> int: ... + @property + def weekend_start(self) -> int: ... + @property + def weekend_end(self) -> int: ... + @property + def min_week_days(self) -> int: ... + @property + def date_formats(self) -> LocaleDataDict: ... + @property + def time_formats(self) -> LocaleDataDict: ... + @property + def datetime_formats(self) -> LocaleDataDict: ... + @property + def datetime_skeletons(self) -> LocaleDataDict: ... + @property + def interval_formats(self) -> LocaleDataDict: ... + @property + def plural_form(self) -> PluralRule: ... + @property + def list_patterns(self) -> LocaleDataDict: ... + @property + def ordinal_form(self) -> PluralRule: ... + @property + def measurement_systems(self) -> LocaleDataDict: ... + @property + def character_order(self) -> str: ... + @property + def text_direction(self) -> str: ... + @property + def unit_display_names(self) -> LocaleDataDict: ... + +def default_locale(category: str | None = ..., aliases: Mapping[str, str] = ...) -> str | None: ... +def negotiate_locale( + preferred: Iterable[str], available: Iterable[str], sep: str = ..., aliases: Mapping[str, str] = ... +) -> str | None: ... +def parse_locale(identifier: str, sep: str = ...) -> tuple[str, str | None, str | None, str | None]: ... +def get_locale_identifier(tup: tuple[str, str | None, str | None, str | None], sep: str = ...) -> str: ... +def get_global(key: _GLOBAL_KEY) -> Mapping[str, Any]: ... 
+ +_GLOBAL_KEY: TypeAlias = Literal[ + "all_currencies", + "currency_fractions", + "language_aliases", + "likely_subtags", + "parent_exceptions", + "script_aliases", + "territory_aliases", + "territory_currencies", + "territory_languages", + "territory_zones", + "variant_aliases", + "windows_zone_mapping", + "zone_aliases", + "zone_territories", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/dates.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/dates.pyi new file mode 100644 index 00000000..36a5b41a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/dates.pyi @@ -0,0 +1,174 @@ +from collections.abc import Iterable +from datetime import date, datetime, time, timedelta, tzinfo +from typing import SupportsInt, overload +from typing_extensions import Literal, TypeAlias + +from babel.core import Locale +from babel.localedata import LocaleDataDict +from babel.util import LOCALTZ as LOCALTZ, UTC as UTC +from pytz import BaseTzInfo + +# The module contents here are organized the same way they are in the API documentation at +# http://babel.pocoo.org/en/latest/api/dates.html + +# Date and Time Formatting +_Instant: TypeAlias = date | time | float | None +_PredefinedTimeFormat: TypeAlias = Literal["full", "long", "medium", "short"] +_Context: TypeAlias = Literal["format", "stand-alone"] + +def format_datetime( + datetime: _Instant = ..., + format: _PredefinedTimeFormat | str = ..., + tzinfo: tzinfo | None = ..., + locale: Locale | str | None = ..., +) -> str: ... +def format_date(date: date | None = ..., format: _PredefinedTimeFormat | str = ..., locale: Locale | str | None = ...) -> str: ... +def format_time( + time: time | datetime | float | None = ..., + format: _PredefinedTimeFormat | str = ..., + tzinfo: tzinfo | None = ..., + locale: Locale | str | None = ..., +) -> str: ... +def format_timedelta( + delta: timedelta | int, + granularity: Literal["year", "month", "week", "day", "hour", "minute", "second"] = ..., + threshold: float = ..., + add_direction: bool = ..., + format: Literal["narrow", "short", "medium", "long"] = ..., + locale: Locale | str | None = ..., +) -> str: ... +def format_skeleton( + skeleton: str, datetime: _Instant = ..., tzinfo: tzinfo | None = ..., fuzzy: bool = ..., locale: Locale | str | None = ... +) -> str: ... +def format_interval( + start: _Instant, + end: _Instant, + skeleton: str | None = ..., + tzinfo: tzinfo | None = ..., + fuzzy: bool = ..., + locale: Locale | str | None = ..., +) -> str: ... + +# Timezone Functionality +@overload +def get_timezone(zone: str | BaseTzInfo | None = ...) -> BaseTzInfo: ... +@overload +def get_timezone(zone: tzinfo) -> tzinfo: ... +def get_timezone_gmt( + datetime: _Instant = ..., + width: Literal["long", "short", "iso8601", "iso8601_short"] = ..., + locale: Locale | str | None = ..., + return_z: bool = ..., +) -> str: ... + +_DtOrTzinfo: TypeAlias = datetime | tzinfo | str | int | time | None + +def get_timezone_location(dt_or_tzinfo: _DtOrTzinfo = ..., locale: Locale | str | None = ..., return_city: bool = ...) -> str: ... +def get_timezone_name( + dt_or_tzinfo: _DtOrTzinfo = ..., + width: Literal["long", "short"] = ..., + uncommon: bool = ..., + locale: Locale | str | None = ..., + zone_variant: Literal["generic", "daylight", "standard"] | None = ..., + return_zone: bool = ..., +) -> str: ... 
+ +# Note: While Babel accepts any tzinfo for the most part, the get_next_timeout_transition() +# function requires a tzinfo that is produced by get_timezone()/pytz AND has DST info. +# The typing here will help you with the first requirement, but will not protect against +# pytz tzinfo's without DST info, like what you get from get_timezone("UTC") for instance. +def get_next_timezone_transition(zone: BaseTzInfo | None = ..., dt: _Instant = ...) -> TimezoneTransition: ... + +class TimezoneTransition: + # This class itself is not included in the documentation, yet it is mentioned by name. + # See https://github.com/python-babel/babel/issues/823 + activates: datetime + from_tzinfo: tzinfo + to_tzinfo: tzinfo + reference_date: datetime | None + def __init__( + self, activates: datetime, from_tzinfo: tzinfo, to_tzinfo: tzinfo, reference_date: datetime | None = ... + ) -> None: ... + @property + def from_tz(self) -> str: ... + @property + def to_tz(self) -> str: ... + @property + def from_offset(self) -> int: ... + @property + def to_offset(self) -> int: ... + +# Data Access +def get_period_names( + width: Literal["abbreviated", "narrow", "wide"] = ..., context: _Context = ..., locale: Locale | str | None = ... +) -> LocaleDataDict: ... +def get_day_names( + width: Literal["abbreviated", "narrow", "short", "wide"] = ..., context: _Context = ..., locale: Locale | str | None = ... +) -> LocaleDataDict: ... +def get_month_names( + width: Literal["abbreviated", "narrow", "wide"] = ..., context: _Context = ..., locale: Locale | str | None = ... +) -> LocaleDataDict: ... +def get_quarter_names( + width: Literal["abbreviated", "narrow", "wide"] = ..., context: _Context = ..., locale: Locale | str | None = ... +) -> LocaleDataDict: ... +def get_era_names(width: Literal["abbreviated", "narrow", "wide"] = ..., locale: Locale | str | None = ...) -> LocaleDataDict: ... +def get_date_format(format: _PredefinedTimeFormat = ..., locale: Locale | str | None = ...) -> DateTimePattern: ... +def get_datetime_format(format: _PredefinedTimeFormat = ..., locale: Locale | str | None = ...) -> DateTimePattern: ... +def get_time_format(format: _PredefinedTimeFormat = ..., locale: Locale | str | None = ...) -> DateTimePattern: ... + +class ParseError(ValueError): ... + +# Basic Parsing +def parse_date(string: str, locale: Locale | str | None = ..., format: _PredefinedTimeFormat = ...) -> date: ... +def parse_time(string: str, locale: Locale | str | None = ..., format: _PredefinedTimeFormat = ...) -> time: ... +def parse_pattern(pattern: str) -> DateTimePattern: ... + +# Undocumented +NO_INHERITANCE_MARKER: str +LC_TIME: str | None +date_ = date +datetime_ = datetime +time_ = time + +TIMEDELTA_UNITS: tuple[tuple[str, int], ...] + +def get_period_id( + time: _Instant, tzinfo: BaseTzInfo | None = ..., type: Literal["selection"] | None = ..., locale: Locale | str | None = ... +): ... + +class DateTimePattern: + pattern: str + format: DateTimeFormat + def __init__(self, pattern: str, format: DateTimeFormat) -> None: ... + def __mod__(self, other: DateTimeFormat) -> str: ... + def apply(self, datetime: _Instant, locale: Locale | str | None) -> str: ... + +class DateTimeFormat: + value: date | time + locale: Locale + def __init__(self, value: date | time, locale: Locale | str) -> None: ... + def __getitem__(self, name: str) -> str: ... + def extract(self, char: str) -> int: ... + def format_era(self, char: str, num: int) -> str: ... + def format_year(self, char: str, num: int) -> str: ... 
+ def format_quarter(self, char: str, num: int) -> str: ... + def format_month(self, char: str, num: int) -> str: ... + def format_week(self, char: str, num: int) -> str: ... + def format_weekday(self, char: str = ..., num: int = ...) -> str: ... + def format_day_of_year(self, num: int) -> str: ... + def format_day_of_week_in_month(self) -> str: ... + def format_period(self, char: str, num: int) -> str: ... + def format_frac_seconds(self, num: int) -> str: ... + def format_milliseconds_in_day(self, num: int) -> str: ... + def format_timezone(self, char: str, num: int) -> str: ... + def format(self, value: SupportsInt, length: int) -> str: ... + def get_day_of_year(self, date: date | None = ...) -> int: ... + def get_week_number(self, day_of_period: int, day_of_week: int | None = ...) -> int: ... + +PATTERN_CHARS: dict[str, list[int] | None] +PATTERN_CHAR_ORDER: str + +def tokenize_pattern(pattern: str) -> list[tuple[str, str | tuple[str, int]]]: ... +def untokenize_pattern(tokens: Iterable[tuple[str, str | tuple[str, int]]]) -> str: ... +def split_interval_pattern(pattern: str) -> list[str]: ... +def match_skeleton(skeleton: str, options: Iterable[str], allow_different_fields: bool = ...) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/languages.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/languages.pyi new file mode 100644 index 00000000..14ede035 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/languages.pyi @@ -0,0 +1,2 @@ +def get_official_languages(territory: str, regional: bool = ..., de_facto: bool = ...) -> tuple[str, ...]: ... +def get_territory_language_info(territory: str) -> dict[str, dict[str, float | str | None]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/lists.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/lists.pyi new file mode 100644 index 00000000..da8d8fc0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/lists.pyi @@ -0,0 +1,12 @@ +from collections.abc import Iterable +from typing_extensions import Literal + +from babel.core import Locale + +DEFAULT_LOCALE: str | None + +def format_list( + lst: Iterable[str], + style: Literal["standard", "standard-short", "or", "or-short", "unit", "unit-short", "unit-narrow"] = ..., + locale: Locale | str | None = ..., +) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localedata.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localedata.pyi new file mode 100644 index 00000000..e2c1a63e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localedata.pyi @@ -0,0 +1,25 @@ +from collections.abc import Iterable, Iterator, Mapping, MutableMapping +from os import PathLike +from typing import Any + +def normalize_locale(name: str) -> str | None: ... +def resolve_locale_filename(name: PathLike[str] | str) -> str: ... +def exists(name: str) -> bool: ... +def locale_identifiers() -> list[str]: ... +def load(name: PathLike[str] | str, merge_inherited: bool = ...) -> dict[str, Any]: ... +def merge(dict1: MutableMapping[Any, Any], dict2: Mapping[Any, Any]) -> None: ... + +class Alias: + keys: tuple[str, ...] + def __init__(self, keys: Iterable[str]) -> None: ... 
+ def resolve(self, data: Mapping[str, Any]) -> Mapping[str, Any]: ... + +class LocaleDataDict(MutableMapping[Any, Any]): + base: Mapping[str, Any] + def __init__(self, data: Mapping[str | int | None, Any], base: Mapping[str | int | None, Any] | None = ...) -> None: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[str | int | None]: ... + def __getitem__(self, key: str | int | None) -> Any: ... + def __setitem__(self, key: str | int | None, value: Any) -> None: ... + def __delitem__(self, key: str | int | None) -> None: ... + def copy(self) -> LocaleDataDict: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localtime/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localtime/__init__.pyi new file mode 100644 index 00000000..e3b77225 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localtime/__init__.pyi @@ -0,0 +1,17 @@ +from datetime import datetime, timedelta, tzinfo + +from pytz import BaseTzInfo + +STDOFFSET: timedelta +DSTOFFSET: timedelta +DSTDIFF: timedelta +ZERO: timedelta + +class _FallbackLocalTimezone(tzinfo): + def utcoffset(self, dt: datetime | None) -> timedelta: ... + def dst(self, dt: datetime | None) -> timedelta: ... + def tzname(self, dt: datetime | None) -> str: ... + +def get_localzone() -> BaseTzInfo: ... + +LOCALTZ: BaseTzInfo | _FallbackLocalTimezone diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localtime/_unix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localtime/_unix.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localtime/_win32.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localtime/_win32.pyi new file mode 100644 index 00000000..013330c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/localtime/_win32.pyi @@ -0,0 +1,6 @@ +from typing import Any + +tz_names: dict[str, str] + +def valuestodict(key) -> dict[str, Any]: ... +def get_localzone_name() -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/__init__.pyi new file mode 100644 index 00000000..71d6cecf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/__init__.pyi @@ -0,0 +1 @@ +from babel.messages.catalog import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/catalog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/catalog.pyi new file mode 100644 index 00000000..625bdd0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/catalog.pyi @@ -0,0 +1,126 @@ +import datetime +from collections import OrderedDict +from collections.abc import Generator, Iterable, Iterator +from typing_extensions import TypeAlias + +from babel.core import Locale + +__all__ = ["Message", "Catalog", "TranslationError"] + +_MessageID: TypeAlias = str | tuple[str, ...] 
| list[str] + +class Message: + id: _MessageID + string: _MessageID + locations: list[tuple[str, int]] + flags: set[str] + auto_comments: list[str] + user_comments: list[str] + previous_id: list[str] + lineno: int | None + context: str | None + def __init__( + self, + id: str, + string: str = ..., + locations: Iterable[tuple[str, int]] = ..., + flags: Iterable[str] = ..., + auto_comments: Iterable[str] = ..., + user_comments: Iterable[str] = ..., + previous_id: _MessageID = ..., + lineno: int | None = ..., + context: str | None = ..., + ) -> None: ... + def __cmp__(self, other: Message) -> int: ... + def __gt__(self, other: Message) -> bool: ... + def __lt__(self, other: Message) -> bool: ... + def __ge__(self, other: Message) -> bool: ... + def __le__(self, other: Message) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def is_identical(self, other: Message) -> bool: ... + def clone(self) -> Message: ... + def check(self, catalog: Catalog | None = ...) -> list[TranslationError]: ... + @property + def fuzzy(self) -> bool: ... + @property + def pluralizable(self) -> bool: ... + @property + def python_format(self) -> bool: ... + +class TranslationError(Exception): ... + +class Catalog: + domain: str | None + project: str + version: str + copyright_holder: str + msgid_bugs_address: str + last_translator: str + language_team: str + charset: str + creation_date: datetime.datetime | str + revision_date: datetime.datetime | datetime.time | float | str + fuzzy: bool + obsolete: OrderedDict[str | tuple[str, str], Message] + def __init__( + self, + locale: str | Locale | None = ..., + domain: str | None = ..., + header_comment: str | None = ..., + project: str | None = ..., + version: str | None = ..., + copyright_holder: str | None = ..., + msgid_bugs_address: str | None = ..., + creation_date: datetime.datetime | str | None = ..., + revision_date: datetime.datetime | datetime.time | float | str | None = ..., + last_translator: str | None = ..., + language_team: str | None = ..., + charset: str | None = ..., + fuzzy: bool = ..., + ) -> None: ... + @property + def locale(self) -> Locale | None: ... + @locale.setter # Assigning a string looks up the right Locale object. + def locale(self, value: Locale | str | None) -> None: ... + @property + def locale_identifier(self) -> str | None: ... + @property + def header_comment(self) -> str: ... + @header_comment.setter + def header_comment(self, value: str) -> None: ... + @property + def mime_headers(self) -> list[tuple[str, str]]: ... + @mime_headers.setter + def mime_headers(self, value: Iterable[tuple[str | bytes, str | bytes]]) -> None: ... + @property + def num_plurals(self) -> int: ... + @property + def plural_expr(self) -> str: ... + @property + def plural_forms(self) -> str: ... + def __contains__(self, id: _MessageID) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[Message]: ... + def __delitem__(self, id: _MessageID) -> None: ... + def __getitem__(self, id: _MessageID) -> Message: ... + def __setitem__(self, id: _MessageID, message: Message) -> None: ... + def add( + self, + id: _MessageID, + string: _MessageID | None = ..., + locations: Iterable[tuple[str, int]] = ..., + flags: Iterable[str] = ..., + auto_comments: Iterable[str] = ..., + user_comments: Iterable[str] = ..., + previous_id: _MessageID = ..., + lineno: int | None = ..., + context: str | None = ..., + ) -> Message: ... 
+ def check(self) -> Generator[tuple[Message, list[TranslationError]], None, None]: ... + def get(self, id: _MessageID, context: str | None = ...): ... + def delete(self, id, context: str | None = ...) -> None: ... + def update( + self, template: Catalog, no_fuzzy_matching: bool = ..., update_header_comment: bool = ..., keep_user_comments: bool = ... + ) -> None: ... + def is_identical(self, other: Catalog) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/checkers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/checkers.pyi new file mode 100644 index 00000000..f115ecb3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/checkers.pyi @@ -0,0 +1,8 @@ +from collections.abc import Callable + +from babel.messages.catalog import Catalog, Message + +def num_plurals(catalog: Catalog | None, message: Message) -> None: ... +def python_format(catalog: Catalog | None, message: Message) -> None: ... + +checkers: list[Callable[[Catalog | None, Message], object]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/extract.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/extract.pyi new file mode 100644 index 00000000..4702a953 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/extract.pyi @@ -0,0 +1,85 @@ +from _typeshed import Incomplete, SupportsItems, SupportsRead, SupportsReadline +from collections.abc import Callable, Collection, Generator, Iterable, Mapping +from os import PathLike +from typing import Any, AnyStr, Protocol, overload +from typing_extensions import TypeAlias, TypedDict + +_Keyword: TypeAlias = tuple[int | tuple[int, int] | tuple[int, str], ...] | None + +GROUP_NAME: str +DEFAULT_KEYWORDS: dict[str, _Keyword] +DEFAULT_MAPPING: list[tuple[str, str]] +empty_msgid_warning: str + +@overload +def extract_from_dir( + dirname: AnyStr | PathLike[AnyStr], + method_map: Iterable[tuple[str, str]] = ..., + options_map: SupportsItems[str, dict[str, Any]] | None = ..., + keywords: Mapping[str, _Keyword] = ..., + comment_tags: Collection[str] = ..., + callback: Callable[[AnyStr, str, dict[str, Any]], object] | None = ..., + strip_comment_tags: bool = ..., + directory_filter: Callable[[str], bool] | None = ..., +) -> Generator[tuple[AnyStr, int, str | tuple[str, ...], list[str], str | None], None, None]: ... +@overload +def extract_from_dir( + dirname: None = ..., # No dirname causes os.getcwd() to be used, producing str. + method_map: Iterable[tuple[str, str]] = ..., + options_map: SupportsItems[str, dict[str, Any]] | None = ..., + keywords: Mapping[str, _Keyword] = ..., + comment_tags: Collection[str] = ..., + callback: Callable[[str, str, dict[str, Any]], object] | None = ..., + strip_comment_tags: bool = ..., + directory_filter: Callable[[str], bool] | None = ..., +) -> Generator[tuple[str, int, str | tuple[str, ...], list[str], str | None], None, None]: ... 
+def check_and_call_extract_file( + filepath: AnyStr | PathLike[AnyStr], + method_map: Iterable[tuple[str, str]], + options_map: SupportsItems[str, dict[str, Any]], + callback: Callable[[AnyStr, str, dict[str, Any]], object] | None, + keywords: Mapping[str, _Keyword], + comment_tags: Collection[str], + strip_comment_tags, + dirpath: Incomplete | None = ..., +) -> Generator[tuple[AnyStr, int, str | tuple[str, ...], list[str], str | None], None, None]: ... +def extract_from_file( + method, + filename: AnyStr | PathLike[AnyStr], + keywords: Mapping[str, _Keyword] = ..., + comment_tags: Collection[str] = ..., + options: dict[str, Any] | None = ..., + strip_comment_tags: bool = ..., +) -> list[tuple[AnyStr, int, str | tuple[str, ...], list[str], str | None]]: ... + +class _FileObj(SupportsRead[bytes], SupportsReadline[bytes], Protocol): + def seek(self, __offset: int, __whence: int = ...) -> int: ... + def tell(self) -> int: ... + +def extract( + method, + fileobj: _FileObj, + keywords: Mapping[str, _Keyword] = ..., + comment_tags: Collection[str] = ..., + options: dict[str, Any] | None = ..., + strip_comment_tags: bool = ..., +) -> Iterable[tuple[int, str | tuple[str, ...], list[str], str | None]]: ... +def extract_nothing( + fileobj: _FileObj, keywords: Mapping[str, _Keyword], comment_tags: Collection[str], options: dict[str, Any] +) -> Iterable[tuple[int, str | tuple[str, ...], list[str], str | None]]: ... + +class _PyOptions(TypedDict, total=False): + encoding: str + +def extract_python( + fileobj: _FileObj, keywords: Mapping[str, _Keyword], comment_tags: Collection[str], options: _PyOptions +) -> Iterable[tuple[int, str | tuple[str, ...], list[str], str | None]]: ... + +class _JSOptions(TypedDict, total=False): + encoding: str + jsx: bool + template_string: bool + +def extract_javascript( + fileobj: _FileObj, keywords: Mapping[str, _Keyword], comment_tags: Collection[str], options: _JSOptions +) -> Iterable[tuple[int, str | tuple[str, ...], list[str], str | None]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/frontend.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/frontend.pyi new file mode 100644 index 00000000..8bc7d5e1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/frontend.pyi @@ -0,0 +1,119 @@ +import abc +from _typeshed import Incomplete +from distutils.cmd import Command as _Command +from typing import Any + +def listify_value(arg, split: Incomplete | None = ...): ... + +class Command(_Command, metaclass=abc.ABCMeta): + as_args: Any + multiple_value_options: Any + boolean_options: Any + option_aliases: Any + option_choices: Any + log: Any + distribution: Any + verbose: bool + force: Any + help: int + finalized: int + def __init__(self, dist: Incomplete | None = ...) -> None: ... + +class compile_catalog(Command): + description: str + user_options: Any + boolean_options: Any + domain: str + directory: Any + input_file: Any + output_file: Any + locale: Any + use_fuzzy: bool + statistics: bool + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self): ... 
+ +class extract_messages(Command): + description: str + user_options: Any + boolean_options: Any + as_args: str + multiple_value_options: Any + option_aliases: Any + option_choices: Any + charset: str + keywords: Any + no_default_keywords: bool + mapping_file: Any + no_location: bool + add_location: Any + omit_header: bool + output_file: Any + input_dirs: Any + input_paths: Any + width: Any + no_wrap: bool + sort_output: bool + sort_by_file: bool + msgid_bugs_address: Any + copyright_holder: Any + project: Any + version: Any + add_comments: Any + strip_comments: bool + include_lineno: bool + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + +def check_message_extractors(dist, name, value) -> None: ... + +class init_catalog(Command): + description: str + user_options: Any + boolean_options: Any + output_dir: Any + output_file: Any + input_file: Any + locale: Any + domain: str + no_wrap: bool + width: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + +class update_catalog(Command): + description: str + user_options: Any + boolean_options: Any + domain: str + input_file: Any + output_dir: Any + output_file: Any + omit_header: bool + locale: Any + width: Any + no_wrap: bool + ignore_obsolete: bool + init_missing: bool + no_fuzzy_matching: bool + update_header_comment: bool + previous: bool + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + +class CommandLineInterface: + usage: str + version: Any + commands: Any + command_classes: Any + log: Any + parser: Any + def run(self, argv: Incomplete | None = ...): ... + +def main(): ... +def parse_mapping(fileobj, filename: Incomplete | None = ...): ... +def parse_keywords(strings=...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/jslexer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/jslexer.pyi new file mode 100644 index 00000000..6eb48739 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/jslexer.pyi @@ -0,0 +1,24 @@ +from collections.abc import Generator, Sequence +from re import Pattern +from typing import NamedTuple + +operators: Sequence[str] +escapes: dict[str, str] +name_re: Pattern[str] +dotted_name_re: Pattern[str] +division_re: Pattern[str] +regex_re: Pattern[str] +line_re: Pattern[str] +line_join_re: Pattern[str] +uni_escape_re: Pattern[str] + +class Token(NamedTuple): + type: str + value: str + lineno: int + +# Documented as private +def get_rules(jsx: bool, dotted: bool, template_string: bool) -> list[tuple[str | None, Pattern[str]]]: ... # undocumented +def indicates_division(token: Token) -> bool: ... +def unquote_string(string: str) -> str: ... +def tokenize(source: str, jsx: bool = ..., dotted: bool = ..., template_string: bool = ...) -> Generator[Token, None, None]: ... 
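
For orientation, a minimal sketch of how the Catalog/Message API described by the babel.messages stubs above is typically used; the locale, file name and translated strings are invented for the example and are not part of the vendored typeshed data.

    from babel.messages.catalog import Catalog

    catalog = Catalog(locale="de_DE", domain="messages", project="demo")
    # Catalog.add() creates (or updates) a Message and returns it.
    catalog.add(
        "Hello, world!",
        string="Hallo, Welt!",
        locations=[("app.py", 12)],
        flags=["fuzzy"],
    )
    for message in catalog:
        if message.id:  # skip the header entry, whose id is empty
            print(message.id, "->", message.string, "fuzzy:", message.fuzzy)
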
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/mofile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/mofile.pyi new file mode 100644 index 00000000..df15e4ef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/mofile.pyi @@ -0,0 +1,9 @@ +from _typeshed import SupportsRead, SupportsWrite + +from babel.messages.catalog import Catalog + +LE_MAGIC: int +BE_MAGIC: int + +def read_mo(fileobj: SupportsRead[bytes]) -> Catalog: ... +def write_mo(fileobj: SupportsWrite[bytes], catalog: Catalog, use_fuzzy: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/plurals.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/plurals.pyi new file mode 100644 index 00000000..2d6ea9c7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/plurals.pyi @@ -0,0 +1,13 @@ +LC_CTYPE: str +PLURALS: dict[str, tuple[int, str]] +DEFAULT_PLURAL: tuple[int, str] + +class _PluralTuple(tuple[int, str]): + @property + def num_plurals(self) -> int: ... + @property + def plural_expr(self) -> str: ... + @property + def plural_forms(self) -> str: ... + +def get_plural(locale: str = ...) -> _PluralTuple: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/pofile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/pofile.pyi new file mode 100644 index 00000000..b349dc4b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/messages/pofile.pyi @@ -0,0 +1,76 @@ +from _typeshed import SupportsWrite +from collections.abc import Iterable +from re import Pattern + +from babel.core import Locale +from babel.messages.catalog import Catalog + +def unescape(string: str) -> str: ... +def denormalize(string: str) -> str: ... + +class PoFileError(Exception): + catalog: Catalog + line: str + lineno: int + def __init__(self, message: str, catalog: Catalog, line: str, lineno: int) -> None: ... + +class _NormalizedString: + def __init__(self, *args: str) -> None: ... + def append(self, s: str) -> None: ... + def denormalize(self) -> str: ... + def __bool__(self) -> bool: ... + def __cmp__(self, other: object) -> int: ... + def __gt__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + def __ge__(self, other: object) -> bool: ... + def __le__(self, other: object) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + +class PoFileParser: + catalog: Catalog + ignore_obsolete: bool + counter: int + offset: int + abort_invalid: bool + # Internal variables: + messages: list[_NormalizedString] + # [index, string] lists + translations: list[list[int | _NormalizedString]] + locations: list[tuple[str, int | None]] + flags: list[str] + user_comments: list[str] + auto_comments: list[str] + context: str | None + obsolete: bool + in_msgid: bool + in_msgstr: bool + in_msgctxt: bool + def __init__(self, catalog, ignore_obsolete: bool = ..., abort_invalid: bool = ...) -> None: ... + def parse(self, fileobj: Iterable[str | bytes]) -> None: ... 
+ +def read_po( + fileobj: Iterable[str | bytes], + locale: str | Locale | None = ..., + domain: str | None = ..., + ignore_obsolete: bool = ..., + charset: str | None = ..., + abort_invalid: bool = ..., +) -> Catalog: ... + +WORD_SEP: Pattern[str] + +def escape(string: str) -> str: ... +def normalize(string: str, prefix: str = ..., width: int = ...) -> str: ... +def write_po( + fileobj: SupportsWrite[bytes], + catalog: Catalog, + width: int | None = ..., + no_location: bool = ..., + omit_header: bool = ..., + sort_output: bool = ..., + sort_by_file: bool = ..., + ignore_obsolete: bool = ..., + include_previous: bool = ..., + include_lineno: bool = ..., +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/numbers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/numbers.pyi new file mode 100644 index 00000000..f8a60df5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/numbers.pyi @@ -0,0 +1,127 @@ +import decimal +from datetime import date +from re import Pattern +from typing_extensions import Literal + +from babel.core import Locale + +long = int +LC_NUMERIC: str | None + +class UnknownCurrencyError(Exception): + identifier: str + def __init__(self, identifier: str) -> None: ... + +def list_currencies(locale: Locale | str | None = ...) -> set[str]: ... +def validate_currency(currency: str, locale: Locale | str | None = ...) -> None: ... +def is_currency(currency: str, locale: Locale | str | None = ...) -> bool: ... +def normalize_currency(currency: str, locale: Locale | str | None = ...) -> str | None: ... +def get_currency_name(currency: str, count: float | decimal.Decimal | None = ..., locale: Locale | str | None = ...) -> str: ... +def get_currency_symbol(currency: str, locale: Locale | str | None = ...) -> str: ... +def get_currency_precision(currency: str) -> int: ... +def get_currency_unit_pattern(currency: str, count: float | None = ..., locale: Locale | str | None = ...) -> str: ... +def get_territory_currencies( + territory: str, + start_date: date | None = ..., + end_date: date | None = ..., + tender: bool = ..., + non_tender: bool = ..., + include_details: bool = ..., +) -> list[str]: ... +def get_decimal_symbol(locale: Locale | str | None = ...) -> str: ... +def get_plus_sign_symbol(locale: Locale | str | None = ...) -> str: ... +def get_minus_sign_symbol(locale: Locale | str | None = ...) -> str: ... +def get_exponential_symbol(locale: Locale | str | None = ...) -> str: ... +def get_group_symbol(locale: Locale | str | None = ...) -> str: ... +def format_number(number: float | decimal.Decimal | str, locale: Locale | str | None = ...) -> str: ... +def get_decimal_precision(number: decimal.Decimal) -> int: ... +def get_decimal_quantum(precision: int | decimal.Decimal) -> decimal.Decimal: ... +def format_decimal( + number: float | decimal.Decimal | str, + format: str | None = ..., + locale: Locale | str | None = ..., + decimal_quantization: bool = ..., + group_separator: bool = ..., +): ... +def format_compact_decimal( + number: float, *, format_type: Literal["short", "long"] = ..., locale: Locale | str | None = ..., fraction_digits: int = ... +) -> str: ... + +class UnknownCurrencyFormatError(KeyError): ... 
+ +def format_currency( + number: float | decimal.Decimal | str, + currency: str, + format: str | None = ..., + locale: Locale | str | None = ..., + currency_digits: bool = ..., + format_type: Literal["name", "standard", "accounting"] = ..., + decimal_quantization: bool = ..., + group_separator: bool = ..., +) -> str: ... +def format_percent( + number: float | decimal.Decimal | str, + format: str | None = ..., + locale: Locale | str | None = ..., + decimal_quantization: bool = ..., + group_separator: bool = ..., +) -> str: ... +def format_scientific( + number: float | decimal.Decimal | str, + format: str | None = ..., + locale: Locale | str | None = ..., + decimal_quantization: bool = ..., +) -> str: ... + +class NumberFormatError(ValueError): + suggestions: str | None + def __init__(self, message: str, suggestions: str | None = ...) -> None: ... + +def parse_number(string: str, locale: Locale | str | None = ...) -> int: ... +def parse_decimal(string: str, locale: Locale | str | None = ..., strict: bool = ...) -> decimal.Decimal: ... + +PREFIX_END: str +NUMBER_TOKEN: str +PREFIX_PATTERN: str +NUMBER_PATTERN: str +SUFFIX_PATTERN: str +number_re: Pattern[str] + +def parse_grouping(p: str) -> tuple[int, int]: ... +def parse_pattern(pattern: NumberPattern | str) -> NumberPattern: ... + +class NumberPattern: + pattern: str + prefix: tuple[str, str] + suffix: tuple[str, str] + grouping: tuple[int, int] + int_prec: tuple[int, int] + frac_prec: tuple[int, int] + exp_prec: tuple[int, int] | None + exp_plus: bool | None + scale: Literal[0, 2, 3] + def __init__( + self, + pattern: str, + prefix: tuple[str, str], + suffix: tuple[str, str], + grouping: tuple[int, int], + int_prec: tuple[int, int], + frac_prec: tuple[int, int], + exp_prec: tuple[int, int] | None, + exp_plus: bool | None, + ) -> None: ... + def compute_scale(self) -> Literal[0, 2, 3]: ... + def scientific_notation_elements( + self, value: decimal.Decimal, locale: Locale | str | None + ) -> tuple[decimal.Decimal, int, str]: ... + def apply( + self, + value: float | decimal.Decimal, + locale: Locale | str | None, + currency: str | None = ..., + currency_digits: bool = ..., + decimal_quantization: bool = ..., + force_frac: int | None = ..., + group_separator: bool = ..., + ) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/plural.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/plural.pyi new file mode 100644 index 00000000..51c2ff4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/plural.pyi @@ -0,0 +1,99 @@ +import decimal +from _typeshed import Incomplete +from collections.abc import Iterable, Mapping +from typing import Any + +def extract_operands(source: float | decimal.Decimal) -> tuple[decimal.Decimal, int, int, int, int, int, int, int]: ... + +class PluralRule: + abstract: list[Any] + def __init__(self, rules: Mapping[str, str] | Iterable[tuple[str, str]]) -> None: ... + @classmethod + def parse(cls, rules: Mapping[str, str] | Iterable[tuple[str, str]] | PluralRule) -> PluralRule: ... + @property + def rules(self) -> Mapping[str, str]: ... + @property + def tags(self) -> frozenset[str]: ... + def __call__(self, n: float | decimal.Decimal) -> str: ... + +def to_javascript(rule): ... +def to_python(rule): ... +def to_gettext(rule): ... +def in_range_list(num, range_list): ... +def within_range_list(num, range_list): ... +def cldr_modulo(a, b): ... 
+ +class RuleError(Exception): ... + +def tokenize_rule(s): ... +def test_next_token(tokens, type_, value: Incomplete | None = ...): ... +def skip_token(tokens, type_, value: Incomplete | None = ...): ... +def value_node(value): ... +def ident_node(name): ... +def range_list_node(range_list): ... +def negate(rv): ... + +class _Parser: + tokens: Any + ast: Any + def __init__(self, string) -> None: ... + def expect(self, type_, value: Incomplete | None = ..., term: Incomplete | None = ...): ... + def condition(self): ... + def and_condition(self): ... + def relation(self): ... + def newfangled_relation(self, left): ... + def range_or_value(self): ... + def range_list(self): ... + def expr(self): ... + def value(self): ... + +compile_zero: Any + +class _Compiler: + def compile(self, arg): ... + compile_n: Any + compile_i: Any + compile_v: Any + compile_w: Any + compile_f: Any + compile_t: Any + compile_value: Any + compile_and: Any + compile_or: Any + compile_not: Any + compile_mod: Any + compile_is: Any + compile_isnot: Any + def compile_relation(self, method, expr, range_list) -> None: ... + +class _PythonCompiler(_Compiler): + compile_and: Any + compile_or: Any + compile_not: Any + compile_mod: Any + def compile_relation(self, method, expr, range_list): ... + +class _GettextCompiler(_Compiler): + compile_i: Any + compile_v: Any + compile_w: Any + compile_f: Any + compile_t: Any + def compile_relation(self, method, expr, range_list): ... + +class _JavaScriptCompiler(_GettextCompiler): + compile_i: Any + compile_v: Any + compile_w: Any + compile_f: Any + compile_t: Any + def compile_relation(self, method, expr, range_list): ... + +class _UnicodeCompiler(_Compiler): + compile_is: Any + compile_isnot: Any + compile_and: Any + compile_or: Any + compile_mod: Any + def compile_not(self, relation): ... + def compile_relation(self, method, expr, range_list, negated: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/support.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/support.pyi new file mode 100644 index 00000000..b7c307ec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/support.pyi @@ -0,0 +1,106 @@ +import gettext +from _typeshed import Incomplete +from datetime import date as _date, datetime as _datetime, time as _time, timedelta as _timedelta +from decimal import Decimal +from typing import Any +from typing_extensions import Literal + +from babel.core import Locale +from pytz import BaseTzInfo + +from .dates import _PredefinedTimeFormat + +class Format: + locale: Locale + tzinfo: BaseTzInfo | None + def __init__(self, locale: Locale | str, tzinfo: BaseTzInfo | None = ...) -> None: ... + def date(self, date: _date | None = ..., format: _PredefinedTimeFormat | str = ...) -> str: ... + def datetime(self, datetime: _date | None = ..., format: _PredefinedTimeFormat | str = ...) -> str: ... + def time(self, time: _time | _datetime | None = ..., format: _PredefinedTimeFormat | str = ...) -> str: ... + def timedelta( + self, + delta: _timedelta | int, + granularity: Literal["year", "month", "week", "day", "hour", "minute", "second"] = ..., + threshold: float = ..., + format: _PredefinedTimeFormat = ..., + add_direction: bool = ..., + ) -> str: ... + def number(self, number: float | Decimal | str) -> str: ... + def decimal(self, number: float | Decimal | str, format: str | None = ...) -> str: ... 
+ def currency(self, number: float | Decimal | str, currency: str) -> str: ... + def percent(self, number: float | Decimal | str, format: str | None = ...) -> str: ... + def scientific(self, number: float | Decimal | str) -> str: ... + +class LazyProxy: + def __init__(self, func, *args, **kwargs) -> None: ... + @property + def value(self): ... + def __contains__(self, key): ... + def __bool__(self) -> bool: ... + def __dir__(self): ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __add__(self, other): ... + def __radd__(self, other): ... + def __mod__(self, other): ... + def __rmod__(self, other): ... + def __mul__(self, other): ... + def __rmul__(self, other): ... + def __call__(self, *args, **kwargs): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __delattr__(self, name: str) -> None: ... + def __getattr__(self, name: str): ... + def __setattr__(self, name: str, value) -> None: ... + def __delitem__(self, key) -> None: ... + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + def __copy__(self) -> LazyProxy: ... + def __deepcopy__(self, memo: Any) -> LazyProxy: ... + +class NullTranslations(gettext.NullTranslations): + DEFAULT_DOMAIN: Any + plural: Any + files: Any + domain: Any + def __init__(self, fp: Incomplete | None = ...): ... + def dgettext(self, domain, message): ... + def ldgettext(self, domain, message): ... + def udgettext(self, domain, message): ... + dugettext: Any + def dngettext(self, domain, singular, plural, num): ... + def ldngettext(self, domain, singular, plural, num): ... + def udngettext(self, domain, singular, plural, num): ... + dungettext: Any + CONTEXT_ENCODING: str + def pgettext(self, context, message): ... + def lpgettext(self, context, message): ... + def npgettext(self, context, singular, plural, num): ... + def lnpgettext(self, context, singular, plural, num): ... + def upgettext(self, context, message): ... + def unpgettext(self, context, singular, plural, num): ... + def dpgettext(self, domain, context, message): ... + def udpgettext(self, domain, context, message): ... + dupgettext: Any + def ldpgettext(self, domain, context, message): ... + def dnpgettext(self, domain, context, singular, plural, num): ... + def udnpgettext(self, domain, context, singular, plural, num): ... + dunpgettext: Any + def ldnpgettext(self, domain, context, singular, plural, num): ... + ugettext: Any + ungettext: Any + +class Translations(NullTranslations, gettext.GNUTranslations): + DEFAULT_DOMAIN: str + domain: Any + def __init__(self, fp: Incomplete | None = ..., domain: Incomplete | None = ...) -> None: ... + ugettext: Any + ungettext: Any + @classmethod + def load(cls, dirname: Incomplete | None = ..., locales: Incomplete | None = ..., domain: Incomplete | None = ...): ... + def add(self, translations, merge: bool = ...): ... + def merge(self, translations): ... 
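
A short, illustrative use of the number-formatting API covered by the babel.numbers and babel.support stubs above; the locales and values are arbitrary examples, not anything taken from this patch.

    from decimal import Decimal
    from babel.numbers import format_currency, format_decimal, parse_decimal
    from babel.support import Format

    print(format_decimal(1234567.891, locale="en_US"))                 # 1,234,567.891
    print(format_currency(Decimal("1099.98"), "EUR", locale="de_DE"))  # e.g. 1.099,98 €
    print(parse_decimal("1.099,98", locale="de_DE"))                   # Decimal('1099.98')

    fmt = Format("en_US")      # bundles a locale for repeated formatting calls
    print(fmt.percent(0.34))   # 34%
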
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/units.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/units.pyi new file mode 100644 index 00000000..90808cab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/units.pyi @@ -0,0 +1,27 @@ +import decimal +from typing_extensions import Literal + +from babel.core import Locale + +class UnknownUnitError(ValueError): + def __init__(self, unit: str, locale: Locale) -> None: ... + +def get_unit_name( + measurement_unit: str, length: Literal["short", "long", "narrow"] = ..., locale: Locale | str | None = ... +) -> str: ... +def format_unit( + value: float | decimal.Decimal, + measurement_unit: str, + length: Literal["short", "long", "narrow"] = ..., + format: str | None = ..., + locale: Locale | str | None = ..., +) -> str: ... +def format_compound_unit( + numerator_value: float | decimal.Decimal, + numerator_unit: str | None = ..., + denominator_value: float | decimal.Decimal = ..., + denominator_unit: str | None = ..., + length: Literal["short", "long", "narrow"] = ..., + format: str | None = ..., + locale: Locale | str | None = ..., +) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/util.pyi new file mode 100644 index 00000000..7b40659d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/babel/babel/util.pyi @@ -0,0 +1,46 @@ +import collections +import textwrap +from collections.abc import Generator, Iterable +from datetime import timedelta, tzinfo +from re import Pattern +from typing import IO, Any, TypeVar + +from babel import localtime as localtime +from pytz import BaseTzInfo + +missing: object + +_T = TypeVar("_T") + +def distinct(iterable: Iterable[_T]) -> Generator[_T, None, None]: ... + +PYTHON_MAGIC_COMMENT_re: Pattern[bytes] + +def parse_encoding(fp: IO[bytes]) -> str | None: ... + +PYTHON_FUTURE_IMPORT_re: Pattern[str] + +def parse_future_flags(fp: IO[bytes], encoding: str = ...) -> int: ... +def pathmatch(pattern: str, filename: str) -> bool: ... + +class TextWrapper(textwrap.TextWrapper): + wordsep_re: Pattern[str] + +def wraptext(text, width: int = ..., initial_indent: str = ..., subsequent_indent: str = ...): ... + +odict = collections.OrderedDict + +class FixedOffsetTimezone(tzinfo): + zone: str + def __init__(self, offset: float, name: str | None = ...) -> None: ... + def utcoffset(self, dt: Any) -> timedelta: ... + def tzname(self, dt: Any) -> str: ... + def dst(self, dt: Any) -> timedelta: ... 
+ +UTC: BaseTzInfo +LOCALTZ: BaseTzInfo +get_localzone = localtime.get_localzone +STDOFFSET: timedelta +DSTOFFSET: timedelta +DSTDIFF: timedelta +ZERO: timedelta diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/backports.ssl_match_hostname/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/backports.ssl_match_hostname/METADATA.toml new file mode 100644 index 00000000..1cc45409 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/backports.ssl_match_hostname/METADATA.toml @@ -0,0 +1 @@ +version = "3.7.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/backports.ssl_match_hostname/backports/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/backports.ssl_match_hostname/backports/__init__.pyi new file mode 100644 index 00000000..8a93ba9c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/backports.ssl_match_hostname/backports/__init__.pyi @@ -0,0 +1,4 @@ +from _typeshed import Incomplete + +# Explicitly mark this package as incomplete. +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/backports.ssl_match_hostname/backports/ssl_match_hostname/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/backports.ssl_match_hostname/backports/ssl_match_hostname/__init__.pyi new file mode 100644 index 00000000..37b7bc67 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/backports.ssl_match_hostname/backports/ssl_match_hostname/__init__.pyi @@ -0,0 +1,5 @@ +from ssl import _PeerCertRetDictType + +class CertificateError(ValueError): ... + +def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/METADATA.toml new file mode 100644 index 00000000..ba9a1649 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/METADATA.toml @@ -0,0 +1,6 @@ +version = "4.11.*" +requires = ["types-html5lib"] + +[tool.stubtest] +ignore_missing_stub = true +extras = ["html5lib", "lxml"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/__init__.pyi new file mode 100644 index 00000000..21ed6f20 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/__init__.pyi @@ -0,0 +1,94 @@ +from _typeshed import Incomplete, SupportsRead +from collections.abc import Sequence +from typing import Any +from typing_extensions import Self + +from .builder import ParserRejectedMarkup as ParserRejectedMarkup, TreeBuilder, XMLParsedAsHTMLWarning as XMLParsedAsHTMLWarning +from .element import ( + CData as CData, + Comment as Comment, + Declaration as Declaration, + Doctype as Doctype, + NavigableString as NavigableString, + PageElement as PageElement, + ProcessingInstruction as ProcessingInstruction, + ResultSet as ResultSet, + Script as Script, + SoupStrainer as SoupStrainer, + Stylesheet as Stylesheet, + Tag as Tag, + TemplateString as TemplateString, +) +from .formatter import Formatter + +class GuessedAtParserWarning(UserWarning): ... +class MarkupResemblesLocatorWarning(UserWarning): ... + +class BeautifulSoup(Tag): + ROOT_TAG_NAME: str + DEFAULT_BUILDER_FEATURES: list[str] + ASCII_SPACES: str + NO_PARSER_SPECIFIED_WARNING: str + element_classes: Any + builder: TreeBuilder + is_xml: bool + known_xml: bool + parse_only: SoupStrainer | None + markup: str + def __init__( + self, + markup: str | bytes | SupportsRead[str] | SupportsRead[bytes] = ..., + features: str | Sequence[str] | None = ..., + builder: TreeBuilder | type[TreeBuilder] | None = ..., + parse_only: SoupStrainer | None = ..., + from_encoding: str | None = ..., + exclude_encodings: Sequence[str] | None = ..., + element_classes: dict[type[PageElement], type[Any]] | None = ..., + **kwargs, + ) -> None: ... + def __copy__(self) -> Self: ... + hidden: bool + current_data: Any + currentTag: Any + tagStack: Any + open_tag_counter: Any + preserve_whitespace_tag_stack: Any + string_container_stack: Any + def reset(self) -> None: ... + def new_tag( + self, + name, + namespace: Incomplete | None = ..., + nsprefix: Incomplete | None = ..., + attrs=..., + sourceline: Incomplete | None = ..., + sourcepos: Incomplete | None = ..., + **kwattrs, + ) -> Tag: ... + def string_container(self, base_class: Incomplete | None = ...): ... + def new_string(self, s, subclass: Incomplete | None = ...): ... + def insert_before(self, *args) -> None: ... + def insert_after(self, *args) -> None: ... + def popTag(self): ... + def pushTag(self, tag) -> None: ... + def endData(self, containerClass: Incomplete | None = ...) -> None: ... + def object_was_parsed(self, o, parent: Incomplete | None = ..., most_recent_element: Incomplete | None = ...) -> None: ... 
+ def handle_starttag( + self, + name, + namespace, + nsprefix, + attrs, + sourceline: Incomplete | None = ..., + sourcepos: Incomplete | None = ..., + namespaces: dict[str, str] | None = ..., + ): ... + def handle_endtag(self, name, nsprefix: Incomplete | None = ...) -> None: ... + def handle_data(self, data) -> None: ... + def decode( # type: ignore[override] + self, pretty_print: bool = ..., eventual_encoding: str = ..., formatter: str | Formatter = ... + ): ... # missing some arguments + +class BeautifulStoneSoup(BeautifulSoup): ... +class StopParsing(Exception): ... +class FeatureNotFound(ValueError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/__init__.pyi new file mode 100644 index 00000000..3807629e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/__init__.pyi @@ -0,0 +1,69 @@ +from _typeshed import Incomplete +from typing import Any + +class TreeBuilderRegistry: + builders_for_feature: Any + builders: Any + def __init__(self) -> None: ... + def register(self, treebuilder_class) -> None: ... + def lookup(self, *features): ... + +class TreeBuilder: + NAME: str + ALTERNATE_NAMES: Any + features: Any + is_xml: bool + picklable: bool + empty_element_tags: Any + DEFAULT_CDATA_LIST_ATTRIBUTES: Any + DEFAULT_PRESERVE_WHITESPACE_TAGS: Any + DEFAULT_STRING_CONTAINERS: Any + USE_DEFAULT: Any + TRACKS_LINE_NUMBERS: bool + soup: Any + cdata_list_attributes: Any + preserve_whitespace_tags: Any + store_line_numbers: Any + string_containers: Any + def __init__( + self, multi_valued_attributes=..., preserve_whitespace_tags=..., store_line_numbers=..., string_containers=... + ) -> None: ... + def initialize_soup(self, soup) -> None: ... + def reset(self) -> None: ... + def can_be_empty_element(self, tag_name): ... + def feed(self, markup) -> None: ... + def prepare_markup( + self, + markup, + user_specified_encoding: Incomplete | None = ..., + document_declared_encoding: Incomplete | None = ..., + exclude_encodings: Incomplete | None = ..., + ) -> None: ... + def test_fragment_to_document(self, fragment): ... + def set_up_substitutions(self, tag): ... + +class SAXTreeBuilder(TreeBuilder): + def feed(self, markup) -> None: ... + def close(self) -> None: ... + def startElement(self, name, attrs) -> None: ... + def endElement(self, name) -> None: ... + def startElementNS(self, nsTuple, nodeName, attrs) -> None: ... + def endElementNS(self, nsTuple, nodeName) -> None: ... + def startPrefixMapping(self, prefix, nodeValue) -> None: ... + def endPrefixMapping(self, prefix) -> None: ... + def characters(self, content) -> None: ... + def startDocument(self) -> None: ... + def endDocument(self) -> None: ... + +class HTMLTreeBuilder(TreeBuilder): + empty_element_tags: Any + block_elements: Any + DEFAULT_STRING_CONTAINERS: Any + DEFAULT_CDATA_LIST_ATTRIBUTES: Any + DEFAULT_PRESERVE_WHITESPACE_TAGS: Any + def set_up_substitutions(self, tag): ... + +class ParserRejectedMarkup(Exception): + def __init__(self, message_or_exception) -> None: ... + +class XMLParsedAsHTMLWarning(UserWarning): ... 
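
For reference, a small, illustrative example of the BeautifulSoup entry point and TreeBuilder selection these bs4 stubs describe; the HTML snippet and the choice of the stdlib "html.parser" builder are assumptions made only for the example.

    from bs4 import BeautifulSoup

    html = "<html><body><p class='intro'>Hello</p><p>World</p></body></html>"
    # The `features` argument picks a TreeBuilder from the registry;
    # "html.parser" uses the standard-library parser and needs no extra dependency.
    soup = BeautifulSoup(html, features="html.parser")

    for p in soup.find_all("p"):
        print(p.get_text())

    link = soup.new_tag("a", href="https://example.com")
    link.string = "a link"
    soup.body.append(link)
    print(soup.prettify())
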
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/_html5lib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/_html5lib.pyi new file mode 100644 index 00000000..412d1afa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/_html5lib.pyi @@ -0,0 +1,76 @@ +from _typeshed import Incomplete +from typing import Any + +from bs4.builder import HTMLTreeBuilder +from html5lib.treebuilders import base as treebuilder_base + +class HTML5TreeBuilder(HTMLTreeBuilder): + NAME: str + features: Any + TRACKS_LINE_NUMBERS: bool + user_specified_encoding: Any + def prepare_markup( # type: ignore[override] # user_specified_encoding doesn't have a default + self, + markup, + user_specified_encoding, + document_declared_encoding: Incomplete | None = ..., + exclude_encodings: Incomplete | None = ..., + ) -> None: ... + def feed(self, markup) -> None: ... + underlying_builder: Any + def create_treebuilder(self, namespaceHTMLElements): ... + def test_fragment_to_document(self, fragment): ... + +class TreeBuilderForHtml5lib(treebuilder_base.TreeBuilder): + soup: Any + parser: Any + store_line_numbers: Any + def __init__( + self, namespaceHTMLElements, soup: Incomplete | None = ..., store_line_numbers: bool = ..., **kwargs + ) -> None: ... + def documentClass(self): ... + def insertDoctype(self, token) -> None: ... + def elementClass(self, name, namespace): ... + def commentClass(self, data): ... + def fragmentClass(self): ... + def appendChild(self, node) -> None: ... + def getDocument(self): ... + def getFragment(self): ... + def testSerializer(self, element): ... + +class AttrList: + element: Any + attrs: Any + def __init__(self, element) -> None: ... + def __iter__(self): ... + def __setitem__(self, name, value) -> None: ... + def items(self): ... + def keys(self): ... + def __len__(self) -> int: ... + def __getitem__(self, name): ... + def __contains__(self, name): ... + +class Element(treebuilder_base.Node): + element: Any + soup: Any + namespace: Any + def __init__(self, element, soup, namespace) -> None: ... + def appendChild(self, node) -> None: ... + def getAttributes(self): ... + def setAttributes(self, attributes) -> None: ... + attributes: Any + def insertText(self, data, insertBefore: Incomplete | None = ...) -> None: ... + def insertBefore(self, node, refNode) -> None: ... + def removeChild(self, node) -> None: ... + def reparentChildren(self, new_parent) -> None: ... + def cloneNode(self): ... + def hasContent(self): ... + def getNameTuple(self): ... + @property + def nameTuple(self): ... + +class TextNode(Element): + element: Any + soup: Any + def __init__(self, element, soup) -> None: ... + def cloneNode(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi new file mode 100644 index 00000000..b21a0d44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete +from html.parser import HTMLParser +from typing import Any + +from bs4.builder import HTMLTreeBuilder + +class BeautifulSoupHTMLParser(HTMLParser): + IGNORE: str + REPLACE: str + on_duplicate_attribute: Any + already_closed_empty_element: Any + def __init__(self, *args, **kwargs) -> None: ... + def handle_startendtag(self, name, attrs) -> None: ... + def handle_starttag(self, name, attrs, handle_empty_element: bool = ...) -> None: ... + def handle_endtag(self, name, check_already_closed: bool = ...) -> None: ... + def handle_data(self, data) -> None: ... + def handle_charref(self, name) -> None: ... + def handle_entityref(self, name) -> None: ... + def handle_comment(self, data) -> None: ... + def handle_decl(self, data) -> None: ... + def unknown_decl(self, data) -> None: ... + def handle_pi(self, data) -> None: ... + +class HTMLParserTreeBuilder(HTMLTreeBuilder): + is_xml: bool + picklable: bool + NAME: Any + features: Any + TRACKS_LINE_NUMBERS: bool + parser_args: Any + def __init__(self, parser_args: Incomplete | None = ..., parser_kwargs: Incomplete | None = ..., **kwargs) -> None: ... + def prepare_markup( + self, + markup, + user_specified_encoding: Incomplete | None = ..., + document_declared_encoding: Incomplete | None = ..., + exclude_encodings: Incomplete | None = ..., + ) -> None: ... + def feed(self, markup) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/_lxml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/_lxml.pyi new file mode 100644 index 00000000..ef57b058 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/builder/_lxml.pyi @@ -0,0 +1,50 @@ +from _typeshed import Incomplete +from typing import Any + +from bs4.builder import HTMLTreeBuilder, TreeBuilder + +class LXMLTreeBuilderForXML(TreeBuilder): + DEFAULT_PARSER_CLASS: Any + is_xml: bool + processing_instruction_class: Any + NAME: str + ALTERNATE_NAMES: Any + features: Any + CHUNK_SIZE: int + DEFAULT_NSMAPS: Any + DEFAULT_NSMAPS_INVERTED: Any + def initialize_soup(self, soup) -> None: ... + def default_parser(self, encoding): ... + def parser_for(self, encoding): ... + empty_element_tags: Any + soup: Any + nsmaps: Any + def __init__(self, parser: Incomplete | None = ..., empty_element_tags: Incomplete | None = ..., **kwargs) -> None: ... + def prepare_markup( # type: ignore[override] # the order of the parameters is different + self, + markup, + user_specified_encoding: Incomplete | None = ..., + exclude_encodings: Incomplete | None = ..., + document_declared_encoding: Incomplete | None = ..., + ) -> None: ... + parser: Any + def feed(self, markup) -> None: ... + def close(self) -> None: ... + def start(self, name, attrs, nsmap=...) -> None: ... + def end(self, name) -> None: ... + def pi(self, target, data) -> None: ... + def data(self, content) -> None: ... + def doctype(self, name, pubid, system) -> None: ... 
+ def comment(self, content) -> None: ... + def test_fragment_to_document(self, fragment): ... + +class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML): + NAME: Any + ALTERNATE_NAMES: Any + features: Any + is_xml: bool + processing_instruction_class: Any + def default_parser(self, encoding): ... + parser: Any + def feed(self, markup) -> None: ... + def test_fragment_to_document(self, fragment): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/dammit.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/dammit.pyi new file mode 100644 index 00000000..6422049e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/dammit.pyi @@ -0,0 +1,86 @@ +from _typeshed import Incomplete +from collections.abc import Iterable, Iterator +from logging import Logger +from typing import Any +from typing_extensions import Literal + +def chardet_dammit(s): ... + +xml_encoding: str +html_meta: str +encoding_res: Any + +class EntitySubstitution: + CHARACTER_TO_HTML_ENTITY: Any + HTML_ENTITY_TO_CHARACTER: Any + CHARACTER_TO_HTML_ENTITY_RE: Any + CHARACTER_TO_XML_ENTITY: Any + BARE_AMPERSAND_OR_BRACKET: Any + AMPERSAND_OR_BRACKET: Any + @classmethod + def quoted_attribute_value(cls, value): ... + @classmethod + def substitute_xml(cls, value, make_quoted_attribute: bool = ...): ... + @classmethod + def substitute_xml_containing_entities(cls, value, make_quoted_attribute: bool = ...): ... + @classmethod + def substitute_html(cls, s): ... + +class EncodingDetector: + known_definite_encodings: list[str] + user_encodings: list[str] + exclude_encodings: set[str] + chardet_encoding: Incomplete | None + is_html: bool + declared_encoding: str | None + markup: Any + sniffed_encoding: str | None + def __init__( + self, + markup, + known_definite_encodings: Iterable[str] | None = ..., + is_html: bool = ..., + exclude_encodings: list[str] | None = ..., + user_encodings: list[str] | None = ..., + override_encodings: list[str] | None = ..., + ) -> None: ... + @property + def encodings(self) -> Iterator[str]: ... + @classmethod + def strip_byte_order_mark(cls, data): ... + @classmethod + def find_declared_encoding(cls, markup, is_html: bool = ..., search_entire_document: bool = ...) -> str | None: ... + +class UnicodeDammit: + CHARSET_ALIASES: dict[str, str] + ENCODINGS_WITH_SMART_QUOTES: list[str] + smart_quotes_to: Literal["ascii", "xml", "html"] | None + tried_encodings: list[tuple[str, str]] + contains_replacement_characters: bool + is_html: bool + log: Logger + detector: EncodingDetector + markup: Any + unicode_markup: str + original_encoding: Incomplete | None + def __init__( + self, + markup, + known_definite_encodings: list[str] | None = ..., + smart_quotes_to: Literal["ascii", "xml", "html"] | None = ..., + is_html: bool = ..., + exclude_encodings: list[str] | None = ..., + user_encodings: list[str] | None = ..., + override_encodings: list[str] | None = ..., + ) -> None: ... + @property + def declared_html_encoding(self) -> str | None: ... + def find_codec(self, charset: str) -> str | None: ... + MS_CHARS: dict[bytes, str | tuple[str, ...]] + MS_CHARS_TO_ASCII: dict[bytes, str] + WINDOWS_1252_TO_UTF8: dict[int, bytes] + MULTIBYTE_MARKERS_AND_SIZES: list[tuple[int, int, int]] + FIRST_MULTIBYTE_MARKER: int + LAST_MULTIBYTE_MARKER: int + @classmethod + def detwingle(cls, in_bytes: bytes, main_encoding: str = ..., embedded_encoding: str = ...) -> bytes: ... 
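
A brief, illustrative sketch of the encoding-detection helpers declared in the bs4.dammit stub above; the byte string and the candidate encoding are invented for the example.

    from bs4.dammit import UnicodeDammit

    raw = "Sacr\u00e9 bleu!".encode("latin-1")
    # known_definite_encodings are tried first, before any sniffing or chardet fallback.
    dammit = UnicodeDammit(raw, known_definite_encodings=["latin-1"])
    print(dammit.unicode_markup)     # decoded text: Sacré bleu!
    print(dammit.original_encoding)  # the encoding that successfully decoded the input
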
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/diagnose.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/diagnose.pyi new file mode 100644 index 00000000..f498f996 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/diagnose.pyi @@ -0,0 +1,22 @@ +from html.parser import HTMLParser + +def diagnose(data) -> None: ... +def lxml_trace(data, html: bool = ..., **kwargs) -> None: ... + +class AnnouncingParser(HTMLParser): + def handle_starttag(self, name, attrs) -> None: ... + def handle_endtag(self, name) -> None: ... + def handle_data(self, data) -> None: ... + def handle_charref(self, name) -> None: ... + def handle_entityref(self, name) -> None: ... + def handle_comment(self, data) -> None: ... + def handle_decl(self, data) -> None: ... + def unknown_decl(self, data) -> None: ... + def handle_pi(self, data) -> None: ... + +def htmlparser_trace(data) -> None: ... +def rword(length: int = ...): ... +def rsentence(length: int = ...): ... +def rdoc(num_elements: int = ...): ... +def benchmark_parsers(num_elements: int = ...) -> None: ... +def profile(num_elements: int = ..., parser: str = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/element.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/element.pyi new file mode 100644 index 00000000..f7fa39af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/element.pyi @@ -0,0 +1,368 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Iterable, Iterator +from re import Pattern +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self, TypeAlias + +from . import BeautifulSoup +from .builder import TreeBuilder +from .formatter import Formatter, _EntitySubstitution + +DEFAULT_OUTPUT_ENCODING: str +nonwhitespace_re: Pattern[str] +whitespace_re: Pattern[str] +PYTHON_SPECIFIC_ENCODINGS: set[str] + +class NamespacedAttribute(str): + def __new__(cls, prefix: str, name: str | None = ..., namespace: str | None = ...) -> Self: ... + +class AttributeValueWithCharsetSubstitution(str): ... + +class CharsetMetaAttributeValue(AttributeValueWithCharsetSubstitution): + def __new__(cls, original_value): ... + def encode(self, encoding: str) -> str: ... # type: ignore[override] # incompatible with str + +class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution): + CHARSET_RE: Pattern[str] + def __new__(cls, original_value): ... + def encode(self, encoding: str) -> str: ... 
# type: ignore[override] # incompatible with str + +_PageElementT = TypeVar("_PageElementT", bound=PageElement) +_SimpleStrainable: TypeAlias = str | bool | None | bytes | Pattern[str] | Callable[[str], bool] | Callable[[Tag], bool] +_Strainable: TypeAlias = _SimpleStrainable | Iterable[_SimpleStrainable] +_SimpleNormalizedStrainable: TypeAlias = str | bool | None | Pattern[str] | Callable[[str], bool] | Callable[[Tag], bool] +_NormalizedStrainable: TypeAlias = _SimpleNormalizedStrainable | Iterable[_SimpleNormalizedStrainable] + +class PageElement: + parent: Tag | None + previous_element: PageElement | None + next_element: PageElement | None + next_sibling: PageElement | None + previous_sibling: PageElement | None + def setup( + self, + parent: Tag | None = ..., + previous_element: PageElement | None = ..., + next_element: PageElement | None = ..., + previous_sibling: PageElement | None = ..., + next_sibling: PageElement | None = ..., + ) -> None: ... + def format_string(self, s: str, formatter: Formatter | str | None) -> str: ... + def formatter_for_name(self, formatter: Formatter | str | _EntitySubstitution): ... + nextSibling: PageElement | None + previousSibling: PageElement | None + @property + def stripped_strings(self) -> Iterator[str]: ... + def get_text(self, separator: str = ..., strip: bool = ..., types: tuple[type[NavigableString], ...] = ...) -> str: ... + getText = get_text + @property + def text(self) -> str: ... + def replace_with(self, *args: PageElement | str) -> Self: ... + replaceWith = replace_with + def unwrap(self) -> Self: ... + replace_with_children = unwrap + replaceWithChildren = unwrap + def wrap(self, wrap_inside: _PageElementT) -> _PageElementT: ... + def extract(self, _self_index: int | None = ...) -> Self: ... + def insert(self, position: int, new_child: PageElement | str) -> None: ... + def append(self, tag: PageElement | str) -> None: ... + def extend(self, tags: Iterable[PageElement | str]) -> None: ... + def insert_before(self, *args: PageElement | str) -> None: ... + def insert_after(self, *args: PageElement | str) -> None: ... + def find_next( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + string: _Strainable | None = ..., + **kwargs: _Strainable, + ) -> Tag | NavigableString | None: ... + findNext = find_next + def find_all_next( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + string: _Strainable | None = ..., + limit: int | None = ..., + **kwargs: _Strainable, + ) -> ResultSet[PageElement]: ... + findAllNext = find_all_next + def find_next_sibling( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + string: _Strainable | None = ..., + **kwargs: _Strainable, + ) -> Tag | NavigableString | None: ... + findNextSibling = find_next_sibling + def find_next_siblings( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + string: _Strainable | None = ..., + limit: int | None = ..., + **kwargs: _Strainable, + ) -> ResultSet[PageElement]: ... + findNextSiblings = find_next_siblings + fetchNextSiblings = find_next_siblings + def find_previous( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + string: _Strainable | None = ..., + **kwargs: _Strainable, + ) -> Tag | NavigableString | None: ... 
+ findPrevious = find_previous + def find_all_previous( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + string: _Strainable | None = ..., + limit: int | None = ..., + **kwargs: _Strainable, + ) -> ResultSet[PageElement]: ... + findAllPrevious = find_all_previous + fetchPrevious = find_all_previous + def find_previous_sibling( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + string: _Strainable | None = ..., + **kwargs: _Strainable, + ) -> Tag | NavigableString | None: ... + findPreviousSibling = find_previous_sibling + def find_previous_siblings( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + string: _Strainable | None = ..., + limit: int | None = ..., + **kwargs: _Strainable, + ) -> ResultSet[PageElement]: ... + findPreviousSiblings = find_previous_siblings + fetchPreviousSiblings = find_previous_siblings + def find_parent( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + **kwargs: _Strainable, + ) -> Tag | None: ... + findParent = find_parent + def find_parents( + self, + name: _Strainable | SoupStrainer | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + limit: int | None = ..., + **kwargs: _Strainable, + ) -> ResultSet[Tag]: ... + findParents = find_parents + fetchParents = find_parents + @property + def next(self) -> Tag | NavigableString | None: ... + @property + def previous(self) -> Tag | NavigableString | None: ... + @property + def next_elements(self) -> Iterable[PageElement]: ... + @property + def next_siblings(self) -> Iterable[PageElement]: ... + @property + def previous_elements(self) -> Iterable[PageElement]: ... + @property + def previous_siblings(self) -> Iterable[PageElement]: ... + @property + def parents(self) -> Iterable[Tag]: ... + @property + def decomposed(self) -> bool: ... + def nextGenerator(self) -> Iterable[PageElement]: ... + def nextSiblingGenerator(self) -> Iterable[PageElement]: ... + def previousGenerator(self) -> Iterable[PageElement]: ... + def previousSiblingGenerator(self) -> Iterable[PageElement]: ... + def parentGenerator(self) -> Iterable[Tag]: ... + +class NavigableString(str, PageElement): + PREFIX: str + SUFFIX: str + known_xml: bool | None + def __new__(cls, value: str | bytes) -> Self: ... + def __copy__(self) -> Self: ... + def __getnewargs__(self) -> tuple[str]: ... + def output_ready(self, formatter: Formatter | str | None = ...) -> str: ... + @property + def name(self) -> None: ... + @property + def strings(self) -> Iterable[str]: ... + +class PreformattedString(NavigableString): + PREFIX: str + SUFFIX: str + def output_ready(self, formatter: Formatter | str | None = ...) -> str: ... + +class CData(PreformattedString): + PREFIX: str + SUFFIX: str + +class ProcessingInstruction(PreformattedString): + PREFIX: str + SUFFIX: str + +class XMLProcessingInstruction(ProcessingInstruction): + PREFIX: str + SUFFIX: str + +class Comment(PreformattedString): + PREFIX: str + SUFFIX: str + +class Declaration(PreformattedString): + PREFIX: str + SUFFIX: str + +class Doctype(PreformattedString): + @classmethod + def for_name_and_ids(cls, name: str | None, pub_id: str, system_id: str) -> Doctype: ... + PREFIX: str + SUFFIX: str + +class Stylesheet(NavigableString): ... +class Script(NavigableString): ... +class TemplateString(NavigableString): ... 
+ +class Tag(PageElement): + parser_class: type[BeautifulSoup] | None + name: str + namespace: str | None + prefix: str | None + sourceline: int | None + sourcepos: int | None + known_xml: bool | None + attrs: dict[str, str] + contents: list[PageElement] + hidden: bool + can_be_empty_element: bool | None + cdata_list_attributes: list[str] | None + preserve_whitespace_tags: list[str] | None + def __init__( + self, + parser: BeautifulSoup | None = ..., + builder: TreeBuilder | None = ..., + name: str | None = ..., + namespace: str | None = ..., + prefix: str | None = ..., + attrs: dict[str, str] | None = ..., + parent: Tag | None = ..., + previous: PageElement | None = ..., + is_xml: bool | None = ..., + sourceline: int | None = ..., + sourcepos: int | None = ..., + can_be_empty_element: bool | None = ..., + cdata_list_attributes: list[str] | None = ..., + preserve_whitespace_tags: list[str] | None = ..., + interesting_string_types: type[NavigableString] | tuple[type[NavigableString], ...] | None = ..., + namespaces: dict[str, str] | None = ..., + ) -> None: ... + parserClass: type[BeautifulSoup] | None + def __copy__(self) -> Self: ... + @property + def is_empty_element(self) -> bool: ... + @property + def isSelfClosing(self) -> bool: ... + @property + def string(self) -> str | None: ... + @string.setter + def string(self, string: str) -> None: ... + DEFAULT_INTERESTING_STRING_TYPES: tuple[type[NavigableString], ...] + @property + def strings(self) -> Iterable[str]: ... + def decompose(self) -> None: ... + def clear(self, decompose: bool = ...) -> None: ... + def smooth(self) -> None: ... + def index(self, element: PageElement) -> int: ... + def get(self, key: str, default: str | list[str] | None = ...) -> str | list[str] | None: ... + def get_attribute_list(self, key: str, default: str | list[str] | None = ...) -> list[str]: ... + def has_attr(self, key: str) -> bool: ... + def __hash__(self) -> int: ... + def __getitem__(self, key: str) -> str | list[str]: ... + def __iter__(self) -> Iterator[PageElement]: ... + def __len__(self) -> int: ... + def __contains__(self, x: object) -> bool: ... + def __bool__(self) -> bool: ... + def __setitem__(self, key: str, value: str | list[str]) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __getattr__(self, tag: str) -> Tag | None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __unicode__(self) -> str: ... + def encode( + self, encoding: str = ..., indent_level: int | None = ..., formatter: str | Formatter = ..., errors: str = ... + ) -> bytes: ... + def decode(self, indent_level: int | None = ..., eventual_encoding: str = ..., formatter: str | Formatter = ...) -> str: ... + @overload + def prettify(self, encoding: str, formatter: str | Formatter = ...) -> bytes: ... + @overload + def prettify(self, encoding: None = ..., formatter: str | Formatter = ...) -> str: ... + def decode_contents( + self, indent_level: int | None = ..., eventual_encoding: str = ..., formatter: str | Formatter = ... + ) -> str: ... + def encode_contents(self, indent_level: int | None = ..., encoding: str = ..., formatter: str | Formatter = ...) -> bytes: ... + def renderContents(self, encoding: str = ..., prettyPrint: bool = ..., indentLevel: int = ...) -> bytes: ... + def find( + self, + name: _Strainable | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + recursive: bool = ..., + string: _Strainable | None = ..., + **kwargs: _Strainable, + ) -> Tag | NavigableString | None: ... 
+ findChild = find + def find_all( + self, + name: _Strainable | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + recursive: bool = ..., + string: _Strainable | None = ..., + limit: int | None = ..., + **kwargs: _Strainable, + ) -> ResultSet[Any]: ... + __call__ = find_all + findAll = find_all + findChildren = find_all + @property + def children(self) -> Iterable[PageElement]: ... + @property + def descendants(self) -> Iterable[PageElement]: ... + def select_one( + self, selector: str, namespaces: Incomplete | None = ..., *, flags: int = ..., custom: dict[str, str] | None = ... + ) -> Tag | None: ... + def select( + self, + selector: str, + namespaces: Incomplete | None = ..., + limit: int | None = ..., + *, + flags: int = ..., + custom: dict[str, str] | None = ..., + ) -> ResultSet[Tag]: ... + def childGenerator(self) -> Iterable[PageElement]: ... + def recursiveChildGenerator(self) -> Iterable[PageElement]: ... + def has_key(self, key: str) -> bool: ... + +class SoupStrainer: + name: _NormalizedStrainable + attrs: dict[str, _NormalizedStrainable] + string: _NormalizedStrainable + def __init__( + self, + name: _Strainable | None = ..., + attrs: dict[str, _Strainable] | _Strainable = ..., + string: _Strainable | None = ..., + **kwargs: _Strainable, + ) -> None: ... + def search_tag(self, markup_name: Tag | str | None = ..., markup_attrs=...): ... + searchTag = search_tag + def search(self, markup: PageElement | Iterable[PageElement]): ... + +class ResultSet(list[_PageElementT], Generic[_PageElementT]): + source: SoupStrainer + @overload + def __init__(self, source: SoupStrainer) -> None: ... + @overload + def __init__(self, source: SoupStrainer, result: Iterable[_PageElementT]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/formatter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/formatter.pyi new file mode 100644 index 00000000..bfbba827 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/beautifulsoup4/bs4/formatter.pyi @@ -0,0 +1,46 @@ +from collections.abc import Callable +from typing_extensions import TypeAlias + +from .dammit import EntitySubstitution as EntitySubstitution + +_EntitySubstitution: TypeAlias = Callable[[str], str] + +class Formatter(EntitySubstitution): + HTML: str + XML: str + HTML_DEFAULTS: dict[str, set[str]] + language: str | None + entity_substitution: _EntitySubstitution + void_element_close_prefix: str + cdata_containing_tags: list[str] + empty_attributes_are_booleans: bool + def __init__( + self, + language: str | None = ..., + entity_substitution: _EntitySubstitution | None = ..., + void_element_close_prefix: str = ..., + cdata_containing_tags: list[str] | None = ..., + empty_attributes_are_booleans: bool = ..., + indent: int = ..., + ) -> None: ... + def substitute(self, ns: str) -> str: ... + def attribute_value(self, value: str) -> str: ... + def attributes(self, tag): ... + +class HTMLFormatter(Formatter): + REGISTRY: dict[str, HTMLFormatter] + def __init__( + self, + entity_substitution: _EntitySubstitution | None = ..., + void_element_close_prefix: str = ..., + cdata_containing_tags: list[str] | None = ..., + ) -> None: ... 
+ +class XMLFormatter(Formatter): + REGISTRY: dict[str, XMLFormatter] + def __init__( + self, + entity_substitution: _EntitySubstitution | None = ..., + void_element_close_prefix: str = ..., + cdata_containing_tags: list[str] | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..c3ff453f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +bleach.css_sanitizer # Requires tinycss2 to be installed +bleach.html5lib_shim.* diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/METADATA.toml new file mode 100644 index 00000000..5221c412 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/METADATA.toml @@ -0,0 +1,4 @@ +version = "6.0.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/__init__.pyi new file mode 100644 index 00000000..5ce681f2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/__init__.pyi @@ -0,0 +1,33 @@ +from collections.abc import Container, Iterable +from typing_extensions import TypeAlias + +from .callbacks import _Callback +from .css_sanitizer import CSSSanitizer +from .linkifier import DEFAULT_CALLBACKS as DEFAULT_CALLBACKS, Linker as Linker +from .sanitizer import ( + ALLOWED_ATTRIBUTES as ALLOWED_ATTRIBUTES, + ALLOWED_PROTOCOLS as ALLOWED_PROTOCOLS, + ALLOWED_TAGS as ALLOWED_TAGS, + Cleaner as Cleaner, + _Attributes, +) + +__all__ = ["clean", "linkify"] + +__releasedate__: str +__version__: str + +_HTMLAttrKey: TypeAlias = tuple[str | None, str] # noqa: Y047 + +def clean( + text: str, + tags: Iterable[str] = ..., + attributes: _Attributes = ..., + protocols: Iterable[str] = ..., + strip: bool = ..., + strip_comments: bool = ..., + css_sanitizer: CSSSanitizer | None = ..., +) -> str: ... +def linkify( + text: str, callbacks: Iterable[_Callback] = ..., skip_tags: Container[str] | None = ..., parse_email: bool = ... +) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/callbacks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/callbacks.pyi new file mode 100644 index 00000000..54e94d3e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/callbacks.pyi @@ -0,0 +1,13 @@ +from collections.abc import MutableMapping +from typing import Protocol +from typing_extensions import TypeAlias + +from bleach import _HTMLAttrKey + +_HTMLAttrs: TypeAlias = MutableMapping[_HTMLAttrKey, str] + +class _Callback(Protocol): # noqa: Y046 + def __call__(self, attrs: _HTMLAttrs, new: bool = ...) -> _HTMLAttrs: ... + +def nofollow(attrs: _HTMLAttrs, new: bool = ...) -> _HTMLAttrs: ... +def target_blank(attrs: _HTMLAttrs, new: bool = ...) -> _HTMLAttrs: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/css_sanitizer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/css_sanitizer.pyi new file mode 100644 index 00000000..5e3c6f2b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/css_sanitizer.pyi @@ -0,0 +1,11 @@ +from collections.abc import Container + +ALLOWED_CSS_PROPERTIES: frozenset[str] +ALLOWED_SVG_PROPERTIES: frozenset[str] + +class CSSSanitizer: + allowed_css_properties: Container[str] + allowed_svg_properties: Container[str] + + def __init__(self, allowed_css_properties: Container[str] = ..., allowed_svg_properties: Container[str] = ...) -> None: ... + def sanitize_css(self, style: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/html5lib_shim.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/html5lib_shim.pyi new file mode 100644 index 00000000..bfe7e9ca --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/html5lib_shim.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete +from collections.abc import Generator, Iterable + +class HTMLParser: # actually html5lib.HTMLParser + def __getattr__(self, __name: str) -> Incomplete: ... + +class Filter: # actually html5lib.filters.base.Filter + def __getattr__(self, __name: str) -> Incomplete: ... + +class SanitizerFilter: # actually html5lib.filters.sanitizer.Filter + def __getattr__(self, __name: str) -> Incomplete: ... + +class HTMLSerializer: # actually html5lib.serializer.HTMLSerializer + def __getattr__(self, __name: str) -> Incomplete: ... + +class BleachHTMLParser(HTMLParser): + tags: list[str] | None + strip: bool + consume_entities: bool + def __init__(self, tags: Iterable[str] | None, strip: bool, consume_entities: bool, **kwargs) -> None: ... + +class BleachHTMLSerializer(HTMLSerializer): + escape_rcdata: bool + def escape_base_amp(self, stoken: str) -> Generator[str, None, None]: ... + def serialize(self, treewalker, encoding: str | None = ...) -> Generator[str, None, None]: ... + +def __getattr__(__name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/linkifier.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/linkifier.pyi new file mode 100644 index 00000000..d9555a53 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/linkifier.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete +from collections.abc import Container, Iterable, Iterator +from re import Pattern + +from .callbacks import _Callback +from .html5lib_shim import Filter + +DEFAULT_CALLBACKS: list[_Callback] + +TLDS: list[str] + +def build_url_re(tlds: Iterable[str] = ..., protocols: Iterable[str] = ...) -> Pattern[str]: ... + +URL_RE: Pattern[str] +PROTO_RE: Pattern[str] + +def build_email_re(tlds: Iterable[str] = ...) -> Pattern[str]: ... + +EMAIL_RE: Pattern[str] + +class Linker: + def __init__( + self, + callbacks: Iterable[_Callback] = ..., + skip_tags: Container[str] | None = None, + parse_email: bool = False, + url_re: Pattern[str] = ..., + email_re: Pattern[str] = ..., + recognized_tags: Container[str] | None = ..., + ) -> None: ... + def linkify(self, text: str) -> str: ... 
+ +class LinkifyFilter(Filter): + callbacks: Iterable[_Callback] + skip_tags: Container[str] + parse_email: bool + url_re: Pattern[str] + email_re: Pattern[str] + def __init__( + self, + source, + callbacks: Iterable[_Callback] | None = ..., + skip_tags: Container[str] | None = None, + parse_email: bool = False, + url_re: Pattern[str] = ..., + email_re: Pattern[str] = ..., + ) -> None: ... + def apply_callbacks(self, attrs, is_new): ... + def extract_character_data(self, token_list): ... + def handle_email_addresses(self, src_iter): ... + def strip_non_url_bits(self, fragment): ... + def handle_links(self, src_iter): ... + def handle_a_tag(self, token_buffer): ... + def extract_entities(self, token): ... + def __iter__(self) -> Iterator[Incomplete]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/sanitizer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/sanitizer.pyi new file mode 100644 index 00000000..fc2fd810 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/bleach/bleach/sanitizer.pyi @@ -0,0 +1,85 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Iterable +from re import Pattern +from typing import Protocol +from typing_extensions import TypeAlias + +from . import _HTMLAttrKey +from .css_sanitizer import CSSSanitizer +from .html5lib_shim import BleachHTMLParser, BleachHTMLSerializer, SanitizerFilter + +ALLOWED_TAGS: frozenset[str] +ALLOWED_ATTRIBUTES: dict[str, list[str]] +ALLOWED_PROTOCOLS: frozenset[str] + +INVISIBLE_CHARACTERS: str +INVISIBLE_CHARACTERS_RE: Pattern[str] +INVISIBLE_REPLACEMENT_CHAR: str + +# A html5lib Filter class +class _Filter(Protocol): + def __call__(self, *, source: BleachSanitizerFilter) -> Incomplete: ... + +_AttributeFilter: TypeAlias = Callable[[str, str, str], bool] +_AttributeDict: TypeAlias = dict[str, list[str] | _AttributeFilter] | dict[str, list[str]] | dict[str, _AttributeFilter] +_Attributes: TypeAlias = _AttributeFilter | _AttributeDict | list[str] + +_TreeWalker: TypeAlias = Callable[[Incomplete], Incomplete] + +class Cleaner: + tags: Iterable[str] + attributes: _Attributes + protocols: Iterable[str] + strip: bool + strip_comments: bool + filters: Iterable[_Filter] + css_sanitizer: CSSSanitizer | None + parser: BleachHTMLParser + walker: _TreeWalker + serializer: BleachHTMLSerializer + def __init__( + self, + tags: Iterable[str] = ..., + attributes: _Attributes = ..., + protocols: Iterable[str] = ..., + strip: bool = ..., + strip_comments: bool = ..., + filters: Iterable[_Filter] | None = ..., + css_sanitizer: CSSSanitizer | None = ..., + ) -> None: ... + def clean(self, text: str) -> str: ... + +def attribute_filter_factory(attributes: _Attributes) -> _AttributeFilter: ... 
+ +class BleachSanitizerFilter(SanitizerFilter): + allowed_tags: frozenset[str] + allowed_protocols: frozenset[str] + attr_filter: _AttributeFilter + strip_disallowed_tags: bool + strip_html_comments: bool + attr_val_is_uri: frozenset[_HTMLAttrKey] + svg_attr_val_allows_ref: frozenset[_HTMLAttrKey] + svg_allow_local_href: frozenset[_HTMLAttrKey] + css_sanitizer: CSSSanitizer | None + def __init__( + self, + source, + allowed_tags: Iterable[str] = ..., + attributes: _Attributes = ..., + allowed_protocols: Iterable[str] = ..., + attr_val_is_uri: frozenset[_HTMLAttrKey] = ..., + svg_attr_val_allows_ref: frozenset[_HTMLAttrKey] = ..., + svg_allow_local_href: frozenset[_HTMLAttrKey] = ..., + strip_disallowed_tags: bool = False, + strip_html_comments: bool = True, + css_sanitizer: CSSSanitizer | None = None, + ) -> None: ... + def sanitize_stream(self, token_iterator): ... + def merge_characters(self, token_iterator): ... + def __iter__(self): ... + def sanitize_token(self, token): ... + def sanitize_characters(self, token): ... + def sanitize_uri_value(self, value, allowed_protocols): ... + def allow_token(self, token): ... + def disallowed_token(self, token): ... + def sanitize_css(self, style): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..e5fa8bd1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/@tests/stubtest_allowlist.txt @@ -0,0 +1,9 @@ +boto.connection.AWSQueryConnection.make_request +boto.elb +boto.kms.layer1.KMSConnection.make_request +boto.s3.S3RegionInfo.connect +boto.s3.bucket.Bucket.get_location +boto.s3.bucket.Bucket.get_tags +boto.s3.bucket.Bucket.get_xml_tags +boto.s3.connection.S3Connection.make_request +boto.utils.LazyLoadMetadata.get diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/METADATA.toml new file mode 100644 index 00000000..ca0c1fad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/METADATA.toml @@ -0,0 +1,5 @@ +version = "2.49.*" +requires = [] + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/__init__.pyi new file mode 100644 index 00000000..e3dc24ae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/__init__.pyi @@ -0,0 +1,130 @@ +import logging +from _typeshed import Incomplete +from typing import Any + +from .s3.connection import S3Connection + +Version: Any +UserAgent: Any +config: Any +BUCKET_NAME_RE: Any +TOO_LONG_DNS_NAME_COMP: Any +GENERATION_RE: Any +VERSION_RE: Any +ENDPOINTS_PATH: Any + +def init_logging(): ... + +class NullHandler(logging.Handler): + def emit(self, record): ... + +log: Any +perflog: Any + +def set_file_logger(name, filepath, level: Any = ..., format_string: Incomplete | None = ...): ... +def set_stream_logger(name, level: Any = ..., format_string: Incomplete | None = ...): ... +def connect_sqs(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... 
+def connect_s3(aws_access_key_id: str | None = ..., aws_secret_access_key: str | None = ..., **kwargs) -> S3Connection: ... +def connect_gs(gs_access_key_id: Incomplete | None = ..., gs_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_ec2(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_elb(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_autoscale(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_cloudwatch(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_sdb(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_fps(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_mturk(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_cloudfront(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_vpc(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_rds(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_rds2(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_emr(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_sns(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_iam(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_route53(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_cloudformation( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_euca( + host: Incomplete | None = ..., + aws_access_key_id: Incomplete | None = ..., + aws_secret_access_key: Incomplete | None = ..., + port: int = ..., + path: str = ..., + is_secure: bool = ..., + **kwargs, +): ... +def connect_glacier(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_ec2_endpoint( + url, aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_walrus( + host: Incomplete | None = ..., + aws_access_key_id: Incomplete | None = ..., + aws_secret_access_key: Incomplete | None = ..., + port: int = ..., + path: str = ..., + is_secure: bool = ..., + **kwargs, +): ... +def connect_ses(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_sts(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_ia( + ia_access_key_id: Incomplete | None = ..., ia_secret_access_key: Incomplete | None = ..., is_secure: bool = ..., **kwargs +): ... +def connect_dynamodb(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... 
+def connect_swf(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_cloudsearch(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_cloudsearch2( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., sign_request: bool = ..., **kwargs +): ... +def connect_cloudsearchdomain( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_beanstalk(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_elastictranscoder( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_opsworks(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_redshift(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_support(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_cloudtrail(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_directconnect( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_kinesis(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_logs(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_route53domains( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_cognito_identity( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_cognito_sync( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_kms(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_awslambda(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_codedeploy(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_configservice( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_cloudhsm(aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs): ... +def connect_ec2containerservice( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def connect_machinelearning( + aws_access_key_id: Incomplete | None = ..., aws_secret_access_key: Incomplete | None = ..., **kwargs +): ... +def storage_uri( + uri_str, + default_scheme: str = ..., + debug: int = ..., + validate: bool = ..., + bucket_storage_uri_class: Any = ..., + suppress_consec_slashes: bool = ..., + is_latest: bool = ..., +): ... +def storage_uri_for_key(key): ... + +# Explicitly mark this package as incomplete. +def __getattr__(name: str) -> Incomplete: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/auth.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/auth.pyi new file mode 100644 index 00000000..9beccfa6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/auth.pyi @@ -0,0 +1,112 @@ +from _typeshed import Incomplete +from typing import Any + +from boto.auth_handler import AuthHandler + +SIGV4_DETECT: Any + +class HmacKeys: + host: Any + def __init__(self, host, config, provider) -> None: ... + def update_provider(self, provider): ... + def algorithm(self): ... + def sign_string(self, string_to_sign): ... + +class AnonAuthHandler(AuthHandler, HmacKeys): + capability: Any + def __init__(self, host, config, provider) -> None: ... + def add_auth(self, http_request, **kwargs): ... + +class HmacAuthV1Handler(AuthHandler, HmacKeys): + capability: Any + def __init__(self, host, config, provider) -> None: ... + def update_provider(self, provider): ... + def add_auth(self, http_request, **kwargs): ... + +class HmacAuthV2Handler(AuthHandler, HmacKeys): + capability: Any + def __init__(self, host, config, provider) -> None: ... + def update_provider(self, provider): ... + def add_auth(self, http_request, **kwargs): ... + +class HmacAuthV3Handler(AuthHandler, HmacKeys): + capability: Any + def __init__(self, host, config, provider) -> None: ... + def add_auth(self, http_request, **kwargs): ... + +class HmacAuthV3HTTPHandler(AuthHandler, HmacKeys): + capability: Any + def __init__(self, host, config, provider) -> None: ... + def headers_to_sign(self, http_request): ... + def canonical_headers(self, headers_to_sign): ... + def string_to_sign(self, http_request): ... + def add_auth(self, req, **kwargs): ... + +class HmacAuthV4Handler(AuthHandler, HmacKeys): + capability: Any + service_name: Any + region_name: Any + def __init__( + self, host, config, provider, service_name: Incomplete | None = ..., region_name: Incomplete | None = ... + ) -> None: ... + def headers_to_sign(self, http_request): ... + def host_header(self, host, http_request): ... + def query_string(self, http_request): ... + def canonical_query_string(self, http_request): ... + def canonical_headers(self, headers_to_sign): ... + def signed_headers(self, headers_to_sign): ... + def canonical_uri(self, http_request): ... + def payload(self, http_request): ... + def canonical_request(self, http_request): ... + def scope(self, http_request): ... + def split_host_parts(self, host): ... + def determine_region_name(self, host): ... + def determine_service_name(self, host): ... + def credential_scope(self, http_request): ... + def string_to_sign(self, http_request, canonical_request): ... + def signature(self, http_request, string_to_sign): ... + def add_auth(self, req, **kwargs): ... + +class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler): + capability: Any + region_name: Any + def __init__(self, *args, **kwargs) -> None: ... + def clean_region_name(self, region_name): ... + def canonical_uri(self, http_request): ... + def canonical_query_string(self, http_request): ... + def host_header(self, host, http_request): ... + def headers_to_sign(self, http_request): ... + def determine_region_name(self, host): ... + def determine_service_name(self, host): ... + def mangle_path_and_params(self, req): ... + def payload(self, http_request): ... + def add_auth(self, req, **kwargs): ... + def presign(self, req, expires, iso_date: Incomplete | None = ...): ... 
+ +class STSAnonHandler(AuthHandler): + capability: Any + def add_auth(self, http_request, **kwargs): ... + +class QuerySignatureHelper(HmacKeys): + def add_auth(self, http_request, **kwargs): ... + +class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler): + SignatureVersion: int + capability: Any + +class QuerySignatureV1AuthHandler(QuerySignatureHelper, AuthHandler): + SignatureVersion: int + capability: Any + def __init__(self, *args, **kw) -> None: ... + +class QuerySignatureV2AuthHandler(QuerySignatureHelper, AuthHandler): + SignatureVersion: int + capability: Any + +class POSTPathQSV2AuthHandler(QuerySignatureV2AuthHandler, AuthHandler): + capability: Any + def add_auth(self, req, **kwargs): ... + +def get_auth_handler(host, config, provider, requested_capability: Incomplete | None = ...): ... +def detect_potential_sigv4(func): ... +def detect_potential_s3sigv4(func): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/auth_handler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/auth_handler.pyi new file mode 100644 index 00000000..7cc874bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/auth_handler.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from boto.plugin import Plugin + +class NotReadyToAuthenticate(Exception): ... + +class AuthHandler(Plugin): + capability: Any + def __init__(self, host, config, provider) -> None: ... + def add_auth(self, http_request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/compat.pyi new file mode 100644 index 00000000..5f6e4cc5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/compat.pyi @@ -0,0 +1,8 @@ +from base64 import encodebytes as encodebytes +from typing import Any + +expanduser: Any +StandardError = Exception +long_type: Any +unquote_str: Any +parse_qs_safe: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/connection.pyi new file mode 100644 index 00000000..8a86a980 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/connection.pyi @@ -0,0 +1,178 @@ +import http.client +from _typeshed import Incomplete +from typing import Any + +HAVE_HTTPS_CONNECTION: bool +ON_APP_ENGINE: Any +PORTS_BY_SECURITY: Any +DEFAULT_CA_CERTS_FILE: Any + +class HostConnectionPool: + queue: Any + def __init__(self) -> None: ... + def size(self): ... + def put(self, conn): ... + def get(self): ... + def clean(self): ... + +class ConnectionPool: + CLEAN_INTERVAL: float + STALE_DURATION: float + host_to_pool: Any + last_clean_time: float + mutex: Any + def __init__(self) -> None: ... + def size(self): ... + def get_http_connection(self, host, port, is_secure): ... + def put_http_connection(self, host, port, is_secure, conn): ... + def clean(self): ... + +class HTTPRequest: + method: Any + protocol: Any + host: Any + port: Any + path: Any + auth_path: Any + params: Any + headers: Any + body: Any + def __init__(self, method, protocol, host, port, path, auth_path, params, headers, body) -> None: ... + def authorize(self, connection, **kwargs): ... 
+ +class HTTPResponse(http.client.HTTPResponse): + def __init__(self, *args, **kwargs) -> None: ... + def read(self, amt: Incomplete | None = ...): ... + +class AWSAuthConnection: + suppress_consec_slashes: Any + num_retries: int + is_secure: Any + https_validate_certificates: Any + ca_certificates_file: Any + port: Any + http_exceptions: Any + http_unretryable_exceptions: Any + socket_exception_values: Any + https_connection_factory: Any + protocol: str + host: Any + path: Any + debug: Any + host_header: Any + http_connection_kwargs: Any + provider: Any + auth_service_name: Any + request_hook: Any + def __init__( + self, + host, + aws_access_key_id: Incomplete | None = ..., + aws_secret_access_key: Incomplete | None = ..., + is_secure: bool = ..., + port: Incomplete | None = ..., + proxy: Incomplete | None = ..., + proxy_port: Incomplete | None = ..., + proxy_user: Incomplete | None = ..., + proxy_pass: Incomplete | None = ..., + debug: int = ..., + https_connection_factory: Incomplete | None = ..., + path: str = ..., + provider: str = ..., + security_token: Incomplete | None = ..., + suppress_consec_slashes: bool = ..., + validate_certs: bool = ..., + profile_name: Incomplete | None = ..., + ) -> None: ... + auth_region_name: Any + @property + def connection(self): ... + @property + def aws_access_key_id(self): ... + @property + def gs_access_key_id(self) -> Any: ... + @property + def access_key(self): ... + @property + def aws_secret_access_key(self): ... + @property + def gs_secret_access_key(self): ... + @property + def secret_key(self): ... + @property + def profile_name(self): ... + def get_path(self, path: str = ...): ... + def server_name(self, port: Incomplete | None = ...): ... + proxy: Any + proxy_port: Any + proxy_user: Any + proxy_pass: Any + no_proxy: Any + use_proxy: Any + def handle_proxy(self, proxy, proxy_port, proxy_user, proxy_pass): ... + def get_http_connection(self, host, port, is_secure): ... + def skip_proxy(self, host): ... + def new_http_connection(self, host, port, is_secure): ... + def put_http_connection(self, host, port, is_secure, connection): ... + def proxy_ssl(self, host: Incomplete | None = ..., port: Incomplete | None = ...): ... + def prefix_proxy_to_path(self, path, host: Incomplete | None = ...): ... + def get_proxy_auth_header(self): ... + def get_proxy_url_with_auth(self): ... + def set_host_header(self, request): ... + def set_request_hook(self, hook): ... + def build_base_http_request( + self, + method, + path, + auth_path, + params: Incomplete | None = ..., + headers: Incomplete | None = ..., + data: str = ..., + host: Incomplete | None = ..., + ): ... + def make_request( + self, + method, + path, + headers: Incomplete | None = ..., + data: str = ..., + host: Incomplete | None = ..., + auth_path: Incomplete | None = ..., + sender: Incomplete | None = ..., + override_num_retries: Incomplete | None = ..., + params: Incomplete | None = ..., + retry_handler: Incomplete | None = ..., + ): ... + def close(self): ... 
+ +class AWSQueryConnection(AWSAuthConnection): + APIVersion: str + ResponseError: Any + def __init__( + self, + aws_access_key_id: Incomplete | None = ..., + aws_secret_access_key: Incomplete | None = ..., + is_secure: bool = ..., + port: Incomplete | None = ..., + proxy: Incomplete | None = ..., + proxy_port: Incomplete | None = ..., + proxy_user: Incomplete | None = ..., + proxy_pass: Incomplete | None = ..., + host: Incomplete | None = ..., + debug: int = ..., + https_connection_factory: Incomplete | None = ..., + path: str = ..., + security_token: Incomplete | None = ..., + validate_certs: bool = ..., + profile_name: Incomplete | None = ..., + provider: str = ..., + ) -> None: ... + def get_utf8_value(self, value): ... + def make_request( # type: ignore[override] + self, action, params: Incomplete | None = ..., path: str = ..., verb: str = ..., *args, **kwargs + ): ... + def build_list_params(self, params, items, label): ... + def build_complex_list_params(self, params, items, label, names): ... + def get_list(self, action, params, markers, path: str = ..., parent: Incomplete | None = ..., verb: str = ...): ... + def get_object(self, action, params, cls, path: str = ..., parent: Incomplete | None = ..., verb: str = ...): ... + def get_status(self, action, params, path: str = ..., parent: Incomplete | None = ..., verb: str = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/ec2/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/ec2/__init__.pyi new file mode 100644 index 00000000..e21a2bfe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/ec2/__init__.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from typing import Any + +RegionData: Any + +def regions(**kw_params): ... +def connect_to_region(region_name, **kw_params): ... +def get_region(region_name, **kw_params): ... + +# Explicitly mark this package as incomplete. +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/elb/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/elb/__init__.pyi new file mode 100644 index 00000000..d2a79d5f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/elb/__init__.pyi @@ -0,0 +1,59 @@ +from typing import Any + +from boto.connection import AWSQueryConnection + +RegionData: Any + +def regions(): ... +def connect_to_region(region_name, **kw_params): ... + +class ELBConnection(AWSQueryConnection): + APIVersion: Any + DefaultRegionName: Any + DefaultRegionEndpoint: Any + region: Any + def __init__( + self, + aws_access_key_id=..., + aws_secret_access_key=..., + is_secure=..., + port=..., + proxy=..., + proxy_port=..., + proxy_user=..., + proxy_pass=..., + debug=..., + https_connection_factory=..., + region=..., + path=..., + security_token=..., + validate_certs=..., + profile_name=..., + ) -> None: ... + def build_list_params(self, params, items, label): ... + def get_all_load_balancers(self, load_balancer_names=..., marker=...): ... + def create_load_balancer( + self, name, zones, listeners=..., subnets=..., security_groups=..., scheme=..., complex_listeners=... + ): ... + def create_load_balancer_listeners(self, name, listeners=..., complex_listeners=...): ... + def delete_load_balancer(self, name): ... 
+ def delete_load_balancer_listeners(self, name, ports): ... + def enable_availability_zones(self, load_balancer_name, zones_to_add): ... + def disable_availability_zones(self, load_balancer_name, zones_to_remove): ... + def modify_lb_attribute(self, load_balancer_name, attribute, value): ... + def get_all_lb_attributes(self, load_balancer_name): ... + def get_lb_attribute(self, load_balancer_name, attribute): ... + def register_instances(self, load_balancer_name, instances): ... + def deregister_instances(self, load_balancer_name, instances): ... + def describe_instance_health(self, load_balancer_name, instances=...): ... + def configure_health_check(self, name, health_check): ... + def set_lb_listener_SSL_certificate(self, lb_name, lb_port, ssl_certificate_id): ... + def create_app_cookie_stickiness_policy(self, name, lb_name, policy_name): ... + def create_lb_cookie_stickiness_policy(self, cookie_expiration_period, lb_name, policy_name): ... + def create_lb_policy(self, lb_name, policy_name, policy_type, policy_attributes): ... + def delete_lb_policy(self, lb_name, policy_name): ... + def set_lb_policies_of_listener(self, lb_name, lb_port, policies): ... + def set_lb_policies_of_backend_server(self, lb_name, instance_port, policies): ... + def apply_security_groups_to_lb(self, name, security_groups): ... + def attach_lb_to_subnets(self, name, subnets): ... + def detach_lb_from_subnets(self, name, subnets): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/exception.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/exception.pyi new file mode 100644 index 00000000..009da58d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/exception.pyi @@ -0,0 +1,148 @@ +from _typeshed import Incomplete +from typing import Any + +from boto.compat import StandardError + +class BotoClientError(StandardError): + reason: Any + def __init__(self, reason, *args) -> None: ... + +class SDBPersistenceError(StandardError): ... +class StoragePermissionsError(BotoClientError): ... +class S3PermissionsError(StoragePermissionsError): ... +class GSPermissionsError(StoragePermissionsError): ... + +class BotoServerError(StandardError): + status: Any + reason: Any + body: Any + request_id: Any + error_code: Any + message: str + box_usage: Any + def __init__(self, status, reason, body: Incomplete | None = ..., *args) -> None: ... + def __getattr__(self, name: str): ... + def __setattr__(self, name: str, value) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + +class ConsoleOutput: + parent: Any + instance_id: Any + timestamp: Any + comment: Any + output: Any + def __init__(self, parent: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + +class StorageCreateError(BotoServerError): + bucket: Any + def __init__(self, status, reason, body: Incomplete | None = ...) -> None: ... + def endElement(self, name, value, connection): ... + +class S3CreateError(StorageCreateError): ... +class GSCreateError(StorageCreateError): ... +class StorageCopyError(BotoServerError): ... +class S3CopyError(StorageCopyError): ... +class GSCopyError(StorageCopyError): ... + +class SQSError(BotoServerError): + detail: Any + type: Any + def __init__(self, status, reason, body: Incomplete | None = ...) -> None: ... 
+ def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + +class SQSDecodeError(BotoClientError): + message: Any + def __init__(self, reason, message) -> None: ... + +class StorageResponseError(BotoServerError): + resource: Any + def __init__(self, status, reason, body: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + +class S3ResponseError(StorageResponseError): ... +class GSResponseError(StorageResponseError): ... + +class EC2ResponseError(BotoServerError): + errors: Any + def __init__(self, status, reason, body: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + request_id: Any + def endElement(self, name, value, connection): ... + +class JSONResponseError(BotoServerError): + status: Any + reason: Any + body: Any + error_message: Any + error_code: Any + def __init__(self, status, reason, body: Incomplete | None = ..., *args) -> None: ... + +class DynamoDBResponseError(JSONResponseError): ... +class SWFResponseError(JSONResponseError): ... +class EmrResponseError(BotoServerError): ... + +class _EC2Error: + connection: Any + error_code: Any + error_message: Any + def __init__(self, connection: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + +class SDBResponseError(BotoServerError): ... +class AWSConnectionError(BotoClientError): ... +class StorageDataError(BotoClientError): ... +class S3DataError(StorageDataError): ... +class GSDataError(StorageDataError): ... + +class InvalidUriError(Exception): + message: Any + def __init__(self, message) -> None: ... + +class InvalidAclError(Exception): + message: Any + def __init__(self, message) -> None: ... + +class InvalidCorsError(Exception): + message: Any + def __init__(self, message) -> None: ... + +class NoAuthHandlerFound(Exception): ... + +class InvalidLifecycleConfigError(Exception): + message: Any + def __init__(self, message) -> None: ... + +class ResumableTransferDisposition: + START_OVER: str + WAIT_BEFORE_RETRY: str + ABORT_CUR_PROCESS: str + ABORT: str + +class ResumableUploadException(Exception): + message: Any + disposition: Any + def __init__(self, message, disposition) -> None: ... + +class ResumableDownloadException(Exception): + message: Any + disposition: Any + def __init__(self, message, disposition) -> None: ... + +class TooManyRecordsException(Exception): + message: Any + def __init__(self, message) -> None: ... + +class PleaseRetryException(Exception): + message: Any + response: Any + def __init__(self, message, response: Incomplete | None = ...) -> None: ... + +class InvalidInstanceMetadataError(Exception): + MSG: str + def __init__(self, msg) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/kms/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/kms/__init__.pyi new file mode 100644 index 00000000..9fc30112 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/kms/__init__.pyi @@ -0,0 +1,4 @@ +import boto.regioninfo + +def regions() -> list[boto.regioninfo.RegionInfo]: ... +def connect_to_region(region_name, **kw_params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/kms/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/kms/exceptions.pyi new file mode 100644 index 00000000..5ac2ecd2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/kms/exceptions.pyi @@ -0,0 +1,17 @@ +from boto.exception import BotoServerError + +class InvalidGrantTokenException(BotoServerError): ... +class DisabledException(BotoServerError): ... +class LimitExceededException(BotoServerError): ... +class DependencyTimeoutException(BotoServerError): ... +class InvalidMarkerException(BotoServerError): ... +class AlreadyExistsException(BotoServerError): ... +class InvalidCiphertextException(BotoServerError): ... +class KeyUnavailableException(BotoServerError): ... +class InvalidAliasNameException(BotoServerError): ... +class UnsupportedOperationException(BotoServerError): ... +class InvalidArnException(BotoServerError): ... +class KMSInternalException(BotoServerError): ... +class InvalidKeyUsageException(BotoServerError): ... +class MalformedPolicyDocumentException(BotoServerError): ... +class NotFoundException(BotoServerError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/kms/layer1.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/kms/layer1.pyi new file mode 100644 index 00000000..ed34447c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/kms/layer1.pyi @@ -0,0 +1,78 @@ +from collections.abc import Mapping +from typing import Any + +from boto.connection import AWSQueryConnection + +class KMSConnection(AWSQueryConnection): + APIVersion: str + DefaultRegionName: str + DefaultRegionEndpoint: str + ServiceName: str + TargetPrefix: str + ResponseError: type[Exception] + region: Any + def __init__(self, **kwargs) -> None: ... + def create_alias(self, alias_name: str, target_key_id: str) -> dict[str, Any] | None: ... + def create_grant( + self, + key_id: str, + grantee_principal: str, + retiring_principal: str | None = ..., + operations: list[str] | None = ..., + constraints: dict[str, dict[str, str]] | None = ..., + grant_tokens: list[str] | None = ..., + ) -> dict[str, Any] | None: ... + def create_key( + self, policy: str | None = ..., description: str | None = ..., key_usage: str | None = ... + ) -> dict[str, Any] | None: ... + def decrypt( + self, ciphertext_blob: bytes, encryption_context: Mapping[str, Any] | None = ..., grant_tokens: list[str] | None = ... + ) -> dict[str, Any] | None: ... + def delete_alias(self, alias_name: str) -> dict[str, Any] | None: ... + def describe_key(self, key_id: str) -> dict[str, Any] | None: ... + def disable_key(self, key_id: str) -> dict[str, Any] | None: ... + def disable_key_rotation(self, key_id: str) -> dict[str, Any] | None: ... + def enable_key(self, key_id: str) -> dict[str, Any] | None: ... + def enable_key_rotation(self, key_id: str) -> dict[str, Any] | None: ... + def encrypt( + self, + key_id: str, + plaintext: bytes, + encryption_context: Mapping[str, Any] | None = ..., + grant_tokens: list[str] | None = ..., + ) -> dict[str, Any] | None: ... + def generate_data_key( + self, + key_id: str, + encryption_context: Mapping[str, Any] | None = ..., + number_of_bytes: int | None = ..., + key_spec: str | None = ..., + grant_tokens: list[str] | None = ..., + ) -> dict[str, Any] | None: ... 
+ def generate_data_key_without_plaintext( + self, + key_id: str, + encryption_context: Mapping[str, Any] | None = ..., + key_spec: str | None = ..., + number_of_bytes: int | None = ..., + grant_tokens: list[str] | None = ..., + ) -> dict[str, Any] | None: ... + def generate_random(self, number_of_bytes: int | None = ...) -> dict[str, Any] | None: ... + def get_key_policy(self, key_id: str, policy_name: str) -> dict[str, Any] | None: ... + def get_key_rotation_status(self, key_id: str) -> dict[str, Any] | None: ... + def list_aliases(self, limit: int | None = ..., marker: str | None = ...) -> dict[str, Any] | None: ... + def list_grants(self, key_id: str, limit: int | None = ..., marker: str | None = ...) -> dict[str, Any] | None: ... + def list_key_policies(self, key_id: str, limit: int | None = ..., marker: str | None = ...) -> dict[str, Any] | None: ... + def list_keys(self, limit: int | None = ..., marker: str | None = ...) -> dict[str, Any] | None: ... + def put_key_policy(self, key_id: str, policy_name: str, policy: str) -> dict[str, Any] | None: ... + def re_encrypt( + self, + ciphertext_blob: bytes, + destination_key_id: str, + source_encryption_context: Mapping[str, Any] | None = ..., + destination_encryption_context: Mapping[str, Any] | None = ..., + grant_tokens: list[str] | None = ..., + ) -> dict[str, Any] | None: ... + def retire_grant(self, grant_token: str) -> dict[str, Any] | None: ... + def revoke_grant(self, key_id: str, grant_id: str) -> dict[str, Any] | None: ... + def update_key_description(self, key_id: str, description: str) -> dict[str, Any] | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/plugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/plugin.pyi new file mode 100644 index 00000000..3723cf68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/plugin.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete +from typing import Any + +class Plugin: + capability: Any + @classmethod + def is_capable(cls, requested_capability): ... + +def get_plugin(cls, requested_capability: Incomplete | None = ...): ... +def load_plugins(config): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/regioninfo.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/regioninfo.pyi new file mode 100644 index 00000000..9a400e88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/regioninfo.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete +from typing import Any + +def load_endpoint_json(path): ... +def merge_endpoints(defaults, additions): ... +def load_regions(): ... +def get_regions(service_name, region_cls: Incomplete | None = ..., connection_cls: Incomplete | None = ...): ... + +class RegionInfo: + connection: Any + name: Any + endpoint: Any + connection_cls: Any + def __init__( + self, + connection: Incomplete | None = ..., + name: Incomplete | None = ..., + endpoint: Incomplete | None = ..., + connection_cls: Incomplete | None = ..., + ) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def connect(self, **kw_params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/__init__.pyi new file mode 100644 index 00000000..c2ceb4b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/__init__.pyi @@ -0,0 +1,16 @@ +from boto.connection import AWSAuthConnection +from boto.regioninfo import RegionInfo + +from .connection import S3Connection + +class S3RegionInfo(RegionInfo): + def connect( + self, + name: str | None = ..., + endpoint: str | None = ..., + connection_cls: type[AWSAuthConnection] | None = ..., + **kw_params, + ) -> S3Connection: ... + +def regions() -> list[S3RegionInfo]: ... +def connect_to_region(region_name: str, **kw_params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/acl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/acl.pyi new file mode 100644 index 00000000..1c22780a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/acl.pyi @@ -0,0 +1,49 @@ +from _typeshed import Incomplete +from typing import Any + +from .connection import S3Connection +from .user import User + +CannedACLStrings: list[str] + +class Policy: + parent: Any + namespace: Any + acl: ACL + def __init__(self, parent: Incomplete | None = ...) -> None: ... + owner: User + def startElement(self, name: str, attrs: dict[str, Any], connection: S3Connection) -> None | User | ACL: ... + def endElement(self, name: str, value: Any, connection: S3Connection) -> None: ... + def to_xml(self) -> str: ... + +class ACL: + policy: Policy + grants: list[Grant] + def __init__(self, policy: Policy | None = ...) -> None: ... + def add_grant(self, grant: Grant) -> None: ... + def add_email_grant(self, permission: str, email_address: str) -> None: ... + def add_user_grant(self, permission: str, user_id: str, display_name: str | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name: str, value: Any, connection: S3Connection) -> None: ... + def to_xml(self) -> str: ... + +class Grant: + NameSpace: str + permission: str + id: str + display_name: str + uri: str + email_address: str + type: str + def __init__( + self, + permission: str | None = ..., + type: str | None = ..., + id: str | None = ..., + display_name: str | None = ..., + uri: str | None = ..., + email_address: str | None = ..., + ) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name: str, value: Any, connection: S3Connection) -> None: ... + def to_xml(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/bucket.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/bucket.pyi new file mode 100644 index 00000000..9e065644 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/bucket.pyi @@ -0,0 +1,190 @@ +from _typeshed import Incomplete +from typing import Any + +from .bucketlistresultset import BucketListResultSet +from .connection import S3Connection +from .key import Key + +class S3WebsiteEndpointTranslate: + trans_region: dict[str, str] + @classmethod + def translate_region(cls, reg: str) -> str: ... 
+ +S3Permissions: list[str] + +class Bucket: + LoggingGroup: str + BucketPaymentBody: str + VersioningBody: str + VersionRE: str + MFADeleteRE: str + name: str + connection: S3Connection + key_class: type[Key] + def __init__(self, connection: S3Connection | None = ..., name: str | None = ..., key_class: type[Key] = ...) -> None: ... + def __iter__(self): ... + def __contains__(self, key_name) -> bool: ... + def startElement(self, name, attrs, connection): ... + creation_date: Any + def endElement(self, name, value, connection): ... + def set_key_class(self, key_class): ... + def lookup(self, key_name, headers: dict[str, str] | None = ...): ... + def get_key( + self, + key_name, + headers: dict[str, str] | None = ..., + version_id: Incomplete | None = ..., + response_headers: dict[str, str] | None = ..., + validate: bool = ..., + ) -> Key: ... + def list( + self, + prefix: str = ..., + delimiter: str = ..., + marker: str = ..., + headers: dict[str, str] | None = ..., + encoding_type: Incomplete | None = ..., + ) -> BucketListResultSet: ... + def list_versions( + self, + prefix: str = ..., + delimiter: str = ..., + key_marker: str = ..., + version_id_marker: str = ..., + headers: dict[str, str] | None = ..., + encoding_type: str | None = ..., + ) -> BucketListResultSet: ... + def list_multipart_uploads( + self, + key_marker: str = ..., + upload_id_marker: str = ..., + headers: dict[str, str] | None = ..., + encoding_type: Incomplete | None = ..., + ): ... + def validate_kwarg_names(self, kwargs, names): ... + def get_all_keys(self, headers: dict[str, str] | None = ..., **params): ... + def get_all_versions(self, headers: dict[str, str] | None = ..., **params): ... + def validate_get_all_versions_params(self, params): ... + def get_all_multipart_uploads(self, headers: dict[str, str] | None = ..., **params): ... + def new_key(self, key_name: Incomplete | None = ...): ... + def generate_url( + self, + expires_in, + method: str = ..., + headers: dict[str, str] | None = ..., + force_http: bool = ..., + response_headers: dict[str, str] | None = ..., + expires_in_absolute: bool = ..., + ): ... + def delete_keys(self, keys, quiet: bool = ..., mfa_token: Incomplete | None = ..., headers: dict[str, str] | None = ...): ... + def delete_key( + self, + key_name, + headers: dict[str, str] | None = ..., + version_id: Incomplete | None = ..., + mfa_token: Incomplete | None = ..., + ): ... + def copy_key( + self, + new_key_name, + src_bucket_name, + src_key_name, + metadata: Incomplete | None = ..., + src_version_id: Incomplete | None = ..., + storage_class: str = ..., + preserve_acl: bool = ..., + encrypt_key: bool = ..., + headers: dict[str, str] | None = ..., + query_args: Incomplete | None = ..., + ): ... + def set_canned_acl( + self, acl_str, key_name: str = ..., headers: dict[str, str] | None = ..., version_id: Incomplete | None = ... + ): ... + def get_xml_acl(self, key_name: str = ..., headers: dict[str, str] | None = ..., version_id: Incomplete | None = ...): ... + def set_xml_acl( + self, + acl_str, + key_name: str = ..., + headers: dict[str, str] | None = ..., + version_id: Incomplete | None = ..., + query_args: str = ..., + ): ... + def set_acl( + self, acl_or_str, key_name: str = ..., headers: dict[str, str] | None = ..., version_id: Incomplete | None = ... + ): ... + def get_acl(self, key_name: str = ..., headers: dict[str, str] | None = ..., version_id: Incomplete | None = ...): ... 
+ def set_subresource( + self, subresource, value, key_name: str = ..., headers: dict[str, str] | None = ..., version_id: Incomplete | None = ... + ): ... + def get_subresource( + self, subresource, key_name: str = ..., headers: dict[str, str] | None = ..., version_id: Incomplete | None = ... + ): ... + def make_public(self, recursive: bool = ..., headers: dict[str, str] | None = ...): ... + def add_email_grant(self, permission, email_address, recursive: bool = ..., headers: dict[str, str] | None = ...): ... + def add_user_grant( + self, + permission, + user_id, + recursive: bool = ..., + headers: dict[str, str] | None = ..., + display_name: Incomplete | None = ..., + ): ... + def list_grants(self, headers: dict[str, str] | None = ...): ... + def get_location(self): ... + def set_xml_logging(self, logging_str, headers: dict[str, str] | None = ...): ... + def enable_logging( + self, target_bucket, target_prefix: str = ..., grants: Incomplete | None = ..., headers: dict[str, str] | None = ... + ): ... + def disable_logging(self, headers: dict[str, str] | None = ...): ... + def get_logging_status(self, headers: dict[str, str] | None = ...): ... + def set_as_logging_target(self, headers: dict[str, str] | None = ...): ... + def get_request_payment(self, headers: dict[str, str] | None = ...): ... + def set_request_payment(self, payer: str = ..., headers: dict[str, str] | None = ...): ... + def configure_versioning( + self, versioning, mfa_delete: bool = ..., mfa_token: Incomplete | None = ..., headers: dict[str, str] | None = ... + ): ... + def get_versioning_status(self, headers: dict[str, str] | None = ...): ... + def configure_lifecycle(self, lifecycle_config, headers: dict[str, str] | None = ...): ... + def get_lifecycle_config(self, headers: dict[str, str] | None = ...): ... + def delete_lifecycle_configuration(self, headers: dict[str, str] | None = ...): ... + def configure_website( + self, + suffix: Incomplete | None = ..., + error_key: Incomplete | None = ..., + redirect_all_requests_to: Incomplete | None = ..., + routing_rules: Incomplete | None = ..., + headers: dict[str, str] | None = ..., + ): ... + def set_website_configuration(self, config, headers: dict[str, str] | None = ...): ... + def set_website_configuration_xml(self, xml, headers: dict[str, str] | None = ...): ... + def get_website_configuration(self, headers: dict[str, str] | None = ...): ... + def get_website_configuration_obj(self, headers: dict[str, str] | None = ...): ... + def get_website_configuration_with_xml(self, headers: dict[str, str] | None = ...): ... + def get_website_configuration_xml(self, headers: dict[str, str] | None = ...): ... + def delete_website_configuration(self, headers: dict[str, str] | None = ...): ... + def get_website_endpoint(self): ... + def get_policy(self, headers: dict[str, str] | None = ...): ... + def set_policy(self, policy, headers: dict[str, str] | None = ...): ... + def delete_policy(self, headers: dict[str, str] | None = ...): ... + def set_cors_xml(self, cors_xml, headers: dict[str, str] | None = ...): ... + def set_cors(self, cors_config, headers: dict[str, str] | None = ...): ... + def get_cors_xml(self, headers: dict[str, str] | None = ...): ... + def get_cors(self, headers: dict[str, str] | None = ...): ... + def delete_cors(self, headers: dict[str, str] | None = ...): ... 
+ def initiate_multipart_upload( + self, + key_name, + headers: dict[str, str] | None = ..., + reduced_redundancy: bool = ..., + metadata: Incomplete | None = ..., + encrypt_key: bool = ..., + policy: Incomplete | None = ..., + ): ... + def complete_multipart_upload(self, key_name, upload_id, xml_body, headers: dict[str, str] | None = ...): ... + def cancel_multipart_upload(self, key_name, upload_id, headers: dict[str, str] | None = ...): ... + def delete(self, headers: dict[str, str] | None = ...): ... + def get_tags(self): ... + def get_xml_tags(self): ... + def set_xml_tags(self, tag_str, headers: dict[str, str] | None = ..., query_args: str = ...): ... + def set_tags(self, tags, headers: dict[str, str] | None = ...): ... + def delete_tags(self, headers: dict[str, str] | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/bucketlistresultset.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/bucketlistresultset.pyi new file mode 100644 index 00000000..e85adad7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/bucketlistresultset.pyi @@ -0,0 +1,86 @@ +from _typeshed import Incomplete +from collections.abc import Iterable, Iterator +from typing import Any + +from .key import Key + +def bucket_lister( + bucket, + prefix: str = ..., + delimiter: str = ..., + marker: str = ..., + headers: Incomplete | None = ..., + encoding_type: Incomplete | None = ..., +): ... + +class BucketListResultSet(Iterable[Key]): + bucket: Any + prefix: Any + delimiter: Any + marker: Any + headers: Any + encoding_type: Any + def __init__( + self, + bucket: Incomplete | None = ..., + prefix: str = ..., + delimiter: str = ..., + marker: str = ..., + headers: Incomplete | None = ..., + encoding_type: Incomplete | None = ..., + ) -> None: ... + def __iter__(self) -> Iterator[Key]: ... + +def versioned_bucket_lister( + bucket, + prefix: str = ..., + delimiter: str = ..., + key_marker: str = ..., + version_id_marker: str = ..., + headers: Incomplete | None = ..., + encoding_type: Incomplete | None = ..., +): ... + +class VersionedBucketListResultSet: + bucket: Any + prefix: Any + delimiter: Any + key_marker: Any + version_id_marker: Any + headers: Any + encoding_type: Any + def __init__( + self, + bucket: Incomplete | None = ..., + prefix: str = ..., + delimiter: str = ..., + key_marker: str = ..., + version_id_marker: str = ..., + headers: Incomplete | None = ..., + encoding_type: Incomplete | None = ..., + ) -> None: ... + def __iter__(self) -> Iterator[Key]: ... + +def multipart_upload_lister( + bucket, + key_marker: str = ..., + upload_id_marker: str = ..., + headers: Incomplete | None = ..., + encoding_type: Incomplete | None = ..., +): ... + +class MultiPartUploadListResultSet: + bucket: Any + key_marker: Any + upload_id_marker: Any + headers: Any + encoding_type: Any + def __init__( + self, + bucket: Incomplete | None = ..., + key_marker: str = ..., + upload_id_marker: str = ..., + headers: Incomplete | None = ..., + encoding_type: Incomplete | None = ..., + ) -> None: ... + def __iter__(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/bucketlogging.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/bucketlogging.pyi new file mode 100644 index 00000000..bde7fc4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/bucketlogging.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +class BucketLogging: + target: Any + prefix: Any + grants: Any + def __init__( + self, target: Incomplete | None = ..., prefix: Incomplete | None = ..., grants: Incomplete | None = ... + ) -> None: ... + def add_grant(self, grant): ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/connection.pyi new file mode 100644 index 00000000..5fe8c3c1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/connection.pyi @@ -0,0 +1,142 @@ +from _typeshed import Incomplete +from typing import Any + +from boto.connection import AWSAuthConnection +from boto.exception import BotoClientError + +from .bucket import Bucket + +def check_lowercase_bucketname(n): ... +def assert_case_insensitive(f): ... + +class _CallingFormat: + def get_bucket_server(self, server, bucket): ... + def build_url_base(self, connection, protocol, server, bucket, key: str = ...): ... + def build_host(self, server, bucket): ... + def build_auth_path(self, bucket, key: str = ...): ... + def build_path_base(self, bucket, key: str = ...): ... + +class SubdomainCallingFormat(_CallingFormat): + def get_bucket_server(self, server, bucket): ... + +class VHostCallingFormat(_CallingFormat): + def get_bucket_server(self, server, bucket): ... + +class OrdinaryCallingFormat(_CallingFormat): + def get_bucket_server(self, server, bucket): ... + def build_path_base(self, bucket, key: str = ...): ... + +class ProtocolIndependentOrdinaryCallingFormat(OrdinaryCallingFormat): + def build_url_base(self, connection, protocol, server, bucket, key: str = ...): ... + +class Location: + DEFAULT: str + EU: str + EUCentral1: str + USWest: str + USWest2: str + SAEast: str + APNortheast: str + APSoutheast: str + APSoutheast2: str + CNNorth1: str + +class NoHostProvided: ... +class HostRequiredError(BotoClientError): ... + +class S3Connection(AWSAuthConnection): + DefaultHost: Any + DefaultCallingFormat: Any + QueryString: str + calling_format: Any + bucket_class: type[Bucket] + anon: Any + def __init__( + self, + aws_access_key_id: Incomplete | None = ..., + aws_secret_access_key: Incomplete | None = ..., + is_secure: bool = ..., + port: Incomplete | None = ..., + proxy: Incomplete | None = ..., + proxy_port: Incomplete | None = ..., + proxy_user: Incomplete | None = ..., + proxy_pass: Incomplete | None = ..., + host: Any = ..., + debug: int = ..., + https_connection_factory: Incomplete | None = ..., + calling_format: Any = ..., + path: str = ..., + provider: str = ..., + bucket_class: type[Bucket] = ..., + security_token: Incomplete | None = ..., + suppress_consec_slashes: bool = ..., + anon: bool = ..., + validate_certs: Incomplete | None = ..., + profile_name: Incomplete | None = ..., + ) -> None: ... + def __iter__(self): ... 
+ def __contains__(self, bucket_name): ... + def set_bucket_class(self, bucket_class: type[Bucket]) -> None: ... + def build_post_policy(self, expiration_time, conditions): ... + def build_post_form_args( + self, + bucket_name, + key, + expires_in: int = ..., + acl: Incomplete | None = ..., + success_action_redirect: Incomplete | None = ..., + max_content_length: Incomplete | None = ..., + http_method: str = ..., + fields: Incomplete | None = ..., + conditions: Incomplete | None = ..., + storage_class: str = ..., + server_side_encryption: Incomplete | None = ..., + ): ... + def generate_url_sigv4( + self, + expires_in, + method, + bucket: str = ..., + key: str = ..., + headers: dict[str, str] | None = ..., + force_http: bool = ..., + response_headers: dict[str, str] | None = ..., + version_id: Incomplete | None = ..., + iso_date: Incomplete | None = ..., + ): ... + def generate_url( + self, + expires_in, + method, + bucket: str = ..., + key: str = ..., + headers: dict[str, str] | None = ..., + query_auth: bool = ..., + force_http: bool = ..., + response_headers: dict[str, str] | None = ..., + expires_in_absolute: bool = ..., + version_id: Incomplete | None = ..., + ): ... + def get_all_buckets(self, headers: dict[str, str] | None = ...): ... + def get_canonical_user_id(self, headers: dict[str, str] | None = ...): ... + def get_bucket(self, bucket_name: str, validate: bool = ..., headers: dict[str, str] | None = ...) -> Bucket: ... + def head_bucket(self, bucket_name, headers: dict[str, str] | None = ...): ... + def lookup(self, bucket_name, validate: bool = ..., headers: dict[str, str] | None = ...): ... + def create_bucket( + self, bucket_name, headers: dict[str, str] | None = ..., location: Any = ..., policy: Incomplete | None = ... + ): ... + def delete_bucket(self, bucket, headers: dict[str, str] | None = ...): ... + def make_request( # type: ignore[override] + self, + method, + bucket: str = ..., + key: str = ..., + headers: Incomplete | None = ..., + data: str = ..., + query_args: Incomplete | None = ..., + sender: Incomplete | None = ..., + override_num_retries: Incomplete | None = ..., + retry_handler: Incomplete | None = ..., + *args, + **kwargs, + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/cors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/cors.pyi new file mode 100644 index 00000000..1b3e66b5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/cors.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete +from typing import Any + +class CORSRule: + allowed_method: Any + allowed_origin: Any + id: Any + allowed_header: Any + max_age_seconds: Any + expose_header: Any + def __init__( + self, + allowed_method: Incomplete | None = ..., + allowed_origin: Incomplete | None = ..., + id: Incomplete | None = ..., + allowed_header: Incomplete | None = ..., + max_age_seconds: Incomplete | None = ..., + expose_header: Incomplete | None = ..., + ) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self) -> str: ... + +class CORSConfiguration(list[CORSRule]): + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self) -> str: ... 
+ def add_rule( + self, + allowed_method, + allowed_origin, + id: Incomplete | None = ..., + allowed_header: Incomplete | None = ..., + max_age_seconds: Incomplete | None = ..., + expose_header: Incomplete | None = ..., + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/deletemarker.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/deletemarker.pyi new file mode 100644 index 00000000..179f35f8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/deletemarker.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import Any + +class DeleteMarker: + bucket: Any + name: Any + version_id: Any + is_latest: bool + last_modified: Any + owner: Any + def __init__(self, bucket: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/key.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/key.pyi new file mode 100644 index 00000000..52a31fd2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/key.pyi @@ -0,0 +1,235 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from typing import Any, overload + +class Key: + DefaultContentType: str + RestoreBody: str + BufferSize: Any + base_user_settable_fields: Any + base_fields: Any + bucket: Any + name: str + metadata: Any + cache_control: Any + content_type: Any + content_encoding: Any + content_disposition: Any + content_language: Any + filename: Any + etag: Any + is_latest: bool + last_modified: Any + owner: Any + path: Any + resp: Any + mode: Any + size: Any + version_id: Any + source_version_id: Any + delete_marker: bool + encrypted: Any + ongoing_restore: Any + expiry_date: Any + local_hashes: Any + def __init__(self, bucket: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + def __iter__(self): ... + @property + def provider(self): ... + key: Any + md5: Any + base64md5: Any + storage_class: Any + def get_md5_from_hexdigest(self, md5_hexdigest): ... + def handle_encryption_headers(self, resp): ... + def handle_version_headers(self, resp, force: bool = ...): ... + def handle_restore_headers(self, response): ... + def handle_addl_headers(self, headers): ... + def open_read( + self, + headers: dict[str, str] | None = ..., + query_args: str = ..., + override_num_retries: Incomplete | None = ..., + response_headers: dict[str, str] | None = ..., + ): ... + def open_write(self, headers: dict[str, str] | None = ..., override_num_retries: Incomplete | None = ...): ... + def open( + self, + mode: str = ..., + headers: dict[str, str] | None = ..., + query_args: Incomplete | None = ..., + override_num_retries: Incomplete | None = ..., + ): ... + closed: bool + def close(self, fast: bool = ...): ... + def next(self): ... + __next__: Any + def read(self, size: int = ...): ... + def change_storage_class(self, new_storage_class, dst_bucket: Incomplete | None = ..., validate_dst_bucket: bool = ...): ... + def copy( + self, + dst_bucket, + dst_key, + metadata: Incomplete | None = ..., + reduced_redundancy: bool = ..., + preserve_acl: bool = ..., + encrypt_key: bool = ..., + validate_dst_bucket: bool = ..., + ): ... 
+ def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def exists(self, headers: dict[str, str] | None = ...): ... + def delete(self, headers: dict[str, str] | None = ...): ... + def get_metadata(self, name): ... + def set_metadata(self, name, value): ... + def update_metadata(self, d): ... + def set_acl(self, acl_str, headers: dict[str, str] | None = ...): ... + def get_acl(self, headers: dict[str, str] | None = ...): ... + def get_xml_acl(self, headers: dict[str, str] | None = ...): ... + def set_xml_acl(self, acl_str, headers: dict[str, str] | None = ...): ... + def set_canned_acl(self, acl_str, headers: dict[str, str] | None = ...): ... + def get_redirect(self): ... + def set_redirect(self, redirect_location, headers: dict[str, str] | None = ...): ... + def make_public(self, headers: dict[str, str] | None = ...): ... + def generate_url( + self, + expires_in, + method: str = ..., + headers: dict[str, str] | None = ..., + query_auth: bool = ..., + force_http: bool = ..., + response_headers: dict[str, str] | None = ..., + expires_in_absolute: bool = ..., + version_id: Incomplete | None = ..., + policy: Incomplete | None = ..., + reduced_redundancy: bool = ..., + encrypt_key: bool = ..., + ): ... + def send_file( + self, + fp, + headers: dict[str, str] | None = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + query_args: Incomplete | None = ..., + chunked_transfer: bool = ..., + size: Incomplete | None = ..., + ): ... + def should_retry(self, response, chunked_transfer: bool = ...): ... + def compute_md5(self, fp, size: Incomplete | None = ...): ... + def set_contents_from_stream( + self, + fp, + headers: dict[str, str] | None = ..., + replace: bool = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + policy: Incomplete | None = ..., + reduced_redundancy: bool = ..., + query_args: Incomplete | None = ..., + size: Incomplete | None = ..., + ): ... + def set_contents_from_file( + self, + fp, + headers: dict[str, str] | None = ..., + replace: bool = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + policy: Incomplete | None = ..., + md5: Incomplete | None = ..., + reduced_redundancy: bool = ..., + query_args: Incomplete | None = ..., + encrypt_key: bool = ..., + size: Incomplete | None = ..., + rewind: bool = ..., + ): ... + def set_contents_from_filename( + self, + filename, + headers: dict[str, str] | None = ..., + replace: bool = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + policy: Incomplete | None = ..., + md5: Incomplete | None = ..., + reduced_redundancy: bool = ..., + encrypt_key: bool = ..., + ): ... + def set_contents_from_string( + self, + string_data: str | bytes, + headers: dict[str, str] | None = ..., + replace: bool = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + policy: Incomplete | None = ..., + md5: Incomplete | None = ..., + reduced_redundancy: bool = ..., + encrypt_key: bool = ..., + ) -> None: ... + def get_file( + self, + fp, + headers: dict[str, str] | None = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + torrent: bool = ..., + version_id: Incomplete | None = ..., + override_num_retries: Incomplete | None = ..., + response_headers: dict[str, str] | None = ..., + ): ... + def get_torrent_file( + self, fp, headers: dict[str, str] | None = ..., cb: Callable[[int, int], object] | None = ..., num_cb: int = ... + ): ... 
+ def get_contents_to_file( + self, + fp, + headers: dict[str, str] | None = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + torrent: bool = ..., + version_id: Incomplete | None = ..., + res_download_handler: Incomplete | None = ..., + response_headers: dict[str, str] | None = ..., + ): ... + def get_contents_to_filename( + self, + filename, + headers: dict[str, str] | None = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + torrent: bool = ..., + version_id: Incomplete | None = ..., + res_download_handler: Incomplete | None = ..., + response_headers: dict[str, str] | None = ..., + ): ... + @overload + def get_contents_as_string( + self, + headers: dict[str, str] | None = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + torrent: bool = ..., + version_id: Incomplete | None = ..., + response_headers: dict[str, str] | None = ..., + encoding: None = ..., + ) -> bytes: ... + @overload + def get_contents_as_string( + self, + headers: dict[str, str] | None = ..., + cb: Callable[[int, int], object] | None = ..., + num_cb: int = ..., + torrent: bool = ..., + version_id: Incomplete | None = ..., + response_headers: dict[str, str] | None = ..., + *, + encoding: str, + ) -> str: ... + def add_email_grant(self, permission, email_address, headers: dict[str, str] | None = ...): ... + def add_user_grant( + self, permission, user_id, headers: dict[str, str] | None = ..., display_name: Incomplete | None = ... + ): ... + def set_remote_metadata(self, metadata_plus, metadata_minus, preserve_acl, headers: dict[str, str] | None = ...): ... + def restore(self, days, headers: dict[str, str] | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/keyfile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/keyfile.pyi new file mode 100644 index 00000000..121da165 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/keyfile.pyi @@ -0,0 +1,29 @@ +from typing import Any + +class KeyFile: + key: Any + location: int + closed: bool + softspace: int + mode: str + encoding: str + errors: str + newlines: str + name: Any + def __init__(self, key) -> None: ... + def tell(self): ... + def seek(self, pos, whence: Any = ...): ... + def read(self, size): ... + def close(self): ... + def isatty(self): ... + def getkey(self): ... + def write(self, buf): ... + def fileno(self): ... + def flush(self): ... + def next(self): ... + def readinto(self): ... + def readline(self): ... + def readlines(self): ... + def truncate(self): ... + def writelines(self): ... + def xreadlines(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/lifecycle.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/lifecycle.pyi new file mode 100644 index 00000000..358faaff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/lifecycle.pyi @@ -0,0 +1,70 @@ +from _typeshed import Incomplete +from typing import Any + +class Rule: + id: Any + prefix: Any + status: Any + expiration: Any + transition: Any + def __init__( + self, + id: Incomplete | None = ..., + prefix: Incomplete | None = ..., + status: Incomplete | None = ..., + expiration: Incomplete | None = ..., + transition: Incomplete | None = ..., + ) -> None: ... + def startElement(self, name, attrs, connection): ... 
+ def endElement(self, name, value, connection): ... + def to_xml(self): ... + +class Expiration: + days: Any + date: Any + def __init__(self, days: Incomplete | None = ..., date: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self): ... + +class Transition: + days: Any + date: Any + storage_class: Any + def __init__( + self, days: Incomplete | None = ..., date: Incomplete | None = ..., storage_class: Incomplete | None = ... + ) -> None: ... + def to_xml(self): ... + +class Transitions(list[Transition]): + transition_properties: int + current_transition_property: int + temp_days: Any + temp_date: Any + temp_storage_class: Any + def __init__(self) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self): ... + def add_transition( + self, days: Incomplete | None = ..., date: Incomplete | None = ..., storage_class: Incomplete | None = ... + ): ... + @property + def days(self): ... + @property + def date(self): ... + @property + def storage_class(self): ... + +class Lifecycle(list[Rule]): + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self): ... + def add_rule( + self, + id: Incomplete | None = ..., + prefix: str = ..., + status: str = ..., + expiration: Incomplete | None = ..., + transition: Incomplete | None = ..., + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/multidelete.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/multidelete.pyi new file mode 100644 index 00000000..3ab86b04 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/multidelete.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from typing import Any + +class Deleted: + key: Any + version_id: Any + delete_marker: Any + delete_marker_version_id: Any + def __init__( + self, + key: Incomplete | None = ..., + version_id: Incomplete | None = ..., + delete_marker: bool = ..., + delete_marker_version_id: Incomplete | None = ..., + ) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + +class Error: + key: Any + version_id: Any + code: Any + message: Any + def __init__( + self, + key: Incomplete | None = ..., + version_id: Incomplete | None = ..., + code: Incomplete | None = ..., + message: Incomplete | None = ..., + ) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + +class MultiDeleteResult: + bucket: Any + deleted: Any + errors: Any + def __init__(self, bucket: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/multipart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/multipart.pyi new file mode 100644 index 00000000..9fd51313 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/multipart.pyi @@ -0,0 +1,74 @@ +from _typeshed import Incomplete +from typing import Any + +class CompleteMultiPartUpload: + bucket: Any + location: Any + bucket_name: Any + key_name: Any + etag: Any + version_id: Any + encrypted: Any + def __init__(self, bucket: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + +class Part: + bucket: Any + part_number: Any + last_modified: Any + etag: Any + size: Any + def __init__(self, bucket: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + +def part_lister(mpupload, part_number_marker: Incomplete | None = ...): ... + +class MultiPartUpload: + bucket: Any + bucket_name: Any + key_name: Any + id: Any + initiator: Any + owner: Any + storage_class: Any + initiated: Any + part_number_marker: Any + next_part_number_marker: Any + max_parts: Any + is_truncated: bool + def __init__(self, bucket: Incomplete | None = ...) -> None: ... + def __iter__(self): ... + def to_xml(self): ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def get_all_parts( + self, + max_parts: Incomplete | None = ..., + part_number_marker: Incomplete | None = ..., + encoding_type: Incomplete | None = ..., + ): ... + def upload_part_from_file( + self, + fp, + part_num, + headers: Incomplete | None = ..., + replace: bool = ..., + cb: Incomplete | None = ..., + num_cb: int = ..., + md5: Incomplete | None = ..., + size: Incomplete | None = ..., + ): ... + def copy_part_from_key( + self, + src_bucket_name, + src_key_name, + part_num, + start: Incomplete | None = ..., + end: Incomplete | None = ..., + src_version_id: Incomplete | None = ..., + headers: Incomplete | None = ..., + ): ... + def complete_upload(self): ... + def cancel_upload(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/prefix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/prefix.pyi new file mode 100644 index 00000000..b4164f48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/prefix.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from typing import Any + +class Prefix: + bucket: Any + name: Any + def __init__(self, bucket: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + @property + def provider(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/tagging.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/tagging.pyi new file mode 100644 index 00000000..3515e91c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/tagging.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete +from typing import Any + +class Tag: + key: Any + value: Any + def __init__(self, key: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self): ... + def __eq__(self, other): ... + +class TagSet(list[Tag]): + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def add_tag(self, key, value): ... + def to_xml(self): ... + +class Tags(list[TagSet]): + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self): ... + def add_tag_set(self, tag_set): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/user.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/user.pyi new file mode 100644 index 00000000..d5c56ee3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/user.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from typing import Any + +class User: + type: Any + id: Any + display_name: Any + def __init__(self, parent: Incomplete | None = ..., id: str = ..., display_name: str = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self, element_name: str = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/website.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/website.pyi new file mode 100644 index 00000000..e7597a0d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/s3/website.pyi @@ -0,0 +1,83 @@ +from _typeshed import Incomplete +from typing import Any + +def tag(key, value): ... + +class WebsiteConfiguration: + suffix: Any + error_key: Any + redirect_all_requests_to: Any + routing_rules: Any + def __init__( + self, + suffix: Incomplete | None = ..., + error_key: Incomplete | None = ..., + redirect_all_requests_to: Incomplete | None = ..., + routing_rules: Incomplete | None = ..., + ) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self): ... + +class _XMLKeyValue: + translator: Any + container: Any + def __init__(self, translator, container: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self): ... + +class RedirectLocation(_XMLKeyValue): + TRANSLATOR: Any + hostname: Any + protocol: Any + def __init__(self, hostname: Incomplete | None = ..., protocol: Incomplete | None = ...) -> None: ... + def to_xml(self): ... + +class RoutingRules(list[RoutingRule]): + def add_rule(self, rule: RoutingRule) -> RoutingRules: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... 
+ def to_xml(self): ... + +class RoutingRule: + condition: Any + redirect: Any + def __init__(self, condition: Incomplete | None = ..., redirect: Incomplete | None = ...) -> None: ... + def startElement(self, name, attrs, connection): ... + def endElement(self, name, value, connection): ... + def to_xml(self): ... + @classmethod + def when(cls, key_prefix: Incomplete | None = ..., http_error_code: Incomplete | None = ...): ... + def then_redirect( + self, + hostname: Incomplete | None = ..., + protocol: Incomplete | None = ..., + replace_key: Incomplete | None = ..., + replace_key_prefix: Incomplete | None = ..., + http_redirect_code: Incomplete | None = ..., + ): ... + +class Condition(_XMLKeyValue): + TRANSLATOR: Any + key_prefix: Any + http_error_code: Any + def __init__(self, key_prefix: Incomplete | None = ..., http_error_code: Incomplete | None = ...) -> None: ... + def to_xml(self): ... + +class Redirect(_XMLKeyValue): + TRANSLATOR: Any + hostname: Any + protocol: Any + replace_key: Any + replace_key_prefix: Any + http_redirect_code: Any + def __init__( + self, + hostname: Incomplete | None = ..., + protocol: Incomplete | None = ..., + replace_key: Incomplete | None = ..., + replace_key_prefix: Incomplete | None = ..., + http_redirect_code: Incomplete | None = ..., + ) -> None: ... + def to_xml(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/utils.pyi new file mode 100644 index 00000000..ae9a2396 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/boto/boto/utils.pyi @@ -0,0 +1,136 @@ +import datetime +import io +import logging.handlers +import subprocess +import time +from _typeshed import StrOrBytesPath +from collections.abc import Callable, Iterable, Mapping, Sequence +from contextlib import AbstractContextManager +from email.message import Message +from hashlib import _Hash +from typing import IO, Any, TypeVar +from typing_extensions import TypeAlias + +import boto.connection + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + +_Provider: TypeAlias = Any # TODO replace this with boto.provider.Provider once stubs exist +_LockType: TypeAlias = Any # TODO replace this with _thread.LockType once stubs exist + +JSONDecodeError: type[ValueError] +qsa_of_interest: list[str] + +def unquote_v(nv: str) -> str | tuple[str, str]: ... +def canonical_string( + method: str, path: str, headers: Mapping[str, str | None], expires: int | None = ..., provider: _Provider | None = ... +) -> str: ... +def merge_meta( + headers: Mapping[str, str], metadata: Mapping[str, str], provider: _Provider | None = ... +) -> Mapping[str, str]: ... +def get_aws_metadata(headers: Mapping[str, str], provider: _Provider | None = ...) -> Mapping[str, str]: ... +def retry_url(url: str, retry_on_404: bool = ..., num_retries: int = ..., timeout: int | None = ...) -> str: ... + +class LazyLoadMetadata(dict[_KT, _VT]): + def __init__(self, url: str, num_retries: int, timeout: int | None = ...) -> None: ... + +def get_instance_metadata( + version: str = ..., url: str = ..., data: str = ..., timeout: int | None = ..., num_retries: int = ... +) -> LazyLoadMetadata[Any, Any] | None: ... +def get_instance_identity( + version: str = ..., url: str = ..., timeout: int | None = ..., num_retries: int = ... +) -> Mapping[str, Any] | None: ... 
+def get_instance_userdata( + version: str = ..., sep: str | None = ..., url: str = ..., timeout: int | None = ..., num_retries: int = ... +) -> Mapping[str, str]: ... + +ISO8601: str +ISO8601_MS: str +RFC1123: str +LOCALE_LOCK: _LockType + +def setlocale(name: str | tuple[str, str]) -> AbstractContextManager[str]: ... +def get_ts(ts: time.struct_time | None = ...) -> str: ... +def parse_ts(ts: str) -> datetime.datetime: ... +def find_class(module_name: str, class_name: str | None = ...) -> type[Any] | None: ... +def update_dme(username: str, password: str, dme_id: str, ip_address: str) -> str: ... +def fetch_file( + uri: str, file: IO[str] | None = ..., username: str | None = ..., password: str | None = ... +) -> IO[str] | None: ... + +class ShellCommand: + exit_code: int + command: subprocess._CMD + log_fp: io.StringIO + wait: bool + fail_fast: bool + def __init__( + self, command: subprocess._CMD, wait: bool = ..., fail_fast: bool = ..., cwd: StrOrBytesPath | None = ... + ) -> None: ... + process: subprocess.Popen[Any] + def run(self, cwd: subprocess._CMD | None = ...) -> int | None: ... + def setReadOnly(self, value) -> None: ... + def getStatus(self) -> int | None: ... + status: int | None + def getOutput(self) -> str: ... + output: str + +class AuthSMTPHandler(logging.handlers.SMTPHandler): + username: str + password: str + def __init__( + self, mailhost: str, username: str, password: str, fromaddr: str, toaddrs: Sequence[str], subject: str + ) -> None: ... + +class LRUCache(dict[_KT, _VT]): + class _Item: + previous: LRUCache._Item | None + next: LRUCache._Item | None + key = ... + value = ... + def __init__(self, key, value) -> None: ... + _dict: dict[_KT, LRUCache._Item] + capacity: int + head: LRUCache._Item | None + tail: LRUCache._Item | None + def __init__(self, capacity: int) -> None: ... + +# This exists to work around Password.str's name shadowing the str type +_Str: TypeAlias = str + +class Password: + hashfunc: Callable[[bytes], _Hash] + str: _Str | None + def __init__(self, str: _Str | None = ..., hashfunc: Callable[[bytes], _Hash] | None = ...) -> None: ... + def set(self, value: bytes | _Str) -> None: ... + def __eq__(self, other: _Str | bytes | None) -> bool: ... # type: ignore[override] + def __len__(self) -> int: ... + +def notify( + subject: str, + body: str | None = ..., + html_body: Sequence[str] | str | None = ..., + to_string: str | None = ..., + attachments: Iterable[Message] | None = ..., + append_instance_id: bool = ..., +) -> None: ... +def get_utf8_value(value: str) -> bytes: ... +def mklist(value: Any) -> list[Any]: ... +def pythonize_name(name: str) -> str: ... +def write_mime_multipart( + content: list[tuple[str, str]], compress: bool = ..., deftype: str = ..., delimiter: str = ... +) -> str: ... +def guess_mime_type(content: str, deftype: str) -> str: ... +def compute_md5(fp: IO[Any], buf_size: int = ..., size: int | None = ...) -> tuple[str, str, int]: ... +def compute_hash(fp: IO[Any], buf_size: int = ..., size: int | None = ..., hash_algorithm: Any = ...) -> tuple[str, str, int]: ... +def find_matching_headers(name: str, headers: Mapping[str, str | None]) -> list[str]: ... +def merge_headers_by_name(name: str, headers: Mapping[str, str | None]) -> str: ... + +class RequestHook: + def handle_request_data( + self, request: boto.connection.HTTPRequest, response: boto.connection.HTTPResponse, error: bool = ... + ) -> Any: ... + +def host_is_ipv6(hostname: str) -> bool: ... +def parse_host(hostname: str) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..f40932c4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +.*\.AttributeGetter.__repr__ # has an extra argument, but also a million things inherit from it diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/METADATA.toml new file mode 100644 index 00000000..f0992bef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/METADATA.toml @@ -0,0 +1,4 @@ +version = "4.18.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/__init__.pyi new file mode 100644 index 00000000..f242635c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/__init__.pyi @@ -0,0 +1,83 @@ +from braintree.ach_mandate import AchMandate as AchMandate +from braintree.add_on import AddOn as AddOn +from braintree.add_on_gateway import AddOnGateway as AddOnGateway +from braintree.address import Address as Address +from braintree.address_gateway import AddressGateway as AddressGateway +from braintree.amex_express_checkout_card import AmexExpressCheckoutCard as AmexExpressCheckoutCard +from braintree.android_pay_card import AndroidPayCard as AndroidPayCard +from braintree.apple_pay_card import ApplePayCard as ApplePayCard +from braintree.apple_pay_gateway import ApplePayGateway as ApplePayGateway +from braintree.braintree_gateway import BraintreeGateway as BraintreeGateway +from braintree.client_token import ClientToken as ClientToken +from braintree.configuration import Configuration as Configuration +from braintree.connected_merchant_paypal_status_changed import ( + ConnectedMerchantPayPalStatusChanged as ConnectedMerchantPayPalStatusChanged, +) +from braintree.connected_merchant_status_transitioned import ( + ConnectedMerchantStatusTransitioned as ConnectedMerchantStatusTransitioned, +) +from braintree.credentials_parser import CredentialsParser as CredentialsParser +from braintree.credit_card import CreditCard as CreditCard +from braintree.credit_card_gateway import CreditCardGateway as CreditCardGateway +from braintree.credit_card_verification import CreditCardVerification as CreditCardVerification +from braintree.credit_card_verification_search import CreditCardVerificationSearch as CreditCardVerificationSearch +from braintree.customer import Customer as Customer +from braintree.customer_gateway import CustomerGateway as CustomerGateway +from braintree.customer_search import CustomerSearch as CustomerSearch +from braintree.descriptor import Descriptor as Descriptor +from braintree.disbursement import Disbursement as Disbursement +from braintree.discount import Discount as Discount +from braintree.discount_gateway import DiscountGateway as DiscountGateway +from braintree.dispute import Dispute as Dispute +from braintree.dispute_search import DisputeSearch as DisputeSearch +from 
braintree.document_upload import DocumentUpload as DocumentUpload +from braintree.document_upload_gateway import DocumentUploadGateway as DocumentUploadGateway +from braintree.environment import Environment as Environment +from braintree.error_codes import ErrorCodes as ErrorCodes +from braintree.error_result import ErrorResult as ErrorResult +from braintree.errors import Errors as Errors +from braintree.europe_bank_account import EuropeBankAccount as EuropeBankAccount +from braintree.local_payment_completed import LocalPaymentCompleted as LocalPaymentCompleted +from braintree.local_payment_reversed import LocalPaymentReversed as LocalPaymentReversed +from braintree.merchant import Merchant as Merchant +from braintree.merchant_account import MerchantAccount as MerchantAccount +from braintree.merchant_account_gateway import MerchantAccountGateway as MerchantAccountGateway +from braintree.oauth_access_revocation import OAuthAccessRevocation as OAuthAccessRevocation +from braintree.partner_merchant import PartnerMerchant as PartnerMerchant +from braintree.payment_instrument_type import PaymentInstrumentType as PaymentInstrumentType +from braintree.payment_method import PaymentMethod as PaymentMethod +from braintree.payment_method_nonce import PaymentMethodNonce as PaymentMethodNonce +from braintree.payment_method_parser import parse_payment_method as parse_payment_method +from braintree.paypal_account import PayPalAccount as PayPalAccount +from braintree.plan import Plan as Plan +from braintree.plan_gateway import PlanGateway as PlanGateway +from braintree.processor_response_types import ProcessorResponseTypes as ProcessorResponseTypes +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.risk_data import RiskData as RiskData +from braintree.samsung_pay_card import SamsungPayCard as SamsungPayCard +from braintree.search import Search as Search +from braintree.settlement_batch_summary import SettlementBatchSummary as SettlementBatchSummary +from braintree.signature_service import SignatureService as SignatureService +from braintree.status_event import StatusEvent as StatusEvent +from braintree.subscription import Subscription as Subscription +from braintree.subscription_gateway import SubscriptionGateway as SubscriptionGateway +from braintree.subscription_search import SubscriptionSearch as SubscriptionSearch +from braintree.subscription_status_event import SubscriptionStatusEvent as SubscriptionStatusEvent +from braintree.successful_result import SuccessfulResult as SuccessfulResult +from braintree.testing_gateway import TestingGateway as TestingGateway +from braintree.three_d_secure_info import ThreeDSecureInfo as ThreeDSecureInfo +from braintree.transaction import Transaction as Transaction +from braintree.transaction_amounts import TransactionAmounts as TransactionAmounts +from braintree.transaction_details import TransactionDetails as TransactionDetails +from braintree.transaction_gateway import TransactionGateway as TransactionGateway +from braintree.transaction_line_item import TransactionLineItem as TransactionLineItem +from braintree.transaction_search import TransactionSearch as TransactionSearch +from braintree.unknown_payment_method import UnknownPaymentMethod as UnknownPaymentMethod +from braintree.us_bank_account import UsBankAccount as UsBankAccount +from braintree.validation_error_collection import ValidationErrorCollection as ValidationErrorCollection +from braintree.venmo_account import VenmoAccount as VenmoAccount +from 
braintree.version import Version as Version +from braintree.webhook_notification import WebhookNotification as WebhookNotification +from braintree.webhook_notification_gateway import WebhookNotificationGateway as WebhookNotificationGateway +from braintree.webhook_testing import WebhookTesting as WebhookTesting +from braintree.webhook_testing_gateway import WebhookTestingGateway as WebhookTestingGateway diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/account_updater_daily_report.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/account_updater_daily_report.pyi new file mode 100644 index 00000000..86b34389 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/account_updater_daily_report.pyi @@ -0,0 +1,9 @@ +from typing import Any + +from braintree.configuration import Configuration as Configuration +from braintree.resource import Resource as Resource + +class AccountUpdaterDailyReport(Resource): + report_url: Any + report_date: Any + def __init__(self, gateway, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/ach_mandate.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/ach_mandate.pyi new file mode 100644 index 00000000..053c028d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/ach_mandate.pyi @@ -0,0 +1,5 @@ +from braintree.resource import Resource as Resource +from braintree.util.datetime_parser import parse_datetime as parse_datetime + +class AchMandate(Resource): + def __init__(self, gateway, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/add_on.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/add_on.pyi new file mode 100644 index 00000000..6665ada5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/add_on.pyi @@ -0,0 +1,6 @@ +from braintree.configuration import Configuration as Configuration +from braintree.modification import Modification as Modification + +class AddOn(Modification): + @staticmethod + def all(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/add_on_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/add_on_gateway.pyi new file mode 100644 index 00000000..131c84cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/add_on_gateway.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from braintree.add_on import AddOn as AddOn +from braintree.resource_collection import ResourceCollection as ResourceCollection + +class AddOnGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def all(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/address.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/address.pyi new file mode 100644 index 00000000..e3a3b33e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/address.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +from braintree.configuration import Configuration as Configuration +from braintree.error_result import ErrorResult as ErrorResult +from braintree.resource import Resource as Resource +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class Address(Resource): + class ShippingMethod: + SameDay: str + NextDay: str + Priority: str + Ground: str + Electronic: str + ShipToStore: str + @staticmethod + def create(params: Incomplete | None = ...): ... + @staticmethod + def delete(customer_id, address_id): ... + @staticmethod + def find(customer_id, address_id): ... + @staticmethod + def update(customer_id, address_id, params: Incomplete | None = ...): ... + @staticmethod + def create_signature(): ... + @staticmethod + def update_signature(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/address_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/address_gateway.pyi new file mode 100644 index 00000000..15997901 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/address_gateway.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.address import Address as Address +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.resource import Resource as Resource +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class AddressGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def create(self, params: Incomplete | None = ...): ... + def delete(self, customer_id, address_id): ... + def find(self, customer_id, address_id): ... + def update(self, customer_id, address_id, params: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/amex_express_checkout_card.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/amex_express_checkout_card.pyi new file mode 100644 index 00000000..4d736b43 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/amex_express_checkout_card.pyi @@ -0,0 +1,9 @@ +from typing import Any + +from braintree.resource import Resource as Resource + +class AmexExpressCheckoutCard(Resource): + subscriptions: Any + def __init__(self, gateway, attributes) -> None: ... + @property + def expiration_date(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/android_pay_card.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/android_pay_card.pyi new file mode 100644 index 00000000..2edb683a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/android_pay_card.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from braintree.resource import Resource as Resource + +class AndroidPayCard(Resource): + is_expired: Any + subscriptions: Any + def __init__(self, gateway, attributes) -> None: ... + @property + def expiration_date(self): ... + @property + def last_4(self): ... + @property + def card_type(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/apple_pay_card.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/apple_pay_card.pyi new file mode 100644 index 00000000..f1cf1075 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/apple_pay_card.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from braintree.resource import Resource as Resource + +class ApplePayCard(Resource): + class CardType: + AmEx: str + MasterCard: str + Visa: str + is_expired: Any + subscriptions: Any + def __init__(self, gateway, attributes) -> None: ... + @property + def expiration_date(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/apple_pay_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/apple_pay_gateway.pyi new file mode 100644 index 00000000..37579860 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/apple_pay_gateway.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from braintree.apple_pay_options import ApplePayOptions as ApplePayOptions +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class ApplePayGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def register_domain(self, domain): ... + def unregister_domain(self, domain): ... + def registered_domains(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/apple_pay_options.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/apple_pay_options.pyi new file mode 100644 index 00000000..8328c271 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/apple_pay_options.pyi @@ -0,0 +1,3 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class ApplePayOptions(AttributeGetter): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/attribute_getter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/attribute_getter.pyi new file mode 100644 index 00000000..848f3063 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/attribute_getter.pyi @@ -0,0 +1,4 @@ +from _typeshed import Incomplete + +class AttributeGetter: + def __init__(self, attributes: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/authorization_adjustment.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/authorization_adjustment.pyi new file mode 100644 index 00000000..32e9319e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/authorization_adjustment.pyi @@ -0,0 +1,7 @@ +from typing import Any + +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class AuthorizationAdjustment(AttributeGetter): + amount: Any + def __init__(self, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/bin_data.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/bin_data.pyi new file mode 100644 index 00000000..540fa249 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/bin_data.pyi @@ -0,0 +1,3 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class BinData(AttributeGetter): ... 
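AttributeGetter and the _typeshed.Incomplete placeholders above are typical of minimally annotated stubs: names and arity are pinned down, but many parameter and attribute types are still unknown. That is exactly the kind of thing a stats pass over this vendored typeshed tree can measure. The following is a much-simplified sketch of such a pass (not the benchmark's actual logic), counting Incomplete annotations in one stub file with the standard library's ast module:

import ast
from pathlib import Path

def count_incomplete(stub_path: Path) -> int:
    # .pyi files are ordinary Python syntax, so ast.parse handles them.
    tree = ast.parse(stub_path.read_text(encoding="utf-8"))
    # Count every reference to the Incomplete placeholder (e.g. in
    # "params: Incomplete | None = ..."); a rough proxy for how much of
    # the API surface is still untyped.
    return sum(
        isinstance(node, ast.Name) and node.id == "Incomplete"
        for node in ast.walk(tree)
    )

# Example call (path is illustrative):
# count_incomplete(Path("stubs/braintree/braintree/attribute_getter.pyi"))
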
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/braintree_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/braintree_gateway.pyi new file mode 100644 index 00000000..5f4947b9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/braintree_gateway.pyi @@ -0,0 +1,61 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.add_on_gateway import AddOnGateway as AddOnGateway +from braintree.address_gateway import AddressGateway as AddressGateway +from braintree.apple_pay_gateway import ApplePayGateway as ApplePayGateway +from braintree.client_token_gateway import ClientTokenGateway as ClientTokenGateway +from braintree.configuration import Configuration as Configuration +from braintree.credit_card_gateway import CreditCardGateway as CreditCardGateway +from braintree.credit_card_verification_gateway import CreditCardVerificationGateway as CreditCardVerificationGateway +from braintree.customer_gateway import CustomerGateway as CustomerGateway +from braintree.discount_gateway import DiscountGateway as DiscountGateway +from braintree.dispute_gateway import DisputeGateway as DisputeGateway +from braintree.document_upload_gateway import DocumentUploadGateway as DocumentUploadGateway +from braintree.merchant_account_gateway import MerchantAccountGateway as MerchantAccountGateway +from braintree.merchant_gateway import MerchantGateway as MerchantGateway +from braintree.oauth_gateway import OAuthGateway as OAuthGateway +from braintree.payment_method_gateway import PaymentMethodGateway as PaymentMethodGateway +from braintree.payment_method_nonce_gateway import PaymentMethodNonceGateway as PaymentMethodNonceGateway +from braintree.paypal_account_gateway import PayPalAccountGateway as PayPalAccountGateway +from braintree.plan_gateway import PlanGateway as PlanGateway +from braintree.settlement_batch_summary_gateway import SettlementBatchSummaryGateway as SettlementBatchSummaryGateway +from braintree.subscription_gateway import SubscriptionGateway as SubscriptionGateway +from braintree.testing_gateway import TestingGateway as TestingGateway +from braintree.transaction_gateway import TransactionGateway as TransactionGateway +from braintree.transaction_line_item_gateway import TransactionLineItemGateway as TransactionLineItemGateway +from braintree.us_bank_account_gateway import UsBankAccountGateway as UsBankAccountGateway +from braintree.us_bank_account_verification_gateway import UsBankAccountVerificationGateway as UsBankAccountVerificationGateway +from braintree.webhook_notification_gateway import WebhookNotificationGateway as WebhookNotificationGateway +from braintree.webhook_testing_gateway import WebhookTestingGateway as WebhookTestingGateway + +class BraintreeGateway: + config: Any + add_on: Any + address: Any + apple_pay: Any + client_token: Any + credit_card: Any + customer: Any + discount: Any + dispute: Any + document_upload: Any + graphql_client: Any + merchant: Any + merchant_account: Any + oauth: Any + payment_method: Any + payment_method_nonce: Any + paypal_account: Any + plan: Any + settlement_batch_summary: Any + subscription: Any + testing: Any + transaction: Any + transaction_line_item: Any + us_bank_account: Any + us_bank_account_verification: Any + verification: Any + webhook_notification: Any + webhook_testing: Any + def __init__(self, config: Incomplete | None = ..., **kwargs) -> None: ... 
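BraintreeGateway is the aggregator the rest of these stubs hang off: every sub-gateway attribute is declared, but all of them as Any. A hedged sketch of wiring it up with the stubbed Configuration signature (placeholder credentials, braintree assumed installed, and no network request is made at construction time):

import braintree

config = braintree.Configuration(
    environment=braintree.Environment.Sandbox,
    merchant_id="placeholder_merchant_id",
    public_key="placeholder_public_key",
    private_key="placeholder_private_key",
)
gateway = braintree.BraintreeGateway(config)
# gateway.customer, gateway.transaction, etc. exist per the stub, but are
# typed as Any, so a checker cannot yet validate calls made through them.
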
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/client_token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/client_token.pyi new file mode 100644 index 00000000..42afabe6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/client_token.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +from braintree import exceptions as exceptions +from braintree.configuration import Configuration as Configuration +from braintree.signature_service import SignatureService as SignatureService +from braintree.util.crypto import Crypto as Crypto + +class ClientToken: + @staticmethod + def generate(params: Incomplete | None = ..., gateway: Incomplete | None = ...): ... + @staticmethod + def generate_signature(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/client_token_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/client_token_gateway.pyi new file mode 100644 index 00000000..1f66fe5b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/client_token_gateway.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree import exceptions as exceptions +from braintree.client_token import ClientToken as ClientToken +from braintree.resource import Resource as Resource + +class ClientTokenGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def generate(self, params: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/configuration.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/configuration.pyi new file mode 100644 index 00000000..68e41550 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/configuration.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.credentials_parser import CredentialsParser as CredentialsParser +from braintree.environment import Environment as Environment +from braintree.exceptions.configuration_error import ConfigurationError as ConfigurationError +from braintree.util.graphql_client import GraphQLClient as GraphQLClient + +class Configuration: + @staticmethod + def configure(environment, merchant_id, public_key, private_key, **kwargs) -> None: ... + @staticmethod + def for_partner(environment, partner_id, public_key, private_key, **kwargs): ... + @staticmethod + def gateway(): ... + @staticmethod + def instantiate(): ... + @staticmethod + def api_version(): ... + @staticmethod + def graphql_api_version(): ... + environment: Any + merchant_id: Any + public_key: Any + private_key: Any + client_id: Any + client_secret: Any + access_token: Any + timeout: Any + wrap_http_exceptions: Any + def __init__( + self, + environment: Incomplete | None = ..., + merchant_id: Incomplete | None = ..., + public_key: Incomplete | None = ..., + private_key: Incomplete | None = ..., + client_id: Incomplete | None = ..., + client_secret: Incomplete | None = ..., + access_token: Incomplete | None = ..., + *args, + **kwargs, + ) -> None: ... + def base_merchant_path(self): ... + def base_url(self): ... + def graphql_base_url(self): ... 
+ def http(self): ... + def graphql_client(self): ... + def http_strategy(self): ... + def has_client_credentials(self): ... + def assert_has_client_credentials(self) -> None: ... + def has_access_token(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/connected_merchant_paypal_status_changed.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/connected_merchant_paypal_status_changed.pyi new file mode 100644 index 00000000..9f1fe6a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/connected_merchant_paypal_status_changed.pyi @@ -0,0 +1,6 @@ +from braintree.resource import Resource as Resource + +class ConnectedMerchantPayPalStatusChanged(Resource): + def __init__(self, gateway, attributes) -> None: ... + @property + def merchant_id(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/connected_merchant_status_transitioned.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/connected_merchant_status_transitioned.pyi new file mode 100644 index 00000000..5b6fa3c2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/connected_merchant_status_transitioned.pyi @@ -0,0 +1,6 @@ +from braintree.resource import Resource as Resource + +class ConnectedMerchantStatusTransitioned(Resource): + def __init__(self, gateway, attributes) -> None: ... + @property + def merchant_id(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credentials_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credentials_parser.pyi new file mode 100644 index 00000000..e801f287 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credentials_parser.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.environment import Environment as Environment +from braintree.exceptions.configuration_error import ConfigurationError as ConfigurationError + +class CredentialsParser: + client_id: Any + client_secret: Any + access_token: Any + def __init__( + self, client_id: Incomplete | None = ..., client_secret: Incomplete | None = ..., access_token: Incomplete | None = ... + ) -> None: ... + environment: Any + def parse_client_credentials(self) -> None: ... + merchant_id: Any + def parse_access_token(self) -> None: ... + def get_environment(self, credential): ... + def get_merchant_id(self, credential): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card.pyi new file mode 100644 index 00000000..8c895824 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card.pyi @@ -0,0 +1,75 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.address import Address as Address +from braintree.configuration import Configuration as Configuration +from braintree.credit_card_verification import CreditCardVerification as CreditCardVerification +from braintree.resource import Resource as Resource + +class CreditCard(Resource): + class CardType: + AmEx: str + CarteBlanche: str + ChinaUnionPay: str + DinersClubInternational: str + Discover: str + Electron: str + Elo: str + Hiper: str + Hipercard: str + JCB: str + Laser: str + UK_Maestro: str + Maestro: str + MasterCard: str + Solo: str + Switch: str + Visa: str + Unknown: str + + class CustomerLocation: + International: str + US: str + + class CardTypeIndicator: + Yes: str + No: str + Unknown: str + Commercial: Any + DurbinRegulated: Any + Debit: Any + Healthcare: Any + CountryOfIssuance: Any + IssuingBank: Any + Payroll: Any + Prepaid: Any + ProductId: Any + @staticmethod + def create(params: Incomplete | None = ...): ... + @staticmethod + def update(credit_card_token, params: Incomplete | None = ...): ... + @staticmethod + def delete(credit_card_token): ... + @staticmethod + def expired(): ... + @staticmethod + def expiring_between(start_date, end_date): ... + @staticmethod + def find(credit_card_token): ... + @staticmethod + def from_nonce(nonce): ... + @staticmethod + def create_signature(): ... + @staticmethod + def update_signature(): ... + @staticmethod + def signature(type): ... + is_expired: Any + billing_address: Any + subscriptions: Any + verification: Any + def __init__(self, gateway, attributes): ... + @property + def expiration_date(self): ... + @property + def masked_number(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_gateway.pyi new file mode 100644 index 00000000..68fbe2d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_gateway.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.credit_card import CreditCard as CreditCard +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.ids_search import IdsSearch as IdsSearch +from braintree.resource import Resource as Resource +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class CreditCardGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def create(self, params: Incomplete | None = ...): ... + def delete(self, credit_card_token): ... + def expired(self): ... + def expiring_between(self, start_date, end_date): ... + def find(self, credit_card_token): ... + def forward(self, credit_card_token, receiving_merchant_id) -> None: ... + def from_nonce(self, nonce): ... 
+ def update(self, credit_card_token, params: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_verification.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_verification.pyi new file mode 100644 index 00000000..29fb9d5d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_verification.pyi @@ -0,0 +1,32 @@ +from typing import Any + +from braintree.attribute_getter import AttributeGetter as AttributeGetter +from braintree.configuration import Configuration as Configuration +from braintree.resource import Resource as Resource +from braintree.risk_data import RiskData as RiskData +from braintree.three_d_secure_info import ThreeDSecureInfo as ThreeDSecureInfo + +class CreditCardVerification(AttributeGetter): + class Status: + Failed: str + GatewayRejected: str + ProcessorDeclined: str + Verified: str + amount: Any + currency_iso_code: Any + processor_response_code: Any + processor_response_text: Any + network_response_code: Any + network_response_text: Any + risk_data: Any + three_d_secure_info: Any + def __init__(self, gateway, attributes) -> None: ... + @staticmethod + def find(verification_id): ... + @staticmethod + def search(*query): ... + @staticmethod + def create(params): ... + @staticmethod + def create_signature(): ... + def __eq__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_verification_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_verification_gateway.pyi new file mode 100644 index 00000000..737cf19d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_verification_gateway.pyi @@ -0,0 +1,17 @@ +from typing import Any + +from braintree.credit_card_verification import CreditCardVerification as CreditCardVerification +from braintree.credit_card_verification_search import CreditCardVerificationSearch as CreditCardVerificationSearch +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.ids_search import IdsSearch as IdsSearch +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class CreditCardVerificationGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def find(self, verification_id): ... + def search(self, *query): ... + def create(self, params): ... 
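The CreditCard / CreditCardGateway pair above shows the split that runs through this whole package: the resource class exposes static convenience methods, while the gateway class carries the instance methods bound to a configuration. A hedged sketch of the two call styles (the delegation from the static methods to a module-level configured gateway is inferred from the runtime library, not stated by these stubs, and find() is only referenced here, never actually called):

from __future__ import annotations

import braintree

def lookup_card(token: str, gateway: braintree.BraintreeGateway | None = None):
    # Explicit-gateway style: gateway.credit_card is typed Any in the stub.
    if gateway is not None:
        return gateway.credit_card.find(token)
    # Static style: relies on a prior braintree.Configuration.configure(...)
    # call having set up module-level credentials.
    return braintree.CreditCard.find(token)
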
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_verification_search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_verification_search.pyi new file mode 100644 index 00000000..4036aadd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/credit_card_verification_search.pyi @@ -0,0 +1,20 @@ +from typing import Any + +from braintree.credit_card import CreditCard as CreditCard +from braintree.credit_card_verification import CreditCardVerification as CreditCardVerification +from braintree.search import Search as Search +from braintree.util import Constants as Constants + +class CreditCardVerificationSearch: + credit_card_cardholder_name: Any + id: Any + credit_card_expiration_date: Any + credit_card_number: Any + credit_card_card_type: Any + ids: Any + created_at: Any + status: Any + billing_postal_code: Any + customer_email: Any + customer_id: Any + payment_method_token: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/customer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/customer.pyi new file mode 100644 index 00000000..6c81cc8b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/customer.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.address import Address as Address +from braintree.amex_express_checkout_card import AmexExpressCheckoutCard as AmexExpressCheckoutCard +from braintree.android_pay_card import AndroidPayCard as AndroidPayCard +from braintree.apple_pay_card import ApplePayCard as ApplePayCard +from braintree.configuration import Configuration as Configuration +from braintree.credit_card import CreditCard as CreditCard +from braintree.error_result import ErrorResult as ErrorResult +from braintree.europe_bank_account import EuropeBankAccount as EuropeBankAccount +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.ids_search import IdsSearch as IdsSearch +from braintree.masterpass_card import MasterpassCard as MasterpassCard +from braintree.paypal_account import PayPalAccount as PayPalAccount +from braintree.resource import Resource as Resource +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.samsung_pay_card import SamsungPayCard as SamsungPayCard +from braintree.successful_result import SuccessfulResult as SuccessfulResult +from braintree.us_bank_account import UsBankAccount as UsBankAccount +from braintree.util.http import Http as Http +from braintree.venmo_account import VenmoAccount as VenmoAccount +from braintree.visa_checkout_card import VisaCheckoutCard as VisaCheckoutCard + +class Customer(Resource): + @staticmethod + def all(): ... + @staticmethod + def create(params: Incomplete | None = ...): ... + @staticmethod + def delete(customer_id): ... + @staticmethod + def find(customer_id, association_filter_id: Incomplete | None = ...): ... + @staticmethod + def search(*query): ... + @staticmethod + def update(customer_id, params: Incomplete | None = ...): ... + @staticmethod + def create_signature(): ... + @staticmethod + def update_signature(): ... 
+ payment_methods: Any + credit_cards: Any + addresses: Any + paypal_accounts: Any + apple_pay_cards: Any + android_pay_cards: Any + amex_express_checkout_cards: Any + europe_bank_accounts: Any + venmo_accounts: Any + us_bank_accounts: Any + visa_checkout_cards: Any + masterpass_cards: Any + samsung_pay_cards: Any + def __init__(self, gateway, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/customer_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/customer_gateway.pyi new file mode 100644 index 00000000..077933ae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/customer_gateway.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.customer import Customer as Customer +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.ids_search import IdsSearch as IdsSearch +from braintree.resource import Resource as Resource +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class CustomerGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def all(self): ... + def create(self, params: Incomplete | None = ...): ... + def delete(self, customer_id): ... + def find(self, customer_id, association_filter_id: Incomplete | None = ...): ... + def search(self, *query): ... + def update(self, customer_id, params: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/customer_search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/customer_search.pyi new file mode 100644 index 00000000..a55ecf78 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/customer_search.pyi @@ -0,0 +1,29 @@ +from typing import Any + +from braintree.search import Search as Search + +class CustomerSearch: + address_extended_address: Any + address_first_name: Any + address_last_name: Any + address_locality: Any + address_postal_code: Any + address_region: Any + address_street_address: Any + address_country_name: Any + cardholder_name: Any + company: Any + created_at: Any + credit_card_expiration_date: Any + credit_card_number: Any + email: Any + fax: Any + first_name: Any + id: Any + ids: Any + last_name: Any + payment_method_token: Any + payment_method_token_with_duplicates: Any + phone: Any + website: Any + paypal_account_email: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/descriptor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/descriptor.pyi new file mode 100644 index 00000000..0ec66354 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/descriptor.pyi @@ -0,0 +1,4 @@ +from braintree.resource import Resource as Resource + +class Descriptor(Resource): + def __init__(self, gateway, attributes) -> None: ... 
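CustomerSearch, like CreditCardVerificationSearch above, is a bag of class-level search nodes, all typed Any here. In the runtime SDK these nodes build search criteria via comparison operators; a hedged sketch of what that looks like, built but not executed (running the search would issue a real API request, so it is left commented out):

import braintree

# Building a criterion does not touch the network; it only constructs a
# search node, whose precise type is invisible to a checker under these stubs.
criterion = braintree.CustomerSearch.email == "someone@example.com"

# Executing the search needs configured credentials and a live gateway:
# collection = braintree.Customer.search(criterion)
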
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/disbursement.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/disbursement.pyi new file mode 100644 index 00000000..fc4634d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/disbursement.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from braintree.merchant_account import MerchantAccount as MerchantAccount +from braintree.resource import Resource as Resource +from braintree.transaction_search import TransactionSearch as TransactionSearch + +class Disbursement(Resource): + class Type: + Credit: str + Debit: str + amount: Any + merchant_account: Any + def __init__(self, gateway, attributes) -> None: ... + def transactions(self): ... + def is_credit(self): ... + def is_debit(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/disbursement_detail.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/disbursement_detail.pyi new file mode 100644 index 00000000..7badea3a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/disbursement_detail.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class DisbursementDetail(AttributeGetter): + settlement_amount: Any + settlement_currency_exchange_rate: Any + def __init__(self, attributes) -> None: ... + @property + def is_valid(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/discount.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/discount.pyi new file mode 100644 index 00000000..e9ff1c91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/discount.pyi @@ -0,0 +1,6 @@ +from braintree.configuration import Configuration as Configuration +from braintree.modification import Modification as Modification + +class Discount(Modification): + @staticmethod + def all(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/discount_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/discount_gateway.pyi new file mode 100644 index 00000000..45203a32 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/discount_gateway.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from braintree.discount import Discount as Discount +from braintree.resource_collection import ResourceCollection as ResourceCollection + +class DiscountGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def all(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute.pyi new file mode 100644 index 00000000..edda8a91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute.pyi @@ -0,0 +1,66 @@ +from typing import Any + +from braintree.attribute_getter import AttributeGetter as AttributeGetter +from braintree.configuration import Configuration as Configuration +from braintree.dispute_details import ( + DisputeEvidence as DisputeEvidence, + DisputePayPalMessage as DisputePayPalMessage, + DisputeStatusHistory as DisputeStatusHistory, +) +from braintree.transaction_details import TransactionDetails as TransactionDetails + +class Dispute(AttributeGetter): + class Status: + Accepted: str + Disputed: str + Expired: str + Open: str + Won: str + Lost: str + + class Reason: + CancelledRecurringTransaction: str + CreditNotProcessed: str + Duplicate: str + Fraud: str + General: str + InvalidAccount: str + NotRecognized: str + ProductNotReceived: str + ProductUnsatisfactory: str + Retrieval: str + TransactionAmountDiffers: str + + class Kind: + Chargeback: str + PreArbitration: str + Retrieval: str + + class ChargebackProtectionLevel: + Effortless: str + Standard: str + NotProtected: str + @staticmethod + def accept(id): ... + @staticmethod + def add_file_evidence(dispute_id, document_upload_id): ... + @staticmethod + def add_text_evidence(id, content_or_request): ... + @staticmethod + def finalize(id): ... + @staticmethod + def find(id): ... + @staticmethod + def remove_evidence(id, evidence_id): ... + @staticmethod + def search(*query): ... + amount: Any + amount_disputed: Any + amount_won: Any + transaction_details: Any + transaction: Any + evidence: Any + paypal_messages: Any + status_history: Any + forwarded_comments: Any + def __init__(self, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/__init__.pyi new file mode 100644 index 00000000..e03bf6a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/__init__.pyi @@ -0,0 +1,3 @@ +from braintree.dispute_details.evidence import DisputeEvidence as DisputeEvidence +from braintree.dispute_details.paypal_message import DisputePayPalMessage as DisputePayPalMessage +from braintree.dispute_details.status_history import DisputeStatusHistory as DisputeStatusHistory diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/evidence.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/evidence.pyi new file mode 100644 index 00000000..c488be75 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/evidence.pyi @@ -0,0 +1,4 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class DisputeEvidence(AttributeGetter): + def __init__(self, attributes) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/paypal_message.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/paypal_message.pyi new file mode 100644 index 00000000..95504bfd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/paypal_message.pyi @@ -0,0 +1,4 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class DisputePayPalMessage(AttributeGetter): + def __init__(self, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/status_history.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/status_history.pyi new file mode 100644 index 00000000..648315f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_details/status_history.pyi @@ -0,0 +1,4 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class DisputeStatusHistory(AttributeGetter): + def __init__(self, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_gateway.pyi new file mode 100644 index 00000000..72ce19c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_gateway.pyi @@ -0,0 +1,23 @@ +from typing import Any + +from braintree.dispute import Dispute as Dispute +from braintree.dispute_details import DisputeEvidence as DisputeEvidence +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.paginated_collection import PaginatedCollection as PaginatedCollection +from braintree.paginated_result import PaginatedResult as PaginatedResult +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class DisputeGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def accept(self, dispute_id): ... + def add_file_evidence(self, dispute_id, document_upload_id_or_request): ... + def add_text_evidence(self, dispute_id, content_or_request): ... + def finalize(self, dispute_id): ... + def find(self, dispute_id): ... + def remove_evidence(self, dispute_id, evidence_id): ... + search_criteria: Any + def search(self, *query): ... 
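Dispute's nested Status, Reason and Kind classes are plain namespaces of string constants rather than enums, which is why every member is declared simply as str. A small hedged sketch of how such constants are typically compared against an attribute that these stubs leave untyped (no dispute is fetched here; the function only shows the comparison):

import braintree

def is_actionable(dispute: braintree.Dispute) -> bool:
    # dispute.status is provided dynamically via AttributeGetter and is not
    # declared in the stub, so this comparison is checked only at runtime.
    return dispute.status in (
        braintree.Dispute.Status.Open,
        braintree.Dispute.Status.Disputed,
    )
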
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_search.pyi new file mode 100644 index 00000000..dddf55e1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/dispute_search.pyi @@ -0,0 +1,23 @@ +from typing import Any + +from braintree.search import Search as Search + +class DisputeSearch: + amount_disputed: Any + amount_won: Any + case_number: Any + chargeback_protection_level: Any + customer_id: Any + disbursement_date: Any + effective_date: Any + id: Any + kind: Any + merchant_account_id: Any + reason: Any + reason_code: Any + received_date: Any + reference_number: Any + reply_by_date: Any + status: Any + transaction_id: Any + transaction_source: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/document_upload.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/document_upload.pyi new file mode 100644 index 00000000..e0d47ae1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/document_upload.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +from braintree.configuration import Configuration as Configuration +from braintree.resource import Resource as Resource +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class DocumentUpload(Resource): + class Kind: + EvidenceDocument: str + @staticmethod + def create(params: Incomplete | None = ...): ... + @staticmethod + def create_signature(): ... + def __init__(self, gateway, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/document_upload_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/document_upload_gateway.pyi new file mode 100644 index 00000000..c076510c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/document_upload_gateway.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.document_upload import DocumentUpload as DocumentUpload +from braintree.error_result import ErrorResult as ErrorResult +from braintree.resource import Resource as Resource +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class DocumentUploadGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def create(self, params: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/environment.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/environment.pyi new file mode 100644 index 00000000..7ebd8bf0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/environment.pyi @@ -0,0 +1,33 @@ +from typing import Any + +from braintree.exceptions.configuration_error import ConfigurationError as ConfigurationError + +class Environment: + __name__: Any + is_ssl: Any + ssl_certificate: Any + def __init__( + self, name, server, port, auth_url, is_ssl, ssl_certificate, graphql_server: str = ..., graphql_port: str = ... + ) -> None: ... 
+ @property + def base_url(self): ... + @property + def port(self): ... + @property + def auth_url(self): ... + @property + def protocol(self): ... + @property + def server(self): ... + @property + def server_and_port(self): ... + @property + def graphql_server(self): ... + @property + def graphql_port(self): ... + @property + def graphql_server_and_port(self): ... + @staticmethod + def parse_environment(environment): ... + @staticmethod + def braintree_root(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/error_codes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/error_codes.pyi new file mode 100644 index 00000000..99452114 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/error_codes.pyi @@ -0,0 +1,766 @@ +class ErrorCodes: + class Address: + CannotBeBlank: str + CompanyIsInvalid: str + CompanyIsTooLong: str + CountryCodeAlpha2IsNotAccepted: str + CountryCodeAlpha3IsNotAccepted: str + CountryCodeNumericIsNotAccepted: str + CountryNameIsNotAccepted: str + ExtendedAddressIsInvalid: str + ExtendedAddressIsTooLong: str + FirstNameIsInvalid: str + FirstNameIsTooLong: str + InconsistentCountry: str + IsInvalid: str + LastNameIsInvalid: str + LastNameIsTooLong: str + LocalityIsInvalid: str + LocalityIsTooLong: str + PostalCodeInvalidCharacters: str + PostalCodeIsInvalid: str + PostalCodeIsRequired: str + PostalCodeIsRequiredForCardBrandAndProcessor: str + PostalCodeIsTooLong: str + RegionIsInvalid: str + RegionIsTooLong: str + StateIsInvalidForSellerProtection: str + StreetAddressIsInvalid: str + StreetAddressIsRequired: str + StreetAddressIsTooLong: str + TooManyAddressesPerCustomer: str + + class ApplePay: + ApplePayCardsAreNotAccepted: str + CustomerIdIsRequiredForVaulting: str + TokenIsInUse: str + PaymentMethodNonceConsumed: str + PaymentMethodNonceUnknown: str + PaymentMethodNonceLocked: str + PaymentMethodNonceCardTypeIsNotAccepted: str + CannotUpdateApplePayCardUsingPaymentMethodNonce: str + NumberIsRequired: str + ExpirationMonthIsRequired: str + ExpirationYearIsRequired: str + CryptogramIsRequired: str + DecryptionFailed: str + Disabled: str + MerchantNotConfigured: str + MerchantKeysAlreadyConfigured: str + MerchantKeysNotConfigured: str + CertificateInvalid: str + CertificateMismatch: str + InvalidToken: str + PrivateKeyMismatch: str + KeyMismatchStoringCertificate: str + + class AuthorizationFingerprint: + MissingFingerprint: str + InvalidFormat: str + SignatureRevoked: str + InvalidCreatedAt: str + InvalidPublicKey: str + InvalidSignature: str + OptionsNotAllowedWithoutCustomer: str + + class ClientToken: + MakeDefaultRequiresCustomerId: str + VerifyCardRequiresCustomerId: str + FailOnDuplicatePaymentMethodRequiresCustomerId: str + CustomerDoesNotExist: str + ProxyMerchantDoesNotExist: str + UnsupportedVersion: str + MerchantAccountDoesNotExist: str + + class CreditCard: + BillingAddressConflict: str + BillingAddressFormatIsInvalid: str + BillingAddressIdIsInvalid: str + CannotUpdateCardUsingPaymentMethodNonce: str + CardholderNameIsTooLong: str + CreditCardTypeIsNotAccepted: str + CreditCardTypeIsNotAcceptedBySubscriptionMerchantAccount: str + CustomerIdIsInvalid: str + CustomerIdIsRequired: str + CvvIsInvalid: str + CvvIsRequired: str + CvvVerificationFailed: str + DuplicateCardExists: str + ExpirationDateConflict: str + ExpirationDateIsInvalid: str + ExpirationDateIsRequired: str + 
ExpirationDateYearIsInvalid: str + ExpirationMonthIsInvalid: str + ExpirationYearIsInvalid: str + InvalidParamsForCreditCardUpdate: str + InvalidVenmoSDKPaymentMethodCode: str + NumberHasInvalidLength: str + NumberLengthIsInvalid: str + NumberIsInvalid: str + NumberIsProhibited: str + NumberIsRequired: str + NumberMustBeTestNumber: str + PaymentMethodConflict: str + PaymentMethodIsNotACreditCard: str + PaymentMethodNonceCardTypeIsNotAccepted: str + PaymentMethodNonceConsumed: str + PaymentMethodNonceLocked: str + PaymentMethodNonceUnknown: str + PostalCodeVerificationFailed: str + TokenInvalid: str + TokenFormatIsInvalid: str + TokenIsInUse: str + TokenIsNotAllowed: str + TokenIsRequired: str + TokenIsTooLong: str + VenmoSDKPaymentMethodCodeCardTypeIsNotAccepted: str + VerificationNotSupportedOnThisMerchantAccount: str + VerificationAccountTypeIsInvald: str + VerificationAccountTypeNotSupported: str + + class Options: + UpdateExistingTokenIsInvalid: str + UpdateExistingTokenNotAllowed: str + VerificationAmountCannotBeNegative: str + VerificationAmountFormatIsInvalid: str + VerificationAmountIsTooLarge: str + VerificationAmountNotSupportedByProcessor: str + VerificationMerchantAccountIdIsInvalid: str + VerificationMerchantAccountIsForbidden: str + VerificationMerchantAccountIsSuspended: str + VerificationMerchantAccountCannotBeSubMerchantAccount: str + + class Customer: + CompanyIsTooLong: str + CustomFieldIsInvalid: str + CustomFieldIsTooLong: str + EmailIsInvalid: str + EmailFormatIsInvalid: str + EmailIsRequired: str + EmailIsTooLong: str + FaxIsTooLong: str + FirstNameIsTooLong: str + IdIsInUse: str + IdIsInvalid: str + IdIsNotAllowed: str + IdIsRequired: str + IdIsTooLong: str + LastNameIsTooLong: str + PhoneIsTooLong: str + VaultedPaymentInstrumentNonceBelongsToDifferentCustomer: str + WebsiteIsInvalid: str + WebsiteFormatIsInvalid: str + WebsiteIsTooLong: str + + class Descriptor: + DynamicDescriptorsDisabled: str + InternationalNameFormatIsInvalid: str + InternationalPhoneFormatIsInvalid: str + NameFormatIsInvalid: str + PhoneFormatIsInvalid: str + UrlFormatIsInvalid: str + + class Dispute: + CanOnlyAddEvidenceToOpenDispute: str + CanOnlyRemoveEvidenceFromOpenDispute: str + CanOnlyAddEvidenceDocumentToDispute: str + CanOnlyAcceptOpenDispute: str + CanOnlyFinalizeOpenDispute: str + CanOnlyCreateEvidenceWithValidCategory: str + EvidenceContentDateInvalid: str + EvidenceContentTooLong: str + EvidenceContentARNTooLong: str + EvidenceContentPhoneTooLong: str + EvidenceCategoryTextOnly: str + EvidenceCategoryDocumentOnly: str + EvidenceCategoryNotForReasonCode: str + EvidenceCategoryDuplicate: str + EvidenceContentEmailInvalid: str + DigitalGoodsMissingEvidence: str + DigitalGoodsMissingDownloadDate: str + NonDisputedPriorTransactionEvidenceMissingARN: str + NonDisputedPriorTransactionEvidenceMissingDate: str + RecurringTransactionEvidenceMissingDate: str + RecurringTransactionEvidenceMissingARN: str + ValidEvidenceRequiredToFinalize: str + + class DocumentUpload: + KindIsInvalid: str + FileIsTooLarge: str + FileTypeIsInvalid: str + FileIsMalformedOrEncrypted: str + FileIsTooLong: str + FileIsEmpty: str + + class Merchant: + CountryCannotBeBlank: str + CountryCodeAlpha2IsInvalid: str + CountryCodeAlpha2IsNotAccepted: str + CountryCodeAlpha3IsInvalid: str + CountryCodeAlpha3IsNotAccepted: str + CountryCodeNumericIsInvalid: str + CountryCodeNumericIsNotAccepted: str + CountryNameIsInvalid: str + CountryNameIsNotAccepted: str + CurrenciesAreInvalid: str + EmailFormatIsInvalid: str + 
EmailIsRequired: str + InconsistentCountry: str + PaymentMethodsAreInvalid: str + PaymentMethodsAreNotAllowed: str + MerchantAccountExistsForCurrency: str + CurrencyIsRequired: str + CurrencyIsInvalid: str + NoMerchantAccounts: str + MerchantAccountExistsForId: str + + class MerchantAccount: + IdFormatIsInvalid: str + IdIsInUse: str + IdIsNotAllowed: str + IdIsTooLong: str + MasterMerchantAccountIdIsInvalid: str + MasterMerchantAccountIdIsRequired: str + MasterMerchantAccountMustBeActive: str + TosAcceptedIsRequired: str + CannotBeUpdated: str + IdCannotBeUpdated: str + MasterMerchantAccountIdCannotBeUpdated: str + Declined: str + DeclinedMasterCardMatch: str + DeclinedOFAC: str + DeclinedFailedKYC: str + DeclinedSsnInvalid: str + DeclinedSsnMatchesDeceased: str + + class ApplicantDetails: + AccountNumberIsRequired: str + CompanyNameIsInvalid: str + CompanyNameIsRequiredWithTaxId: str + DateOfBirthIsRequired: str + Declined: str + DeclinedMasterCardMatch: str + DeclinedOFAC: str + DeclinedFailedKYC: str + DeclinedSsnInvalid: str + DeclinedSsnMatchesDeceased: str + EmailAddressIsInvalid: str + FirstNameIsInvalid: str + FirstNameIsRequired: str + LastNameIsInvalid: str + LastNameIsRequired: str + PhoneIsInvalid: str + RoutingNumberIsInvalid: str + RoutingNumberIsRequired: str + SsnIsInvalid: str + TaxIdIsInvalid: str + TaxIdIsRequiredWithCompanyName: str + DateOfBirthIsInvalid: str + EmailAddressIsRequired: str + AccountNumberIsInvalid: str + TaxIdMustBeBlank: str + + class Address: + LocalityIsRequired: str + PostalCodeIsInvalid: str + PostalCodeIsRequired: str + RegionIsRequired: str + StreetAddressIsInvalid: str + StreetAddressIsRequired: str + RegionIsInvalid: str + + class Individual: + FirstNameIsRequired: str + LastNameIsRequired: str + DateOfBirthIsRequired: str + SsnIsInvalid: str + EmailAddressIsInvalid: str + FirstNameIsInvalid: str + LastNameIsInvalid: str + PhoneIsInvalid: str + DateOfBirthIsInvalid: str + EmailAddressIsRequired: str + + class Address: + StreetAddressIsRequired: str + LocalityIsRequired: str + PostalCodeIsRequired: str + RegionIsRequired: str + StreetAddressIsInvalid: str + PostalCodeIsInvalid: str + RegionIsInvalid: str + + class Business: + DbaNameIsInvalid: str + LegalNameIsInvalid: str + LegalNameIsRequiredWithTaxId: str + TaxIdIsInvalid: str + TaxIdIsRequiredWithLegalName: str + TaxIdMustBeBlank: str + + class Address: + StreetAddressIsInvalid: str + PostalCodeIsInvalid: str + RegionIsInvalid: str + + class Funding: + RoutingNumberIsRequired: str + AccountNumberIsRequired: str + RoutingNumberIsInvalid: str + AccountNumberIsInvalid: str + DestinationIsInvalid: str + DestinationIsRequired: str + EmailAddressIsInvalid: str + EmailAddressIsRequired: str + MobilePhoneIsInvalid: str + MobilePhoneIsRequired: str + + class OAuth: + InvalidGrant: str + InvalidCredentials: str + InvalidScope: str + InvalidRequest: str + UnsupportedGrantType: str + + class Verification: + ThreeDSecureAuthenticationIdIsInvalid: str + ThreeDSecureAuthenticationIdDoesntMatchNonceThreeDSecureAuthentication: str + ThreeDSecureTransactionPaymentMethodDoesntMatchThreeDSecureAuthenticationPaymentMethod: str + ThreeDSecureAuthenticationIdWithThreeDSecurePassThruIsInvalid: str + ThreeDSecureAuthenticationFailed: str + ThreeDSecureTokenIsInvalid: str + ThreeDSecureVerificationDataDoesntMatchVerify: str + MerchantAccountDoesNotSupport3DSecure: str + MerchantAcountDoesNotMatch3DSecureMerchantAccount: str + AmountDoesNotMatch3DSecureAmount: str + + class ThreeDSecurePassThru: + EciFlagIsRequired: 
str + EciFlagIsInvalid: str + CavvIsRequired: str + ThreeDSecureVersionIsRequired: str + ThreeDSecureVersionIsInvalid: str + AuthenticationResponseIsInvalid: str + DirectoryResponseIsInvalid: str + CavvAlgorithmIsInvalid: str + + class Options: + AmountCannotBeNegative: str + AmountFormatIsInvalid: str + AmountIsTooLarge: str + AmountNotSupportedByProcessor: str + MerchantAccountIdIsInvalid: str + MerchantAccountIsSuspended: str + MerchantAccountIsForbidden: str + MerchantAccountCannotBeSubMerchantAccount: str + AccountTypeIsInvalid: str + AccountTypeNotSupported: str + + class PaymentMethod: + CannotForwardPaymentMethodType: str + PaymentMethodParamsAreRequired: str + NonceIsInvalid: str + NonceIsRequired: str + CustomerIdIsRequired: str + CustomerIdIsInvalid: str + PaymentMethodNonceConsumed: str + PaymentMethodNonceUnknown: str + PaymentMethodNonceLocked: str + PaymentMethodNoLongerSupported: str + AuthExpired: str + CannotHaveFundingSourceWithoutAccessToken: str + InvalidFundingSourceSelection: str + CannotUpdatePayPalAccountUsingPaymentMethodNonce: str + + class Options: + UsBankAccountVerificationMethodIsInvalid: str + + class PayPalAccount: + CannotHaveBothAccessTokenAndConsentCode: str + CannotVaultOneTimeUsePayPalAccount: str + ConsentCodeOrAccessTokenIsRequired: str + CustomerIdIsRequiredForVaulting: str + InvalidParamsForPayPalAccountUpdate: str + PayPalAccountsAreNotAccepted: str + PayPalCommunicationError: str + PaymentMethodNonceConsumed: str + PaymentMethodNonceLocked: str + PaymentMethodNonceUnknown: str + TokenIsInUse: str + + class SettlementBatchSummary: + CustomFieldIsInvalid: str + SettlementDateIsInvalid: str + SettlementDateIsRequired: str + + class SEPAMandate: + TypeIsRequired: str + IBANInvalidCharacter: str + BICInvalidCharacter: str + BICLengthIsInvalid: str + BICUnsupportedCountry: str + IBANUnsupportedCountry: str + IBANInvalidFormat: str + BillingAddressConflict: str + BillingAddressIdIsInvalid: str + TypeIsInvalid: str + + class EuropeBankAccount: + BICIsRequired: str + IBANIsRequired: str + AccountHolderNameIsRequired: str + + class Subscription: + BillingDayOfMonthCannotBeUpdated: str + BillingDayOfMonthIsInvalid: str + BillingDayOfMonthMustBeNumeric: str + CannotAddDuplicateAddonOrDiscount: str + CannotEditCanceledSubscription: str + CannotEditExpiredSubscription: str + CannotEditPriceChangingFieldsOnPastDueSubscription: str + FirstBillingDateCannotBeInThePast: str + FirstBillingDateCannotBeUpdated: str + FirstBillingDateIsInvalid: str + IdIsInUse: str + InconsistentNumberOfBillingCycles: str + InconsistentStartDate: str + InvalidRequestFormat: str + MerchantAccountDoesNotSupportInstrumentType: str + MerchantAccountIdIsInvalid: str + MismatchCurrencyISOCode: str + NumberOfBillingCyclesCannotBeBlank: str + NumberOfBillingCyclesIsTooSmall: str + NumberOfBillingCyclesMustBeGreaterThanZero: str + NumberOfBillingCyclesMustBeNumeric: str + PaymentMethodNonceCardTypeIsNotAccepted: str + PaymentMethodNonceInstrumentTypeDoesNotSupportSubscriptions: str + PaymentMethodNonceIsInvalid: str + PaymentMethodNonceNotAssociatedWithCustomer: str + PaymentMethodNonceUnvaultedCardIsNotAccepted: str + PaymentMethodTokenCardTypeIsNotAccepted: str + PaymentMethodTokenInstrumentTypeDoesNotSupportSubscriptions: str + PaymentMethodTokenIsInvalid: str + PaymentMethodTokenNotAssociatedWithCustomer: str + PlanBillingFrequencyCannotBeUpdated: str + PlanIdIsInvalid: str + PriceCannotBeBlank: str + PriceFormatIsInvalid: str + PriceIsTooLarge: str + StatusIsCanceled: str + 
TokenFormatIsInvalid: str + TrialDurationFormatIsInvalid: str + TrialDurationIsRequired: str + TrialDurationUnitIsInvalid: str + + class Modification: + AmountCannotBeBlank: str + AmountIsInvalid: str + AmountIsTooLarge: str + CannotEditModificationsOnPastDueSubscription: str + CannotUpdateAndRemove: str + ExistingIdIsIncorrectKind: str + ExistingIdIsInvalid: str + ExistingIdIsRequired: str + IdToRemoveIsIncorrectKind: str + IdToRemoveIsNotPresent: str + InconsistentNumberOfBillingCycles: str + InheritedFromIdIsInvalid: str + InheritedFromIdIsRequired: str + Missing: str + NumberOfBillingCyclesCannotBeBlank: str + NumberOfBillingCyclesIsInvalid: str + NumberOfBillingCyclesMustBeGreaterThanZero: str + QuantityCannotBeBlank: str + QuantityIsInvalid: str + QuantityMustBeGreaterThanZero: str + IdToRemoveIsInvalid: str + + class Transaction: + AdjustmentAmountMustBeGreaterThanZero: str + AmountCannotBeNegative: str + AmountDoesNotMatch3DSecureAmount: str + AmountIsInvalid: str + AmountFormatIsInvalid: str + AmountIsRequired: str + AmountIsTooLarge: str + AmountMustBeGreaterThanZero: str + AmountNotSupportedByProcessor: str + BillingAddressConflict: str + BillingPhoneNumberIsInvalid: str + CannotBeVoided: str + CannotCancelRelease: str + CannotCloneCredit: str + CannotCloneMarketplaceTransaction: str + CannotCloneTransactionWithPayPalAccount: str + CannotCloneTransactionWithVaultCreditCard: str + CannotCloneUnsuccessfulTransaction: str + CannotCloneVoiceAuthorizations: str + CannotHoldInEscrow: str + CannotPartiallyRefundEscrowedTransaction: str + CannotRefundCredit: str + CannotRefundSettlingTransaction: str + CannotRefundUnlessSettled: str + CannotRefundWithPendingMerchantAccount: str + CannotRefundWithSuspendedMerchantAccount: str + CannotReleaseFromEscrow: str + CannotSimulateTransactionSettlement: str + CannotSubmitForPartialSettlement: str + CannotSubmitForSettlement: str + CannotUpdateTransactionDetailsNotSubmittedForSettlement: str + ChannelIsTooLong: str + CreditCardIsRequired: str + CustomFieldIsInvalid: str + CustomFieldIsTooLong: str + CustomerDefaultPaymentMethodCardTypeIsNotAccepted: str + CustomerDoesNotHaveCreditCard: str + CustomerIdIsInvalid: str + DiscountAmountCannotBeNegative: str + DiscountAmountFormatIsInvalid: str + DiscountAmountIsTooLarge: str + ExchangeRateQuoteIdIsTooLong: str + FailedAuthAdjustmentAllowRetry: str + FailedAuthAdjustmentHardDecline: str + FinalAuthSubmitForSettlementForDifferentAmount: str + HasAlreadyBeenRefunded: str + LineItemsExpected: str + MerchantAccountDoesNotMatch3DSecureMerchantAccount: str + MerchantAccountDoesNotSupportMOTO: str + MerchantAccountDoesNotSupportRefunds: str + MerchantAccountIdDoesNotMatchSubscription: str + MerchantAccountIdIsInvalid: str + MerchantAccountIsSuspended: str + NoNetAmountToPerformAuthAdjustment: str + OrderIdIsTooLong: str + PayPalAuthExpired: str + PayPalNotEnabled: str + PayPalVaultRecordMissingData: str + PaymentInstrumentNotSupportedByMerchantAccount: str + PaymentInstrumentTypeIsNotAccepted: str + PaymentInstrumentWithExternalVaultIsInvalid: str + PaymentMethodConflict: str + PaymentMethodConflictWithVenmoSDK: str + PaymentMethodDoesNotBelongToCustomer: str + PaymentMethodDoesNotBelongToSubscription: str + PaymentMethodNonceCardTypeIsNotAccepted: str + PaymentMethodNonceConsumed: str + PaymentMethodNonceHasNoValidPaymentInstrumentType: str + PaymentMethodNonceLocked: str + PaymentMethodNonceUnknown: str + PaymentMethodTokenCardTypeIsNotAccepted: str + PaymentMethodTokenIsInvalid: str + 
+        ProcessorAuthorizationCodeCannotBeSet: str
+        ProcessorAuthorizationCodeIsInvalid: str
+        ProcessorDoesNotSupportAuths: str
+        ProcessorDoesNotSupportAuthAdjustment: str
+        ProcessorDoesNotSupportCredits: str
+        ProcessorDoesNotSupportIncrementalAuth: str
+        ProcessorDoesNotSupportMotoForCardType: str
+        ProcessorDoesNotSupportPartialAuthReversal: str
+        ProcessorDoesNotSupportPartialSettlement: str
+        ProcessorDoesNotSupportUpdatingDescriptor: str
+        ProcessorDoesNotSupportUpdatingOrderId: str
+        ProcessorDoesNotSupportUpdatingTransactionDetails: str
+        ProcessorDoesNotSupportVoiceAuthorizations: str
+        ProductSkuIsInvalid: str
+        PurchaseOrderNumberIsInvalid: str
+        PurchaseOrderNumberIsTooLong: str
+        RefundAmountIsTooLarge: str
+        RefundAuthHardDeclined: str
+        RefundAuthSoftDeclined: str
+        ScaExemptionInvalid: str
+        ServiceFeeAmountCannotBeNegative: str
+        ServiceFeeAmountFormatIsInvalid: str
+        ServiceFeeAmountIsTooLarge: str
+        ServiceFeeAmountNotAllowedOnMasterMerchantAccount: str
+        ServiceFeeIsNotAllowedOnCredits: str
+        ServiceFeeNotAcceptedForPayPal: str
+        SettlementAmountIsLessThanServiceFeeAmount: str
+        SettlementAmountIsTooLarge: str
+        ShippingAddressDoesntMatchCustomer: str
+        ShippingAmountCannotBeNegative: str
+        ShippingAmountFormatIsInvalid: str
+        ShippingAmountIsTooLarge: str
+        ShippingMethodIsInvalid: str
+        ShippingPhoneNumberIsInvalid: str
+        ShipsFromPostalCodeInvalidCharacters: str
+        ShipsFromPostalCodeIsInvalid: str
+        ShipsFromPostalCodeIsTooLong: str
+        SubMerchantAccountRequiresServiceFeeAmount: str
+        SubscriptionDoesNotBelongToCustomer: str
+        SubscriptionIdIsInvalid: str
+        SubscriptionStatusMustBePastDue: str
+        TaxAmountCannotBeNegative: str
+        TaxAmountFormatIsInvalid: str
+        TaxAmountIsRequiredForAibSwedish: str
+        TaxAmountIsTooLarge: str
+        ThreeDSecureAuthenticationFailed: str
+        ThreeDSecureAuthenticationIdDoesntMatchNonceThreeDSecureAuthentication: str
+        ThreeDSecureAuthenticationIdIsInvalid: str
+        ThreeDSecureAuthenticationIdWithThreeDSecurePassThruIsInvalid: str
+        ThreeDSecureAuthenticationResponseIsInvalid: str
+        ThreeDSecureCavvAlgorithmIsInvalid: str
+        ThreeDSecureCavvIsRequired: str
+        ThreeDSecureDirectoryResponseIsInvalid: str
+        ThreeDSecureEciFlagIsInvalid: str
+        ThreeDSecureEciFlagIsRequired: str
+        ThreeDSecureMerchantAccountDoesNotSupportCardType: str
+        ThreeDSecureTokenIsInvalid: str
+        ThreeDSecureTransactionDataDoesntMatchVerify: str
+        ThreeDSecureTransactionPaymentMethodDoesntMatchThreeDSecureAuthenticationPaymentMethod: str
+        ThreeDSecureXidIsRequired: str
+        TooManyLineItems: str
+        TransactionIsNotEligibleForAdjustment: str
+        TransactionMustBeInStateAuthorized: str
+        TransactionSourceIsInvalid: str
+        TypeIsInvalid: str
+        TypeIsRequired: str
+        UnsupportedVoiceAuthorization: str
+        UsBankAccountNonceMustBePlaidVerified: str
+        UsBankAccountNotVerified: str
+
+        class ExternalVault:
+            StatusIsInvalid: str
+            StatusWithPreviousNetworkTransactionIdIsInvalid: str
+            CardTypeIsInvalid: str
+            PreviousNetworkTransactionIdIsInvalid: str
+
+        class Options:
+            SubmitForSettlementIsRequiredForCloning: str
+            SubmitForSettlementIsRequiredForPayPalUnilateral: str
+            UseBillingForShippingDisabled: str
+            VaultIsDisabled: str
+
+            class PayPal:
+                CustomFieldTooLong: str
+
+            class CreditCard:
+                AccountTypeIsInvalid: str
+                AccountTypeNotSupported: str
+                AccountTypeDebitDoesNotSupportAuths: str
+
+        class Industry:
+            IndustryTypeIsInvalid: str
+
+            class Lodging:
+                EmptyData: str
+                FolioNumberIsInvalid: str
+                CheckInDateIsInvalid: str
+                CheckOutDateIsInvalid: str
+                CheckOutDateMustFollowCheckInDate: str
+                UnknownDataField: str
+                RoomRateMustBeGreaterThanZero: str
+                RoomRateFormatIsInvalid: str
+                RoomRateIsTooLarge: str
+                RoomTaxMustBeGreaterThanZero: str
+                RoomTaxFormatIsInvalid: str
+                RoomTaxIsTooLarge: str
+                NoShowIndicatorIsInvalid: str
+                AdvancedDepositIndicatorIsInvalid: str
+                FireSafetyIndicatorIsInvalid: str
+                PropertyPhoneIsInvalid: str
+
+            class TravelCruise:
+                EmptyData: str
+                UnknownDataField: str
+                TravelPackageIsInvalid: str
+                DepartureDateIsInvalid: str
+                LodgingCheckInDateIsInvalid: str
+                LodgingCheckOutDateIsInvalid: str
+
+            class TravelFlight:
+                EmptyData: str
+                UnknownDataField: str
+                CustomerCodeIsTooLong: str
+                FareAmountCannotBeNegative: str
+                FareAmountFormatIsInvalid: str
+                FareAmountIsTooLarge: str
+                FeeAmountCannotBeNegative: str
+                FeeAmountFormatIsInvalid: str
+                FeeAmountIsTooLarge: str
+                IssuedDateFormatIsInvalid: str
+                IssuingCarrierCodeIsTooLong: str
+                PassengerMiddleInitialIsTooLong: str
+                RestrictedTicketIsRequired: str
+                TaxAmountCannotBeNegative: str
+                TaxAmountFormatIsInvalid: str
+                TaxAmountIsTooLarge: str
+                TicketNumberIsTooLong: str
+                LegsExpected: str
+                TooManyLegs: str
+
+            class Leg:
+                class TravelFlight:
+                    ArrivalAirportCodeIsTooLong: str
+                    ArrivalTimeFormatIsInvalid: str
+                    CarrierCodeIsTooLong: str
+                    ConjunctionTicketIsTooLong: str
+                    CouponNumberIsTooLong: str
+                    DepartureAirportCodeIsTooLong: str
+                    DepartureTimeFormatIsInvalid: str
+                    ExchangeTicketIsTooLong: str
+                    FareAmountCannotBeNegative: str
+                    FareAmountFormatIsInvalid: str
+                    FareAmountIsTooLarge: str
+                    FareBasisCodeIsTooLong: str
+                    FeeAmountCannotBeNegative: str
+                    FeeAmountFormatIsInvalid: str
+                    FeeAmountIsTooLarge: str
+                    ServiceClassIsTooLong: str
+                    TaxAmountCannotBeNegative: str
+                    TaxAmountFormatIsInvalid: str
+                    TaxAmountIsTooLarge: str
+                    TicketNumberIsTooLong: str
+
+            class AdditionalCharge:
+                KindIsInvalid: str
+                KindMustBeUnique: str
+                AmountMustBeGreaterThanZero: str
+                AmountFormatIsInvalid: str
+                AmountIsTooLarge: str
+                AmountIsRequired: str
+
+        class LineItem:
+            CommodityCodeIsTooLong: str
+            DescriptionIsTooLong: str
+            DiscountAmountFormatIsInvalid: str
+            DiscountAmountIsTooLarge: str
+            DiscountAmountCannotBeNegative: str
+            KindIsInvalid: str
+            KindIsRequired: str
+            NameIsRequired: str
+            NameIsTooLong: str
+            ProductCodeIsTooLong: str
+            QuantityFormatIsInvalid: str
+            QuantityIsRequired: str
+            QuantityIsTooLarge: str
+            TotalAmountFormatIsInvalid: str
+            TotalAmountIsRequired: str
+            TotalAmountIsTooLarge: str
+            TotalAmountMustBeGreaterThanZero: str
+            UnitAmountFormatIsInvalid: str
+            UnitAmountIsRequired: str
+            UnitAmountIsTooLarge: str
+            UnitAmountMustBeGreaterThanZero: str
+            UnitOfMeasureIsTooLarge: str
+            UnitTaxAmountFormatIsInvalid: str
+            UnitTaxAmountIsTooLarge: str
+            UnitTaxAmountCannotBeNegative: str
+            TaxAmountFormatIsInvalid: str
+            TaxAmountIsTooLarge: str
+            TaxAmountCannotBeNegative: str
+
+    class UsBankAccountVerification:
+        NotConfirmable: str
+        MustBeMicroTransfersVerification: str
+        AmountsDoNotMatch: str
+        TooManyConfirmationAttempts: str
+        UnableToConfirmDepositAmounts: str
+        InvalidDepositAmounts: str
+
+    class RiskData:
+        CustomerBrowserIsTooLong: str
+        CustomerDeviceIdIsTooLong: str
+        CustomerLocationZipInvalidCharacters: str
+        CustomerLocationZipIsInvalid: str
+        CustomerLocationZipIsTooLong: str
+        CustomerTenureIsTooLong: str
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/error_result.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/error_result.pyi
new file mode 100644
index 00000000..6869df0d
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/error_result.pyi
@@ -0,0 +1,16 @@
+from typing import Any
+
+from braintree.credit_card_verification import CreditCardVerification as CreditCardVerification
+from braintree.errors import Errors as Errors
+
+class ErrorResult:
+    params: Any
+    errors: Any
+    message: Any
+    credit_card_verification: Any
+    transaction: Any
+    subscription: Any
+    merchant_account: Any
+    def __init__(self, gateway, attributes) -> None: ...
+    @property
+    def is_success(self): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/errors.pyi
new file mode 100644
index 00000000..071bdba6
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/errors.pyi
@@ -0,0 +1,12 @@
+from typing import Any
+
+from braintree.validation_error_collection import ValidationErrorCollection as ValidationErrorCollection
+
+class Errors:
+    errors: Any
+    size: Any
+    def __init__(self, data) -> None: ...
+    @property
+    def deep_errors(self): ...
+    def for_object(self, key): ...
+    def __len__(self) -> int: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/europe_bank_account.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/europe_bank_account.pyi
new file mode 100644
index 00000000..6c6230c5
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/europe_bank_account.pyi
@@ -0,0 +1,9 @@
+from braintree.configuration import Configuration as Configuration
+from braintree.resource import Resource as Resource
+
+class EuropeBankAccount(Resource):
+    class MandateType:
+        Business: str
+        Consumer: str
+    @staticmethod
+    def signature(): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/__init__.pyi
new file mode 100644
index 00000000..343f22a0
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/__init__.pyi
@@ -0,0 +1,13 @@
+from braintree.exceptions.authentication_error import AuthenticationError as AuthenticationError
+from braintree.exceptions.authorization_error import AuthorizationError as AuthorizationError
+from braintree.exceptions.configuration_error import ConfigurationError as ConfigurationError
+from braintree.exceptions.gateway_timeout_error import GatewayTimeoutError as GatewayTimeoutError
+from braintree.exceptions.invalid_challenge_error import InvalidChallengeError as InvalidChallengeError
+from braintree.exceptions.invalid_signature_error import InvalidSignatureError as InvalidSignatureError
+from braintree.exceptions.not_found_error import NotFoundError as NotFoundError
+from braintree.exceptions.request_timeout_error import RequestTimeoutError as RequestTimeoutError
+from braintree.exceptions.server_error import ServerError as ServerError
+from braintree.exceptions.service_unavailable_error import ServiceUnavailableError as ServiceUnavailableError
+from braintree.exceptions.too_many_requests_error import TooManyRequestsError as TooManyRequestsError
+from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError
+from braintree.exceptions.upgrade_required_error import UpgradeRequiredError as UpgradeRequiredError
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/authentication_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/authentication_error.pyi
new file mode 100644
index 00000000..64e3b570
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/authentication_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class AuthenticationError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/authorization_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/authorization_error.pyi
new file mode 100644
index 00000000..e945a78d
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/authorization_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class AuthorizationError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/braintree_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/braintree_error.pyi
new file mode 100644
index 00000000..7437f6dc
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/braintree_error.pyi
@@ -0,0 +1 @@
+class BraintreeError(Exception): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/configuration_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/configuration_error.pyi
new file mode 100644
index 00000000..abf8cb30
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/configuration_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError
+
+class ConfigurationError(UnexpectedError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/gateway_timeout_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/gateway_timeout_error.pyi
new file mode 100644
index 00000000..97a8be4d
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/gateway_timeout_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class GatewayTimeoutError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/__init__.pyi
new file mode 100644
index 00000000..32eda380
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/__init__.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.http.connection_error import ConnectionError as ConnectionError
+from braintree.exceptions.http.invalid_response_error import InvalidResponseError as InvalidResponseError
+from braintree.exceptions.http.timeout_error import TimeoutError as TimeoutError
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/connection_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/connection_error.pyi
new file mode 100644
index 00000000..05b5c933
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/connection_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError
+
+class ConnectionError(UnexpectedError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/invalid_response_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/invalid_response_error.pyi
new file mode 100644
index 00000000..d65b0810
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/invalid_response_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError
+
+class InvalidResponseError(UnexpectedError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/timeout_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/timeout_error.pyi
new file mode 100644
index 00000000..b00dbb69
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/http/timeout_error.pyi
@@ -0,0 +1,5 @@
+from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError
+
+class TimeoutError(UnexpectedError): ...
+class ConnectTimeoutError(TimeoutError): ...
+class ReadTimeoutError(TimeoutError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/invalid_challenge_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/invalid_challenge_error.pyi
new file mode 100644
index 00000000..af1f68bf
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/invalid_challenge_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class InvalidChallengeError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/invalid_signature_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/invalid_signature_error.pyi
new file mode 100644
index 00000000..5958f8f5
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/invalid_signature_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class InvalidSignatureError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/not_found_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/not_found_error.pyi
new file mode 100644
index 00000000..50fb02f7
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/not_found_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class NotFoundError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/request_timeout_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/request_timeout_error.pyi
new file mode 100644
index 00000000..65ab265d
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/request_timeout_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class RequestTimeoutError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/server_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/server_error.pyi
new file mode 100644
index 00000000..1ccc2bed
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/server_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class ServerError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/service_unavailable_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/service_unavailable_error.pyi
new file mode 100644
index 00000000..d51469ac
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/service_unavailable_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class ServiceUnavailableError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/too_many_requests_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/too_many_requests_error.pyi
new file mode 100644
index 00000000..1980fcb4
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/too_many_requests_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class TooManyRequestsError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/unexpected_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/unexpected_error.pyi
new file mode 100644
index 00000000..c141ae5f
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/unexpected_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class UnexpectedError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/upgrade_required_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/upgrade_required_error.pyi
new file mode 100644
index 00000000..feae59a2
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/exceptions/upgrade_required_error.pyi
@@ -0,0 +1,3 @@
+from braintree.exceptions.braintree_error import BraintreeError as BraintreeError
+
+class UpgradeRequiredError(BraintreeError): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/facilitated_details.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/facilitated_details.pyi
new file mode 100644
index 00000000..724f3f18
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/facilitated_details.pyi
@@ -0,0 +1,3 @@
+from braintree.attribute_getter import AttributeGetter as AttributeGetter
+
+class FacilitatedDetails(AttributeGetter): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/facilitator_details.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/facilitator_details.pyi
new file mode 100644
index 00000000..0557e2d1
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/facilitator_details.pyi
@@ -0,0 +1,3 @@
+from braintree.attribute_getter import AttributeGetter as AttributeGetter
+
+class FacilitatorDetails(AttributeGetter): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/granted_payment_instrument_update.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/granted_payment_instrument_update.pyi
new file mode 100644
index 00000000..73219712
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/granted_payment_instrument_update.pyi
@@ -0,0 +1,7 @@
+from typing import Any
+
+from braintree.resource import Resource as Resource
+
+class GrantedPaymentInstrumentUpdate(Resource):
+    payment_method_nonce: Any
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/iban_bank_account.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/iban_bank_account.pyi
new file mode 100644
index 00000000..b8c25aba
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/iban_bank_account.pyi
@@ -0,0 +1,3 @@
+from braintree.resource import Resource as Resource
+
+class IbanBankAccount(Resource): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/ids_search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/ids_search.pyi
new file mode 100644
index 00000000..db017bb7
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/ids_search.pyi
@@ -0,0 +1,6 @@
+from typing import Any
+
+from braintree.search import Search as Search
+
+class IdsSearch:
+    ids: Any
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/local_payment.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/local_payment.pyi
new file mode 100644
index 00000000..355a1c48
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/local_payment.pyi
@@ -0,0 +1,3 @@
+from braintree.resource import Resource as Resource
+
+class LocalPayment(Resource): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/local_payment_completed.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/local_payment_completed.pyi
new file mode 100644
index 00000000..ff8e4ca8
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/local_payment_completed.pyi
@@ -0,0 +1,8 @@
+from typing import Any
+
+from braintree.resource import Resource as Resource
+from braintree.transaction import Transaction as Transaction
+
+class LocalPaymentCompleted(Resource):
+    transaction: Any
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/local_payment_reversed.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/local_payment_reversed.pyi
new file mode 100644
index 00000000..be538ec7
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/local_payment_reversed.pyi
@@ -0,0 +1,4 @@
+from braintree.resource import Resource as Resource
+
+class LocalPaymentReversed(Resource):
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/masterpass_card.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/masterpass_card.pyi
new file mode 100644
index 00000000..2921f9b8
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/masterpass_card.pyi
@@ -0,0 +1,13 @@
+from typing import Any
+
+from braintree.address import Address as Address
+from braintree.resource import Resource as Resource
+
+class MasterpassCard(Resource):
+    billing_address: Any
+    subscriptions: Any
+    def __init__(self, gateway, attributes) -> None: ...
+    @property
+    def expiration_date(self): ...
+    @property
+    def masked_number(self): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant.pyi
new file mode 100644
index 00000000..4a80756a
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant.pyi
@@ -0,0 +1,8 @@
+from typing import Any
+
+from braintree.merchant_account import MerchantAccount as MerchantAccount
+from braintree.resource import Resource as Resource
+
+class Merchant(Resource):
+    merchant_accounts: Any
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/__init__.pyi
new file mode 100644
index 00000000..743f6fa8
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/__init__.pyi
@@ -0,0 +1,4 @@
+from braintree.merchant_account.business_details import BusinessDetails as BusinessDetails
+from braintree.merchant_account.funding_details import FundingDetails as FundingDetails
+from braintree.merchant_account.individual_details import IndividualDetails as IndividualDetails
+from braintree.merchant_account.merchant_account import MerchantAccount as MerchantAccount
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/address_details.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/address_details.pyi
new file mode 100644
index 00000000..3a3a9aac
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/address_details.pyi
@@ -0,0 +1,7 @@
+from typing import Any
+
+from braintree.attribute_getter import AttributeGetter as AttributeGetter
+
+class AddressDetails(AttributeGetter):
+    detail_list: Any
+    def __init__(self, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/business_details.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/business_details.pyi
new file mode 100644
index 00000000..d5800497
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/business_details.pyi
@@ -0,0 +1,9 @@
+from typing import Any
+
+from braintree.attribute_getter import AttributeGetter as AttributeGetter
+from braintree.merchant_account.address_details import AddressDetails as AddressDetails
+
+class BusinessDetails(AttributeGetter):
+    detail_list: Any
+    address_details: Any
+    def __init__(self, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/funding_details.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/funding_details.pyi
new file mode 100644
index 00000000..220e767f
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/funding_details.pyi
@@ -0,0 +1,7 @@
+from typing import Any
+
+from braintree.attribute_getter import AttributeGetter as AttributeGetter
+
+class FundingDetails(AttributeGetter):
+    detail_list: Any
+    def __init__(self, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/individual_details.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/individual_details.pyi
new file mode 100644
index 00000000..1f07477a
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/individual_details.pyi
@@ -0,0 +1,9 @@
+from typing import Any
+
+from braintree.attribute_getter import AttributeGetter as AttributeGetter
+from braintree.merchant_account.address_details import AddressDetails as AddressDetails
+
+class IndividualDetails(AttributeGetter):
+    detail_list: Any
+    address_details: Any
+    def __init__(self, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/merchant_account.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/merchant_account.pyi
new file mode 100644
index 00000000..45259e0f
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account/merchant_account.pyi
@@ -0,0 +1,33 @@
+from _typeshed import Incomplete
+from typing import Any
+
+from braintree.configuration import Configuration as Configuration
+from braintree.merchant_account import (
+    BusinessDetails as BusinessDetails,
+    FundingDetails as FundingDetails,
+    IndividualDetails as IndividualDetails,
+)
+from braintree.resource import Resource as Resource
+
+class MerchantAccount(Resource):
+    class Status:
+        Active: str
+        Pending: str
+        Suspended: str
+
+    class FundingDestination:
+        Bank: str
+        Email: str
+        MobilePhone: str
+    FundingDestinations: Any
+    individual_details: Any
+    business_details: Any
+    funding_details: Any
+    master_merchant_account: Any
+    def __init__(self, gateway, attributes) -> None: ...
+    @staticmethod
+    def create(params: Incomplete | None = ...): ...
+    @staticmethod
+    def update(id, attributes): ...
+    @staticmethod
+    def find(id): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account_gateway.pyi
new file mode 100644
index 00000000..1bd64629
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_account_gateway.pyi
@@ -0,0 +1,21 @@
+from _typeshed import Incomplete
+from typing import Any
+
+from braintree.error_result import ErrorResult as ErrorResult
+from braintree.exceptions.not_found_error import NotFoundError as NotFoundError
+from braintree.merchant_account import MerchantAccount as MerchantAccount
+from braintree.paginated_collection import PaginatedCollection as PaginatedCollection
+from braintree.paginated_result import PaginatedResult as PaginatedResult
+from braintree.resource import Resource as Resource
+from braintree.resource_collection import ResourceCollection as ResourceCollection
+from braintree.successful_result import SuccessfulResult as SuccessfulResult
+
+class MerchantAccountGateway:
+    gateway: Any
+    config: Any
+    def __init__(self, gateway) -> None: ...
+    def create(self, params: Incomplete | None = ...): ...
+    def update(self, merchant_account_id, params: Incomplete | None = ...): ...
+    def find(self, merchant_account_id): ...
+    def create_for_currency(self, params: Incomplete | None = ...): ...
+    def all(self): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_gateway.pyi
new file mode 100644
index 00000000..efbbaaf4
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/merchant_gateway.pyi
@@ -0,0 +1,15 @@
+from typing import Any
+
+from braintree.error_result import ErrorResult as ErrorResult
+from braintree.exceptions.not_found_error import NotFoundError as NotFoundError
+from braintree.merchant import Merchant as Merchant
+from braintree.oauth_credentials import OAuthCredentials as OAuthCredentials
+from braintree.resource import Resource as Resource
+from braintree.resource_collection import ResourceCollection as ResourceCollection
+from braintree.successful_result import SuccessfulResult as SuccessfulResult
+
+class MerchantGateway:
+    gateway: Any
+    config: Any
+    def __init__(self, gateway) -> None: ...
+    def create(self, params): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/modification.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/modification.pyi
new file mode 100644
index 00000000..e55fa128
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/modification.pyi
@@ -0,0 +1,7 @@
+from typing import Any
+
+from braintree.resource import Resource as Resource
+
+class Modification(Resource):
+    amount: Any
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/oauth_access_revocation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/oauth_access_revocation.pyi
new file mode 100644
index 00000000..579dccac
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/oauth_access_revocation.pyi
@@ -0,0 +1,4 @@
+from braintree.resource import Resource as Resource
+
+class OAuthAccessRevocation(Resource):
+    def __init__(self, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/oauth_credentials.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/oauth_credentials.pyi
new file mode 100644
index 00000000..487034ad
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/oauth_credentials.pyi
@@ -0,0 +1,3 @@
+from braintree.resource import Resource as Resource
+
+class OAuthCredentials(Resource): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/oauth_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/oauth_gateway.pyi
new file mode 100644
index 00000000..096dfbc7
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/oauth_gateway.pyi
@@ -0,0 +1,15 @@
+from typing import Any
+
+from braintree.error_result import ErrorResult as ErrorResult
+from braintree.exceptions.not_found_error import NotFoundError as NotFoundError
+from braintree.oauth_credentials import OAuthCredentials as OAuthCredentials
+from braintree.successful_result import SuccessfulResult as SuccessfulResult
+
+class OAuthGateway:
+    gateway: Any
+    config: Any
+    def __init__(self, gateway) -> None: ...
+    def create_token_from_code(self, params): ...
+    def create_token_from_refresh_token(self, params): ...
+    def revoke_access_token(self, access_token): ...
+    def connect_url(self, raw_params): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paginated_collection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paginated_collection.pyi
new file mode 100644
index 00000000..3e2182f6
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paginated_collection.pyi
@@ -0,0 +1,8 @@
+from collections.abc import Generator
+from typing import Any
+
+class PaginatedCollection:
+    def __init__(self, method) -> None: ...
+    @property
+    def items(self) -> Generator[Any, None, None]: ...
+    def __iter__(self): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paginated_result.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paginated_result.pyi
new file mode 100644
index 00000000..a7bbb385
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paginated_result.pyi
@@ -0,0 +1,7 @@
+from typing import Any
+
+class PaginatedResult:
+    total_items: Any
+    page_size: Any
+    current_page: Any
+    def __init__(self, total_items, page_size, current_page) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/partner_merchant.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/partner_merchant.pyi
new file mode 100644
index 00000000..06ea8c9f
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/partner_merchant.pyi
@@ -0,0 +1,12 @@
+from typing import Any
+
+from braintree.configuration import Configuration as Configuration
+from braintree.resource import Resource as Resource
+
+class PartnerMerchant(Resource):
+    partner_merchant_id: Any
+    private_key: Any
+    public_key: Any
+    merchant_public_id: Any
+    client_side_encryption_key: Any
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_instrument_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_instrument_type.pyi
new file mode 100644
index 00000000..b19e7d84
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_instrument_type.pyi
@@ -0,0 +1,14 @@
+class PaymentInstrumentType:
+    PayPalAccount: str
+    PayPalHere: str
+    EuropeBankAccount: str
+    CreditCard: str
+    ApplePayCard: str
+    AndroidPayCard: str
+    AmexExpressCheckoutCard: str
+    VenmoAccount: str
+    UsBankAccount: str
+    VisaCheckoutCard: str
+    MasterpassCard: str
+    SamsungPayCard: str
+    LocalPayment: str
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method.pyi
new file mode 100644
index 00000000..c2c35811
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method.pyi
@@ -0,0 +1,23 @@
+from _typeshed import Incomplete
+
+from braintree.address import Address as Address
+from braintree.configuration import Configuration as Configuration
+from braintree.resource import Resource as Resource
+
+class PaymentMethod(Resource):
+    @staticmethod
+    def create(params: Incomplete | None = ...): ...
+    @staticmethod
+    def find(payment_method_token): ...
+    @staticmethod
+    def update(payment_method_token, params): ...
+    @staticmethod
+    def delete(payment_method_token, options: Incomplete | None = ...): ...
+    @staticmethod
+    def create_signature(): ...
+    @staticmethod
+    def signature(type): ...
+    @staticmethod
+    def update_signature(): ...
+    @staticmethod
+    def delete_signature(): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_gateway.pyi
new file mode 100644
index 00000000..f3d71a7b
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_gateway.pyi
@@ -0,0 +1,36 @@
+from _typeshed import Incomplete
+from typing import Any
+
+from braintree.amex_express_checkout_card import AmexExpressCheckoutCard as AmexExpressCheckoutCard
+from braintree.android_pay_card import AndroidPayCard as AndroidPayCard
+from braintree.apple_pay_card import ApplePayCard as ApplePayCard
+from braintree.credit_card import CreditCard as CreditCard
+from braintree.error_result import ErrorResult as ErrorResult
+from braintree.europe_bank_account import EuropeBankAccount as EuropeBankAccount
+from braintree.exceptions.not_found_error import NotFoundError as NotFoundError
+from braintree.ids_search import IdsSearch as IdsSearch
+from braintree.masterpass_card import MasterpassCard as MasterpassCard
+from braintree.payment_method import PaymentMethod as PaymentMethod
+from braintree.payment_method_nonce import PaymentMethodNonce as PaymentMethodNonce
+from braintree.payment_method_parser import parse_payment_method as parse_payment_method
+from braintree.paypal_account import PayPalAccount as PayPalAccount
+from braintree.resource import Resource as Resource
+from braintree.resource_collection import ResourceCollection as ResourceCollection
+from braintree.samsung_pay_card import SamsungPayCard as SamsungPayCard
+from braintree.successful_result import SuccessfulResult as SuccessfulResult
+from braintree.unknown_payment_method import UnknownPaymentMethod as UnknownPaymentMethod
+from braintree.us_bank_account import UsBankAccount as UsBankAccount
+from braintree.venmo_account import VenmoAccount as VenmoAccount
+from braintree.visa_checkout_card import VisaCheckoutCard as VisaCheckoutCard
+
+class PaymentMethodGateway:
+    gateway: Any
+    config: Any
+    def __init__(self, gateway) -> None: ...
+    def create(self, params: Incomplete | None = ...): ...
+    def find(self, payment_method_token): ...
+    def update(self, payment_method_token, params): ...
+    def delete(self, payment_method_token, options: Incomplete | None = ...): ...
+    options: Any
+    def grant(self, payment_method_token, options: Incomplete | None = ...): ...
+    def revoke(self, payment_method_token): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_nonce.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_nonce.pyi
new file mode 100644
index 00000000..35a804c8
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_nonce.pyi
@@ -0,0 +1,16 @@
+from typing import Any
+
+from braintree.bin_data import BinData as BinData
+from braintree.configuration import Configuration as Configuration
+from braintree.resource import Resource as Resource
+from braintree.three_d_secure_info import ThreeDSecureInfo as ThreeDSecureInfo
+
+class PaymentMethodNonce(Resource):
+    @staticmethod
+    def create(payment_method_token, params=...): ...
+    @staticmethod
+    def find(payment_method_nonce): ...
+    three_d_secure_info: Any
+    authentication_insight: Any
+    bin_data: Any
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_nonce_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_nonce_gateway.pyi
new file mode 100644
index 00000000..a8bd929d
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_nonce_gateway.pyi
@@ -0,0 +1,15 @@
+from typing import Any
+
+from braintree.error_result import ErrorResult as ErrorResult
+from braintree.exceptions.not_found_error import NotFoundError as NotFoundError
+from braintree.payment_method_nonce import PaymentMethodNonce as PaymentMethodNonce
+from braintree.resource import Resource as Resource
+from braintree.resource_collection import ResourceCollection as ResourceCollection
+from braintree.successful_result import SuccessfulResult as SuccessfulResult
+
+class PaymentMethodNonceGateway:
+    gateway: Any
+    config: Any
+    def __init__(self, gateway) -> None: ...
+    def create(self, payment_method_token, params=...): ...
+    def find(self, payment_method_nonce): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_parser.pyi
new file mode 100644
index 00000000..17d9fb75
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/payment_method_parser.pyi
@@ -0,0 +1,15 @@
+from braintree.amex_express_checkout_card import AmexExpressCheckoutCard as AmexExpressCheckoutCard
+from braintree.android_pay_card import AndroidPayCard as AndroidPayCard
+from braintree.apple_pay_card import ApplePayCard as ApplePayCard
+from braintree.credit_card import CreditCard as CreditCard
+from braintree.europe_bank_account import EuropeBankAccount as EuropeBankAccount
+from braintree.masterpass_card import MasterpassCard as MasterpassCard
+from braintree.payment_method import PaymentMethod as PaymentMethod
+from braintree.paypal_account import PayPalAccount as PayPalAccount
+from braintree.samsung_pay_card import SamsungPayCard as SamsungPayCard
+from braintree.unknown_payment_method import UnknownPaymentMethod as UnknownPaymentMethod
+from braintree.us_bank_account import UsBankAccount as UsBankAccount
+from braintree.venmo_account import VenmoAccount as VenmoAccount
+from braintree.visa_checkout_card import VisaCheckoutCard as VisaCheckoutCard
+
+def parse_payment_method(gateway, attributes): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paypal_account.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paypal_account.pyi
new file mode 100644
index 00000000..6dccea80
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paypal_account.pyi
@@ -0,0 +1,17 @@
+from _typeshed import Incomplete
+from typing import Any
+
+from braintree.configuration import Configuration as Configuration
+from braintree.resource import Resource as Resource
+
+class PayPalAccount(Resource):
+    @staticmethod
+    def find(paypal_account_token): ...
+    @staticmethod
+    def delete(paypal_account_token): ...
+    @staticmethod
+    def update(paypal_account_token, params: Incomplete | None = ...): ...
+    @staticmethod
+    def signature(): ...
+    subscriptions: Any
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paypal_account_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paypal_account_gateway.pyi
new file mode 100644
index 00000000..8716ac8e
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paypal_account_gateway.pyi
@@ -0,0 +1,16 @@
+from _typeshed import Incomplete
+from typing import Any
+
+from braintree.error_result import ErrorResult as ErrorResult
+from braintree.exceptions.not_found_error import NotFoundError as NotFoundError
+from braintree.paypal_account import PayPalAccount as PayPalAccount
+from braintree.resource import Resource as Resource
+from braintree.successful_result import SuccessfulResult as SuccessfulResult
+
+class PayPalAccountGateway:
+    gateway: Any
+    config: Any
+    def __init__(self, gateway) -> None: ...
+    def find(self, paypal_account_token): ...
+    def delete(self, paypal_account_token): ...
+    def update(self, paypal_account_token, params: Incomplete | None = ...): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paypal_here.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paypal_here.pyi
new file mode 100644
index 00000000..6bfd845a
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/paypal_here.pyi
@@ -0,0 +1,4 @@
+from braintree.resource import Resource as Resource
+
+class PayPalHere(Resource):
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/plan.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/plan.pyi
new file mode 100644
index 00000000..8ff3bcd5
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/plan.pyi
@@ -0,0 +1,15 @@
+from typing import Any
+
+from braintree.add_on import AddOn as AddOn
+from braintree.configuration import Configuration as Configuration
+from braintree.discount import Discount as Discount
+from braintree.resource import Resource as Resource
+from braintree.resource_collection import ResourceCollection as ResourceCollection
+from braintree.util.http import Http as Http
+
+class Plan(Resource):
+    add_ons: Any
+    discounts: Any
+    def __init__(self, gateway, attributes) -> None: ...
+    @staticmethod
+    def all(): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/plan_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/plan_gateway.pyi
new file mode 100644
index 00000000..61b37eb7
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/plan_gateway.pyi
@@ -0,0 +1,14 @@
+from typing import Any
+
+from braintree.error_result import ErrorResult as ErrorResult
+from braintree.exceptions.not_found_error import NotFoundError as NotFoundError
+from braintree.plan import Plan as Plan
+from braintree.resource import Resource as Resource
+from braintree.resource_collection import ResourceCollection as ResourceCollection
+from braintree.successful_result import SuccessfulResult as SuccessfulResult
+
+class PlanGateway:
+    gateway: Any
+    config: Any
+    def __init__(self, gateway) -> None: ...
+    def all(self): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/processor_response_types.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/processor_response_types.pyi
new file mode 100644
index 00000000..d17a9841
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/processor_response_types.pyi
@@ -0,0 +1,4 @@
+class ProcessorResponseTypes:
+    Approved: str
+    SoftDeclined: str
+    HardDeclined: str
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/resource.pyi
new file mode 100644
index 00000000..1bda4975
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/resource.pyi
@@ -0,0 +1,12 @@
+from typing import Any
+
+from braintree.attribute_getter import AttributeGetter as AttributeGetter
+
+text_type = str
+raw_type = bytes
+
+class Resource(AttributeGetter):
+    @staticmethod
+    def verify_keys(params, signature) -> None: ...
+    gateway: Any
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/resource_collection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/resource_collection.pyi
new file mode 100644
index 00000000..6028d17f
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/resource_collection.pyi
@@ -0,0 +1,16 @@
+from collections.abc import Generator
+from typing import Any
+
+from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError
+
+class ResourceCollection:
+    def __init__(self, query, results, method) -> None: ...
+    @property
+    def maximum_size(self): ...
+    @property
+    def first(self): ...
+    @property
+    def items(self) -> Generator[Any, None, None]: ...
+    @property
+    def ids(self): ...
+    def __iter__(self): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/revoked_payment_method_metadata.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/revoked_payment_method_metadata.pyi
new file mode 100644
index 00000000..60358ffb
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/revoked_payment_method_metadata.pyi
@@ -0,0 +1,10 @@
+from typing import Any
+
+from braintree.payment_method_parser import parse_payment_method as parse_payment_method
+from braintree.resource import Resource as Resource
+
+class RevokedPaymentMethodMetadata(Resource):
+    revoked_payment_method: Any
+    customer_id: Any
+    token: Any
+    def __init__(self, gateway, attributes) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/risk_data.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/risk_data.pyi
new file mode 100644
index 00000000..68f5de09
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/risk_data.pyi
@@ -0,0 +1,6 @@
+from _typeshed import Incomplete
+
+from braintree.attribute_getter import AttributeGetter as AttributeGetter
+
+class RiskData(AttributeGetter):
+    def __init__(self, attributes: Incomplete) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/samsung_pay_card.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/samsung_pay_card.pyi
new file mode 100644
index 00000000..ce734bb2
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/samsung_pay_card.pyi
@@ -0,0 +1,13 @@
+from typing import Any
+
+from braintree.address import Address as Address
+from braintree.resource import Resource as Resource
+
+class SamsungPayCard(Resource):
+    billing_address: Any
+    subscriptions: Any
+    def __init__(self, gateway, attributes) -> None: ...
+    @property
+    def expiration_date(self): ...
+    @property
+    def masked_number(self): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/search.pyi
new file mode 100644
index 00000000..14257a8d
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/search.pyi
@@ -0,0 +1,59 @@
+from typing import Any
+
+class Search:
+    class IsNodeBuilder:
+        name: Any
+        def __init__(self, name) -> None: ...
+        def __eq__(self, value): ...
+        def is_equal(self, value): ...
+
+    class EqualityNodeBuilder(IsNodeBuilder):
+        def __ne__(self, value): ...
+        def is_not_equal(self, value): ...
+
+    class KeyValueNodeBuilder:
+        name: Any
+        def __init__(self, name) -> None: ...
+        def __eq__(self, value): ...
+        def is_equal(self, value): ...
+        def __ne__(self, value): ...
+        def is_not_equal(self, value): ...
+
+    class PartialMatchNodeBuilder(EqualityNodeBuilder):
+        def starts_with(self, value): ...
+        def ends_with(self, value): ...
+
+    class EndsWithNodeBuilder:
+        name: Any
+        def __init__(self, name) -> None: ...
+        def ends_with(self, value): ...
+
+    class TextNodeBuilder(PartialMatchNodeBuilder):
+        def contains(self, value): ...
+ + class Node: + name: Any + dict: Any + def __init__(self, name, dict) -> None: ... + def to_param(self): ... + + class MultipleValueNodeBuilder: + name: Any + whitelist: Any + def __init__(self, name, whitelist=...) -> None: ... + def in_list(self, *values): ... + def __eq__(self, value): ... + + class MultipleValueOrTextNodeBuilder(TextNodeBuilder, MultipleValueNodeBuilder): + def __init__(self, name, whitelist=...) -> None: ... + + class RangeNodeBuilder: + name: Any + def __init__(self, name) -> None: ... + def __eq__(self, value): ... + def is_equal(self, value): ... + def __ge__(self, min): ... + def greater_than_or_equal_to(self, min): ... + def __le__(self, max): ... + def less_than_or_equal_to(self, max): ... + def between(self, min, max): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/settlement_batch_summary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/settlement_batch_summary.pyi new file mode 100644 index 00000000..681747e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/settlement_batch_summary.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +from braintree.configuration import Configuration as Configuration +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.resource import Resource as Resource +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.successful_result import SuccessfulResult as SuccessfulResult +from braintree.util.http import Http as Http + +class SettlementBatchSummary(Resource): + @staticmethod + def generate(settlement_date, group_by_custom_field: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/settlement_batch_summary_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/settlement_batch_summary_gateway.pyi new file mode 100644 index 00000000..f1ef2565 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/settlement_batch_summary_gateway.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.error_result import ErrorResult as ErrorResult +from braintree.resource import Resource as Resource +from braintree.settlement_batch_summary import SettlementBatchSummary as SettlementBatchSummary +from braintree.successful_result import SuccessfulResult as SuccessfulResult + +class SettlementBatchSummaryGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def generate(self, settlement_date, group_by_custom_field: Incomplete | None = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/signature_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/signature_service.pyi new file mode 100644 index 00000000..abe5ff8b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/signature_service.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from braintree.util.crypto import Crypto as Crypto + +class SignatureService: + private_key: Any + hmac_hash: Any + def __init__(self, private_key, hashfunc=...) -> None: ... + def sign(self, data): ... + def hash(self, data): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/status_event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/status_event.pyi new file mode 100644 index 00000000..b4110a2f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/status_event.pyi @@ -0,0 +1,7 @@ +from typing import Any + +from braintree.resource import Resource as Resource + +class StatusEvent(Resource): + amount: Any + def __init__(self, gateway, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription.pyi new file mode 100644 index 00000000..5cae727e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.add_on import AddOn as AddOn +from braintree.configuration import Configuration as Configuration +from braintree.descriptor import Descriptor as Descriptor +from braintree.discount import Discount as Discount +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.resource import Resource as Resource +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.subscription_status_event import SubscriptionStatusEvent as SubscriptionStatusEvent +from braintree.successful_result import SuccessfulResult as SuccessfulResult +from braintree.transaction import Transaction as Transaction +from braintree.util.http import Http as Http + +class Subscription(Resource): + class TrialDurationUnit: + Day: str + Month: str + + class Source: + Api: str + ControlPanel: str + Recurring: str + + class Status: + Active: str + Canceled: str + Expired: str + PastDue: str + Pending: str + @staticmethod + def create(params: Incomplete | None = ...): ... + @staticmethod + def create_signature(): ... + @staticmethod + def find(subscription_id): ... + @staticmethod + def retry_charge(subscription_id, amount: Incomplete | None = ..., submit_for_settlement: bool = ...): ... + @staticmethod + def update(subscription_id, params: Incomplete | None = ...): ... + @staticmethod + def cancel(subscription_id): ... + @staticmethod + def search(*query): ... + @staticmethod + def update_signature(): ... 
+ price: Any + balance: Any + next_billing_period_amount: Any + add_ons: Any + descriptor: Any + description: Any + discounts: Any + status_history: Any + transactions: Any + def __init__(self, gateway, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_details.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_details.pyi new file mode 100644 index 00000000..f54a2419 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_details.pyi @@ -0,0 +1,3 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class SubscriptionDetails(AttributeGetter): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_gateway.pyi new file mode 100644 index 00000000..08e9dc80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_gateway.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.resource import Resource as Resource +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.subscription import Subscription as Subscription +from braintree.successful_result import SuccessfulResult as SuccessfulResult +from braintree.transaction import Transaction as Transaction + +class SubscriptionGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def cancel(self, subscription_id): ... + def create(self, params: Incomplete | None = ...): ... + def find(self, subscription_id): ... + def retry_charge(self, subscription_id, amount: Incomplete | None = ..., submit_for_settlement: bool = ...): ... + def search(self, *query): ... + def update(self, subscription_id, params: Incomplete | None = ...): ... 
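For orientation, a minimal sketch of how the Subscription API typed above is typically driven through its static methods (create/find/cancel) together with the SuccessfulResult/ErrorResult pair; the payment method token and plan id are placeholders, and the sketch assumes credentials have already been set up via braintree.Configuration:

import braintree

# Create a subscription from a stored payment method; the token and plan id are illustrative.
result = braintree.Subscription.create({
    "payment_method_token": "the_payment_method_token",
    "plan_id": "the_plan_id",
})
if result.is_success:                   # SuccessfulResult.is_success, per the stub above
    subscription = result.subscription  # the created Subscription resource
    print(subscription.id, subscription.status)
    # The same id can later be passed to Subscription.find() or Subscription.cancel().
    braintree.Subscription.cancel(subscription.id)
else:
    print(result.message)               # ErrorResult carries a human-readable message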
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_search.pyi new file mode 100644 index 00000000..43f7759d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_search.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from braintree import Subscription as Subscription +from braintree.search import Search as Search +from braintree.util import Constants as Constants + +class SubscriptionSearch: + billing_cycles_remaining: Any + created_at: Any + days_past_due: Any + id: Any + ids: Any + in_trial_period: Any + merchant_account_id: Any + next_billing_date: Any + plan_id: Any + price: Any + status: Any + transaction_id: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_status_event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_status_event.pyi new file mode 100644 index 00000000..c649e051 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/subscription_status_event.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from braintree.resource import Resource as Resource + +class SubscriptionStatusEvent(Resource): + balance: Any + price: Any + def __init__(self, gateway, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/successful_result.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/successful_result.pyi new file mode 100644 index 00000000..66fbb74e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/successful_result.pyi @@ -0,0 +1,5 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class SuccessfulResult(AttributeGetter): + @property + def is_success(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/testing_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/testing_gateway.pyi new file mode 100644 index 00000000..262997c5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/testing_gateway.pyi @@ -0,0 +1,17 @@ +from typing import Any + +from braintree.error_result import ErrorResult as ErrorResult +from braintree.successful_result import SuccessfulResult as SuccessfulResult +from braintree.transaction import Transaction as Transaction + +class TestingGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def make_past_due(self, subscription_id, number_of_days_past_due: int = ...) -> None: ... + def escrow_transaction(self, transaction_id) -> None: ... + def settle_transaction(self, transaction_id): ... + def settlement_confirm_transaction(self, transaction_id): ... + def settlement_decline_transaction(self, transaction_id): ... + def settlement_pending_transaction(self, transaction_id): ... + def create_3ds_verification(self, merchant_account_id, params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/three_d_secure_info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/three_d_secure_info.pyi new file mode 100644 index 00000000..8758b974 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/three_d_secure_info.pyi @@ -0,0 +1,3 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class ThreeDSecureInfo(AttributeGetter): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction.pyi new file mode 100644 index 00000000..b0e8ba21 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction.pyi @@ -0,0 +1,185 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.add_on import AddOn as AddOn +from braintree.address import Address as Address +from braintree.amex_express_checkout_card import AmexExpressCheckoutCard as AmexExpressCheckoutCard +from braintree.android_pay_card import AndroidPayCard as AndroidPayCard +from braintree.apple_pay_card import ApplePayCard as ApplePayCard +from braintree.authorization_adjustment import AuthorizationAdjustment as AuthorizationAdjustment +from braintree.configuration import Configuration as Configuration +from braintree.credit_card import CreditCard as CreditCard +from braintree.customer import Customer as Customer +from braintree.descriptor import Descriptor as Descriptor +from braintree.disbursement_detail import DisbursementDetail as DisbursementDetail +from braintree.discount import Discount as Discount +from braintree.dispute import Dispute as Dispute +from braintree.error_result import ErrorResult as ErrorResult +from braintree.europe_bank_account import EuropeBankAccount as EuropeBankAccount +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.facilitated_details import FacilitatedDetails as FacilitatedDetails +from braintree.facilitator_details import FacilitatorDetails as FacilitatorDetails +from braintree.local_payment import LocalPayment as LocalPayment +from braintree.masterpass_card import MasterpassCard as MasterpassCard +from braintree.payment_instrument_type import PaymentInstrumentType as PaymentInstrumentType +from braintree.paypal_account import PayPalAccount as PayPalAccount +from braintree.paypal_here import PayPalHere as PayPalHere +from braintree.resource import Resource as Resource +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.risk_data import RiskData as RiskData +from braintree.samsung_pay_card import SamsungPayCard as SamsungPayCard +from braintree.status_event import StatusEvent as StatusEvent +from braintree.subscription_details import SubscriptionDetails as SubscriptionDetails +from braintree.successful_result import SuccessfulResult as SuccessfulResult +from braintree.three_d_secure_info import ThreeDSecureInfo as ThreeDSecureInfo +from braintree.transaction_line_item import TransactionLineItem as TransactionLineItem +from braintree.us_bank_account import UsBankAccount as UsBankAccount +from braintree.venmo_account import VenmoAccount as VenmoAccount +from braintree.visa_checkout_card import VisaCheckoutCard as VisaCheckoutCard 
+ +class Transaction(Resource): + class CreatedUsing: + FullInformation: str + Token: str + + class GatewayRejectionReason: + ApplicationIncomplete: str + Avs: str + AvsAndCvv: str + Cvv: str + Duplicate: str + Fraud: str + RiskThreshold: str + ThreeDSecure: str + TokenIssuance: str + + class Source: + Api: str + ControlPanel: str + Recurring: str + + class EscrowStatus: + HoldPending: str + Held: str + ReleasePending: str + Released: str + Refunded: str + + class Status: + AuthorizationExpired: str + Authorized: str + Authorizing: str + Failed: str + GatewayRejected: str + ProcessorDeclined: str + Settled: str + SettlementConfirmed: str + SettlementDeclined: str + SettlementFailed: str + SettlementPending: str + Settling: str + SubmittedForSettlement: str + Voided: str + + class Type: + Credit: str + Sale: str + + class IndustryType: + Lodging: str + TravelAndCruise: str + TravelAndFlight: str + + class AdditionalCharge: + Restaurant: str + GiftShop: str + MiniBar: str + Telephone: str + Laundry: str + Other: str + @staticmethod + def adjust_authorization(transaction_id, amount): ... + @staticmethod + def clone_transaction(transaction_id, params): ... + @staticmethod + def cancel_release(transaction_id): ... + @staticmethod + def credit(params: Incomplete | None = ...): ... + @staticmethod + def find(transaction_id): ... + @staticmethod + def hold_in_escrow(transaction_id): ... + @staticmethod + def refund(transaction_id, amount_or_options: Incomplete | None = ...): ... + @staticmethod + def sale(params: Incomplete | None = ...): ... + @staticmethod + def search(*query): ... + @staticmethod + def release_from_escrow(transaction_id): ... + @staticmethod + def submit_for_settlement(transaction_id, amount: Incomplete | None = ..., params: Incomplete | None = ...): ... + @staticmethod + def update_details(transaction_id, params: Incomplete | None = ...): ... + @staticmethod + def void(transaction_id): ... + @staticmethod + def create(params): ... + @staticmethod + def clone_signature(): ... + @staticmethod + def create_signature(): ... + @staticmethod + def submit_for_settlement_signature(): ... + @staticmethod + def update_details_signature(): ... + @staticmethod + def refund_signature(): ... + @staticmethod + def submit_for_partial_settlement(transaction_id, amount, params: Incomplete | None = ...): ... + amount: Any + tax_amount: Any + discount_amount: Any + shipping_amount: Any + billing_details: Any + credit_card_details: Any + paypal_details: Any + paypal_here_details: Any + local_payment_details: Any + europe_bank_account_details: Any + us_bank_account: Any + apple_pay_details: Any + android_pay_card_details: Any + amex_express_checkout_card_details: Any + venmo_account_details: Any + visa_checkout_card_details: Any + masterpass_card_details: Any + samsung_pay_card_details: Any + sca_exemption_requested: Any + customer_details: Any + shipping_details: Any + add_ons: Any + discounts: Any + status_history: Any + subscription_details: Any + descriptor: Any + disbursement_details: Any + disputes: Any + authorization_adjustments: Any + payment_instrument_type: Any + risk_data: Any + three_d_secure_info: Any + facilitated_details: Any + facilitator_details: Any + network_transaction_id: Any + def __init__(self, gateway, attributes) -> None: ... + @property + def vault_billing_address(self): ... + @property + def vault_credit_card(self): ... + @property + def vault_customer(self): ... + @property + def is_disbursed(self): ... + @property + def line_items(self): ... 
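As a companion to the Transaction stub above, a short, hedged sketch of the usual sale-and-settlement flow its static methods describe; the amount and the client-side nonce are placeholders:

import braintree

# One-off sale; "fake-valid-nonce" stands in for a nonce obtained from a client SDK.
result = braintree.Transaction.sale({
    "amount": "10.00",
    "payment_method_nonce": "fake-valid-nonce",
    "options": {"submit_for_settlement": True},
})
if result.is_success:
    transaction = result.transaction
    print(transaction.id, transaction.status)  # e.g. Transaction.Status.SubmittedForSettlement
else:
    # Validation failures surface through ValidationErrorCollection.deep_errors
    # (stubbed further below in this diff).
    for error in result.errors.deep_errors:
        print(error.code, error.message)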
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_amounts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_amounts.pyi new file mode 100644 index 00000000..f6556f48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_amounts.pyi @@ -0,0 +1,5 @@ +class TransactionAmounts: + Authorize: str + Decline: str + HardDecline: str + Fail: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_details.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_details.pyi new file mode 100644 index 00000000..ceb5b4b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_details.pyi @@ -0,0 +1,7 @@ +from typing import Any + +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class TransactionDetails(AttributeGetter): + amount: Any + def __init__(self, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_gateway.pyi new file mode 100644 index 00000000..63b5c913 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_gateway.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.exceptions.request_timeout_error import RequestTimeoutError as RequestTimeoutError +from braintree.resource import Resource as Resource +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.successful_result import SuccessfulResult as SuccessfulResult +from braintree.transaction import Transaction as Transaction + +class TransactionGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def adjust_authorization(self, transaction_id, amount): ... + def clone_transaction(self, transaction_id, params): ... + def cancel_release(self, transaction_id): ... + def create(self, params): ... + def credit(self, params): ... + def find(self, transaction_id): ... + def hold_in_escrow(self, transaction_id): ... + def refund(self, transaction_id, amount_or_options: Incomplete | None = ...): ... + def sale(self, params): ... + def search(self, *query): ... + def release_from_escrow(self, transaction_id): ... + def submit_for_settlement(self, transaction_id, amount: Incomplete | None = ..., params: Incomplete | None = ...): ... + def update_details(self, transaction_id, params: Incomplete | None = ...): ... + def submit_for_partial_settlement(self, transaction_id, amount, params: Incomplete | None = ...): ... + def void(self, transaction_id): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_line_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_line_item.pyi new file mode 100644 index 00000000..11cd731f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_line_item.pyi @@ -0,0 +1,11 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter +from braintree.configuration import Configuration as Configuration +from braintree.resource import Resource as Resource + +class TransactionLineItem(AttributeGetter): + class Kind: + Credit: str + Debit: str + def __init__(self, attributes) -> None: ... + @staticmethod + def find_all(transaction_id): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_line_item_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_line_item_gateway.pyi new file mode 100644 index 00000000..8eca1b10 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_line_item_gateway.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.exceptions.request_timeout_error import RequestTimeoutError as RequestTimeoutError +from braintree.resource import Resource as Resource +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.transaction_line_item import TransactionLineItem as TransactionLineItem + +class TransactionLineItemGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def find_all(self, transaction_id): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_search.pyi new file mode 100644 index 00000000..439cc9f7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/transaction_search.pyi @@ -0,0 +1,73 @@ +from typing import Any + +from braintree.credit_card import CreditCard as CreditCard +from braintree.search import Search as Search +from braintree.transaction import Transaction as Transaction +from braintree.util import Constants as Constants + +class TransactionSearch: + billing_first_name: Any + billing_company: Any + billing_country_name: Any + billing_extended_address: Any + billing_last_name: Any + billing_locality: Any + billing_postal_code: Any + billing_region: Any + billing_street_address: Any + credit_card_cardholder_name: Any + currency: Any + customer_company: Any + customer_email: Any + customer_fax: Any + customer_first_name: Any + customer_id: Any + customer_last_name: Any + customer_phone: Any + customer_website: Any + id: Any + order_id: Any + payment_method_token: Any + processor_authorization_code: Any + europe_bank_account_iban: Any + settlement_batch_id: Any + shipping_company: Any + shipping_country_name: Any + shipping_extended_address: Any + shipping_first_name: Any + shipping_last_name: Any + shipping_locality: Any + shipping_postal_code: Any + shipping_region: Any + shipping_street_address: Any + paypal_payer_email: Any + paypal_payment_id: Any + paypal_authorization_id: Any + credit_card_unique_identifier: Any + store_id: Any + credit_card_expiration_date: Any + credit_card_number: Any + user: Any + ids: Any + merchant_account_id: Any + payment_instrument_type: Any + store_ids: Any + created_using: Any + credit_card_card_type: Any + credit_card_customer_location: Any + source: Any + status: Any + type: Any + refund: Any + amount: Any + authorization_expired_at: Any + authorized_at: Any + created_at: Any + disbursement_date: Any + dispute_date: Any + failed_at: Any + gateway_rejected_at: Any + processor_declined_at: Any + settled_at: Any + submitted_for_settlement_at: Any + voided_at: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/unknown_payment_method.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/unknown_payment_method.pyi new file mode 100644 index 00000000..b683e6ee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/unknown_payment_method.pyi @@ -0,0 +1,4 @@ +from braintree.resource import Resource as Resource + +class UnknownPaymentMethod(Resource): + def image_url(self): ... 
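The TransactionSearch attributes above are Search node builders (the text, range, and multiple-value nodes defined in search.pyi earlier in this diff); a minimal sketch of how they compose into a query, with the amounts and status purely illustrative:

import braintree

collection = braintree.Transaction.search(
    braintree.TransactionSearch.amount.between("100.00", "500.00"),       # RangeNodeBuilder.between
    braintree.TransactionSearch.status == braintree.Transaction.Status.Settled,
    braintree.TransactionSearch.customer_email.contains("@example.com"),  # TextNodeBuilder.contains
)
print(collection.maximum_size)        # ResourceCollection, stubbed earlier in this diff
for transaction in collection.items:  # pages through matching transactions
    print(transaction.id, transaction.amount)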
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account.pyi new file mode 100644 index 00000000..31d005cd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account.pyi @@ -0,0 +1,17 @@ +from typing import Any + +from braintree.ach_mandate import AchMandate as AchMandate +from braintree.configuration import Configuration as Configuration +from braintree.resource import Resource as Resource +from braintree.us_bank_account_verification import UsBankAccountVerification as UsBankAccountVerification + +class UsBankAccount(Resource): + @staticmethod + def find(token): ... + @staticmethod + def sale(token, transactionRequest): ... + @staticmethod + def signature(): ... + ach_mandate: Any + verifications: Any + def __init__(self, gateway, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_gateway.pyi new file mode 100644 index 00000000..665b5714 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_gateway.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.us_bank_account import UsBankAccount as UsBankAccount + +class UsBankAccountGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def find(self, us_bank_account_token): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_verification.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_verification.pyi new file mode 100644 index 00000000..517d5ec4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_verification.pyi @@ -0,0 +1,28 @@ +from typing import Any + +from braintree.attribute_getter import AttributeGetter as AttributeGetter +from braintree.configuration import Configuration as Configuration + +class UsBankAccountVerification(AttributeGetter): + class Status: + Failed: str + GatewayRejected: str + ProcessorDeclined: str + Unrecognized: str + Verified: str + Pending: str + + class VerificationMethod: + NetworkCheck: str + IndependentCheck: str + TokenizedCheck: str + MicroTransfers: str + us_bank_account: Any + def __init__(self, gateway, attributes) -> None: ... + @staticmethod + def confirm_micro_transfer_amounts(verification_id, amounts): ... + @staticmethod + def find(verification_id): ... + @staticmethod + def search(*query): ... + def __eq__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_verification_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_verification_gateway.pyi new file mode 100644 index 00000000..1f420f98 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_verification_gateway.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from braintree.error_result import ErrorResult as ErrorResult +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.resource_collection import ResourceCollection as ResourceCollection +from braintree.successful_result import SuccessfulResult as SuccessfulResult +from braintree.us_bank_account_verification import UsBankAccountVerification as UsBankAccountVerification +from braintree.us_bank_account_verification_search import UsBankAccountVerificationSearch as UsBankAccountVerificationSearch + +class UsBankAccountVerificationGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def confirm_micro_transfer_amounts(self, verification_id, amounts): ... + def find(self, verification_id): ... + def search(self, *query): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_verification_search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_verification_search.pyi new file mode 100644 index 00000000..3b937177 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/us_bank_account_verification_search.pyi @@ -0,0 +1,20 @@ +from typing import Any + +from braintree.search import Search as Search +from braintree.us_bank_account import UsBankAccount as UsBankAccount +from braintree.us_bank_account_verification import UsBankAccountVerification as UsBankAccountVerification +from braintree.util import Constants as Constants + +class UsBankAccountVerificationSearch: + account_holder_name: Any + customer_email: Any + customer_id: Any + id: Any + payment_method_token: Any + routing_number: Any + ids: Any + status: Any + verification_method: Any + created_at: Any + account_type: Any + account_number: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/__init__.pyi new file mode 100644 index 00000000..974d7141 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/__init__.pyi @@ -0,0 +1,7 @@ +from braintree.util.constants import Constants as Constants +from braintree.util.crypto import Crypto as Crypto +from braintree.util.generator import Generator as Generator +from braintree.util.graphql_client import GraphQLClient as GraphQLClient +from braintree.util.http import Http as Http +from braintree.util.parser import Parser as Parser +from braintree.util.xml_util import XmlUtil as XmlUtil diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/constants.pyi new file mode 100644 index 00000000..f46d366a --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/constants.pyi @@ -0,0 +1,3 @@ +class Constants: + @staticmethod + def get_all_constant_values_from_class(klass): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/crypto.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/crypto.pyi new file mode 100644 index 00000000..6ea4a83d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/crypto.pyi @@ -0,0 +1,9 @@ +text_type = str + +class Crypto: + @staticmethod + def sha1_hmac_hash(secret_key, content): ... + @staticmethod + def sha256_hmac_hash(secret_key, content): ... + @staticmethod + def secure_compare(left, right): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/datetime_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/datetime_parser.pyi new file mode 100644 index 00000000..eb12ffa4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/datetime_parser.pyi @@ -0,0 +1 @@ +def parse_datetime(timestamp): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/generator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/generator.pyi new file mode 100644 index 00000000..5768acfc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/generator.pyi @@ -0,0 +1,10 @@ +from typing import Any + +integer_types = int +text_type = str +binary_type = bytes + +class Generator: + dict: Any + def __init__(self, dict) -> None: ... + def generate(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/graphql_client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/graphql_client.pyi new file mode 100644 index 00000000..ec47e43f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/graphql_client.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.exceptions.authentication_error import AuthenticationError as AuthenticationError +from braintree.exceptions.authorization_error import AuthorizationError as AuthorizationError +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.exceptions.server_error import ServerError as ServerError +from braintree.exceptions.service_unavailable_error import ServiceUnavailableError as ServiceUnavailableError +from braintree.exceptions.too_many_requests_error import TooManyRequestsError as TooManyRequestsError +from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError +from braintree.exceptions.upgrade_required_error import UpgradeRequiredError as UpgradeRequiredError +from braintree.util.http import Http as Http + +class GraphQLClient(Http): + @staticmethod + def raise_exception_for_graphql_error(response) -> None: ... + graphql_headers: Any + def __init__(self, config: Incomplete | None = ..., environment: Incomplete | None = ...) -> None: ... 
+ def query(self, definition, variables: Incomplete | None = ..., operation_name: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/http.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/http.pyi new file mode 100644 index 00000000..c782212c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/http.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree import version as version +from braintree.environment import Environment as Environment +from braintree.exceptions.authentication_error import AuthenticationError as AuthenticationError +from braintree.exceptions.authorization_error import AuthorizationError as AuthorizationError +from braintree.exceptions.gateway_timeout_error import GatewayTimeoutError as GatewayTimeoutError +from braintree.exceptions.http.connection_error import ConnectionError as ConnectionError +from braintree.exceptions.http.invalid_response_error import InvalidResponseError as InvalidResponseError +from braintree.exceptions.http.timeout_error import ( + ConnectTimeoutError as ConnectTimeoutError, + ReadTimeoutError as ReadTimeoutError, + TimeoutError as TimeoutError, +) +from braintree.exceptions.not_found_error import NotFoundError as NotFoundError +from braintree.exceptions.request_timeout_error import RequestTimeoutError as RequestTimeoutError +from braintree.exceptions.server_error import ServerError as ServerError +from braintree.exceptions.service_unavailable_error import ServiceUnavailableError as ServiceUnavailableError +from braintree.exceptions.too_many_requests_error import TooManyRequestsError as TooManyRequestsError +from braintree.exceptions.unexpected_error import UnexpectedError as UnexpectedError +from braintree.exceptions.upgrade_required_error import UpgradeRequiredError as UpgradeRequiredError +from braintree.util.xml_util import XmlUtil as XmlUtil + +class Http: + class ContentType: + Xml: str + Multipart: str + Json: str + @staticmethod + def is_error_status(status): ... + @staticmethod + def raise_exception_from_status(status, message: Incomplete | None = ...) -> None: ... + config: Any + environment: Any + def __init__(self, config, environment: Incomplete | None = ...) -> None: ... + def post(self, path, params: Incomplete | None = ...): ... + def delete(self, path): ... + def get(self, path): ... + def put(self, path, params: Incomplete | None = ...): ... + def post_multipart(self, path, files, params: Incomplete | None = ...): ... + def http_do(self, http_verb, path, headers, request_body): ... + def handle_exception(self, exception) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/parser.pyi new file mode 100644 index 00000000..04f71d12 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/parser.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from braintree.util.datetime_parser import parse_datetime as parse_datetime + +binary_type = bytes + +class Parser: + doc: Any + def __init__(self, xml) -> None: ... + def parse(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/xml_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/xml_util.pyi new file mode 100644 index 00000000..d288665a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/util/xml_util.pyi @@ -0,0 +1,8 @@ +from braintree.util.generator import Generator as Generator +from braintree.util.parser import Parser as Parser + +class XmlUtil: + @staticmethod + def xml_from_dict(dict): ... + @staticmethod + def dict_from_xml(xml): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/validation_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/validation_error.pyi new file mode 100644 index 00000000..04254c72 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/validation_error.pyi @@ -0,0 +1,3 @@ +from braintree.attribute_getter import AttributeGetter as AttributeGetter + +class ValidationError(AttributeGetter): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/validation_error_collection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/validation_error_collection.pyi new file mode 100644 index 00000000..268cd046 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/validation_error_collection.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.validation_error import ValidationError as ValidationError + +class ValidationErrorCollection: + data: Any + def __init__(self, data: Incomplete | None = ...) -> None: ... + @property + def deep_errors(self): ... + def for_index(self, index): ... + def for_object(self, nested_key): ... + def on(self, attribute): ... + @property + def deep_size(self): ... + @property + def errors(self): ... + @property + def size(self): ... + def __getitem__(self, index): ... + def __len__(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/venmo_account.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/venmo_account.pyi new file mode 100644 index 00000000..8d4fe93d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/venmo_account.pyi @@ -0,0 +1,7 @@ +from typing import Any + +from braintree.resource import Resource as Resource + +class VenmoAccount(Resource): + subscriptions: Any + def __init__(self, gateway, attributes) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/version.pyi new file mode 100644 index 00000000..935fb36a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/version.pyi @@ -0,0 +1 @@ +Version: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/visa_checkout_card.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/visa_checkout_card.pyi new file mode 100644 index 00000000..6e8f37d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/visa_checkout_card.pyi @@ -0,0 +1,15 @@ +from typing import Any + +from braintree.address import Address as Address +from braintree.credit_card_verification import CreditCardVerification as CreditCardVerification +from braintree.resource import Resource as Resource + +class VisaCheckoutCard(Resource): + billing_address: Any + subscriptions: Any + verification: Any + def __init__(self, gateway, attributes): ... + @property + def expiration_date(self): ... + @property + def masked_number(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_notification.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_notification.pyi new file mode 100644 index 00000000..34de0f77 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_notification.pyi @@ -0,0 +1,83 @@ +from typing import Any + +from braintree.account_updater_daily_report import AccountUpdaterDailyReport as AccountUpdaterDailyReport +from braintree.configuration import Configuration as Configuration +from braintree.connected_merchant_paypal_status_changed import ( + ConnectedMerchantPayPalStatusChanged as ConnectedMerchantPayPalStatusChanged, +) +from braintree.connected_merchant_status_transitioned import ( + ConnectedMerchantStatusTransitioned as ConnectedMerchantStatusTransitioned, +) +from braintree.disbursement import Disbursement as Disbursement +from braintree.dispute import Dispute as Dispute +from braintree.error_result import ErrorResult as ErrorResult +from braintree.granted_payment_instrument_update import GrantedPaymentInstrumentUpdate as GrantedPaymentInstrumentUpdate +from braintree.local_payment_completed import LocalPaymentCompleted as LocalPaymentCompleted +from braintree.local_payment_reversed import LocalPaymentReversed as LocalPaymentReversed +from braintree.merchant_account import MerchantAccount as MerchantAccount +from braintree.oauth_access_revocation import OAuthAccessRevocation as OAuthAccessRevocation +from braintree.partner_merchant import PartnerMerchant as PartnerMerchant +from braintree.resource import Resource as Resource +from braintree.revoked_payment_method_metadata import RevokedPaymentMethodMetadata as RevokedPaymentMethodMetadata +from braintree.subscription import Subscription as Subscription +from braintree.transaction import Transaction as Transaction +from braintree.validation_error_collection import ValidationErrorCollection as ValidationErrorCollection + +class WebhookNotification(Resource): + class Kind: + AccountUpdaterDailyReport: str + Check: str + 
ConnectedMerchantPayPalStatusChanged: str + ConnectedMerchantStatusTransitioned: str + Disbursement: str + DisbursementException: str + DisputeAccepted: str + DisputeDisputed: str + DisputeExpired: str + DisputeLost: str + DisputeOpened: str + DisputeWon: str + GrantedPaymentMethodRevoked: str + GrantorUpdatedGrantedPaymentMethod: str + LocalPaymentCompleted: str + LocalPaymentReversed: str + OAuthAccessRevoked: str + PartnerMerchantConnected: str + PartnerMerchantDeclined: str + PartnerMerchantDisconnected: str + PaymentMethodRevokedByCustomer: str + RecipientUpdatedGrantedPaymentMethod: str + SubMerchantAccountApproved: str + SubMerchantAccountDeclined: str + SubscriptionCanceled: str + SubscriptionChargedSuccessfully: str + SubscriptionChargedUnsuccessfully: str + SubscriptionExpired: str + SubscriptionTrialEnded: str + SubscriptionWentActive: str + SubscriptionWentPastDue: str + TransactionDisbursed: str + TransactionSettled: str + TransactionSettlementDeclined: str + @staticmethod + def parse(signature, payload): ... + @staticmethod + def verify(challenge): ... + source_merchant_id: Any + subscription: Any + merchant_account: Any + transaction: Any + connected_merchant_status_transitioned: Any + connected_merchant_paypal_status_changed: Any + partner_merchant: Any + oauth_access_revocation: Any + disbursement: Any + dispute: Any + account_updater_daily_report: Any + granted_payment_instrument_update: Any + revoked_payment_method_metadata: Any + local_payment_completed: Any + local_payment_reversed: Any + errors: Any + message: Any + def __init__(self, gateway, attributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_notification_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_notification_gateway.pyi new file mode 100644 index 00000000..9f26304a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_notification_gateway.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from braintree.exceptions.invalid_challenge_error import InvalidChallengeError as InvalidChallengeError +from braintree.exceptions.invalid_signature_error import InvalidSignatureError as InvalidSignatureError +from braintree.util.crypto import Crypto as Crypto +from braintree.util.xml_util import XmlUtil as XmlUtil +from braintree.webhook_notification import WebhookNotification as WebhookNotification + +text_type = str + +class WebhookNotificationGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def parse(self, signature, payload): ... + def verify(self, challenge): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_testing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_testing.pyi new file mode 100644 index 00000000..78fa449c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_testing.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +from braintree.configuration import Configuration as Configuration + +class WebhookTesting: + @staticmethod + def sample_notification(kind, id, source_merchant_id: Incomplete | None = ...): ... 
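A hedged sketch of how the webhook pieces typed above fit together: WebhookTesting.sample_notification fabricates a signed (signature, payload) pair for local testing, and WebhookNotification.parse verifies and decodes it. The kind and id are arbitrary examples, and the kind attribute read below is populated at runtime from the decoded payload rather than listed explicitly in the stub:

import braintree

# Build a sample signature/payload pair; in a real endpoint both values
# would come from the POSTed form data instead.
sample = braintree.WebhookTesting.sample_notification(
    braintree.WebhookNotification.Kind.SubscriptionWentPastDue, "my_subscription_id"
)
notification = braintree.WebhookNotification.parse(sample["bt_signature"], sample["bt_payload"])
if notification.kind == braintree.WebhookNotification.Kind.SubscriptionWentPastDue:
    print(notification.subscription.id)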
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_testing_gateway.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_testing_gateway.pyi new file mode 100644 index 00000000..73c71e93 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/braintree/braintree/webhook_testing_gateway.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from typing import Any + +from braintree.util.crypto import Crypto as Crypto +from braintree.webhook_notification import WebhookNotification as WebhookNotification + +class WebhookTestingGateway: + gateway: Any + config: Any + def __init__(self, gateway) -> None: ... + def sample_notification(self, kind, id, source_merchant_id: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..077ad455 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/@tests/stubtest_allowlist.txt @@ -0,0 +1,22 @@ +cachetools.Cache.get + +# stubs omit defaulted arguments that are meant to be optimizations, not provided by user +cachetools.FIFOCache.__delitem__ +cachetools.FIFOCache.__setitem__ +cachetools.LFUCache.__delitem__ +cachetools.LFUCache.__getitem__ +cachetools.LFUCache.__setitem__ +cachetools.LRUCache.__delitem__ +cachetools.LRUCache.__getitem__ +cachetools.LRUCache.__setitem__ +cachetools.MRUCache.__delitem__ +cachetools.MRUCache.__getitem__ +cachetools.MRUCache.__setitem__ +cachetools.TLRUCache.__delitem__ +cachetools.TLRUCache.__getitem__ +cachetools.TLRUCache.__setitem__ +cachetools.TTLCache.__delitem__ +cachetools.TTLCache.__getitem__ +cachetools.TTLCache.__setitem__ +cachetools._TimedCache.__len__ +cachetools._TimedCache.__repr__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/METADATA.toml new file mode 100644 index 00000000..d7cde2e5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/METADATA.toml @@ -0,0 +1 @@ +version = "5.3.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/cachetools/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/cachetools/__init__.pyi new file mode 100644 index 00000000..06fcc508 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/cachetools/__init__.pyi @@ -0,0 +1,109 @@ +from _typeshed import IdentityFunction +from collections.abc import Callable, Iterator, MutableMapping, Sequence +from contextlib import AbstractContextManager +from typing import Any, Generic, TypeVar, overload + +__all__ = ("Cache", "FIFOCache", "LFUCache", "LRUCache", "MRUCache", "RRCache", "TLRUCache", "TTLCache", "cached", "cachedmethod") +__version__: str + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_T = TypeVar("_T") + +class Cache(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + @overload + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float]) -> None: ... + @overload + def __init__(self, maxsize: float, getsizeof: None = ...) 
-> None: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __missing__(self, key: _KT) -> _VT: ... + def __iter__(self) -> Iterator[_KT]: ... + def __len__(self) -> int: ... + @overload # type: ignore[override] + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T) -> _VT | _T: ... + def setdefault(self, key: _KT, default: _VT | None = ...) -> _VT: ... + @property + def maxsize(self) -> float: ... + @property + def currsize(self) -> float: ... + @staticmethod + def getsizeof(value: _VT) -> float: ... + +class FIFOCache(Cache[_KT, _VT]): ... +class LFUCache(Cache[_KT, _VT]): ... +class LRUCache(Cache[_KT, _VT]): ... +class MRUCache(Cache[_KT, _VT]): ... + +class RRCache(Cache[_KT, _VT]): + @overload + def __init__(self, maxsize: float, choice: None = ..., getsizeof: None = ...) -> None: ... + @overload + def __init__(self, maxsize: float, *, getsizeof: Callable[[_VT], float]) -> None: ... + @overload + def __init__(self, maxsize: float, choice: None, getsizeof: Callable[[_VT], float]) -> None: ... + @overload + def __init__(self, maxsize: float, choice: Callable[[Sequence[_KT]], _KT], getsizeof: None = ...) -> None: ... + @overload + def __init__(self, maxsize: float, choice: Callable[[Sequence[_KT]], _KT], getsizeof: Callable[[_VT], float]) -> None: ... + @property + def choice(self) -> Callable[[Sequence[_KT]], _KT]: ... + +class _TimedCache(Cache[_KT, _VT]): + @overload + def __init__(self, maxsize: float, timer: Callable[[], float] = ..., getsizeof: None = ...) -> None: ... + @overload + def __init__(self, maxsize: float, timer: Callable[[], float], getsizeof: Callable[[_VT], float]) -> None: ... + @overload + def __init__(self, maxsize: float, timer: Callable[[], float] = ..., *, getsizeof: Callable[[_VT], float]) -> None: ... + @property + def currsize(self) -> float: ... + + class _Timer: + def __init__(self, timer: Callable[[], float]) -> None: ... + def __call__(self) -> float: ... + def __enter__(self) -> float: ... + def __exit__(self, *exc: object) -> None: ... + + @property + def timer(self) -> _Timer: ... + +class TTLCache(_TimedCache[_KT, _VT]): + @overload + def __init__(self, maxsize: float, ttl: float, timer: Callable[[], float] = ..., getsizeof: None = ...) -> None: ... + @overload + def __init__(self, maxsize: float, ttl: float, timer: Callable[[], float], getsizeof: Callable[[_VT], float]) -> None: ... + @overload + def __init__( + self, maxsize: float, ttl: float, timer: Callable[[], float] = ..., *, getsizeof: Callable[[_VT], float] + ) -> None: ... + @property + def ttl(self) -> float: ... + def expire(self, time: float | None = ...) -> None: ... + +class TLRUCache(_TimedCache[_KT, _VT]): + def __init__( + self, + maxsize: float, + ttu: Callable[[_KT, _VT, float], float], + timer: Callable[[], float] = ..., + getsizeof: Callable[[_VT], float] | None = ..., + ) -> None: ... + @property + def ttu(self) -> Callable[[_KT, _VT, float], float]: ... + def expire(self, time: float | None = ...) -> None: ... + +def cached( + cache: MutableMapping[_KT, Any] | None, + key: Callable[..., _KT] = ..., + lock: AbstractContextManager[Any] | None = None, + info: bool = False, +) -> IdentityFunction: ... +def cachedmethod( + cache: Callable[[Any], MutableMapping[_KT, Any] | None], + key: Callable[..., _KT] = ..., + lock: Callable[[Any], AbstractContextManager[Any]] | None = ..., +) -> IdentityFunction: ... 
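Since the cachetools stub above is heavily generic and overloaded, a small type-checked usage sketch may help; the maxsize, ttl, and fib function are arbitrary. The default key function builds tuple keys, so the cache is parametrized with tuple[Hashable, ...]:

from collections.abc import Hashable

from cachetools import TTLCache, cached

# TTLCache[_KT, _VT] per the stub; keys are the tuples produced by the default hashkey().
fib_cache: TTLCache[tuple[Hashable, ...], int] = TTLCache(maxsize=128, ttl=300.0)

@cached(fib_cache)  # cached() is typed as returning an IdentityFunction, so fib keeps its signature
def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)

print(fib(30))  # 832040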
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/cachetools/func.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/cachetools/func.pyi new file mode 100644 index 00000000..4d223840 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/cachetools/func.pyi @@ -0,0 +1,17 @@ +from _typeshed import IdentityFunction +from collections.abc import Callable, Sequence +from typing import TypeVar + +__all__ = ("fifo_cache", "lfu_cache", "lru_cache", "mru_cache", "rr_cache", "ttl_cache") +_T = TypeVar("_T") + +def fifo_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def lfu_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def lru_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def mru_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def rr_cache( + maxsize: float | None = ..., choice: Callable[[Sequence[_T]], _T] | None = ..., typed: bool = ... +) -> IdentityFunction: ... +def ttl_cache( + maxsize: float | None = ..., ttl: float = ..., timer: Callable[[], float] = ..., typed: bool = ... +) -> IdentityFunction: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/cachetools/keys.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/cachetools/keys.pyi new file mode 100644 index 00000000..9b48a37e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cachetools/cachetools/keys.pyi @@ -0,0 +1,8 @@ +from _typeshed import Unused +from collections.abc import Hashable + +__all__ = ("hashkey", "methodkey", "typedkey") + +def hashkey(*args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ... +def methodkey(self: Unused, *args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ... +def typedkey(*args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ... 
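# Illustrative usage sketch (not part of the vendored typeshed data): a minimal
# example of the cachetools API typed by the stubs above (TTLCache, cached, and
# cachetools.keys.hashkey). The function name `lookup` and the maxsize/ttl
# values are assumptions chosen for illustration only.
from cachetools import TTLCache, cached
from cachetools.keys import hashkey

# Keep at most 256 results, each for 30 seconds.
_results = TTLCache(maxsize=256, ttl=30)

@cached(cache=_results, key=hashkey)
def lookup(name: str) -> str:
    # Stand-in for an expensive computation.
    return name.upper()

lookup("spam")  # computed and stored
lookup("spam")  # served from the cache until the 30-second TTL expires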
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..293f7da8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/@tests/stubtest_allowlist.txt @@ -0,0 +1,13 @@ +# **kwargs replaced with actual arguments in stubs +caldav.DAVClient.calendar +caldav.DAVClient.principal +caldav.davclient.DAVClient.calendar +caldav.davclient.DAVClient.principal + +# Initialized in class, but immediately overwritten in __init__ +caldav.DAVClient.url +caldav.davclient.DAVClient.url +caldav.davclient.DAVResponse.headers +caldav.elements.base.BaseElement.children + +.*.findprop diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/METADATA.toml new file mode 100644 index 00000000..86bc0d79 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/METADATA.toml @@ -0,0 +1,6 @@ +version = "1.0.*" +# also types-lxml and types-icalendar when those stubs are added +requires = ["types-requests", "types-vobject"] + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/__init__.pyi new file mode 100644 index 00000000..2acc1c3b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/__init__.pyi @@ -0,0 +1,4 @@ +from .davclient import DAVClient as DAVClient +from .objects import * + +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/davclient.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/davclient.pyi new file mode 100644 index 00000000..002bd44f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/davclient.pyi @@ -0,0 +1,76 @@ +from collections.abc import Iterable, Mapping +from typing import Any +from typing_extensions import Self, TypeAlias +from urllib.parse import ParseResult, SplitResult + +from requests.auth import AuthBase +from requests.models import Response +from requests.sessions import _Timeout +from requests.structures import CaseInsensitiveDict + +from .lib.url import URL +from .objects import Calendar, DAVObject, Principal + +_Element: TypeAlias = Any # actually lxml.etree._Element + +class DAVResponse: + reason: str + tree: _Element | None + status: int + headers: CaseInsensitiveDict[str] + objects: dict[str, dict[str, str]] # only defined after call to find_objects_and_props() + def __init__(self, response: Response) -> None: ... + @property + def raw(self) -> str: ... + def validate_status(self, status: str) -> None: ... + def find_objects_and_props(self) -> None: ... + def expand_simple_props( + self, props: Iterable[Any] = ..., multi_value_props: Iterable[Any] = ..., xpath: str | None = ... + ) -> dict[str, dict[str, str]]: ... 
+ +class DAVClient: + proxy: str | None + url: URL + headers: dict[str, str] + username: str | None + password: str | None + auth: AuthBase | None + timeout: _Timeout | None + ssl_verify_cert: bool | str + ssl_cert: str | tuple[str, str] | None + def __init__( + self, + url: str, + proxy: str | None = ..., + username: str | None = ..., + password: str | None = ..., + auth: AuthBase | None = ..., + timeout: _Timeout | None = ..., + ssl_verify_cert: bool | str = ..., + ssl_cert: str | tuple[str, str] | None = ..., + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: object, exc_value: object, traceback: object) -> None: ... + def principal(self, *, url: str | ParseResult | SplitResult | URL | None = ...) -> Principal: ... + def calendar( + self, + url: str | ParseResult | SplitResult | URL | None = ..., + parent: DAVObject | None = ..., + name: str | None = ..., + id: str | None = ..., + props: Mapping[Any, Any] = ..., + **extra: Any, + ) -> Calendar: ... + def check_dav_support(self) -> str | None: ... + def check_cdav_support(self) -> bool: ... + def check_scheduling_support(self) -> bool: ... + def propfind(self, url: str | None = ..., props: str = ..., depth: int = ...) -> DAVResponse: ... + def proppatch(self, url: str, body: str, dummy: None = ...) -> DAVResponse: ... + def report(self, url: str, query: str = ..., depth: int = ...) -> DAVResponse: ... + def mkcol(self, url: str, body: str, dummy: None = ...) -> DAVResponse: ... + def mkcalendar(self, url: str, body: str = ..., dummy: None = ...) -> DAVResponse: ... + def put(self, url: str, body: str, headers: Mapping[str, str] = ...) -> DAVResponse: ... + def post(self, url: str, body: str, headers: Mapping[str, str] = ...) -> DAVResponse: ... + def delete(self, url: str) -> DAVResponse: ... + def options(self, url: str) -> DAVResponse: ... + def request(self, url: str, method: str = ..., body: str = ..., headers: Mapping[str, str] = ...) -> DAVResponse: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/base.pyi new file mode 100644 index 00000000..c55dbf7d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/base.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import Any, ClassVar +from typing_extensions import Self, TypeAlias + +_Element: TypeAlias = Any # actually lxml.etree._Element + +class BaseElement: + tag: ClassVar[str | None] + children: list[BaseElement] + value: str | None + attributes: Incomplete | None + caldav_class: Incomplete | None + def __init__(self, name: str | None = ..., value: str | bytes | None = ...) -> None: ... + def __add__(self, other: BaseElement) -> Self: ... + def xmlelement(self) -> _Element: ... + def xmlchildren(self, root: _Element) -> None: ... + def append(self, element: BaseElement | Iterable[BaseElement]) -> Self: ... + +class NamedBaseElement(BaseElement): + def __init__(self, name: str | None = ...) -> None: ... 
+ +class ValuedBaseElement(BaseElement): + def __init__(self, value: str | bytes | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/cdav.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/cdav.pyi new file mode 100644 index 00000000..632894a2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/cdav.pyi @@ -0,0 +1,100 @@ +import datetime +from typing import ClassVar + +from .base import BaseElement, NamedBaseElement, ValuedBaseElement + +class CalendarQuery(BaseElement): + tag: ClassVar[str] + +class FreeBusyQuery(BaseElement): + tag: ClassVar[str] + +class Mkcalendar(BaseElement): + tag: ClassVar[str] + +class CalendarMultiGet(BaseElement): + tag: ClassVar[str] + +class ScheduleInboxURL(BaseElement): + tag: ClassVar[str] + +class ScheduleOutboxURL(BaseElement): + tag: ClassVar[str] + +class Filter(BaseElement): + tag: ClassVar[str] + +class CompFilter(NamedBaseElement): + tag: ClassVar[str] + +class PropFilter(NamedBaseElement): + tag: ClassVar[str] + +class ParamFilter(NamedBaseElement): + tag: ClassVar[str] + +class TextMatch(ValuedBaseElement): + tag: ClassVar[str] + def __init__(self, value, collation: str = ..., negate: bool = ...) -> None: ... + +class TimeRange(BaseElement): + tag: ClassVar[str] + def __init__(self, start: datetime.datetime | None = ..., end: datetime.datetime | None = ...) -> None: ... + +class NotDefined(BaseElement): + tag: ClassVar[str] + +class CalendarData(BaseElement): + tag: ClassVar[str] + +class Expand(BaseElement): + tag: ClassVar[str] + def __init__(self, start: datetime.datetime | None, end: datetime.datetime | None = ...) -> None: ... 
+ +class Comp(NamedBaseElement): + tag: ClassVar[str] + +class CalendarUserAddressSet(BaseElement): + tag: ClassVar[str] + +class CalendarUserType(BaseElement): + tag: ClassVar[str] + +class CalendarHomeSet(BaseElement): + tag: ClassVar[str] + +class Calendar(BaseElement): + tag: ClassVar[str] + +class CalendarDescription(ValuedBaseElement): + tag: ClassVar[str] + +class CalendarTimeZone(ValuedBaseElement): + tag: ClassVar[str] + +class SupportedCalendarComponentSet(ValuedBaseElement): + tag: ClassVar[str] + +class SupportedCalendarData(ValuedBaseElement): + tag: ClassVar[str] + +class MaxResourceSize(ValuedBaseElement): + tag: ClassVar[str] + +class MinDateTime(ValuedBaseElement): + tag: ClassVar[str] + +class MaxDateTime(ValuedBaseElement): + tag: ClassVar[str] + +class MaxInstances(ValuedBaseElement): + tag: ClassVar[str] + +class MaxAttendeesPerInstance(ValuedBaseElement): + tag: ClassVar[str] + +class Allprop(BaseElement): + tag: ClassVar[str] + +class ScheduleTag(BaseElement): + tag: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/dav.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/dav.pyi new file mode 100644 index 00000000..4f3577e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/dav.pyi @@ -0,0 +1,63 @@ +from typing import ClassVar + +from .base import BaseElement, ValuedBaseElement + +class Propfind(BaseElement): + tag: ClassVar[str] + +class PropertyUpdate(BaseElement): + tag: ClassVar[str] + +class Mkcol(BaseElement): + tag: ClassVar[str] + +class SyncCollection(BaseElement): + tag: ClassVar[str] + +class SyncToken(BaseElement): + tag: ClassVar[str] + +class SyncLevel(BaseElement): + tag: ClassVar[str] + +class Prop(BaseElement): + tag: ClassVar[str] + +class Collection(BaseElement): + tag: ClassVar[str] + +class Set(BaseElement): + tag: ClassVar[str] + +class ResourceType(BaseElement): + tag: ClassVar[str] + +class DisplayName(ValuedBaseElement): + tag: ClassVar[str] + +class GetEtag(ValuedBaseElement): + tag: ClassVar[str] + +class Href(BaseElement): + tag: ClassVar[str] + +class Response(BaseElement): + tag: ClassVar[str] + +class Status(BaseElement): + tag: ClassVar[str] + +class PropStat(BaseElement): + tag: ClassVar[str] + +class MultiStatus(BaseElement): + tag: ClassVar[str] + +class CurrentUserPrincipal(BaseElement): + tag: ClassVar[str] + +class PrincipalCollectionSet(BaseElement): + tag: ClassVar[str] + +class Allprop(BaseElement): + tag: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/ical.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/ical.pyi new file mode 100644 index 00000000..762d6ae1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/elements/ical.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar + +from .base import ValuedBaseElement + +class CalendarColor(ValuedBaseElement): + tag: ClassVar[str] + +class CalendarOrder(ValuedBaseElement): + tag: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/error.pyi new file mode 100644 index 00000000..66de08c1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/error.pyi @@ -0,0 +1,23 @@ +def assert_(condition: object) -> None: ... + +ERR_FRAGMENT: str + +class DAVError(Exception): + url: str | None + reason: str + def __init__(self, url: str | None = ..., reason: str | None = ...) -> None: ... + +class AuthorizationError(DAVError): ... +class PropsetError(DAVError): ... +class ProppatchError(DAVError): ... +class PropfindError(DAVError): ... +class ReportError(DAVError): ... +class MkcolError(DAVError): ... +class MkcalendarError(DAVError): ... +class PutError(DAVError): ... +class DeleteError(DAVError): ... +class NotFoundError(DAVError): ... +class ConsistencyError(DAVError): ... +class ResponseError(DAVError): ... + +exception_by_method: dict[str, type[DAVError]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/namespace.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/namespace.pyi new file mode 100644 index 00000000..a3ad5724 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/namespace.pyi @@ -0,0 +1,4 @@ +nsmap: dict[str, str] +nsmap2: dict[str, str] + +def ns(prefix: str, tag: str | None = ...) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/url.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/url.pyi new file mode 100644 index 00000000..a47a1957 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/url.pyi @@ -0,0 +1,27 @@ +from typing import overload +from urllib.parse import ParseResult, SplitResult + +class URL: + def __init__(self, url: str | ParseResult | SplitResult) -> None: ... + def __bool__(self) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + @overload + @classmethod + def objectify(cls, url: None) -> None: ... + @overload + @classmethod + def objectify(cls, url: URL | str | ParseResult | SplitResult) -> URL: ... + def __getattr__(self, attr: str): ... + def __unicode__(self) -> str: ... + def strip_trailing_slash(self) -> URL: ... + def is_auth(self) -> bool: ... + def unauth(self) -> URL: ... + def canonical(self) -> URL: ... + def join(self, path: object) -> URL: ... + +@overload +def make(url: None) -> None: ... +@overload +def make(url: URL | str | ParseResult | SplitResult) -> URL: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/vcal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/vcal.pyi new file mode 100644 index 00000000..7b567428 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/lib/vcal.pyi @@ -0,0 +1 @@ +def fix(event): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/objects.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/objects.pyi new file mode 100644 index 00000000..310ad721 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/objects.pyi @@ -0,0 +1,249 @@ +import datetime +from _typeshed import Incomplete +from collections.abc import Iterable, Iterator, Mapping, Sequence +from typing import Any, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias +from urllib.parse import ParseResult, SplitResult + +from vobject.base import VBase + +from .davclient import DAVClient +from .elements.cdav import CalendarQuery, CompFilter, ScheduleInboxURL, ScheduleOutboxURL +from .lib.url import URL + +_CC = TypeVar("_CC", bound=CalendarObjectResource) +# Actually "type[Todo] | type[Event] | type[Journal]", but mypy doesn't like that. +_CompClass: TypeAlias = type[CalendarObjectResource] +_VCalAddress: TypeAlias = Any # actually icalendar.vCalAddress + +class DAVObject: + id: str | None + url: URL | None + client: DAVClient | None + parent: DAVObject | None + name: str | None + props: Mapping[Any, Any] + extra_init_options: dict[str, Any] + def __init__( + self, + client: DAVClient | None = ..., + url: str | ParseResult | SplitResult | URL | None = ..., + parent: DAVObject | None = ..., + name: str | None = ..., + id: str | None = ..., + props: Mapping[Any, Any] | None = ..., + **extra: Any, + ) -> None: ... + @property + def canonical_url(self) -> str: ... + def children(self, type: str | None = ...) -> list[tuple[URL, Any, Any]]: ... + def get_property(self, prop, use_cached: bool = ..., **passthrough) -> Any | None: ... + def get_properties( + self, props: Incomplete | None = ..., depth: int = ..., parse_response_xml: bool = ..., parse_props: bool = ... + ): ... + def set_properties(self, props: Incomplete | None = ...) -> Self: ... + def save(self) -> Self: ... + def delete(self) -> None: ... + +class CalendarSet(DAVObject): + def calendars(self) -> list[Calendar]: ... + def make_calendar( + self, name: str | None = ..., cal_id: str | None = ..., supported_calendar_component_set: Incomplete | None = ... + ) -> Calendar: ... + def calendar(self, name: str | None = ..., cal_id: str | None = ...) -> Calendar: ... + +class Principal(DAVObject): + def __init__(self, client: DAVClient | None = ..., url: str | ParseResult | SplitResult | URL | None = ...) -> None: ... + def calendars(self) -> list[Calendar]: ... + def make_calendar( + self, name: str | None = ..., cal_id: str | None = ..., supported_calendar_component_set: Incomplete | None = ... + ) -> Calendar: ... + def calendar(self, name: str | None = ..., cal_id: str | None = ...) -> Calendar: ... + def get_vcal_address(self) -> _VCalAddress: ... + calendar_home_set: CalendarSet # can also be set to anything URL.objectify() accepts + def freebusy_request(self, dtstart, dtend, attendees): ... + def calendar_user_address_set(self) -> list[str]: ... + def schedule_inbox(self) -> ScheduleInbox: ... + def schedule_outbox(self) -> ScheduleOutbox: ... + +class Calendar(DAVObject): + def get_supported_components(self) -> list[Any]: ... + def save_with_invites(self, ical: str, attendees, **attendeeoptions) -> None: ... + def save_event(self, ical: str | None = ..., no_overwrite: bool = ..., no_create: bool = ..., **ical_data: Any) -> Event: ... 
+ def save_todo(self, ical: str | None = ..., no_overwrite: bool = ..., no_create: bool = ..., **ical_data: Any) -> Todo: ... + def save_journal( + self, ical: str | None = ..., no_overwrite: bool = ..., no_create: bool = ..., **ical_data: Any + ) -> Journal: ... + add_event = save_event + add_todo = save_todo + add_journal = save_journal + def calendar_multiget(self, event_urls: Iterable[URL]) -> list[Event]: ... + def build_date_search_query( + self, + start, + end: datetime.datetime | None = ..., + compfilter: Literal["VEVENT"] | None = ..., + expand: bool | Literal["maybe"] = ..., + ): ... + @overload + def date_search( + self, + start: datetime.datetime, + end: datetime.datetime | None = ..., + compfilter: Literal["VEVENT"] = ..., + expand: bool | Literal["maybe"] = ..., + verify_expand: bool = ..., + ) -> list[Event]: ... + @overload + def date_search( + self, start: datetime.datetime, *, compfilter: None, expand: bool | Literal["maybe"] = ..., verify_expand: bool = ... + ) -> list[CalendarObjectResource]: ... + @overload + def date_search( + self, + start: datetime.datetime, + end: datetime.datetime | None, + compfilter: None, + expand: bool | Literal["maybe"] = ..., + verify_expand: bool = ..., + ) -> list[CalendarObjectResource]: ... + @overload + def search( + self, + xml: None = ..., + comp_class: None = ..., + todo: bool | None = ..., + include_completed: bool = ..., + sort_keys: Sequence[str] = ..., + split_expanded: bool = ..., + **kwargs, + ) -> list[CalendarObjectResource]: ... + @overload + def search( + self, + xml, + comp_class: type[_CC], + todo: bool | None = ..., + include_completed: bool = ..., + sort_keys: Sequence[str] = ..., + split_expanded: bool = ..., + **kwargs, + ) -> list[_CC]: ... + @overload + def search( + self, + *, + comp_class: type[_CC], + todo: bool | None = ..., + include_completed: bool = ..., + sort_keys: Sequence[str] = ..., + split_expanded: bool = ..., + **kwargs, + ) -> list[_CC]: ... + def build_search_xml_query( + self, + comp_class: _CompClass | None = ..., + todo: bool | None = ..., + ignore_completed1: bool | None = ..., + ignore_completed2: bool | None = ..., + ignore_completed3: bool | None = ..., + event: bool | None = ..., + filters: list[Incomplete] | None = ..., + expand: bool | None = ..., + start: datetime.datetime | None = ..., + end: datetime.datetime | None = ..., + *, + uid=..., + summary=..., + comment=..., + description=..., + location=..., + status=..., + ) -> tuple[CalendarQuery, _CompClass]: ... + def freebusy_request(self, start: datetime.datetime, end: datetime.datetime) -> FreeBusy: ... + def todos(self, sort_keys: Iterable[str] = ..., include_completed: bool = ..., sort_key: str | None = ...) -> list[Todo]: ... + def event_by_url(self, href, data: Incomplete | None = ...) -> Event: ... + def object_by_uid(self, uid: str, comp_filter: CompFilter | None = ..., comp_class: _CompClass | None = ...) -> Event: ... + def todo_by_uid(self, uid: str) -> CalendarObjectResource: ... + def event_by_uid(self, uid: str) -> CalendarObjectResource: ... + def journal_by_uid(self, uid: str) -> CalendarObjectResource: ... + event = event_by_uid + def events(self) -> list[Event]: ... + def objects_by_sync_token( + self, sync_token: Incomplete | None = ..., load_objects: bool = ... + ) -> SynchronizableCalendarObjectCollection: ... + objects = objects_by_sync_token + def journals(self) -> list[Journal]: ... 
+ +class ScheduleMailbox(Calendar): + def __init__( + self, + client: DAVClient | None = ..., + principal: Principal | None = ..., + url: str | ParseResult | SplitResult | URL | None = ..., + ) -> None: ... + def get_items(self): ... + +class ScheduleInbox(ScheduleMailbox): + findprop = ScheduleInboxURL + +class ScheduleOutbox(ScheduleMailbox): + findprop = ScheduleOutboxURL + +class SynchronizableCalendarObjectCollection: + def __init__(self, calendar, objects, sync_token) -> None: ... + def __iter__(self) -> Iterator[Any]: ... + def __len__(self) -> int: ... + def objects_by_url(self): ... + def sync(self) -> tuple[Any, Any]: ... + +class CalendarObjectResource(DAVObject): + def __init__( + self, + client: DAVClient | None = ..., + url: str | ParseResult | SplitResult | URL | None = ..., + data: Incomplete | None = ..., + parent: Incomplete | None = ..., + id: Incomplete | None = ..., + props: Incomplete | None = ..., + ) -> None: ... + def add_organizer(self) -> None: ... + def split_expanded(self) -> list[Self]: ... + def expand_rrule(self, start: datetime.datetime, end: datetime.datetime) -> None: ... + def add_attendee(self, attendee, no_default_parameters: bool = ..., **parameters) -> None: ... + def is_invite_request(self) -> bool: ... + def accept_invite(self, calendar: Incomplete | None = ...) -> None: ... + def decline_invite(self, calendar: Incomplete | None = ...) -> None: ... + def tentatively_accept_invite(self, calendar: Incomplete | None = ...) -> None: ... + def copy(self, keep_uid: bool = ..., new_parent: Incomplete | None = ...) -> Self: ... + def load(self) -> Self: ... + def change_attendee_status(self, attendee: Incomplete | None = ..., **kwargs) -> None: ... + def save( + self, + no_overwrite: bool = ..., + no_create: bool = ..., + obj_type: str | None = ..., + increase_seqno: bool = ..., + if_schedule_tag_match: bool = ..., + ) -> Self: ... + def get_duration(self) -> datetime.timedelta: ... + data: Any + vobject_instance: VBase + icalendar_instance: Any + instance: VBase + +class Event(CalendarObjectResource): ... +class Journal(CalendarObjectResource): ... + +class FreeBusy(CalendarObjectResource): + def __init__( + self, parent, data, url: str | ParseResult | SplitResult | URL | None = ..., id: Incomplete | None = ... + ) -> None: ... + +class Todo(CalendarObjectResource): + def complete( + self, + completion_timestamp: datetime.datetime | None = ..., + handle_rrule: bool = ..., + rrule_mode: Literal["safe", "this_and_future"] = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/requests.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/requests.pyi new file mode 100644 index 00000000..6335655e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/caldav/caldav/requests.pyi @@ -0,0 +1,8 @@ +from requests.auth import AuthBase + +class HTTPBearerAuth(AuthBase): + password: str + def __init__(self, password: str) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __call__(self, r): ... 
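# Illustrative usage sketch of the caldav client API typed by the stubs above
# (DAVClient in davclient.pyi, Principal/Calendar/Event in objects.pyi). The
# server URL, credentials, and date range below are placeholder assumptions.
from datetime import datetime
from caldav import DAVClient

with DAVClient(url="https://calendar.example.com/dav/", username="user", password="secret") as client:
    principal = client.principal()          # Principal for the authenticated user
    for calendar in principal.calendars():  # list[Calendar]
        events = calendar.date_search(      # list[Event] with the default VEVENT compfilter
            start=datetime(2023, 1, 1),
            end=datetime(2023, 2, 1),
        )
        for event in events:
            print(event.data)               # raw iCalendar text of the event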
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..90858314 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/@tests/stubtest_allowlist.txt @@ -0,0 +1,4 @@ +# added dynamically and not detected by stubtest +cffi.(api.)?FFI.CData +cffi.(api.)?FFI.CType +cffi.(api.)?FFI.buffer diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/@tests/stubtest_allowlist_darwin.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/@tests/stubtest_allowlist_darwin.txt new file mode 100644 index 00000000..55d4b889 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/@tests/stubtest_allowlist_darwin.txt @@ -0,0 +1,2 @@ +# Technically exists on all OSs, but crashes on all but Windows. So we hide it in stubs +cffi.(api.)?FFI.getwinerror diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..55d4b889 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,2 @@ +# Technically exists on all OSs, but crashes on all but Windows. So we hide it in stubs +cffi.(api.)?FFI.getwinerror diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/METADATA.toml new file mode 100644 index 00000000..e7239b07 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/METADATA.toml @@ -0,0 +1,5 @@ +version = "1.15.*" + +[tool.stubtest] +# linux and darwin are mostly equivalent, except for a single `RTLD_DEEPBIND` variable +platforms = ["linux", "win32"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/_cffi_backend.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/_cffi_backend.pyi new file mode 100644 index 00000000..05d12ff3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/_cffi_backend.pyi @@ -0,0 +1,266 @@ +import sys +import types +from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer +from collections.abc import Callable, Hashable +from typing import Any, ClassVar, Protocol, TypeVar, overload +from typing_extensions import Literal, TypeAlias, final + +_T = TypeVar("_T") + +class _Allocator(Protocol): + def __call__(self, cdecl: str | CType, init: Any = ...) -> _CDataBase: ... + +__version__: str + +FFI_CDECL: int +FFI_DEFAULT_ABI: int +RTLD_GLOBAL: int +RTLD_LAZY: int +RTLD_LOCAL: int +RTLD_NOW: int +if sys.platform == "linux": + RTLD_DEEPBIND: int +if sys.platform != "win32": + RTLD_NODELETE: int + RTLD_NOLOAD: int + +@final +class CField: + bitshift: Incomplete + bitsize: Incomplete + flags: Incomplete + offset: Incomplete + type: Incomplete + +@final +class CLibrary: + def close_lib(self, *args, **kwargs): ... + def load_function(self, *args, **kwargs): ... + def read_variable(self, *args, **kwargs): ... 
+ def write_variable(self, *args, **kwargs): ... + +@final +class CType: + abi: Incomplete + args: Incomplete + cname: Incomplete + elements: Incomplete + ellipsis: Incomplete + fields: Incomplete + item: Incomplete + kind: Incomplete + length: Incomplete + relements: Incomplete + result: Incomplete + def __dir__(self): ... + +@final +class Lib: + def __dir__(self): ... + +@final +class _CDataBase: + __name__: ClassVar[str] + def __add__(self, other): ... + def __bool__(self) -> bool: ... + def __call__(self, *args, **kwargs): ... + def __complex__(self) -> complex: ... + def __delitem__(self, other) -> None: ... + def __dir__(self): ... + def __enter__(self): ... + def __eq__(self, other): ... + def __exit__(self, type, value, traceback): ... + def __float__(self) -> float: ... + def __ge__(self, other): ... + def __getitem__(self, index): ... + def __gt__(self, other): ... + def __hash__(self) -> int: ... + def __int__(self) -> int: ... + def __iter__(self): ... + def __le__(self, other): ... + def __len__(self) -> int: ... + def __lt__(self, other): ... + def __ne__(self, other): ... + def __radd__(self, other): ... + def __rsub__(self, other): ... + def __setitem__(self, index, object) -> None: ... + def __sub__(self, other): ... + +@final +class buffer: + __hash__: ClassVar[None] # type: ignore[assignment] + def __init__(self, *args, **kwargs) -> None: ... + def __delitem__(self, other) -> None: ... + def __eq__(self, other): ... + def __ge__(self, other): ... + def __getitem__(self, index): ... + def __gt__(self, other): ... + def __le__(self, other): ... + def __len__(self) -> int: ... + def __lt__(self, other): ... + def __ne__(self, other): ... + def __setitem__(self, index, object) -> None: ... + +# These aliases are to work around pyright complaints. +# Pyright doesn't like it when a class object is defined as an alias +# of a global object with the same name. +_tmp_CType = CType +_tmp_buffer = buffer + +class FFI: + CData: TypeAlias = _CDataBase + CType: TypeAlias = _tmp_CType + buffer: TypeAlias = _tmp_buffer # noqa: Y042 + + class error(Exception): ... + NULL: ClassVar[CData] + RTLD_GLOBAL: ClassVar[int] + RTLD_LAZY: ClassVar[int] + RTLD_LOCAL: ClassVar[int] + RTLD_NOW: ClassVar[int] + if sys.platform != "win32": + RTLD_DEEPBIND: ClassVar[int] + RTLD_NODELETE: ClassVar[int] + RTLD_NOLOAD: ClassVar[int] + + errno: int + + def __init__( + self, + module_name: str = ..., + _version: int = ..., + _types: str = ..., + _globals: tuple[str | int, ...] = ..., + _struct_unions: tuple[tuple[str, ...], ...] = ..., + _enums: tuple[str, ...] = ..., + _typenames: tuple[str, ...] = ..., + _includes: tuple[FFI, ...] = ..., + ) -> None: ... + @overload + def addressof(self, __cdata: CData, *field_or_index: str | int) -> CData: ... + @overload + def addressof(self, __library: Lib, __name: str) -> CData: ... + def alignof(self, __cdecl: str | CType | CData) -> int: ... + @overload + def callback( + self, + cdecl: str | CType, + python_callable: None = ..., + error: Any = ..., + onerror: Callable[[Exception, Any, Any], None] | None = ..., + ) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + @overload + def callback( + self, + cdecl: str | CType, + python_callable: Callable[..., _T], + error: Any = ..., + onerror: Callable[[Exception, Any, Any], None] | None = ..., + ) -> Callable[..., _T]: ... + def cast(self, cdecl: str | CType, value: CData) -> CData: ... + def def_extern( + self, name: str = ..., error: Any = ..., onerror: Callable[[Exception, Any, types.TracebackType], Any] = ... 
+ ) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def dlclose(self, __lib: Lib) -> None: ... + if sys.platform == "win32": + def dlopen(self, __libpath: str | CData, __flags: int = ...) -> Lib: ... + else: + def dlopen(self, __libpath: str | CData | None = ..., __flags: int = ...) -> Lib: ... + + @overload + def from_buffer(self, cdecl: ReadableBuffer, require_writable: Literal[False] = ...) -> CData: ... + @overload + def from_buffer(self, cdecl: WriteableBuffer, require_writable: Literal[True]) -> CData: ... + @overload + def from_buffer(self, cdecl: str | CType, python_buffer: ReadableBuffer, require_writable: Literal[False] = ...) -> CData: ... + @overload + def from_buffer(self, cdecl: str | CType, python_buffer: WriteableBuffer, require_writable: Literal[True]) -> CData: ... + def from_handle(self, __x: CData) -> Any: ... + @overload + def gc(self, cdata: CData, destructor: Callable[[CData], Any], size: int = ...) -> CData: ... + @overload + def gc(self, cdata: CData, destructor: None, size: int = ...) -> None: ... + def getctype(self, cdecl: str | CType, replace_with: str = ...) -> str: ... + if sys.platform == "win32": + def getwinerror(self, code: int = ...) -> tuple[int, str]: ... + + def init_once(self, func: Callable[[], Any], tag: Hashable) -> Any: ... + def integer_const(self, name: str) -> int: ... + def list_types(self) -> tuple[list[str], list[str], list[str]]: ... + def memmove(self, dest: CData | WriteableBuffer, src: CData | ReadableBuffer, n: int) -> None: ... + def new(self, cdecl: str | CType, init: Any = ...) -> CData: ... + @overload + def new_allocator(self, alloc: None = ..., free: None = ..., should_clear_after_alloc: bool = ...) -> _Allocator: ... + @overload + def new_allocator( + self, alloc: Callable[[int], CData], free: None = ..., should_clear_after_alloc: bool = ... + ) -> _Allocator: ... + @overload + def new_allocator( + self, alloc: Callable[[int], CData], free: Callable[[CData], Any], should_clear_after_alloc: bool = ... + ) -> _Allocator: ... + def new_handle(self, __x: Any) -> CData: ... + def offsetof(self, __cdecl: str | CType, __field_or_index: str | int, *__fields_or_indexes: str | int) -> int: ... + def release(self, __cdata: CData) -> None: ... + def sizeof(self, __cdecl: str | CType | CData) -> int: ... + def string(self, cdata: CData, maxlen: int) -> bytes | str: ... + def typeof(self, cdecl: str | CData) -> CType: ... + def unpack(self, cdata: CData, length: int) -> bytes | str | list[Any]: ... + +def alignof(__cdecl: CType) -> int: ... +def callback( + __cdecl: CType, + __python_callable: Callable[..., _T], + __error: Any = ..., + __onerror: Callable[[Exception, Any, Any], None] | None = ..., +) -> Callable[..., _T]: ... +def cast(__cdecl: CType, __value: _CDataBase) -> _CDataBase: ... +def complete_struct_or_union( + __cdecl: CType, + __fields: list[tuple[str, CType, int, int]], + __ignored: Any, + __total_size: int, + __total_alignment: int, + __sflags: int, + __pack: int, +) -> None: ... +@overload +def from_buffer(__cdecl: CType, __python_buffer: ReadableBuffer, require_writable: Literal[False] = ...) -> _CDataBase: ... +@overload +def from_buffer(__cdecl: CType, __python_buffer: WriteableBuffer, require_writable: Literal[True]) -> _CDataBase: ... +def from_handle(__x: _CDataBase) -> Any: ... +@overload +def gcp(cdata: _CDataBase, destructor: Callable[[_CDataBase], Any], size: int = ...) -> _CDataBase: ... +@overload +def gcp(cdata: _CDataBase, destructor: None, size: int = ...) -> None: ... +def get_errno() -> int: ... 
+def getcname(__cdecl: CType, __replace_with: str) -> str: ... + +if sys.platform == "win32": + def getwinerror(code: int = ...) -> tuple[int, str]: ... + +if sys.platform == "win32": + def load_library(__libpath: str | _CDataBase, __flags: int = ...) -> CLibrary: ... + +else: + def load_library(__libpath: str | _CDataBase | None = ..., __flags: int = ...) -> CLibrary: ... + +def memmove(dest: _CDataBase | WriteableBuffer, src: _CDataBase | ReadableBuffer, n: int) -> None: ... +def new_array_type(__cdecl: CType, __length: int | None) -> CType: ... +def new_enum_type(__name: str, __enumerators: tuple[str, ...], __enumvalues: tuple[Any, ...], __basetype: CType) -> CType: ... +def new_function_type(__args: tuple[CType, ...], __result: CType, __ellipsis: int, __abi: int) -> CType: ... +def new_pointer_type(__cdecl: CType) -> CType: ... +def new_primitive_type(__name: str) -> CType: ... +def new_struct_type(__name: str) -> CType: ... +def new_union_type(__name: str) -> CType: ... +def new_void_type() -> CType: ... +def newp(__cdecl: CType, __init: Any = ...) -> _CDataBase: ... +def newp_handle(__cdecl: CType, __x: Any) -> _CDataBase: ... +def rawaddressof(__cdecl: CType, __cdata: _CDataBase, __offset: int) -> _CDataBase: ... +def release(__cdata: _CDataBase) -> None: ... +def set_errno(__errno: int) -> None: ... +def sizeof(__cdecl: CType | _CDataBase) -> int: ... +def string(cdata: _CDataBase, maxlen: int) -> bytes | str: ... +def typeof(__cdata: _CDataBase) -> CType: ... +def typeoffsetof(__cdecl: CType, __fieldname: str | int, __following: bool = ...) -> tuple[CType, int]: ... +def unpack(cdata: _CDataBase, length: int) -> bytes | str | list[Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/__init__.pyi new file mode 100644 index 00000000..851066f6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/__init__.pyi @@ -0,0 +1,11 @@ +from .api import FFI as FFI +from .error import ( + CDefError as CDefError, + FFIError as FFIError, + VerificationError as VerificationError, + VerificationMissing as VerificationMissing, +) + +__version__: str +__version_info__: tuple[int, int, int] +__version_verifier_modules__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/api.pyi new file mode 100644 index 00000000..c95a5b35 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/api.pyi @@ -0,0 +1,96 @@ +import distutils.core +import sys +import types +from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer +from collections.abc import Callable, Hashable +from typing import Any, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +import _cffi_backend + +_T = TypeVar("_T") + +basestring: TypeAlias = str # noqa: Y042 + +class FFI: + CData: TypeAlias = _cffi_backend._CDataBase + CType: TypeAlias = _cffi_backend.CType + buffer: TypeAlias = _cffi_backend.buffer # noqa: Y042 + + BVoidP: CType + BCharA: CType + NULL: CType + errno: int + + def __init__(self, backend: types.ModuleType | None = ...) -> None: ... + def cdef(self, csource: str, override: bool = ..., packed: bool = ..., pack: int | None = ...) -> None: ... 
+ def embedding_api(self, csource: str, packed: bool = ..., pack: bool | int | None = ...) -> None: ... + def dlopen(self, name: str, flags: int = ...) -> _cffi_backend.Lib: ... + def dlclose(self, lib: _cffi_backend.Lib) -> None: ... + def typeof(self, cdecl: str | CData | types.BuiltinFunctionType | types.FunctionType) -> CType: ... + def sizeof(self, cdecl: str | CData) -> int: ... + def alignof(self, cdecl: str | CData) -> int: ... + def offsetof(self, cdecl: str | CData, *fields_or_indexes: str | int) -> int: ... + def new(self, cdecl: str | CType, init: Incomplete | None = ...) -> CData: ... + def new_allocator( + self, + alloc: Callable[[int], CData] | None = ..., + free: Callable[[CData], Any] | None = ..., + should_clear_after_alloc: bool = ..., + ) -> _cffi_backend._Allocator: ... + def cast(self, cdecl: str | CType, source: CData) -> CData: ... + def string(self, cdata: CData, maxlen: int = ...) -> bytes | str: ... + def unpack(self, cdata: CData, length: int) -> bytes | str | list[Any]: ... + @overload + def from_buffer(self, cdecl: ReadableBuffer, require_writable: Literal[False] = ...) -> CData: ... + @overload + def from_buffer(self, cdecl: WriteableBuffer, require_writable: Literal[True]) -> CData: ... + @overload + def from_buffer(self, cdecl: str, python_buffer: ReadableBuffer, require_writable: Literal[False] = ...) -> CData: ... + @overload + def from_buffer(self, cdecl: str, python_buffer: WriteableBuffer, require_writable: Literal[True]) -> CData: ... + def memmove(self, dest: CData | WriteableBuffer, src: CData | ReadableBuffer, n: int) -> None: ... + @overload + def callback( + self, + cdecl: str | CType, + python_callable: None = ..., + error: Any = ..., + onerror: Callable[[Exception, Any, Any], None] | None = ..., + ) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + @overload + def callback( + self, + cdecl: str | CType, + python_callable: Callable[..., _T], + error: Any = ..., + onerror: Callable[[Exception, Any, Any], None] | None = ..., + ) -> Callable[..., _T]: ... + def getctype(self, cdecl: str | CType, replace_with: str = ...) -> str: ... + @overload + def gc(self, cdata: CData, destructor: Callable[[CData], Any], size: int = ...) -> CData: ... + @overload + def gc(self, cdata: CData, destructor: None, size: int = ...) -> None: ... + def verify(self, source: str = ..., tmpdir: str | None = ..., **kwargs: Any) -> _cffi_backend.Lib: ... + # Technically exists on all OSs, but crashes on all but Windows. So we hide it in stubs + if sys.platform == "win32": + def getwinerror(self, code: int = ...) -> tuple[int, str] | None: ... + + def addressof(self, cdata: CData, *fields_or_indexes: str | int) -> CData: ... + def include(self, ffi_to_include: FFI) -> None: ... + def new_handle(self, x: Any) -> CData: ... + def from_handle(self, x: CData) -> Any: ... + def release(self, x: CData) -> None: ... + def set_unicode(self, enabled_flag: bool) -> None: ... + def set_source(self, module_name: str, source: str, source_extension: str = ..., **kwds: Any) -> None: ... + def set_source_pkgconfig( + self, module_name: str, pkgconfig_libs: list[str], source: str, source_extension: str = ..., **kwds: Any + ) -> None: ... + def distutils_extension(self, tmpdir: str = ..., verbose: bool = ...) -> distutils.core.Extension: ... + def emit_c_code(self, filename: str) -> None: ... + def emit_python_code(self, filename: str) -> None: ... + def compile(self, tmpdir: str = ..., verbose: int = ..., target: str | None = ..., debug: bool | None = ...) -> str: ... 
+ def init_once(self, func: Callable[[], Any], tag: Hashable) -> Any: ... + def embedding_init_code(self, pysource: str) -> None: ... + def def_extern(self, *args: Any, **kwds: Any) -> None: ... + def list_types(self) -> tuple[list[str], list[str], list[str]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/backend_ctypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/backend_ctypes.pyi new file mode 100644 index 00000000..32c66b8c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/backend_ctypes.pyi @@ -0,0 +1,79 @@ +from _typeshed import Incomplete + +unicode = str +long = int +xrange = range +bytechr: Incomplete + +class CTypesType(type): ... + +class CTypesData: + __metaclass__: Incomplete + __name__: str + def __init__(self, *args) -> None: ... + def __iter__(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __hash__(self) -> int: ... + def __repr__(self, c_name: str | None = ...): ... + +class CTypesGenericPrimitive(CTypesData): + def __hash__(self) -> int: ... + +class CTypesGenericArray(CTypesData): + def __iter__(self): ... + +class CTypesGenericPtr(CTypesData): + kind: str + def __nonzero__(self) -> bool: ... + def __bool__(self) -> bool: ... + +class CTypesBaseStructOrUnion(CTypesData): ... + +class CTypesBackend: + PRIMITIVE_TYPES: Incomplete + RTLD_LAZY: int + RTLD_NOW: int + RTLD_GLOBAL: Incomplete + RTLD_LOCAL: Incomplete + def __init__(self) -> None: ... + ffi: Incomplete + def set_ffi(self, ffi) -> None: ... + def load_library(self, path, flags: int = ...): ... + def new_void_type(self): ... + def new_primitive_type(self, name): ... + def new_pointer_type(self, BItem): ... + def new_array_type(self, CTypesPtr, length): ... + def new_struct_type(self, name): ... + def new_union_type(self, name): ... + def complete_struct_or_union( + self, CTypesStructOrUnion, fields, tp, totalsize: int = ..., totalalignment: int = ..., sflags: int = ..., pack: int = ... + ): ... + def new_function_type(self, BArgs, BResult, has_varargs): ... + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): ... + def get_errno(self): ... + def set_errno(self, value) -> None: ... + def string(self, b, maxlen: int = ...): ... + def buffer(self, bptr, size: int = ...) -> None: ... + def sizeof(self, cdata_or_BType): ... + def alignof(self, BType): ... + def newp(self, BType, source): ... + def cast(self, BType, source): ... + def callback(self, BType, source, error, onerror): ... + def gcp(self, cdata, destructor, size: int = ...): ... + typeof: Incomplete + def getcname(self, BType, replace_with): ... + def typeoffsetof(self, BType, fieldname, num: int = ...): ... + def rawaddressof(self, BTypePtr, cdata, offset: Incomplete | None = ...): ... + +class CTypesLibrary: + backend: Incomplete + cdll: Incomplete + def __init__(self, backend, cdll) -> None: ... + def load_function(self, BType, name): ... + def read_variable(self, BType, name): ... + def write_variable(self, BType, name, value) -> None: ... 
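# Illustrative usage sketch of the in-line cffi.FFI API typed by the stubs
# above (FFI.cdef, FFI.new, FFI.sizeof in cffi/api.pyi). The struct name
# `point` and its fields are assumptions chosen for illustration only.
from cffi import FFI

ffi = FFI()
ffi.cdef("struct point { int x; int y; };")  # register a C declaration with this FFI
p = ffi.new("struct point *")                # allocate a zero-initialised, owned struct
p.x, p.y = 3, 4
print(p.x + p.y)                             # -> 7
print(ffi.sizeof("struct point"))            # -> 8 on platforms with 4-byte ints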
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/cffi_opcode.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/cffi_opcode.pyi new file mode 100644 index 00000000..364a0808 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/cffi_opcode.pyi @@ -0,0 +1,92 @@ +from _typeshed import Incomplete + +class CffiOp: + op: Incomplete + arg: Incomplete + def __init__(self, op, arg) -> None: ... + def as_c_expr(self): ... + def as_python_bytes(self): ... + +def format_four_bytes(num): ... + +OP_PRIMITIVE: int +OP_POINTER: int +OP_ARRAY: int +OP_OPEN_ARRAY: int +OP_STRUCT_UNION: int +OP_ENUM: int +OP_FUNCTION: int +OP_FUNCTION_END: int +OP_NOOP: int +OP_BITFIELD: int +OP_TYPENAME: int +OP_CPYTHON_BLTN_V: int +OP_CPYTHON_BLTN_N: int +OP_CPYTHON_BLTN_O: int +OP_CONSTANT: int +OP_CONSTANT_INT: int +OP_GLOBAL_VAR: int +OP_DLOPEN_FUNC: int +OP_DLOPEN_CONST: int +OP_GLOBAL_VAR_F: int +OP_EXTERN_PYTHON: int +PRIM_VOID: int +PRIM_BOOL: int +PRIM_CHAR: int +PRIM_SCHAR: int +PRIM_UCHAR: int +PRIM_SHORT: int +PRIM_USHORT: int +PRIM_INT: int +PRIM_UINT: int +PRIM_LONG: int +PRIM_ULONG: int +PRIM_LONGLONG: int +PRIM_ULONGLONG: int +PRIM_FLOAT: int +PRIM_DOUBLE: int +PRIM_LONGDOUBLE: int +PRIM_WCHAR: int +PRIM_INT8: int +PRIM_UINT8: int +PRIM_INT16: int +PRIM_UINT16: int +PRIM_INT32: int +PRIM_UINT32: int +PRIM_INT64: int +PRIM_UINT64: int +PRIM_INTPTR: int +PRIM_UINTPTR: int +PRIM_PTRDIFF: int +PRIM_SIZE: int +PRIM_SSIZE: int +PRIM_INT_LEAST8: int +PRIM_UINT_LEAST8: int +PRIM_INT_LEAST16: int +PRIM_UINT_LEAST16: int +PRIM_INT_LEAST32: int +PRIM_UINT_LEAST32: int +PRIM_INT_LEAST64: int +PRIM_UINT_LEAST64: int +PRIM_INT_FAST8: int +PRIM_UINT_FAST8: int +PRIM_INT_FAST16: int +PRIM_UINT_FAST16: int +PRIM_INT_FAST32: int +PRIM_UINT_FAST32: int +PRIM_INT_FAST64: int +PRIM_UINT_FAST64: int +PRIM_INTMAX: int +PRIM_UINTMAX: int +PRIM_FLOATCOMPLEX: int +PRIM_DOUBLECOMPLEX: int +PRIM_CHAR16: int +PRIM_CHAR32: int +PRIMITIVE_TO_INDEX: Incomplete +F_UNION: int +F_CHECK_FIELDS: int +F_PACKED: int +F_EXTERNAL: int +F_OPAQUE: int +G_FLAGS: Incomplete +CLASS_NAME: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/commontypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/commontypes.pyi new file mode 100644 index 00000000..fe9e35ee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/commontypes.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +COMMON_TYPES: Incomplete + +def resolve_common_type(parser, commontype): ... +def win_common_types(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/cparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/cparser.pyi new file mode 100644 index 00000000..8fcab85b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/cparser.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +lock: Incomplete +CDEF_SOURCE_STRING: str + +class Parser: + def __init__(self) -> None: ... + def convert_pycparser_error(self, e, csource) -> None: ... + def parse( + self, csource, override: bool = ..., packed: bool = ..., pack: Incomplete | None = ..., dllexport: bool = ... + ) -> None: ... + def parse_type(self, cdecl): ... + def parse_type_and_quals(self, cdecl): ... 
+ def include(self, other) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/error.pyi new file mode 100644 index 00000000..a71f17e5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/error.pyi @@ -0,0 +1,14 @@ +class FFIError(Exception): + __module__: str + +class CDefError(Exception): + __module__: str + +class VerificationError(Exception): + __module__: str + +class VerificationMissing(Exception): + __module__: str + +class PkgConfigError(Exception): + __module__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/ffiplatform.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/ffiplatform.pyi new file mode 100644 index 00000000..d96b7559 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/ffiplatform.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +LIST_OF_FILE_NAMES: Incomplete + +def get_extension(srcfilename, modname, sources=..., **kwds): ... +def compile(tmpdir, ext, compiler_verbose: int = ..., debug: Incomplete | None = ...): ... +def maybe_relative_path(path): ... + +int_or_long = int + +def flatten(x): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/lock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/lock.pyi new file mode 100644 index 00000000..fc8393af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/lock.pyi @@ -0,0 +1 @@ +from _thread import allocate_lock as allocate_lock diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/model.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/model.pyi new file mode 100644 index 00000000..cc09cdc5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/model.pyi @@ -0,0 +1,162 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +from .error import CDefError as CDefError, VerificationError as VerificationError, VerificationMissing as VerificationMissing +from .lock import allocate_lock as allocate_lock + +Q_CONST: int +Q_RESTRICT: int +Q_VOLATILE: int + +def qualify(quals, replace_with): ... + +class BaseTypeByIdentity: + is_array_type: bool + is_raw_function: bool + def get_c_name(self, replace_with: str = ..., context: str = ..., quals: int = ...): ... + def has_c_name(self): ... + def is_integer_type(self): ... + def get_cached_btype(self, ffi, finishlist, can_delay: bool = ...): ... + +class BaseType(BaseTypeByIdentity): + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... + +class VoidType(BaseType): + c_name_with_marker: str + def __init__(self) -> None: ... + def build_backend_type(self, ffi, finishlist): ... + +void_type: Incomplete + +class BasePrimitiveType(BaseType): + def is_complex_type(self): ... + +class PrimitiveType(BasePrimitiveType): + ALL_PRIMITIVE_TYPES: Incomplete + name: Incomplete + c_name_with_marker: Incomplete + def __init__(self, name) -> None: ... + def is_char_type(self): ... + def is_integer_type(self): ... + def is_float_type(self): ... + def is_complex_type(self): ... 
+ def build_backend_type(self, ffi, finishlist): ... + +class UnknownIntegerType(BasePrimitiveType): + name: Incomplete + c_name_with_marker: Incomplete + def __init__(self, name) -> None: ... + def is_integer_type(self): ... + def build_backend_type(self, ffi, finishlist) -> None: ... + +class UnknownFloatType(BasePrimitiveType): + name: Incomplete + c_name_with_marker: Incomplete + def __init__(self, name) -> None: ... + def build_backend_type(self, ffi, finishlist) -> None: ... + +class BaseFunctionType(BaseType): + args: Incomplete + result: Incomplete + ellipsis: Incomplete + abi: Incomplete + c_name_with_marker: Incomplete + def __init__(self, args, result, ellipsis, abi: Incomplete | None = ...) -> None: ... + +class RawFunctionType(BaseFunctionType): + is_raw_function: bool + def build_backend_type(self, ffi, finishlist) -> None: ... + def as_function_pointer(self): ... + +class FunctionPtrType(BaseFunctionType): + def build_backend_type(self, ffi, finishlist): ... + def as_raw_function(self): ... + +class PointerType(BaseType): + totype: Incomplete + quals: Incomplete + c_name_with_marker: Incomplete + def __init__(self, totype, quals: int = ...) -> None: ... + def build_backend_type(self, ffi, finishlist): ... + +voidp_type: Incomplete + +def ConstPointerType(totype): ... + +const_voidp_type: Incomplete + +class NamedPointerType(PointerType): + name: Incomplete + c_name_with_marker: Incomplete + def __init__(self, totype, name, quals: int = ...) -> None: ... + +class ArrayType(BaseType): + is_array_type: bool + item: Incomplete + length: Incomplete + c_name_with_marker: Incomplete + def __init__(self, item, length) -> None: ... + def length_is_unknown(self): ... + def resolve_length(self, newlength): ... + def build_backend_type(self, ffi, finishlist): ... + +char_array_type: Incomplete + +class StructOrUnionOrEnum(BaseTypeByIdentity): + forcename: Incomplete + c_name_with_marker: Incomplete + def build_c_name_with_marker(self) -> None: ... + def force_the_name(self, forcename) -> None: ... + def get_official_name(self): ... + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout: Incomplete + completed: int + partial: bool + packed: int + name: Incomplete + fldnames: Incomplete + fldtypes: Incomplete + fldbitsize: Incomplete + fldquals: Incomplete + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals: Incomplete | None = ...) -> None: ... + def anonymous_struct_fields(self) -> Generator[Incomplete, None, None]: ... + def enumfields(self, expand_anonymous_struct_union: bool = ...) -> Generator[Incomplete, None, None]: ... + def force_flatten(self) -> None: ... + def get_cached_btype(self, ffi, finishlist, can_delay: bool = ...): ... + def finish_backend_type(self, ffi, finishlist) -> None: ... + def check_not_partial(self) -> None: ... + def build_backend_type(self, ffi, finishlist): ... + +class StructType(StructOrUnion): + kind: str + +class UnionType(StructOrUnion): + kind: str + +class EnumType(StructOrUnionOrEnum): + kind: str + partial: bool + partial_resolved: bool + name: Incomplete + enumerators: Incomplete + enumvalues: Incomplete + baseinttype: Incomplete + def __init__(self, name, enumerators, enumvalues, baseinttype: Incomplete | None = ...) -> None: ... + forcename: Incomplete + def force_the_name(self, forcename) -> None: ... + def check_not_partial(self) -> None: ... + def build_backend_type(self, ffi, finishlist): ... + def build_baseinttype(self, ffi, finishlist): ... + +def unknown_type(name, structname: Incomplete | None = ...): ... 
+def unknown_ptr_type(name, structname: Incomplete | None = ...): ... + +global_lock: Incomplete + +def get_typecache(backend): ... +def global_cache(srctype, ffi, funcname, *args, **kwds): ... +def pointer_cache(ffi, BType): ... +def attach_exception_info(e, name) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/pkgconfig.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/pkgconfig.pyi new file mode 100644 index 00000000..bb53098f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/pkgconfig.pyi @@ -0,0 +1,3 @@ +def merge_flags(cfg1, cfg2): ... +def call(libname, flag, encoding=...): ... +def flags_from_pkgconfig(libs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/recompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/recompiler.pyi new file mode 100644 index 00000000..ee17f6b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/recompiler.pyi @@ -0,0 +1,94 @@ +import io +from _typeshed import Incomplete +from typing_extensions import TypeAlias + +from .cffi_opcode import * + +VERSION_BASE: int +VERSION_EMBEDDED: int +VERSION_CHAR16CHAR32: int +USE_LIMITED_API: Incomplete + +class GlobalExpr: + name: Incomplete + address: Incomplete + type_op: Incomplete + size: Incomplete + check_value: Incomplete + def __init__(self, name, address, type_op, size: int = ..., check_value: int = ...) -> None: ... + def as_c_expr(self): ... + def as_python_expr(self): ... + +class FieldExpr: + name: Incomplete + field_offset: Incomplete + field_size: Incomplete + fbitsize: Incomplete + field_type_op: Incomplete + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op) -> None: ... + def as_c_expr(self): ... + def as_python_expr(self) -> None: ... + def as_field_python_expr(self): ... + +class StructUnionExpr: + name: Incomplete + type_index: Incomplete + flags: Incomplete + size: Incomplete + alignment: Incomplete + comment: Incomplete + first_field_index: Incomplete + c_fields: Incomplete + def __init__(self, name, type_index, flags, size, alignment, comment, first_field_index, c_fields) -> None: ... + def as_c_expr(self): ... + def as_python_expr(self): ... + +class EnumExpr: + name: Incomplete + type_index: Incomplete + size: Incomplete + signed: Incomplete + allenums: Incomplete + def __init__(self, name, type_index, size, signed, allenums) -> None: ... + def as_c_expr(self): ... + def as_python_expr(self): ... + +class TypenameExpr: + name: Incomplete + type_index: Incomplete + def __init__(self, name, type_index) -> None: ... + def as_c_expr(self): ... + def as_python_expr(self): ... + +class Recompiler: + ffi: Incomplete + module_name: Incomplete + target_is_python: Incomplete + def __init__(self, ffi, module_name, target_is_python: bool = ...) -> None: ... + def needs_version(self, ver) -> None: ... + cffi_types: Incomplete + def collect_type_table(self): ... + ALL_STEPS: Incomplete + def collect_step_tables(self): ... + def write_source_to_f(self, f, preamble) -> None: ... + def write_c_source_to_f(self, f, preamble) -> None: ... + def write_py_source_to_f(self, f) -> None: ... + +NativeIO: TypeAlias = io.StringIO + +def make_c_source(ffi, module_name, preamble, target_c_file, verbose: bool = ...): ... 
+def make_py_source(ffi, module_name, target_py_file, verbose: bool = ...): ... +def recompile( + ffi, + module_name, + preamble, + tmpdir: str = ..., + call_c_compiler: bool = ..., + c_file: Incomplete | None = ..., + source_extension: str = ..., + extradir: Incomplete | None = ..., + compiler_verbose: int = ..., + target: Incomplete | None = ..., + debug: Incomplete | None = ..., + **kwds, +): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/setuptools_ext.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/setuptools_ext.pyi new file mode 100644 index 00000000..beb58858 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/setuptools_ext.pyi @@ -0,0 +1,6 @@ +basestring = str + +def error(msg) -> None: ... +def execfile(filename, glob) -> None: ... +def add_cffi_module(dist, mod_spec) -> None: ... +def cffi_modules(dist, attr, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/vengine_cpy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/vengine_cpy.pyi new file mode 100644 index 00000000..dea9f16f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/vengine_cpy.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +class VCPythonEngine: + verifier: Incomplete + ffi: Incomplete + def __init__(self, verifier) -> None: ... + def patch_extension_kwds(self, kwds) -> None: ... + def find_module(self, module_name, path, so_suffixes): ... + def collect_types(self) -> None: ... + def write_source_to_f(self) -> None: ... + def load_library(self, flags: Incomplete | None = ...): ... + +cffimod_header: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/vengine_gen.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/vengine_gen.pyi new file mode 100644 index 00000000..d4477094 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/vengine_gen.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +class VGenericEngine: + verifier: Incomplete + ffi: Incomplete + export_symbols: Incomplete + def __init__(self, verifier) -> None: ... + def patch_extension_kwds(self, kwds) -> None: ... + def find_module(self, module_name, path, so_suffixes): ... + def collect_types(self) -> None: ... + def write_source_to_f(self) -> None: ... + def load_library(self, flags: int = ...): ... 
+ +cffimod_header: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/verifier.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/verifier.pyi new file mode 100644 index 00000000..3b9e558c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/cffi/cffi/verifier.pyi @@ -0,0 +1,39 @@ +import io +from _typeshed import Incomplete +from typing_extensions import TypeAlias + +NativeIO: TypeAlias = io.StringIO + +class Verifier: + ffi: Incomplete + preamble: Incomplete + flags: Incomplete + kwds: Incomplete + tmpdir: Incomplete + sourcefilename: Incomplete + modulefilename: Incomplete + ext_package: Incomplete + def __init__( + self, + ffi, + preamble, + tmpdir: Incomplete | None = ..., + modulename: Incomplete | None = ..., + ext_package: Incomplete | None = ..., + tag: str = ..., + force_generic_engine: bool = ..., + source_extension: str = ..., + flags: Incomplete | None = ..., + relative_to: Incomplete | None = ..., + **kwds, + ) -> None: ... + def write_source(self, file: Incomplete | None = ...) -> None: ... + def compile_module(self) -> None: ... + def load_library(self): ... + def get_module_name(self): ... + def get_extension(self): ... + def generates_python_module(self): ... + def make_relative_to(self, kwds, relative_to): ... + +def set_tmpdir(dirname) -> None: ... +def cleanup_tmpdir(tmpdir: Incomplete | None = ..., keep_so: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..d3e6aac2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/@tests/stubtest_allowlist.txt @@ -0,0 +1,25 @@ +chardet.langbulgarianmodel.BulgarianLangModel +chardet.langbulgarianmodel.Latin5BulgarianModel +chardet.langbulgarianmodel.Latin5_BulgarianCharToOrderMap +chardet.langbulgarianmodel.Win1251BulgarianModel +chardet.langbulgarianmodel.win1251BulgarianCharToOrderMap +chardet.langcyrillicmodel +chardet.langgreekmodel.GreekLangModel +chardet.langgreekmodel.Latin7GreekModel +chardet.langgreekmodel.Latin7_char_to_order_map +chardet.langgreekmodel.Win1253GreekModel +chardet.langgreekmodel.win1253_char_to_order_map +chardet.langhebrewmodel.HEBREW_LANG_MODEL +chardet.langhebrewmodel.WIN1255_CHAR_TO_ORDER_MAP +chardet.langhebrewmodel.Win1255HebrewModel +chardet.langhungarianmodel.HungarianLangModel +chardet.langhungarianmodel.Latin2HungarianModel +chardet.langhungarianmodel.Latin2_HungarianCharToOrderMap +chardet.langhungarianmodel.Win1250HungarianModel +chardet.langhungarianmodel.win1250HungarianCharToOrderMap +chardet.langthaimodel.TIS620CharToOrderMap +chardet.langthaimodel.TIS620ThaiModel +chardet.langthaimodel.ThaiLangModel +chardet.langturkishmodel.Latin5TurkishModel +chardet.langturkishmodel.Latin5_TurkishCharToOrderMap +chardet.langturkishmodel.TurkishLangModel diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/METADATA.toml new file mode 100644 index 00000000..a8d4f2a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/METADATA.toml @@ -0,0 +1,5 @@ +version = "5.0.*" +obsolete_since = "5.1.0" # 
Released on 2022-12-01 + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/__init__.pyi new file mode 100644 index 00000000..0ab16245 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/__init__.pyi @@ -0,0 +1,21 @@ +import sys + +from .universaldetector import UniversalDetector as UniversalDetector, _FinalResultType, _IntermediateResultType +from .version import VERSION as VERSION, __version__ as __version__ + +if sys.version_info >= (3, 8): + from typing import TypedDict +else: + from typing_extensions import TypedDict + +# unused in this module, but imported in multiple submodules +class _LangModelType(TypedDict): # noqa: Y049 + char_to_order_map: tuple[int, ...] + precedence_matrix: tuple[int, ...] + typical_positive_ratio: float + keep_english_letter: bool + charset_name: str + language: str + +def detect(byte_str: bytes | bytearray) -> _FinalResultType: ... +def detect_all(byte_str: bytes | bytearray, ignore_threshold: bool = ...) -> list[_IntermediateResultType]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/enums.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/enums.pyi new file mode 100644 index 00000000..71c9e10c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/enums.pyi @@ -0,0 +1,39 @@ +class InputState: + PURE_ASCII: int + ESC_ASCII: int + HIGH_BYTE: int + +class LanguageFilter: + CHINESE_SIMPLIFIED: int + CHINESE_TRADITIONAL: int + JAPANESE: int + KOREAN: int + NON_CJK: int + ALL: int + CHINESE: int + CJK: int + +class ProbingState: + DETECTING: int + FOUND_IT: int + NOT_ME: int + +class MachineState: + START: int + ERROR: int + ITS_ME: int + +class SequenceLikelihood: + NEGATIVE: int + UNLIKELY: int + LIKELY: int + POSITIVE: int + @classmethod + def get_num_categories(cls) -> int: ... + +class CharacterCategory: + UNDEFINED: int + LINE_BREAK: int + SYMBOL: int + DIGIT: int + CONTROL: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langbulgarianmodel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langbulgarianmodel.pyi new file mode 100644 index 00000000..07344de5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langbulgarianmodel.pyi @@ -0,0 +1,7 @@ +from . import _LangModelType + +Latin5_BulgarianCharToOrderMap: tuple[int, ...] +win1251BulgarianCharToOrderMap: tuple[int, ...] +BulgarianLangModel: tuple[int, ...] +Latin5BulgarianModel: _LangModelType +Win1251BulgarianModel: _LangModelType diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langcyrillicmodel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langcyrillicmodel.pyi new file mode 100644 index 00000000..22e7c52d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langcyrillicmodel.pyi @@ -0,0 +1,15 @@ +from . import _LangModelType + +KOI8R_char_to_order_map: tuple[int, ...] +win1251_char_to_order_map: tuple[int, ...] +latin5_char_to_order_map: tuple[int, ...] 
+macCyrillic_char_to_order_map: tuple[int, ...] +IBM855_char_to_order_map: tuple[int, ...] +IBM866_char_to_order_map: tuple[int, ...] +RussianLangModel: tuple[int, ...] +Koi8rModel: _LangModelType +Win1251CyrillicModel: _LangModelType +Latin5CyrillicModel: _LangModelType +MacCyrillicModel: _LangModelType +Ibm866Model: _LangModelType +Ibm855Model: _LangModelType diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langgreekmodel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langgreekmodel.pyi new file mode 100644 index 00000000..ceee125a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langgreekmodel.pyi @@ -0,0 +1,7 @@ +from . import _LangModelType + +Latin7_char_to_order_map: tuple[int, ...] +win1253_char_to_order_map: tuple[int, ...] +GreekLangModel: tuple[int, ...] +Latin7GreekModel: _LangModelType +Win1253GreekModel: _LangModelType diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langhebrewmodel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langhebrewmodel.pyi new file mode 100644 index 00000000..a17e10de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langhebrewmodel.pyi @@ -0,0 +1,5 @@ +from . import _LangModelType + +WIN1255_CHAR_TO_ORDER_MAP: tuple[int, ...] +HEBREW_LANG_MODEL: tuple[int, ...] +Win1255HebrewModel: _LangModelType diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langhungarianmodel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langhungarianmodel.pyi new file mode 100644 index 00000000..498c7da5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langhungarianmodel.pyi @@ -0,0 +1,7 @@ +from . import _LangModelType + +Latin2_HungarianCharToOrderMap: tuple[int, ...] +win1250HungarianCharToOrderMap: tuple[int, ...] +HungarianLangModel: tuple[int, ...] +Latin2HungarianModel: _LangModelType +Win1250HungarianModel: _LangModelType diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langthaimodel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langthaimodel.pyi new file mode 100644 index 00000000..eee2356e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langthaimodel.pyi @@ -0,0 +1,5 @@ +from . import _LangModelType + +TIS620CharToOrderMap: tuple[int, ...] +ThaiLangModel: tuple[int, ...] +TIS620ThaiModel: _LangModelType diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langturkishmodel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langturkishmodel.pyi new file mode 100644 index 00000000..6686f262 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/langturkishmodel.pyi @@ -0,0 +1,5 @@ +from . import _LangModelType + +Latin5_TurkishCharToOrderMap: tuple[int, ...] +TurkishLangModel: tuple[int, ...] 
+Latin5TurkishModel: _LangModelType diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/universaldetector.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/universaldetector.pyi new file mode 100644 index 00000000..a721160f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/universaldetector.pyi @@ -0,0 +1,29 @@ +from logging import Logger +from re import Pattern +from typing_extensions import TypedDict + +class _FinalResultType(TypedDict): + encoding: str + confidence: float + language: str + +class _IntermediateResultType(TypedDict): + encoding: str | None + confidence: float + language: str | None + +class UniversalDetector: + MINIMUM_THRESHOLD: float + HIGH_BYTE_DETECTOR: Pattern[bytes] + ESC_DETECTOR: Pattern[bytes] + WIN_BYTE_DETECTOR: Pattern[bytes] + ISO_WIN_MAP: dict[str, str] + + result: _IntermediateResultType + done: bool + lang_filter: int + logger: Logger + def __init__(self, lang_filter: int = ...) -> None: ... + def reset(self) -> None: ... + def feed(self, byte_str: bytes) -> None: ... + def close(self) -> _FinalResultType: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/version.pyi new file mode 100644 index 00000000..966073bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chardet/chardet/version.pyi @@ -0,0 +1,2 @@ +__version__: str +VERSION: list[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/METADATA.toml new file mode 100644 index 00000000..48faa382 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/METADATA.toml @@ -0,0 +1,3 @@ +version = "0.14.*" + +[tool.stubtest] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/__init__.pyi new file mode 100644 index 00000000..47280979 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/__init__.pyi @@ -0,0 +1,5 @@ +from .main import cli_main as cli_main, main as main +from .renderer import render as render +from .tokenizer import ChevronError as ChevronError + +__all__ = ["main", "render", "cli_main", "ChevronError"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/main.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/main.pyi new file mode 100644 index 00000000..a9e2f57e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/main.pyi @@ -0,0 +1,5 @@ +from _typeshed import FileDescriptorOrPath +from typing import Any + +def main(template: FileDescriptorOrPath, data: FileDescriptorOrPath | None = ..., **kwargs: Any) -> str: ... +def cli_main() -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/metadata.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/metadata.pyi new file mode 100644 index 00000000..c2ee2cab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/metadata.pyi @@ -0,0 +1 @@ +version: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/renderer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/renderer.pyi new file mode 100644 index 00000000..1ae8065a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/renderer.pyi @@ -0,0 +1,21 @@ +from _typeshed import StrPath, SupportsRead +from collections.abc import MutableSequence, Sequence +from typing import Any +from typing_extensions import Literal + +g_token_cache: dict[str, list[tuple[str, str]]] # undocumented +python3: Literal[True] + +def unicode(x: str, y: str) -> str: ... +def render( + template: SupportsRead[str] | str | Sequence[tuple[str, str]] = ..., + data: dict[str, Any] = ..., + partials_path: StrPath | None = ..., + partials_ext: str = ..., + partials_dict: dict[str, str] = ..., + padding: str = ..., + def_ldel: str | None = ..., + def_rdel: str | None = ..., + scopes: MutableSequence[int] | None = ..., + warn: bool = ..., +) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/tokenizer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/tokenizer.pyi new file mode 100644 index 00000000..b34ed1cc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/chevron/chevron/tokenizer.pyi @@ -0,0 +1,11 @@ +from collections.abc import Iterator + +class ChevronError(SyntaxError): ... + +def grab_literal(template: str, l_del: str | None) -> tuple[str, str]: ... # undocumented +def l_sa_check(template: str, literal: str, is_standalone: bool) -> bool | None: ... # undocumented +def r_sa_check(template: str, tag_type: str, is_standalone: bool) -> bool: ... # undocumented +def parse_tag(template: str, l_del: str | None, r_del: str | None) -> tuple[tuple[str, str], str]: ... # undocumented +def tokenize( + template: str, def_ldel: str | None = ..., def_rdel: str | None = ... +) -> Iterator[tuple[str, str]]: ... 
# undocumented diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/click-spinner/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/click-spinner/METADATA.toml new file mode 100644 index 00000000..37dc09b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/click-spinner/METADATA.toml @@ -0,0 +1 @@ +version = "0.1.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/click-spinner/click_spinner/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/click-spinner/click_spinner/__init__.pyi new file mode 100644 index 00000000..8b3a64fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/click-spinner/click_spinner/__init__.pyi @@ -0,0 +1,31 @@ +import threading +from collections.abc import Iterator +from types import TracebackType +from typing import Protocol +from typing_extensions import Literal, Self + +__version__: str + +class _Stream(Protocol): + def isatty(self) -> bool: ... + def flush(self) -> None: ... + def write(self, s: str) -> int: ... + +class Spinner: + spinner_cycle: Iterator[str] + disable: bool + beep: bool + force: bool + stream: _Stream + stop_running: threading.Event | None + spin_thread: threading.Thread | None + def __init__(self, beep: bool = ..., disable: bool = ..., force: bool = ..., stream: _Stream = ...) -> None: ... + def start(self) -> None: ... + def stop(self) -> None: ... + def init_spin(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> Literal[False]: ... + +def spinner(beep: bool = ..., disable: bool = ..., force: bool = ..., stream: _Stream = ...) -> Spinner: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/click-spinner/click_spinner/_version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/click-spinner/click_spinner/_version.pyi new file mode 100644 index 00000000..feb0b165 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/click-spinner/click_spinner/_version.pyi @@ -0,0 +1,5 @@ +from typing import Any + +version_json: str + +def get_versions() -> dict[str, Any]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..cee2f196 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/@tests/stubtest_allowlist.txt @@ -0,0 +1,46 @@ +# These are defined as ints, but later are converted to strings via magic: +colorama.ansi.AnsiBack.BLACK +colorama.ansi.AnsiBack.BLUE +colorama.ansi.AnsiBack.CYAN +colorama.ansi.AnsiBack.GREEN +colorama.ansi.AnsiBack.LIGHTBLACK_EX +colorama.ansi.AnsiBack.LIGHTBLUE_EX +colorama.ansi.AnsiBack.LIGHTCYAN_EX +colorama.ansi.AnsiBack.LIGHTGREEN_EX +colorama.ansi.AnsiBack.LIGHTMAGENTA_EX +colorama.ansi.AnsiBack.LIGHTRED_EX +colorama.ansi.AnsiBack.LIGHTWHITE_EX +colorama.ansi.AnsiBack.LIGHTYELLOW_EX +colorama.ansi.AnsiBack.MAGENTA +colorama.ansi.AnsiBack.RED +colorama.ansi.AnsiBack.RESET +colorama.ansi.AnsiBack.WHITE +colorama.ansi.AnsiBack.YELLOW +colorama.ansi.AnsiFore.BLACK +colorama.ansi.AnsiFore.BLUE +colorama.ansi.AnsiFore.CYAN +colorama.ansi.AnsiFore.GREEN +colorama.ansi.AnsiFore.LIGHTBLACK_EX +colorama.ansi.AnsiFore.LIGHTBLUE_EX +colorama.ansi.AnsiFore.LIGHTCYAN_EX +colorama.ansi.AnsiFore.LIGHTGREEN_EX +colorama.ansi.AnsiFore.LIGHTMAGENTA_EX +colorama.ansi.AnsiFore.LIGHTRED_EX +colorama.ansi.AnsiFore.LIGHTWHITE_EX +colorama.ansi.AnsiFore.LIGHTYELLOW_EX +colorama.ansi.AnsiFore.MAGENTA +colorama.ansi.AnsiFore.RED +colorama.ansi.AnsiFore.RESET +colorama.ansi.AnsiFore.WHITE +colorama.ansi.AnsiFore.YELLOW +colorama.ansi.AnsiStyle.BRIGHT +colorama.ansi.AnsiStyle.DIM +colorama.ansi.AnsiStyle.NORMAL +colorama.ansi.AnsiStyle.RESET_ALL + +# These are defined as None, but on initialization are set to correct values: +colorama.initialise.wrapped_stderr +colorama.initialise.wrapped_stdout + +# Not planning on writing stubs for tests: +colorama.tests.* diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..1fff1061 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,7 @@ +# These are only available on Windows: +colorama.winterm.WinColor +colorama.winterm.WinStyle +colorama.winterm.WinTerm + +# A re-export that's an implementation detail: +colorama.winterm.get_osfhandle diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/METADATA.toml new file mode 100644 index 00000000..34da2659 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/METADATA.toml @@ -0,0 +1,4 @@ +version = "0.4.*" + +[tool.stubtest] +platforms = ["linux", "win32"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/__init__.pyi new file mode 100644 index 00000000..e6d15ec7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/__init__.pyi @@ -0,0 +1,9 @@ +from .ansi import Back as 
Back, Cursor as Cursor, Fore as Fore, Style as Style +from .ansitowin32 import AnsiToWin32 as AnsiToWin32 +from .initialise import ( + colorama_text as colorama_text, + deinit as deinit, + init as init, + just_fix_windows_console as just_fix_windows_console, + reinit as reinit, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/ansi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/ansi.pyi new file mode 100644 index 00000000..7224a7a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/ansi.pyi @@ -0,0 +1,69 @@ +CSI: str +OSC: str +BEL: str + +def code_to_chars(code: int) -> str: ... +def set_title(title: str) -> str: ... +def clear_screen(mode: int = ...) -> str: ... +def clear_line(mode: int = ...) -> str: ... + +class AnsiCodes: + def __init__(self) -> None: ... + +class AnsiCursor: + def UP(self, n: int = ...) -> str: ... + def DOWN(self, n: int = ...) -> str: ... + def FORWARD(self, n: int = ...) -> str: ... + def BACK(self, n: int = ...) -> str: ... + def POS(self, x: int = ..., y: int = ...) -> str: ... + +# All attributes in the following classes are string in instances and int in the class. +# We use str since that is the common case for users. +class AnsiFore(AnsiCodes): + BLACK: str = ... + RED: str = ... + GREEN: str = ... + YELLOW: str = ... + BLUE: str = ... + MAGENTA: str = ... + CYAN: str = ... + WHITE: str = ... + RESET: str = ... + LIGHTBLACK_EX: str = ... + LIGHTRED_EX: str = ... + LIGHTGREEN_EX: str = ... + LIGHTYELLOW_EX: str = ... + LIGHTBLUE_EX: str = ... + LIGHTMAGENTA_EX: str = ... + LIGHTCYAN_EX: str = ... + LIGHTWHITE_EX: str = ... + +class AnsiBack(AnsiCodes): + BLACK: str = ... + RED: str = ... + GREEN: str = ... + YELLOW: str = ... + BLUE: str = ... + MAGENTA: str = ... + CYAN: str = ... + WHITE: str = ... + RESET: str = ... + LIGHTBLACK_EX: str = ... + LIGHTRED_EX: str = ... + LIGHTGREEN_EX: str = ... + LIGHTYELLOW_EX: str = ... + LIGHTBLUE_EX: str = ... + LIGHTMAGENTA_EX: str = ... + LIGHTCYAN_EX: str = ... + LIGHTWHITE_EX: str = ... + +class AnsiStyle(AnsiCodes): + BRIGHT: str = ... + DIM: str = ... + NORMAL: str = ... + RESET_ALL: str = ... + +Fore: AnsiFore +Back: AnsiBack +Style: AnsiStyle +Cursor: AnsiCursor diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/ansitowin32.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/ansitowin32.pyi new file mode 100644 index 00000000..f406f2e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/ansitowin32.pyi @@ -0,0 +1,52 @@ +import sys +from _typeshed import SupportsWrite +from collections.abc import Callable, Sequence +from re import Pattern +from types import TracebackType +from typing import Any, TextIO +from typing_extensions import TypeAlias + +if sys.platform == "win32": + from .winterm import WinTerm + + winterm: WinTerm +else: + winterm: None + +class StreamWrapper: + def __init__(self, wrapped: TextIO, converter: SupportsWrite[str]) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __enter__(self, *args: object, **kwargs: object) -> TextIO: ... + def __exit__( + self, __t: type[BaseException] | None, __value: BaseException | None, __traceback: TracebackType | None, **kwargs: Any + ) -> None: ... + def write(self, text: str) -> None: ... + def isatty(self) -> bool: ... 
+ @property + def closed(self) -> bool: ... + +_WinTermCall: TypeAlias = Callable[[int | None, bool, bool], None] +_WinTermCallDict: TypeAlias = dict[int, tuple[_WinTermCall] | tuple[_WinTermCall, int] | tuple[_WinTermCall, int, bool]] + +class AnsiToWin32: + ANSI_CSI_RE: Pattern[str] = ... + ANSI_OSC_RE: Pattern[str] = ... + wrapped: TextIO = ... + autoreset: bool = ... + stream: StreamWrapper = ... + strip: bool = ... + convert: bool = ... + win32_calls: _WinTermCallDict = ... + on_stderr: bool = ... + def __init__(self, wrapped: TextIO, convert: bool | None = ..., strip: bool | None = ..., autoreset: bool = ...) -> None: ... + def should_wrap(self) -> bool: ... + def get_win32_calls(self) -> _WinTermCallDict: ... + def write(self, text: str) -> None: ... + def reset_all(self) -> None: ... + def write_and_convert(self, text: str) -> None: ... + def write_plain_text(self, text: str, start: int, end: int) -> None: ... + def convert_ansi(self, paramstring: str, command: str) -> None: ... + def extract_params(self, command: str, paramstring: str) -> tuple[int, ...]: ... + def call_win32(self, command: str, params: Sequence[int]) -> None: ... + def convert_osc(self, text: str) -> str: ... + def flush(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/initialise.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/initialise.pyi new file mode 100644 index 00000000..25107f16 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/initialise.pyi @@ -0,0 +1,23 @@ +from contextlib import AbstractContextManager +from typing import Any, TextIO, TypeVar + +from .ansitowin32 import StreamWrapper + +_TextIOT = TypeVar("_TextIOT", bound=TextIO) + +orig_stdout: TextIO | None +orig_stderr: TextIO | None +wrapped_stdout: TextIO | StreamWrapper +wrapped_stderr: TextIO | StreamWrapper +atexit_done: bool +fixed_windows_console: bool + +def reset_all() -> None: ... +def init(autoreset: bool = ..., convert: bool | None = ..., strip: bool | None = ..., wrap: bool = ...) -> None: ... +def deinit() -> None: ... +def colorama_text(*args: Any, **kwargs: Any) -> AbstractContextManager[None]: ... +def reinit() -> None: ... +def wrap_stream( + stream: _TextIOT, convert: bool | None, strip: bool | None, autoreset: bool, wrap: bool +) -> _TextIOT | StreamWrapper: ... +def just_fix_windows_console() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/win32.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/win32.pyi new file mode 100644 index 00000000..38825fa7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/win32.pyi @@ -0,0 +1,34 @@ +import sys +from collections.abc import Callable +from typing_extensions import Literal + +STDOUT: Literal[-11] +STDERR: Literal[-12] +ENABLE_VIRTUAL_TERMINAL_PROCESSING: int + +if sys.platform == "win32": + from ctypes import LibraryLoader, Structure, WinDLL, wintypes + + windll: LibraryLoader[WinDLL] + COORD = wintypes._COORD + + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + dwSize: COORD + dwCursorPosition: COORD + wAttributes: wintypes.WORD + srWindow: wintypes.SMALL_RECT + dwMaximumWindowSize: COORD + def winapi_test() -> bool: ... + def GetConsoleScreenBufferInfo(stream_id: int = ...) -> CONSOLE_SCREEN_BUFFER_INFO: ... 
+ def SetConsoleTextAttribute(stream_id: int, attrs: wintypes.WORD) -> wintypes.BOOL: ... + def SetConsoleCursorPosition(stream_id: int, position: COORD, adjust: bool = ...) -> wintypes.BOOL: ... + def FillConsoleOutputCharacter(stream_id: int, char: str, length: int, start: COORD) -> int: ... + def FillConsoleOutputAttribute(stream_id: int, attr: int, length: int, start: COORD) -> wintypes.BOOL: ... + def SetConsoleTitle(title: str) -> wintypes.BOOL: ... + def GetConsoleMode(handle: int) -> int: ... + def SetConsoleMode(handle: int, mode: int) -> None: ... + +else: + windll: None + SetConsoleTextAttribute: Callable[..., None] + winapi_test: Callable[..., None] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/winterm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/winterm.pyi new file mode 100644 index 00000000..0de2c6d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/colorama/colorama/winterm.pyi @@ -0,0 +1,37 @@ +import sys + +if sys.platform == "win32": + from . import win32 + + class WinColor: + BLACK: int = ... + BLUE: int = ... + GREEN: int = ... + CYAN: int = ... + RED: int = ... + MAGENTA: int = ... + YELLOW: int = ... + GREY: int = ... + + class WinStyle: + NORMAL: int = ... + BRIGHT: int = ... + BRIGHT_BACKGROUND: int = ... + + class WinTerm: + def __init__(self) -> None: ... + def get_attrs(self) -> int: ... + def set_attrs(self, value: int) -> None: ... + def reset_all(self, on_stderr: bool | None = ...) -> None: ... + def fore(self, fore: int | None = ..., light: bool = ..., on_stderr: bool = ...) -> None: ... + def back(self, back: int | None = ..., light: bool = ..., on_stderr: bool = ...) -> None: ... + def style(self, style: int | None = ..., on_stderr: bool = ...) -> None: ... + def set_console(self, attrs: int | None = ..., on_stderr: bool = ...) -> None: ... + def get_position(self, handle: int) -> win32.COORD: ... + def set_cursor_position(self, position: win32.COORD | None = ..., on_stderr: bool = ...) -> None: ... + def cursor_adjust(self, x: int, y: int, on_stderr: bool = ...) -> None: ... + def erase_screen(self, mode: int = ..., on_stderr: bool = ...) -> None: ... + def erase_line(self, mode: int = ..., on_stderr: bool = ...) -> None: ... + def set_title(self, title: str) -> None: ... + +def enable_vt_processing(fd: int) -> bool: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/METADATA.toml new file mode 100644 index 00000000..31ce4482 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/METADATA.toml @@ -0,0 +1,4 @@ +version = "0.9.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/__init__.pyi new file mode 100644 index 00000000..9d6feae3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/__init__.pyi @@ -0,0 +1,5 @@ +from commonmark.blocks import Parser as Parser +from commonmark.dump import dumpAST as dumpAST, dumpJSON as dumpJSON +from commonmark.main import commonmark as commonmark +from commonmark.render.html import HtmlRenderer as HtmlRenderer +from commonmark.render.rst import ReStructuredTextRenderer as ReStructuredTextRenderer diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/blocks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/blocks.pyi new file mode 100644 index 00000000..7a5bbdc0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/blocks.pyi @@ -0,0 +1,167 @@ +from _typeshed import Incomplete +from typing import Any + +CODE_INDENT: int +reHtmlBlockOpen: Any +reHtmlBlockClose: Any +reThematicBreak: Any +reMaybeSpecial: Any +reNonSpace: Any +reBulletListMarker: Any +reOrderedListMarker: Any +reATXHeadingMarker: Any +reCodeFence: Any +reClosingCodeFence: Any +reSetextHeadingLine: Any +reLineEnding: Any + +def is_blank(s): ... +def is_space_or_tab(s): ... +def peek(ln, pos): ... +def ends_with_blank_line(block): ... +def parse_list_marker(parser, container): ... +def lists_match(list_data, item_data): ... + +class Block: + accepts_lines: Any + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...) -> None: ... + @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t) -> None: ... + +class Document(Block): + accepts_lines: bool + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t): ... + +class List(Block): + accepts_lines: bool + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t): ... + +class BlockQuote(Block): + accepts_lines: bool + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t): ... + +class Item(Block): + accepts_lines: bool + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... 
+ @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t): ... + +class Heading(Block): + accepts_lines: bool + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t): ... + +class ThematicBreak(Block): + accepts_lines: bool + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t): ... + +class CodeBlock(Block): + accepts_lines: bool + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t): ... + +class HtmlBlock(Block): + accepts_lines: bool + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t): ... + +class Paragraph(Block): + accepts_lines: bool + @staticmethod + def continue_(parser: Incomplete | None = ..., container: Incomplete | None = ...): ... + @staticmethod + def finalize(parser: Incomplete | None = ..., block: Incomplete | None = ...) -> None: ... + @staticmethod + def can_contain(t): ... + +class BlockStarts: + METHODS: Any + @staticmethod + def block_quote(parser, container: Incomplete | None = ...): ... + @staticmethod + def atx_heading(parser, container: Incomplete | None = ...): ... + @staticmethod + def fenced_code_block(parser, container: Incomplete | None = ...): ... + @staticmethod + def html_block(parser, container: Incomplete | None = ...): ... + @staticmethod + def setext_heading(parser, container: Incomplete | None = ...): ... + @staticmethod + def thematic_break(parser, container: Incomplete | None = ...): ... + @staticmethod + def list_item(parser, container: Incomplete | None = ...): ... + @staticmethod + def indented_code_block(parser, container: Incomplete | None = ...): ... + +class Parser: + doc: Any + block_starts: Any + tip: Any + oldtip: Any + current_line: str + line_number: int + offset: int + column: int + next_nonspace: int + next_nonspace_column: int + indent: int + indented: bool + blank: bool + partially_consumed_tab: bool + all_closed: bool + last_matched_container: Any + refmap: Any + last_line_length: int + inline_parser: Any + options: Any + def __init__(self, options=...) -> None: ... + def add_line(self) -> None: ... + def add_child(self, tag, offset): ... + def close_unmatched_blocks(self) -> None: ... + def find_next_nonspace(self) -> None: ... + def advance_next_nonspace(self) -> None: ... + def advance_offset(self, count, columns) -> None: ... + def incorporate_line(self, ln) -> None: ... + def finalize(self, block, line_number) -> None: ... + def process_inlines(self, block) -> None: ... + def parse(self, my_input): ... 
+ +CAMEL_RE: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/cmark.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/cmark.pyi new file mode 100644 index 00000000..7e7363e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/cmark.pyi @@ -0,0 +1 @@ +def main() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/common.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/common.pyi new file mode 100644 index 00000000..c55a6796 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/common.pyi @@ -0,0 +1,35 @@ +import html +from typing import Any + +HTMLunescape = html.unescape +ENTITY: str +TAGNAME: str +ATTRIBUTENAME: str +UNQUOTEDVALUE: str +SINGLEQUOTEDVALUE: str +DOUBLEQUOTEDVALUE: str +ATTRIBUTEVALUE: Any +ATTRIBUTEVALUESPEC: Any +ATTRIBUTE: Any +OPENTAG: Any +CLOSETAG: Any +HTMLCOMMENT: str +PROCESSINGINSTRUCTION: str +DECLARATION: Any +CDATA: str +HTMLTAG: Any +reHtmlTag: Any +reBackslashOrAmp: Any +ESCAPABLE: str +reEntityOrEscapedChar: Any +XMLSPECIAL: str +reXmlSpecial: Any + +def unescape_char(s): ... +def unescape_string(s): ... +def normalize_uri(uri): ... + +UNSAFE_MAP: Any + +def replace_unsafe_char(s): ... +def escape_xml(s): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/dump.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/dump.pyi new file mode 100644 index 00000000..821fb95b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/dump.pyi @@ -0,0 +1,3 @@ +def prepare(obj, topnode: bool = ...): ... +def dumpJSON(obj): ... +def dumpAST(obj, ind: int = ..., topnode: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/entitytrans.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/entitytrans.pyi new file mode 100644 index 00000000..b8b0efae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/entitytrans.pyi @@ -0,0 +1 @@ +def _unescape(s: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/inlines.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/inlines.pyi new file mode 100644 index 00000000..e53f22a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/inlines.pyi @@ -0,0 +1,65 @@ +import html +from typing import Any + +HTMLunescape = html.unescape +ESCAPED_CHAR: Any +rePunctuation: Any +reLinkTitle: Any +reLinkDestinationBraces: Any +reEscapable: Any +reEntityHere: Any +reTicks: Any +reTicksHere: Any +reEllipses: Any +reDash: Any +reEmailAutolink: Any +reAutolink: Any +reSpnl: Any +reWhitespaceChar: Any +reWhitespace: Any +reUnicodeWhitespaceChar: Any +reFinalSpace: Any +reInitialSpace: Any +reSpaceAtEndOfLine: Any +reLinkLabel: Any +reMain: Any + +def text(s): ... +def smart_dashes(chars): ... 
+ +class InlineParser: + subject: str + brackets: Any + pos: int + refmap: Any + options: Any + def __init__(self, options=...) -> None: ... + def match(self, regexString): ... + def peek(self): ... + def spnl(self): ... + def parseBackticks(self, block): ... + def parseBackslash(self, block): ... + def parseAutolink(self, block): ... + def parseHtmlTag(self, block): ... + def scanDelims(self, c): ... + delimiters: Any + def handleDelim(self, cc, block): ... + def removeDelimiter(self, delim) -> None: ... + @staticmethod + def removeDelimitersBetween(bottom, top) -> None: ... + def processEmphasis(self, stack_bottom) -> None: ... + def parseLinkTitle(self): ... + def parseLinkDestination(self): ... + def parseLinkLabel(self): ... + def parseOpenBracket(self, block): ... + def parseBang(self, block): ... + def parseCloseBracket(self, block): ... + def addBracket(self, node, index, image) -> None: ... + def removeBracket(self) -> None: ... + def parseEntity(self, block): ... + def parseString(self, block): ... + def parseNewline(self, block): ... + def parseReference(self, s, refmap): ... + def parseInline(self, block): ... + def parseInlines(self, block) -> None: ... + parse: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/main.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/main.pyi new file mode 100644 index 00000000..c7c6b421 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/main.pyi @@ -0,0 +1,3 @@ +from typing_extensions import Literal + +def commonmark(text: str, format: Literal["html", "json", "ast", "rst"] = ...) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/node.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/node.pyi new file mode 100644 index 00000000..8760df61 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/node.pyi @@ -0,0 +1,51 @@ +from typing import Any + +reContainer: Any + +def is_container(node): ... + +class NodeWalker: + current: Any + root: Any + entering: bool + def __init__(self, root) -> None: ... + def __next__(self): ... + next: Any + def __iter__(self): ... + def nxt(self): ... + def resume_at(self, node, entering) -> None: ... + +class Node: + t: Any + parent: Any + first_child: Any + last_child: Any + prv: Any + nxt: Any + sourcepos: Any + last_line_blank: bool + last_line_checked: bool + is_open: bool + string_content: str + literal: Any + list_data: Any + info: Any + destination: Any + title: Any + is_fenced: bool + fence_char: Any + fence_length: int + fence_offset: Any + level: Any + on_enter: Any + on_exit: Any + def __init__(self, node_type, sourcepos) -> None: ... + def pretty(self) -> None: ... + def normalize(self) -> None: ... + def is_container(self): ... + def append_child(self, child) -> None: ... + def prepend_child(self, child) -> None: ... + def unlink(self) -> None: ... + def insert_after(self, sibling) -> None: ... + def insert_before(self, sibling) -> None: ... + def walker(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/normalize_reference.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/normalize_reference.pyi new file mode 100644 index 00000000..96a3414d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/normalize_reference.pyi @@ -0,0 +1 @@ +def normalize_reference(string): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/html.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/html.pyi new file mode 100644 index 00000000..02637833 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/html.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete +from typing import Any + +from commonmark.render.renderer import Renderer + +reUnsafeProtocol: Any +reSafeDataProtocol: Any + +def potentially_unsafe(url): ... + +class HtmlRenderer(Renderer): + disable_tags: int + last_out: str + options: Any + def __init__(self, options=...) -> None: ... + def escape(self, text): ... + def tag(self, name, attrs: Incomplete | None = ..., selfclosing: Incomplete | None = ...) -> None: ... + def text(self, node, entering: Incomplete | None = ...) -> None: ... + def softbreak(self, node: Incomplete | None = ..., entering: Incomplete | None = ...) -> None: ... + def linebreak(self, node: Incomplete | None = ..., entering: Incomplete | None = ...) -> None: ... + def link(self, node, entering) -> None: ... + def image(self, node, entering) -> None: ... + def emph(self, node, entering) -> None: ... + def strong(self, node, entering) -> None: ... + def paragraph(self, node, entering) -> None: ... + def heading(self, node, entering) -> None: ... + def code(self, node, entering) -> None: ... + def code_block(self, node, entering) -> None: ... + def thematic_break(self, node, entering) -> None: ... + def block_quote(self, node, entering) -> None: ... + def list(self, node, entering) -> None: ... + def item(self, node, entering) -> None: ... + def html_inline(self, node, entering) -> None: ... + def html_block(self, node, entering) -> None: ... + def custom_inline(self, node, entering) -> None: ... + def custom_block(self, node, entering) -> None: ... + def out(self, s) -> None: ... + def attrs(self, node): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/renderer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/renderer.pyi new file mode 100644 index 00000000..b8e2e46f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/renderer.pyi @@ -0,0 +1,7 @@ +class Renderer: + buf: str + last_out: str + def render(self, ast): ... + def lit(self, s) -> None: ... + def cr(self) -> None: ... + def out(self, s) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/rst.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/rst.pyi new file mode 100644 index 00000000..9357bfd9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/commonmark/commonmark/render/rst.pyi @@ -0,0 +1,26 @@ +from typing import Any + +from commonmark.render.renderer import Renderer + +class ReStructuredTextRenderer(Renderer): + indent_char: Any + indent_length: int + def __init__(self, indent_char: str = ...) -> None: ... + def lit(self, s): ... + def cr(self) -> None: ... + def indent_lines(self, literal, indent_length: int = ...): ... + def document(self, node, entering) -> None: ... + def softbreak(self, node, entering) -> None: ... + def linebreak(self, node, entering) -> None: ... + def text(self, node, entering) -> None: ... + def emph(self, node, entering) -> None: ... + def strong(self, node, entering) -> None: ... + def paragraph(self, node, entering) -> None: ... + def link(self, node, entering) -> None: ... + def image(self, node, entering) -> None: ... + def code(self, node, entering) -> None: ... + def code_block(self, node, entering) -> None: ... + def list(self, node, entering) -> None: ... + def item(self, node, entering) -> None: ... + def block_quote(self, node, entering) -> None: ... + def heading(self, node, entering) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/METADATA.toml new file mode 100644 index 00000000..7431acfe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/METADATA.toml @@ -0,0 +1 @@ +version = "0.7.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/__init__.pyi new file mode 100644 index 00000000..8bc6c6cd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/__init__.pyi @@ -0,0 +1,6 @@ +from . 
import items as items +from .console_menu import ConsoleMenu as ConsoleMenu, Screen as Screen, clear_terminal as clear_terminal +from .menu_formatter import MenuFormatBuilder as MenuFormatBuilder +from .multiselect_menu import MultiSelectMenu as MultiSelectMenu +from .prompt_utils import PromptUtils as PromptUtils +from .selection_menu import SelectionMenu as SelectionMenu diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/console_menu.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/console_menu.pyi new file mode 100644 index 00000000..0d701717 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/console_menu.pyi @@ -0,0 +1,87 @@ +from collections.abc import Callable + +from consolemenu.menu_formatter import MenuFormatBuilder as MenuFormatBuilder +from consolemenu.screen import Screen as Screen + +class ConsoleMenu: + currently_active_menu: ConsoleMenu | None + screen: Screen + clear_screen_before_render: bool + formatter: MenuFormatBuilder + title: str | Callable[[], str] | None + subtitle: str | Callable[[], str] | None + prologue_text: str | Callable[[], str] | None + epilogue_text: str | Callable[[], str] | None + highlight: None + normal: None + show_exit_option: bool + items: list[MenuItem] + parent: ConsoleMenu | None + exit_item: ExitItem + current_option: int + selected_option: int + returned_value: object | None + should_exit: bool + previous_active_menu: ConsoleMenu | None + def __init__( + self, + title: str | Callable[[], str] | None = ..., + subtitle: str | Callable[[], str] | None = ..., + screen: Screen | None = ..., + formatter: MenuFormatBuilder | None = ..., + prologue_text: str | Callable[[], str] | None = ..., + epilogue_text: str | Callable[[], str] | None = ..., + clear_screen: bool = ..., + show_exit_option: bool = ..., + exit_option_text: str = ..., + ) -> None: ... + @property + def current_item(self) -> MenuItem | None: ... + @property + def selected_item(self) -> MenuItem | None: ... + def append_item(self, item: MenuItem) -> None: ... + def remove_item(self, item: MenuItem) -> bool: ... + def add_exit(self) -> bool: ... + def remove_exit(self) -> bool: ... + def is_selected_item_exit(self) -> bool: ... + def start(self, show_exit_option: bool | None = ...) -> None: ... + def show(self, show_exit_option: bool | None = ...) -> None: ... + def draw(self) -> None: ... + def is_running(self) -> bool: ... + def wait_for_start(self, timeout: float | None = ...) -> bool: ... + def is_alive(self) -> bool: ... + def pause(self) -> None: ... + def resume(self) -> None: ... + def join(self, timeout: float | None = ...) -> None: ... + def get_input(self) -> int: ... + def process_user_input(self) -> int | None: ... + def go_to(self, option: int) -> None: ... + def go_down(self) -> None: ... + def go_up(self) -> None: ... + def select(self) -> None: ... + def exit(self) -> None: ... + def clear_screen(self) -> None: ... + def get_title(self) -> str: ... + def get_subtitle(self) -> str: ... + def get_prologue_text(self) -> str: ... + def get_epilogue_text(self) -> str: ... + +class MenuItem: + text: str + menu: ConsoleMenu | None + should_exit: bool + index_item_separator: str + def __init__(self, text: str | Callable[[], str], menu: ConsoleMenu | None = ..., should_exit: bool = ...) -> None: ... + def show(self, index: int) -> str: ... + def set_up(self) -> None: ... 
+ def action(self) -> None: ... + def clean_up(self) -> None: ... + def get_return(self) -> object: ... + def __eq__(self, o: MenuItem) -> bool: ... # type: ignore[override] + def get_text(self) -> str: ... + +class ExitItem(MenuItem): + def __init__(self, text: str | Callable[[], str] = ..., menu: ConsoleMenu | None = ...) -> None: ... + def show(self, index: int, available_width: None = ...) -> str: ... + +def clear_terminal() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/__init__.pyi new file mode 100644 index 00000000..f43b6c56 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/__init__.pyi @@ -0,0 +1,14 @@ +from .menu_borders import ( + AsciiBorderStyle as AsciiBorderStyle, + DoubleLineBorderStyle as DoubleLineBorderStyle, + DoubleLineOuterLightInnerBorderStyle as DoubleLineOuterLightInnerBorderStyle, + HeavyBorderStyle as HeavyBorderStyle, + HeavyOuterLightInnerBorderStyle as HeavyOuterLightInnerBorderStyle, + LightBorderStyle as LightBorderStyle, + MenuBorderStyle as MenuBorderStyle, + MenuBorderStyleFactory as MenuBorderStyleFactory, + MenuBorderStyleType as MenuBorderStyleType, +) +from .menu_margins import MenuMargins as MenuMargins +from .menu_padding import MenuPadding as MenuPadding +from .menu_style import MenuStyle as MenuStyle diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_borders.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_borders.pyi new file mode 100644 index 00000000..9710b4ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_borders.pyi @@ -0,0 +1,194 @@ +import logging + +class MenuBorderStyle: + @property + def bottom_left_corner(self) -> str: ... + @property + def bottom_right_corner(self) -> str: ... + @property + def inner_horizontal(self) -> str: ... + @property + def inner_vertical(self) -> str: ... + @property + def intersection(self) -> str: ... + @property + def outer_horizontal(self) -> str: ... + @property + def outer_horizontal_inner_down(self) -> str: ... + @property + def outer_horizontal_inner_up(self) -> str: ... + @property + def outer_vertical(self) -> str: ... + @property + def outer_vertical_inner_left(self) -> str: ... + @property + def outer_vertical_inner_right(self) -> str: ... + @property + def top_left_corner(self) -> str: ... + @property + def top_right_corner(self) -> str: ... + +class AsciiBorderStyle(MenuBorderStyle): + @property + def bottom_left_corner(self) -> str: ... + @property + def bottom_right_corner(self) -> str: ... + @property + def inner_horizontal(self) -> str: ... + @property + def inner_vertical(self) -> str: ... + @property + def intersection(self) -> str: ... + @property + def outer_horizontal(self) -> str: ... + @property + def outer_horizontal_inner_down(self) -> str: ... + @property + def outer_horizontal_inner_up(self) -> str: ... + @property + def outer_vertical(self) -> str: ... + @property + def outer_vertical_inner_left(self) -> str: ... + @property + def outer_vertical_inner_right(self) -> str: ... + @property + def top_left_corner(self) -> str: ... + @property + def top_right_corner(self) -> str: ... 
+ +class LightBorderStyle(MenuBorderStyle): + @property + def bottom_left_corner(self) -> str: ... + @property + def bottom_right_corner(self) -> str: ... + @property + def inner_horizontal(self) -> str: ... + @property + def inner_vertical(self) -> str: ... + @property + def intersection(self) -> str: ... + @property + def outer_horizontal(self) -> str: ... + @property + def outer_horizontal_inner_down(self) -> str: ... + @property + def outer_horizontal_inner_up(self) -> str: ... + @property + def outer_vertical(self) -> str: ... + @property + def outer_vertical_inner_left(self) -> str: ... + @property + def outer_vertical_inner_right(self) -> str: ... + @property + def top_left_corner(self) -> str: ... + @property + def top_right_corner(self) -> str: ... + +class HeavyBorderStyle(MenuBorderStyle): + @property + def bottom_left_corner(self) -> str: ... + @property + def bottom_right_corner(self) -> str: ... + @property + def inner_horizontal(self) -> str: ... + @property + def inner_vertical(self) -> str: ... + @property + def intersection(self) -> str: ... + @property + def outer_horizontal(self) -> str: ... + @property + def outer_horizontal_inner_down(self) -> str: ... + @property + def outer_horizontal_inner_up(self) -> str: ... + @property + def outer_vertical(self) -> str: ... + @property + def outer_vertical_inner_left(self) -> str: ... + @property + def outer_vertical_inner_right(self) -> str: ... + @property + def top_left_corner(self) -> str: ... + @property + def top_right_corner(self) -> str: ... + +class HeavyOuterLightInnerBorderStyle(HeavyBorderStyle): + @property + def inner_horizontal(self) -> str: ... + @property + def inner_vertical(self) -> str: ... + @property + def intersection(self) -> str: ... + @property + def outer_horizontal_inner_down(self) -> str: ... + @property + def outer_horizontal_inner_up(self) -> str: ... + @property + def outer_vertical_inner_left(self) -> str: ... + @property + def outer_vertical_inner_right(self) -> str: ... + +class DoubleLineBorderStyle(MenuBorderStyle): + @property + def bottom_left_corner(self) -> str: ... + @property + def bottom_right_corner(self) -> str: ... + @property + def inner_horizontal(self) -> str: ... + @property + def inner_vertical(self) -> str: ... + @property + def intersection(self) -> str: ... + @property + def outer_horizontal(self) -> str: ... + @property + def outer_horizontal_inner_down(self) -> str: ... + @property + def outer_horizontal_inner_up(self) -> str: ... + @property + def outer_vertical(self) -> str: ... + @property + def outer_vertical_inner_left(self) -> str: ... + @property + def outer_vertical_inner_right(self) -> str: ... + @property + def top_left_corner(self) -> str: ... + @property + def top_right_corner(self) -> str: ... + +class DoubleLineOuterLightInnerBorderStyle(DoubleLineBorderStyle): + @property + def inner_horizontal(self) -> str: ... + @property + def inner_vertical(self) -> str: ... + @property + def intersection(self) -> str: ... + @property + def outer_horizontal_inner_down(self) -> str: ... + @property + def outer_horizontal_inner_up(self) -> str: ... + @property + def outer_vertical_inner_left(self) -> str: ... + @property + def outer_vertical_inner_right(self) -> str: ... + +class MenuBorderStyleType: + ASCII_BORDER: int + LIGHT_BORDER: int + HEAVY_BORDER: int + DOUBLE_LINE_BORDER: int + HEAVY_OUTER_LIGHT_INNER_BORDER: int + DOUBLE_LINE_OUTER_LIGHT_INNER_BORDER: int + +class MenuBorderStyleFactory: + logger: logging.Logger + def __init__(self) -> None: ... 
+ def create_border(self, border_style_type: MenuBorderStyleType) -> MenuBorderStyle: ... + def create_ascii_border(self) -> AsciiBorderStyle: ... + def create_light_border(self) -> LightBorderStyle: ... + def create_heavy_border(self) -> HeavyBorderStyle: ... + def create_heavy_outer_light_inner_border(self) -> HeavyOuterLightInnerBorderStyle: ... + def create_doubleline_border(self) -> DoubleLineBorderStyle: ... + def create_doubleline_outer_light_inner_border(self) -> DoubleLineOuterLightInnerBorderStyle: ... + @staticmethod + def is_win_python35_or_earlier() -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_margins.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_margins.pyi new file mode 100644 index 00000000..f37b3338 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_margins.pyi @@ -0,0 +1,18 @@ +class MenuMargins: + def __init__(self, top: int = ..., left: int = ..., bottom: int = ..., right: int = ...) -> None: ... + @property + def left(self) -> int: ... + @left.setter + def left(self, left: int) -> None: ... + @property + def right(self) -> int: ... + @right.setter + def right(self, right: int) -> None: ... + @property + def top(self) -> int: ... + @top.setter + def top(self, top: int) -> None: ... + @property + def bottom(self) -> int: ... + @bottom.setter + def bottom(self, bottom: int) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_padding.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_padding.pyi new file mode 100644 index 00000000..46855b29 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_padding.pyi @@ -0,0 +1,18 @@ +class MenuPadding: + def __init__(self, top: int = ..., left: int = ..., bottom: int = ..., right: int = ...) -> None: ... + @property + def left(self) -> int: ... + @left.setter + def left(self, left: int) -> None: ... + @property + def right(self) -> int: ... + @right.setter + def right(self, right: int) -> None: ... + @property + def top(self) -> int: ... + @top.setter + def top(self, top: int) -> None: ... + @property + def bottom(self) -> int: ... + @bottom.setter + def bottom(self, bottom: int) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_style.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_style.pyi new file mode 100644 index 00000000..e7e3c11c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/format/menu_style.pyi @@ -0,0 +1,29 @@ +from consolemenu.format.menu_borders import MenuBorderStyle as MenuBorderStyle, MenuBorderStyleFactory as MenuBorderStyleFactory +from consolemenu.format.menu_margins import MenuMargins as MenuMargins +from consolemenu.format.menu_padding import MenuPadding as MenuPadding + +class MenuStyle: + def __init__( + self, + margins: MenuMargins | None = ..., + padding: MenuPadding | None = ..., + border_style: MenuBorderStyle | None = ..., + border_style_type: int | None = ..., + border_style_factory: MenuBorderStyleFactory | None = ..., + ) -> None: ... + @property + def margins(self) -> MenuMargins: ... + @margins.setter + def margins(self, margins: MenuMargins) -> None: ... + @property + def padding(self) -> MenuPadding: ... + @padding.setter + def padding(self, padding: MenuPadding) -> None: ... + @property + def border_style(self) -> MenuBorderStyle: ... + @border_style.setter + def border_style(self, border_style: MenuBorderStyle) -> None: ... + @property + def border_style_factory(self) -> MenuBorderStyleFactory: ... + @border_style_factory.setter + def border_style_factory(self, border_style_factory: MenuBorderStyleFactory) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/__init__.pyi new file mode 100644 index 00000000..570ff3f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/__init__.pyi @@ -0,0 +1,6 @@ +from ..console_menu import ExitItem as ExitItem, MenuItem as MenuItem +from .command_item import CommandItem as CommandItem +from .external_item import ExternalItem as ExternalItem +from .function_item import FunctionItem as FunctionItem +from .selection_item import SelectionItem as SelectionItem +from .submenu_item import SubmenuItem as SubmenuItem diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/command_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/command_item.pyi new file mode 100644 index 00000000..f44003f8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/command_item.pyi @@ -0,0 +1,12 @@ +from consolemenu.console_menu import ConsoleMenu +from consolemenu.items import ExternalItem as ExternalItem + +class CommandItem(ExternalItem): + command: str + arguments: list[str] + exit_status: int | None + def __init__( + self, text: str, command: str, arguments: list[str] | None = ..., menu: ConsoleMenu | None = ..., should_exit: bool = ... + ) -> None: ... + def action(self) -> None: ... + def get_return(self) -> int: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/external_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/external_item.pyi new file mode 100644 index 00000000..33cf83a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/external_item.pyi @@ -0,0 +1,5 @@ +from consolemenu.items import MenuItem as MenuItem + +class ExternalItem(MenuItem): + def set_up(self) -> None: ... + def clean_up(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/function_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/function_item.pyi new file mode 100644 index 00000000..60df226e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/function_item.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Mapping, Sequence +from typing import Any + +from consolemenu.console_menu import ConsoleMenu +from consolemenu.items import ExternalItem as ExternalItem + +class FunctionItem(ExternalItem): + function: Callable[..., Any] + args: Sequence[Any] + kwargs: Mapping[str, Any] + return_value: Incomplete | None + def __init__( + self, + text: str, + function: Callable[..., Any], + args: Sequence[Any] | None = ..., + kwargs: Mapping[str, Any] | None = ..., + menu: ConsoleMenu | None = ..., + should_exit: bool = ..., + ) -> None: ... + def action(self) -> None: ... + def clean_up(self) -> None: ... + def get_return(self) -> Any | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/selection_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/selection_item.pyi new file mode 100644 index 00000000..07678800 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/selection_item.pyi @@ -0,0 +1,9 @@ +from collections.abc import Callable + +from consolemenu.console_menu import ConsoleMenu +from consolemenu.items import MenuItem as MenuItem + +class SelectionItem(MenuItem): + index: int + def __init__(self, text: str | Callable[[], str], index: int, menu: ConsoleMenu | None = ...) -> None: ... + def get_return(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/submenu_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/submenu_item.pyi new file mode 100644 index 00000000..f1d8c611 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/items/submenu_item.pyi @@ -0,0 +1,17 @@ +from collections.abc import Callable + +from consolemenu.console_menu import ConsoleMenu +from consolemenu.items import MenuItem as MenuItem + +class SubmenuItem(MenuItem): + submenu: ConsoleMenu + def __init__( + self, text: str | Callable[[], str], submenu: ConsoleMenu, menu: ConsoleMenu | None = ..., should_exit: bool = ... + ) -> None: ... + menu: ConsoleMenu + def set_menu(self, menu: ConsoleMenu) -> None: ... + def set_up(self) -> None: ... + def action(self) -> None: ... 
+ def clean_up(self) -> None: ... + def get_return(self) -> object: ... + def get_submenu(self) -> ConsoleMenu: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/menu_component.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/menu_component.pyi new file mode 100644 index 00000000..65148365 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/menu_component.pyi @@ -0,0 +1,99 @@ +from collections.abc import Generator + +from consolemenu.console_menu import MenuItem +from consolemenu.format import MenuBorderStyle, MenuMargins, MenuPadding, MenuStyle as MenuStyle + +def ansilen(s: str) -> int: ... + +class Dimension: + width: int + height: int + def __init__(self, width: int = ..., height: int = ..., dimension: Dimension | None = ...) -> None: ... + +class MenuComponent: + def __init__(self, menu_style: MenuStyle, max_dimension: Dimension | None = ...) -> None: ... + @property + def max_dimension(self) -> Dimension: ... + @property + def style(self) -> MenuStyle: ... + @property + def margins(self) -> MenuMargins: ... + @property + def padding(self) -> MenuPadding: ... + @property + def border_style(self) -> MenuBorderStyle: ... + def calculate_border_width(self) -> int: ... + def calculate_content_width(self) -> int: ... + def generate(self) -> Generator[str, None, None]: ... + def inner_horizontals(self) -> str: ... + def inner_horizontal_border(self) -> str: ... + def outer_horizontals(self) -> str: ... + def outer_horizontal_border_bottom(self) -> str: ... + def outer_horizontal_border_top(self) -> str: ... + def row(self, content: str = ..., align: str = ..., indent_len: int = ...) -> str: ... + +class MenuHeader(MenuComponent): + title: str + title_align: str + subtitle: str + subtitle_align: str + show_bottom_border: bool + def __init__( + self, + menu_style: MenuStyle, + max_dimension: Dimension | None = ..., + title: str | None = ..., + title_align: str = ..., + subtitle: str | None = ..., + subtitle_align: str = ..., + show_bottom_border: bool = ..., + ) -> None: ... + def generate(self) -> Generator[str, None, None]: ... + +class MenuTextSection(MenuComponent): + text: str + text_align: str + show_top_border: bool + show_bottom_border: bool + def __init__( + self, + menu_style: MenuStyle, + max_dimension: Dimension | None = ..., + text: str | None = ..., + text_align: str = ..., + show_top_border: bool = ..., + show_bottom_border: bool = ..., + ) -> None: ... + def generate(self) -> Generator[str, None, None]: ... + +class MenuItemsSection(MenuComponent): + items_align: str + def __init__( + self, + menu_style: MenuStyle, + max_dimension: Dimension | None = ..., + items: list[MenuItem] | None = ..., + items_align: str = ..., + ) -> None: ... + @property + def items(self) -> list[MenuItem]: ... + @items.setter + def items(self, items: list[MenuItem]) -> None: ... + @property + def items_with_bottom_border(self) -> list[str]: ... + @property + def items_with_top_border(self) -> list[str]: ... + def show_item_bottom_border(self, item_text: str, flag: bool) -> None: ... + def show_item_top_border(self, item_text: str, flag: bool) -> None: ... + def generate(self) -> Generator[str, None, None]: ... + +class MenuFooter(MenuComponent): + def generate(self) -> Generator[str, None, None]: ... 
+ +class MenuPrompt(MenuComponent): + def __init__(self, menu_style: MenuStyle, max_dimension: Dimension | None = ..., prompt_string: str = ...) -> None: ... + @property + def prompt(self) -> str: ... + @prompt.setter + def prompt(self, prompt: str) -> None: ... + def generate(self) -> Generator[str, None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/menu_formatter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/menu_formatter.pyi new file mode 100644 index 00000000..cfd09a69 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/menu_formatter.pyi @@ -0,0 +1,55 @@ +from consolemenu.console_menu import MenuItem +from consolemenu.format import MenuBorderStyleType +from consolemenu.format.menu_borders import MenuBorderStyle as MenuBorderStyle, MenuBorderStyleFactory as MenuBorderStyleFactory +from consolemenu.format.menu_style import MenuStyle as MenuStyle +from consolemenu.menu_component import ( + Dimension as Dimension, + MenuFooter as MenuFooter, + MenuHeader as MenuHeader, + MenuItemsSection as MenuItemsSection, + MenuPrompt as MenuPrompt, + MenuTextSection as MenuTextSection, +) + +class MenuFormatBuilder: + def __init__(self, max_dimension: Dimension | None = ...) -> None: ... + def set_border_style(self, border_style: MenuBorderStyle) -> MenuFormatBuilder: ... + def set_border_style_type(self, border_style_type: MenuBorderStyleType) -> MenuFormatBuilder: ... + def set_border_style_factory(self, border_style_factory: MenuBorderStyleFactory) -> MenuFormatBuilder: ... + def set_bottom_margin(self, bottom_margin: int) -> MenuFormatBuilder: ... + def set_left_margin(self, left_margin: int) -> MenuFormatBuilder: ... + def set_right_margin(self, right_margin: int) -> MenuFormatBuilder: ... + def set_top_margin(self, top_margin: int) -> MenuFormatBuilder: ... + def set_title_align(self, align: str = ...) -> MenuFormatBuilder: ... + def set_subtitle_align(self, align: str = ...) -> MenuFormatBuilder: ... + def set_header_left_padding(self, x: int) -> MenuFormatBuilder: ... + def set_header_right_padding(self, x: int) -> MenuFormatBuilder: ... + def set_header_bottom_padding(self, x: int) -> MenuFormatBuilder: ... + def set_header_top_padding(self, x: int) -> MenuFormatBuilder: ... + def show_header_bottom_border(self, flag: bool) -> MenuFormatBuilder: ... + def set_footer_left_padding(self, x: int) -> MenuFormatBuilder: ... + def set_footer_right_padding(self, x: int) -> MenuFormatBuilder: ... + def set_footer_bottom_padding(self, x: int) -> MenuFormatBuilder: ... + def set_footer_top_padding(self, x: int) -> MenuFormatBuilder: ... + def set_items_left_padding(self, x: int) -> MenuFormatBuilder: ... + def set_items_right_padding(self, x: int) -> MenuFormatBuilder: ... + def set_items_bottom_padding(self, x: int) -> MenuFormatBuilder: ... + def set_items_top_padding(self, x: int) -> MenuFormatBuilder: ... + def show_item_bottom_border(self, item_text: str, flag: bool) -> MenuFormatBuilder: ... + def show_item_top_border(self, item_text: str, flag: bool) -> MenuFormatBuilder: ... + def set_prologue_text_align(self, align: str = ...) -> MenuFormatBuilder: ... + def show_prologue_top_border(self, flag: bool) -> MenuFormatBuilder: ... + def show_prologue_bottom_border(self, flag: bool) -> MenuFormatBuilder: ... + def set_epilogue_text_align(self, align: str = ...) -> MenuFormatBuilder: ... 
+ def show_epilogue_top_border(self, flag: bool) -> MenuFormatBuilder: ... + def show_epilogue_bottom_border(self, flag: bool) -> MenuFormatBuilder: ... + def set_prompt(self, prompt: MenuPrompt) -> MenuFormatBuilder: ... + def clear_data(self) -> None: ... + def format( + self, + title: str | None = ..., + subtitle: str | None = ..., + prologue_text: str | None = ..., + epilogue_text: str | None = ..., + items: list[MenuItem] | None = ..., + ) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/multiselect_menu.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/multiselect_menu.pyi new file mode 100644 index 00000000..06e2a5e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/multiselect_menu.pyi @@ -0,0 +1,22 @@ +from consolemenu import ConsoleMenu as ConsoleMenu +from consolemenu.console_menu import MenuItem +from consolemenu.items import SubmenuItem as SubmenuItem +from consolemenu.menu_formatter import MenuFormatBuilder + +class MultiSelectMenu(ConsoleMenu): + ack_item_completion: bool + def __init__( + self, + title: str | None = ..., + subtitle: str | None = ..., + formatter: MenuFormatBuilder | None = ..., + prologue_text: str | None = ..., + epilogue_text: str | None = ..., + ack_item_completion: bool = ..., + show_exit_option: bool = ..., + exit_option_text: str = ..., + clear_screen: bool = ..., + ) -> None: ... + def append_item(self, item: MenuItem) -> None: ... + current_option: int + def process_user_input(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/prompt_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/prompt_utils.pyi new file mode 100644 index 00000000..507d716f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/prompt_utils.pyi @@ -0,0 +1,47 @@ +from collections.abc import Iterable, Sequence +from typing import Any, NamedTuple + +from consolemenu.screen import Screen +from consolemenu.validators.base import BaseValidator + +class InputResult(NamedTuple): + input_string: str + validation_result: bool + +class PromptFormatter: + @staticmethod + def format_prompt( + prompt: str | None = ..., + default: str | None = ..., + enable_quit: bool = ..., + quit_string: str = ..., + quit_message: str = ..., + ) -> str: ... + +class PromptUtils: + def __init__(self, screen: Screen, prompt_formatter: PromptFormatter | None = ...) -> None: ... + @property + def screen(self) -> Screen: ... + def clear(self) -> None: ... + def confirm_answer(self, answer: str, message: str | None = ...) -> bool: ... + def enter_to_continue(self, message: str | None = ...) -> None: ... + def input( + self, + prompt: str | None = ..., + default: str | None = ..., + validators: Iterable[BaseValidator] | None = ..., + enable_quit: bool = ..., + quit_string: str = ..., + quit_message: str = ..., + ) -> InputResult: ... + def input_password(self, message: str | None = ...) -> str: ... + def printf(self, *args: Any) -> None: ... + def println(self, *args: Any) -> None: ... + def prompt_and_confirm_password(self, message: str) -> str: ... + def prompt_for_bilateral_choice(self, prompt: str, option1: str, option2: str) -> str: ... 
+ def prompt_for_trilateral_choice(self, prompt: str, option1: str, option2: str, option3: str) -> str: ... + def prompt_for_yes_or_no(self, prompt: str) -> bool: ... + def prompt_for_numbered_choice(self, choices: Sequence[str], title: str | None = ..., prompt: str = ...) -> int: ... + def validate_input(self, input_string: str, validators: BaseValidator) -> bool: ... + +class UserQuit(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/screen.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/screen.pyi new file mode 100644 index 00000000..712823c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/screen.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class Screen: + def __init__(self) -> None: ... + @property + def screen_height(self) -> int: ... + @property + def screen_width(self) -> int: ... + @staticmethod + def clear() -> None: ... + @staticmethod + def flush() -> None: ... + def input(self, prompt: str = ...) -> str: ... + @staticmethod + def printf(*args: Any) -> None: ... + @staticmethod + def println(*args: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/selection_menu.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/selection_menu.pyi new file mode 100644 index 00000000..4735cb80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/selection_menu.pyi @@ -0,0 +1,31 @@ +from collections.abc import Iterable + +from consolemenu import ConsoleMenu as ConsoleMenu +from consolemenu.items import SelectionItem as SelectionItem +from consolemenu.menu_formatter import MenuFormatBuilder +from consolemenu.screen import Screen + +class SelectionMenu(ConsoleMenu): + def __init__( + self, + strings: Iterable[str], + title: str | None = ..., + subtitle: str | None = ..., + screen: Screen | None = ..., + formatter: MenuFormatBuilder | None = ..., + prologue_text: str | None = ..., + epilogue_text: str | None = ..., + show_exit_option: bool = ..., + exit_option_text: str = ..., + clear_screen: bool = ..., + ) -> None: ... + @classmethod + def get_selection( + cls, + strings: Iterable[str], + title: str = ..., + subtitle: str | None = ..., + show_exit_option: bool = ..., + _menu: ConsoleMenu | None = ..., + ) -> int: ... + def append_string(self, string: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/base.pyi new file mode 100644 index 00000000..eaea5c87 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/base.pyi @@ -0,0 +1,11 @@ +import abc +from abc import abstractmethod +from logging import Logger + +class InvalidValidator(Exception): ... 
+ +class BaseValidator(metaclass=abc.ABCMeta): + log: Logger + def __init__(self) -> None: ... + @abstractmethod + def validate(self, input_string: str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/regex.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/regex.pyi new file mode 100644 index 00000000..fdadc44f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/regex.pyi @@ -0,0 +1,7 @@ +from consolemenu.validators.base import BaseValidator as BaseValidator + +class RegexValidator(BaseValidator): + def __init__(self, pattern: str) -> None: ... + @property + def pattern(self) -> str: ... + def validate(self, input_string: str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/url.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/url.pyi new file mode 100644 index 00000000..d9e92bb6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/validators/url.pyi @@ -0,0 +1,5 @@ +from consolemenu.validators.base import BaseValidator as BaseValidator + +class UrlValidator(BaseValidator): + def __init__(self) -> None: ... + def validate(self, input_string: str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/console-menu/consolemenu/version.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/contextvars/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/contextvars/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..64f8aa01 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/contextvars/@tests/stubtest_allowlist.txt @@ -0,0 +1,4 @@ +contextvars.Context.__init__ +contextvars.Context.get +contextvars.ContextVar.reset +contextvars.ContextVar.set diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/contextvars/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/contextvars/METADATA.toml new file mode 100644 index 00000000..ea07e8d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/contextvars/METADATA.toml @@ -0,0 +1 @@ +version = "2.4" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/contextvars/contextvars.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/contextvars/contextvars.pyi new file mode 100644 index 00000000..044d8a43 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/contextvars/contextvars.pyi @@ -0,0 +1,47 @@ +import sys +from collections.abc import Callable, Iterator, Mapping +from typing import Any, ClassVar, Generic, TypeVar, overload +from typing_extensions import ParamSpec, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") +_P = ParamSpec("_P") + +@final +class ContextVar(Generic[_T]): + @overload + def __init__(self, 
name: str) -> None: ... + @overload + def __init__(self, name: str, *, default: _T) -> None: ... + @property + def name(self) -> str: ... + def get(self, default: _T = ...) -> _T: ... + def set(self, value: _T) -> Token[_T]: ... + def reset(self, token: Token[_T]) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +@final +class Token(Generic[_T]): + @property + def var(self) -> ContextVar[_T]: ... + @property + def old_value(self) -> Any: ... # returns either _T or MISSING, but that's hard to express + MISSING: ClassVar[object] + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def copy_context() -> Context: ... + +# It doesn't make sense to make this generic, because for most Contexts each ContextVar will have +# a different value. +@final +class Context(Mapping[ContextVar[Any], Any]): + def __init__(self) -> None: ... + def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + def copy(self) -> Context: ... + def __getitem__(self, __key: ContextVar[_T]) -> _T: ... + def __iter__(self) -> Iterator[ContextVar[Any]]: ... + def __len__(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/croniter/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/croniter/METADATA.toml new file mode 100644 index 00000000..3ea18392 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/croniter/METADATA.toml @@ -0,0 +1 @@ +version = "1.3.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/croniter/croniter/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/croniter/croniter/__init__.pyi new file mode 100644 index 00000000..c88524ce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/croniter/croniter/__init__.pyi @@ -0,0 +1,10 @@ +from .croniter import ( + CroniterBadCronError as CroniterBadCronError, + CroniterBadDateError as CroniterBadDateError, + CroniterBadTypeRangeError as CroniterBadTypeRangeError, + CroniterNotAlphaError as CroniterNotAlphaError, + CroniterUnsupportedSyntaxError as CroniterUnsupportedSyntaxError, + croniter as croniter, + croniter_range as croniter_range, + datetime_to_timestamp as datetime_to_timestamp, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/croniter/croniter/croniter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/croniter/croniter/croniter.pyi new file mode 100644 index 00000000..0799a69f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/croniter/croniter/croniter.pyi @@ -0,0 +1,135 @@ +import datetime +from _typeshed import ReadableBuffer, Unused +from collections import OrderedDict +from collections.abc import Iterator +from re import Match, Pattern +from typing import Any, overload +from typing_extensions import Literal, Self, TypeAlias + +_RetType: TypeAlias = type[float | datetime.datetime] + +step_search_re: Pattern[str] +only_int_re: Pattern[str] +star_or_int_re: Pattern[str] +special_weekday_re: Pattern[str] +hash_expression_re: Pattern[str] +VALID_LEN_EXPRESSION: list[int] + +def timedelta_to_seconds(td: datetime.timedelta) -> float: ... + +class CroniterError(ValueError): ... +class CroniterBadTypeRangeError(TypeError): ... 
+class CroniterBadCronError(CroniterError): ... +class CroniterUnsupportedSyntaxError(CroniterBadCronError): ... +class CroniterBadDateError(CroniterError): ... +class CroniterNotAlphaError(CroniterError): ... + +def datetime_to_timestamp(d: datetime.datetime) -> float: ... + +class croniter(Iterator[Any]): + MONTHS_IN_YEAR: Literal[12] + RANGES: tuple[tuple[int, int], ...] + DAYS: tuple[ + Literal[31], + Literal[28], + Literal[31], + Literal[30], + Literal[31], + Literal[30], + Literal[31], + Literal[31], + Literal[30], + Literal[31], + Literal[30], + Literal[31], + ] + ALPHACONV: tuple[dict[str, Any], ...] + LOWMAP: tuple[dict[int, Any], ...] + LEN_MEANS_ALL: tuple[int, ...] + bad_length: str + tzinfo: datetime.tzinfo | None + cur: float + expanded: list[list[str]] + start_time: float + dst_start_time: float + nth_weekday_of_month: dict[str, Any] + def __init__( + self, + expr_format: str, + start_time: float | datetime.datetime | None = ..., + ret_type: _RetType | None = ..., + day_or: bool = ..., + max_years_between_matches: int | None = ..., + is_prev: bool = ..., + hash_id: str | bytes | None = ..., + ) -> None: ... + # Most return value depend on ret_type, which can be passed in both as a method argument and as + # a constructor argument. + def get_next(self, ret_type: _RetType | None = ..., start_time: float | datetime.datetime | None = ...) -> Any: ... + def get_prev(self, ret_type: _RetType | None = ...) -> Any: ... + def get_current(self, ret_type: _RetType | None = ...) -> Any: ... + def set_current(self, start_time: float | datetime.datetime | None, force: bool = ...) -> float: ... + def __iter__(self) -> Self: ... + def next( + self, ret_type: _RetType | None = ..., start_time: float | datetime.datetime | None = ..., is_prev: bool | None = ... + ) -> Any: ... + __next__ = next + def all_next(self, ret_type: _RetType | None = ...) -> Iterator[Any]: ... + def all_prev(self, ret_type: _RetType | None = ...) -> Iterator[Any]: ... + def iter(self, ret_type: _RetType | None = ...) -> Iterator[Any]: ... + def is_leap(self, year: int) -> bool: ... + @classmethod + def expand(cls, expr_format: str, hash_id: str | bytes | None = ...) -> tuple[list[list[str]], dict[str, Any]]: ... + @classmethod + def is_valid(cls, expression: str, hash_id: str | bytes | None = ...) -> bool: ... + @classmethod + def match(cls, cron_expression: str, testdate: float | datetime.datetime | None, day_or: bool = ...) -> bool: ... + +def croniter_range( + start: float | datetime.datetime, + stop: float | datetime.datetime, + expr_format: str, + ret_type: _RetType | None = ..., + day_or: bool = ..., + exclude_ends: bool = ..., + _croniter: type[croniter] | None = ..., +) -> Iterator[Any]: ... + +class HashExpander: + cron: croniter + def __init__(self, cronit: croniter) -> None: ... + @overload + def do( + self, + idx: int, + hash_type: Literal["r"], + hash_id: None = None, + range_end: int | None = None, + range_begin: int | None = None, + ) -> int: ... + @overload + def do( + self, idx: int, hash_type: str, hash_id: ReadableBuffer, range_end: int | None = None, range_begin: int | None = None + ) -> int: ... + @overload + def do( + self, + idx: int, + hash_type: str = "h", + *, + hash_id: ReadableBuffer, + range_end: int | None = None, + range_begin: int | None = None, + ) -> int: ... + def match(self, efl: Unused, idx: Unused, expr: str, hash_id: Unused = None, **kw: Unused) -> Match[str] | None: ... 
+ def expand( + self, + efl: object, + idx: int, + expr: str, + hash_id: ReadableBuffer | None = None, + match: Match[str] | None | Literal[""] = "", + **kw: object, + ) -> str: ... + +EXPANDERS: OrderedDict[str, HashExpander] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..40724a19 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/@tests/stubtest_allowlist.txt @@ -0,0 +1,8 @@ +dateparser.calendars.hijri +dateparser.calendars.hijri_parser +dateparser.calendars.jalali +dateparser.calendars.jalali_parser +dateparser.search.detection.BaseLanguageDetector.iterate_applicable_languages + +# Timezone and other internal data: +dateparser.data.date_translation_data.* diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/METADATA.toml new file mode 100644 index 00000000..ced11fa3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/METADATA.toml @@ -0,0 +1,4 @@ +version = "1.1.*" + +[tool.stubtest] +extras = ["fasttext", "langdetect"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/__init__.pyi new file mode 100644 index 00000000..b649b36f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/__init__.pyi @@ -0,0 +1,37 @@ +import datetime +from typing_extensions import Literal, TypeAlias, TypedDict + +from .date import DateDataParser, _DetectLanguagesFunction + +__version__: str + +_default_parser: DateDataParser + +_Part: TypeAlias = Literal["day", "month", "year"] +_ParserKind: TypeAlias = Literal["timestamp", "relative-time", "custom-formats", "absolute-time", "no-spaces-time"] + +class _Settings(TypedDict, total=False): + DATE_ORDER: str + PREFER_LOCALE_DATE_ORDER: bool + TIMEZONE: str + TO_TIMEZONE: str + RETURN_AS_TIMEZONE_AWARE: bool + PREFER_DAY_OF_MONTH: Literal["current", "first", "last"] + PREFER_DATES_FROM: Literal["current_period", "future", "past"] + RELATIVE_BASE: datetime.datetime + STRICT_PARSING: bool + REQUIRE_PARTS: list[_Part] + SKIP_TOKENS: list[str] + NORMALIZE: bool + RETURN_TIME_AS_PERIOD: bool + PARSERS: list[_ParserKind] + +def parse( + date_string: str, + date_formats: list[str] | tuple[str, ...] | set[str] | None = ..., + languages: list[str] | tuple[str, ...] | set[str] | None = ..., + locales: list[str] | tuple[str, ...] | set[str] | None = ..., + region: str | None = ..., + settings: _Settings | None = ..., + detect_languages_function: _DetectLanguagesFunction | None = ..., +) -> datetime.datetime | None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/__init__.pyi new file mode 100644 index 00000000..1f7227a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/__init__.pyi @@ -0,0 +1,20 @@ +from typing import Any + +from dateparser.parser import _parser + +class CalendarBase: + parser: Any + source: Any + def __init__(self, source) -> None: ... + def get_date(self): ... + +class non_gregorian_parser(_parser): + calendar_converter: Any + default_year: Any + default_month: Any + default_day: Any + non_gregorian_date_cls: Any + @classmethod + def to_latin(cls, source): ... + @classmethod + def parse(cls, datestring, settings): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/hijri.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/hijri.pyi new file mode 100644 index 00000000..ef838157 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/hijri.pyi @@ -0,0 +1,6 @@ +from typing import Any + +from dateparser.calendars import CalendarBase + +class HijriCalendar(CalendarBase): + parser: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/hijri_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/hijri_parser.pyi new file mode 100644 index 00000000..30e804f2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/hijri_parser.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from typing import Any + +from dateparser.calendars import non_gregorian_parser + +class hijri: + @classmethod + def to_gregorian(cls, year: Incomplete | None = ..., month: Incomplete | None = ..., day: Incomplete | None = ...): ... + @classmethod + def from_gregorian(cls, year: Incomplete | None = ..., month: Incomplete | None = ..., day: Incomplete | None = ...): ... + @classmethod + def month_length(cls, year, month): ... + +class HijriDate: + year: Any + month: Any + day: Any + def __init__(self, year, month, day) -> None: ... + def weekday(self): ... + +class hijri_parser(non_gregorian_parser): + calendar_converter: Any + default_year: int + default_month: int + default_day: int + non_gregorian_date_cls: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/jalali.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/jalali.pyi new file mode 100644 index 00000000..35512002 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/jalali.pyi @@ -0,0 +1,6 @@ +from typing import Any + +from . 
import CalendarBase + +class JalaliCalendar(CalendarBase): + parser: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/jalali_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/jalali_parser.pyi new file mode 100644 index 00000000..e642ba25 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/calendars/jalali_parser.pyi @@ -0,0 +1,17 @@ +from typing import Any + +from dateparser.calendars import non_gregorian_parser + +class PersianDate: + year: Any + month: Any + day: Any + def __init__(self, year, month, day) -> None: ... + def weekday(self): ... + +class jalali_parser(non_gregorian_parser): + calendar_converter: Any + default_year: int + default_month: int + default_day: int + non_gregorian_date_cls: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/conf.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/conf.pyi new file mode 100644 index 00000000..08883e88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/conf.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Self + +class Settings: + def __new__(cls, *args, **kw) -> Self: ... + def __init__(self, settings: Incomplete | None = ...) -> None: ... + @classmethod + def get_key(cls, settings: Incomplete | None = ...): ... + def replace(self, mod_settings: Incomplete | None = ..., **kwds): ... + +settings: Any + +def apply_settings(f): ... + +class SettingValidationError(ValueError): ... + +def check_settings(settings) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/fasttext.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/fasttext.pyi new file mode 100644 index 00000000..e7440af4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/fasttext.pyi @@ -0,0 +1 @@ +def detect_languages(text: str, confidence_threshold: float) -> list[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/langdetect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/langdetect.pyi new file mode 100644 index 00000000..e7440af4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/langdetect.pyi @@ -0,0 +1 @@ +def detect_languages(text: str, confidence_threshold: float) -> list[str]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/language_mapping.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/language_mapping.pyi new file mode 100644 index 00000000..0f5de55d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/custom_language_detection/language_mapping.pyi @@ -0,0 +1 @@ +def map_languages(language_codes: list[str]) -> list[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/data/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/data/__init__.pyi new file mode 100644 index 00000000..91633ea6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/data/__init__.pyi @@ -0,0 +1 @@ +from .languages_info import language_locale_dict as language_locale_dict, language_order as language_order diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/data/languages_info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/data/languages_info.pyi new file mode 100644 index 00000000..21a8e508 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/data/languages_info.pyi @@ -0,0 +1,3 @@ +language_order: list[str] +language_locale_dict: dict[str, str] +language_map: dict[str, list[str]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/date.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/date.pyi new file mode 100644 index 00000000..de772b0c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/date.pyi @@ -0,0 +1,110 @@ +import collections +from collections.abc import Callable, Iterable, Iterator +from datetime import datetime +from re import Pattern +from typing import ClassVar, overload +from typing_extensions import Literal, TypeAlias + +from dateparser import _Settings +from dateparser.conf import Settings +from dateparser.languages.loader import LocaleDataLoader +from dateparser.languages.locale import Locale + +_DetectLanguagesFunction: TypeAlias = Callable[[str, float], list[str]] +_Period: TypeAlias = Literal["time", "day", "week", "month", "year"] + +APOSTROPHE_LOOK_ALIKE_CHARS: list[str] +RE_NBSP: Pattern[str] +RE_SPACES: Pattern[str] +RE_TRIM_SPACES: Pattern[str] +RE_TRIM_COLONS: Pattern[str] +RE_SANITIZE_SKIP: Pattern[str] +RE_SANITIZE_RUSSIAN: Pattern[str] +RE_SANITIZE_PERIOD: Pattern[str] +RE_SANITIZE_ON: Pattern[str] +RE_SANITIZE_APOSTROPHE: Pattern[str] +RE_SEARCH_TIMESTAMP: Pattern[str] +RE_SANITIZE_CROATIAN: Pattern[str] +RE_SEARCH_NEGATIVE_TIMESTAMP: Pattern[str] + +def sanitize_spaces(date_string: str) -> str: ... +def date_range(begin, end, **kwargs) -> None: ... +def get_intersecting_periods(low, high, period: str = ...) -> None: ... +def sanitize_date(date_string: str) -> str: ... +def get_date_from_timestamp(date_string: str, settings: Settings, negative: bool = ...) -> datetime | None: ... +def parse_with_formats(date_string: str, date_formats: Iterable[str], settings: Settings) -> DateData: ... 
+ +class _DateLocaleParser: + locale: Locale + date_string: str + date_formats: list[str] | tuple[str, ...] | set[str] | None + def __init__( + self, + locale: Locale, + date_string: str, + date_formats: list[str] | tuple[str, ...] | set[str] | None, + settings: Settings | None = ..., + ) -> None: ... + @classmethod + def parse( + cls, + locale: Locale, + date_string: str, + date_formats: list[str] | tuple[str, ...] | set[str] | None = ..., + settings: Settings | None = ..., + ) -> DateData: ... + def _parse(self) -> DateData | None: ... + def _try_timestamp(self) -> DateData: ... + def _try_freshness_parser(self) -> DateData | None: ... + def _try_absolute_parser(self) -> DateData | None: ... + def _try_nospaces_parser(self) -> DateData | None: ... + def _try_parser(self, parse_method) -> DateData | None: ... + def _try_given_formats(self) -> DateData | None: ... + def _get_translated_date(self) -> str: ... + def _get_translated_date_with_formatting(self) -> str: ... + def _is_valid_date_data(self, date_data: DateData) -> bool: ... + +class DateData: + date_obj: datetime | None + locale: str | None + period: _Period | None + def __init__(self, *, date_obj: datetime | None = ..., period: _Period | None = ..., locale: str | None = ...) -> None: ... + @overload + def __getitem__(self, k: Literal["date_obj"]) -> datetime | None: ... + @overload + def __getitem__(self, k: Literal["locale"]) -> str | None: ... + @overload + def __getitem__(self, k: Literal["period"]) -> _Period | None: ... + @overload + def __setitem__(self, k: Literal["date_obj"], v: datetime) -> None: ... + @overload + def __setitem__(self, k: Literal["locale"], v: str) -> None: ... + @overload + def __setitem__(self, k: Literal["period"], v: _Period) -> None: ... + +class DateDataParser: + _settings: Settings + locale_loader: ClassVar[LocaleDataLoader | None] + try_previous_locales: bool + use_given_order: bool + languages: list[str] | None + locales: list[str] | tuple[str, ...] | set[str] | None + region: str + detect_languages_function: _DetectLanguagesFunction | None + previous_locales: collections.OrderedDict[Locale, None] + def __init__( + self, + languages: list[str] | tuple[str, ...] | set[str] | None = ..., + locales: list[str] | tuple[str, ...] | set[str] | None = ..., + region: str | None = ..., + try_previous_locales: bool = ..., + use_given_order: bool = ..., + settings: _Settings | None = ..., + detect_languages_function: _DetectLanguagesFunction | None = ..., + ) -> None: ... + def get_date_data(self, date_string: str, date_formats: list[str] | tuple[str, ...] | set[str] | None = ...) -> DateData: ... + def get_date_tuple(self, date_string: str, date_formats: list[str] | tuple[str, ...] | set[str] | None = ...): ... + def _get_applicable_locales(self, date_string: str) -> Iterator[Locale]: ... + def _is_applicable_locale(self, locale: Locale, date_string: str) -> bool: ... + @classmethod + def _get_locale_loader(cls: type[DateDataParser]) -> LocaleDataLoader: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/date_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/date_parser.pyi new file mode 100644 index 00000000..b4e92d7a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/date_parser.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete +from typing import Any + +class DateParser: + def parse(self, date_string, parse_method, settings: Incomplete | None = ...): ... + +date_parser: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/freshness_date_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/freshness_date_parser.pyi new file mode 100644 index 00000000..06cec094 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/freshness_date_parser.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from typing import Any + +PATTERN: Any + +class FreshnessDateDataParser: + def get_local_tz(self): ... + def parse(self, date_string, settings): ... + def get_kwargs(self, date_string): ... + def get_date_data(self, date_string, settings: Incomplete | None = ...): ... + +freshness_date_parser: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/dictionary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/dictionary.pyi new file mode 100644 index 00000000..8ace03eb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/dictionary.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from typing import Any + +PARSER_HARDCODED_TOKENS: Any +PARSER_KNOWN_TOKENS: Any +ALWAYS_KEEP_TOKENS: list[str] +KNOWN_WORD_TOKENS: Any +PARENTHESES_PATTERN: Any +NUMERAL_PATTERN: Any +KEEP_TOKEN_PATTERN: Any + +class UnknownTokenError(Exception): ... + +class Dictionary: + info: Any + def __init__(self, locale_info, settings: Incomplete | None = ...) -> None: ... + def __contains__(self, key): ... + def __getitem__(self, key): ... + def __iter__(self) -> Any: ... + def are_tokens_valid(self, tokens): ... + def split(self, string, keep_formatting: bool = ...): ... + +class NormalizedDictionary(Dictionary): + def __init__(self, locale_info, settings: Incomplete | None = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/loader.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/loader.pyi new file mode 100644 index 00000000..87e532b2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/loader.pyi @@ -0,0 +1,28 @@ +from collections import OrderedDict +from collections.abc import Iterator +from typing import Any + +from .locale import Locale + +LOCALE_SPLIT_PATTERN: Any + +class LocaleDataLoader: + def get_locale_map( + self, + languages: list[str] | None = ..., + locales: list[str] | None = ..., + region: str | None = ..., + use_given_order: bool = ..., + allow_conflicting_locales: bool = ..., + ) -> OrderedDict[str, list[Any] | str | int]: ... + def get_locales( + self, + languages: list[str] | None = ..., + locales: list[str] | None = ..., + region: str | None = ..., + use_given_order: bool = ..., + allow_conflicting_locales: bool = ..., + ) -> Iterator[Locale]: ... + def get_locale(self, shortname: str) -> Locale: ... + +default_loader: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/locale.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/locale.pyi new file mode 100644 index 00000000..cc54a5d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/locale.pyi @@ -0,0 +1,17 @@ +from re import Pattern + +from dateparser.conf import Settings + +NUMERAL_PATTERN: Pattern[str] + +class Locale: + shortname: str + def __init__(self, shortname: str, language_info) -> None: ... + def is_applicable(self, date_string: str, strip_timezone: bool = ..., settings: Settings | None = ...) -> bool: ... + def count_applicability(self, text: str, strip_timezone: bool = ..., settings: Settings | None = ...): ... + @staticmethod + def clean_dictionary(dictionary, threshold: int = ...): ... + def translate(self, date_string: str, keep_formatting: bool = ..., settings: Settings | None = ...) -> str: ... + def translate_search(self, search_string, settings: Settings | None = ...): ... + def get_wordchars_for_detection(self, settings): ... + def to_parserinfo(self, base_cls=...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/validation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/validation.pyi new file mode 100644 index 00000000..790faa6a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/languages/validation.pyi @@ -0,0 +1,9 @@ +from typing import Any + +class LanguageValidator: + logger: Any + VALID_KEYS: Any + @classmethod + def get_logger(cls): ... + @classmethod + def validate_info(cls, language_id, info): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/parser.pyi new file mode 100644 index 00000000..889187a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/parser.pyi @@ -0,0 +1,53 @@ +import datetime +from _typeshed import Incomplete +from typing import Any + +NSP_COMPATIBLE: Any +MERIDIAN: Any +MICROSECOND: Any +EIGHT_DIGIT: Any +HOUR_MINUTE_REGEX: Any + +def no_space_parser_eligibile(datestring): ... +def get_unresolved_attrs(parser_object): ... + +date_order_chart: Any + +def resolve_date_order(order, lst: Incomplete | None = ...): ... + +class _time_parser: + time_directives: Any + def __call__(self, timestring): ... + +time_parser: Any + +class _no_spaces_parser: + period: Any + date_formats: Any + def __init__(self, *args, **kwargs): ... + @classmethod + def parse(cls, datestring, settings): ... + +class _parser: + alpha_directives: Any + num_directives: Any + settings: Any + tokens: Any + filtered_tokens: Any + unset_tokens: Any + day: Any + month: Any + year: Any + time: Any + auto_order: Any + ordered_num_directives: Any + def __init__(self, tokens, settings): ... + @classmethod + def parse(cls, datestring, settings, tz: datetime.tzinfo | None = ...): ... + +class tokenizer: + digits: str + letters: str + instream: Any + def __init__(self, ds) -> None: ... + def tokenize(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/__init__.pyi new file mode 100644 index 00000000..b9b1d3e6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/__init__.pyi @@ -0,0 +1,23 @@ +from collections.abc import Mapping, Set as AbstractSet +from datetime import datetime +from typing import Any, overload +from typing_extensions import Literal + +from ..date import _DetectLanguagesFunction + +@overload +def search_dates( + text: str, + languages: list[str] | tuple[str, ...] | AbstractSet[str] | None, + settings: Mapping[Any, Any] | None, + add_detected_language: Literal[True], + detect_languages_function: _DetectLanguagesFunction | None = ..., +) -> list[tuple[str, datetime, str]]: ... +@overload +def search_dates( + text: str, + languages: list[str] | tuple[str, ...] | AbstractSet[str] | None = ..., + settings: Mapping[Any, Any] | None = ..., + add_detected_language: Literal[False] = ..., + detect_languages_function: _DetectLanguagesFunction | None = ..., +) -> list[tuple[str, datetime]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/detection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/detection.pyi new file mode 100644 index 00000000..e8d03080 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/detection.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +class BaseLanguageDetector: + languages: Any + def __init__(self, languages) -> None: ... + def iterate_applicable_languages(self, date_string, modify: bool = ..., settings: Incomplete | None = ...) -> None: ... 
+ +class AutoDetectLanguage(BaseLanguageDetector): + language_pool: Any + allow_redetection: Any + def __init__(self, languages, allow_redetection: bool = ...) -> None: ... + languages: Any + def iterate_applicable_languages(self, date_string, modify: bool = ..., settings: Incomplete | None = ...) -> None: ... + +class ExactLanguages(BaseLanguageDetector): + def __init__(self, languages) -> None: ... + def iterate_applicable_languages(self, date_string, modify: bool = ..., settings: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/search.pyi new file mode 100644 index 00000000..cd5904a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/search.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from typing import Any + +from ..date import _DetectLanguagesFunction + +RELATIVE_REG: Any + +def date_is_relative(translation): ... + +class _ExactLanguageSearch: + loader: Any + language: Any + def __init__(self, loader) -> None: ... + def get_current_language(self, shortname) -> None: ... + def search(self, shortname, text, settings): ... + @staticmethod + def set_relative_base(substring, already_parsed): ... + def choose_best_split(self, possible_parsed_splits, possible_substrings_splits): ... + def split_by(self, item, original, splitter): ... + def split_if_not_parsed(self, item, original): ... + def parse_item(self, parser, item, translated_item, parsed, need_relative_base): ... + def parse_found_objects(self, parser, to_parse, original, translated, settings): ... + def search_parse(self, shortname, text, settings): ... + +class DateSearchWithDetection: + loader: Any + available_language_map: Any + search: Any + def __init__(self) -> None: ... + language_detector: Any + def detect_language( + self, text, languages, settings: Incomplete | None = ..., detect_languages_function: _DetectLanguagesFunction | None = ... + ): ... + def search_dates( + self, + text, + languages: Incomplete | None = ..., + settings: Incomplete | None = ..., + detect_languages_function: _DetectLanguagesFunction | None = ..., + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/text_detection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/text_detection.pyi new file mode 100644 index 00000000..ee6d30ec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/search/text_detection.pyi @@ -0,0 +1,11 @@ +from typing import Any + +from dateparser.search.detection import BaseLanguageDetector + +class FullTextLanguageDetector(BaseLanguageDetector): + languages: Any + language_unique_chars: Any + language_chars: Any + def __init__(self, languages) -> None: ... + def get_unique_characters(self, settings) -> None: ... + def character_check(self, date_string, settings) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/timezone_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/timezone_parser.pyi new file mode 100644 index 00000000..dc7dccd6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/timezone_parser.pyi @@ -0,0 +1,18 @@ +from datetime import tzinfo +from typing import Any + +class StaticTzInfo(tzinfo): + def __init__(self, name, offset) -> None: ... + def tzname(self, dt): ... + def utcoffset(self, dt): ... + def dst(self, dt): ... + def localize(self, dt, is_dst: bool = ...): ... + def __getinitargs__(self): ... + +def pop_tz_offset_from_string(date_string, as_offset: bool = ...): ... +def word_is_tz(word): ... +def convert_to_local_tz(datetime_obj, datetime_tz_offset): ... +def build_tz_offsets(search_regex_parts): ... +def get_local_tz_offset(): ... + +local_tz_offset: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/timezones.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/timezones.pyi new file mode 100644 index 00000000..aeb47a82 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/timezones.pyi @@ -0,0 +1 @@ +timezone_info_list: list[dict[str, list[str | tuple[str, ...]]]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/utils/__init__.pyi new file mode 100644 index 00000000..1814c88f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/utils/__init__.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from collections import OrderedDict +from collections.abc import Mapping +from typing import Any + +def strip_braces(date_string: str) -> str: ... +def normalize_unicode(string: str, form: str = ...) -> str: ... +def combine_dicts( + primary_dict: Mapping[Any, Any], supplementary_dict: Mapping[Any, Any] +) -> OrderedDict[str, str | list[Any]]: ... +def find_date_separator(format) -> Any: ... +def localize_timezone(date_time, tz_string): ... +def apply_tzdatabase_timezone(date_time, pytz_string): ... +def apply_dateparser_timezone(utc_datetime, offset_or_timezone_abb): ... +def apply_timezone(date_time, tz_string): ... +def apply_timezone_from_settings(date_obj, settings): ... +def get_last_day_of_month(year, month): ... +def get_previous_leap_year(year): ... +def get_next_leap_year(year): ... +def set_correct_day_from_settings(date_obj, settings, current_day: Incomplete | None = ...): ... +def registry(cls): ... +def get_logger() -> Any: ... +def setup_logging() -> None: ... + +# TODO: this needs `types-pytz` and a type-alias +def get_timezone_from_tz_string(tz_string: str) -> Incomplete: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/utils/strptime.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/utils/strptime.pyi new file mode 100644 index 00000000..5fceb4c1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser/utils/strptime.pyi @@ -0,0 +1,8 @@ +from datetime import datetime +from typing import Any + +TIME_MATCHER: Any +MS_SEARCHER: Any + +def patch_strptime() -> Any: ... +def strptime(date_string, format) -> datetime: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser_data/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser_data/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser_data/settings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser_data/settings.pyi new file mode 100644 index 00000000..2de84e3d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dateparser/dateparser_data/settings.pyi @@ -0,0 +1,4 @@ +from typing import Any + +default_parsers: Any +settings: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/decorator/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/decorator/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..c256a8ca --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/decorator/@tests/stubtest_allowlist.txt @@ -0,0 +1,9 @@ +decorator.ContextManager.__init__ +decorator.FunctionMaker.args +decorator.FunctionMaker.kwonlyargs +decorator.FunctionMaker.kwonlydefaults +decorator.FunctionMaker.varargs +decorator.FunctionMaker.varkw +decorator.decorate +decorator.decorator +decorator.get_init diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/decorator/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/decorator/METADATA.toml new file mode 100644 index 00000000..2c151642 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/decorator/METADATA.toml @@ -0,0 +1 @@ +version = "5.1.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/decorator/decorator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/decorator/decorator.pyi new file mode 100644 index 00000000..3eb509a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/decorator/decorator.pyi @@ -0,0 +1,73 @@ +import inspect +from builtins import dict as _dict # alias to avoid conflicts with attribute name +from collections.abc import Callable, Iterator +from contextlib import _GeneratorContextManager +from inspect import Signature, getfullargspec as getfullargspec, iscoroutinefunction as iscoroutinefunction +from re import Pattern +from typing import Any, TypeVar +from typing_extensions import Literal, ParamSpec + +_C = TypeVar("_C", bound=Callable[..., Any]) +_Func = TypeVar("_Func", bound=Callable[..., Any]) +_T = TypeVar("_T") +_P = ParamSpec("_P") + +def get_init(cls: type) -> None: ... 
+ +DEF: Pattern[str] +POS: Literal[inspect._ParameterKind.POSITIONAL_OR_KEYWORD] + +class FunctionMaker: + args: list[str] + varargs: str | None + varkw: str | None + defaults: tuple[Any, ...] + kwonlyargs: list[str] + kwonlydefaults: str | None + shortsignature: str | None + name: str + doc: str | None + module: str | None + annotations: _dict[str, Any] + signature: str + dict: _dict[str, Any] + def __init__( + self, + func: Callable[..., Any] | None = ..., + name: str | None = ..., + signature: str | None = ..., + defaults: tuple[Any, ...] | None = ..., + doc: str | None = ..., + module: str | None = ..., + funcdict: _dict[str, Any] | None = ..., + ) -> None: ... + def update(self, func: Any, **kw: Any) -> None: ... + def make( + self, src_templ: str, evaldict: _dict[str, Any] | None = ..., addsource: bool = ..., **attrs: Any + ) -> Callable[..., Any]: ... + @classmethod + def create( + cls, + obj: Any, + body: str, + evaldict: _dict[str, Any], + defaults: tuple[Any, ...] | None = ..., + doc: str | None = ..., + module: str | None = ..., + addsource: bool = ..., + **attrs: Any, + ) -> Callable[..., Any]: ... + +def fix(args: tuple[Any, ...], kwargs: dict[str, Any], sig: Signature) -> tuple[tuple[Any, ...], dict[str, Any]]: ... +def decorate(func: _Func, caller: Callable[..., Any], extras: Any = ...) -> _Func: ... +def decoratorx(caller: Callable[..., Any]) -> Callable[..., Any]: ... +def decorator( + caller: Callable[..., Any], _func: Callable[..., Any] | None = ... +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: ... + +class ContextManager(_GeneratorContextManager[_T]): + def __call__(self, func: _C) -> _C: ... + +def contextmanager(func: Callable[_P, Iterator[_T]]) -> Callable[_P, ContextManager[_T]]: ... +def append(a: type, vancestors: list[type]) -> None: ... +def dispatch_on(*dispatch_args: Any) -> Callable[[Callable[..., Any]], Callable[..., Any]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dj-database-url/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dj-database-url/METADATA.toml new file mode 100644 index 00000000..249b0a7c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dj-database-url/METADATA.toml @@ -0,0 +1 @@ +version = "1.2.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dj-database-url/dj_database_url.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dj-database-url/dj_database_url.pyi new file mode 100644 index 00000000..c0d3f567 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dj-database-url/dj_database_url.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from typing import Any +from typing_extensions import TypedDict + +DEFAULT_ENV: str +SCHEMES: dict[str, str] + +# From https://docs.djangoproject.com/en/4.0/ref/settings/#databases +class _DBConfig(TypedDict, total=False): + ATOMIC_REQUESTS: bool + AUTOCOMMIT: bool + CONN_MAX_AGE: int + DISABLE_SERVER_SIDE_CURSORS: bool + ENGINE: str + HOST: str + NAME: str + OPTIONS: dict[str, Any] | None + PASSWORD: str + PORT: str + TEST: dict[str, Any] + TIME_ZONE: str + USER: str + +def parse( + url: str, + engine: str | None = ..., + conn_max_age: int = ..., + conn_health_checks: bool = ..., + ssl_require: bool = ..., + test_options: dict[Incomplete, Incomplete] | None = ..., +) -> _DBConfig: ... 
+def config( + env: str = ..., + default: str | None = ..., + engine: str | None = ..., + conn_max_age: int = ..., + conn_health_checks: bool = ..., + ssl_require: bool = ..., + test_options: dict[Incomplete, Incomplete] | None = ..., +) -> _DBConfig: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/METADATA.toml new file mode 100644 index 00000000..58bc3834 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/METADATA.toml @@ -0,0 +1 @@ +version = "2.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/__init__.pyi new file mode 100644 index 00000000..eeb43a01 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/__init__.pyi @@ -0,0 +1,3 @@ +from .parser import DockerfileParser as DockerfileParser + +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/constants.pyi new file mode 100644 index 00000000..9bf1c78f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/constants.pyi @@ -0,0 +1,2 @@ +DOCKERFILE_FILENAME: str +COMMENT_INSTRUCTION: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/parser.pyi new file mode 100644 index 00000000..30939a41 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/parser.pyi @@ -0,0 +1,70 @@ +import logging +from collections.abc import Mapping, Sequence +from typing import IO, ClassVar +from typing_extensions import TypedDict + +from .util import Context + +logger: logging.Logger + +class KeyValues(dict[str, str]): + parser_attr: ClassVar[str | None] + parser: DockerfileParser + def __init__(self, key_values: Mapping[str, str], parser: DockerfileParser) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __setitem__(self, key: str, value: str) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... # type: ignore[override] + +class Labels(KeyValues): ... +class Envs(KeyValues): ... +class Args(KeyValues): ... + +class _InstructionDict(TypedDict): + instruction: str + startline: int + endline: int + content: str + value: str + +class DockerfileParser: + fileobj: IO[str] + dockerfile_path: str + cache_content: bool + cached_content: str + env_replace: bool + parent_env: dict[str, str] + build_args: dict[str, str] + def __init__( + self, + path: str | None = ..., + cache_content: bool = ..., + env_replace: bool = ..., + parent_env: dict[str, str] | None = ..., + fileobj: IO[str] | None = ..., + build_args: dict[str, str] | None = ..., + ) -> None: ... + lines: list[str] + content: str + @property + def structure(self) -> list[_InstructionDict]: ... 
+ @property + def json(self) -> str: ... + parent_images: Sequence[str] + @property + def is_multistage(self) -> bool: ... + baseimage: str + cmd: str + labels: Mapping[str, str] + envs: Mapping[str, str] + args: Mapping[str, str] + def add_lines( + self, *lines: str, all_stages: bool | None = ..., at_start: bool | None = ..., skip_scratch: bool | None = ... + ) -> None: ... + def add_lines_at( + self, anchor: str | int | dict[str, int], *lines: str, replace: bool | None = ..., after: bool | None = ... + ) -> None: ... + @property + def context_structure(self) -> list[Context]: ... + +def image_from(from_value: str) -> tuple[str | None, str | None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/util.pyi new file mode 100644 index 00000000..73f29abe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/dockerfile-parse/dockerfile_parse/util.pyi @@ -0,0 +1,49 @@ +from collections.abc import Generator, Mapping, MutableMapping +from io import StringIO +from typing import ClassVar +from typing_extensions import Literal, TypeAlias + +def b2u(string: bytes | str) -> str: ... +def u2b(string: str | bytes) -> bytes: ... + +_Quotes: TypeAlias = Literal["'", '"'] +_ContextType: TypeAlias = Literal["ARG", "ENV", "LABEL"] + +class WordSplitter: + SQUOTE: ClassVar[_Quotes] + DQUOTE: ClassVar[_Quotes] + stream: StringIO + args: Mapping[str, str] | None + envs: Mapping[str, str] | None + quotes: _Quotes | None + escaped: bool + def __init__(self, s: str, args: Mapping[str, str] | None = ..., envs: Mapping[str, str] | None = ...) -> None: ... + def dequote(self) -> str: ... + def split(self, maxsplit: int | None = ..., dequote: bool = ...) -> Generator[str | None, None, None]: ... + +def extract_key_values( + env_replace: bool, args: Mapping[str, str], envs: Mapping[str, str], instruction_value: str +) -> list[tuple[str, str]]: ... +def get_key_val_dictionary( + instruction_value: str, env_replace: bool = ..., args: Mapping[str, str] | None = ..., envs: Mapping[str, str] | None = ... +) -> dict[str, str]: ... + +class Context: + args: MutableMapping[str, str] + envs: MutableMapping[str, str] + labels: MutableMapping[str, str] + line_args: Mapping[str, str] + line_envs: Mapping[str, str] + line_labels: Mapping[str, str] + def __init__( + self, + args: MutableMapping[str, str] | None = ..., + envs: MutableMapping[str, str] | None = ..., + labels: MutableMapping[str, str] | None = ..., + line_args: Mapping[str, str] | None = ..., + line_envs: Mapping[str, str] | None = ..., + line_labels: Mapping[str, str] | None = ..., + ) -> None: ... + def set_line_value(self, context_type: _ContextType, value: Mapping[str, str]) -> None: ... + def get_line_value(self, context_type: _ContextType) -> Mapping[str, str]: ... + def get_values(self, context_type: _ContextType) -> Mapping[str, str]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docopt/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docopt/METADATA.toml new file mode 100644 index 00000000..03031f1e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docopt/METADATA.toml @@ -0,0 +1 @@ +version = "0.6.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docopt/docopt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docopt/docopt.pyi new file mode 100644 index 00000000..6003f152 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docopt/docopt.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import Any +from typing_extensions import TypeAlias + +__version__: str + +_Argv: TypeAlias = Iterable[str] | str + +def printable_usage(doc: str) -> str: ... +def docopt( + doc: str, argv: _Argv | None = ..., help: bool = ..., version: Incomplete | None = ..., options_first: bool = ... +) -> dict[str, Any]: ... # Really should be dict[str, str | bool] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..e8229378 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/@tests/stubtest_allowlist.txt @@ -0,0 +1,23 @@ +docutils.TransformSpec.unknown_reference_resolvers +docutils.frontend.ConfigParser.__getattr__ +docutils.frontend.ConfigParser.read +docutils.frontend.OptionParser.__getattr__ +docutils.io.FileOutput.__getattr__ +docutils.io.FileOutput.__init__ +docutils.io.Input.__init__ +docutils.languages.LanguageImporter.__getattr__ +docutils.nodes.Element.__getattr__ +docutils.nodes.NodeVisitor.__getattr__ +docutils.nodes.document.__getattr__ +docutils.nodes.document.__init__ +docutils.parsers.rst.Directive.__getattr__ +docutils.transforms.Transform.__getattr__ +docutils.transforms.Transformer.__getattr__ +docutils.utils.Reporter.__getattr__ +docutils.parsers.recommonmark_wrapper + +# the constructor appears to be mostly internal API, public API users are meant to use docutils.utils.new_reporter instead +docutils.utils.Reporter.__init__ + +# these methods take a rawsource parameter that has been deprecated and is completely ignored, so we omit it from the stub +docutils.nodes.Text.__new__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/METADATA.toml new file mode 100644 index 00000000..1647ad35 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/METADATA.toml @@ -0,0 +1,4 @@ +version = "0.19.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/__init__.pyi new file mode 100644 index 00000000..1a2f9df4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/__init__.pyi @@ -0,0 +1,42 @@ +from typing import Any, ClassVar, 
NamedTuple +from typing_extensions import Self + +__docformat__: str +__version__: str + +class _VersionInfo(NamedTuple): + major: int + minor: int + micro: int + releaselevel: str + serial: int + release: bool + +class VersionInfo(_VersionInfo): + def __new__( + cls, major: int = ..., minor: int = ..., micro: int = ..., releaselevel: str = ..., serial: int = ..., release: bool = ... + ) -> Self: ... + +__version_info__: VersionInfo +__version_details__: str + +class ApplicationError(Exception): ... +class DataError(ApplicationError): ... + +class SettingsSpec: + settings_spec: ClassVar[tuple[Any, ...]] + settings_defaults: ClassVar[dict[Any, Any] | None] + settings_default_overrides: ClassVar[dict[Any, Any] | None] + relative_path_settings: ClassVar[tuple[Any, ...]] + config_section: ClassVar[str | None] + config_section_dependencies: ClassVar[tuple[str, ...] | None] + +class TransformSpec: + def get_transforms(self) -> list[Any]: ... + default_transforms: ClassVar[tuple[Any, ...]] + unknown_reference_resolvers: ClassVar[list[Any]] + +class Component(SettingsSpec, TransformSpec): + component_type: ClassVar[str | None] + supported: ClassVar[tuple[str, ...]] + def supports(self, format: str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/core.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/core.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/examples.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/examples.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/examples.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/frontend.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/frontend.pyi new file mode 100644 index 00000000..328decf9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/frontend.pyi @@ -0,0 +1,83 @@ +import optparse +from _typeshed import Incomplete +from collections.abc import Iterable, Mapping +from configparser import RawConfigParser +from typing import Any, ClassVar + +from docutils import SettingsSpec +from docutils.parsers import Parser +from docutils.utils import DependencyList + +__docformat__: str + +def store_multiple(option, opt, value, parser, *args, **kwargs) -> None: ... +def read_config_file(option, opt, value, parser) -> None: ... +def validate_encoding( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +): ... +def validate_encoding_error_handler( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +): ... +def validate_encoding_and_error_handler( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +): ... 
+def validate_boolean( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +) -> bool: ... +def validate_nonnegative_int( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +) -> int: ... +def validate_threshold( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +) -> int: ... +def validate_colon_separated_string_list( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +) -> list[str]: ... +def validate_comma_separated_list( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +) -> list[str]: ... +def validate_url_trailing_slash( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +) -> str: ... +def validate_dependency_file( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +) -> DependencyList: ... +def validate_strip_class( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +): ... +def validate_smartquotes_locales( + setting, value, option_parser, config_parser: Incomplete | None = ..., config_section: Incomplete | None = ... +) -> list[tuple[str, str]]: ... +def make_paths_absolute(pathdict, keys, base_path: Incomplete | None = ...) -> None: ... +def make_one_path_absolute(base_path, path) -> str: ... +def filter_settings_spec(settings_spec, *exclude, **replace) -> tuple[Any, ...]: ... + +class Values(optparse.Values): + def update(self, other_dict, option_parser) -> None: ... + def copy(self) -> Values: ... + +class Option(optparse.Option): ... + +class OptionParser(optparse.OptionParser, SettingsSpec): + standard_config_files: ClassVar[list[str]] + threshold_choices: ClassVar[list[str]] + thresholds: ClassVar[dict[str, int]] + booleans: ClassVar[dict[str, bool]] + default_error_encoding: ClassVar[str] + default_error_encoding_error_handler: ClassVar[str] + config_section: ClassVar[str] + version_template: ClassVar[str] + def __init__( + self, + components: Iterable[type[Parser]] = ..., + defaults: Mapping[str, Any] | None = ..., + read_config_files: bool | None = ..., + *args, + **kwargs, + ) -> None: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class ConfigParser(RawConfigParser): + def __getattr__(self, name: str) -> Incomplete: ... + +class ConfigDeprecationWarning(DeprecationWarning): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/io.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/io.pyi new file mode 100644 index 00000000..8a2a7d07 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/io.pyi @@ -0,0 +1,95 @@ +from _typeshed import ( + Incomplete, + OpenBinaryModeReading, + OpenBinaryModeWriting, + OpenTextModeReading, + OpenTextModeWriting, + SupportsWrite, + Unused, +) +from re import Pattern +from typing import Any, ClassVar +from typing_extensions import Literal + +from docutils import TransformSpec + +__docformat__: str + +class InputError(OSError): ... +class OutputError(OSError): ... + +def check_encoding(stream: Any, encoding: str) -> bool | None: ... +def error_string(err: BaseException) -> str: ... 
+ +class Input(TransformSpec): + component_type: ClassVar[str] + default_source_path: ClassVar[str | None] + def read(self) -> Any: ... + def decode(self, data: str | bytes) -> str: ... + coding_slug: ClassVar[Pattern[bytes]] + byte_order_marks: ClassVar[tuple[tuple[bytes, str], ...]] + def determine_encoding_from_data(self, data: str | bytes) -> str | None: ... + def isatty(self) -> bool: ... + +class Output(TransformSpec): + component_type: ClassVar[str] + default_destination_path: ClassVar[str | None] + def __init__( + self, + destination: Incomplete | None = ..., + destination_path: Incomplete | None = ..., + encoding: str | None = ..., + error_handler: str = ..., + ) -> None: ... + def write(self, data: str) -> Any: ... # returns bytes or str + def encode(self, data: str) -> Any: ... # returns bytes or str + +class ErrorOutput: + def __init__( + self, + destination: str | SupportsWrite[str] | SupportsWrite[bytes] | Literal[False] | None = ..., + encoding: str | None = ..., + encoding_errors: str = ..., + decoding_errors: str = ..., + ) -> None: ... + def write(self, data: str | bytes | Exception) -> None: ... + def close(self) -> None: ... + def isatty(self) -> bool: ... + +class FileInput(Input): + def __init__( + self, + source: Incomplete | None = ..., + source_path: Incomplete | None = ..., + encoding: str | None = ..., + error_handler: str = ..., + autoclose: bool = ..., + mode: OpenTextModeReading | OpenBinaryModeReading = ..., + ) -> None: ... + def read(self) -> str: ... + def readlines(self) -> list[str]: ... + def close(self) -> None: ... + +class FileOutput(Output): + mode: ClassVar[OpenTextModeWriting | OpenBinaryModeWriting] + def __getattr__(self, name: str) -> Incomplete: ... + +class BinaryFileOutput(FileOutput): ... + +class StringInput(Input): + default_source_path: ClassVar[str] + +class StringOutput(Output): + default_destination_path: ClassVar[str] + destination: str | bytes # only defined after call to write() + +class NullInput(Input): + default_source_path: ClassVar[str] + def read(self) -> str: ... + +class NullOutput(Output): + default_destination_path: ClassVar[str] + def write(self, data: Unused) -> None: ... + +class DocTreeInput(Input): + default_source_path: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/languages/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/languages/__init__.pyi new file mode 100644 index 00000000..3638be18 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/languages/__init__.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from typing import Protocol + +from docutils.utils import Reporter + +class _LanguageModule(Protocol): + labels: dict[str, str] + author_separators: list[str] + bibliographic_fields: list[str] + +class LanguageImporter: + def __call__(self, language_code: str, reporter: Reporter | None = ...) -> _LanguageModule: ... + def __getattr__(self, __name: str) -> Incomplete: ... 
+ +get_language: LanguageImporter diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/nodes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/nodes.pyi new file mode 100644 index 00000000..80ee6bbe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/nodes.pyi @@ -0,0 +1,137 @@ +import xml.dom.minidom +from _typeshed import Incomplete +from abc import abstractmethod +from collections.abc import Callable, Generator, Iterable, Sequence +from typing import Any, ClassVar, Protocol, TypeVar, overload +from typing_extensions import Literal, Self + +from docutils.transforms import Transformer + +_N = TypeVar("_N", bound=Node) + +class _DomModule(Protocol): + Document: type[xml.dom.minidom.Document] + +class Node: + # children is initialized by the subclasses + children: Sequence[Node] + parent: Node | None + source: str | None + line: int | None + document: document | None + def __bool__(self) -> Literal[True]: ... + def asdom(self, dom: _DomModule | None = ...) -> xml.dom.minidom.Element: ... + # While docutils documents the Node class to be abstract it does not + # actually use the ABCMeta metaclass. We still set @abstractmethod here + # (although it's not used in the docutils implementation) because it + # makes Mypy reject Node() with "Cannot instantiate abstract class". + @abstractmethod + def copy(self) -> Self: ... + @abstractmethod + def deepcopy(self) -> Self: ... + @abstractmethod + def pformat(self, indent: str = ..., level: int = ...) -> str: ... + @abstractmethod + def astext(self) -> str: ... + def setup_child(self, child: Node) -> None: ... + def walk(self, visitor: NodeVisitor) -> bool: ... + def walkabout(self, visitor: NodeVisitor) -> bool: ... + @overload + def findall( + self, condition: type[_N], include_self: bool = ..., descend: bool = ..., siblings: bool = ..., ascend: bool = ... + ) -> Generator[_N, None, None]: ... + @overload + def findall( + self, + condition: Callable[[Node], bool] | None = ..., + include_self: bool = ..., + descend: bool = ..., + siblings: bool = ..., + ascend: bool = ..., + ) -> Generator[Node, None, None]: ... + @overload + def traverse( + self, condition: type[_N], include_self: bool = ..., descend: bool = ..., siblings: bool = ..., ascend: bool = ... + ) -> list[_N]: ... + @overload + def traverse( + self, + condition: Callable[[Node], bool] | None = ..., + include_self: bool = ..., + descend: bool = ..., + siblings: bool = ..., + ascend: bool = ..., + ) -> list[Node]: ... + @overload + def next_node( + self, condition: type[_N], include_self: bool = ..., descend: bool = ..., siblings: bool = ..., ascend: bool = ... + ) -> _N: ... + @overload + def next_node( + self, + condition: Callable[[Node], bool] | None = ..., + include_self: bool = ..., + descend: bool = ..., + siblings: bool = ..., + ascend: bool = ..., + ) -> Node: ... + def previous_sibling(self) -> Node | None: ... + +class Element(Node): + children: list[Node] + def __init__(self, rawsource: str = ..., *children: Node, **attributes): ... + def __len__(self) -> int: ... + def __contains__(self, key: str | Node) -> bool: ... + @overload + def __getitem__(self, key: str) -> Any: ... + @overload + def __getitem__(self, key: int) -> Node: ... + @overload + def __getitem__(self, key: slice) -> list[Node]: ... + @overload + def __setitem__(self, key: str, item: Any) -> None: ... 
+ @overload + def __setitem__(self, key: int, item: Node) -> None: ... + @overload + def __setitem__(self, key: slice, item: Iterable[Node]) -> None: ... + def __delitem__(self, key: str | int | slice) -> None: ... + def __add__(self, other: list[Node]) -> list[Node]: ... + def __radd__(self, other: list[Node]) -> list[Node]: ... + def __iadd__(self, other: Node | Iterable[Node]) -> Self: ... + def copy(self) -> Self: ... + def deepcopy(self) -> Self: ... + def pformat(self, indent: str = ..., level: int = ...) -> str: ... + def astext(self) -> str: ... + def __getattr__(self, __name: str) -> Incomplete: ... + +class Text(Node, str): + tagname: ClassVar[str] + children: tuple[()] + + # we omit the rawsource parameter because it has been deprecated and is ignored + def __new__(cls, data: str) -> Self: ... + def __init__(self, data: str) -> None: ... + def shortrepr(self, maxlen: int = ...) -> str: ... + def copy(self) -> Self: ... + def deepcopy(self) -> Self: ... + def pformat(self, indent: str = ..., level: int = ...) -> str: ... + def astext(self) -> str: ... + def rstrip(self, chars: str | None = ...) -> str: ... + def lstrip(self, chars: str | None = ...) -> str: ... + +class Structural: ... +class Root: ... + +class document(Root, Structural, Element): + transformer: Transformer + def copy(self) -> Self: ... + def deepcopy(self) -> Self: ... + def pformat(self, indent: str = ..., level: int = ...) -> str: ... + def astext(self) -> str: ... + def __getattr__(self, __name: str) -> Incomplete: ... + +class NodeVisitor: + def __init__(self, document: document): ... + def __getattr__(self, __name: str) -> Incomplete: ... + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/__init__.pyi new file mode 100644 index 00000000..04683301 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/__init__.pyi @@ -0,0 +1,17 @@ +from typing import Any, ClassVar + +from docutils import Component +from docutils.nodes import document as _document + +class Parser(Component): + component_type: ClassVar[str] + config_section: ClassVar[str] + inputstring: Any # defined after call to setup_parse() + document: Any # defined after call to setup_parse() + def parse(self, inputstring: str, document: _document) -> None: ... + def setup_parse(self, inputstring: str, document: _document) -> None: ... + def finish_parse(self) -> None: ... + +_parser_aliases: dict[str, str] + +def get_parser_class(parser_name: str) -> type[Parser]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/null.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/null.pyi new file mode 100644 index 00000000..edc97732 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/null.pyi @@ -0,0 +1,6 @@ +from typing import ClassVar + +from docutils import parsers + +class Parser(parsers.Parser): + config_section_dependencies: ClassVar[tuple[str, ...]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/recommonmark_wrapper.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/recommonmark_wrapper.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/recommonmark_wrapper.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/__init__.pyi new file mode 100644 index 00000000..04a24b3d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/__init__.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Literal + +from docutils import parsers +from docutils.parsers.rst import states + +class Parser(parsers.Parser): + config_section_dependencies: ClassVar[tuple[str, ...]] + initial_state: Literal["Body", "RFC2822Body"] + state_classes: Any + inliner: Any + def __init__(self, rfc2822: bool = ..., inliner: Incomplete | None = ...) -> None: ... + +class DirectiveError(Exception): + level: Any + msg: str + def __init__(self, level: Any, message: str) -> None: ... + +class Directive: + def __init__( + self, + name: str, + arguments: list[Any], + options: dict[str, Any], + content: list[str], + lineno: int, + content_offset: int, + block_text: str, + state: states.RSTState, + state_machine: states.RSTStateMachine, + ) -> None: ... + def __getattr__(self, name: str) -> Incomplete: ... + +def convert_directive_function(directive_fn): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/__init__.pyi new file mode 100644 index 00000000..5ffbcf7a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/__init__.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +from docutils.languages import _LanguageModule +from docutils.nodes import document +from docutils.parsers.rst import Directive +from docutils.utils import SystemMessage + +def register_directive(name: str, directive: type[Directive]) -> None: ... +def directive( + directive_name: str, language_module: _LanguageModule, document: document +) -> tuple[type[Directive] | None, list[SystemMessage]]: ... +def __getattr__(name: str) -> Incomplete: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/admonitions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/admonitions.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/admonitions.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/body.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/body.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/body.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/html.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/html.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/html.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/images.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/images.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/images.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/misc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/misc.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/misc.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/parts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/parts.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/parts.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/references.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/references.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/references.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/tables.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/tables.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/directives/tables.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/roles.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/roles.pyi new file mode 100644 index 00000000..59580d68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/roles.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +import docutils.nodes +import docutils.parsers.rst.states +from docutils.languages import _LanguageModule +from docutils.utils import Reporter, SystemMessage + +_RoleFn: TypeAlias = Callable[ + [str, str, str, int, docutils.parsers.rst.states.Inliner, dict[str, Any], list[str]], + tuple[list[docutils.nodes.reference], list[docutils.nodes.reference]], +] + +def register_local_role(name: str, role_fn: _RoleFn) -> None: ... +def role( + role_name: str, language_module: _LanguageModule, lineno: int, reporter: Reporter +) -> tuple[_RoleFn | None, list[SystemMessage]]: ... +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/states.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/states.pyi new file mode 100644 index 00000000..1ef7be91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/parsers/rst/states.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +class Inliner: + def __init__(self) -> None: ... + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/__init__.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/__init__.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... 
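As an illustration of the `_RoleFn` alias and `register_local_role()` declared in roles.pyi above, a minimal hypothetical role function with the seven-argument signature the alias describes (the role name and URL are placeholders):

# Hypothetical example matching the _RoleFn shape from the stub above.
from docutils import nodes
from docutils.parsers.rst import roles

def issue_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    # Return value mirrors _RoleFn: (new inline nodes, system messages).
    node = nodes.reference(rawtext, text, refuri=f"https://example.invalid/{text}")
    return [node], []

roles.register_local_role("issue", issue_role)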
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/doctree.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/doctree.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/doctree.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/pep.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/pep.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/pep.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/standalone.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/standalone.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/readers/standalone.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/statemachine.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/statemachine.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/statemachine.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/transforms/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/transforms/__init__.pyi new file mode 100644 index 00000000..1fdb00b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/transforms/__init__.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +from docutils.nodes import Node, document + +class Transform: + def __init__(self, document: document, startnode: Node | None = ...): ... + def __getattr__(self, __name: str) -> Incomplete: ... + +class Transformer: + def __init__(self, document: document): ... + def add_transform(self, transform_class: type[Transform], priority: int | None = ..., **kwargs) -> None: ... + def __getattr__(self, __name: str) -> Incomplete: ... + +def __getattr__(name: str) -> Incomplete: ... 
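A rough sketch of how the `Transform`/`Transformer` API typed in transforms/__init__.pyi above is typically used. The `default_priority` attribute and `apply()` method are runtime docutils conventions not spelled out in the stub, and the transform shown here is hypothetical.

# Hypothetical example: a custom Transform queued on a document's transformer.
from docutils import nodes
from docutils.transforms import Transform

class DropComments(Transform):
    default_priority = 900  # runtime convention; Incomplete in the stub

    def apply(self) -> None:
        # Walk the tree and drop comment nodes (findall requires docutils >= 0.18).
        for node in list(self.document.findall(nodes.comment)):
            node.parent.remove(node)

# Given a parsed `document`:
#   document.transformer.add_transform(DropComments)
#   document.transformer.apply_transforms()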
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/utils/__init__.pyi new file mode 100644 index 00000000..8c3733db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/utils/__init__.pyi @@ -0,0 +1,39 @@ +import optparse +from _typeshed import Incomplete +from builtins import list as _list # alias to avoid name clashes with fields named list +from collections.abc import Iterable +from typing_extensions import Literal, TypeAlias + +from docutils import ApplicationError +from docutils.io import FileOutput +from docutils.nodes import document + +class DependencyList: + list: _list[str] + file: FileOutput | None + def __init__(self, output_file: str | None = ..., dependencies: Iterable[str] = ...) -> None: ... + def set_output(self, output_file: str | None) -> None: ... + def add(self, *filenames: str) -> None: ... + def close(self) -> None: ... + +_SystemMessageLevel: TypeAlias = Literal[0, 1, 2, 3, 4] + +class Reporter: + DEBUG_LEVEL: Literal[0] + INFO_LEVEL: Literal[1] + WARNING_LEVEL: Literal[2] + ERROR_LEVEL: Literal[3] + SEVERE_LEVEL: Literal[4] + + source: str + report_level: _SystemMessageLevel + halt_level: _SystemMessageLevel + def __getattr__(self, __name: str) -> Incomplete: ... + +class SystemMessage(ApplicationError): + level: _SystemMessageLevel + def __init__(self, system_message: object, level: _SystemMessageLevel): ... + +def new_reporter(source_path: str, settings: optparse.Values) -> Reporter: ... +def new_document(source_path: str, settings: optparse.Values | None = ...) -> document: ... +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/__init__.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/__init__.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/docutils_xml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/docutils_xml.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/docutils_xml.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/html4css1.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/html4css1.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/html4css1.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/html5_polyglot.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/html5_polyglot.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/html5_polyglot.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/latex2e.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/latex2e.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/latex2e.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/manpage.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/manpage.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/manpage.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/null.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/null.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/null.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/odf_odt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/odf_odt.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/odf_odt.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/pep_html.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/pep_html.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/pep_html.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/pseudoxml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/pseudoxml.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/pseudoxml.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/s5_html.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/s5_html.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/s5_html.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/xetex.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/xetex.pyi new file mode 100644 index 00000000..0f6820f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/docutils/docutils/writers/xetex.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/editdistance/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/editdistance/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..ef46eff1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/editdistance/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# Not public API -- the submodule is an implementation detail due to it being a cythonized package +editdistance.bycython diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/editdistance/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/editdistance/METADATA.toml new file mode 100644 index 00000000..03031f1e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/editdistance/METADATA.toml @@ -0,0 +1 @@ +version = "0.6.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/editdistance/editdistance/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/editdistance/editdistance/__init__.pyi new file mode 100644 index 00000000..36088497 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/editdistance/editdistance/__init__.pyi @@ -0,0 +1,4 @@ +from collections.abc import Hashable, Iterable + +def eval(a: Iterable[Hashable], b: Iterable[Hashable]) -> int: ... +def distance(a: Iterable[Hashable], b: Iterable[Hashable]) -> int: ... 
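As a quick usage illustration of the two functions declared in the editdistance stub above (both accept any iterables of hashable items):

# Illustrative usage of the editdistance API typed above.
import editdistance

assert editdistance.eval("kitten", "sitting") == 3    # classic Levenshtein example
assert editdistance.distance([1, 2, 3], [1, 3]) == 1  # also works on non-string sequences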
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/METADATA.toml new file mode 100644 index 00000000..b0a6e7c5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/METADATA.toml @@ -0,0 +1,2 @@ +version = "2.1.*" +obsolete_since = "2.2.0" # Released on 2022-10-31 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/__init__.pyi new file mode 100644 index 00000000..d81dfe62 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/__init__.pyi @@ -0,0 +1,30 @@ +from .core import ( + demojize as demojize, + distinct_emoji_list as distinct_emoji_list, + emoji_count as emoji_count, + emoji_list as emoji_list, + emojize as emojize, + is_emoji as is_emoji, + replace_emoji as replace_emoji, + version as version, +) +from .unicode_codes import EMOJI_DATA, LANGUAGES, STATUS + +__all__ = [ + "emojize", + "demojize", + "emoji_count", + "emoji_list", + "distinct_emoji_list", + "replace_emoji", + "version", + "is_emoji", + "EMOJI_DATA", + "STATUS", + "LANGUAGES", +] +__version__: str +__author__: str +__email__: str +__source__: str +__license__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/core.pyi new file mode 100644 index 00000000..498586de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/core.pyi @@ -0,0 +1,31 @@ +from collections.abc import Callable +from typing_extensions import Literal, TypedDict + +_DEFAULT_DELIMITER: str + +class _EmojiListReturn(TypedDict): + emoji: str + match_start: int + match_end: int + +def emojize( + string: str, + delimiters: tuple[str, str] = ..., + variant: Literal["text_type", "emoji_type", None] = ..., + language: str = ..., + version: float | None = ..., + handle_version: str | Callable[[str, dict[str, str]], str] | None = ..., +) -> str: ... +def demojize( + string: str, + delimiters: tuple[str, str] = ..., + language: str = ..., + version: float | None = ..., + handle_version: str | Callable[[str, dict[str, str]], str] | None = ..., +) -> str: ... +def replace_emoji(string: str, replace: str | Callable[[str, dict[str, str]], str] = ..., version: float = ...) -> str: ... +def emoji_list(string: str) -> list[_EmojiListReturn]: ... +def distinct_emoji_list(string: str) -> list[str]: ... +def emoji_count(string: str, unique: bool = ...) -> int: ... +def version(string: str) -> float: ... +def is_emoji(string: str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/unicode_codes/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/unicode_codes/__init__.pyi new file mode 100644 index 00000000..9e698a6b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/unicode_codes/__init__.pyi @@ -0,0 +1,6 @@ +from .data_dict import * + +__all__ = ["get_emoji_unicode_dict", "get_aliases_unicode_dict", "EMOJI_DATA", "STATUS", "LANGUAGES"] + +def get_emoji_unicode_dict(lang: str) -> dict[str, str]: ... 
+def get_aliases_unicode_dict() -> dict[str, str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/unicode_codes/data_dict.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/unicode_codes/data_dict.pyi new file mode 100644 index 00000000..ffbd89d0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/emoji/emoji/unicode_codes/data_dict.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +__all__ = ["EMOJI_DATA", "STATUS", "LANGUAGES"] +STATUS: dict[str, int] +LANGUAGES: list[str] +EMOJI_DATA: dict[str, dict[str, Incomplete]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/entrypoints/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/entrypoints/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..7bfaa84f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/entrypoints/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# staticmethod weirdness: +entrypoints.CaseSensitiveConfigParser.optionxform diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/entrypoints/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/entrypoints/METADATA.toml new file mode 100644 index 00000000..582104d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/entrypoints/METADATA.toml @@ -0,0 +1 @@ +version = "0.4.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/entrypoints/entrypoints.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/entrypoints/entrypoints.pyi new file mode 100644 index 00000000..08e89ec5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/entrypoints/entrypoints.pyi @@ -0,0 +1,53 @@ +import sys +from collections.abc import Iterator, Sequence +from configparser import ConfigParser +from typing import Any +from typing_extensions import Self + +if sys.version_info >= (3, 8): + from re import Pattern +else: + from re import Pattern + +entry_point_pattern: Pattern[str] +file_in_zip_pattern: Pattern[str] + +class BadEntryPoint(Exception): + epstr: str + def __init__(self, epstr: str) -> None: ... + @staticmethod + def err_to_warnings() -> Iterator[None]: ... + +class NoSuchEntryPoint(Exception): + group: str + name: str + def __init__(self, group: str, name: str) -> None: ... + +class CaseSensitiveConfigParser(ConfigParser): ... + +class EntryPoint: + name: str + module_name: str + object_name: str + extras: Sequence[str] | None + distro: Distribution | None + def __init__( + self, name: str, module_name: str, object_name: str, extras: Sequence[str] | None = ..., distro: Distribution | None = ... + ) -> None: ... + def load(self) -> Any: ... + @classmethod + def from_string(cls, epstr: str, name: str, distro: Distribution | None = ...) -> Self: ... + +class Distribution: + name: str + version: str + def __init__(self, name: str, version: str) -> None: ... + @classmethod + def from_name_version(cls, name: str) -> Self: ... + +def iter_files_distros( + path: Sequence[str] | None = ..., repeated_distro: str = ... +) -> Iterator[tuple[ConfigParser, Distribution | None]]: ... +def get_single(group: str, name: str, path: Sequence[str] | None = ...) -> EntryPoint: ... 
+def get_group_named(group: str, path: Sequence[str] | None = ...) -> dict[str, EntryPoint]: ... +def get_group_all(group: str, path: Sequence[str] | None = ...) -> list[EntryPoint]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/first/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/first/METADATA.toml new file mode 100644 index 00000000..58bc3834 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/first/METADATA.toml @@ -0,0 +1 @@ +version = "2.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/first/first.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/first/first.pyi new file mode 100644 index 00000000..03997de5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/first/first.pyi @@ -0,0 +1,17 @@ +from collections.abc import Callable, Iterable +from typing import Any, TypeVar, overload + +_T = TypeVar("_T") +_S = TypeVar("_S") + +__license__: str +__title__: str + +@overload +def first(iterable: Iterable[_T]) -> _T | None: ... +@overload +def first(iterable: Iterable[_T], default: _S) -> _T | _S: ... +@overload +def first(iterable: Iterable[_T], default: _S, key: Callable[[_T], Any] | None) -> _T | _S: ... +@overload +def first(iterable: Iterable[_T], *, key: Callable[[_T], Any] | None) -> _T | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-2020/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-2020/METADATA.toml new file mode 100644 index 00000000..ef796600 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-2020/METADATA.toml @@ -0,0 +1 @@ +version = "1.7.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-2020/flake8_2020.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-2020/flake8_2020.pyi new file mode 100644 index 00000000..7af7c26b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-2020/flake8_2020.pyi @@ -0,0 +1,26 @@ +# flake8-2020 has type annotations, but PEP 561 states: +# This PEP does not support distributing typing information as part of module-only distributions or single-file modules within namespace packages. +# Therefore typeshed is the best place. + +import ast +from collections.abc import Generator +from typing import Any, ClassVar + +YTT101: str +YTT102: str +YTT103: str +YTT201: str +YTT202: str +YTT203: str +YTT204: str +YTT301: str +YTT302: str +YTT303: str + +class Visitor(ast.NodeVisitor): ... + +class Plugin: + name: ClassVar[str] + version: ClassVar[str] + def __init__(self, tree: ast.AST) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... 
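The flake8-2020 stub above, like the flake8-* stubs that follow, types the common flake8 checker protocol: a class with `name`/`version`, a constructor taking the module's `ast.AST`, and `run()` yielding `(line, col, message, type)` tuples. A minimal hypothetical checker in that shape, for illustration only:

# Hypothetical flake8-style checker mirroring the protocol typed above.
import ast
from collections.abc import Generator
from typing import Any

class PrintChecker:
    name = "flake8-print-example"  # hypothetical plugin name
    version = "0.0.1"

    def __init__(self, tree: ast.AST) -> None:
        self._tree = tree

    def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]:
        for node in ast.walk(self._tree):
            if isinstance(node, ast.Call) and isinstance(node.func, ast.Name) and node.func.id == "print":
                yield node.lineno, node.col_offset, "XPR100 print() call found", type(self)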
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-bugbear/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-bugbear/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..82f06909 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-bugbear/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +bugbear.BugBearChecker.__getattr__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-bugbear/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-bugbear/METADATA.toml new file mode 100644 index 00000000..78fea08a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-bugbear/METADATA.toml @@ -0,0 +1,4 @@ +version = "23.2.13" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-bugbear/bugbear.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-bugbear/bugbear.pyi new file mode 100644 index 00000000..d0b61fa8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-bugbear/bugbear.pyi @@ -0,0 +1,29 @@ +import argparse +import ast +from _typeshed import Incomplete +from collections.abc import Sequence +from typing import Any + +class BugBearChecker: + name: str + version: str + tree: ast.AST | None + filename: str + lines: Sequence[str] | None + max_line_length: int + visitor: ast.NodeVisitor + options: argparse.Namespace | None + def run(self) -> None: ... + @staticmethod + def add_options(optmanager: Any) -> None: ... + def __init__( + self, + tree: ast.AST | None = ..., + filename: str = ..., + lines: Sequence[str] | None = ..., + max_line_length: int = ..., + options: argparse.Namespace | None = ..., + ) -> None: ... + def __getattr__(self, name: str) -> Incomplete: ... # incomplete (other attributes are normally not accessed) + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-builtins/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-builtins/METADATA.toml new file mode 100644 index 00000000..db11b9ea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-builtins/METADATA.toml @@ -0,0 +1,4 @@ +version = "2.1.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-builtins/flake8_builtins.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-builtins/flake8_builtins.pyi new file mode 100644 index 00000000..c6aa608c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-builtins/flake8_builtins.pyi @@ -0,0 +1,12 @@ +import ast +from _typeshed import Incomplete +from collections.abc import Generator +from typing import Any, ClassVar + +class BuiltinsChecker: + name: ClassVar[str] + version: ClassVar[str] + def __init__(self, tree: ast.AST, filename: str) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + +def __getattr__(name: str) -> Incomplete: ... 
# incomplete (other attributes are normally not accessed) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-docstrings/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-docstrings/METADATA.toml new file mode 100644 index 00000000..bf44fdd1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-docstrings/METADATA.toml @@ -0,0 +1,4 @@ +version = "1.7.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-docstrings/flake8_docstrings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-docstrings/flake8_docstrings.pyi new file mode 100644 index 00000000..49d96ed6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-docstrings/flake8_docstrings.pyi @@ -0,0 +1,21 @@ +import argparse +import ast +from _typeshed import Incomplete +from collections.abc import Generator, Iterable +from typing import Any, ClassVar + +class pep257Checker: + name: ClassVar[str] + version: ClassVar[str] + tree: ast.AST + filename: str + checker: Any + source: str + def __init__(self, tree: ast.AST, filename: str, lines: Iterable[str]) -> None: ... + @classmethod + def add_options(cls, parser: Any) -> None: ... + @classmethod + def parse_options(cls, options: argparse.Namespace) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..c458a541 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/@tests/stubtest_allowlist.txt @@ -0,0 +1,6 @@ +flake8_plugin_utils +flake8_plugin_utils.plugin +flake8_plugin_utils.utils +flake8_plugin_utils.utils.assertions +flake8_plugin_utils.utils.constants +flake8_plugin_utils.utils.equiv_nodes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/METADATA.toml new file mode 100644 index 00000000..9a01e52f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/METADATA.toml @@ -0,0 +1,4 @@ +version = "1.3.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/__init__.pyi new file mode 100644 index 00000000..0c3f4bce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/__init__.pyi @@ -0,0 +1,9 @@ +from .plugin import Error as Error, Plugin as Plugin, Visitor as Visitor +from .utils import ( + assert_error as assert_error, + assert_not_error as assert_not_error, + check_equivalent_nodes as check_equivalent_nodes, + is_false as is_false, + is_none as is_none, + is_true as is_true, +) diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/plugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/plugin.pyi new file mode 100644 index 00000000..6949ec27 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/plugin.pyi @@ -0,0 +1,41 @@ +import argparse +import ast +from collections.abc import Iterable, Iterator +from typing import Any, Generic, TypeVar, overload +from typing_extensions import TypeAlias + +FLAKE8_ERROR: TypeAlias = tuple[int, int, str, type[Any]] +TConfig = TypeVar("TConfig") # noqa: Y001 # Name of the TypeVar matches the name at runtime + +class Error: + code: str + message: str + lineno: int + col_offset: int + def __init__(self, lineno: int, col_offset: int, **kwargs: Any) -> None: ... + @classmethod + def formatted_message(cls, **kwargs: Any) -> str: ... + +class Visitor(ast.NodeVisitor, Generic[TConfig]): + errors: list[Error] + @overload + def __init__(self, config: None = ...) -> None: ... + @overload + def __init__(self, config: TConfig) -> None: ... + @property + def config(self) -> TConfig: ... + def error_from_node(self, error: type[Error], node: ast.AST, **kwargs: Any) -> None: ... + +class Plugin(Generic[TConfig]): + name: str + version: str + visitors: list[type[Visitor[TConfig]]] + config: TConfig + def __init__(self, tree: ast.AST) -> None: ... + def run(self) -> Iterable[FLAKE8_ERROR]: ... + @classmethod + def parse_options(cls, option_manager: Any, options: argparse.Namespace, args: list[str]) -> None: ... + @classmethod + def parse_options_to_config(cls, option_manager: Any, options: argparse.Namespace, args: list[str]) -> TConfig | None: ... + @classmethod + def test_config(cls, config: TConfig) -> Iterator[None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/__init__.pyi new file mode 100644 index 00000000..095dbe4c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/__init__.pyi @@ -0,0 +1,3 @@ +from .assertions import assert_error as assert_error, assert_not_error as assert_not_error +from .constants import is_false as is_false, is_none as is_none, is_true as is_true +from .equiv_nodes import check_equivalent_nodes as check_equivalent_nodes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/assertions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/assertions.pyi new file mode 100644 index 00000000..d030a527 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/assertions.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from ..plugin import Error as Error, TConfig as TConfig, Visitor as Visitor + +def assert_error( + visitor_cls: type[Visitor[TConfig]], src: str, expected: type[Error], config: TConfig | None = ..., **kwargs: Any +) -> None: ... +def assert_not_error(visitor_cls: type[Visitor[TConfig]], src: str, config: TConfig | None = ...) -> None: ... 
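To show how the flake8_plugin_utils pieces typed above fit together, a small hypothetical plugin built from `Error`, `Visitor`, and `Plugin`, exercised with the `assert_error` helper from assertions.pyi; names and the error code are placeholders.

# Hypothetical plugin using the flake8_plugin_utils API typed above.
import ast
from flake8_plugin_utils import Error, Plugin, Visitor, assert_error

class LambdaError(Error):
    code = "EXM100"  # hypothetical error code
    message = "lambda found, use def instead"

class LambdaVisitor(Visitor[None]):
    def visit_Lambda(self, node: ast.Lambda) -> None:
        self.error_from_node(LambdaError, node)
        self.generic_visit(node)

class LambdaPlugin(Plugin[None]):
    name = "flake8-example-lambda"
    version = "0.0.1"
    visitors = [LambdaVisitor]

# The test helper runs the visitor over source text and checks the reported error:
assert_error(LambdaVisitor, "f = lambda x: x", LambdaError)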
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/constants.pyi new file mode 100644 index 00000000..8503693c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/constants.pyi @@ -0,0 +1,5 @@ +import ast + +def is_none(node: ast.AST) -> bool: ... +def is_false(node: ast.AST) -> bool: ... +def is_true(node: ast.AST) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/equiv_nodes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/equiv_nodes.pyi new file mode 100644 index 00000000..641945b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/equiv_nodes.pyi @@ -0,0 +1,3 @@ +import ast + +def check_equivalent_nodes(node1: ast.AST, node2: ast.AST) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-rst-docstrings/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-rst-docstrings/METADATA.toml new file mode 100644 index 00000000..c8abb998 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-rst-docstrings/METADATA.toml @@ -0,0 +1,4 @@ +version = "0.3.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-rst-docstrings/flake8_rst_docstrings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-rst-docstrings/flake8_rst_docstrings.pyi new file mode 100644 index 00000000..1ccdb3d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-rst-docstrings/flake8_rst_docstrings.pyi @@ -0,0 +1,34 @@ +import ast +from argparse import Namespace +from collections.abc import Container, Generator +from typing import Any + +rst_prefix: str +rst_fail_load: int +rst_fail_lint: int +code_mapping_info: dict[str, int] +code_mapping_warning: dict[str, int] +code_mapping_error: dict[str, int] +code_mapping_severe: dict[str, int] +code_mappings_by_level: dict[int, dict[str, int]] + +def code_mapping( + level: int, + msg: str, + extra_directives: Container[str], + extra_roles: Container[str], + extra_substitutions: Container[str], + default: int = ..., +) -> int: ... + +class reStructuredTextChecker: + name: str + version: str + tree: ast.AST + filename: str + def __init__(self, tree: ast.AST, filename: str = ...) -> None: ... + @classmethod + def add_options(cls, parser: Any) -> None: ... + @classmethod + def parse_options(cls, options: Namespace) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[reStructuredTextChecker]], None, None]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-simplify/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-simplify/METADATA.toml new file mode 100644 index 00000000..1647ad35 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-simplify/METADATA.toml @@ -0,0 +1,4 @@ +version = "0.19.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-simplify/flake8_simplify/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-simplify/flake8_simplify/__init__.pyi new file mode 100644 index 00000000..c1831fa3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-simplify/flake8_simplify/__init__.pyi @@ -0,0 +1,9 @@ +import ast +from collections.abc import Generator +from typing import Any, ClassVar + +class Plugin: + name: ClassVar[str] + version: ClassVar[str] + def __init__(self, tree: ast.AST) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-typing-imports/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-typing-imports/METADATA.toml new file mode 100644 index 00000000..93e1f8e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-typing-imports/METADATA.toml @@ -0,0 +1,4 @@ +version = "1.14.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-typing-imports/flake8_typing_imports.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-typing-imports/flake8_typing_imports.pyi new file mode 100644 index 00000000..93212549 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/flake8-typing-imports/flake8_typing_imports.pyi @@ -0,0 +1,17 @@ +import argparse +import ast +from _typeshed import Incomplete +from collections.abc import Generator +from typing import Any, ClassVar + +class Plugin: + name: ClassVar[str] + version: ClassVar[str] + @staticmethod + def add_options(option_manager: Any) -> None: ... + @classmethod + def parse_options(cls, options: argparse.Namespace) -> None: ... + def __init__(self, tree: ast.AST) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + +def __getattr__(name: str) -> Incomplete: ... # incomplete (other attributes are normally not accessed) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..1c5d0a0c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/@tests/stubtest_allowlist.txt @@ -0,0 +1,15 @@ +# The "dest" argument is unused and will be removed. +fpdf.FPDF.output +fpdf.fpdf.FPDF.output + +# Argument has default at runtime, but using it raises a TypeError. 
+fpdf.FPDF.set_creation_date +fpdf.fpdf.FPDF.set_creation_date + +# fonttools shims since we can't import it +fpdf._fonttools_shims + +# Checking the following function crashes stubtest 0.991, but seems to be +# fixed in later versions. +fpdf.FPDF.set_encryption +fpdf.fpdf.FPDF.set_encryption diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/METADATA.toml new file mode 100644 index 00000000..c83645eb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/METADATA.toml @@ -0,0 +1,5 @@ +version = "2.6.1" +requires = ["types-Pillow>=9.2.0"] + +[tool.stubtest] +stubtest_requirements = ["cryptography"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/__init__.pyi new file mode 100644 index 00000000..250fb72a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/__init__.pyi @@ -0,0 +1,29 @@ +from pathlib import Path + +from .enums import Align as Align, XPos as XPos, YPos as YPos +from .fpdf import FPDF as FPDF, TitleStyle as TitleStyle +from .html import HTML2FPDF as HTML2FPDF, HTMLMixin as HTMLMixin +from .prefs import ViewerPreferences as ViewerPreferences +from .template import FlexTemplate as FlexTemplate, Template as Template + +__license__: str +__version__: str +FPDF_VERSION: str +FPDF_FONT_DIR: Path + +__all__ = [ + "__version__", + "__license__", + "FPDF", + "Align", + "XPos", + "YPos", + "Template", + "FlexTemplate", + "TitleStyle", + "ViewerPreferences", + "HTMLMixin", + "HTML2FPDF", + "FPDF_VERSION", + "FPDF_FONT_DIR", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/_fonttools_shims.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/_fonttools_shims.pyi new file mode 100644 index 00000000..95c6c6d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/_fonttools_shims.pyi @@ -0,0 +1,52 @@ +# from fontTools.misc.loggingTools +from abc import ABCMeta, abstractmethod +from collections.abc import Mapping +from logging import Logger +from typing import Protocol +from typing_extensions import TypeAlias + +# from fonttools.ttLib.ttGlyphSet +class _TTGlyph(Protocol): + def __init__(self, glyphSet: _TTGlyphSet, glyphName: str) -> None: ... + def draw(self, pen) -> None: ... + def drawPoints(self, pen) -> None: ... + +_TTGlyphSet: TypeAlias = Mapping[str, _TTGlyph] # Simplified for our needs + +# from fontTools.misc.loggingTools + +class LogMixin: + @property + def log(self) -> Logger: ... + +# from fontTools.pens.basePen +class AbstractPen: + @abstractmethod + def moveTo(self, pt: tuple[float, float]) -> None: ... + @abstractmethod + def lineTo(self, pt: tuple[float, float]) -> None: ... + @abstractmethod + def curveTo(self, *points: tuple[float, float]) -> None: ... + @abstractmethod + def qCurveTo(self, *points: tuple[float, float]) -> None: ... + def closePath(self) -> None: ... + def endPath(self) -> None: ... + @abstractmethod + def addComponent(self, glyphName: str, transformation: tuple[float, float, float, float, float, float]) -> None: ... + +class LoggingPen(LogMixin, AbstractPen, metaclass=ABCMeta): ... 
+ +class DecomposingPen(LoggingPen, metaclass=ABCMeta): + skipMissingComponents: bool + glyphSet: _TTGlyphSet | None + def __init__(self, glyphSet: _TTGlyphSet | None) -> None: ... + def addComponent(self, glyphName: str, transformation: tuple[float, float, float, float, float, float]) -> None: ... + +class BasePen(DecomposingPen): + def __init__(self, glyphSet: _TTGlyphSet | None = ...) -> None: ... + def closePath(self) -> None: ... + def endPath(self) -> None: ... + def moveTo(self, pt: tuple[float, float]) -> None: ... + def lineTo(self, pt: tuple[float, float]) -> None: ... + def curveTo(self, *points: tuple[float, float]) -> None: ... + def qCurveTo(self, *points: tuple[float, float]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/actions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/actions.pyi new file mode 100644 index 00000000..128155fa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/actions.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete +from abc import ABC, abstractmethod + +from .syntax import PDFObject + +class Action(ABC): + next: PDFObject | str | None + def __init__(self, next_action: PDFObject | str | None = ...) -> None: ... + @abstractmethod + def serialize(self) -> str: ... + +class URIAction(Action): + uri: str + def __init__(self, uri: str, next_action: PDFObject | str | None = ...) -> None: ... + def serialize(self) -> str: ... + +class NamedAction(Action): + action_name: str + def __init__(self, action_name: str, next_action: PDFObject | str | None = ...) -> None: ... + def serialize(self) -> str: ... + +class GoToAction(Action): + dest: Incomplete + def __init__(self, dest, next_action: PDFObject | str | None = ...) -> None: ... + def serialize(self) -> str: ... + +class GoToRemoteAction(Action): + file: str + dest: Incomplete + def __init__(self, file: str, dest, next_action: PDFObject | str | None = ...) -> None: ... + def serialize(self) -> str: ... + +class LaunchAction(Action): + file: str + def __init__(self, file: str, next_action: PDFObject | str | None = ...) -> None: ... + def serialize(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/annotations.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/annotations.pyi new file mode 100644 index 00000000..acf715b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/annotations.pyi @@ -0,0 +1,80 @@ +from _typeshed import Incomplete +from datetime import datetime +from typing import NamedTuple + +from .actions import Action +from .enums import AnnotationFlag, AnnotationName, FileAttachmentAnnotationName +from .syntax import Destination, Name, PDFContentStream, PDFObject + +DEFAULT_ANNOT_FLAGS: Incomplete + +class AnnotationMixin: + type: Name + subtype: Name + rect: str + border: str + f_t: Name | None + v: Incomplete | None + f: int # AnnotationFlags + contents: str | None + a: Action | None + dest: Destination | None + c: str | None + t: str | None + m: str | None + quad_points: str | None + p: Incomplete | None + name: AnnotationName | FileAttachmentAnnotationName | None + ink_list: str | None + f_s: str | None + def __init__( + self, + subtype: str, + x: int, + y: int, + width: int, + height: int, + flags: tuple[AnnotationFlag, ...] 
= ..., + contents: str | None = ..., + dest: Destination | None = ..., + action: Action | None = ..., + color: tuple[int, int, int] | None = ..., + modification_time: datetime | None = ..., + title: str | None = ..., + quad_points: tuple[float, ...] | None = ..., # multiple of 8 floats + border_width: int = ..., + name: AnnotationName | FileAttachmentAnnotationName | None = ..., + ink_list: tuple[int, ...] = ..., + file_spec: str | None = ..., + field_type: str | None = ..., + value: Incomplete | None = ..., + ) -> None: ... + +class PDFAnnotation(AnnotationMixin, PDFObject): ... + +class AnnotationDict(AnnotationMixin): + def serialize(self) -> str: ... + +class PDFEmbeddedFile(PDFContentStream): + type: Name + params: str + def __init__( + self, + basename: str, + contents: bytes, + desc: str = ..., + creation_date: datetime | None = ..., + modification_date: datetime | None = ..., + compress: bool = ..., + checksum: bool = ..., + ) -> None: ... + def globally_enclosed(self) -> bool: ... + def set_globally_enclosed(self, value: bool) -> None: ... + def basename(self) -> str: ... + def file_spec(self) -> FileSpec: ... + +class FileSpec(NamedTuple): + embedded_file: PDFEmbeddedFile + basename: str + desc: str + def serialize(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/deprecation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/deprecation.pyi new file mode 100644 index 00000000..91885bd3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/deprecation.pyi @@ -0,0 +1,5 @@ +from types import ModuleType + +class WarnOnDeprecatedModuleAttributes(ModuleType): + def __getattr__(self, name: str): ... + def __setattr__(self, name: str, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/drawing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/drawing.pyi new file mode 100644 index 00000000..24a1b543 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/drawing.pyi @@ -0,0 +1,417 @@ +import decimal +from _typeshed import Incomplete +from collections import OrderedDict +from collections.abc import Callable, Generator, Iterator +from contextlib import contextmanager +from re import Pattern +from typing import Any, ClassVar, NamedTuple, TypeVar +from typing_extensions import Self, TypeAlias + +from .syntax import Name, Raw + +__pdoc__: dict[str, bool] + +_CallableT = TypeVar("_CallableT", bound=Callable[..., Any]) + +def force_nodocument(item: _CallableT) -> _CallableT: ... +def force_document(item: _CallableT) -> _CallableT: ... + +Number: TypeAlias = int | float | decimal.Decimal +NumberClass: tuple[type, ...] +WHITESPACE: frozenset[str] +EOL_CHARS: frozenset[str] +DELIMITERS: frozenset[str] +STR_ESC: Pattern[str] +STR_ESC_MAP: dict[str, str] + +class GraphicsStateDictRegistry(OrderedDict[Raw, Name]): + def register_style(self, style: GraphicsStyle) -> Name | None: ... + +def number_to_str(number) -> str: ... +def render_pdf_primitive(primitive) -> Raw: ... + +class _DeviceRGBBase(NamedTuple): + r: Number + g: Number + b: Number + a: Number | None + +class DeviceRGB(_DeviceRGBBase): + OPERATOR: ClassVar[str] + def __new__(cls, r: Number, g: Number, b: Number, a: Number | None = ...) -> Self: ... + @property + def colors(self) -> tuple[Number, Number, Number]: ... 
+ def serialize(self) -> str: ... + +class _DeviceGrayBase(NamedTuple): + g: Number + a: Number | None + +class DeviceGray(_DeviceGrayBase): + OPERATOR: ClassVar[str] + def __new__(cls, g: Number, a: Number | None = ...) -> Self: ... + @property + def colors(self) -> tuple[Number]: ... + def serialize(self) -> str: ... + +class _DeviceCMYKBase(NamedTuple): + c: Number + m: Number + y: Number + k: Number + a: Number | None + +class DeviceCMYK(_DeviceCMYKBase): + OPERATOR: ClassVar[str] + def __new__(cls, c: Number, m: Number, y: Number, k: Number, a: Number | None = ...) -> Self: ... + @property + def colors(self) -> tuple[Number, Number, Number, Number]: ... + def serialize(self) -> str: ... + +def rgb8(r, g, b, a: Incomplete | None = ...) -> DeviceRGB: ... +def gray8(g, a: Incomplete | None = ...) -> DeviceGray: ... +def cmyk8(c, m, y, k, a: Incomplete | None = ...) -> DeviceCMYK: ... +def color_from_hex_string(hexstr) -> DeviceRGB: ... +def color_from_rgb_string(rgbstr) -> DeviceRGB: ... + +class Point(NamedTuple): + x: Number + y: Number + def render(self): ... + def dot(self, other): ... + def angle(self, other): ... + def mag(self): ... + def __add__(self, other): ... + def __sub__(self, other): ... + def __neg__(self): ... + def __mul__(self, other): ... + def __rmul__(self, other): ... + def __truediv__(self, other): ... + def __floordiv__(self, other): ... + def __matmul__(self, other): ... + +class Transform(NamedTuple): + a: Number + b: Number + c: Number + d: Number + e: Number + f: Number + @classmethod + def identity(cls): ... + @classmethod + def translation(cls, x, y): ... + @classmethod + def scaling(cls, x, y: Incomplete | None = ...): ... + @classmethod + def rotation(cls, theta): ... + @classmethod + def rotation_d(cls, theta_d): ... + @classmethod + def shearing(cls, x, y: Incomplete | None = ...): ... + def translate(self, x, y): ... + def scale(self, x, y: Incomplete | None = ...): ... + def rotate(self, theta): ... + def rotate_d(self, theta_d): ... + def shear(self, x, y: Incomplete | None = ...): ... + def about(self, x, y): ... + def __mul__(self, other): ... + def __rmul__(self, other): ... + def __matmul__(self, other): ... + def render(self, last_item): ... + +class GraphicsStyle: + INHERIT: ClassVar[Incomplete] + MERGE_PROPERTIES: ClassVar[tuple[str, ...]] + TRANSPARENCY_KEYS: ClassVar[tuple[Name, ...]] + PDF_STYLE_KEYS: ClassVar[tuple[Name, ...]] + @classmethod + def merge(cls, parent, child): ... + def __init__(self) -> None: ... + def __deepcopy__(self, memo) -> Self: ... + @property + def allow_transparency(self): ... + @allow_transparency.setter + def allow_transparency(self, new): ... + @property + def paint_rule(self): ... + @paint_rule.setter + def paint_rule(self, new) -> None: ... + @property + def auto_close(self): ... + @auto_close.setter + def auto_close(self, new) -> None: ... + @property + def intersection_rule(self): ... + @intersection_rule.setter + def intersection_rule(self, new) -> None: ... + @property + def fill_color(self): ... + @fill_color.setter + def fill_color(self, color) -> None: ... + @property + def fill_opacity(self): ... + @fill_opacity.setter + def fill_opacity(self, new) -> None: ... + @property + def stroke_color(self): ... + @stroke_color.setter + def stroke_color(self, color) -> None: ... + @property + def stroke_opacity(self): ... + @stroke_opacity.setter + def stroke_opacity(self, new) -> None: ... + @property + def blend_mode(self): ... + @blend_mode.setter + def blend_mode(self, value) -> None: ... 
+ @property + def stroke_width(self): ... + @stroke_width.setter + def stroke_width(self, width) -> None: ... + @property + def stroke_cap_style(self): ... + @stroke_cap_style.setter + def stroke_cap_style(self, value) -> None: ... + @property + def stroke_join_style(self): ... + @stroke_join_style.setter + def stroke_join_style(self, value) -> None: ... + @property + def stroke_miter_limit(self): ... + @stroke_miter_limit.setter + def stroke_miter_limit(self, value) -> None: ... + @property + def stroke_dash_pattern(self): ... + @stroke_dash_pattern.setter + def stroke_dash_pattern(self, value) -> None: ... + @property + def stroke_dash_phase(self): ... + @stroke_dash_phase.setter + def stroke_dash_phase(self, value): ... + def serialize(self) -> Raw | None: ... + def resolve_paint_rule(self): ... + +class Move(NamedTuple): + pt: Point + @property + def end_point(self): ... + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class RelativeMove(NamedTuple): + pt: Point + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class Line(NamedTuple): + pt: Point + @property + def end_point(self): ... + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class RelativeLine(NamedTuple): + pt: Point + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class HorizontalLine(NamedTuple): + x: Number + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class RelativeHorizontalLine(NamedTuple): + x: Number + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class VerticalLine(NamedTuple): + y: Number + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class RelativeVerticalLine(NamedTuple): + y: Number + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class BezierCurve(NamedTuple): + c1: Point + c2: Point + end: Point + @property + def end_point(self): ... + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class RelativeBezierCurve(NamedTuple): + c1: Point + c2: Point + end: Point + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class QuadraticBezierCurve(NamedTuple): + ctrl: Point + end: Point + @property + def end_point(self): ... + def to_cubic_curve(self, start_point): ... + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... 
+ +class RelativeQuadraticBezierCurve(NamedTuple): + ctrl: Point + end: Point + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class Arc(NamedTuple): + radii: Point + rotation: Number + large: bool + sweep: bool + end: Point + @staticmethod + def subdivde_sweep(sweep_angle) -> Generator[Incomplete, None, None]: ... + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class RelativeArc(NamedTuple): + radii: Point + rotation: Number + large: bool + sweep: bool + end: Point + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class Rectangle(NamedTuple): + org: Point + size: Point + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class RoundedRectangle(NamedTuple): + org: Point + size: Point + corner_radii: Point + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class Ellipse(NamedTuple): + radii: Point + center: Point + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class ImplicitClose(NamedTuple): + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class Close(NamedTuple): + def render(self, gsd_registry, style, last_item, initial_point): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class DrawingContext: + def __init__(self) -> None: ... + def add_item(self, item, _copy: bool = ...) -> None: ... + def render(self, gsd_registry, first_point, scale, height, starting_style): ... + def render_debug(self, gsd_registry, first_point, scale, height, starting_style, debug_stream): ... + +class PaintedPath: + def __init__(self, x: int = ..., y: int = ...) -> None: ... + def __deepcopy__(self, memo) -> Self: ... + @property + def style(self): ... + @property + def transform(self): ... + @transform.setter + def transform(self, tf) -> None: ... + @property + def auto_close(self): ... + @auto_close.setter + def auto_close(self, should) -> None: ... + @property + def paint_rule(self): ... + @paint_rule.setter + def paint_rule(self, style) -> None: ... + @property + def clipping_path(self): ... + @clipping_path.setter + def clipping_path(self, new_clipath) -> None: ... + @contextmanager + def transform_group(self, transform) -> Iterator[Self]: ... + def add_path_element(self, item, _copy: bool = ...) -> None: ... + def remove_last_path_element(self) -> None: ... + def rectangle(self, x, y, w, h, rx: int = ..., ry: int = ...) -> Self: ... + def circle(self, cx, cy, r) -> Self: ... + def ellipse(self, cx, cy, rx, ry) -> Self: ... + def move_to(self, x, y) -> Self: ... + def move_relative(self, x, y) -> Self: ... + def line_to(self, x, y) -> Self: ... + def line_relative(self, dx, dy) -> Self: ... + def horizontal_line_to(self, x) -> Self: ... + def horizontal_line_relative(self, dx) -> Self: ... 
+ def vertical_line_to(self, y) -> Self: ... + def vertical_line_relative(self, dy) -> Self: ... + def curve_to(self, x1, y1, x2, y2, x3, y3) -> Self: ... + def curve_relative(self, dx1, dy1, dx2, dy2, dx3, dy3) -> Self: ... + def quadratic_curve_to(self, x1, y1, x2, y2) -> Self: ... + def quadratic_curve_relative(self, dx1, dy1, dx2, dy2) -> Self: ... + def arc_to(self, rx, ry, rotation, large_arc, positive_sweep, x, y) -> Self: ... + def arc_relative(self, rx, ry, rotation, large_arc, positive_sweep, dx, dy) -> Self: ... + def close(self) -> None: ... + def render( + self, gsd_registry, style, last_item, initial_point, debug_stream: Incomplete | None = ..., pfx: Incomplete | None = ... + ): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class ClippingPath(PaintedPath): + paint_rule: Incomplete + def __init__(self, x: int = ..., y: int = ...) -> None: ... + def render( + self, gsd_registry, style, last_item, initial_point, debug_stream: Incomplete | None = ..., pfx: Incomplete | None = ... + ): ... + def render_debug(self, gsd_registry, style, last_item, initial_point, debug_stream, pfx): ... + +class GraphicsContext: + style: GraphicsStyle + path_items: list[Incomplete] + def __init__(self) -> None: ... + def __deepcopy__(self, memo) -> Self: ... + @property + def transform(self) -> Transform | None: ... + @transform.setter + def transform(self, tf) -> None: ... + @property + def clipping_path(self) -> ClippingPath | None: ... + @clipping_path.setter + def clipping_path(self, new_clipath) -> None: ... + def add_item(self, item, _copy: bool = ...) -> None: ... + def remove_last_item(self) -> None: ... + def merge(self, other_context) -> None: ... + def build_render_list( + self, + gsd_registry, + style, + last_item, + initial_point, + debug_stream: Incomplete | None = ..., + pfx: Incomplete | None = ..., + _push_stack: bool = ..., + ): ... + def render( + self, + gsd_registry, + style: DrawingContext, + last_item, + initial_point, + debug_stream: Incomplete | None = ..., + pfx: Incomplete | None = ..., + _push_stack: bool = ..., + ): ... + def render_debug( + self, gsd_registry, style: DrawingContext, last_item, initial_point, debug_stream, pfx, _push_stack: bool = ... + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/encryption.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/encryption.pyi new file mode 100644 index 00000000..1888d6f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/encryption.pyi @@ -0,0 +1,89 @@ +from _typeshed import Incomplete, SupportsLenAndGetItem +from collections.abc import Generator, Iterable +from typing import ClassVar, Protocol, TypeVar +from typing_extensions import TypeAlias + +from .enums import EncryptionMethod +from .fpdf import FPDF +from .syntax import Name, PDFObject + +_Key: TypeAlias = SupportsLenAndGetItem[int] +_T_co = TypeVar("_T_co", covariant=True) + +import_error: ImportError | None + +class _SupportsGetItem(Protocol[_T_co]): + def __getitem__(self, __k: int) -> _T_co: ... + +class ARC4: + MOD: ClassVar[int] + def KSA(self, key: _Key) -> list[int]: ... + def PRGA(self, S: _SupportsGetItem[int]) -> Generator[int, None, None]: ... + def encrypt(self, key: _Key, text: Iterable[int]) -> list[int]: ... + +class CryptFilter: + type: Name + c_f_m: Name + length: int + def __init__(self, mode, length) -> None: ... 
+ def serialize(self) -> str: ... + +class EncryptionDictionary(PDFObject): + filter: Name + length: int + r: int + o: str + u: str + v: int + p: int + encrypt_metadata: str # not always defined + c_f: str # not always defined + stm_f: Name + str_f: Name + def __init__(self, security_handler: StandardSecurityHandler) -> None: ... + +class StandardSecurityHandler: + DEFAULT_PADDING: ClassVar[bytes] + fpdf: FPDF + access_permission: int + owner_password: str + user_password: str + encryption_method: EncryptionMethod | None + cf: CryptFilter | None + key_length: int + v: int + r: int + encrypt_metadata: bool + + # The following fields are only defined after a call to generate_passwords(). + file_id: Incomplete + info_id: Incomplete + o: str + k: str + u: str + + def __init__( + self, + fpdf: FPDF, + owner_password: str, + user_password: str | None = None, + permission: Incomplete | None = None, + encryption_method: EncryptionMethod | None = None, + encrypt_metadata: bool = False, + ) -> None: ... + def generate_passwords(self, file_id) -> None: ... + def get_encryption_obj(self) -> EncryptionDictionary: ... + def encrypt(self, text: str | bytes | bytearray, obj_id) -> bytes: ... + def encrypt_string(self, string, obj_id): ... + def encrypt_stream(self, stream, obj_id): ... + def is_aes_algorithm(self) -> bool: ... + def encrypt_bytes(self, data, obj_id) -> list[int]: ... + def encrypt_AES_cryptography(self, key, data): ... + def get_initialization_vector(self, size: int) -> bytearray: ... + def padded_password(self, password: str) -> bytearray: ... + def generate_owner_password(self) -> str: ... + def generate_user_password(self) -> str: ... + def generate_encryption_key(self) -> bytes: ... + +def md5(data: bytes) -> bytes: ... +def int32(n: int) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/enums.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/enums.pyi new file mode 100644 index 00000000..9f82c600 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/enums.pyi @@ -0,0 +1,204 @@ +from enum import Enum, Flag, IntEnum, IntFlag +from typing_extensions import Literal, Self + +from .syntax import Name + +class SignatureFlag(IntEnum): + SIGNATURES_EXIST: int + APPEND_ONLY: int + +class CoerciveEnum(Enum): + @classmethod + def coerce(cls, value: Self | str) -> Self: ... + +class CoerciveIntEnum(IntEnum): + @classmethod + def coerce(cls, value: Self | str | int) -> Self: ... + +class CharVPos(CoerciveEnum): + SUP: str + SUB: str + NOM: str + DENOM: str + LINE: str + +class Align(CoerciveEnum): + C: str + X: str + L: str + R: str + J: str + +class RenderStyle(CoerciveEnum): + D: str + F: str + DF: str + @property + def operator(self) -> str: ... + @property + def is_draw(self) -> bool: ... + @property + def is_fill(self) -> bool: ... 
+ +class TextMode(CoerciveIntEnum): + FILL: int + STROKE: int + FILL_STROKE: int + INVISIBLE: int + FILL_CLIP: int + STROKE_CLIP: int + FILL_STROKE_CLIP: int + CLIP: int + +class XPos(CoerciveEnum): + LEFT: str + RIGHT: str + START: str + END: str + WCONT: str + CENTER: str + LMARGIN: str + RMARGIN: str + +class YPos(CoerciveEnum): + TOP: str + LAST: str + NEXT: str + TMARGIN: str + BMARGIN: str + +class PageLayout(CoerciveEnum): + SINGLE_PAGE: Name + ONE_COLUMN: Name + TWO_COLUMN_LEFT: Name + TWO_COLUMN_RIGHT: Name + TWO_PAGE_LEFT: Name + TWO_PAGE_RIGHT: Name + +class PageMode(CoerciveEnum): + USE_NONE: Name + USE_OUTLINES: Name + USE_THUMBS: Name + FULL_SCREEN: Name + USE_OC: Name + USE_ATTACHMENTS: Name + +class TextMarkupType(CoerciveEnum): + HIGHLIGHT: Name + UNDERLINE: Name + SQUIGGLY: Name + STRIKE_OUT: Name + +class BlendMode(CoerciveEnum): + NORMAL: Name + MULTIPLY: Name + SCREEN: Name + OVERLAY: Name + DARKEN: Name + LIGHTEN: Name + COLOR_DODGE: Name + COLOR_BURN: Name + HARD_LIGHT: Name + SOFT_LIGHT: Name + DIFFERENCE: Name + EXCLUSION: Name + HUE: Name + SATURATION: Name + COLOR: Name + LUMINOSITY: Name + +class AnnotationFlag(CoerciveIntEnum): + INVISIBLE: int + HIDDEN: int + PRINT: int + NO_ZOOM: int + NO_ROTATE: int + NO_VIEW: int + READ_ONLY: int + LOCKED: int + TOGGLE_NO_VIEW: int + LOCKED_CONTENTS: int + +class AnnotationName(CoerciveEnum): + NOTE: Name + COMMENT: Name + HELP: Name + PARAGRAPH: Name + NEW_PARAGRAPH: Name + INSERT: Name + +class FileAttachmentAnnotationName(CoerciveEnum): + PUSH_PIN: Name + GRAPH_PUSH_PIN: Name + PAPERCLIP_TAG: Name + +class IntersectionRule(CoerciveEnum): + NONZERO: str + EVENODD: str + +class PathPaintRule(CoerciveEnum): + STROKE: str + FILL_NONZERO: str + FILL_EVENODD: str + STROKE_FILL_NONZERO: str + STROKE_FILL_EVENODD: str + DONT_PAINT: str + AUTO: str + +class ClippingPathIntersectionRule(CoerciveEnum): + NONZERO: str + EVENODD: str + +class StrokeCapStyle(CoerciveIntEnum): + BUTT: int + ROUND: int + SQUARE: int + +class StrokeJoinStyle(CoerciveIntEnum): + MITER: int + ROUND: int + BEVEL: int + +class PDFStyleKeys(Enum): + FILL_ALPHA: Name + BLEND_MODE: Name + STROKE_ALPHA: Name + STROKE_ADJUSTMENT: Name + STROKE_WIDTH: Name + STROKE_CAP_STYLE: Name + STROKE_JOIN_STYLE: Name + STROKE_MITER_LIMIT: Name + STROKE_DASH_PATTERN: Name + +class Corner(CoerciveEnum): + TOP_RIGHT: str + TOP_LEFT: str + BOTTOM_RIGHT: str + BOTTOM_LEFT: str + +class FontDescriptorFlags(Flag): + FIXED_PITCH: int + SYMBOLIC: int + ITALIC: int + FORCE_BOLD: int + +class AccessPermission(IntFlag): + PRINT_LOW_RES: int + MODIFY: int + COPY: int + ANNOTATION: int + FILL_FORMS: int + COPY_FOR_ACCESSIBILITY: int + ASSEMBLE: int + PRINT_HIGH_RES: int + @classmethod + def all(cls) -> int: ... + @classmethod + def none(cls) -> Literal[0]: ... + +class EncryptionMethod(Enum): + NO_ENCRYPTION: int + RC4: int + AES_128: int + +__pdoc__: dict[str, bool] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/errors.pyi new file mode 100644 index 00000000..c417166e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/errors.pyi @@ -0,0 +1,12 @@ +from typing import Any + +class FPDFException(Exception): ... + +class FPDFPageFormatException(FPDFException): + argument: Any + unknown: Any + one: Any + def __init__(self, argument, unknown: bool = ..., one: bool = ...) -> None: ... 
+ +class FPDFUnicodeEncodingException(FPDFException): + def __init__(self, text_index, character, font_name) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/fonts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/fonts.pyi new file mode 100644 index 00000000..5a894fff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/fonts.pyi @@ -0,0 +1,4 @@ +from typing import Any + +courier: Any +fpdf_charwidths: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/fpdf.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/fpdf.pyi new file mode 100644 index 00000000..0c6b86b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/fpdf.pyi @@ -0,0 +1,502 @@ +import datetime +from _typeshed import Incomplete, StrPath +from collections.abc import Callable, Iterable, Sequence +from contextlib import _GeneratorContextManager +from io import BytesIO +from pathlib import PurePath +from re import Pattern +from typing import Any, ClassVar, NamedTuple, overload +from typing_extensions import Literal, TypeAlias + +from fpdf import ViewerPreferences +from PIL import Image + +from .annotations import AnnotationDict, PDFEmbeddedFile +from .drawing import DrawingContext, PaintedPath +from .enums import ( + Align, + AnnotationFlag, + AnnotationName, + Corner, + FileAttachmentAnnotationName, + PageLayout, + PathPaintRule, + RenderStyle, + TextMarkupType, + TextMode as TextMode, + XPos as XPos, + YPos as YPos, +) +from .html import HTML2FPDF +from .output import PDFPage +from .recorder import FPDFRecorder +from .structure_tree import StructureTreeBuilder +from .syntax import DestinationXYZ +from .util import _Unit + +__all__ = ["FPDF", "XPos", "YPos", "get_page_format", "TextMode", "TitleStyle", "PAGE_FORMATS"] + +_Orientation: TypeAlias = Literal["", "portrait", "p", "P", "landscape", "l", "L"] +_Format: TypeAlias = Literal["", "a3", "A3", "a4", "A4", "a5", "A5", "letter", "Letter", "legal", "Legal"] +_FontStyle: TypeAlias = Literal["", "B", "I"] +_FontStyles: TypeAlias = Literal["", "B", "I", "U", "BU", "UB", "BI", "IB", "IU", "UI", "BIU", "BUI", "IBU", "IUB", "UBI", "UIB"] +PAGE_FORMATS: dict[_Format, tuple[float, float]] + +class TitleStyle(NamedTuple): + font_family: str | None = ... + font_style: str | None = ... + font_size_pt: int | None = ... + color: int | tuple[int, int, int] | None = ... + underline: bool = ... + t_margin: int | None = ... + l_margin: int | None = ... + b_margin: int | None = ... + +class ToCPlaceholder(NamedTuple): + render_function: Callable[[FPDF, Any], object] + start_page: int + y: int + pages: int = ... + +class SubsetMap: + def __init__(self, identities: Iterable[int]) -> None: ... + def __len__(self) -> int: ... + def pick(self, unicode: int) -> int: ... + def dict(self) -> dict[int, int]: ... + +def get_page_format(format: _Format | tuple[float, float], k: float | None = ...) -> tuple[float, float]: ... 
+ +# TODO: TypedDicts +_Font: TypeAlias = dict[str, Any] +_Image: TypeAlias = dict[str, Any] + +class FPDF: + MARKDOWN_BOLD_MARKER: ClassVar[str] + MARKDOWN_ITALICS_MARKER: ClassVar[str] + MARKDOWN_UNDERLINE_MARKER: ClassVar[str] + MARKDOWN_LINK_REGEX: ClassVar[Pattern[str]] + MARKDOWN_LINK_COLOR: ClassVar[Incomplete | None] + + HTML2FPDF_CLASS: ClassVar[type[HTML2FPDF]] + + page: int + pages: dict[int, PDFPage] + fonts: dict[str, _Font] + images: dict[str, _Image] + links: dict[int, DestinationXYZ] + embedded_files: list[PDFEmbeddedFile] + + in_footer: bool + str_alias_nb_pages: str + + xmp_metadata: str | None + image_filter: str + page_duration: int + page_transition: Incomplete | None + allow_images_transparency: bool + oversized_images: Incomplete | None + oversized_images_ratio: float + struct_builder: StructureTreeBuilder + section_title_styles: dict[int, Incomplete] + + core_fonts: dict[str, str] + core_fonts_encoding: str + font_aliases: dict[str, str] + k: float + + font_family: str + font_style: str + font_size_pt: float + font_stretching: float + char_spacing: float + underline: bool + current_font: _Font + draw_color: str + fill_color: str + text_color: str + page_background: Incomplete | None + dash_pattern: dict[str, int] # TODO: TypedDict + line_width: float + text_mode: TextMode + + dw_pt: float + dh_pt: float + def_orientation: Literal["P", "L"] + x: float + y: float + l_margin: float + t_margin: float + c_margin: float + viewer_preferences: ViewerPreferences | None + compress: bool + pdf_version: str + creation_date: datetime.datetime + + buffer: bytearray | None + + # Set during call to _set_orientation(), called from __init__(). + cur_orientation: Literal["P", "L"] + w_pt: float + h_pt: float + w: float + h: float + + def __init__( + self, + orientation: _Orientation = ..., + unit: _Unit | float = ..., + format: _Format | tuple[float, float] = ..., + font_cache_dir: Literal["DEPRECATED"] = ..., + ) -> None: ... + # The following definition crashes stubtest 0.991, but seems to be fixed + # in later versions. + # def set_encryption( + # self, + # owner_password: str, + # user_password: str | None = None, + # encryption_method: EncryptionMethod | str = ..., + # permissions: AccessPermission = ..., + # encrypt_metadata: bool = False, + # ) -> None: ... + # args and kwargs are passed to HTML2FPDF_CLASS constructor. + def write_html(self, text: str, *args: Any, **kwargs: Any) -> None: ... + @property + def is_ttf_font(self) -> bool: ... + @property + def page_mode(self): ... + @property + def epw(self) -> float: ... + @property + def eph(self) -> float: ... + @property + def pages_count(self) -> int: ... + def set_margin(self, margin: float) -> None: ... + def set_margins(self, left: float, top: float, right: float = ...) -> None: ... + def set_left_margin(self, margin: float) -> None: ... + def set_top_margin(self, margin: float) -> None: ... + r_margin: float + def set_right_margin(self, margin: float) -> None: ... + auto_page_break: bool + b_margin: float + page_break_trigger: float + def set_auto_page_break(self, auto: bool, margin: float = ...) -> None: ... + @property + def default_page_dimensions(self) -> tuple[float, float]: ... + zoom_mode: Literal["fullpage", "fullwidth", "real", "default"] | float + page_layout: PageLayout | None + def set_display_mode( + self, + zoom: Literal["fullpage", "fullwidth", "real", "default"] | float, + layout: Literal["single", "continuous", "two", "default"] = ..., + ) -> None: ... 
+ def set_compression(self, compress: bool) -> None: ... + title: str + def set_title(self, title: str) -> None: ... + lang: str + def set_lang(self, lang: str) -> None: ... + subject: str + def set_subject(self, subject: str) -> None: ... + author: str + def set_author(self, author: str) -> None: ... + keywords: str + def set_keywords(self, keywords: str) -> None: ... + creator: str + def set_creator(self, creator: str) -> None: ... + producer: str + def set_producer(self, producer: str) -> None: ... + def set_creation_date(self, date: datetime.datetime) -> None: ... + def set_xmp_metadata(self, xmp_metadata: str) -> None: ... + def set_doc_option(self, opt: str, value: str) -> None: ... + def set_image_filter(self, image_filter: str) -> None: ... + def alias_nb_pages(self, alias: str = ...) -> None: ... + def add_page( + self, + orientation: _Orientation = ..., + format: _Format | tuple[float, float] = ..., + same: bool = ..., + duration: int = ..., + transition: Incomplete | None = ..., + ) -> None: ... + def header(self) -> None: ... + def footer(self) -> None: ... + def page_no(self) -> int: ... + def set_draw_color(self, r: int, g: int = ..., b: int = ...) -> None: ... + def set_fill_color(self, r: int, g: int = ..., b: int = ...) -> None: ... + def set_text_color(self, r: int, g: int = ..., b: int = ...) -> None: ... + def get_string_width(self, s: str, normalized: bool = ..., markdown: bool = ...) -> float: ... + def set_line_width(self, width: float) -> None: ... + def set_page_background(self, background) -> None: ... + def drawing_context(self, debug_stream: Incomplete | None = ...) -> _GeneratorContextManager[DrawingContext]: ... + def new_path( + self, x: float = ..., y: float = ..., paint_rule: PathPaintRule = ..., debug_stream: Incomplete | None = ... + ) -> _GeneratorContextManager[PaintedPath]: ... + def draw_path(self, path: PaintedPath, debug_stream: Incomplete | None = ...) -> None: ... + def set_dash_pattern(self, dash: float = ..., gap: float = ..., phase: float = ...) -> None: ... + def line(self, x1: float, y1: float, x2: float, y2: float) -> None: ... + def polyline( + self, point_list: list[tuple[float, float]], fill: bool = ..., polygon: bool = ..., style: RenderStyle | str | None = ... + ) -> None: ... + def polygon(self, point_list: list[tuple[float, float]], fill: bool = ..., style: RenderStyle | str | None = ...) -> None: ... + def dashed_line(self, x1, y1, x2, y2, dash_length: int = ..., space_length: int = ...) -> None: ... + def rect( + self, + x: float, + y: float, + w: float, + h: float, + style: RenderStyle | str | None = ..., + round_corners: tuple[str, ...] | tuple[Corner, ...] | bool = ..., + corner_radius: float = ..., + ) -> None: ... + def ellipse(self, x: float, y: float, w: float, h: float, style: RenderStyle | str | None = ...) -> None: ... + def circle(self, x: float, y: float, r, style: RenderStyle | str | None = ...) -> None: ... + def regular_polygon( + self, + x: float, + y: float, + numSides: int, + polyWidth: float, + rotateDegrees: float = ..., + style: RenderStyle | str | None = ..., + ): ... + def star( + self, + x: float, + y: float, + r_in: float, + r_out: float, + corners: int, + rotate_degrees: float = ..., + style: RenderStyle | str | None = ..., + ): ... 
+ def arc( + self, + x: float, + y: float, + a: float, + start_angle: float, + end_angle: float, + b: float | None = ..., + inclination: float = ..., + clockwise: bool = ..., + start_from_center: bool = ..., + end_at_center: bool = ..., + style: RenderStyle | str | None = ..., + ) -> None: ... + def solid_arc( + self, + x: float, + y: float, + a: float, + start_angle: float, + end_angle: float, + b: float | None = ..., + inclination: float = ..., + clockwise: bool = ..., + style: RenderStyle | str | None = ..., + ) -> None: ... + def add_font( + self, + family: str | None = None, + style: _FontStyle = "", + fname: str | PurePath | None = None, + uni: bool | Literal["DEPRECATED"] = "DEPRECATED", + ) -> None: ... + def set_font(self, family: str | None = ..., style: _FontStyles = ..., size: int = ...) -> None: ... + def set_font_size(self, size: float) -> None: ... + def set_char_spacing(self, spacing: float) -> None: ... + def set_stretching(self, stretching: float) -> None: ... + def add_link(self, y: float = 0, x: float = 0, page: int = -1, zoom: float | Literal["null"] = "null") -> int: ... + def set_link(self, link, y: float = 0, x: float = 0, page: int = -1, zoom: float | Literal["null"] = "null") -> None: ... + def link( + self, x: float, y: float, w: float, h: float, link: str | int, alt_text: str | None = ..., border_width: int = ... + ) -> AnnotationDict: ... + def embed_file( + self, + file_path: StrPath | None = ..., + bytes: bytes | None = ..., + basename: str | None = ..., + modification_date: datetime.datetime | None = ..., + *, + creation_date: datetime.datetime | None = ..., + desc: str = ..., + compress: bool = ..., + checksum: bool = ..., + ) -> str: ... + def file_attachment_annotation( + self, + file_path: StrPath, + x: float, + y: float, + w: float = ..., + h: float = ..., + name: FileAttachmentAnnotationName | str | None = ..., + flags: Iterable[AnnotationFlag | str] = ..., + *, + bytes: bytes | None = ..., + basename: str | None = ..., + creation_date: datetime.datetime | None = ..., + modification_date: datetime.datetime | None = ..., + desc: str = ..., + compress: bool = ..., + checksum: bool = ..., + ) -> AnnotationDict: ... + def text_annotation( + self, + x: float, + y: float, + text: str, + w: float = ..., + h: float = ..., + name: AnnotationName | str | None = ..., + flags: tuple[AnnotationFlag, ...] | tuple[str, ...] = ..., + ) -> None: ... + def add_action(self, action, x: float, y: float, w: float, h: float) -> None: ... + def highlight( + self, + text: str, + title: str = ..., + type: TextMarkupType | str = ..., + color: tuple[float, float, float] = ..., + modification_time: datetime.datetime | None = ..., + ) -> _GeneratorContextManager[None]: ... + add_highlight = highlight + def add_text_markup_annotation( + self, + type: str, + text: str, + quad_points: Sequence[int], + title: str = ..., + color: tuple[float, float, float] = ..., + modification_time: datetime.datetime | None = ..., + page: int | None = ..., + ) -> AnnotationDict: ... + def ink_annotation( + self, + coords: Iterable[Incomplete], + contents: str = ..., + title: str = ..., + color: Sequence[float] = ..., + border_width: int = ..., + ) -> AnnotationDict: ... + def text(self, x: float, y: float, txt: str = ...) -> None: ... + def rotate(self, angle: float, x: float | None = ..., y: float | None = ...) -> None: ... + def rotation(self, angle: float, x: float | None = ..., y: float | None = ...) -> _GeneratorContextManager[None]: ... 
+ def skew( + self, ax: float = 0, ay: float = 0, x: float | None = None, y: float | None = None + ) -> _GeneratorContextManager[None]: ... + def local_context( + self, + font_family: Incomplete | None = ..., + font_style: Incomplete | None = ..., + font_size: Incomplete | None = ..., + line_width: Incomplete | None = ..., + draw_color: Incomplete | None = ..., + fill_color: Incomplete | None = ..., + text_color: Incomplete | None = ..., + dash_pattern: Incomplete | None = ..., + **kwargs, + ) -> _GeneratorContextManager[None]: ... + @property + def accept_page_break(self) -> bool: ... + def cell( + self, + w: float | None = ..., + h: float | None = ..., + txt: str = ..., + border: bool | Literal[0, 1] | str = ..., + ln: int | Literal["DEPRECATED"] = ..., + align: str | Align = ..., + fill: bool = ..., + link: str = ..., + center: bool | Literal["DEPRECATED"] = ..., + markdown: bool = ..., + new_x: XPos | str = ..., + new_y: YPos | str = ..., + ) -> bool: ... + def will_page_break(self, height: float) -> bool: ... + def multi_cell( + self, + w: float, + h: float | None = ..., + txt: str = ..., + border: bool | Literal[0, 1] | str = ..., + align: str | Align = ..., + fill: bool = ..., + split_only: bool = ..., + link: str | int = ..., + ln: int | Literal["DEPRECATED"] = ..., + max_line_height: float | None = ..., + markdown: bool = ..., + print_sh: bool = ..., + new_x: XPos | str = ..., + new_y: YPos | str = ..., + ): ... + def write(self, h: float | None = ..., txt: str = ..., link: str = ..., print_sh: bool = ...) -> None: ... + def image( + self, + name: str | Image.Image | BytesIO | StrPath, + x: float | Align | None = None, + y: float | None = None, + w: float = 0, + h: float = 0, + type: str = "", + link: str = "", + title: str | None = None, + alt_text: str | None = None, + dims: tuple[float, float] | None = None, + ) -> _Image: ... + def ln(self, h: float | None = ...) -> None: ... + def get_x(self) -> float: ... + def set_x(self, x: float) -> None: ... + def get_y(self) -> float: ... + def set_y(self, y: float) -> None: ... + def set_xy(self, x: float, y: float) -> None: ... + @overload + def output(self, name: Literal[""] = ...) -> bytearray: ... # type: ignore[misc] + @overload + def output(self, name: str) -> None: ... + def normalize_text(self, txt: str) -> str: ... + def sign_pkcs12( + self, + pkcs_filepath: str, + password: bytes | None = ..., + hashalgo: str = ..., + contact_info: str | None = ..., + location: str | None = ..., + signing_time: datetime.datetime | None = ..., + reason: str | None = ..., + flags: tuple[AnnotationFlag, ...] = ..., + ) -> None: ... + def sign( + self, + key, + cert, + extra_certs: Sequence[Incomplete] = ..., + hashalgo: str = ..., + contact_info: str | None = ..., + location: str | None = ..., + signing_time: datetime.datetime | None = ..., + reason: str | None = ..., + flags: tuple[AnnotationFlag, ...] = ..., + ) -> None: ... + def file_id(self) -> str: ... + def interleaved2of5(self, txt, x: float, y: float, w: float = ..., h: float = ...) -> None: ... + def code39(self, txt, x: float, y: float, w: float = ..., h: float = ...) -> None: ... + def rect_clip(self, x: float, y: float, w: float, h: float) -> _GeneratorContextManager[None]: ... + def elliptic_clip(self, x: float, y: float, w: float, h: float) -> _GeneratorContextManager[None]: ... + def round_clip(self, x: float, y: float, r: float) -> _GeneratorContextManager[None]: ... + def unbreakable(self) -> _GeneratorContextManager[FPDFRecorder]: ... 
+ def offset_rendering(self) -> _GeneratorContextManager[FPDFRecorder]: ... + def insert_toc_placeholder(self, render_toc_function, pages: int = ...) -> None: ... + def set_section_title_styles( + self, + level0: TitleStyle, + level1: TitleStyle | None = ..., + level2: TitleStyle | None = ..., + level3: TitleStyle | None = ..., + level4: TitleStyle | None = ..., + level5: TitleStyle | None = ..., + level6: TitleStyle | None = ..., + ) -> None: ... + def start_section(self, name: str, level: int = 0, strict: bool = True) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/graphics_state.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/graphics_state.pyi new file mode 100644 index 00000000..a364a527 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/graphics_state.pyi @@ -0,0 +1,102 @@ +from typing import Any, ClassVar + +from .drawing import DeviceGray, DeviceRGB +from .enums import TextMode + +class GraphicsStateMixin: + DEFAULT_DRAW_COLOR: ClassVar[DeviceGray] + DEFAULT_FILL_COLOR: ClassVar[DeviceGray] + DEFAULT_TEXT_COLOR: ClassVar[DeviceGray] + def __init__(self, *args, **kwargs) -> None: ... + @property + def draw_color(self) -> DeviceGray | DeviceRGB: ... + @draw_color.setter + def draw_color(self, v: DeviceGray | DeviceRGB) -> None: ... + @property + def fill_color(self) -> DeviceGray | DeviceRGB: ... + @fill_color.setter + def fill_color(self, v: DeviceGray | DeviceRGB) -> None: ... + @property + def text_color(self) -> DeviceGray | DeviceRGB: ... + @text_color.setter + def text_color(self, v: DeviceGray | DeviceRGB) -> None: ... + @property + def underline(self) -> bool: ... + @underline.setter + def underline(self, v: bool) -> None: ... + @property + def font_style(self) -> str: ... + @font_style.setter + def font_style(self, v: str) -> None: ... + @property + def font_stretching(self) -> float: ... + @font_stretching.setter + def font_stretching(self, v: float) -> None: ... + @property + def char_spacing(self) -> float: ... + @char_spacing.setter + def char_spacing(self, v: float) -> None: ... + @property + def font_family(self) -> str: ... + @font_family.setter + def font_family(self, v: str) -> None: ... + @property + def font_size_pt(self) -> float: ... + @font_size_pt.setter + def font_size_pt(self, v: float) -> None: ... + @property + def font_size(self) -> float: ... + @font_size.setter + def font_size(self, v: float) -> None: ... + @property + def current_font(self) -> dict[str, Any]: ... + @current_font.setter + def current_font(self, v: dict[str, Any]) -> None: ... + @property + def dash_pattern(self) -> dict[str, float]: ... + @dash_pattern.setter + def dash_pattern(self, v: dict[str, float]) -> None: ... + @property + def line_width(self) -> float: ... + @line_width.setter + def line_width(self, v: float) -> None: ... + @property + def text_mode(self) -> TextMode: ... + @text_mode.setter + def text_mode(self, v: int | str) -> None: ... + @property + def char_vpos(self): ... + @char_vpos.setter + def char_vpos(self, v) -> None: ... + @property + def sub_scale(self): ... + @sub_scale.setter + def sub_scale(self, v) -> None: ... + @property + def sup_scale(self): ... + @sup_scale.setter + def sup_scale(self, v) -> None: ... + @property + def nom_scale(self): ... + @nom_scale.setter + def nom_scale(self, v) -> None: ... + @property + def denom_scale(self): ... + @denom_scale.setter + def denom_scale(self, v) -> None: ... 
+ @property + def sub_lift(self): ... + @sub_lift.setter + def sub_lift(self, v) -> None: ... + @property + def sup_lift(self): ... + @sup_lift.setter + def sup_lift(self, v) -> None: ... + @property + def nom_lift(self): ... + @nom_lift.setter + def nom_lift(self, v) -> None: ... + @property + def denom_lift(self): ... + @denom_lift.setter + def denom_lift(self, v) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/html.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/html.pyi new file mode 100644 index 00000000..34ae1968 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/html.pyi @@ -0,0 +1,97 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from html.parser import HTMLParser +from logging import Logger +from re import Match, Pattern +from typing import ClassVar +from typing_extensions import Final + +from fpdf import FPDF + +__author__: Final[str] +__copyright__: Final[str] +__license__: Final[str] + +LOGGER: Logger +BULLET_WIN1252: Final[str] +DEFAULT_HEADING_SIZES: dict[str, int] +LEADING_SPACE: Pattern[str] +WHITESPACE: Pattern[str] +TRAILING_SPACE: Pattern[str] + +COLOR_DICT: Final[dict[str, str]] + +def px2mm(px: float) -> float: ... +def color_as_decimal(color: str | None = ...) -> tuple[int, int, int] | None: ... + +class HTML2FPDF(HTMLParser): + HTML_UNCLOSED_TAGS: ClassVar[tuple[str, ...]] + pdf: Incomplete + image_map: Incomplete + li_tag_indent: Incomplete + table_line_separators: Incomplete + ul_bullet_char: Incomplete + style: Incomplete + href: str + align: str + page_links: Incomplete + font_stack: Incomplete + indent: int + bullet: Incomplete + font_size: Incomplete + font_color: Incomplete + table: Incomplete + table_col_width: Incomplete + table_col_index: Incomplete + td: Incomplete + th: Incomplete + tr: Incomplete + thead: Incomplete + tfoot: Incomplete + tr_index: Incomplete + theader: Incomplete + tfooter: Incomplete + theader_out: bool + table_row_height: int + heading_level: Incomplete + heading_sizes: Incomplete + heading_above: float + heading_below: float + warn_on_tags_not_matching: bool + def __init__( + self, + pdf: FPDF, + image_map: Callable[[str], str] | None = None, + li_tag_indent: int = 5, + dd_tag_indent: int = 10, + table_line_separators: bool = False, + ul_bullet_char: str = ..., + heading_sizes: Incomplete | None = None, + warn_on_tags_not_matching: bool = True, + **_: Unused, + ): ... + def width2unit(self, length): ... + def handle_data(self, data) -> None: ... + def box_shadow(self, w, h, bgcolor) -> None: ... + def output_table_header(self) -> None: ... + tfooter_out: bool + def output_table_footer(self) -> None: ... + def output_table_sep(self) -> None: ... + font_face: Incomplete + table_offset: Incomplete + def handle_starttag(self, tag, attrs) -> None: ... + tbody: Incomplete + def handle_endtag(self, tag) -> None: ... + h: Incomplete + def set_font(self, face: Incomplete | None = ..., size: Incomplete | None = ...) -> None: ... + def set_style(self, tag: Incomplete | None = ..., enable: bool = ...) -> None: ... + def set_text_color(self, r: Incomplete | None = ..., g: int = ..., b: int = ...) -> None: ... + def put_link(self, txt) -> None: ... + def render_toc(self, pdf, outline) -> None: ... + def error(self, message: str) -> None: ... + +def leading_whitespace_repl(matchobj: Match[str]) -> str: ... 
+def whitespace_repl(matchobj: Match[str]) -> str: ... + +class HTMLMixin: + def __init__(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/image_parsing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/image_parsing.pyi new file mode 100644 index 00000000..24060e55 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/image_parsing.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Literal, TypeAlias + +from PIL.Image import Resampling + +_ImageFilter: TypeAlias = Literal["AUTO", "FlateDecode", "DCTDecode", "JPXDecode"] + +RESAMPLE: Resampling +SUPPORTED_IMAGE_FILTERS: tuple[_ImageFilter, ...] + +def load_image(filename): ... + +# Returned dict could be typed as a TypedDict. +def get_img_info(img, image_filter: _ImageFilter = ..., dims: Incomplete | None = ...) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/line_break.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/line_break.pyi new file mode 100644 index 00000000..01d3a351 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/line_break.pyi @@ -0,0 +1,116 @@ +from _typeshed import Incomplete +from collections.abc import Sequence +from typing import NamedTuple + +SOFT_HYPHEN: str +HYPHEN: str +SPACE: str +NEWLINE: str + +class Fragment: + characters: list[str] + graphics_state: dict[str, Incomplete] + k: float + url: str | None + def __init__( + self, characters: list[str] | str, graphics_state: dict[str, Incomplete], k: float, url: str | None = None + ) -> None: ... + @property + def font(self): ... + @font.setter + def font(self, v) -> None: ... + @property + def is_ttf_font(self): ... + @property + def font_style(self): ... + @property + def font_family(self): ... + @property + def font_size_pt(self): ... + @property + def font_size(self): ... + @property + def font_stretching(self): ... + @property + def char_spacing(self): ... + @property + def text_mode(self): ... + @property + def underline(self): ... + @property + def draw_color(self): ... + @property + def fill_color(self): ... + @property + def text_color(self): ... + @property + def line_width(self): ... + @property + def char_vpos(self): ... + @property + def lift(self): ... + @property + def string(self): ... + def trim(self, index: int): ... + def __eq__(self, other: Fragment) -> bool: ... # type: ignore[override] + def get_width(self, start: int = ..., end: int | None = ..., chars: str | None = ..., initial_cs: bool = ...): ... + def get_character_width(self, character: str, print_sh: bool = ..., initial_cs: bool = ...): ... + +class TextLine(NamedTuple): + fragments: tuple[Incomplete, ...] + text_width: float + number_of_spaces: int + justify: bool + trailing_nl: bool = ... 
+ +class SpaceHint(NamedTuple): + original_fragment_index: int + original_character_index: int + current_line_fragment_index: int + current_line_character_index: int + line_width: float + number_of_spaces: int + +class HyphenHint(NamedTuple): + original_fragment_index: int + original_character_index: int + current_line_fragment_index: int + current_line_character_index: int + line_width: float + number_of_spaces: int + curchar: str + curchar_width: float + graphics_state: dict[str, Incomplete] + k: float + +class CurrentLine: + print_sh: Incomplete + fragments: Incomplete + width: int + number_of_spaces: int + space_break_hint: Incomplete + hyphen_break_hint: Incomplete + def __init__(self, print_sh: bool = ...) -> None: ... + def add_character( + self, + character: str, + character_width: float, + graphics_state: dict[str, Incomplete], + k: float, + original_fragment_index: int, + original_character_index: int, + url: str | None = None, + ): ... + def manual_break(self, justify: bool = ..., trailing_nl: bool = ...): ... + def automatic_break_possible(self): ... + def automatic_break(self, justify: bool): ... + +class MultiLineBreak: + styled_text_fragments: Incomplete + justify: Incomplete + print_sh: Incomplete + fragment_index: int + character_index: int + idx_last_forced_break: Incomplete + def __init__(self, styled_text_fragments: Sequence[Fragment], justify: bool = ..., print_sh: bool = ...) -> None: ... + def get_line_of_given_width(self, maximum_width: float, wordsplit: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/linearization.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/linearization.pyi new file mode 100644 index 00000000..cfbaaf5b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/linearization.pyi @@ -0,0 +1,54 @@ +from _typeshed import Incomplete +from typing_extensions import Final + +from .encryption import StandardSecurityHandler +from .output import ContentWithoutID, OutputProducer +from .syntax import PDFContentStream, PDFObject + +HINT_STREAM_OFFSET_LENGTH_PLACEHOLDER: Final[str] +FIRST_PAGE_END_OFFSET_PLACEHOLDER: Final[str] +MAIN_XREF_1ST_ENTRY_OFFSET_PLACEHOLDER: Final[str] +FILE_LENGTH_PLACEHOLDER: Final[str] + +class PDFLinearization(PDFObject): + linearized: str + n: int + h: str + o: Incomplete | None + e: str + t: str + l: str + def __init__(self, pages_count: int) -> None: ... + +class PDFXrefAndTrailer(ContentWithoutID): + PREV_MAIN_XREF_START_PLACEHOLDER: str + output_builder: Incomplete + count: int + start_obj_id: int + catalog_obj: Incomplete | None + info_obj: Incomplete | None + first_xref: Incomplete | None + main_xref: Incomplete | None + startxref: Incomplete | None + def __init__(self, output_builder) -> None: ... + @property + def is_first_xref(self) -> bool: ... + @property + def is_main_xref(self) -> bool: ... + def serialize(self, _security_handler: StandardSecurityHandler | None = None) -> str: ... + +class PDFHintStream(PDFContentStream): + s: Incomplete | None + t: Incomplete | None + o: Incomplete | None + a: Incomplete | None + e: Incomplete | None + v: Incomplete | None + i: Incomplete | None + c: Incomplete | None + l: Incomplete | None + r: Incomplete | None + b: Incomplete | None + +class LinearizedOutputProducer(OutputProducer): + def bufferize(self) -> bytearray: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/outline.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/outline.pyi new file mode 100644 index 00000000..4e57c1d8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/outline.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete +from collections.abc import Generator, Iterable +from typing import NamedTuple + +from .structure_tree import StructElem +from .syntax import Destination, PDFObject, PDFString + +class OutlineSection(NamedTuple): + name: str + level: str + page_number: int + dest: Destination + struct_elem: StructElem | None = ... + +class OutlineItemDictionary(PDFObject): + title: PDFString + parent: Incomplete | None + prev: Incomplete | None + next: Incomplete | None + first: Incomplete | None + last: Incomplete | None + count: int + dest: Destination | None + struct_elem: StructElem | None + def __init__(self, title: str, dest: Destination | None = ..., struct_elem: StructElem | None = ...) -> None: ... + +class OutlineDictionary(PDFObject): + type: str + first: Incomplete | None + last: Incomplete | None + count: int + def __init__(self) -> None: ... + +def build_outline_objs( + sections: Iterable[Incomplete], +) -> Generator[Incomplete, None, list[OutlineDictionary | OutlineItemDictionary]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/output.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/output.pyi new file mode 100644 index 00000000..686abcbb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/output.pyi @@ -0,0 +1,187 @@ +from _typeshed import Incomplete, Unused +from collections import defaultdict +from logging import Logger +from typing_extensions import Final + +from .annotations import AnnotationDict +from .encryption import StandardSecurityHandler +from .syntax import Name, PDFArray, PDFContentStream, PDFObject + +LOGGER: Logger +ZOOM_CONFIGS: Final[dict[str, tuple[str, ...]]] + +class ContentWithoutID: + def serialize(self, _security_handler: StandardSecurityHandler | None = None) -> str | None: ... + +class PDFHeader(ContentWithoutID): + pdf_version: str + def __init__(self, pdf_version: str) -> None: ... + def serialize(self, _security_handler: StandardSecurityHandler | None = None) -> str: ... + +class PDFFont(PDFObject): + type: Name + subtype: Name + base_font: Name + encoding: Name | None + d_w: Incomplete | None + w: Incomplete | None + descendant_fonts: Incomplete | None + to_unicode: Incomplete | None + c_i_d_system_info: Incomplete | None + font_descriptor: Incomplete | None + c_i_d_to_g_i_d_map: Incomplete | None + def __init__( + self, subtype: str, base_font: str, encoding: str | None = ..., d_w: Incomplete | None = ..., w: Incomplete | None = ... + ) -> None: ... + +class PDFFontDescriptor(PDFObject): + type: Name + ascent: Incomplete + descent: Incomplete + cap_height: Incomplete + flags: Incomplete + font_b_box: Incomplete + italic_angle: Incomplete + stem_v: Incomplete + missing_width: Incomplete + font_name: Incomplete | None + def __init__(self, ascent, descent, cap_height, flags, font_b_box, italic_angle, stem_v, missing_width) -> None: ... 
+ +class CIDSystemInfo(PDFObject): + registry: str + ordering: str + supplement: Incomplete + def __init__(self, registry: str | None, ordering: str | None, supplement) -> None: ... + +class PDFInfo(PDFObject): + title: str | None + subject: str | None + author: str | None + keywords: str | None + creator: str | None + producer: str | None + creation_date: Incomplete + def __init__( + self, + title: str | None, + subject: str | None, + author: str | None, + keywords: str | None, + creator: str | None, + producer: str | None, + creation_date, + ) -> None: ... + +class AcroForm: + fields: Incomplete + sig_flags: Incomplete + def __init__(self, fields, sig_flags) -> None: ... + def serialize(self) -> str: ... + +class PDFCatalog(PDFObject): + type: Name + lang: str | None + page_layout: Incomplete | None + page_mode: Incomplete | None + viewer_preferences: Incomplete | None + pages: Incomplete | None + acro_form: Incomplete | None + open_action: Incomplete | None + mark_info: Incomplete | None + metadata: Incomplete | None + names: Incomplete | None + outlines: Incomplete | None + struct_tree_root: Incomplete | None + def __init__( + self, + lang: str | None = ..., + page_layout: Incomplete | None = ..., + page_mode: Incomplete | None = ..., + viewer_preferences: Incomplete | None = ..., + ) -> None: ... + +class PDFResources(PDFObject): + proc_set: Incomplete + font: Incomplete + x_object: Incomplete + ext_g_state: Incomplete + def __init__(self, proc_set, font, x_object, ext_g_state) -> None: ... + +class PDFFontStream(PDFContentStream): + length1: int + def __init__(self, contents: bytes) -> None: ... + +class PDFXmpMetadata(PDFContentStream): + type: Name + subtype: Name + def __init__(self, contents: bytes) -> None: ... + +class PDFXObject(PDFContentStream): + type: Name + subtype: Name + width: Incomplete + height: Incomplete + color_space: Incomplete + bits_per_component: Incomplete + filter: Name + decode: Incomplete | None + decode_parms: Incomplete | None + s_mask: Incomplete | None + def __init__( + self, + contents, + subtype: str, + width, + height, + color_space, + bits_per_component, + img_filter: str | None = ..., + decode: Incomplete | None = ..., + decode_parms: Incomplete | None = ..., + ) -> None: ... + +class PDFPage(PDFObject): + type: Name + contents: Incomplete + dur: Incomplete | None + trans: Incomplete + annots: PDFArray[AnnotationDict] + group: Incomplete | None + media_box: Incomplete | None + struct_parents: Incomplete | None + resources: Incomplete | None + parent: Incomplete | None + def __init__(self, duration: Incomplete | None, transition, contents, index) -> None: ... + def index(self): ... + def dimensions(self) -> tuple[float | None, float | None]: ... + def set_dimensions(self, width_pt: float | None, height_pt: float | None) -> None: ... + +class PDFPagesRoot(PDFObject): + type: Name + count: Incomplete + media_box: Incomplete + kids: Incomplete | None + def __init__(self, count, media_box) -> None: ... + +class PDFExtGState(PDFObject): + def __init__(self, dict_as_str) -> None: ... + def serialize(self, obj_dict: Unused = None, _security_handler: StandardSecurityHandler | None = None) -> str: ... + +class PDFXrefAndTrailer(ContentWithoutID): + output_builder: Incomplete + count: int + catalog_obj: Incomplete | None + info_obj: Incomplete | None + def __init__(self, output_builder) -> None: ... + def serialize(self, _security_handler: StandardSecurityHandler | None = None) -> str: ... 
+ +class OutputProducer: + fpdf: Incomplete + pdf_objs: list[Incomplete] + obj_id: int + offsets: dict[Incomplete, Incomplete] + trace_labels_per_obj_id: dict[Incomplete, Incomplete] + sections_size_per_trace_label: defaultdict[Incomplete, int] + buffer: bytearray + def __init__(self, fpdf) -> None: ... + def bufferize(self) -> bytearray: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/prefs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/prefs.pyi new file mode 100644 index 00000000..2ec95cd7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/prefs.pyi @@ -0,0 +1,24 @@ +from .enums import PageMode + +class ViewerPreferences: + hide_toolbar: bool + hide_menubar: bool + hide_window_u_i: bool + fit_window: bool + center_window: bool + display_doc_title: bool + def __init__( + self, + hide_toolbar: bool = ..., + hide_menubar: bool = ..., + hide_window_u_i: bool = ..., + fit_window: bool = ..., + center_window: bool = ..., + display_doc_title: bool = ..., + non_full_screen_page_mode: PageMode | str = ..., + ) -> None: ... + @property + def non_full_screen_page_mode(self): ... + @non_full_screen_page_mode.setter + def non_full_screen_page_mode(self, page_mode) -> None: ... + def serialize(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/recorder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/recorder.pyi new file mode 100644 index 00000000..9d73e9fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/recorder.pyi @@ -0,0 +1,13 @@ +from typing import Any + +class FPDFRecorder: + pdf: Any + accept_page_break: bool + def __init__(self, pdf, accept_page_break: bool = ...) -> None: ... + def __getattr__(self, name: str): ... + def rewind(self) -> None: ... + def replay(self) -> None: ... + +class CallRecorder: + def __init__(self, func, calls) -> None: ... + def __call__(self, *args, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/sign.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/sign.pyi new file mode 100644 index 00000000..e6878ea8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/sign.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +class Signature: + type: str + filter: str + sub_filter: str + contact_info: Incomplete | None + location: Incomplete | None + m: Incomplete | None + reason: Incomplete | None + byte_range: str + contents: str + def __init__( + self, + contact_info: Incomplete | None = ..., + location: Incomplete | None = ..., + m: Incomplete | None = ..., + reason: Incomplete | None = ..., + ) -> None: ... + def serialize(self): ... + +def sign_content(signer, buffer, key, cert, extra_certs, hashalgo, sign_time): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/structure_tree.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/structure_tree.pyi new file mode 100644 index 00000000..2aec226f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/structure_tree.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete, Unused +from collections import defaultdict +from collections.abc import Generator, Iterable + +from .encryption import StandardSecurityHandler +from .syntax import PDFArray, PDFObject, PDFString + +class NumberTree(PDFObject): + nums: defaultdict[Incomplete, list[Incomplete]] + def __init__(self) -> None: ... + def serialize(self, obj_dict: Unused = None, _security_handler: StandardSecurityHandler | None = None) -> str: ... + +class StructTreeRoot(PDFObject): + type: str + parent_tree: NumberTree + k: PDFArray[Incomplete] + def __init__(self) -> None: ... + +class StructElem(PDFObject): + type: str + s: str + p: PDFObject + k: PDFArray[Incomplete] + t: PDFString | None + alt: PDFString | None + pg: Incomplete | None + def __init__( + self, + struct_type: str, + parent: PDFObject, + kids: Iterable[int] | Iterable[StructElem], + page_number: int | None = ..., + title: str | None = ..., + alt: str | None = ..., + ) -> None: ... + def page_number(self) -> int | None: ... + +class StructureTreeBuilder: + struct_tree_root: Incomplete + doc_struct_elem: Incomplete + struct_elem_per_mc: Incomplete + def __init__(self) -> None: ... + def add_marked_content( + self, page_number: int, struct_type: str, mcid: int | None = ..., title: str | None = ..., alt_text: str | None = ... + ) -> tuple[Incomplete, Incomplete]: ... + def next_mcid_for_page(self, page_number: int) -> int: ... + def empty(self) -> bool: ... + def __iter__(self) -> Generator[Incomplete, None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/svg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/svg.pyi new file mode 100644 index 00000000..0d4c9cb4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/svg.pyi @@ -0,0 +1,89 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from re import Pattern + +from fpdf.drawing import PaintedPath + +from ._fonttools_shims import BasePen, _TTGlyphSet + +__pdoc__: dict[str, bool] + +def force_nodocument(item): ... + +NUMBER_SPLIT: Pattern[str] +TRANSFORM_GETTER: Pattern[str] + +class Percent(float): ... + +unit_splitter: Pattern[str] +relative_length_units: set[str] +absolute_length_units: dict[str, int] +angle_units: dict[str, float] + +def resolve_length(length_str, default_unit: str = ...): ... +def resolve_angle(angle_str, default_unit: str = ...): ... +def xmlns(space, name): ... +def xmlns_lookup(space, *names): ... + +shape_tags: Incomplete + +def svgcolor(colorstr): ... +def convert_stroke_width(incoming): ... +def convert_miterlimit(incoming): ... +def clamp_float(min_val, max_val): ... +def inheritable(value, converter=...): ... +def optional(value, converter=...): ... + +svg_attr_map: dict[str, Callable[[Incomplete], tuple[str, Incomplete]]] + +def parse_style(svg_element) -> None: ... +def apply_styles(stylable, svg_element) -> None: ... + +class ShapeBuilder: + @staticmethod + def new_path(tag): ... + @classmethod + def rect(cls, tag): ... 
+ @classmethod + def circle(cls, tag): ... + @classmethod + def ellipse(cls, tag): ... + @classmethod + def line(cls, tag): ... + @classmethod + def polyline(cls, tag): ... + @classmethod + def polygon(cls, tag): ... + +def convert_transforms(tfstr): ... + +class PathPen(BasePen): + pdf_path: PaintedPath + last_was_line_to: bool + first_is_move: bool | None + def __init__(self, pdf_path: PaintedPath, glyphSet: _TTGlyphSet | None = ...): ... + def arcTo(self, rx, ry, rotation, arc, sweep, end) -> None: ... + +def svg_path_converter(pdf_path: PaintedPath, svg_path: str) -> None: ... + +class SVGObject: + @classmethod + def from_file(cls, filename, *args, encoding: str = ..., **kwargs): ... + cross_references: Incomplete + def __init__(self, svg_text) -> None: ... + preserve_ar: Incomplete + width: Incomplete + height: Incomplete + viewbox: Incomplete + def extract_shape_info(self, root_tag) -> None: ... + base_group: Incomplete + def convert_graphics(self, root_tag) -> None: ... + def transform_to_page_viewport(self, pdf, align_viewbox: bool = ...): ... + def transform_to_rect_viewport(self, scale, width, height, align_viewbox: bool = ..., ignore_svg_top_attrs: bool = ...): ... + def draw_to_page( + self, pdf, x: Incomplete | None = ..., y: Incomplete | None = ..., debug_stream: Incomplete | None = ... + ) -> None: ... + def handle_defs(self, defs) -> None: ... + def build_xref(self, xref): ... + def build_group(self, group, pdf_group: Incomplete | None = ...): ... + def build_path(self, path): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/syntax.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/syntax.pyi new file mode 100644 index 00000000..bff2ec0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/syntax.pyi @@ -0,0 +1,70 @@ +from _typeshed import Incomplete, SupportsItems +from abc import ABC, abstractmethod +from re import Pattern +from typing import ClassVar, Generic, TypeVar +from typing_extensions import Literal + +from .encryption import StandardSecurityHandler + +_T = TypeVar("_T") + +def clear_empty_fields(d): ... +def create_dictionary_string( + dict_, + open_dict: str = ..., + close_dict: str = ..., + field_join: str = ..., + key_value_join: str = ..., + has_empty_fields: bool = ..., +): ... +def create_list_string(list_): ... +def iobj_ref(n): ... +def create_stream( + stream: str | bytes | bytearray, encryption_handler: StandardSecurityHandler | None = None, obj_id: Incomplete | None = None +): ... + +class Raw(str): ... + +class Name(str): + NAME_ESC: ClassVar[Pattern[bytes]] + def serialize(self) -> str: ... + +class PDFObject: + def __init__(self) -> None: ... + @property + def id(self) -> int: ... + @id.setter + def id(self, n: int) -> None: ... + @property + def ref(self) -> str: ... + def serialize(self, obj_dict: Incomplete | None = ..., _security_handler: StandardSecurityHandler | None = None) -> str: ... + def content_stream(self) -> bytes: ... + +class PDFContentStream(PDFObject): + filter: Name | None + length: int + def __init__(self, contents: bytes, compress: bool = ...) -> None: ... + def encrypt(self, security_handler: StandardSecurityHandler) -> None: ... + +def build_obj_dict(key_values: SupportsItems[str, Incomplete]) -> dict[str, str]: ... +def camel_case(snake_case: str) -> str: ... + +class PDFString(str): + USE_HEX_ENCODING: ClassVar[bool] + def serialize(self) -> str: ... 
+ +class PDFArray(list[_T], Generic[_T]): + def serialize(self) -> str: ... + +class Destination(ABC): + @abstractmethod + def serialize(self) -> str: ... + +class DestinationXYZ(Destination): + page_number: int + top: float + left: float + zoom: float | Literal["null"] + page_ref: Incomplete | None + def __init__(self, page: int, top: float, left: float = ..., zoom: float | Literal["null"] = ...) -> None: ... + def serialize(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/template.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/template.pyi new file mode 100644 index 00000000..c0546012 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/template.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from typing import Any + +__author__: str +__copyright__: str +__license__: str + +class FlexTemplate: + pdf: Any + splitting_pdf: Any + handlers: Any + texts: Any + def __init__(self, pdf, elements: Incomplete | None = ...) -> None: ... + elements: Any + keys: Any + def load_elements(self, elements) -> None: ... + def parse_csv(self, infile, delimiter: str = ..., decimal_sep: str = ..., encoding: Incomplete | None = ...): ... + def __setitem__(self, name, value) -> None: ... + set: Any + def __contains__(self, name): ... + def __getitem__(self, name): ... + def split_multicell(self, text, element_name): ... + def render(self, offsetx: float = ..., offsety: float = ..., rotate: float = ..., scale: float = ...): ... + +class Template(FlexTemplate): + def __init__( + self, + infile: Incomplete | None = ..., + elements: Incomplete | None = ..., + format: str = ..., + orientation: str = ..., + unit: str = ..., + title: str = ..., + author: str = ..., + subject: str = ..., + creator: str = ..., + keywords: str = ..., + ) -> None: ... + def add_page(self) -> None: ... + def render(self, outfile: Incomplete | None = ..., dest: Incomplete | None = ...) -> None: ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/transitions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/transitions.pyi new file mode 100644 index 00000000..5180d148 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/transitions.pyi @@ -0,0 +1,59 @@ +from abc import ABC, abstractmethod +from typing_extensions import Literal + +class Transition(ABC): + @abstractmethod + def serialize(self) -> str: ... + +class SplitTransition(Transition): + dimension: Literal["H", "V"] + direction: Literal["I", "O"] + def __init__(self, dimension: Literal["H", "V"], direction: Literal["I", "O"]) -> None: ... + def serialize(self) -> str: ... + +class BlindsTransition(Transition): + dimension: Literal["H", "V"] + def __init__(self, dimension: Literal["H", "V"]) -> None: ... + def serialize(self) -> str: ... + +class BoxTransition(Transition): + direction: Literal["I", "O"] + def __init__(self, direction: Literal["I", "O"]) -> None: ... + def serialize(self) -> str: ... + +class WipeTransition(Transition): + direction: Literal[0, 90, 180, 270] + def __init__(self, direction: Literal[0, 90, 180, 270]) -> None: ... + def serialize(self) -> str: ... + +class DissolveTransition(Transition): + def serialize(self) -> str: ... 
+
+class GlitterTransition(Transition):
+    direction: Literal[0, 270, 315]
+    def __init__(self, direction: Literal[0, 270, 315]) -> None: ...
+    def serialize(self) -> str: ...
+
+class FlyTransition(Transition):
+    dimension: Literal["H", "V"]
+    direction: Literal[0, 270] | None
+    def __init__(self, dimension: Literal["H", "V"], direction: Literal[0, 270] | None = ...) -> None: ...
+    def serialize(self) -> str: ...
+
+class PushTransition(Transition):
+    direction: Literal[0, 270]
+    def __init__(self, direction: Literal[0, 270]) -> None: ...
+    def serialize(self) -> str: ...
+
+class CoverTransition(Transition):
+    direction: Literal[0, 270]
+    def __init__(self, direction: Literal[0, 270]) -> None: ...
+    def serialize(self) -> str: ...
+
+class UncoverTransition(Transition):
+    direction: Literal[0, 270]
+    def __init__(self, direction: Literal[0, 270]) -> None: ...
+    def serialize(self) -> str: ...
+
+class FadeTransition(Transition):
+    def serialize(self) -> str: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/util.pyi
new file mode 100644
index 00000000..805a44c6
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/fpdf2/fpdf/util.pyi
@@ -0,0 +1,20 @@
+import datetime
+from collections.abc import Iterable
+from typing import Any
+from typing_extensions import Literal, TypeAlias
+
+_Unit: TypeAlias = Literal["pt", "mm", "cm", "in"]
+
+def buffer_subst(buffer: bytearray, placeholder: str, value: str) -> bytearray: ...
+def format_date(date: datetime.datetime, with_tz: bool = ...) -> str: ...
+def enclose_in_parens(s: str) -> str: ...
+def escape_parens(s): ...
+def b(s): ...
+def get_scale_factor(unit: _Unit | float) -> float: ...
+def convert_unit(
+    # to_convert has a recursive type
+    to_convert: float | Iterable[float | Iterable[Any]],
+    old_unit: str | float,
+    new_unit: str | float,
+) -> float | tuple[float, ...]: ...
+def dochecks() -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/METADATA.toml
new file mode 100644
index 00000000..67395fbe
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/METADATA.toml
@@ -0,0 +1,13 @@
+version = "12.1.*"
+extra_description = """\
+    Type hints for GDB's \
+    [Python API](https://sourceware.org/gdb/onlinedocs/gdb/Python-API.html). \
+    Note that this API is available only when running Python scripts under GDB: \
+    it is not possible to install the `gdb` package separately, for instance \
+    using `pip`.\
+"""
+
+[tool.stubtest]
+# Since the "gdb" Python package is available only inside GDB, it is not
+# possible to install it through pip, so stub tests cannot install it.
+skip = true
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/__init__.pyi
new file mode 100644
index 00000000..f343d3aa
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/__init__.pyi
@@ -0,0 +1,703 @@
+# The GDB Python API is implemented in C, so the type hints below were made
+# reading the documentation
+# (https://sourceware.org/gdb/onlinedocs/gdb/Python-API.html).
+ +import _typeshed +from collections.abc import Callable, Iterator, Sequence +from contextlib import AbstractContextManager +from typing import Protocol, overload +from typing_extensions import TypeAlias + +import gdb.types + +# The following submodules are automatically imported +from . import events as events, printing as printing, prompt as prompt, types as types + +# Basic + +PYTHONDIR: str + +STDOUT: int +STDERR: int +STDLOG: int + +def execute(command: str, from_tty: bool = ..., to_string: bool = ...) -> str | None: ... +def breakpoints() -> Sequence[Breakpoint]: ... +def rbreak(regex: str, minsyms: bool = ..., throttle: int = ..., symtabs: Iterator[Symtab] = ...) -> list[Breakpoint]: ... +def parameter(__parameter: str) -> bool | int | str | None: ... +def set_parameter(name: str, value: bool | int | str | None) -> None: ... +def with_parameter(name: str, value: bool | int | str | None) -> AbstractContextManager[None]: ... +def history(__number: int) -> Value: ... +def add_history(__value: Value) -> int: ... +def history_count() -> int: ... +def convenience_variable(__name: str) -> Value | None: ... +def set_convenience_variable(__name: str, __value: _ValueOrNative | None) -> None: ... +def parse_and_eval(__expression: str) -> Value: ... +def find_pc_line(pc: int | Value) -> Symtab_and_line: ... +def post_event(__event: Callable[[], object]) -> None: ... +def write(string: str, stream: int = ...) -> None: ... +def flush(stream: int = ...) -> None: ... +def target_charset() -> str: ... +def target_wide_charset() -> str: ... +def host_charset() -> str: ... +def solib_name(address: int) -> str | None: ... +def decode_line(__expression: str = ...) -> tuple[str | None, tuple[Symtab_and_line, ...] | None]: ... +def prompt_hook(current_prompt: str) -> str: ... +def architecture_names() -> list[str]: ... +def connections() -> list[TargetConnection]: ... + +# Exceptions + +class error(RuntimeError): ... +class MemoryError(error): ... +class GdbError(Exception): ... + +# Values + +_ValueOrNative: TypeAlias = bool | float | str | Value +_ValueOrInt: TypeAlias = Value | int + +class Value: + address: Value + is_optimized_out: bool + type: Type + dynamic_type: Type + is_lazy: bool + + def __index__(self) -> int: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __add__(self, other: _ValueOrInt) -> Value: ... + def __sub__(self, other: _ValueOrInt) -> Value: ... + def __mul__(self, other: _ValueOrInt) -> Value: ... + def __truediv__(self, other: _ValueOrInt) -> Value: ... + def __mod__(self, other: _ValueOrInt) -> Value: ... + def __and__(self, other: _ValueOrInt) -> Value: ... + def __or__(self, other: _ValueOrInt) -> Value: ... + def __xor__(self, other: _ValueOrInt) -> Value: ... + def __lshift__(self, other: _ValueOrInt) -> Value: ... + def __rshift__(self, other: _ValueOrInt) -> Value: ... + def __eq__(self, other: _ValueOrInt) -> bool: ... # type: ignore[override] + def __ne__(self, other: _ValueOrInt) -> bool: ... # type: ignore[override] + def __lt__(self, other: _ValueOrInt) -> bool: ... + def __le__(self, other: _ValueOrInt) -> bool: ... + def __gt__(self, other: _ValueOrInt) -> bool: ... + def __ge__(self, other: _ValueOrInt) -> bool: ... + def __getitem__(self, key: int | str | Field) -> Value: ... + def __call__(self, *args: _ValueOrNative) -> Value: ... + def __init__(self, val: _ValueOrNative) -> None: ... + def cast(self, type: Type) -> Value: ... + def dereference(self) -> Value: ... + def referenced_value(self) -> Value: ... 
+ def reference_value(self) -> Value: ... + def const_value(self) -> Value: ... + def dynamic_cast(self, type: Type) -> Value: ... + def reinterpret_cast(self, type: Type) -> Value: ... + def format_string( + self, + raw: bool = ..., + pretty_arrays: bool = ..., + pretty_structs: bool = ..., + array_indexes: bool = ..., + symbols: bool = ..., + unions: bool = ..., + address: bool = ..., + deref_refs: bool = ..., + actual_objects: bool = ..., + static_members: bool = ..., + max_elements: int = ..., + max_depth: int = ..., + repeat_threshold: int = ..., + format: str = ..., + ) -> str: ... + def string(self, encoding: str = ..., errors: str = ..., length: int = ...) -> str: ... + def lazy_string(self, encoding: str = ..., length: int = ...) -> LazyString: ... + def fetch_lazy(self) -> None: ... + +# Types + +def lookup_type(name: str, block: Block = ...) -> Type: ... + +class Type: + alignof: int + code: int + dynamic: bool + name: str + sizeof: int + tag: str | None + objfile: Objfile | None + + def fields(self) -> list[Field]: ... + def array(self, n1: int | Value, n2: int | Value = ...) -> Type: ... + def vector(self, n1: int, n2: int = ...) -> Type: ... + def const(self) -> Type: ... + def volatile(self) -> Type: ... + def unqualified(self) -> Type: ... + def range(self) -> tuple[int, int]: ... + def reference(self) -> Type: ... + def pointer(self) -> Type: ... + def strip_typedefs(self) -> Type: ... + def target(self) -> Type: ... + def template_argument(self, n: int, block: Block = ...) -> Type: ... + def optimized_out(self) -> Value: ... + +class Field: + bitpos: int + enumval: int + name: str | None + artificial: bool + is_base_class: bool + bitsize: int + type: Type + parent_type: Type + +TYPE_CODE_PTR: int +TYPE_CODE_ARRAY: int +TYPE_CODE_STRUCT: int +TYPE_CODE_UNION: int +TYPE_CODE_ENUM: int +TYPE_CODE_FLAGS: int +TYPE_CODE_FUNC: int +TYPE_CODE_INT: int +TYPE_CODE_FLT: int +TYPE_CODE_VOID: int +TYPE_CODE_SET: int +TYPE_CODE_RANGE: int +TYPE_CODE_STRING: int +TYPE_CODE_BITSTRING: int +TYPE_CODE_ERROR: int +TYPE_CODE_METHOD: int +TYPE_CODE_METHODPTR: int +TYPE_CODE_MEMBERPTR: int +TYPE_CODE_REF: int +TYPE_CODE_RVALUE_REF: int +TYPE_CODE_CHAR: int +TYPE_CODE_BOOL: int +TYPE_CODE_COMPLEX: int +TYPE_CODE_TYPEDEF: int +TYPE_CODE_NAMESPACE: int +TYPE_CODE_DECFLOAT: int +TYPE_CODE_INTERNAL_FUNCTION: int + +# Pretty Printing + +class _PrettyPrinter(Protocol): + # TODO: The "children" and "display_hint" methods are optional for + # pretty-printers. Unfortunately, there is no such thing as an optional + # method in the type system at the moment. + # + # def children(self) -> Iterator[tuple[str, _ValueOrNative]]: ... + # def display_hint(self) -> str | None: ... + def to_string(self) -> str | LazyString: ... + +_PrettyPrinterLookupFunction: TypeAlias = Callable[[Value], _PrettyPrinter | None] + +def default_visualizer(__value: Value) -> _PrettyPrinter | None: ... + +# Selecting Pretty-Printers + +pretty_printers: list[_PrettyPrinterLookupFunction] + +# Filtering Frames + +class _FrameFilter(Protocol): + name: str + enabled: bool + priority: int + + def filter(self, iterator: Iterator[_FrameDecorator]) -> Iterator[_FrameDecorator]: ... + +# Decorating Frames + +class _SymValueWrapper(Protocol): + def symbol(self) -> Symbol | str: ... + def value(self) -> _ValueOrNative | None: ... + +class _FrameDecorator(Protocol): + def elided(self) -> Iterator[Frame] | None: ... + def function(self) -> str | None: ... + def address(self) -> int | None: ... + def filename(self) -> str | None: ... 
+ def line(self) -> int | None: ... + def frame_args(self) -> Iterator[_SymValueWrapper] | None: ... + def frame_locals(self) -> Iterator[_SymValueWrapper] | None: ... + def inferior_frame(self) -> Frame: ... + +# Unwinding Frames + +class PendingFrame: + def read_register(self, __reg: str | RegisterDescriptor | int) -> Value: ... + def create_unwind_info(self, __frame_id: object) -> UnwindInfo: ... + def architecture(self) -> Architecture: ... + def level(self) -> int: ... + +class UnwindInfo: + def add_saved_register(self, __reg: str | RegisterDescriptor | int, __value: Value) -> None: ... + +class Unwinder: + name: str + enabled: bool + + def __call__(self, pending_frame: Frame) -> UnwindInfo | None: ... + +# Xmethods: the API is defined in the "xmethod" module + +# Inferiors + +def inferiors() -> tuple[Inferior, ...]: ... +def selected_inferior() -> Inferior: ... + +_BufferType: TypeAlias = _typeshed.ReadableBuffer + +class Inferior: + num: int + connection_num: int + pid: int + was_attached: bool + progspace: Progspace + + def is_valid(self) -> bool: ... + def threads(self) -> tuple[InferiorThread, ...]: ... + def architecture(self) -> Architecture: ... + def read_memory(self, address: _ValueOrInt, length: int) -> memoryview: ... + def write_memory(self, address: _ValueOrInt, buffer: _BufferType, length: int = ...) -> memoryview: ... + def search_memory(self, address: _ValueOrInt, length: int, pattern: _BufferType) -> int | None: ... + def thread_from_handle(self, handle: Value) -> InferiorThread: ... + +# Threads + +def selected_thread() -> InferiorThread: ... + +class InferiorThread: + name: str | None + num: int + global_num: int + ptid: tuple[int, int, int] + inferior: Inferior + + def is_valid(self) -> bool: ... + def switch(self) -> None: ... + def is_stopped(self) -> bool: ... + def is_running(self) -> bool: ... + def is_exited(self) -> bool: ... + def handle(self) -> bytes: ... + +# Recordings + +def start_recording(__method: str = ..., __format: str = ...) -> Record: ... +def current_recording() -> Record | None: ... +def stop_recording() -> None: ... + +class Record: + method: str + format: str | None + begin: Instruction + end: Instruction + replay_position: Instruction | None + instruction_history: list[Instruction] + function_call_history: list[RecordFunctionSegment] + + def goto(self, __instruction: Instruction) -> None: ... + +class Instruction: + pc: int + data: memoryview + decoded: str + size: int + +class RecordInstruction(Instruction): + number: int + sal: Symtab_and_line | None + is_speculative: bool + +class RecordGap(Instruction): + number: int + error_code: int + error_string: str + +class RecordFunctionSegment: + number: int + symbol: Symbol | None + level: int | None + instructions: list[RecordInstruction | RecordGap] + up: RecordFunctionSegment | None + prev: RecordFunctionSegment | None + next: RecordFunctionSegment | None + +# CLI Commands + +class Command: + def __init__(self, name: str, command_class: int, completer_class: int = ..., prefix: bool = ...) -> None: ... + def dont_repeat(self) -> None: ... + def invoke(self, argument: str, from_tty: bool) -> None: ... + def complete(self, text: str, word: str) -> object: ... + +def string_to_argv(__argv: str) -> list[str]: ... 
+ +COMMAND_NONE: int +COMMAND_RUNNING: int +COMMAND_DATA: int +COMMAND_STACK: int +COMMAND_FILES: int +COMMAND_SUPPORT: int +COMMAND_STATUS: int +COMMAND_BREAKPOINTS: int +COMMAND_TRACEPOINTS: int +COMMAND_TUI: int +COMMAND_USER: int +COMMAND_OBSCURE: int +COMMAND_MAINTENANCE: int + +COMPLETE_NONE: int +COMPLETE_FILENAME: int +COMPLETE_LOCATION: int +COMPLETE_COMMAND: int +COMPLETE_SYMBOL: int +COMPLETE_EXPRESSION: int + +# GDB/MI Commands + +class MICommand: + name: str + installed: bool + + def __init__(self, name: str) -> None: ... + def invoke(self, arguments: list[str]) -> dict[str, object] | None: ... + +# Parameters + +class Parameter: + set_doc: str + show_doc: str + value: object + + def __init__(self, name: str, command_class: int, parameter_class: int, enum_sequence: Sequence[str] = ...) -> None: ... + def get_set_string(self) -> str: ... + def get_show_string(self, svalue: str) -> str: ... + +PARAM_BOOLEAN: int +PARAM_AUTO_BOOLEAN: int +PARAM_UINTEGER: int +PARAM_INTEGER: int +PARAM_STRING: int +PARAM_STRING_NOESCAPE: int +PARAM_OPTIONAL_FILENAME: int +PARAM_FILENAME: int +PARAM_ZINTEGER: int +PARAM_ZUINTEGER: int +PARAM_ZUINTEGER_UNLIMITED: int +PARAM_ENUM: int + +# Convenience functions + +class Function: + def __init__(self, name: str) -> None: ... + def invoke(self, *args: Value) -> _ValueOrNative: ... + +# Progspaces + +def current_progspace() -> Progspace | None: ... +def progspaces() -> Sequence[Progspace]: ... + +class Progspace: + filename: str + pretty_printers: list[_PrettyPrinterLookupFunction] + type_printers: list[gdb.types._TypePrinter] + frame_filters: list[_FrameFilter] + + def block_for_pc(self, __pc: int) -> Block | None: ... + def find_pc_line(self, __pc: int) -> Symtab_and_line: ... + def is_valid(self) -> bool: ... + def objfiles(self) -> Sequence[Objfile]: ... + def solib_name(self, __address: int) -> str | None: ... + +# Objfiles + +def current_objfile() -> Objfile | None: ... +def objfiles() -> list[Objfile]: ... +def lookup_objfile(name: str, by_build_id: bool = ...) -> Objfile | None: ... + +class Objfile: + filename: str | None + username: str | None + owner: Objfile | None + build_id: str | None + progspace: Progspace + pretty_printers: list[_PrettyPrinterLookupFunction] + type_printers: list[gdb.types._TypePrinter] + frame_filters: list[_FrameFilter] + + def is_valid(self) -> bool: ... + def add_separate_debug_file(self, file: str) -> None: ... + def lookup_global_symbol(self, name: str, domain: int = ...) -> Symbol | None: ... + def lookup_static_method(self, name: str, domain: int = ...) -> Symbol | None: ... + +# Frames + +def selected_frame() -> Frame: ... +def newest_frame() -> Frame: ... +def frame_stop_reason_string(__code: int) -> str: ... +def invalidate_cached_frames() -> None: ... + +NORMAL_FRAME: int +INLINE_FRAME: int +TAILCALL_FRAME: int +SIGTRAMP_FRAME: int +ARCH_FRAME: int +SENTINEL_FRAME: int + +FRAME_UNWIND_NO_REASON: int +FRAME_UNWIND_NULL_ID: int +FRAME_UNWIND_OUTERMOST: int +FRAME_UNWIND_UNAVAILABLE: int +FRAME_UNWIND_INNER_ID: int +FRAME_UNWIND_SAME_ID: int +FRAME_UNWIND_NO_SAVED_PC: int +FRAME_UNWIND_MEMORY_ERROR: int +FRAME_UNWIND_FIRST_ERROR: int + +class Frame: + def is_valid(self) -> bool: ... + def name(self) -> str | None: ... + def architecture(self) -> Architecture: ... + def type(self) -> int: ... + def unwind_stop_reason(self) -> int: ... + def pc(self) -> Value: ... + def block(self) -> Block: ... + def function(self) -> Symbol: ... + def older(self) -> Frame | None: ... + def newer(self) -> Frame | None: ... 
+ def find_sal(self) -> Symtab_and_line: ... + def read_register(self, __register: str | RegisterDescriptor | int) -> Value: ... + def read_var(self, __variable: str | Symbol, block: Block | None = ...) -> Value: ... + def select(self) -> None: ... + def level(self) -> int: ... + +# Blocks + +def block_for_pc(pc: int) -> Block | None: ... + +class Block: + start: int + end: int + function: Symbol | None + superblock: Block | None + global_block: Block + static_block: Block | None + is_global: bool + is_static: bool + + def is_valid(self) -> bool: ... + def __iter__(self) -> BlockIterator: ... + +class BlockIterator: + def is_valid(self) -> bool: ... + def __iter__(self: _typeshed.Self) -> _typeshed.Self: ... + def __next__(self) -> Symbol: ... + +# Symbols + +def lookup_symbol(name: str, block: Block | None = ..., domain: int = ...) -> tuple[Symbol | None, bool]: ... +def lookup_global_symbol(name: str, domain: int = ...) -> Symbol | None: ... +def lookup_static_symbol(name: str, domain: int = ...) -> Symbol | None: ... +def lookup_static_symbols(name: str, domain: int = ...) -> list[Symbol]: ... + +class Symbol: + type: Type | None + symtab: Symtab + line: int + name: str + linkage_name: str + print_name: str + addr_class: int + needs_frame: bool + is_argument: bool + is_constant: bool + is_function: bool + is_variable: bool + + def is_valid(self) -> bool: ... + def value(self, __frame: Frame = ...) -> Value: ... + +SYMBOL_UNDEF_DOMAIN: int +SYMBOL_VAR_DOMAIN: int +SYMBOL_STRUCT_DOMAIN: int +SYMBOL_LABEL_DOMAIN: int +SYMBOL_MODULE_DOMAIN: int +SYMBOL_COMMON_BLOCK_DOMAIN: int + +SYMBOL_LOC_UNDEF: int +SYMBOL_LOC_CONST: int +SYMBOL_LOC_STATIC: int +SYMBOL_LOC_REGISTER: int +SYMBOL_LOC_ARG: int +SYMBOL_LOC_REF_ARG: int +SYMBOL_LOC_REGPARM_ADDR: int +SYMBOL_LOC_LOCAL: int +SYMBOL_LOC_TYPEDEF: int +SYMBOL_LOC_LABEL: int +SYMBOL_LOC_BLOCK: int +SYMBOL_LOC_CONST_BYTES: int +SYMBOL_LOC_UNRESOLVED: int +SYMBOL_LOC_OPTIMIZED_OUT: int +SYMBOL_LOC_COMPUTED: int +SYMBOL_LOC_COMMON_BLOCK: int + +# Symbol tables + +class Symtab_and_line: + symtab: Symtab + pc: int + last: int + line: int + + def is_valid(self) -> bool: ... + +class Symtab: + filename: str + objfile: Objfile + producer: str + + def is_valid(self) -> bool: ... + def fullname(self) -> str: ... + def global_block(self) -> Block: ... + def static_block(self) -> Block: ... + def linetable(self) -> LineTable: ... + +# Line Tables + +class LineTableEntry: + line: int + pc: int + +class LineTable(Iterator[LineTableEntry]): + def __iter__(self: _typeshed.Self) -> _typeshed.Self: ... + def __next__(self) -> LineTableEntry: ... + def line(self, __line: int) -> tuple[LineTableEntry, ...]: ... + def has_line(self, __line: int) -> bool: ... + def source_lnes(self) -> list[int]: ... + +# Breakpoints + +class Breakpoint: + @overload + def __init__( + self, spec: str, type: int = ..., wp_class: int = ..., internal: bool = ..., temporary: bool = ..., qualified: bool = ... + ) -> None: ... + @overload + def __init__( + self, + source: str = ..., + function: str = ..., + label: str = ..., + line: int = ..., + internal: bool = ..., + temporary: bool = ..., + qualified: bool = ..., + ) -> None: ... + def stop(self) -> bool: ... + def is_valid(self) -> bool: ... + def delete(self) -> None: ... 
+ + enabled: bool + silent: bool + pending: bool + thread: int | None + task: str | None + ignore_count: int + number: int + type: int + visible: bool + temporary: bool + hit_count: int + location: str | None + expression: str | None + condition: str | None + commands: str | None + +BP_BREAKPOINT: int +BP_HARDWARE_BREAKPOINT: int +BP_WATCHPOINT: int +BP_HARDWARE_WATCHPOINT: int +BP_READ_WATCHPOINT: int +BP_ACCESS_WATCHPOINT: int +BP_CATCHPOINT: int + +WP_READ: int +WP_WRITE: int +WP_ACCESS: int + +# Finish Breakpoints + +class FinishBreakpoint(Breakpoint): + return_value: Value | None + + def __init__(self, frame: Frame = ..., internal: bool = ...) -> None: ... + def out_of_scope(self) -> None: ... + +# Lazy strings + +class LazyString: + def value(self) -> Value: ... + + address: Value + length: int + encoding: str + type: Type + +# Architectures + +class Architecture: + def name(self) -> str: ... + def disassemble(self, start_pc: int, end_pc: int = ..., count: int = ...) -> list[dict[str, object]]: ... + def integer_type(self, size: int, signed: bool = ...) -> Type: ... + def registers(self, reggroup: str = ...) -> RegisterDescriptorIterator: ... + def register_groups(self) -> RegisterGroupsIterator: ... + +# Registers + +class RegisterDescriptor: + name: str + +class RegisterDescriptorIterator(Iterator[RegisterDescriptor]): + def __next__(self) -> RegisterDescriptor: ... + def find(self, name: str) -> RegisterDescriptor | None: ... + +class RegisterGroup: + name: str + +class RegisterGroupsIterator(Iterator[RegisterGroup]): + def __next__(self) -> RegisterGroup: ... + +# Connections + +class TargetConnection: + def is_valid(self) -> bool: ... + + num: int + type: str + description: str + details: str | None + +class RemoteTargetConnection(TargetConnection): + def send_packet(self, packet: str | bytes) -> bytes: ... + +# TUI Windows + +def register_window_type(name: str, factory: Callable[[TuiWindow], _Window]) -> None: ... + +class TuiWindow: + width: int + height: int + title: str + + def is_valid(self) -> bool: ... + def erase(self) -> None: ... + def write(self, __string: str, __full_window: bool = ...) -> None: ... + +class _Window(Protocol): + def close(self) -> None: ... + def render(self) -> None: ... + def hscroll(self, num: int) -> None: ... + def vscroll(self, num: int) -> None: ... + def click(self, x: int, y: int, button: int) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/events.pyi new file mode 100644 index 00000000..72d4fc50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/events.pyi @@ -0,0 +1,131 @@ +from collections.abc import Callable, Sequence + +import gdb + +class ThreadEvent: + inferior_thread: gdb.InferiorThread + +class ContinueEvent(ThreadEvent): ... + +class ContinueEventRegistry: + def connect(self, __object: Callable[[ContinueEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[ContinueEvent], object]) -> None: ... + +cont: ContinueEventRegistry + +class ExitedEvent: + exit_code: int + inferior: gdb.Inferior + +class ExitedEventRegistry: + def connect(self, __object: Callable[[ExitedEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[ExitedEvent], object]) -> None: ... 
+ +exited: ExitedEventRegistry + +class StopEvent(ThreadEvent): + stop_signal: str + +class BreakpointEvent(StopEvent): + breakpoints: Sequence[gdb.Breakpoint] + breakpoint: gdb.Breakpoint + +class StopEventRegistry: + def connect(self, __object: Callable[[StopEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[StopEvent], object]) -> None: ... + +stop: StopEventRegistry + +class NewObjFileEvent: + new_objfile: gdb.Objfile + +class NewObjFileEventRegistry: + def connect(self, __object: Callable[[NewObjFileEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[NewObjFileEvent], object]) -> None: ... + +new_objfile: NewObjFileEventRegistry + +class ClearObjFilesEvent: + progspace: gdb.Progspace + +class ClearObjFilesEventRegistry: + def connect(self, __object: Callable[[ClearObjFilesEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[ClearObjFilesEvent], object]) -> None: ... + +clear_objfiles: ClearObjFilesEventRegistry + +class InferiorCallEvent: ... + +class InferiorCallPreEvent(InferiorCallEvent): + ptid: gdb.InferiorThread + address: gdb.Value + +class InferiorCallPostEvent(InferiorCallEvent): + ptid: gdb.InferiorThread + address: gdb.Value + +class InferiorCallEventRegistry: + def connect(self, __object: Callable[[InferiorCallEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[InferiorCallEvent], object]) -> None: ... + +inferior_call: InferiorCallEventRegistry + +class MemoryChangedEvent: + address: gdb.Value + length: int + +class MemoryChangedEventRegistry: + def connect(self, __object: Callable[[MemoryChangedEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[MemoryChangedEvent], object]) -> None: ... + +memory_changed: MemoryChangedEventRegistry + +class RegisterChangedEvent: + frame: gdb.Frame + regnum: str + +class RegisterChangedEventRegistry: + def connect(self, __object: Callable[[RegisterChangedEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[RegisterChangedEvent], object]) -> None: ... + +register_changed: RegisterChangedEventRegistry + +class BreakpointEventRegistry: + def connect(self, __object: Callable[[gdb.Breakpoint], object]) -> None: ... + def disconnect(self, __object: Callable[[gdb.Breakpoint], object]) -> None: ... + +breakpoint_created: BreakpointEventRegistry +breakpoint_modified: BreakpointEventRegistry +breakpoint_deleted: BreakpointEventRegistry + +class BeforePromptEventRegistry: + def connect(self, __object: Callable[[], object]) -> None: ... + def disconnect(self, __object: Callable[[], object]) -> None: ... + +before_prompt: BeforePromptEventRegistry + +class NewInferiorEvent: + inferior: gdb.Inferior + +class NewInferiorEventRegistry: + def connect(self, __object: Callable[[NewInferiorEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[NewInferiorEvent], object]) -> None: ... + +new_inferior: NewInferiorEventRegistry + +class InferiorDeletedEvent: + inferior: gdb.Inferior + +class InferiorDeletedEventRegistry: + def connect(self, __object: Callable[[InferiorDeletedEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[InferiorDeletedEvent], object]) -> None: ... + +inferior_deleted: InferiorDeletedEventRegistry + +class NewThreadEvent(ThreadEvent): ... + +class NewThreadEventRegistry: + def connect(self, __object: Callable[[NewThreadEvent], object]) -> None: ... + def disconnect(self, __object: Callable[[NewThreadEvent], object]) -> None: ... 
+ +new_thread: NewThreadEventRegistry diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/printing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/printing.pyi new file mode 100644 index 00000000..591fdd09 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/printing.pyi @@ -0,0 +1,31 @@ +from collections.abc import Callable, Iterable + +import gdb +from gdb import _PrettyPrinterLookupFunction + +class PrettyPrinter: + name: str + subprinters: list[SubPrettyPrinter] | None + enabled: bool + + def __init__(self, name: str, subprinters: Iterable[SubPrettyPrinter] | None = ...) -> None: ... + def __call__(self, val: gdb.Value) -> gdb._PrettyPrinter | None: ... + +class SubPrettyPrinter: + name: str + enabled: bool + + def __init__(self, name: str) -> None: ... + +class RegexpCollectionPrettyPrinter(PrettyPrinter): + def __init__(self, name: str) -> None: ... + def add_printer(self, name: str, regexp: str, gen_printer: _PrettyPrinterLookupFunction) -> None: ... + +class FlagEnumerationPrinter(PrettyPrinter): + def __init__(self, enum_type: str) -> None: ... + +def register_pretty_printer( + obj: gdb.Objfile | gdb.Progspace | None, + printer: PrettyPrinter | Callable[[gdb.Value], gdb._PrettyPrinter | None], + replace: bool = ..., +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/prompt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/prompt.pyi new file mode 100644 index 00000000..ed14e757 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/prompt.pyi @@ -0,0 +1 @@ +def substitute_prompt(string: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/types.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/types.pyi new file mode 100644 index 00000000..b39b9d86 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/types.pyi @@ -0,0 +1,24 @@ +from collections.abc import Iterator +from typing import Protocol + +import gdb + +def get_basic_type(type_: gdb.Type) -> gdb.Type: ... +def has_field(type_: gdb.Type, field: str) -> bool: ... +def make_enum_dict(type_: gdb.Type) -> dict[str, int]: ... +def deep_items(type_: gdb.Type) -> Iterator[tuple[str, gdb.Field]]: ... +def get_type_recognizers() -> list[_TypeRecognizer]: ... +def apply_type_recognizers(recognizers: list[_TypeRecognizer], type_obj: gdb.Type) -> str | None: ... +def register_type_printer(locus: gdb.Objfile | gdb.Progspace | None, printer: _TypePrinter) -> None: ... + +class _TypePrinter(Protocol): + enabled: bool + name: str + + def instantiate(self) -> _TypeRecognizer | None: ... + +class _TypeRecognizer(Protocol): + def recognize(self, __type: gdb.Type) -> str | None: ... + +class TypePrinter: + def __init__(self, name: str) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/unwinder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/unwinder.pyi
new file mode 100644
index 00000000..1f3279fa
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/unwinder.pyi
@@ -0,0 +1,3 @@
+import gdb
+
+def register_unwinder(locus: gdb.Objfile | gdb.Progspace | None, unwinder: gdb.Unwinder, replace: bool = ...) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/xmethod.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/xmethod.pyi
new file mode 100644
index 00000000..cab42aea
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/gdb/gdb/xmethod.pyi
@@ -0,0 +1,34 @@
+from collections.abc import Sequence
+from typing import Protocol
+
+import gdb
+
+def register_xmethod_matcher(
+    locus: gdb.Objfile | gdb.Progspace | None, matcher: _XMethodMatcher, replace: bool = ...
+) -> None: ...
+
+class _XMethod(Protocol):
+    name: str
+    enabled: bool
+
+class XMethod:
+    name: str
+    enabled: bool
+
+    def __init__(self, name: str) -> None: ...
+
+class _XMethodWorker(Protocol):
+    def get_arg_types(self) -> Sequence[gdb.Type]: ...
+    def get_result_type(self, *args: gdb.Value) -> gdb.Type: ...
+    def __call__(self, *args: gdb.Value) -> object: ...
+
+class XMethodWorker: ...
+
+class _XMethodMatcher(Protocol):
+    enabled: bool
+    methods: list[_XMethod]
+
+    def __init__(self, name: str) -> None: ...
+    def match(self, class_type: gdb.Type, method_name: str) -> _XMethodWorker: ...
+
+class XMethodMatcher: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/@tests/stubtest_allowlist.txt
new file mode 100644
index 00000000..a67cb1a3
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/@tests/stubtest_allowlist.txt
@@ -0,0 +1,4 @@
+# inconsistency of signatures between stub and implementation (cls vs self)
+google.cloud.ndb.ModelAdapter.__new__
+google.cloud.ndb.metadata.EntityGroup.__new__
+google.cloud.ndb.model.ModelAdapter.__new__
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/METADATA.toml
new file mode 100644
index 00000000..63fe58df
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/METADATA.toml
@@ -0,0 +1,5 @@
+version = "2.1.*"
+
+[tool.stubtest]
+stubtest_requirements = ["protobuf==3.20.2"]
+ignore_missing_stub = true
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/__init__.pyi
new file mode 100644
index 00000000..3fb429e6
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/__init__.pyi
@@ -0,0 +1,109 @@
+from google.cloud.ndb._datastore_api import EVENTUAL as EVENTUAL, EVENTUAL_CONSISTENCY as EVENTUAL_CONSISTENCY, STRONG as STRONG
+from google.cloud.ndb._datastore_query import Cursor as Cursor, QueryIterator as QueryIterator
+from google.cloud.ndb._transaction import (
+    in_transaction as in_transaction,
+    non_transactional as non_transactional,
+    transaction as transaction,
+    transaction_async as transaction_async,
+    transactional as transactional,
+    transactional_async as transactional_async,
+    transactional_tasklet as transactional_tasklet,
+)
+from google.cloud.ndb.client import Client as Client
+from google.cloud.ndb.context import (
+    AutoBatcher as AutoBatcher,
+    Context as Context,
+    ContextOptions as ContextOptions,
+    TransactionOptions as TransactionOptions,
+    get_context as get_context,
+    get_toplevel_context as get_toplevel_context,
+)
+from google.cloud.ndb.global_cache import GlobalCache as GlobalCache, MemcacheCache as MemcacheCache, RedisCache as RedisCache
+from google.cloud.ndb.key import Key as Key
+from google.cloud.ndb.model import (
+    BadProjectionError as BadProjectionError,
+    BlobKey as BlobKey,
+    BlobKeyProperty as BlobKeyProperty,
+    BlobProperty as BlobProperty,
+    BooleanProperty as BooleanProperty,
+    ComputedProperty as ComputedProperty,
+    ComputedPropertyError as ComputedPropertyError,
+    DateProperty as DateProperty,
+    DateTimeProperty as DateTimeProperty,
+    Expando as Expando,
+    FloatProperty as FloatProperty,
+    GenericProperty as GenericProperty,
+    GeoPt as GeoPt,
+    GeoPtProperty as GeoPtProperty,
+    Index as Index,
+    IndexProperty as IndexProperty,
+    IndexState as IndexState,
+    IntegerProperty as IntegerProperty,
+    InvalidPropertyError as InvalidPropertyError,
+    JsonProperty as JsonProperty,
+    KeyProperty as KeyProperty,
+    KindError as KindError,
+    LocalStructuredProperty as LocalStructuredProperty,
+    MetaModel as MetaModel,
+    Model as Model,
+    ModelAdapter as ModelAdapter,
+    ModelAttribute as ModelAttribute,
+    ModelKey as ModelKey,
+    PickleProperty as PickleProperty,
+    Property as Property,
+    ReadonlyPropertyError as ReadonlyPropertyError,
+    Rollback as Rollback,
+    StringProperty as StringProperty,
+    StructuredProperty as StructuredProperty,
+    TextProperty as TextProperty,
+    TimeProperty as TimeProperty,
+    UnprojectedPropertyError as UnprojectedPropertyError,
+    User as User,
+    UserNotFoundError as UserNotFoundError,
+    UserProperty as UserProperty,
+    delete_multi as delete_multi,
+    delete_multi_async as delete_multi_async,
+    get_indexes as get_indexes,
+    get_indexes_async as get_indexes_async,
+    get_multi as get_multi,
+    get_multi_async as get_multi_async,
+    make_connection as make_connection,
+    put_multi as put_multi,
+    put_multi_async as put_multi_async,
+)
+from google.cloud.ndb.polymodel import PolyModel as PolyModel
+from google.cloud.ndb.query import (
+    AND as AND,
+    OR as OR,
+    ConjunctionNode as ConjunctionNode,
+    DisjunctionNode as DisjunctionNode,
+    FalseNode as FalseNode,
+    FilterNode as FilterNode,
+    Node as Node,
+    Parameter as Parameter,
+    ParameterizedFunction as ParameterizedFunction,
+    ParameterizedThing as ParameterizedThing,
+    ParameterNode as ParameterNode,
+    PostFilterNode as PostFilterNode,
+    Query as Query,
+    QueryOptions as QueryOptions,
+    RepeatedStructuredPropertyPredicate as RepeatedStructuredPropertyPredicate,
+    gql as gql,
+)
+from google.cloud.ndb.tasklets import (
+    Future as Future,
+    QueueFuture as QueueFuture,
+    ReducingFuture as ReducingFuture,
+    Return as Return,
+    SerialQueueFuture as SerialQueueFuture,
+    add_flow_exception as add_flow_exception,
+    make_context as make_context,
+    make_default_context as make_default_context,
+    set_context as set_context,
+ sleep as sleep, + synctasklet as synctasklet, + tasklet as tasklet, + toplevel as toplevel, + wait_all as wait_all, + wait_any as wait_any, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_batch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_batch.pyi new file mode 100644 index 00000000..be6e53a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_batch.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def get_batch(batch_cls, options: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi new file mode 100644 index 00000000..811eb180 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete +from typing import Any + +from google.cloud.ndb import tasklets as tasklets + +class ContextCache: + def get_and_validate(self, key): ... + +class _GlobalCacheBatch: + def full(self): ... + def idle_callback(self) -> None: ... + def done_callback(self, cache_call) -> None: ... + def make_call(self) -> None: ... + def future_info(self, key) -> None: ... + +global_get: Any + +class _GlobalCacheGetBatch(_GlobalCacheBatch): + todo: Any + keys: Any + def __init__(self, ignore_options) -> None: ... + def add(self, key): ... + def done_callback(self, cache_call) -> None: ... + def make_call(self): ... + def future_info(self, key): ... + +def global_set(key, value, expires: Incomplete | None = ..., read: bool = ...): ... + +class _GlobalCacheSetBatch(_GlobalCacheBatch): + expires: Any + todo: object + futures: object + def __init__(self, options) -> None: ... + def done_callback(self, cache_call) -> None: ... + def add(self, key, value): ... + def make_call(self): ... + def future_info(self, key, value): ... + +class _GlobalCacheSetIfNotExistsBatch(_GlobalCacheSetBatch): + def add(self, key, value): ... + def make_call(self): ... + def future_info(self, key, value): ... + +global_delete: Any + +class _GlobalCacheDeleteBatch(_GlobalCacheBatch): + keys: Any + futures: Any + def __init__(self, ignore_options) -> None: ... + def add(self, key): ... + def make_call(self): ... + def future_info(self, key): ... + +global_watch: Any + +class _GlobalCacheWatchBatch(_GlobalCacheDeleteBatch): + def make_call(self): ... + def future_info(self, key, value): ... + +def global_unwatch(key): ... + +class _GlobalCacheUnwatchBatch(_GlobalCacheDeleteBatch): + def make_call(self): ... + def future_info(self, key): ... + +global_compare_and_swap: Any + +class _GlobalCacheCompareAndSwapBatch(_GlobalCacheSetBatch): + def make_call(self): ... + def future_info(self, key, value): ... + +def is_locked_value(value): ... +def global_cache_key(key): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_api.pyi new file mode 100644 index 00000000..67b9641d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_api.pyi @@ -0,0 +1,5 @@ +from typing_extensions import Literal + +EVENTUAL: Literal[2] +EVENTUAL_CONSISTENCY: Literal[2] +STRONG: Literal[1] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi new file mode 100644 index 00000000..597b8810 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete +from typing import Any + +class QueryIterator: + def __iter__(self): ... + def has_next(self) -> None: ... + def has_next_async(self) -> None: ... + def probably_has_next(self) -> None: ... + def next(self) -> None: ... + def cursor_before(self) -> None: ... + def cursor_after(self) -> None: ... + def index_list(self) -> None: ... + +class Cursor: + @classmethod + def from_websafe_string(cls, urlsafe): ... + cursor: Any + def __init__(self, cursor: Incomplete | None = ..., urlsafe: Incomplete | None = ...) -> None: ... + def to_websafe_string(self): ... + def urlsafe(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi new file mode 100644 index 00000000..2c80cd37 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi @@ -0,0 +1,26 @@ +from typing import Any, NamedTuple + +class _Event(NamedTuple): + when: Any + callback: Any + args: Any + kwargs: Any + +class EventLoop: + current: Any + idlers: Any + inactive: int + queue: Any + rpcs: Any + rpc_results: Any + def __init__(self) -> None: ... + def clear(self) -> None: ... + def insort_event_right(self, event) -> None: ... + def call_soon(self, callback, *args, **kwargs) -> None: ... + def queue_call(self, delay, callback, *args, **kwargs) -> None: ... + def queue_rpc(self, rpc, callback) -> None: ... + def add_idle(self, callback, *args, **kwargs) -> None: ... + def run_idle(self): ... + def run0(self): ... + def run1(self): ... + def run(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_options.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_options.pyi new file mode 100644 index 00000000..a038bc27 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_options.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class Options: + @classmethod + def options(cls, wrapped, _disambiguate_from_model_properties: bool = ...): ... + @classmethod + def slots(cls): ... + def __init__(self, config: Incomplete | None = ..., **kwargs) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def copy(self, **kwargs): ... + def items(self) -> None: ... + +class ReadOptions(Options): + def __init__(self, config: Incomplete | None = ..., **kwargs) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_transaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_transaction.pyi new file mode 100644 index 00000000..c19dc18b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/_transaction.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete + +def in_transaction(): ... +def transaction( + callback, retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Incomplete | None = ... +): ... +def transaction_async( + callback, retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Incomplete | None = ... +): ... +def transaction_async_( + callback, retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Incomplete | None = ... +): ... +def transactional(retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Incomplete | None = ...): ... +def transactional_async( + retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Incomplete | None = ... +): ... +def transactional_tasklet( + retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Incomplete | None = ... +): ... +def non_transactional(allow_existing: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi new file mode 100644 index 00000000..acb002aa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi @@ -0,0 +1,65 @@ +from typing import Any + +from google.cloud.ndb import model + +BlobKey: Any +BLOB_INFO_KIND: str +BLOB_MIGRATION_KIND: str +BLOB_KEY_HEADER: str +BLOB_RANGE_HEADER: str +MAX_BLOB_FETCH_SIZE: int +UPLOAD_INFO_CREATION_HEADER: str +BlobKeyProperty = model.BlobKeyProperty + +class BlobFetchSizeTooLargeError: + def __init__(self, *args, **kwargs) -> None: ... + +class BlobInfo: + def __init__(self, *args, **kwargs) -> None: ... + @classmethod + def get(cls, *args, **kwargs) -> None: ... + @classmethod + def get_async(cls, *args, **kwargs) -> None: ... + @classmethod + def get_multi(cls, *args, **kwargs) -> None: ... 
+ @classmethod + def get_multi_async(cls, *args, **kwargs) -> None: ... + +class BlobInfoParseError: + def __init__(self, *args, **kwargs) -> None: ... + +class BlobNotFoundError: + def __init__(self, *args, **kwargs) -> None: ... + +class BlobReader: + def __init__(self, *args, **kwargs) -> None: ... + +def create_upload_url(*args, **kwargs) -> None: ... +def create_upload_url_async(*args, **kwargs) -> None: ... + +class DataIndexOutOfRangeError: + def __init__(self, *args, **kwargs) -> None: ... + +def delete(*args, **kwargs) -> None: ... +def delete_async(*args, **kwargs) -> None: ... +def delete_multi(*args, **kwargs) -> None: ... +def delete_multi_async(*args, **kwargs) -> None: ... + +class Error: + def __init__(self, *args, **kwargs) -> None: ... + +def fetch_data(*args, **kwargs) -> None: ... +def fetch_data_async(*args, **kwargs) -> None: ... + +get: Any +get_async: Any +get_multi: Any +get_multi_async: Any + +class InternalError: + def __init__(self, *args, **kwargs) -> None: ... + +def parse_blob_info(*args, **kwargs) -> None: ... + +class PermissionDeniedError: + def __init__(self, *args, **kwargs) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/client.pyi new file mode 100644 index 00000000..09812a7f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/client.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Iterator +from contextlib import contextmanager +from typing import ClassVar + +from google.cloud.ndb import context as context_module, key + +DATASTORE_API_HOST: str + +class Client: + SCOPE: ClassVar[tuple[str, ...]] + namespace: str | None + host: str + client_info: Incomplete + secure: bool + stub: Incomplete + def __init__( + self, + project: str | None = ..., + namespace: str | None = ..., + credentials: Incomplete | None = ..., + client_options: Incomplete | None = ..., + ) -> None: ... + @contextmanager + def context( + self, + namespace=..., + cache_policy: Callable[[key.Key], bool] | None = ..., + global_cache: Incomplete | None = ..., + global_cache_policy: Callable[[key.Key], bool] | None = ..., + global_cache_timeout_policy: Callable[[key.Key], int] | None = ..., + legacy_data: bool = ..., + ) -> Iterator[context_module.Context]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi new file mode 100644 index 00000000..ec1ca402 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi @@ -0,0 +1,110 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from typing import Any, NamedTuple + +from google.cloud.ndb import Key, exceptions as exceptions + +class _LocalState: + def __init__(self) -> None: ... + @property + def context(self): ... + @context.setter + def context(self, value) -> None: ... + @property + def toplevel_context(self): ... + @toplevel_context.setter + def toplevel_context(self, value) -> None: ... + +def get_context(raise_context_error: bool = ...): ... +def get_toplevel_context(raise_context_error: bool = ...): ... 
+ +class _ContextTuple(NamedTuple): + id: Any + client: Any + namespace: Any + eventloop: Any + batches: Any + commit_batches: Any + transaction: Any + cache: Any + global_cache: Any + on_commit_callbacks: Any + transaction_complete_callbacks: Any + legacy_data: Any + +class _Context(_ContextTuple): + def __new__( + cls, + client, + id: Incomplete | None = ..., + namespace=..., + eventloop: Incomplete | None = ..., + batches: Incomplete | None = ..., + commit_batches: Incomplete | None = ..., + transaction: Incomplete | None = ..., + cache: Incomplete | None = ..., + cache_policy: Incomplete | None = ..., + global_cache: Incomplete | None = ..., + global_cache_policy: Callable[[Key], bool] | None = ..., + global_cache_timeout_policy: Incomplete | None = ..., + datastore_policy: Incomplete | None = ..., + on_commit_callbacks: Incomplete | None = ..., + transaction_complete_callbacks: Incomplete | None = ..., + legacy_data: bool = ..., + retry: Incomplete | None = ..., + rpc_time: Incomplete | None = ..., + wait_time: Incomplete | None = ..., + ): ... + def new(self, **kwargs): ... + rpc_time: int + wait_time: int + def use(self) -> None: ... + +class Context(_Context): + def clear_cache(self) -> None: ... + def flush(self) -> None: ... + def get_namespace(self): ... + def get_cache_policy(self): ... + def get_datastore_policy(self) -> None: ... + def get_global_cache_policy(self): ... + get_memcache_policy: Any + def get_global_cache_timeout_policy(self): ... + get_memcache_timeout_policy: Any + cache_policy: Any + def set_cache_policy(self, policy): ... + datastore_policy: Any + def set_datastore_policy(self, policy): ... + global_cache_policy: Any + def set_global_cache_policy(self, policy): ... + set_memcache_policy: Any + global_cache_timeout_policy: Any + def set_global_cache_timeout_policy(self, policy): ... + set_memcache_timeout_policy: Any + def get_retry_state(self): ... + def set_retry_state(self, state) -> None: ... + def clear_retry_state(self) -> None: ... + def call_on_commit(self, callback) -> None: ... + def in_transaction(self): ... + def in_retry(self): ... + def memcache_add(self, *args, **kwargs) -> None: ... + def memcache_cas(self, *args, **kwargs) -> None: ... + def memcache_decr(self, *args, **kwargs) -> None: ... + def memcache_delete(self, *args, **kwargs) -> None: ... + def memcache_get(self, *args, **kwargs) -> None: ... + def memcache_gets(self, *args, **kwargs) -> None: ... + def memcache_incr(self, *args, **kwargs) -> None: ... + def memcache_replace(self, *args, **kwargs) -> None: ... + def memcache_set(self, *args, **kwargs) -> None: ... + def urlfetch(self, *args, **kwargs) -> None: ... + +class ContextOptions: + def __init__(self, *args, **kwargs) -> None: ... + +class TransactionOptions: + NESTED: int + MANDATORY: int + ALLOWED: int + INDEPENDENT: int + +class AutoBatcher: + def __init__(self, *args, **kwargs) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/django_middleware.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/django_middleware.pyi new file mode 100644 index 00000000..8d4c846e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/django_middleware.pyi @@ -0,0 +1,2 @@ +class NdbDjangoMiddleware: + def __init__(self, *args, **kwargs) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi new file mode 100644 index 00000000..ab5a3a0a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi @@ -0,0 +1,22 @@ +from typing import Any + +class Error(Exception): ... + +class ContextError(Error): + def __init__(self) -> None: ... + +class BadValueError(Error): ... +class BadArgumentError(Error): ... +class BadRequestError(Error): ... +class Rollback(Error): ... +class BadQueryError(Error): ... + +class BadFilterError(Error): + filter: Any + def __init__(self, filter) -> None: ... + +class NoLongerImplementedError(NotImplementedError): + def __init__(self) -> None: ... + +class Cancelled(Error): ... +class NestedRetryException(Error): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi new file mode 100644 index 00000000..8a495a68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi @@ -0,0 +1,77 @@ +import abc +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Self + +ConnectionError: Any + +class GlobalCache(metaclass=abc.ABCMeta): + __metaclass__: Any + transient_errors: Any + strict_read: bool + strict_write: bool + @abc.abstractmethod + def get(self, keys): ... + @abc.abstractmethod + def set(self, items, expires: Incomplete | None = ...): ... + @abc.abstractmethod + def delete(self, keys): ... + @abc.abstractmethod + def watch(self, items): ... + @abc.abstractmethod + def unwatch(self, keys): ... + @abc.abstractmethod + def compare_and_swap(self, items, expires: Incomplete | None = ...): ... + @abc.abstractmethod + def clear(self): ... + +class _InProcessGlobalCache(GlobalCache): + cache: Any + def __init__(self) -> None: ... + def get(self, keys): ... + def set(self, items, expires: Incomplete | None = ...) -> None: ... + def delete(self, keys) -> None: ... + def watch(self, items) -> None: ... + def unwatch(self, keys) -> None: ... + def compare_and_swap(self, items, expires: Incomplete | None = ...): ... + def clear(self) -> None: ... + +class RedisCache(GlobalCache): + transient_errors: Any + @classmethod + def from_environment(cls, strict_read: bool = ..., strict_write: bool = ...) -> Self: ... + redis: Any + strict_read: Any + strict_write: Any + def __init__(self, redis, strict_read: bool = ..., strict_write: bool = ...) -> None: ... + @property + def pipes(self): ... + def get(self, keys): ... + def set(self, items, expires: Incomplete | None = ...) -> None: ... + def delete(self, keys) -> None: ... + def watch(self, items) -> None: ... + def unwatch(self, keys) -> None: ... + def compare_and_swap(self, items, expires: Incomplete | None = ...): ... + def clear(self) -> None: ... + +class MemcacheCache(GlobalCache): + class KeyNotSet(Exception): + key: Any + def __init__(self, key) -> None: ... + def __eq__(self, other): ... + transient_errors: Any + @classmethod + def from_environment(cls, max_pool_size: int = ..., strict_read: bool = ..., strict_write: bool = ...) -> Self: ... 
+ client: Any + strict_read: Any + strict_write: Any + def __init__(self, client, strict_read: bool = ..., strict_write: bool = ...) -> None: ... + @property + def caskeys(self): ... + def get(self, keys): ... + def set(self, items, expires: Incomplete | None = ...): ... + def delete(self, keys) -> None: ... + def watch(self, items) -> None: ... + def unwatch(self, keys) -> None: ... + def compare_and_swap(self, items, expires: Incomplete | None = ...): ... + def clear(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi new file mode 100644 index 00000000..5139ae1b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi @@ -0,0 +1,99 @@ +from _typeshed import Incomplete +from typing import Any + +UNDEFINED: Any + +class Key: + def __new__(cls, *path_args, **kwargs): ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __getnewargs__(self): ... + def parent(self): ... + def root(self): ... + def namespace(self): ... + def project(self): ... + app: Any + def id(self): ... + def string_id(self): ... + def integer_id(self): ... + def pairs(self): ... + def flat(self): ... + def kind(self): ... + def reference(self): ... + def serialized(self): ... + def urlsafe(self): ... + def to_legacy_urlsafe(self, location_prefix): ... + def get( + self, + read_consistency: Incomplete | None = ..., + read_policy: Incomplete | None = ..., + transaction: Incomplete | None = ..., + retries: Incomplete | None = ..., + timeout: Incomplete | None = ..., + deadline: Incomplete | None = ..., + use_cache: Incomplete | None = ..., + use_global_cache: Incomplete | None = ..., + use_datastore: Incomplete | None = ..., + global_cache_timeout: Incomplete | None = ..., + use_memcache: Incomplete | None = ..., + memcache_timeout: Incomplete | None = ..., + max_memcache_items: Incomplete | None = ..., + force_writes: Incomplete | None = ..., + _options: Incomplete | None = ..., + ): ... + def get_async( + self, + read_consistency: Incomplete | None = ..., + read_policy: Incomplete | None = ..., + transaction: Incomplete | None = ..., + retries: Incomplete | None = ..., + timeout: Incomplete | None = ..., + deadline: Incomplete | None = ..., + use_cache: Incomplete | None = ..., + use_global_cache: Incomplete | None = ..., + use_datastore: Incomplete | None = ..., + global_cache_timeout: Incomplete | None = ..., + use_memcache: Incomplete | None = ..., + memcache_timeout: Incomplete | None = ..., + max_memcache_items: Incomplete | None = ..., + force_writes: Incomplete | None = ..., + _options: Incomplete | None = ..., + ): ... + def delete( + self, + retries: Incomplete | None = ..., + timeout: Incomplete | None = ..., + deadline: Incomplete | None = ..., + use_cache: Incomplete | None = ..., + use_global_cache: Incomplete | None = ..., + use_datastore: Incomplete | None = ..., + global_cache_timeout: Incomplete | None = ..., + use_memcache: Incomplete | None = ..., + memcache_timeout: Incomplete | None = ..., + max_memcache_items: Incomplete | None = ..., + force_writes: Incomplete | None = ..., + _options: Incomplete | None = ..., + ): ... 
+ def delete_async( + self, + retries: Incomplete | None = ..., + timeout: Incomplete | None = ..., + deadline: Incomplete | None = ..., + use_cache: Incomplete | None = ..., + use_global_cache: Incomplete | None = ..., + use_datastore: Incomplete | None = ..., + global_cache_timeout: Incomplete | None = ..., + use_memcache: Incomplete | None = ..., + memcache_timeout: Incomplete | None = ..., + max_memcache_items: Incomplete | None = ..., + force_writes: Incomplete | None = ..., + _options: Incomplete | None = ..., + ): ... + @classmethod + def from_old_key(cls, old_key) -> None: ... + def to_old_key(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi new file mode 100644 index 00000000..3f52576f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete +from typing import Any + +from google.cloud.ndb import model + +class _BaseMetadata(model.Model): + KIND_NAME: str + def __new__(cls, *args, **kwargs): ... + +class Namespace(_BaseMetadata): + KIND_NAME: str + EMPTY_NAMESPACE_ID: int + @property + def namespace_name(self): ... + @classmethod + def key_for_namespace(cls, namespace): ... + @classmethod + def key_to_namespace(cls, key): ... + +class Kind(_BaseMetadata): + KIND_NAME: str + @property + def kind_name(self): ... + @classmethod + def key_for_kind(cls, kind): ... + @classmethod + def key_to_kind(cls, key): ... + +class Property(_BaseMetadata): + KIND_NAME: str + @property + def property_name(self): ... + @property + def kind_name(self): ... + property_representation: Any + @classmethod + def key_for_kind(cls, kind): ... + @classmethod + def key_for_property(cls, kind, property): ... + @classmethod + def key_to_kind(cls, key): ... + @classmethod + def key_to_property(cls, key): ... + +class EntityGroup: + def __new__(cls, *args, **kwargs): ... + +def get_entity_group_version(*args, **kwargs) -> None: ... +def get_kinds(start: Incomplete | None = ..., end: Incomplete | None = ...): ... +def get_namespaces(start: Incomplete | None = ..., end: Incomplete | None = ...): ... +def get_properties_of_kind(kind, start: Incomplete | None = ..., end: Incomplete | None = ...): ... +def get_representations_of_kind(kind, start: Incomplete | None = ..., end: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/model.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/model.pyi new file mode 100644 index 00000000..8a8dc180 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/model.pyi @@ -0,0 +1,508 @@ +import datetime +from _typeshed import Unused +from collections.abc import Callable, Iterable, Sequence +from typing import Any, NoReturn +from typing_extensions import Literal, Self, TypeAlias + +from google.cloud.ndb import exceptions, key as key_module, query as query_module, tasklets as tasklets_module + +Key = key_module.Key +Rollback = exceptions.Rollback +BlobKey: object +GeoPt: object + +class KindError(exceptions.BadValueError): ... +class InvalidPropertyError(exceptions.Error): ... 
+ +BadProjectionError = InvalidPropertyError + +class UnprojectedPropertyError(exceptions.Error): ... +class ReadonlyPropertyError(exceptions.Error): ... +class ComputedPropertyError(ReadonlyPropertyError): ... +class UserNotFoundError(exceptions.Error): ... + +class _NotEqualMixin: + def __ne__(self, other: object) -> bool: ... + +_Direction: TypeAlias = Literal["asc", "desc"] + +class IndexProperty(_NotEqualMixin): + def __new__(cls, name: str, direction: _Direction) -> Self: ... + @property + def name(self) -> str: ... + @property + def direction(self) -> _Direction: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + +class Index(_NotEqualMixin): + def __new__(cls, kind: str, properties: list[IndexProperty], ancestor: bool) -> Self: ... + @property + def kind(self) -> str: ... + @property + def properties(self) -> list[IndexProperty]: ... + @property + def ancestor(self) -> bool: ... + def __eq__(self, other) -> bool: ... + def __hash__(self) -> int: ... + +class IndexState(_NotEqualMixin): + def __new__(cls, definition, state, id): ... + @property + def definition(self): ... + @property + def state(self): ... + @property + def id(self): ... + def __eq__(self, other) -> bool: ... + def __hash__(self) -> int: ... + +class ModelAdapter: + # This actually returns NoReturn, but mypy can't handle that + def __new__(cls, *args, **kwargs) -> Self: ... + +def make_connection(*args, **kwargs) -> NoReturn: ... + +class ModelAttribute: ... + +class _BaseValue(_NotEqualMixin): + b_val: object = ... + def __init__(self, b_val) -> None: ... + def __eq__(self, other) -> bool: ... + def __hash__(self) -> int: ... + +class Property(ModelAttribute): + def __init__( + self, + name: str | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: object = None, + choices: Iterable[object] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + def __eq__(self, value: object) -> query_module.FilterNode: ... # type: ignore[override] + def __ne__(self, value: object) -> query_module.FilterNode: ... # type: ignore[override] + def __lt__(self, value: object) -> query_module.FilterNode: ... + def __le__(self, value: object) -> query_module.FilterNode: ... + def __gt__(self, value: object) -> query_module.FilterNode: ... + def __ge__(self, value: object) -> query_module.FilterNode: ... + def IN(self, value: Iterable[object]) -> query_module.DisjunctionNode | query_module.FilterNode | query_module.FalseNode: ... + def __neg__(self) -> query_module.PropertyOrder: ... + def __pos__(self) -> query_module.PropertyOrder: ... + def __set__(self, entity: Model, value: object) -> None: ... + def __delete__(self, entity: Model) -> None: ... + +class ModelKey(Property): + def __init__(self) -> None: ... + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> key_module.Key | list[key_module.Key] | None: ... + +class BooleanProperty(Property): + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> bool | list[bool] | None: ... + +class IntegerProperty(Property): + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> int | list[int] | None: ... + +class FloatProperty(Property): + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> float | list[float] | None: ... + +class _CompressedValue(bytes): + z_val: bytes = ... 
+ def __init__(self, z_val: bytes) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> NoReturn: ... + +class BlobProperty(Property): + def __init__( + self, + name: str | None = ..., + compressed: bool | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: bytes | None = ..., + choices: Iterable[bytes] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> bytes | list[bytes] | None: ... + +class CompressedTextProperty(BlobProperty): + def __init__(self, *args, **kwargs) -> None: ... + +class TextProperty(Property): + def __new__(cls, *args, **kwargs): ... + def __init__(self, *args, **kwargs) -> None: ... + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> str | list[str] | None: ... + +class StringProperty(TextProperty): + def __init__(self, *args, **kwargs) -> None: ... + +class GeoPtProperty(Property): ... +class PickleProperty(BlobProperty): ... + +class JsonProperty(BlobProperty): + def __init__( + self, + name: str | None = ..., + compressed: bool | None = ..., + json_type: type | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: object = None, + choices: Iterable[object] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + +class User: + def __init__(self, email: str | None = ..., _auth_domain: str | None = ..., _user_id: str | None = ...) -> None: ... + def nickname(self) -> str: ... + def email(self): ... + def user_id(self) -> str | None: ... + def auth_domain(self) -> str: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + +class UserProperty(Property): + def __init__( + self, + name: str | None = ..., + auto_current_user: bool | None = ..., + auto_current_user_add: bool | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: bytes | None = ..., + choices: Iterable[bytes] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + +class KeyProperty(Property): + def __init__( + self, + name: str | None = ..., + kind: type[Model] | str | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: key_module.Key | None = ..., + choices: Iterable[key_module.Key] | None = ..., + validator: Callable[[Property, key_module.Key], key_module.Key] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + +class BlobKeyProperty(Property): ... + +class DateTimeProperty(Property): + def __init__( + self, + name: str | None = ..., + auto_now: bool | None = ..., + auto_now_add: bool | None = ..., + tzinfo: datetime.tzinfo | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: datetime.datetime | None = ..., + choices: Iterable[datetime.datetime] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... 
+ +class DateProperty(DateTimeProperty): ... +class TimeProperty(DateTimeProperty): ... + +class StructuredProperty(Property): + def __init__(self, model_class: type, name: str | None = ..., **kwargs) -> None: ... + def __getattr__(self, attrname: str): ... + def IN(self, value: Iterable[object]) -> query_module.DisjunctionNode | query_module.FalseNode: ... + +class LocalStructuredProperty(BlobProperty): + def __init__(self, model_class: type[Model], **kwargs) -> None: ... + +class GenericProperty(Property): + def __init__(self, name: str | None = ..., compressed: bool = ..., **kwargs) -> None: ... + +class ComputedProperty(GenericProperty): + def __init__( + self, + func: Callable[[Model], object], + name: str | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + verbose_name: str | None = ..., + ) -> None: ... + +class MetaModel(type): + def __init__(cls, name: str, bases, classdict) -> None: ... + +class Model(_NotEqualMixin, metaclass=MetaModel): + key: ModelKey = ... + def __init__(_self, **kwargs) -> None: ... + def __hash__(self) -> NoReturn: ... + def __eq__(self, other: object) -> bool: ... + @classmethod + def gql(cls: type[Model], query_string: str, *args, **kwargs) -> query_module.Query: ... + def put(self, **kwargs): ... + def put_async(self, **kwargs) -> tasklets_module.Future: ... + @classmethod + def query(cls: type[Model], *args, **kwargs) -> query_module.Query: ... + @classmethod + def allocate_ids( + cls: type[Model], + size: int | None = ..., + max: int | None = ..., + parent: key_module.Key | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + ) -> tuple[key_module.Key, key_module.Key]: ... + @classmethod + def allocate_ids_async( + cls: type[Model], + size: int | None = ..., + max: int | None = ..., + parent: key_module.Key | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + ) -> tasklets_module.Future: ... + @classmethod + def get_by_id( + cls: type[Model], + id: int | str | None, + parent: key_module.Key | None = ..., + namespace: str | None = ..., + project: str | None = ..., + app: str | None = ..., + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + ) -> Model | None: ... 
+ @classmethod + def get_by_id_async( + cls: type[Model], + id: int | str, + parent: key_module.Key | None = ..., + namespace: str | None = ..., + project: str | None = ..., + app: str | None = ..., + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + ) -> tasklets_module.Future: ... + @classmethod + def get_or_insert( + cls: type[Model], + _name: str, + parent: key_module.Key | None = ..., + namespace: str | None = ..., + project: str | None = ..., + app: str | None = ..., + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + **kw_model_args, + ) -> Model: ... + @classmethod + def get_or_insert_async( + cls: type[Model], + _name: str, + parent: key_module.Key | None = ..., + namespace: str | None = ..., + project: str | None = ..., + app: str | None = ..., + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + **kw_model_args, + ) -> tasklets_module.Future: ... + def populate(self, **kwargs) -> None: ... + def has_complete_key(self) -> bool: ... + def to_dict( + self, + include: list[object] | tuple[object, object] | set[object] | None = ..., + exclude: list[object] | tuple[object, object] | set[object] | None = ..., + ): ... + +class Expando(Model): + def __getattr__(self, name: str): ... + def __setattr__(self, name: str, value) -> None: ... + def __delattr__(self, name: str) -> None: ... + +def get_multi_async( + keys: Sequence[type[key_module.Key]], + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object = None, +) -> list[type[tasklets_module.Future]]: ... 
+def get_multi( + keys: Sequence[type[key_module.Key]], + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object = None, +) -> list[type[Model] | None]: ... +def put_multi_async( + entities: list[type[Model]], + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object = None, +) -> list[tasklets_module.Future]: ... +def put_multi( + entities: list[Model], + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object = None, +) -> list[key_module.Key]: ... +def delete_multi_async( + keys: list[key_module.Key], + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object = None, +) -> list[tasklets_module.Future]: ... +def delete_multi( + keys: Sequence[key_module.Key], + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object = None, +) -> list[None]: ... +def get_indexes_async(**options: Unused) -> NoReturn: ... +def get_indexes(**options: Unused) -> NoReturn: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/msgprop.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/msgprop.pyi new file mode 100644 index 00000000..2bb3e67d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/msgprop.pyi @@ -0,0 +1,5 @@ +class EnumProperty: + def __init__(self, *args, **kwargs) -> None: ... + +class MessageProperty: + def __init__(self, *args, **kwargs) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi new file mode 100644 index 00000000..43cabd28 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi @@ -0,0 +1,9 @@ +from typing import Any + +from google.cloud.ndb import model + +class _ClassKeyProperty(model.StringProperty): + def __init__(self, name=..., indexed: bool = ...) -> None: ... + +class PolyModel(model.Model): + class_: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi new file mode 100644 index 00000000..12da5f4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi @@ -0,0 +1,147 @@ +from _typeshed import Incomplete +from typing import Any + +from google.cloud.ndb import _options + +class PropertyOrder: + name: Any + reverse: Any + def __init__(self, name, reverse: bool = ...) -> None: ... + def __neg__(self): ... + +class RepeatedStructuredPropertyPredicate: + name: Any + match_keys: Any + match_values: Any + def __init__(self, name, match_keys, entity_pb) -> None: ... + def __call__(self, entity_pb): ... + +class ParameterizedThing: + def __eq__(self, other): ... + def __ne__(self, other): ... + +class Parameter(ParameterizedThing): + def __init__(self, key) -> None: ... + def __eq__(self, other): ... + @property + def key(self): ... + def resolve(self, bindings, used): ... + +class ParameterizedFunction(ParameterizedThing): + func: Any + values: Any + def __init__(self, func, values) -> None: ... + def __eq__(self, other): ... + def is_parameterized(self): ... + def resolve(self, bindings, used): ... + +class Node: + def __new__(cls): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __le__(self, unused_other): ... + def __lt__(self, unused_other): ... + def __ge__(self, unused_other): ... + def __gt__(self, unused_other): ... + def resolve(self, bindings, used): ... + +class FalseNode(Node): + def __eq__(self, other): ... + +class ParameterNode(Node): + def __new__(cls, prop, op, param): ... + def __getnewargs__(self): ... + def __eq__(self, other): ... + def resolve(self, bindings, used): ... + +class FilterNode(Node): + def __new__(cls, name, opsymbol, value): ... + def __getnewargs__(self): ... + def __eq__(self, other): ... + +class PostFilterNode(Node): + def __new__(cls, predicate): ... + def __getnewargs__(self): ... + def __eq__(self, other): ... + +class _BooleanClauses: + name: Any + combine_or: Any + or_parts: Any + def __init__(self, name, combine_or) -> None: ... + def add_node(self, node) -> None: ... + +class ConjunctionNode(Node): + def __new__(cls, *nodes): ... + def __getnewargs__(self): ... + def __iter__(self): ... + def __eq__(self, other): ... + def resolve(self, bindings, used): ... + +class DisjunctionNode(Node): + def __new__(cls, *nodes): ... + def __getnewargs__(self): ... + def __iter__(self): ... + def __eq__(self, other): ... + def resolve(self, bindings, used): ... 
+ +AND = ConjunctionNode +OR = DisjunctionNode + +class QueryOptions(_options.ReadOptions): + project: Any + namespace: Any + def __init__(self, config: Incomplete | None = ..., context: Incomplete | None = ..., **kwargs) -> None: ... + +class Query: + default_options: Any + kind: Any + ancestor: Any + filters: Any + order_by: Any + project: Any + namespace: Any + limit: Any + offset: Any + keys_only: Any + projection: Any + distinct_on: Any + def __init__( + self, + kind: Incomplete | None = ..., + filters: Incomplete | None = ..., + ancestor: Incomplete | None = ..., + order_by: Incomplete | None = ..., + orders: Incomplete | None = ..., + project: Incomplete | None = ..., + app: Incomplete | None = ..., + namespace: Incomplete | None = ..., + projection: Incomplete | None = ..., + distinct_on: Incomplete | None = ..., + group_by: Incomplete | None = ..., + limit: Incomplete | None = ..., + offset: Incomplete | None = ..., + keys_only: Incomplete | None = ..., + default_options: Incomplete | None = ..., + ) -> None: ... + @property + def is_distinct(self): ... + def filter(self, *filters): ... + def order(self, *props): ... + def analyze(self): ... + def bind(self, *positional, **keyword): ... + def fetch(self, limit: Incomplete | None = ..., **kwargs): ... + def fetch_async(self, limit: Incomplete | None = ..., **kwargs): ... + def run_to_queue(self, queue, conn, options: Incomplete | None = ..., dsquery: Incomplete | None = ...) -> None: ... + def iter(self, **kwargs): ... + __iter__: Any + def map(self, callback, **kwargs): ... + def map_async(self, callback, **kwargs) -> None: ... + def get(self, **kwargs): ... + def get_async(self, **kwargs) -> None: ... + def count(self, limit: Incomplete | None = ..., **kwargs): ... + def count_async(self, limit: Incomplete | None = ..., **kwargs): ... + def fetch_page(self, page_size, **kwargs): ... + def fetch_page_async(self, page_size, **kwargs) -> None: ... + +def gql(query_string: str, *args: Any, **kwds: Any) -> Query: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi new file mode 100644 index 00000000..1ffa02ff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi @@ -0,0 +1,102 @@ +from typing import Any + +from google.cloud.ndb import model + +class BaseStatistic(model.Model): + STORED_KIND_NAME: str + bytes: Any + count: Any + timestamp: Any + +class BaseKindStatistic(BaseStatistic): + STORED_KIND_NAME: str + kind_name: Any + entity_bytes: Any + +class GlobalStat(BaseStatistic): + STORED_KIND_NAME: str + entity_bytes: Any + builtin_index_bytes: Any + builtin_index_count: Any + composite_index_bytes: Any + composite_index_count: Any + +class NamespaceStat(BaseStatistic): + STORED_KIND_NAME: str + subject_namespace: Any + entity_bytes: Any + builtin_index_bytes: Any + builtin_index_count: Any + composite_index_bytes: Any + composite_index_count: Any + +class KindStat(BaseKindStatistic): + STORED_KIND_NAME: str + builtin_index_bytes: Any + builtin_index_count: Any + composite_index_bytes: Any + composite_index_count: Any + +class KindRootEntityStat(BaseKindStatistic): + STORED_KIND_NAME: str + +class KindNonRootEntityStat(BaseKindStatistic): + STORED_KIND_NAME: str + +class PropertyTypeStat(BaseStatistic): + STORED_KIND_NAME: str + property_type: Any + entity_bytes: Any + builtin_index_bytes: Any + builtin_index_count: Any + +class KindPropertyTypeStat(BaseKindStatistic): + STORED_KIND_NAME: str + property_type: Any + builtin_index_bytes: Any + builtin_index_count: Any + +class KindPropertyNameStat(BaseKindStatistic): + STORED_KIND_NAME: str + property_name: Any + builtin_index_bytes: Any + builtin_index_count: Any + +class KindPropertyNamePropertyTypeStat(BaseKindStatistic): + STORED_KIND_NAME: str + property_type: Any + property_name: Any + builtin_index_bytes: Any + builtin_index_count: Any + +class KindCompositeIndexStat(BaseStatistic): + STORED_KIND_NAME: str + index_id: Any + kind_name: Any + +class NamespaceGlobalStat(GlobalStat): + STORED_KIND_NAME: str + +class NamespaceKindStat(KindStat): + STORED_KIND_NAME: str + +class NamespaceKindRootEntityStat(KindRootEntityStat): + STORED_KIND_NAME: str + +class NamespaceKindNonRootEntityStat(KindNonRootEntityStat): + STORED_KIND_NAME: str + +class NamespacePropertyTypeStat(PropertyTypeStat): + STORED_KIND_NAME: str + +class NamespaceKindPropertyTypeStat(KindPropertyTypeStat): + STORED_KIND_NAME: str + +class NamespaceKindPropertyNameStat(KindPropertyNameStat): + STORED_KIND_NAME: str + +class NamespaceKindPropertyNamePropertyTypeStat(KindPropertyNamePropertyTypeStat): + STORED_KIND_NAME: str + +class NamespaceKindCompositeIndexStat(KindCompositeIndexStat): + STORED_KIND_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi new file mode 100644 index 00000000..4d9f34d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi @@ -0,0 +1,58 @@ +from typing import Any + +class Future: + info: Any + def __init__(self, info: str = ...) -> None: ... + def done(self): ... 
+ def running(self): ... + def wait(self) -> None: ... + def check_success(self) -> None: ... + def set_result(self, result) -> None: ... + def set_exception(self, exception) -> None: ... + def result(self): ... + get_result: Any + def exception(self): ... + get_exception: Any + def get_traceback(self): ... + def add_done_callback(self, callback) -> None: ... + def cancel(self) -> None: ... + def cancelled(self): ... + @staticmethod + def wait_any(futures): ... + @staticmethod + def wait_all(futures): ... + +class _TaskletFuture(Future): + generator: Any + context: Any + waiting_on: Any + def __init__(self, generator, context, info: str = ...) -> None: ... + def cancel(self) -> None: ... + +class _MultiFuture(Future): + def __init__(self, dependencies) -> None: ... + def cancel(self) -> None: ... + +def tasklet(wrapped): ... +def wait_any(futures): ... +def wait_all(futures) -> None: ... + +class Return(Exception): ... + +def sleep(seconds): ... +def add_flow_exception(*args, **kwargs) -> None: ... +def make_context(*args, **kwargs) -> None: ... +def make_default_context(*args, **kwargs) -> None: ... + +class QueueFuture: + def __init__(self, *args, **kwargs) -> None: ... + +class ReducingFuture: + def __init__(self, *args, **kwargs) -> None: ... + +class SerialQueueFuture: + def __init__(self, *args, **kwargs) -> None: ... + +def set_context(*args, **kwargs) -> None: ... +def synctasklet(wrapped): ... +def toplevel(wrapped): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi new file mode 100644 index 00000000..3ddb9607 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi @@ -0,0 +1,28 @@ +import threading +from typing import Any + +TRUTHY_STRINGS: Any + +def asbool(value): ... + +DEBUG: Any + +def code_info(*args, **kwargs) -> None: ... +def decorator(*args, **kwargs) -> None: ... +def frame_info(*args, **kwargs) -> None: ... +def func_info(*args, **kwargs) -> None: ... +def gen_info(*args, **kwargs) -> None: ... +def get_stack(*args, **kwargs) -> None: ... +def logging_debug(log, message, *args, **kwargs) -> None: ... + +class keyword_only: + defaults: Any + def __init__(self, **kwargs) -> None: ... + def __call__(self, wrapped): ... + +def positional(max_pos_args): ... + +threading_local = threading.local + +def tweak_logging(*args, **kwargs) -> None: ... +def wrapping(*args, **kwargs) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..bb918515 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/@tests/stubtest_allowlist.txt @@ -0,0 +1,6 @@ +# Are set to `None` by default, initialized later: +hdbcli.dbapi.Error.errorcode +hdbcli.dbapi.Error.errortext +hdbcli.dbapi.Warning.errorcode +hdbcli.dbapi.Warning.errortext +hdbcli.dbapi.ExecuteManyErrorEntry.rownumber diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/METADATA.toml new file mode 100644 index 00000000..d3860c9b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/METADATA.toml @@ -0,0 +1 @@ +version = "2.15.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/hdbcli/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/hdbcli/__init__.pyi new file mode 100644 index 00000000..af0d55ba --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/hdbcli/__init__.pyi @@ -0,0 +1,3 @@ +from . import dbapi as dbapi + +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/hdbcli/dbapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/hdbcli/dbapi.pyi new file mode 100644 index 00000000..51216e87 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/hdbcli/dbapi.pyi @@ -0,0 +1,136 @@ +import decimal +from _typeshed import Incomplete, ReadableBuffer +from collections.abc import Sequence +from datetime import date, datetime, time +from types import TracebackType +from typing import Any, overload +from typing_extensions import Literal, TypeAlias + +from .resultrow import ResultRow + +apilevel: str +threadsafety: int +paramstyle: tuple[str, ...] +connect = Connection + +class Connection: + def __init__( + self, + address: str, + port: int, + username: str, + password: str, + autocommit: bool = ..., + packetsize: int | None = ..., + userkey: str | None = ..., + *, + sessionvariables: dict[str, str] | None = ..., + forcebulkfetch: bool | None = ..., + ) -> None: ... + def cancel(self) -> bool: ... + def close(self) -> None: ... + def commit(self) -> None: ... + def cursor(self) -> Cursor: ... + def getaddress(self) -> str: ... + def getautocommit(self) -> bool: ... + def getclientinfo(self, key: str = ...) -> str | dict[str, str]: ... + def getproperty(self, *args: Incomplete, **kwargs: Incomplete) -> Incomplete: ... + def isconnected(self) -> bool: ... + def rollback(self) -> None: ... + def setautocommit(self, auto: bool = ...) -> None: ... + def setclientinfo(self, key: str, value: str | None = ...) -> None: ... + +class LOB: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def close(self) -> bool: ... + def find(self, object: str, length: int, position: int = ...) -> int: ... + def read(self, size: int = ..., position: int = ...) -> str | bytes: ... + def write(self, object: str | bytes) -> int: ... 
+ +_Parameters: TypeAlias = Sequence[tuple[Any, ...]] + +class Cursor: + description: tuple[tuple[Any, ...], ...] + rowcount: int + statementhash: str | None + connection: Connection + arraysize: int + refreshts: Incomplete + maxage: int + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def __enter__(self) -> Incomplete: ... + def __exit__(self, typ: type[BaseException] | None, val: BaseException | None, tb: TracebackType | None) -> None: ... + def callproc(self, procname: str, parameters: tuple[Any, ...] = ..., overview: bool = ...) -> tuple[Any, ...]: ... + def close(self) -> None: ... + def description_ext(self) -> Sequence[tuple[Any, ...]]: ... + def execute(self, operation: str, parameters: tuple[Any, ...]) -> bool: ... + def executemany(self, operation: str, parameters: _Parameters) -> Any: ... + def executemanyprepared(self, parameters: _Parameters) -> Any: ... + def executeprepared(self, parameters: _Parameters = ...) -> Any: ... + def fetchone(self, uselob: bool = ...) -> ResultRow | None: ... + def fetchall(self) -> list[ResultRow]: ... + def fetchmany(self, size: int | None = ...) -> list[ResultRow]: ... + def getrowsaffectedcounts(self) -> tuple[Any, ...]: ... + def getpacketsize(self) -> int: ... + def get_resultset_holdability(self) -> int: ... + def getwarning(self) -> Warning | None: ... + def haswarning(self) -> bool: ... + def clearwarning(self) -> None: ... + def has_result_set(self) -> bool: ... + def nextset(self) -> None: ... + def parameter_description(self) -> tuple[str, ...]: ... + @overload + def prepare(self, operation: str, newcursor: Literal[True]) -> Cursor: ... + @overload + def prepare(self, operation: str, newcursor: Literal[False]) -> Any: ... + def print_message(self, *args: Incomplete, **kwargs: Incomplete) -> Incomplete: ... + def parsenamedquery(self, *args: Incomplete, **kwargs: Incomplete) -> Incomplete: ... + def scroll(self, value: int, mode: Literal["absolute", "relative"] = ...) -> None: ... + def server_cpu_time(self) -> int: ... + def server_memory_usage(self) -> int: ... + def server_processing_time(self) -> int: ... + def setinputsizes(self, *args: Any, **kwargs: Any) -> None: ... + def setfetchsize(self, value: int) -> None: ... + def setquerytimeout(self, value: int) -> None: ... + def setpacketsize(self, value: int) -> None: ... + def set_resultset_holdability(self, holdability: int) -> None: ... + def setoutputsize(self, *args: Any, **kwargs: Any) -> None: ... + +class Warning(Exception): + errorcode: int + errortext: str + +class Error(Exception): + errorcode: int + errortext: str + +class DatabaseError(Error): ... +class OperationalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InterfaceError(Error): ... +class InternalError(DatabaseError): ... +class DataError(DatabaseError): ... +class NotSupportedError(DatabaseError): ... + +class ExecuteManyError(Error): + errors: Incomplete + +class ExecuteManyErrorEntry(Error): + rownumber: int + +def Date(year: int, month: int, day: int) -> date: ... +def Time(hour: int, minute: int, second: int, millisecond: int = ...) -> time: ... +def Timestamp(year: int, month: int, day: int, hour: int, minute: int, second: int, millisecond: int = ...) -> datetime: ... +def DateFromTicks(ticks: float) -> date: ... +def TimeFromTicks(ticks: float) -> time: ... +def TimestampFromTicks(ticks: float) -> datetime: ... +def Binary(data: ReadableBuffer) -> memoryview: ... 
+ +Decimal = decimal.Decimal + +NUMBER: type[int] | type[float] | type[complex] +DATETIME: type[date] | type[time] | type[datetime] +STRING = str +BINARY = memoryview +ROWID = int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/hdbcli/resultrow.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/hdbcli/resultrow.pyi new file mode 100644 index 00000000..cf0ee12e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/hdbcli/hdbcli/resultrow.pyi @@ -0,0 +1,6 @@ +from typing import Any + +class ResultRow: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + column_names: tuple[str, ...] + column_values: tuple[Any, ...] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/METADATA.toml new file mode 100644 index 00000000..0d6ac713 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/METADATA.toml @@ -0,0 +1,5 @@ +version = "1.1.*" + +[tool.stubtest] +ignore_missing_stub = true +extras = ["all"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/__init__.pyi new file mode 100644 index 00000000..709bc661 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/__init__.pyi @@ -0,0 +1,6 @@ +from .html5parser import HTMLParser as HTMLParser, parse as parse, parseFragment as parseFragment +from .serializer import serialize as serialize +from .treebuilders import getTreeBuilder as getTreeBuilder +from .treewalkers import getTreeWalker as getTreeWalker + +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_ihatexml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_ihatexml.pyi new file mode 100644 index 00000000..60edb702 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_ihatexml.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete +from typing import Any + +baseChar: str +ideographic: str +combiningCharacter: str +digit: str +extender: str +letter: Any +name: Any +nameFirst: Any +reChar: Any +reCharRange: Any + +def charStringToList(chars): ... +def normaliseCharList(charList): ... + +max_unicode: Any + +def missingRanges(charList): ... +def listToRegexpStr(charList): ... +def hexToInt(hex_str): ... +def escapeRegexp(string): ... + +nonXmlNameBMPRegexp: Any +nonXmlNameFirstBMPRegexp: Any +nonPubidCharRegexp: Any + +class InfosetFilter: + replacementRegexp: Any + dropXmlnsLocalName: Any + dropXmlnsAttrNs: Any + preventDoubleDashComments: Any + preventDashAtCommentEnd: Any + replaceFormFeedCharacters: Any + preventSingleQuotePubid: Any + replaceCache: Any + def __init__( + self, + dropXmlnsLocalName: bool = ..., + dropXmlnsAttrNs: bool = ..., + preventDoubleDashComments: bool = ..., + preventDashAtCommentEnd: bool = ..., + replaceFormFeedCharacters: bool = ..., + preventSingleQuotePubid: bool = ..., + ) -> None: ... + def coerceAttribute(self, name, namespace: Incomplete | None = ...): ... + def coerceElement(self, name): ... + def coerceComment(self, data): ... 
+ def coerceCharacters(self, data): ... + def coercePubid(self, data): ... + def toXmlName(self, name): ... + def getReplacementCharacter(self, char): ... + def fromXmlName(self, name): ... + def escapeChar(self, char): ... + def unescapeChar(self, charcode): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_inputstream.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_inputstream.pyi new file mode 100644 index 00000000..00019338 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_inputstream.pyi @@ -0,0 +1,111 @@ +from _typeshed import Incomplete +from typing import Any + +spaceCharactersBytes: Any +asciiLettersBytes: Any +asciiUppercaseBytes: Any +spacesAngleBrackets: Any +invalid_unicode_no_surrogate: str +invalid_unicode_re: Any +non_bmp_invalid_codepoints: Any +ascii_punctuation_re: Any +charsUntilRegEx: Any + +class BufferedStream: + stream: Any + buffer: Any + position: Any + def __init__(self, stream) -> None: ... + def tell(self): ... + def seek(self, pos) -> None: ... + def read(self, bytes): ... + +def HTMLInputStream(source, **kwargs): ... + +class HTMLUnicodeInputStream: + reportCharacterErrors: Any + newLines: Any + charEncoding: Any + dataStream: Any + def __init__(self, source) -> None: ... + chunk: str + chunkSize: int + chunkOffset: int + errors: Any + prevNumLines: int + prevNumCols: int + def reset(self) -> None: ... + def openStream(self, source): ... + def position(self): ... + def char(self): ... + def readChunk(self, chunkSize: Incomplete | None = ...): ... + def characterErrorsUCS4(self, data) -> None: ... + def characterErrorsUCS2(self, data) -> None: ... + def charsUntil(self, characters, opposite: bool = ...): ... + def unget(self, char) -> None: ... + +class HTMLBinaryInputStream(HTMLUnicodeInputStream): + rawStream: Any + numBytesMeta: int + numBytesChardet: int + override_encoding: Any + transport_encoding: Any + same_origin_parent_encoding: Any + likely_encoding: Any + default_encoding: Any + charEncoding: Any + def __init__( + self, + source, + override_encoding: Incomplete | None = ..., + transport_encoding: Incomplete | None = ..., + same_origin_parent_encoding: Incomplete | None = ..., + likely_encoding: Incomplete | None = ..., + default_encoding: str = ..., + useChardet: bool = ..., + ) -> None: ... + dataStream: Any + def reset(self) -> None: ... + def openStream(self, source): ... + def determineEncoding(self, chardet: bool = ...): ... + def changeEncoding(self, newEncoding) -> None: ... + def detectBOM(self): ... + def detectEncodingMeta(self): ... + +class EncodingBytes(bytes): + def __new__(self, value): ... + def __init__(self, value) -> None: ... + def __iter__(self): ... + def __next__(self): ... + def next(self): ... + def previous(self): ... + def setPosition(self, position) -> None: ... + def getPosition(self): ... + position: Any + def getCurrentByte(self): ... + @property + def currentByte(self): ... + def skip(self, chars=...): ... + def skipUntil(self, chars): ... + def matchBytes(self, bytes): ... + def jumpTo(self, bytes): ... + +class EncodingParser: + data: Any + encoding: Any + def __init__(self, data) -> None: ... + def getEncoding(self): ... + def handleComment(self): ... + def handleMeta(self): ... + def handlePossibleStartTag(self): ... + def handlePossibleEndTag(self): ... + def handlePossibleTag(self, endTag): ... + def handleOther(self): ... 
+ def getAttribute(self): ... + +class ContentAttrParser: + data: Any + def __init__(self, data) -> None: ... + def parse(self): ... + +def lookupEncoding(encoding): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_tokenizer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_tokenizer.pyi new file mode 100644 index 00000000..639a4ca9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_tokenizer.pyi @@ -0,0 +1,89 @@ +from _typeshed import Incomplete +from typing import Any + +entitiesTrie: Any +attributeMap = dict + +class HTMLTokenizer: + stream: Any + parser: Any + escapeFlag: bool + lastFourChars: Any + state: Any + escape: bool + currentToken: Any + def __init__(self, stream, parser: Incomplete | None = ..., **kwargs) -> None: ... + tokenQueue: Any + def __iter__(self): ... + def consumeNumberEntity(self, isHex): ... + def consumeEntity(self, allowedChar: Incomplete | None = ..., fromAttribute: bool = ...) -> None: ... + def processEntityInAttribute(self, allowedChar) -> None: ... + def emitCurrentToken(self) -> None: ... + def dataState(self): ... + def entityDataState(self): ... + def rcdataState(self): ... + def characterReferenceInRcdata(self): ... + def rawtextState(self): ... + def scriptDataState(self): ... + def plaintextState(self): ... + def tagOpenState(self): ... + def closeTagOpenState(self): ... + def tagNameState(self): ... + temporaryBuffer: str + def rcdataLessThanSignState(self): ... + def rcdataEndTagOpenState(self): ... + def rcdataEndTagNameState(self): ... + def rawtextLessThanSignState(self): ... + def rawtextEndTagOpenState(self): ... + def rawtextEndTagNameState(self): ... + def scriptDataLessThanSignState(self): ... + def scriptDataEndTagOpenState(self): ... + def scriptDataEndTagNameState(self): ... + def scriptDataEscapeStartState(self): ... + def scriptDataEscapeStartDashState(self): ... + def scriptDataEscapedState(self): ... + def scriptDataEscapedDashState(self): ... + def scriptDataEscapedDashDashState(self): ... + def scriptDataEscapedLessThanSignState(self): ... + def scriptDataEscapedEndTagOpenState(self): ... + def scriptDataEscapedEndTagNameState(self): ... + def scriptDataDoubleEscapeStartState(self): ... + def scriptDataDoubleEscapedState(self): ... + def scriptDataDoubleEscapedDashState(self): ... + def scriptDataDoubleEscapedDashDashState(self): ... + def scriptDataDoubleEscapedLessThanSignState(self): ... + def scriptDataDoubleEscapeEndState(self): ... + def beforeAttributeNameState(self): ... + def attributeNameState(self): ... + def afterAttributeNameState(self): ... + def beforeAttributeValueState(self): ... + def attributeValueDoubleQuotedState(self): ... + def attributeValueSingleQuotedState(self): ... + def attributeValueUnQuotedState(self): ... + def afterAttributeValueState(self): ... + def selfClosingStartTagState(self): ... + def bogusCommentState(self): ... + def markupDeclarationOpenState(self): ... + def commentStartState(self): ... + def commentStartDashState(self): ... + def commentState(self): ... + def commentEndDashState(self): ... + def commentEndState(self): ... + def commentEndBangState(self): ... + def doctypeState(self): ... + def beforeDoctypeNameState(self): ... + def doctypeNameState(self): ... + def afterDoctypeNameState(self): ... + def afterDoctypePublicKeywordState(self): ... + def beforeDoctypePublicIdentifierState(self): ... 
+ def doctypePublicIdentifierDoubleQuotedState(self): ... + def doctypePublicIdentifierSingleQuotedState(self): ... + def afterDoctypePublicIdentifierState(self): ... + def betweenDoctypePublicAndSystemIdentifiersState(self): ... + def afterDoctypeSystemKeywordState(self): ... + def beforeDoctypeSystemIdentifierState(self): ... + def doctypeSystemIdentifierDoubleQuotedState(self): ... + def doctypeSystemIdentifierSingleQuotedState(self): ... + def afterDoctypeSystemIdentifierState(self): ... + def bogusDoctypeState(self): ... + def cdataSectionState(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_trie/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_trie/__init__.pyi new file mode 100644 index 00000000..7095d84c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_trie/__init__.pyi @@ -0,0 +1 @@ +from .py import Trie as Trie diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_trie/_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_trie/_base.pyi new file mode 100644 index 00000000..3b0773a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_trie/_base.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete +from abc import ABCMeta +from collections.abc import Mapping +from typing import Any + +class Trie(Mapping[Any, Any], metaclass=ABCMeta): + def keys(self, prefix: Incomplete | None = ...): ... + def has_keys_with_prefix(self, prefix): ... + def longest_prefix(self, prefix): ... + def longest_prefix_item(self, prefix): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_trie/py.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_trie/py.pyi new file mode 100644 index 00000000..ec50ef0d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_trie/py.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +from ._base import Trie as ABCTrie + +class Trie(ABCTrie): + def __init__(self, data) -> None: ... + def __contains__(self, key): ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __getitem__(self, key): ... + def keys(self, prefix: Incomplete | None = ...): ... + def has_keys_with_prefix(self, prefix): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_utils.pyi new file mode 100644 index 00000000..cfd97de7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/_utils.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any + +supports_lone_surrogates: bool + +class MethodDispatcher(dict[Any, Any]): + default: Any + def __init__(self, items=...) -> None: ... + def __getitem__(self, key): ... + def __get__(self, instance, owner: Incomplete | None = ...): ... + +class BoundMethodDispatcher(Mapping[Any, Any]): + instance: Any + dispatcher: Any + def __init__(self, instance, dispatcher) -> None: ... + def __getitem__(self, key): ... + def get(self, key, default): ... 
+ def __iter__(self): ... + def __len__(self) -> int: ... + def __contains__(self, key): ... + +def isSurrogatePair(data): ... +def surrogatePairToCodepoint(data): ... +def moduleFactoryFactory(factory): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/constants.pyi new file mode 100644 index 00000000..b8a3ac8f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/constants.pyi @@ -0,0 +1,37 @@ +from typing import Any + +EOF: Any +E: Any +namespaces: Any +scopingElements: Any +formattingElements: Any +specialElements: Any +htmlIntegrationPointElements: Any +mathmlTextIntegrationPointElements: Any +adjustSVGAttributes: Any +adjustMathMLAttributes: Any +adjustForeignAttributes: Any +unadjustForeignAttributes: Any +spaceCharacters: Any +tableInsertModeElements: Any +asciiLowercase: Any +asciiUppercase: Any +asciiLetters: Any +digits: Any +hexDigits: Any +asciiUpper2Lower: Any +headingElements: Any +voidElements: Any +cdataElements: Any +rcdataElements: Any +booleanAttributes: Any +entitiesWindows1252: Any +xmlEntities: Any +entities: Any +replacementCharacters: Any +tokenTypes: Any +tagTokenTypes: Any +prefixes: Any + +class DataLossWarning(UserWarning): ... +class _ReparseException(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/alphabeticalattributes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/alphabeticalattributes.pyi new file mode 100644 index 00000000..20b694d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/alphabeticalattributes.pyi @@ -0,0 +1,4 @@ +from . import base + +class Filter(base.Filter): + def __iter__(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/base.pyi new file mode 100644 index 00000000..166f2040 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/base.pyi @@ -0,0 +1,7 @@ +from typing import Any + +class Filter: + source: Any + def __init__(self, source) -> None: ... + def __iter__(self): ... + def __getattr__(self, name: str): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/inject_meta_charset.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/inject_meta_charset.pyi new file mode 100644 index 00000000..9cde8fde --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/inject_meta_charset.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from . import base + +class Filter(base.Filter): + encoding: Any + def __init__(self, source, encoding) -> None: ... + def __iter__(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/lint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/lint.pyi new file mode 100644 index 00000000..5803963a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/lint.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from . import base + +class Filter(base.Filter): + require_matching_tags: Any + def __init__(self, source, require_matching_tags: bool = ...) -> None: ... + def __iter__(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/optionaltags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/optionaltags.pyi new file mode 100644 index 00000000..5d53a467 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/optionaltags.pyi @@ -0,0 +1,7 @@ +from . import base + +class Filter(base.Filter): + def slider(self) -> None: ... + def __iter__(self): ... + def is_optional_start(self, tagname, previous, next): ... + def is_optional_end(self, tagname, next): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/sanitizer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/sanitizer.pyi new file mode 100644 index 00000000..176e81a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/sanitizer.pyi @@ -0,0 +1,34 @@ +from typing import Any + +from . import base + +class Filter(base.Filter): + allowed_elements: Any + allowed_attributes: Any + allowed_css_properties: Any + allowed_css_keywords: Any + allowed_svg_properties: Any + allowed_protocols: Any + allowed_content_types: Any + attr_val_is_uri: Any + svg_attr_val_allows_ref: Any + svg_allow_local_href: Any + def __init__( + self, + source, + allowed_elements=..., + allowed_attributes=..., + allowed_css_properties=..., + allowed_css_keywords=..., + allowed_svg_properties=..., + allowed_protocols=..., + allowed_content_types=..., + attr_val_is_uri=..., + svg_attr_val_allows_ref=..., + svg_allow_local_href=..., + ) -> None: ... + def __iter__(self): ... + def sanitize_token(self, token): ... + def allowed_token(self, token): ... + def disallowed_token(self, token): ... + def sanitize_css(self, style): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/whitespace.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/whitespace.pyi new file mode 100644 index 00000000..117aaf64 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/filters/whitespace.pyi @@ -0,0 +1,11 @@ +from typing import Any + +from . import base + +SPACES_REGEX: Any + +class Filter(base.Filter): + spacePreserveElements: Any + def __iter__(self): ... + +def collapse_spaces(text): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/html5parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/html5parser.pyi new file mode 100644 index 00000000..79dc56c0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/html5parser.pyi @@ -0,0 +1,57 @@ +from _typeshed import Incomplete, SupportsRead +from typing import Any, overload +from typing_extensions import Literal +from xml.etree.ElementTree import Element + +@overload +def parse( + doc: str | bytes | SupportsRead[str] | SupportsRead[bytes], + treebuilder: Literal["etree"] = ..., + namespaceHTMLElements: bool = ..., + **kwargs, +) -> Element: ... +@overload +def parse( + doc: str | bytes | SupportsRead[str] | SupportsRead[bytes], treebuilder: str, namespaceHTMLElements: bool = ..., **kwargs +): ... +def parseFragment(doc, container: str = ..., treebuilder: str = ..., namespaceHTMLElements: bool = ..., **kwargs): ... +def method_decorator_metaclass(function): ... + +class HTMLParser: + strict: Any + tree: Any + errors: Any + phases: Any + def __init__( + self, tree: Incomplete | None = ..., strict: bool = ..., namespaceHTMLElements: bool = ..., debug: bool = ... + ) -> None: ... + firstStartTag: bool + log: Any + compatMode: str + innerHTML: Any + phase: Any + lastPhase: Any + beforeRCDataPhase: Any + framesetOK: bool + def reset(self) -> None: ... + @property + def documentEncoding(self) -> str | None: ... + def isHTMLIntegrationPoint(self, element) -> bool: ... + def isMathMLTextIntegrationPoint(self, element) -> bool: ... + def mainLoop(self) -> None: ... + def parse(self, stream, scripting: bool = ..., **kwargs): ... + def parseFragment(self, stream, *args, **kwargs): ... + def parseError(self, errorcode: str = ..., datavars: Incomplete | None = ...) -> None: ... + def adjustMathMLAttributes(self, token) -> None: ... + def adjustSVGAttributes(self, token) -> None: ... + def adjustForeignAttributes(self, token) -> None: ... + def reparseTokenNormal(self, token) -> None: ... + def resetInsertionMode(self) -> None: ... + originalPhase: Any + def parseRCDataRawtext(self, token, contentType) -> None: ... + +def getPhases(debug): ... +def adjust_attributes(token, replacements) -> None: ... +def impliedTagToken(name, type: str = ..., attributes: Incomplete | None = ..., selfClosing: bool = ...): ... + +class ParseError(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/serializer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/serializer.pyi new file mode 100644 index 00000000..dc505f24 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/serializer.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete +from typing import Any, overload + +def htmlentityreplace_errors(exc: Exception) -> tuple[str | bytes, int]: ... +@overload +def serialize(input, tree: str = ..., encoding: None = ..., **serializer_opts) -> str: ... +@overload +def serialize(input, tree: str, encoding: str, **serializer_opts) -> bytes: ... +@overload +def serialize(input, *, encoding: str, **serializer_opts) -> bytes: ... 
+ +class HTMLSerializer: + quote_attr_values: str + quote_char: str + use_best_quote_char: bool + omit_optional_tags: bool + minimize_boolean_attributes: bool + use_trailing_solidus: bool + space_before_trailing_solidus: bool + escape_lt_in_attrs: bool + escape_rcdata: bool + resolve_entities: bool + alphabetical_attributes: bool + inject_meta_charset: bool + strip_whitespace: bool + sanitize: bool + options: Any + errors: Any + strict: bool + def __init__(self, **kwargs) -> None: ... + def encode(self, string): ... + def encodeStrict(self, string): ... + encoding: Any + def serialize(self, treewalker, encoding: Incomplete | None = ...) -> None: ... + def render(self, treewalker, encoding: Incomplete | None = ...): ... + def serializeError(self, data: str = ...) -> None: ... + +class SerializeError(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treeadapters/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treeadapters/__init__.pyi new file mode 100644 index 00000000..bf126748 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treeadapters/__init__.pyi @@ -0,0 +1 @@ +from . import genshi as genshi, sax as sax diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treeadapters/genshi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treeadapters/genshi.pyi new file mode 100644 index 00000000..fa42d5d0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treeadapters/genshi.pyi @@ -0,0 +1 @@ +def to_genshi(walker) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treeadapters/sax.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treeadapters/sax.pyi new file mode 100644 index 00000000..87ebcb1c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treeadapters/sax.pyi @@ -0,0 +1,5 @@ +from typing import Any + +prefix_mapping: Any + +def to_sax(walker, handler) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/__init__.pyi new file mode 100644 index 00000000..f577600b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/__init__.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete +from typing import Any + +treeBuilderCache: Any + +def getTreeBuilder(treeType, implementation: Incomplete | None = ..., **kwargs): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/base.pyi new file mode 100644 index 00000000..b1623bae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/base.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete +from typing import Any + +Marker: Any +listElementsMap: Any + +class Node: + name: Any + parent: Any + value: Any + attributes: Any + childNodes: Any + def __init__(self, name) -> None: ... + def appendChild(self, node) -> None: ... + def insertText(self, data, insertBefore: Incomplete | None = ...) -> None: ... + def insertBefore(self, node, refNode) -> None: ... + def removeChild(self, node) -> None: ... + def reparentChildren(self, newParent) -> None: ... + def cloneNode(self) -> None: ... + def hasContent(self) -> None: ... + +class ActiveFormattingElements(list[Any]): + def append(self, node) -> None: ... + def nodesEqual(self, node1, node2): ... + +class TreeBuilder: + documentClass: Any + elementClass: Any + commentClass: Any + doctypeClass: Any + fragmentClass: Any + defaultNamespace: str + def __init__(self, namespaceHTMLElements) -> None: ... + openElements: Any + activeFormattingElements: Any + headPointer: Any + formPointer: Any + insertFromTable: bool + document: Any + def reset(self) -> None: ... + def elementInScope(self, target, variant: Incomplete | None = ...): ... + def reconstructActiveFormattingElements(self) -> None: ... + def clearActiveFormattingElements(self) -> None: ... + def elementInActiveFormattingElements(self, name): ... + def insertRoot(self, token) -> None: ... + def insertDoctype(self, token) -> None: ... + def insertComment(self, token, parent: Incomplete | None = ...) -> None: ... + def createElement(self, token): ... + def insertElementNormal(self, token): ... + def insertElementTable(self, token): ... + def insertText(self, data, parent: Incomplete | None = ...) -> None: ... + def getTableMisnestedNodePosition(self): ... + def generateImpliedEndTags(self, exclude: Incomplete | None = ...) -> None: ... + def getDocument(self): ... + def getFragment(self): ... + def testSerializer(self, node) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/dom.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/dom.pyi new file mode 100644 index 00000000..12de3344 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/dom.pyi @@ -0,0 +1,5 @@ +from typing import Any + +def getDomBuilder(DomImplementation): ... + +getDomModule: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/etree.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/etree.pyi new file mode 100644 index 00000000..f4b8e54b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/etree.pyi @@ -0,0 +1,7 @@ +from typing import Any + +tag_regexp: Any + +def getETreeBuilder(ElementTreeImplementation, fullTree: bool = ...): ... 
+ +getETreeModule: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/etree_lxml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/etree_lxml.pyi new file mode 100644 index 00000000..1f76a6fa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treebuilders/etree_lxml.pyi @@ -0,0 +1,45 @@ +from _typeshed import Incomplete +from typing import Any + +from . import base + +fullTree: bool +tag_regexp: Any +comment_type: Any + +class DocumentType: + name: Any + publicId: Any + systemId: Any + def __init__(self, name, publicId, systemId) -> None: ... + +class Document: + def __init__(self) -> None: ... + def appendChild(self, element) -> None: ... + @property + def childNodes(self): ... + +def testSerializer(element): ... +def tostring(element): ... + +class TreeBuilder(base.TreeBuilder): + documentClass: Any + doctypeClass: Any + elementClass: Any + commentClass: Any + fragmentClass: Any + implementation: Any + namespaceHTMLElements: Any + def __init__(self, namespaceHTMLElements, fullTree: bool = ...): ... + insertComment: Any + initial_comments: Any + doctype: Any + def reset(self) -> None: ... + def testSerializer(self, element): ... + def getDocument(self): ... + def getFragment(self): ... + def insertDoctype(self, token) -> None: ... + def insertCommentInitial(self, data, parent: Incomplete | None = ...) -> None: ... + def insertCommentMain(self, data, parent: Incomplete | None = ...) -> None: ... + document: Any + def insertRoot(self, token) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/__init__.pyi new file mode 100644 index 00000000..0eda7003 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/__init__.pyi @@ -0,0 +1,4 @@ +from _typeshed import Incomplete + +def getTreeWalker(treeType, implementation: Incomplete | None = ..., **kwargs): ... +def pprint(walker): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/base.pyi new file mode 100644 index 00000000..919b2201 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/base.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete +from typing import Any + +DOCUMENT: Any +DOCTYPE: Any +TEXT: Any +ELEMENT: Any +COMMENT: Any +ENTITY: Any +UNKNOWN: str + +class TreeWalker: + tree: Any + def __init__(self, tree) -> None: ... + def __iter__(self): ... + def error(self, msg): ... + def emptyTag(self, namespace, name, attrs, hasChildren: bool = ...) -> None: ... + def startTag(self, namespace, name, attrs): ... + def endTag(self, namespace, name): ... + def text(self, data) -> None: ... + def comment(self, data): ... + def doctype(self, name, publicId: Incomplete | None = ..., systemId: Incomplete | None = ...): ... + def entity(self, name): ... + def unknown(self, nodeType): ... + +class NonRecursiveTreeWalker(TreeWalker): + def getNodeDetails(self, node) -> None: ... + def getFirstChild(self, node) -> None: ... 
+ def getNextSibling(self, node) -> None: ... + def getParentNode(self, node) -> None: ... + def __iter__(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/dom.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/dom.pyi new file mode 100644 index 00000000..72a71d00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/dom.pyi @@ -0,0 +1,7 @@ +from .base import NonRecursiveTreeWalker + +class TreeWalker(NonRecursiveTreeWalker): + def getNodeDetails(self, node): ... + def getFirstChild(self, node): ... + def getNextSibling(self, node): ... + def getParentNode(self, node): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/etree.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/etree.pyi new file mode 100644 index 00000000..33f6d800 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/etree.pyi @@ -0,0 +1,7 @@ +from typing import Any + +tag_regexp: Any + +def getETreeBuilder(ElementTreeImplementation): ... + +getETreeModule: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/etree_lxml.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/etree_lxml.pyi new file mode 100644 index 00000000..6ef2c483 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/etree_lxml.pyi @@ -0,0 +1,54 @@ +from typing import Any + +from .base import NonRecursiveTreeWalker + +def ensure_str(s): ... + +class Root: + elementtree: Any + children: Any + text: Any + tail: Any + def __init__(self, et) -> None: ... + def __getitem__(self, key): ... + def getnext(self) -> None: ... + def __len__(self) -> int: ... + +class Doctype: + root_node: Any + name: Any + public_id: Any + system_id: Any + text: Any + tail: Any + def __init__(self, root_node, name, public_id, system_id) -> None: ... + def getnext(self): ... + +class FragmentRoot(Root): + children: Any + text: Any + def __init__(self, children) -> None: ... + def getnext(self) -> None: ... + +class FragmentWrapper: + root_node: Any + obj: Any + text: Any + tail: Any + def __init__(self, fragment_root, obj) -> None: ... + def __getattr__(self, name: str): ... + def getnext(self): ... + def __getitem__(self, key): ... + def __bool__(self) -> bool: ... + def getparent(self) -> None: ... + def __unicode__(self) -> str: ... + def __len__(self) -> int: ... + +class TreeWalker(NonRecursiveTreeWalker): + fragmentChildren: Any + filter: Any + def __init__(self, tree) -> None: ... + def getNodeDetails(self, node): ... + def getFirstChild(self, node): ... + def getNextSibling(self, node): ... + def getParentNode(self, node): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/genshi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/genshi.pyi new file mode 100644 index 00000000..2e75daf2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/html5lib/html5lib/treewalkers/genshi.pyi @@ -0,0 +1,5 @@ +from . 
import base + +class TreeWalker(base.TreeWalker): + def __iter__(self): ... + def tokens(self, event, next) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..6b90fbda --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# __getattr__() replaced with actual field in stub +httplib2.Response.dict diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/METADATA.toml new file mode 100644 index 00000000..e22598a6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/METADATA.toml @@ -0,0 +1 @@ +version = "0.21.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/__init__.pyi new file mode 100644 index 00000000..485c0917 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/__init__.pyi @@ -0,0 +1,188 @@ +import http.client +from _typeshed import Incomplete +from collections.abc import Generator +from typing import Any, ClassVar +from typing_extensions import Self + +from .error import * + +__author__: str +__copyright__: str +__contributors__: list[str] +__license__: str +__version__: str + +debuglevel: int +RETRIES: int + +class Authentication: + path: Any + host: Any + credentials: Any + http: Any + def __init__(self, credentials, host, request_uri, headers, response, content, http) -> None: ... + def depth(self, request_uri): ... + def inscope(self, host, request_uri): ... + def request(self, method, request_uri, headers, content) -> None: ... + def response(self, response, content): ... + def __eq__(self, auth): ... + def __ne__(self, auth): ... + def __lt__(self, auth): ... + def __gt__(self, auth): ... + def __le__(self, auth): ... + def __ge__(self, auth): ... + def __bool__(self) -> bool: ... + +class BasicAuthentication(Authentication): + def __init__(self, credentials, host, request_uri, headers, response, content, http) -> None: ... + def request(self, method, request_uri, headers, content) -> None: ... + +class DigestAuthentication(Authentication): + challenge: Any + A1: Any + def __init__(self, credentials, host, request_uri, headers, response, content, http) -> None: ... + def request(self, method, request_uri, headers, content, cnonce: Incomplete | None = ...): ... + def response(self, response, content): ... + +class HmacDigestAuthentication(Authentication): + challenge: Any + hashmod: Any + pwhashmod: Any + key: Any + __author__: ClassVar[str] + def __init__(self, credentials, host, request_uri, headers, response, content, http) -> None: ... + def request(self, method, request_uri, headers, content) -> None: ... + def response(self, response, content): ... + +class WsseAuthentication(Authentication): + def __init__(self, credentials, host, request_uri, headers, response, content, http) -> None: ... + def request(self, method, request_uri, headers, content) -> None: ... 
+ +class GoogleLoginAuthentication(Authentication): + Auth: str + def __init__(self, credentials, host, request_uri, headers, response, content, http) -> None: ... + def request(self, method, request_uri, headers, content) -> None: ... + +class FileCache: + cache: Any + safe: Any + def __init__(self, cache, safe=...) -> None: ... + def get(self, key): ... + def set(self, key, value) -> None: ... + def delete(self, key) -> None: ... + +class Credentials: + credentials: Any + def __init__(self) -> None: ... + def add(self, name, password, domain: str = ...) -> None: ... + def clear(self) -> None: ... + def iter(self, domain) -> Generator[tuple[str, str], None, None]: ... + +class KeyCerts(Credentials): + def add(self, key, cert, domain, password) -> None: ... # type: ignore[override] + def iter(self, domain) -> Generator[tuple[str, str, str], None, None]: ... # type: ignore[override] + +class AllHosts: ... + +class ProxyInfo: + bypass_hosts: Any + def __init__( + self, + proxy_type, + proxy_host, + proxy_port, + proxy_rdns: bool = ..., + proxy_user: Incomplete | None = ..., + proxy_pass: Incomplete | None = ..., + proxy_headers: Incomplete | None = ..., + ) -> None: ... + def astuple(self): ... + def isgood(self): ... + def applies_to(self, hostname): ... + def bypass_host(self, hostname): ... + +class HTTPConnectionWithTimeout(http.client.HTTPConnection): + proxy_info: Any + def __init__( + self, host, port: Incomplete | None = ..., timeout: Incomplete | None = ..., proxy_info: Incomplete | None = ... + ) -> None: ... + sock: Any + def connect(self) -> None: ... + +class HTTPSConnectionWithTimeout(http.client.HTTPSConnection): + disable_ssl_certificate_validation: Any + ca_certs: Any + proxy_info: Any + key_file: Any + cert_file: Any + key_password: Any + def __init__( + self, + host, + port: Incomplete | None = ..., + key_file: Incomplete | None = ..., + cert_file: Incomplete | None = ..., + timeout: Incomplete | None = ..., + proxy_info: Incomplete | None = ..., + ca_certs: Incomplete | None = ..., + disable_ssl_certificate_validation: bool = ..., + tls_maximum_version: Incomplete | None = ..., + tls_minimum_version: Incomplete | None = ..., + key_password: Incomplete | None = ..., + ) -> None: ... + sock: Any + def connect(self) -> None: ... + +class Http: + proxy_info: Any + ca_certs: Any + disable_ssl_certificate_validation: Any + tls_maximum_version: Any + tls_minimum_version: Any + connections: Any + cache: Any + credentials: Any + certificates: Any + authorizations: Any + follow_redirects: bool + redirect_codes: Any + optimistic_concurrency_methods: Any + safe_methods: Any + follow_all_redirects: bool + ignore_etag: bool + force_exception_to_status_code: bool + timeout: Any + forward_authorization_headers: bool + def __init__( + self, + cache: Incomplete | None = ..., + timeout: Incomplete | None = ..., + proxy_info=..., + ca_certs: Incomplete | None = ..., + disable_ssl_certificate_validation: bool = ..., + tls_maximum_version: Incomplete | None = ..., + tls_minimum_version: Incomplete | None = ..., + ) -> None: ... + def close(self) -> None: ... + def add_credentials(self, name, password, domain: str = ...) -> None: ... + def add_certificate(self, key, cert, domain, password: Incomplete | None = ...) -> None: ... + def clear_credentials(self) -> None: ... + def request( + self, + uri, + method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + redirections=..., + connection_type: Incomplete | None = ..., + ): ... 
+ +class Response(dict[str, Any]): + fromcache: bool + version: int + status: int + reason: str + previous: Any + def __init__(self, info) -> None: ... + @property + def dict(self) -> Self: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/auth.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/auth.pyi new file mode 100644 index 00000000..d7168f15 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/auth.pyi @@ -0,0 +1,15 @@ +from typing import Any + +UNQUOTE_PAIRS: Any +unquote: Any +tchar: Any +token: Any +token68: Any +quoted_string: Any +auth_param_name: Any +auth_param: Any +params: Any +scheme: Any +challenge: Any +authentication_info: Any +www_authenticate: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/certs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/certs.pyi new file mode 100644 index 00000000..c0a5ce2b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/certs.pyi @@ -0,0 +1,9 @@ +from typing import Any + +certifi_available: bool +certifi_where: Any +custom_ca_locater_available: bool +custom_ca_locater_where: Any +BUILTIN_CA_CERTS: Any + +def where(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/error.pyi new file mode 100644 index 00000000..1287702e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/error.pyi @@ -0,0 +1,18 @@ +from typing import Any + +class HttpLib2Error(Exception): ... + +class HttpLib2ErrorWithResponse(HttpLib2Error): + response: Any + content: Any + def __init__(self, desc, response, content) -> None: ... + +class RedirectMissingLocation(HttpLib2ErrorWithResponse): ... +class RedirectLimit(HttpLib2ErrorWithResponse): ... +class FailedToDecompressContent(HttpLib2ErrorWithResponse): ... +class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): ... +class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): ... +class MalformedHeader(HttpLib2Error): ... +class RelativeURIError(HttpLib2Error): ... +class ServerNotFoundError(HttpLib2Error): ... +class ProxiesUnavailableError(HttpLib2Error): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/iri2uri.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/iri2uri.pyi new file mode 100644 index 00000000..7365d894 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/iri2uri.pyi @@ -0,0 +1,12 @@ +from typing import Any + +__author__: str +__copyright__: str +__contributors__: list[str] +__version__: str +__license__: str + +escape_range: Any + +def encode(c): ... +def iri2uri(uri): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/socks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/socks.pyi new file mode 100644 index 00000000..ab1cff46 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/httplib2/httplib2/socks.pyi @@ -0,0 +1,42 @@ +import socket +from _typeshed import Incomplete + +PROXY_TYPE_SOCKS4: int +PROXY_TYPE_SOCKS5: int +PROXY_TYPE_HTTP: int +PROXY_TYPE_HTTP_NO_TUNNEL: int + +class ProxyError(Exception): ... +class GeneralProxyError(ProxyError): ... +class Socks5AuthError(ProxyError): ... +class Socks5Error(ProxyError): ... +class Socks4Error(ProxyError): ... +class HTTPError(ProxyError): ... + +def setdefaultproxy( + proxytype: Incomplete | None = ..., + addr: Incomplete | None = ..., + port: Incomplete | None = ..., + rdns: bool = ..., + username: Incomplete | None = ..., + password: Incomplete | None = ..., +) -> None: ... +def wrapmodule(module) -> None: ... + +class socksocket(socket.socket): + def __init__(self, family=..., type=..., proto: int = ..., _sock: Incomplete | None = ...) -> None: ... + def sendall(self, content, *args): ... + def setproxy( + self, + proxytype: Incomplete | None = ..., + addr: Incomplete | None = ..., + port: Incomplete | None = ..., + rdns: bool = ..., + username: Incomplete | None = ..., + password: Incomplete | None = ..., + headers: Incomplete | None = ..., + ) -> None: ... + def getproxysockname(self): ... + def getproxypeername(self): ... + def getpeername(self): ... + def connect(self, destpair) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..a7ffb9bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/@tests/stubtest_allowlist.txt @@ -0,0 +1,11 @@ +humanfriendly.compat.StringIO.seek +humanfriendly.compat.StringIO.truncate + +# Re-exports: +humanfriendly.usage.import_module +humanfriendly.compat.which +humanfriendly.compat.name2codepoint +humanfriendly.compat.monotonic + +# Tests are not included into stubs: +humanfriendly.tests diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/METADATA.toml new file mode 100644 index 00000000..21700a74 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/METADATA.toml @@ -0,0 +1,4 @@ +version = "10.0.*" + +[tool.stubtest] +stubtest_requirements = ["docutils", "mock"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/__init__.pyi new file mode 100644 index 00000000..7e776182 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/__init__.pyi @@ -0,0 +1,57 @@ +import datetime +from _typeshed import Incomplete +from re import Pattern +from types import TracebackType +from typing import Any, NamedTuple + +class SizeUnit(NamedTuple): + divider: int + symbol: str + name: str + +class 
CombinedUnit(NamedTuple): + decimal: SizeUnit + binary: SizeUnit + +disk_size_units: Any +length_size_units: Any +time_units: Any + +def coerce_boolean(value: object) -> bool: ... +def coerce_pattern(value: str | Pattern[str], flags: int = ...) -> Pattern[str]: ... +def coerce_seconds(value: float | datetime.timedelta) -> float: ... +def format_size(num_bytes: float, keep_width: bool = ..., binary: bool = ...) -> str: ... +def parse_size(size: str, binary: bool = ...) -> int: ... +def format_length(num_metres: float, keep_width: bool = ...) -> str: ... +def parse_length(length: str) -> float: ... +def format_number(number: float, num_decimals: int = ...) -> str: ... +def round_number(count: float, keep_width: bool = ...) -> str: ... +def format_timespan(num_seconds: float | datetime.timedelta, detailed: bool = ..., max_units: int = ...) -> str: ... +def parse_timespan(timespan: str) -> float: ... +def parse_date(datestring: str) -> tuple[int, int, int, int, int, int]: ... +def format_path(pathname: str) -> str: ... +def parse_path(pathname: str) -> str: ... + +class Timer: + monotonic: bool + resumable: bool + start_time: float + total_time: float + def __init__(self, start_time: Incomplete | None = ..., resumable: bool = ...) -> None: ... + def __enter__(self): ... + def __exit__( + self, + exc_type: type[BaseException] | None = ..., + exc_value: BaseException | None = ..., + traceback: TracebackType | None = ..., + ) -> None: ... + def sleep(self, seconds: float) -> None: ... + @property + def elapsed_time(self): ... + @property + def rounded(self): ... + +class InvalidDate(Exception): ... +class InvalidSize(Exception): ... +class InvalidLength(Exception): ... +class InvalidTimespan(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/case.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/case.pyi new file mode 100644 index 00000000..1ab0b017 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/case.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete +from collections import OrderedDict +from typing import Generic, TypeVar + +from humanfriendly.compat import unicode + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + +class CaseInsensitiveDict(OrderedDict[_KT, _VT], Generic[_KT, _VT]): + def __init__(self, other: Incomplete | None = ..., **kw) -> None: ... + def coerce_key(self, key): ... + @classmethod + def fromkeys(cls, iterable, value: Incomplete | None = ...): ... + def get(self, key, default: Incomplete | None = ...): ... + def pop(self, key, default: Incomplete | None = ...): ... + def setdefault(self, key, default: Incomplete | None = ...): ... + def update(self, other: Incomplete | None = ..., **kw) -> None: ... # type: ignore[override] + def __contains__(self, key): ... + def __delitem__(self, key) -> None: ... + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + +class CaseInsensitiveKey(unicode): + def __new__(cls, value): ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/cli.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/cli.pyi new file mode 100644 index 00000000..a97befdc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/cli.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +def main() -> None: ... +def run_command(command_line) -> None: ... +def print_formatted_length(value) -> None: ... +def print_formatted_number(value) -> None: ... +def print_formatted_size(value, binary) -> None: ... +def print_formatted_table(delimiter) -> None: ... +def print_formatted_timespan(value) -> None: ... +def print_parsed_length(value) -> None: ... +def print_parsed_size(value) -> None: ... +def demonstrate_ansi_formatting() -> None: ... +def demonstrate_256_colors(i, j, group: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/compat.pyi new file mode 100644 index 00000000..c0f19717 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/compat.pyi @@ -0,0 +1,13 @@ +from html.parser import HTMLParser as HTMLParser +from io import StringIO as StringIO + +unicode = str +unichr = chr +basestring = str +interactive_prompt = input + +def coerce_string(value): ... +def is_string(value): ... +def is_unicode(value): ... +def on_macos(): ... +def on_windows(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/decorators.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/decorators.pyi new file mode 100644 index 00000000..e961fd39 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/decorators.pyi @@ -0,0 +1,3 @@ +RESULTS_ATTRIBUTE: str + +def cached(function): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/deprecation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/deprecation.pyi new file mode 100644 index 00000000..db18163b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/deprecation.pyi @@ -0,0 +1,14 @@ +import types +from typing import Any + +def define_aliases(module_name, **aliases) -> None: ... +def get_aliases(module_name): ... +def deprecated_args(*names): ... +def is_method(function): ... + +class DeprecationProxy(types.ModuleType): + module: Any + aliases: Any + def __init__(self, module, aliases) -> None: ... + def __getattr__(self, name: str): ... + def resolve(self, target): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/prompts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/prompts.pyi new file mode 100644 index 00000000..909fa611 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/prompts.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +MAX_ATTEMPTS: int +logger: Any + +def prompt_for_confirmation(question, default: Incomplete | None = ..., padding: bool = ...): ... +def prompt_for_choice(choices, default: Incomplete | None = ..., padding: bool = ...): ... +def prompt_for_input(question, default: Incomplete | None = ..., padding: bool = ..., strip: bool = ...): ... +def prepare_prompt_text(prompt_text, **options): ... +def prepare_friendly_prompts() -> None: ... +def retry_limit(limit=...) -> None: ... + +class TooManyInvalidReplies(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/sphinx.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/sphinx.pyi new file mode 100644 index 00000000..eb6a6354 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/sphinx.pyi @@ -0,0 +1,15 @@ +from typing import Any + +logger: Any + +def deprecation_note_callback(app, what, name, obj, options, lines) -> None: ... +def enable_deprecation_notes(app) -> None: ... +def enable_man_role(app) -> None: ... +def enable_pypi_role(app) -> None: ... +def enable_special_methods(app) -> None: ... +def enable_usage_formatting(app) -> None: ... +def man_role(role, rawtext, text, lineno, inliner, options=..., content=...): ... +def pypi_role(role, rawtext, text, lineno, inliner, options=..., content=...): ... +def setup(app): ... +def special_methods_callback(app, what, name, obj, skip, options): ... +def usage_message_callback(app, what, name, obj, options, lines) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/tables.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/tables.pyi new file mode 100644 index 00000000..658a5832 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/tables.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +def format_smart_table(data, column_names): ... +def format_pretty_table(data, column_names: Incomplete | None = ..., horizontal_bar: str = ..., vertical_bar: str = ...): ... +def format_robust_table(data, column_names): ... +def format_rst_table(data, column_names: Incomplete | None = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/terminal/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/terminal/__init__.pyi new file mode 100644 index 00000000..f7c26eaa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/terminal/__init__.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete +from typing import Any + +ANSI_CSI: str +ANSI_SGR: str +ANSI_ERASE_LINE: Any +ANSI_RESET: Any +ANSI_HIDE_CURSOR: Any +ANSI_SHOW_CURSOR: Any +ANSI_COLOR_CODES: Any +ANSI_TEXT_STYLES: Any +CLEAN_OUTPUT_PATTERN: Any +DEFAULT_LINES: int +DEFAULT_COLUMNS: int +DEFAULT_ENCODING: str +HIGHLIGHT_COLOR: Any + +def ansi_strip(text, readline_hints: bool = ...): ... +def ansi_style(**kw): ... +def ansi_width(text): ... +def ansi_wrap(text, **kw): ... +def auto_encode(stream, text, *args, **kw) -> None: ... +def clean_terminal_output(text): ... +def connected_to_terminal(stream: Incomplete | None = ...): ... +def enable_ansi_support(): ... +def find_terminal_size(): ... +def find_terminal_size_using_ioctl(stream): ... +def find_terminal_size_using_stty(): ... +def get_pager_command(text: Incomplete | None = ...): ... +def have_windows_native_ansi_support(): ... +def message(text, *args, **kw) -> None: ... +def output(text, *args, **kw) -> None: ... +def readline_strip(expr): ... +def readline_wrap(expr): ... +def show_pager(formatted_text, encoding=...) -> None: ... +def terminal_supports_colors(stream: Incomplete | None = ...): ... +def usage(usage_text) -> None: ... +def warning(text, *args, **kw) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/terminal/html.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/terminal/html.pyi new file mode 100644 index 00000000..2525fc4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/terminal/html.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete +from typing import Any + +from humanfriendly.compat import HTMLParser + +def html_to_ansi(data, callback: Incomplete | None = ...): ... + +class HTMLConverter(HTMLParser): + BLOCK_TAGS: Any + callback: Any + output: Any + def __init__(self, *args, **kw) -> None: ... + def __call__(self, data): ... + @property + def current_style(self): ... + stack: Any + def close(self) -> None: ... + def emit_style(self, style: Incomplete | None = ...) -> None: ... + def handle_charref(self, value) -> None: ... + link_text: Any + def handle_data(self, data) -> None: ... + def handle_endtag(self, tag) -> None: ... + def handle_entityref(self, name) -> None: ... + link_url: Any + def handle_starttag(self, tag, attrs) -> None: ... + def normalize_url(self, url): ... + def parse_color(self, value): ... + def push_styles(self, **changes) -> None: ... + def render_url(self, url): ... + preformatted_text_level: int + def reset(self) -> None: ... + def urls_match(self, a, b): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/terminal/spinners.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/terminal/spinners.pyi new file mode 100644 index 00000000..307201e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/terminal/spinners.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete +from types import TracebackType +from typing import Any + +GLYPHS: Any +MINIMUM_INTERVAL: float + +class Spinner: + interactive: Any + interval: Any + label: Any + states: Any + stream: Any + timer: Any + total: Any + counter: int + last_update: int + def __init__(self, **options) -> None: ... + def step(self, progress: int = ..., label: Incomplete | None = ...) -> None: ... + def sleep(self) -> None: ... + def clear(self) -> None: ... + def __enter__(self): ... + def __exit__( + self, + exc_type: type[BaseException] | None = ..., + exc_value: BaseException | None = ..., + traceback: TracebackType | None = ..., + ) -> None: ... + +class AutomaticSpinner: + label: Any + show_time: Any + shutdown_event: Any + subprocess: Any + def __init__(self, label, show_time: bool = ...) -> None: ... + def __enter__(self) -> None: ... + def __exit__( + self, + exc_type: type[BaseException] | None = ..., + exc_value: BaseException | None = ..., + traceback: TracebackType | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/testing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/testing.pyi new file mode 100644 index 00000000..3cdadf04 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/testing.pyi @@ -0,0 +1,87 @@ +import unittest +from _typeshed import Incomplete +from types import TracebackType +from typing import Any + +from humanfriendly.compat import StringIO + +def configure_logging(log_level=...) -> None: ... +def make_dirs(pathname) -> None: ... +def retry(func, timeout: int = ..., exc_type=...): ... +def run_cli(entry_point, *arguments, **options): ... +def skip_on_raise(*exc_types): ... +def touch(filename) -> None: ... + +class CallableTimedOut(Exception): ... + +class ContextManager: + def __enter__(self): ... + def __exit__( + self, + exc_type: type[BaseException] | None = ..., + exc_value: BaseException | None = ..., + traceback: TracebackType | None = ..., + ) -> None: ... + +class PatchedAttribute(ContextManager): + object_to_patch: Any + attribute_to_patch: Any + patched_value: Any + original_value: Any + def __init__(self, obj, name, value) -> None: ... + def __enter__(self): ... + +class PatchedItem(ContextManager): + object_to_patch: Any + item_to_patch: Any + patched_value: Any + original_value: Any + def __init__(self, obj, item, value) -> None: ... + def __enter__(self): ... + +class TemporaryDirectory(ContextManager): + mkdtemp_options: Any + temporary_directory: Any + def __init__(self, **options) -> None: ... + def __enter__(self): ... + +class MockedHomeDirectory(PatchedItem, TemporaryDirectory): + def __init__(self) -> None: ... + patched_value: Any + def __enter__(self): ... + +class CustomSearchPath(PatchedItem, TemporaryDirectory): + isolated_search_path: Any + def __init__(self, isolated: bool = ...) -> None: ... + patched_value: Any + def __enter__(self): ... 
+ @property + def current_search_path(self): ... + +class MockedProgram(CustomSearchPath): + program_name: Any + program_returncode: Any + program_script: Any + program_signal_file: Any + def __init__(self, name, returncode: int = ..., script: Incomplete | None = ...) -> None: ... + def __enter__(self): ... + def __exit__(self, *args, **kw): ... + +class CaptureOutput(ContextManager): + stdin: Any + stdout: Any + stderr: Any + patched_attributes: Any + def __init__(self, merged: bool = ..., input: str = ..., enabled: bool = ...) -> None: ... + def __enter__(self): ... + def get_lines(self): ... + def get_text(self): ... + def getvalue(self): ... + +class CaptureBuffer(StringIO): + def get_lines(self): ... + def get_text(self): ... + +class TestCase(unittest.TestCase): + def __init__(self, *args, **kw) -> None: ... + def setUp(self, log_level=...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/text.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/text.pyi new file mode 100644 index 00000000..98cf0588 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/text.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +def compact(text, *args, **kw): ... +def compact_empty_lines(text): ... +def concatenate(items, conjunction: str = ..., serial_comma: bool = ...): ... +def dedent(text, *args, **kw): ... +def format(text, *args, **kw): ... +def generate_slug(text, delimiter: str = ...): ... +def is_empty_line(text): ... +def join_lines(text): ... +def pluralize(count, singular, plural: Incomplete | None = ...): ... +def pluralize_raw(count, singular, plural: Incomplete | None = ...): ... +def random_string(length=..., characters=...): ... +def split(text, delimiter: str = ...): ... +def split_paragraphs(text): ... +def tokenize(text): ... +def trim_empty_lines(text): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/usage.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/usage.pyi new file mode 100644 index 00000000..9ebfb108 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/humanfriendly/humanfriendly/usage.pyi @@ -0,0 +1,7 @@ +USAGE_MARKER: str + +def format_usage(usage_text): ... +def find_meta_variables(usage_text): ... +def parse_usage(text): ... +def render_usage(text): ... +def inject_usage(module_name) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ibm-db/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ibm-db/METADATA.toml new file mode 100644 index 00000000..84307529 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ibm-db/METADATA.toml @@ -0,0 +1 @@ +version = "3.1.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ibm-db/ibm_db.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ibm-db/ibm_db.pyi new file mode 100644 index 00000000..ba306350 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ibm-db/ibm_db.pyi @@ -0,0 +1,258 @@ +from typing import Any, overload +from typing_extensions import Self, final + +ATTR_CASE: int +CASE_LOWER: int +CASE_NATURAL: int +CASE_UPPER: int +PARAM_FILE: int +QUOTED_LITERAL_REPLACEMENT_OFF: int +QUOTED_LITERAL_REPLACEMENT_ON: int +SQL_API_SQLROWCOUNT: int +SQL_ATTR_AUTOCOMMIT: int +SQL_ATTR_CURRENT_SCHEMA: int +SQL_ATTR_CURSOR_TYPE: int +SQL_ATTR_INFO_ACCTSTR: int +SQL_ATTR_INFO_APPLNAME: int +SQL_ATTR_INFO_PROGRAMNAME: int +SQL_ATTR_INFO_USERID: int +SQL_ATTR_INFO_WRKSTNNAME: int +SQL_ATTR_PARAMSET_SIZE: int +SQL_ATTR_PARAM_BIND_TYPE: int +SQL_ATTR_QUERY_TIMEOUT: int +SQL_ATTR_ROWCOUNT_PREFETCH: int +SQL_ATTR_TRUSTED_CONTEXT_PASSWORD: int +SQL_ATTR_TRUSTED_CONTEXT_USERID: int +SQL_ATTR_USE_TRUSTED_CONTEXT: int +SQL_ATTR_XML_DECLARATION: int +SQL_AUTOCOMMIT_OFF: int +SQL_AUTOCOMMIT_ON: int +SQL_BIGINT: int +SQL_BINARY: int +SQL_BIT: int +SQL_BLOB: int +SQL_BLOB_LOCATOR: int +SQL_BOOLEAN: int +SQL_CHAR: int +SQL_CLOB: int +SQL_CLOB_LOCATOR: int +SQL_CURSOR_DYNAMIC: int +SQL_CURSOR_FORWARD_ONLY: int +SQL_CURSOR_KEYSET_DRIVEN: int +SQL_CURSOR_STATIC: int +SQL_DBCLOB: int +SQL_DBCLOB_LOCATOR: int +SQL_DBMS_NAME: int +SQL_DBMS_VER: int +SQL_DECFLOAT: int +SQL_DECIMAL: int +SQL_DOUBLE: int +SQL_FALSE: int +SQL_FLOAT: int +SQL_GRAPHIC: int +SQL_INDEX_CLUSTERED: int +SQL_INDEX_OTHER: int +SQL_INTEGER: int +SQL_LONGVARBINARY: int +SQL_LONGVARCHAR: int +SQL_LONGVARGRAPHIC: int +SQL_NUMERIC: int +SQL_PARAM_BIND_BY_COLUMN: int +SQL_PARAM_INPUT: int +SQL_PARAM_INPUT_OUTPUT: int +SQL_PARAM_OUTPUT: int +SQL_REAL: int +SQL_ROWCOUNT_PREFETCH_OFF: int +SQL_ROWCOUNT_PREFETCH_ON: int +SQL_SMALLINT: int +SQL_TABLE_STAT: int +SQL_TINYINT: int +SQL_TRUE: int +SQL_TYPE_DATE: int +SQL_TYPE_TIME: int +SQL_TYPE_TIMESTAMP: int +SQL_VARBINARY: int +SQL_VARCHAR: int +SQL_VARGRAPHIC: int +SQL_WCHAR: int +SQL_WLONGVARCHAR: int +SQL_WVARCHAR: int +SQL_XML: int +USE_WCHAR: int +WCHAR_NO: int +WCHAR_YES: int + +@final +class IBM_DBClientInfo: + def __new__(cls, *args: object, **kwargs: object) -> Self: ... + APPL_CODEPAGE: int + CONN_CODEPAGE: int + DATA_SOURCE_NAME: str + DRIVER_NAME: str + DRIVER_ODBC_VER: str + DRIVER_VER: str + ODBC_SQL_CONFORMANCE: str + ODBC_VER: str + +@final +class IBM_DBConnection: + def __new__(cls, *args: object, **kwargs: object) -> Self: ... + +@final +class IBM_DBServerInfo: + def __new__(cls, *args: object, **kwargs: object) -> Self: ... 
+ DBMS_NAME: str + DBMS_VER: str + DB_CODEPAGE: int + DB_NAME: str + DFT_ISOLATION: str + IDENTIFIER_QUOTE_CHAR: str + INST_NAME: str + ISOLATION_OPTION: tuple[str, str, str, str, str] + KEYWORDS: str + LIKE_ESCAPE_CLAUSE: bool + MAX_COL_NAME_LEN: int + MAX_IDENTIFIER_LEN: int + MAX_INDEX_SIZE: int + MAX_PROC_NAME_LEN: int + MAX_ROW_SIZE: int + MAX_SCHEMA_NAME_LEN: int + MAX_STATEMENT_LEN: int + MAX_TABLE_NAME_LEN: int + NON_NULLABLE_COLUMNS: bool + PROCEDURES: bool + SPECIAL_CHARS: str + SQL_CONFORMANCE: str + +@final +class IBM_DBStatement: + def __new__(cls, *args: object, **kwargs: object) -> Self: ... + +def active(__connection: IBM_DBConnection | None) -> bool: ... +def autocommit(__connection: IBM_DBConnection, __value: int = ...) -> int | bool: ... +def bind_param( + __stmt: IBM_DBStatement, + __parameter_number: int, + __variable: str, + __parameter_type: int | None = ..., + __data_type: int | None = ..., + __precision: int | None = ..., + __scale: int | None = ..., + __size: int | None = ..., +) -> bool: ... +@overload +def callproc(__connection: IBM_DBConnection, __procname: str) -> IBM_DBStatement | None: ... +@overload +def callproc(__connection: IBM_DBConnection, __procname: str, __parameters: tuple[object, ...]) -> tuple[object, ...] | None: ... +def check_function_support(__connection: IBM_DBConnection, __function_id: int) -> bool: ... +def client_info(__connection: IBM_DBConnection) -> IBM_DBClientInfo | bool: ... +def close(__connection: IBM_DBConnection) -> bool: ... +def column_privileges( + __connection: IBM_DBConnection, + __qualifier: str | None = ..., + __schema: str | None = ..., + __table_name: str | None = ..., + __column_name: str | None = ..., +) -> IBM_DBStatement: ... +def columns( + __connection: IBM_DBConnection, + __qualifier: str | None = ..., + __schema: str | None = ..., + __table_name: str | None = ..., + __column_name: str | None = ..., +) -> IBM_DBStatement: ... +def commit(__connection: IBM_DBConnection) -> bool: ... +def conn_error(__connection: IBM_DBConnection | None = ...) -> str: ... +def conn_errormsg(__connection: IBM_DBConnection | None = ...) -> str: ... +def conn_warn(__connection: IBM_DBConnection | None = ...) -> str: ... +def connect( + __database: str, + __user: str, + __password: str, + __options: dict[int, int | str] | None = ..., + __replace_quoted_literal: int = ..., +) -> IBM_DBConnection | None: ... +def createdb(__connection: IBM_DBConnection, __dbName: str, __codeSet: str = ..., __mode: str = ...) -> bool: ... +def createdbNX(__connection: IBM_DBConnection, __dbName: str, __codeSet: str = ..., __mode: str = ...) -> bool: ... +def cursor_type(__stmt: IBM_DBStatement) -> int: ... +def dropdb(__connection: IBM_DBConnection, __dbName: str) -> bool: ... +def exec_immediate( + __connection: IBM_DBConnection, __statement: str | None, __options: dict[int, int] = ... +) -> IBM_DBStatement | bool: ... +def execute(__stmt: IBM_DBStatement, __parameters: tuple[object, ...] | None = ...) -> bool: ... +def execute_many( + __stmt: IBM_DBStatement, __seq_of_parameters: tuple[object, ...], __options: dict[int, int] = ... +) -> int | None: ... +def fetch_assoc(__stmt: IBM_DBStatement, __row_number: int = ...) -> dict[str, object] | bool: ... +def fetch_both(__stmt: IBM_DBStatement, __row_number: int = ...) -> dict[int | str, object] | bool: ... +def fetch_row(__stmt: IBM_DBStatement, __row_number: int = ...) -> bool: ... +def fetch_tuple(__stmt: IBM_DBStatement, __row_number: int = ...) -> tuple[object, ...]: ... 
+def field_display_size(__stmt: IBM_DBStatement, __column: int | str) -> int | bool: ... +def field_name(__stmt: IBM_DBStatement, __column: int | str) -> str | bool: ... +def field_nullable(__stmt: IBM_DBStatement, __column: int | str) -> bool: ... +def field_num(__stmt: IBM_DBStatement, __column: int | str) -> int | bool: ... +def field_precision(__stmt: IBM_DBStatement, __column: int | str) -> int | bool: ... +def field_scale(__stmt: IBM_DBStatement, __column: int | str) -> int | bool: ... +def field_type(__stmt: IBM_DBStatement, __column: int | str) -> str | bool: ... +def field_width(__stmt: IBM_DBStatement, __column: int | str) -> int | bool: ... +def foreign_keys( + __connection: IBM_DBConnection, + __pk_qualifier: str | None, + __pk_schema: str | None, + __pk_table_name: str | None, + __fk_qualifier: str | None = ..., + __fk_schema: str | None = ..., + __fk_table_name: str | None = ..., +) -> IBM_DBStatement: ... +def free_result(__stmt: IBM_DBStatement) -> bool: ... +def free_stmt(__stmt: IBM_DBStatement) -> bool: ... +def get_db_info(__connection: IBM_DBConnection, __option: int) -> str | bool: ... +def get_last_serial_value(__stmt: IBM_DBStatement) -> str | bool: ... +def get_num_result(__stmt: IBM_DBStatement) -> int | bool: ... +def get_option(__resc: IBM_DBConnection | IBM_DBStatement, __options: int, __type: int) -> Any: ... +def next_result(__stmt: IBM_DBStatement) -> IBM_DBStatement | bool: ... +def num_fields(__stmt: IBM_DBStatement) -> int | bool: ... +def num_rows(__stmt: IBM_DBStatement) -> int: ... +def pconnect( + __database: str, __username: str, __password: str, __options: dict[int, int | str] | None = ... +) -> IBM_DBConnection | None: ... +def prepare( + __connection: IBM_DBConnection, __statement: str, __options: dict[int, int | str] | None = ... +) -> IBM_DBStatement | bool: ... +def primary_keys( + __connection: IBM_DBConnection, __qualifier: str | None, __schema: str | None, __table_name: str | None +) -> IBM_DBStatement: ... +def procedure_columns( + __connection: IBM_DBConnection, + __qualifier: str | None, + __schema: str | None, + __procedure: str | None, + __parameter: str | None, +) -> IBM_DBStatement | bool: ... +def procedures( + __connection: IBM_DBConnection, __qualifier: str | None, __schema: str | None, __procedure: str | None +) -> IBM_DBStatement | bool: ... +def recreatedb(__connection: IBM_DBConnection, __dbName: str, __codeSet: str | None = ..., __mode: str | None = ...) -> bool: ... +def result(__stmt: IBM_DBStatement, __column: int | str) -> Any: ... +def rollback(__connection: IBM_DBConnection) -> bool: ... +def server_info(__connection: IBM_DBConnection) -> IBM_DBServerInfo | bool: ... +def set_option(__resc: IBM_DBConnection | IBM_DBStatement, __options: dict[int, int | str], __type: int) -> bool: ... +def special_columns( + __connection: IBM_DBConnection, __qualifier: str | None, __schema: str | None, __table_name: str | None, __scope: int +) -> IBM_DBStatement: ... +def statistics( + __connection: IBM_DBConnection, __qualifier: str | None, __schema: str | None, __table_name: str | None, __unique: bool | None +) -> IBM_DBStatement: ... +def stmt_error(__stmt: IBM_DBStatement = ...) -> str: ... +def stmt_errormsg(__stmt: IBM_DBStatement = ...) -> str: ... +def stmt_warn(__connection: IBM_DBConnection = ...) -> IBM_DBStatement: ... +def table_privileges( + __connection: IBM_DBConnection, __qualifier: str | None = ..., __schema: str | None = ..., __table_name: str | None = ... +) -> IBM_DBStatement | bool: ... 
+def tables( + __connection: IBM_DBConnection, + __qualifier: str | None = ..., + __schema: str | None = ..., + __table_name: str | None = ..., + __table_type: str | None = ..., +) -> IBM_DBStatement | bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..d38fbca2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# Metaclass problem inherited from urllib3. +influxdb_client.client.write.retry.WritesRetry diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/METADATA.toml new file mode 100644 index 00000000..bf082ace --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/METADATA.toml @@ -0,0 +1,6 @@ +version = "1.36.*" +requires = ["types-urllib3"] + +[tool.stubtest] +extras = ["extra"] +stubtest_requirements = ["aiohttp"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/__init__.pyi new file mode 100644 index 00000000..b3757b58 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/__init__.pyi @@ -0,0 +1,427 @@ +from influxdb_client.client.authorizations_api import AuthorizationsApi as AuthorizationsApi +from influxdb_client.client.bucket_api import BucketsApi as BucketsApi +from influxdb_client.client.delete_api import DeleteApi as DeleteApi +from influxdb_client.client.influxdb_client import InfluxDBClient as InfluxDBClient +from influxdb_client.client.invokable_scripts_api import InvokableScriptsApi as InvokableScriptsApi +from influxdb_client.client.labels_api import LabelsApi as LabelsApi +from influxdb_client.client.logging_handler import InfluxLoggingHandler as InfluxLoggingHandler +from influxdb_client.client.organizations_api import OrganizationsApi as OrganizationsApi +from influxdb_client.client.query_api import QueryApi as QueryApi +from influxdb_client.client.tasks_api import TasksApi as TasksApi +from influxdb_client.client.users_api import UsersApi as UsersApi +from influxdb_client.client.write.point import Point as Point +from influxdb_client.client.write_api import WriteApi as WriteApi, WriteOptions as WriteOptions +from influxdb_client.configuration import Configuration as Configuration +from influxdb_client.domain.add_resource_member_request_body import AddResourceMemberRequestBody as AddResourceMemberRequestBody +from influxdb_client.domain.analyze_query_response import AnalyzeQueryResponse as AnalyzeQueryResponse +from influxdb_client.domain.analyze_query_response_errors import AnalyzeQueryResponseErrors as AnalyzeQueryResponseErrors +from influxdb_client.domain.array_expression import ArrayExpression as ArrayExpression +from influxdb_client.domain.ast_response import ASTResponse as ASTResponse +from influxdb_client.domain.authorization import Authorization as Authorization +from influxdb_client.domain.authorization_post_request import 
AuthorizationPostRequest as AuthorizationPostRequest +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest as AuthorizationUpdateRequest +from influxdb_client.domain.authorizations import Authorizations as Authorizations +from influxdb_client.domain.axes import Axes as Axes +from influxdb_client.domain.axis import Axis as Axis +from influxdb_client.domain.axis_scale import AxisScale as AxisScale +from influxdb_client.domain.bad_statement import BadStatement as BadStatement +from influxdb_client.domain.band_view_properties import BandViewProperties as BandViewProperties +from influxdb_client.domain.binary_expression import BinaryExpression as BinaryExpression +from influxdb_client.domain.block import Block as Block +from influxdb_client.domain.boolean_literal import BooleanLiteral as BooleanLiteral +from influxdb_client.domain.bucket import Bucket as Bucket +from influxdb_client.domain.bucket_links import BucketLinks as BucketLinks +from influxdb_client.domain.bucket_metadata_manifest import BucketMetadataManifest as BucketMetadataManifest +from influxdb_client.domain.bucket_retention_rules import BucketRetentionRules as BucketRetentionRules +from influxdb_client.domain.bucket_shard_mapping import BucketShardMapping as BucketShardMapping +from influxdb_client.domain.buckets import Buckets as Buckets +from influxdb_client.domain.builder_aggregate_function_type import BuilderAggregateFunctionType as BuilderAggregateFunctionType +from influxdb_client.domain.builder_config import BuilderConfig as BuilderConfig +from influxdb_client.domain.builder_config_aggregate_window import BuilderConfigAggregateWindow as BuilderConfigAggregateWindow +from influxdb_client.domain.builder_functions_type import BuilderFunctionsType as BuilderFunctionsType +from influxdb_client.domain.builder_tags_type import BuilderTagsType as BuilderTagsType +from influxdb_client.domain.builtin_statement import BuiltinStatement as BuiltinStatement +from influxdb_client.domain.call_expression import CallExpression as CallExpression +from influxdb_client.domain.cell import Cell as Cell +from influxdb_client.domain.cell_links import CellLinks as CellLinks +from influxdb_client.domain.cell_update import CellUpdate as CellUpdate +from influxdb_client.domain.cell_with_view_properties import CellWithViewProperties as CellWithViewProperties +from influxdb_client.domain.check import Check as Check +from influxdb_client.domain.check_base import CheckBase as CheckBase +from influxdb_client.domain.check_base_links import CheckBaseLinks as CheckBaseLinks +from influxdb_client.domain.check_discriminator import CheckDiscriminator as CheckDiscriminator +from influxdb_client.domain.check_patch import CheckPatch as CheckPatch +from influxdb_client.domain.check_status_level import CheckStatusLevel as CheckStatusLevel +from influxdb_client.domain.check_view_properties import CheckViewProperties as CheckViewProperties +from influxdb_client.domain.checks import Checks as Checks +from influxdb_client.domain.column_data_type import ColumnDataType as ColumnDataType +from influxdb_client.domain.column_semantic_type import ColumnSemanticType as ColumnSemanticType +from influxdb_client.domain.conditional_expression import ConditionalExpression as ConditionalExpression +from influxdb_client.domain.config import Config as Config +from influxdb_client.domain.constant_variable_properties import ConstantVariableProperties as ConstantVariableProperties +from influxdb_client.domain.create_cell import CreateCell as 
CreateCell +from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest as CreateDashboardRequest +from influxdb_client.domain.custom_check import CustomCheck as CustomCheck +from influxdb_client.domain.dashboard import Dashboard as Dashboard +from influxdb_client.domain.dashboard_color import DashboardColor as DashboardColor +from influxdb_client.domain.dashboard_query import DashboardQuery as DashboardQuery +from influxdb_client.domain.dashboard_with_view_properties import DashboardWithViewProperties as DashboardWithViewProperties +from influxdb_client.domain.dashboards import Dashboards as Dashboards +from influxdb_client.domain.date_time_literal import DateTimeLiteral as DateTimeLiteral +from influxdb_client.domain.dbr_ps import DBRPs as DBRPs +from influxdb_client.domain.dbrp import DBRP as DBRP +from influxdb_client.domain.dbrp_create import DBRPCreate as DBRPCreate +from influxdb_client.domain.dbrp_get import DBRPGet as DBRPGet +from influxdb_client.domain.dbrp_update import DBRPUpdate as DBRPUpdate +from influxdb_client.domain.deadman_check import DeadmanCheck as DeadmanCheck +from influxdb_client.domain.decimal_places import DecimalPlaces as DecimalPlaces +from influxdb_client.domain.delete_predicate_request import DeletePredicateRequest as DeletePredicateRequest +from influxdb_client.domain.dialect import Dialect as Dialect +from influxdb_client.domain.dict_expression import DictExpression as DictExpression +from influxdb_client.domain.dict_item import DictItem as DictItem +from influxdb_client.domain.duration import Duration as Duration +from influxdb_client.domain.duration_literal import DurationLiteral as DurationLiteral +from influxdb_client.domain.error import Error as Error +from influxdb_client.domain.expression import Expression as Expression +from influxdb_client.domain.expression_statement import ExpressionStatement as ExpressionStatement +from influxdb_client.domain.field import Field as Field +from influxdb_client.domain.file import File as File +from influxdb_client.domain.float_literal import FloatLiteral as FloatLiteral +from influxdb_client.domain.flux_response import FluxResponse as FluxResponse +from influxdb_client.domain.flux_suggestion import FluxSuggestion as FluxSuggestion +from influxdb_client.domain.flux_suggestions import FluxSuggestions as FluxSuggestions +from influxdb_client.domain.function_expression import FunctionExpression as FunctionExpression +from influxdb_client.domain.gauge_view_properties import GaugeViewProperties as GaugeViewProperties +from influxdb_client.domain.greater_threshold import GreaterThreshold as GreaterThreshold +from influxdb_client.domain.health_check import HealthCheck as HealthCheck +from influxdb_client.domain.heatmap_view_properties import HeatmapViewProperties as HeatmapViewProperties +from influxdb_client.domain.histogram_view_properties import HistogramViewProperties as HistogramViewProperties +from influxdb_client.domain.http_notification_endpoint import HTTPNotificationEndpoint as HTTPNotificationEndpoint +from influxdb_client.domain.http_notification_rule import HTTPNotificationRule as HTTPNotificationRule +from influxdb_client.domain.http_notification_rule_base import HTTPNotificationRuleBase as HTTPNotificationRuleBase +from influxdb_client.domain.identifier import Identifier as Identifier +from influxdb_client.domain.import_declaration import ImportDeclaration as ImportDeclaration +from influxdb_client.domain.index_expression import IndexExpression as IndexExpression +from 
influxdb_client.domain.integer_literal import IntegerLiteral as IntegerLiteral +from influxdb_client.domain.is_onboarding import IsOnboarding as IsOnboarding +from influxdb_client.domain.label import Label as Label +from influxdb_client.domain.label_create_request import LabelCreateRequest as LabelCreateRequest +from influxdb_client.domain.label_mapping import LabelMapping as LabelMapping +from influxdb_client.domain.label_response import LabelResponse as LabelResponse +from influxdb_client.domain.label_update import LabelUpdate as LabelUpdate +from influxdb_client.domain.labels_response import LabelsResponse as LabelsResponse +from influxdb_client.domain.language_request import LanguageRequest as LanguageRequest +from influxdb_client.domain.legacy_authorization_post_request import ( + LegacyAuthorizationPostRequest as LegacyAuthorizationPostRequest, +) +from influxdb_client.domain.lesser_threshold import LesserThreshold as LesserThreshold +from influxdb_client.domain.line_plus_single_stat_properties import LinePlusSingleStatProperties as LinePlusSingleStatProperties +from influxdb_client.domain.line_protocol_error import LineProtocolError as LineProtocolError +from influxdb_client.domain.line_protocol_length_error import LineProtocolLengthError as LineProtocolLengthError +from influxdb_client.domain.links import Links as Links +from influxdb_client.domain.list_stacks_response import ListStacksResponse as ListStacksResponse +from influxdb_client.domain.log_event import LogEvent as LogEvent +from influxdb_client.domain.logical_expression import LogicalExpression as LogicalExpression +from influxdb_client.domain.logs import Logs as Logs +from influxdb_client.domain.map_variable_properties import MapVariableProperties as MapVariableProperties +from influxdb_client.domain.markdown_view_properties import MarkdownViewProperties as MarkdownViewProperties +from influxdb_client.domain.measurement_schema import MeasurementSchema as MeasurementSchema +from influxdb_client.domain.measurement_schema_column import MeasurementSchemaColumn as MeasurementSchemaColumn +from influxdb_client.domain.measurement_schema_create_request import ( + MeasurementSchemaCreateRequest as MeasurementSchemaCreateRequest, +) +from influxdb_client.domain.measurement_schema_list import MeasurementSchemaList as MeasurementSchemaList +from influxdb_client.domain.measurement_schema_update_request import ( + MeasurementSchemaUpdateRequest as MeasurementSchemaUpdateRequest, +) +from influxdb_client.domain.member_assignment import MemberAssignment as MemberAssignment +from influxdb_client.domain.member_expression import MemberExpression as MemberExpression +from influxdb_client.domain.metadata_backup import MetadataBackup as MetadataBackup +from influxdb_client.domain.model_property import ModelProperty as ModelProperty +from influxdb_client.domain.mosaic_view_properties import MosaicViewProperties as MosaicViewProperties +from influxdb_client.domain.node import Node as Node +from influxdb_client.domain.notification_endpoint import NotificationEndpoint as NotificationEndpoint +from influxdb_client.domain.notification_endpoint_base import NotificationEndpointBase as NotificationEndpointBase +from influxdb_client.domain.notification_endpoint_base_links import NotificationEndpointBaseLinks as NotificationEndpointBaseLinks +from influxdb_client.domain.notification_endpoint_discriminator import ( + NotificationEndpointDiscriminator as NotificationEndpointDiscriminator, +) +from influxdb_client.domain.notification_endpoint_type import 
NotificationEndpointType as NotificationEndpointType +from influxdb_client.domain.notification_endpoint_update import NotificationEndpointUpdate as NotificationEndpointUpdate +from influxdb_client.domain.notification_endpoints import NotificationEndpoints as NotificationEndpoints +from influxdb_client.domain.notification_rule import NotificationRule as NotificationRule +from influxdb_client.domain.notification_rule_base import NotificationRuleBase as NotificationRuleBase +from influxdb_client.domain.notification_rule_base_links import NotificationRuleBaseLinks as NotificationRuleBaseLinks +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator as NotificationRuleDiscriminator +from influxdb_client.domain.notification_rule_update import NotificationRuleUpdate as NotificationRuleUpdate +from influxdb_client.domain.notification_rules import NotificationRules as NotificationRules +from influxdb_client.domain.object_expression import ObjectExpression as ObjectExpression +from influxdb_client.domain.onboarding_request import OnboardingRequest as OnboardingRequest +from influxdb_client.domain.onboarding_response import OnboardingResponse as OnboardingResponse +from influxdb_client.domain.option_statement import OptionStatement as OptionStatement +from influxdb_client.domain.organization import Organization as Organization +from influxdb_client.domain.organization_links import OrganizationLinks as OrganizationLinks +from influxdb_client.domain.organizations import Organizations as Organizations +from influxdb_client.domain.package import Package as Package +from influxdb_client.domain.package_clause import PackageClause as PackageClause +from influxdb_client.domain.pager_duty_notification_endpoint import PagerDutyNotificationEndpoint as PagerDutyNotificationEndpoint +from influxdb_client.domain.pager_duty_notification_rule import PagerDutyNotificationRule as PagerDutyNotificationRule +from influxdb_client.domain.pager_duty_notification_rule_base import ( + PagerDutyNotificationRuleBase as PagerDutyNotificationRuleBase, +) +from influxdb_client.domain.paren_expression import ParenExpression as ParenExpression +from influxdb_client.domain.password_reset_body import PasswordResetBody as PasswordResetBody +from influxdb_client.domain.patch_bucket_request import PatchBucketRequest as PatchBucketRequest +from influxdb_client.domain.patch_dashboard_request import PatchDashboardRequest as PatchDashboardRequest +from influxdb_client.domain.patch_organization_request import PatchOrganizationRequest as PatchOrganizationRequest +from influxdb_client.domain.patch_retention_rule import PatchRetentionRule as PatchRetentionRule +from influxdb_client.domain.patch_stack_request import PatchStackRequest as PatchStackRequest +from influxdb_client.domain.patch_stack_request_additional_resources import ( + PatchStackRequestAdditionalResources as PatchStackRequestAdditionalResources, +) +from influxdb_client.domain.permission import Permission as Permission +from influxdb_client.domain.permission_resource import PermissionResource as PermissionResource +from influxdb_client.domain.pipe_expression import PipeExpression as PipeExpression +from influxdb_client.domain.pipe_literal import PipeLiteral as PipeLiteral +from influxdb_client.domain.post_bucket_request import PostBucketRequest as PostBucketRequest +from influxdb_client.domain.post_check import PostCheck as PostCheck +from influxdb_client.domain.post_notification_endpoint import PostNotificationEndpoint as 
PostNotificationEndpoint +from influxdb_client.domain.post_notification_rule import PostNotificationRule as PostNotificationRule +from influxdb_client.domain.post_organization_request import PostOrganizationRequest as PostOrganizationRequest +from influxdb_client.domain.post_restore_kv_response import PostRestoreKVResponse as PostRestoreKVResponse +from influxdb_client.domain.post_stack_request import PostStackRequest as PostStackRequest +from influxdb_client.domain.property_key import PropertyKey as PropertyKey +from influxdb_client.domain.query import Query as Query +from influxdb_client.domain.query_edit_mode import QueryEditMode as QueryEditMode +from influxdb_client.domain.query_variable_properties import QueryVariableProperties as QueryVariableProperties +from influxdb_client.domain.query_variable_properties_values import QueryVariablePropertiesValues as QueryVariablePropertiesValues +from influxdb_client.domain.range_threshold import RangeThreshold as RangeThreshold +from influxdb_client.domain.ready import Ready as Ready +from influxdb_client.domain.regexp_literal import RegexpLiteral as RegexpLiteral +from influxdb_client.domain.remote_connection import RemoteConnection as RemoteConnection +from influxdb_client.domain.remote_connection_creation_request import ( + RemoteConnectionCreationRequest as RemoteConnectionCreationRequest, +) +from influxdb_client.domain.remote_connection_update_request import RemoteConnectionUpdateRequest as RemoteConnectionUpdateRequest +from influxdb_client.domain.remote_connections import RemoteConnections as RemoteConnections +from influxdb_client.domain.renamable_field import RenamableField as RenamableField +from influxdb_client.domain.replication import Replication as Replication +from influxdb_client.domain.replication_creation_request import ReplicationCreationRequest as ReplicationCreationRequest +from influxdb_client.domain.replication_update_request import ReplicationUpdateRequest as ReplicationUpdateRequest +from influxdb_client.domain.replications import Replications as Replications +from influxdb_client.domain.resource_member import ResourceMember as ResourceMember +from influxdb_client.domain.resource_members import ResourceMembers as ResourceMembers +from influxdb_client.domain.resource_members_links import ResourceMembersLinks as ResourceMembersLinks +from influxdb_client.domain.resource_owner import ResourceOwner as ResourceOwner +from influxdb_client.domain.resource_owners import ResourceOwners as ResourceOwners +from influxdb_client.domain.restored_bucket_mappings import RestoredBucketMappings as RestoredBucketMappings +from influxdb_client.domain.retention_policy_manifest import RetentionPolicyManifest as RetentionPolicyManifest +from influxdb_client.domain.return_statement import ReturnStatement as ReturnStatement +from influxdb_client.domain.routes import Routes as Routes +from influxdb_client.domain.routes_external import RoutesExternal as RoutesExternal +from influxdb_client.domain.routes_query import RoutesQuery as RoutesQuery +from influxdb_client.domain.routes_system import RoutesSystem as RoutesSystem +from influxdb_client.domain.rule_status_level import RuleStatusLevel as RuleStatusLevel +from influxdb_client.domain.run import Run as Run +from influxdb_client.domain.run_links import RunLinks as RunLinks +from influxdb_client.domain.run_manually import RunManually as RunManually +from influxdb_client.domain.runs import Runs as Runs +from influxdb_client.domain.scatter_view_properties import ScatterViewProperties as 
ScatterViewProperties +from influxdb_client.domain.schema_type import SchemaType as SchemaType +from influxdb_client.domain.scraper_target_request import ScraperTargetRequest as ScraperTargetRequest +from influxdb_client.domain.scraper_target_response import ScraperTargetResponse as ScraperTargetResponse +from influxdb_client.domain.scraper_target_responses import ScraperTargetResponses as ScraperTargetResponses +from influxdb_client.domain.script import Script as Script +from influxdb_client.domain.script_create_request import ScriptCreateRequest as ScriptCreateRequest +from influxdb_client.domain.script_invocation_params import ScriptInvocationParams as ScriptInvocationParams +from influxdb_client.domain.script_language import ScriptLanguage as ScriptLanguage +from influxdb_client.domain.script_update_request import ScriptUpdateRequest as ScriptUpdateRequest +from influxdb_client.domain.scripts import Scripts as Scripts +from influxdb_client.domain.secret_keys import SecretKeys as SecretKeys +from influxdb_client.domain.secret_keys_response import SecretKeysResponse as SecretKeysResponse +from influxdb_client.domain.shard_group_manifest import ShardGroupManifest as ShardGroupManifest +from influxdb_client.domain.shard_manifest import ShardManifest as ShardManifest +from influxdb_client.domain.shard_owner import ShardOwner as ShardOwner +from influxdb_client.domain.simple_table_view_properties import SimpleTableViewProperties as SimpleTableViewProperties +from influxdb_client.domain.single_stat_view_properties import SingleStatViewProperties as SingleStatViewProperties +from influxdb_client.domain.slack_notification_endpoint import SlackNotificationEndpoint as SlackNotificationEndpoint +from influxdb_client.domain.slack_notification_rule import SlackNotificationRule as SlackNotificationRule +from influxdb_client.domain.slack_notification_rule_base import SlackNotificationRuleBase as SlackNotificationRuleBase +from influxdb_client.domain.smtp_notification_rule import SMTPNotificationRule as SMTPNotificationRule +from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase as SMTPNotificationRuleBase +from influxdb_client.domain.source import Source as Source +from influxdb_client.domain.source_links import SourceLinks as SourceLinks +from influxdb_client.domain.sources import Sources as Sources +from influxdb_client.domain.stack import Stack as Stack +from influxdb_client.domain.stack_associations import StackAssociations as StackAssociations +from influxdb_client.domain.stack_events import StackEvents as StackEvents +from influxdb_client.domain.stack_links import StackLinks as StackLinks +from influxdb_client.domain.stack_resources import StackResources as StackResources +from influxdb_client.domain.statement import Statement as Statement +from influxdb_client.domain.static_legend import StaticLegend as StaticLegend +from influxdb_client.domain.status_rule import StatusRule as StatusRule +from influxdb_client.domain.string_literal import StringLiteral as StringLiteral +from influxdb_client.domain.subscription_manifest import SubscriptionManifest as SubscriptionManifest +from influxdb_client.domain.table_view_properties import TableViewProperties as TableViewProperties +from influxdb_client.domain.table_view_properties_table_options import ( + TableViewPropertiesTableOptions as TableViewPropertiesTableOptions, +) +from influxdb_client.domain.tag_rule import TagRule as TagRule +from influxdb_client.domain.task import Task as Task +from 
influxdb_client.domain.task_create_request import TaskCreateRequest as TaskCreateRequest +from influxdb_client.domain.task_links import TaskLinks as TaskLinks +from influxdb_client.domain.task_status_type import TaskStatusType as TaskStatusType +from influxdb_client.domain.task_update_request import TaskUpdateRequest as TaskUpdateRequest +from influxdb_client.domain.tasks import Tasks as Tasks +from influxdb_client.domain.telegraf import Telegraf as Telegraf +from influxdb_client.domain.telegraf_plugin import TelegrafPlugin as TelegrafPlugin +from influxdb_client.domain.telegraf_plugin_request import TelegrafPluginRequest as TelegrafPluginRequest +from influxdb_client.domain.telegraf_plugin_request_plugins import TelegrafPluginRequestPlugins as TelegrafPluginRequestPlugins +from influxdb_client.domain.telegraf_plugins import TelegrafPlugins as TelegrafPlugins +from influxdb_client.domain.telegraf_request import TelegrafRequest as TelegrafRequest +from influxdb_client.domain.telegraf_request_metadata import TelegrafRequestMetadata as TelegrafRequestMetadata +from influxdb_client.domain.telegrafs import Telegrafs as Telegrafs +from influxdb_client.domain.telegram_notification_endpoint import TelegramNotificationEndpoint as TelegramNotificationEndpoint +from influxdb_client.domain.telegram_notification_rule import TelegramNotificationRule as TelegramNotificationRule +from influxdb_client.domain.telegram_notification_rule_base import TelegramNotificationRuleBase as TelegramNotificationRuleBase +from influxdb_client.domain.template_apply import TemplateApply as TemplateApply +from influxdb_client.domain.template_apply_remotes import TemplateApplyRemotes as TemplateApplyRemotes +from influxdb_client.domain.template_apply_template import TemplateApplyTemplate as TemplateApplyTemplate +from influxdb_client.domain.template_chart import TemplateChart as TemplateChart +from influxdb_client.domain.template_export_by_id import TemplateExportByID as TemplateExportByID +from influxdb_client.domain.template_export_by_id_org_ids import TemplateExportByIDOrgIDs as TemplateExportByIDOrgIDs +from influxdb_client.domain.template_export_by_id_resource_filters import ( + TemplateExportByIDResourceFilters as TemplateExportByIDResourceFilters, +) +from influxdb_client.domain.template_export_by_id_resources import TemplateExportByIDResources as TemplateExportByIDResources +from influxdb_client.domain.template_kind import TemplateKind as TemplateKind +from influxdb_client.domain.template_summary import TemplateSummary as TemplateSummary +from influxdb_client.domain.template_summary_diff import TemplateSummaryDiff as TemplateSummaryDiff +from influxdb_client.domain.template_summary_diff_buckets import TemplateSummaryDiffBuckets as TemplateSummaryDiffBuckets +from influxdb_client.domain.template_summary_diff_buckets_new_old import ( + TemplateSummaryDiffBucketsNewOld as TemplateSummaryDiffBucketsNewOld, +) +from influxdb_client.domain.template_summary_diff_checks import TemplateSummaryDiffChecks as TemplateSummaryDiffChecks +from influxdb_client.domain.template_summary_diff_dashboards import TemplateSummaryDiffDashboards as TemplateSummaryDiffDashboards +from influxdb_client.domain.template_summary_diff_dashboards_new_old import ( + TemplateSummaryDiffDashboardsNewOld as TemplateSummaryDiffDashboardsNewOld, +) +from influxdb_client.domain.template_summary_diff_label_mappings import ( + TemplateSummaryDiffLabelMappings as TemplateSummaryDiffLabelMappings, +) +from influxdb_client.domain.template_summary_diff_labels 
import TemplateSummaryDiffLabels as TemplateSummaryDiffLabels +from influxdb_client.domain.template_summary_diff_labels_new_old import ( + TemplateSummaryDiffLabelsNewOld as TemplateSummaryDiffLabelsNewOld, +) +from influxdb_client.domain.template_summary_diff_notification_endpoints import ( + TemplateSummaryDiffNotificationEndpoints as TemplateSummaryDiffNotificationEndpoints, +) +from influxdb_client.domain.template_summary_diff_notification_rules import ( + TemplateSummaryDiffNotificationRules as TemplateSummaryDiffNotificationRules, +) +from influxdb_client.domain.template_summary_diff_notification_rules_new_old import ( + TemplateSummaryDiffNotificationRulesNewOld as TemplateSummaryDiffNotificationRulesNewOld, +) +from influxdb_client.domain.template_summary_diff_tasks import TemplateSummaryDiffTasks as TemplateSummaryDiffTasks +from influxdb_client.domain.template_summary_diff_tasks_new_old import ( + TemplateSummaryDiffTasksNewOld as TemplateSummaryDiffTasksNewOld, +) +from influxdb_client.domain.template_summary_diff_telegraf_configs import ( + TemplateSummaryDiffTelegrafConfigs as TemplateSummaryDiffTelegrafConfigs, +) +from influxdb_client.domain.template_summary_diff_variables import TemplateSummaryDiffVariables as TemplateSummaryDiffVariables +from influxdb_client.domain.template_summary_diff_variables_new_old import ( + TemplateSummaryDiffVariablesNewOld as TemplateSummaryDiffVariablesNewOld, +) +from influxdb_client.domain.template_summary_errors import TemplateSummaryErrors as TemplateSummaryErrors +from influxdb_client.domain.template_summary_label import TemplateSummaryLabel as TemplateSummaryLabel +from influxdb_client.domain.template_summary_label_properties import ( + TemplateSummaryLabelProperties as TemplateSummaryLabelProperties, +) +from influxdb_client.domain.template_summary_summary import TemplateSummarySummary as TemplateSummarySummary +from influxdb_client.domain.template_summary_summary_buckets import TemplateSummarySummaryBuckets as TemplateSummarySummaryBuckets +from influxdb_client.domain.template_summary_summary_dashboards import ( + TemplateSummarySummaryDashboards as TemplateSummarySummaryDashboards, +) +from influxdb_client.domain.template_summary_summary_label_mappings import ( + TemplateSummarySummaryLabelMappings as TemplateSummarySummaryLabelMappings, +) +from influxdb_client.domain.template_summary_summary_notification_rules import ( + TemplateSummarySummaryNotificationRules as TemplateSummarySummaryNotificationRules, +) +from influxdb_client.domain.template_summary_summary_status_rules import ( + TemplateSummarySummaryStatusRules as TemplateSummarySummaryStatusRules, +) +from influxdb_client.domain.template_summary_summary_tag_rules import ( + TemplateSummarySummaryTagRules as TemplateSummarySummaryTagRules, +) +from influxdb_client.domain.template_summary_summary_tasks import TemplateSummarySummaryTasks as TemplateSummarySummaryTasks +from influxdb_client.domain.template_summary_summary_variables import ( + TemplateSummarySummaryVariables as TemplateSummarySummaryVariables, +) +from influxdb_client.domain.test_statement import TestStatement as TestStatement +from influxdb_client.domain.threshold import Threshold as Threshold +from influxdb_client.domain.threshold_base import ThresholdBase as ThresholdBase +from influxdb_client.domain.threshold_check import ThresholdCheck as ThresholdCheck +from influxdb_client.domain.unary_expression import UnaryExpression as UnaryExpression +from influxdb_client.domain.unsigned_integer_literal import 
UnsignedIntegerLiteral as UnsignedIntegerLiteral +from influxdb_client.domain.user import User as User +from influxdb_client.domain.user_response import UserResponse as UserResponse +from influxdb_client.domain.user_response_links import UserResponseLinks as UserResponseLinks +from influxdb_client.domain.users import Users as Users +from influxdb_client.domain.variable import Variable as Variable +from influxdb_client.domain.variable_assignment import VariableAssignment as VariableAssignment +from influxdb_client.domain.variable_links import VariableLinks as VariableLinks +from influxdb_client.domain.variable_properties import VariableProperties as VariableProperties +from influxdb_client.domain.variables import Variables as Variables +from influxdb_client.domain.view import View as View +from influxdb_client.domain.view_links import ViewLinks as ViewLinks +from influxdb_client.domain.view_properties import ViewProperties as ViewProperties +from influxdb_client.domain.views import Views as Views +from influxdb_client.domain.write_precision import WritePrecision as WritePrecision +from influxdb_client.domain.xy_geom import XYGeom as XYGeom +from influxdb_client.domain.xy_view_properties import XYViewProperties as XYViewProperties +from influxdb_client.service.authorizations_service import AuthorizationsService as AuthorizationsService +from influxdb_client.service.backup_service import BackupService as BackupService +from influxdb_client.service.bucket_schemas_service import BucketSchemasService as BucketSchemasService +from influxdb_client.service.buckets_service import BucketsService as BucketsService +from influxdb_client.service.cells_service import CellsService as CellsService +from influxdb_client.service.checks_service import ChecksService as ChecksService +from influxdb_client.service.config_service import ConfigService as ConfigService +from influxdb_client.service.dashboards_service import DashboardsService as DashboardsService +from influxdb_client.service.dbr_ps_service import DBRPsService as DBRPsService +from influxdb_client.service.delete_service import DeleteService as DeleteService +from influxdb_client.service.health_service import HealthService as HealthService +from influxdb_client.service.invokable_scripts_service import InvokableScriptsService as InvokableScriptsService +from influxdb_client.service.labels_service import LabelsService as LabelsService +from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService as LegacyAuthorizationsService +from influxdb_client.service.metrics_service import MetricsService as MetricsService +from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService as NotificationEndpointsService +from influxdb_client.service.notification_rules_service import NotificationRulesService as NotificationRulesService +from influxdb_client.service.organizations_service import OrganizationsService as OrganizationsService +from influxdb_client.service.ping_service import PingService as PingService +from influxdb_client.service.query_service import QueryService as QueryService +from influxdb_client.service.ready_service import ReadyService as ReadyService +from influxdb_client.service.remote_connections_service import RemoteConnectionsService as RemoteConnectionsService +from influxdb_client.service.replications_service import ReplicationsService as ReplicationsService +from influxdb_client.service.resources_service import ResourcesService as ResourcesService +from 
influxdb_client.service.restore_service import RestoreService as RestoreService +from influxdb_client.service.routes_service import RoutesService as RoutesService +from influxdb_client.service.rules_service import RulesService as RulesService +from influxdb_client.service.scraper_targets_service import ScraperTargetsService as ScraperTargetsService +from influxdb_client.service.secrets_service import SecretsService as SecretsService +from influxdb_client.service.setup_service import SetupService as SetupService +from influxdb_client.service.signin_service import SigninService as SigninService +from influxdb_client.service.signout_service import SignoutService as SignoutService +from influxdb_client.service.sources_service import SourcesService as SourcesService +from influxdb_client.service.tasks_service import TasksService as TasksService +from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService as TelegrafPluginsService +from influxdb_client.service.telegrafs_service import TelegrafsService as TelegrafsService +from influxdb_client.service.templates_service import TemplatesService as TemplatesService +from influxdb_client.service.users_service import UsersService as UsersService +from influxdb_client.service.variables_service import VariablesService as VariablesService +from influxdb_client.service.views_service import ViewsService as ViewsService +from influxdb_client.service.write_service import WriteService as WriteService +from influxdb_client.version import VERSION as VERSION + +__version__ = VERSION diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_async/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_async/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_async/api_client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_async/api_client.pyi new file mode 100644 index 00000000..5c06e8a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_async/api_client.pyi @@ -0,0 +1,65 @@ +from _typeshed import Incomplete + +class ApiClientAsync: + PRIMITIVE_TYPES: Incomplete + NATIVE_TYPES_MAPPING: Incomplete + configuration: Incomplete + pool_threads: Incomplete + rest_client: Incomplete + default_headers: Incomplete + cookie: Incomplete + def __init__( + self, + configuration: Incomplete | None = ..., + header_name: Incomplete | None = ..., + header_value: Incomplete | None = ..., + cookie: Incomplete | None = ..., + pool_threads: Incomplete | None = ..., + **kwargs, + ) -> None: ... + async def close(self) -> None: ... + @property + def pool(self): ... + @property + def user_agent(self): ... + @user_agent.setter + def user_agent(self, value) -> None: ... + def set_default_header(self, header_name, header_value) -> None: ... + def sanitize_for_serialization(self, obj): ... + def deserialize(self, response, response_type): ... 
+ def call_api( + self, + resource_path, + method, + path_params: Incomplete | None = ..., + query_params: Incomplete | None = ..., + header_params: Incomplete | None = ..., + body: Incomplete | None = ..., + post_params: Incomplete | None = ..., + files: Incomplete | None = ..., + response_type: Incomplete | None = ..., + auth_settings: Incomplete | None = ..., + async_req: Incomplete | None = ..., + _return_http_data_only: Incomplete | None = ..., + collection_formats: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + urlopen_kw: Incomplete | None = ..., + ): ... + def request( + self, + method, + url, + query_params: Incomplete | None = ..., + headers: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... + def parameters_to_tuples(self, params, collection_formats): ... + def prepare_post_parameters(self, post_params: Incomplete | None = ..., files: Incomplete | None = ...): ... + def select_header_accept(self, accepts): ... + def select_header_content_type(self, content_types): ... + def update_params_for_auth(self, headers, querys, auth_settings) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_async/rest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_async/rest.pyi new file mode 100644 index 00000000..912928e1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_async/rest.pyi @@ -0,0 +1,96 @@ +import io +from _typeshed import Incomplete + +class RESTResponseAsync(io.IOBase): + aiohttp_response: Incomplete + status: Incomplete + reason: Incomplete + data: Incomplete + def __init__(self, resp, data) -> None: ... + def getheaders(self): ... + def getheader(self, name, default: Incomplete | None = ...): ... + +class RESTClientObjectAsync: + proxy: Incomplete + proxy_headers: Incomplete + allow_redirects: Incomplete + max_redirects: Incomplete + pool_manager: Incomplete + def __init__(self, configuration, pools_size: int = ..., maxsize: Incomplete | None = ..., **kwargs) -> None: ... + async def close(self) -> None: ... + async def request( + self, + method, + url, + query_params: Incomplete | None = ..., + headers: Incomplete | None = ..., + body: Incomplete | None = ..., + post_params: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + ): ... + async def GET( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + ): ... + async def HEAD( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + ): ... + async def OPTIONS( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + ): ... 
+ async def DELETE( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + ): ... + async def POST( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + ): ... + async def PUT( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + ): ... + async def PATCH( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_sync/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_sync/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_sync/api_client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_sync/api_client.pyi new file mode 100644 index 00000000..379ea2e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_sync/api_client.pyi @@ -0,0 +1,65 @@ +from _typeshed import Incomplete + +class ApiClient: + PRIMITIVE_TYPES: Incomplete + NATIVE_TYPES_MAPPING: Incomplete + configuration: Incomplete + pool_threads: Incomplete + rest_client: Incomplete + default_headers: Incomplete + cookie: Incomplete + def __init__( + self, + configuration: Incomplete | None = ..., + header_name: Incomplete | None = ..., + header_value: Incomplete | None = ..., + cookie: Incomplete | None = ..., + pool_threads: Incomplete | None = ..., + retries: bool = ..., + ) -> None: ... + def __del__(self) -> None: ... + @property + def pool(self): ... + @property + def user_agent(self): ... + @user_agent.setter + def user_agent(self, value) -> None: ... + def set_default_header(self, header_name, header_value) -> None: ... + def sanitize_for_serialization(self, obj): ... + def deserialize(self, response, response_type): ... + def call_api( + self, + resource_path, + method, + path_params: Incomplete | None = ..., + query_params: Incomplete | None = ..., + header_params: Incomplete | None = ..., + body: Incomplete | None = ..., + post_params: Incomplete | None = ..., + files: Incomplete | None = ..., + response_type: Incomplete | None = ..., + auth_settings: Incomplete | None = ..., + async_req: Incomplete | None = ..., + _return_http_data_only: Incomplete | None = ..., + collection_formats: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + urlopen_kw: Incomplete | None = ..., + ): ... 
+ def request( + self, + method, + url, + query_params: Incomplete | None = ..., + headers: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... + def parameters_to_tuples(self, params, collection_formats): ... + def prepare_post_parameters(self, post_params: Incomplete | None = ..., files: Incomplete | None = ...): ... + def select_header_accept(self, accepts): ... + def select_header_content_type(self, content_types): ... + def update_params_for_auth(self, headers, querys, auth_settings) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_sync/rest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_sync/rest.pyi new file mode 100644 index 00000000..f8b84cce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/_sync/rest.pyi @@ -0,0 +1,103 @@ +import io +from _typeshed import Incomplete + +class RESTResponse(io.IOBase): + urllib3_response: Incomplete + status: Incomplete + reason: Incomplete + data: Incomplete + def __init__(self, resp) -> None: ... + def getheaders(self): ... + def getheader(self, name, default: Incomplete | None = ...): ... + +class RESTClientObject: + configuration: Incomplete + pools_size: Incomplete + maxsize: Incomplete + retries: Incomplete + pool_manager: Incomplete + def __init__(self, configuration, pools_size: int = ..., maxsize: Incomplete | None = ..., retries: bool = ...) -> None: ... + def request( + self, + method, + url, + query_params: Incomplete | None = ..., + headers: Incomplete | None = ..., + body: Incomplete | None = ..., + post_params: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... + def GET( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... + def HEAD( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... + def OPTIONS( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... + def DELETE( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... + def POST( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... + def PUT( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... 
+ def PATCH( + self, + url, + headers: Incomplete | None = ..., + query_params: Incomplete | None = ..., + post_params: Incomplete | None = ..., + body: Incomplete | None = ..., + _preload_content: bool = ..., + _request_timeout: Incomplete | None = ..., + **urlopen_kw, + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/__init__.pyi new file mode 100644 index 00000000..4285a8aa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/__init__.pyi @@ -0,0 +1,41 @@ +from influxdb_client.service.authorizations_service import AuthorizationsService as AuthorizationsService +from influxdb_client.service.backup_service import BackupService as BackupService +from influxdb_client.service.bucket_schemas_service import BucketSchemasService as BucketSchemasService +from influxdb_client.service.buckets_service import BucketsService as BucketsService +from influxdb_client.service.cells_service import CellsService as CellsService +from influxdb_client.service.checks_service import ChecksService as ChecksService +from influxdb_client.service.config_service import ConfigService as ConfigService +from influxdb_client.service.dashboards_service import DashboardsService as DashboardsService +from influxdb_client.service.dbr_ps_service import DBRPsService as DBRPsService +from influxdb_client.service.delete_service import DeleteService as DeleteService +from influxdb_client.service.health_service import HealthService as HealthService +from influxdb_client.service.invokable_scripts_service import InvokableScriptsService as InvokableScriptsService +from influxdb_client.service.labels_service import LabelsService as LabelsService +from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService as LegacyAuthorizationsService +from influxdb_client.service.metrics_service import MetricsService as MetricsService +from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService as NotificationEndpointsService +from influxdb_client.service.notification_rules_service import NotificationRulesService as NotificationRulesService +from influxdb_client.service.organizations_service import OrganizationsService as OrganizationsService +from influxdb_client.service.ping_service import PingService as PingService +from influxdb_client.service.query_service import QueryService as QueryService +from influxdb_client.service.ready_service import ReadyService as ReadyService +from influxdb_client.service.remote_connections_service import RemoteConnectionsService as RemoteConnectionsService +from influxdb_client.service.replications_service import ReplicationsService as ReplicationsService +from influxdb_client.service.resources_service import ResourcesService as ResourcesService +from influxdb_client.service.restore_service import RestoreService as RestoreService +from influxdb_client.service.routes_service import RoutesService as RoutesService +from influxdb_client.service.rules_service import RulesService as RulesService +from influxdb_client.service.scraper_targets_service import ScraperTargetsService as ScraperTargetsService +from influxdb_client.service.secrets_service import SecretsService as SecretsService +from influxdb_client.service.setup_service import SetupService as SetupService 
+from influxdb_client.service.signin_service import SigninService as SigninService +from influxdb_client.service.signout_service import SignoutService as SignoutService +from influxdb_client.service.sources_service import SourcesService as SourcesService +from influxdb_client.service.tasks_service import TasksService as TasksService +from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService as TelegrafPluginsService +from influxdb_client.service.telegrafs_service import TelegrafsService as TelegrafsService +from influxdb_client.service.templates_service import TemplatesService as TemplatesService +from influxdb_client.service.users_service import UsersService as UsersService +from influxdb_client.service.variables_service import VariablesService as VariablesService +from influxdb_client.service.views_service import ViewsService as ViewsService +from influxdb_client.service.write_service import WriteService as WriteService diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/_base.pyi new file mode 100644 index 00000000..20d5f7e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/_base.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete + +from influxdb_client import Configuration + +LOGGERS_NAMES: Incomplete + +class _BaseClient: + url: str + token: str | None + org: str | None + default_tags: Incomplete | None + conf: _Configuration + auth_header_name: Incomplete | None + auth_header_value: Incomplete | None + retries: bool | Incomplete + profilers: Incomplete | None + def __init__( + self, + url: str, + token: str | None, + debug: bool | None = ..., + timeout: int = ..., + enable_gzip: bool = ..., + org: str | None = ..., + default_tags: dict[Incomplete, Incomplete] | None = ..., + http_client_logger: str | None = ..., + *, + verify_ssl: bool = ..., + ssl_ca_cert: Incomplete | None = ..., + cert_file: Incomplete | None = ..., + cert_key_file: Incomplete | None = ..., + cert_key_password: Incomplete | None = ..., + ssl_context: Incomplete | None = ..., + proxy: Incomplete | None = ..., + proxy_headers: Incomplete | None = ..., + connection_pool_maxsize: int = ..., + username: Incomplete | None = ..., + password: Incomplete | None = ..., + auth_basic: bool = ..., + retries: bool | Incomplete = ..., + profilers: Incomplete | None = ..., + ) -> None: ... + +class _BaseQueryApi: + default_dialect: Incomplete + def __init__(self, influxdb_client, query_options: Incomplete | None = ...) -> None: ... + +class _BaseWriteApi: + def __init__(self, influxdb_client, point_settings: Incomplete | None = ...) -> None: ... + +class _BaseDeleteApi: + def __init__(self, influxdb_client) -> None: ... + +class _Configuration(Configuration): + enable_gzip: bool + username: Incomplete + password: Incomplete + def __init__(self) -> None: ... + def update_request_header_params(self, path: str, params: dict[Incomplete, Incomplete]): ... + def update_request_body(self, path: str, body): ... 
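The `_BaseClient` stub above carries the constructor surface that `InfluxDBClient` and `InfluxDBClientAsync` (further down in this diff) inherit: everything after the bare `*` (`verify_ssl`, `ssl_ca_cert`, `cert_file`, `proxy`, `proxy_headers`, `connection_pool_maxsize`, `retries`, ...) is keyword-only. As an illustration only, not part of the vendored stub data, a minimal construction sketch with placeholder endpoint, token and paths:

```python
# Illustrative only, not part of the vendored stubs: a hypothetical client
# built with some of the keyword-only options declared on _BaseClient.
from influxdb_client import InfluxDBClient

client = InfluxDBClient(
    url="https://influxdb.example.com:8086",  # placeholder endpoint
    token="my-token",                         # placeholder token
    org="my-org",
    timeout=10_000,                           # this client counts timeouts in milliseconds
    verify_ssl=True,
    ssl_ca_cert="/etc/ssl/certs/ca.pem",      # placeholder CA bundle path
    proxy="http://proxy.example.com:3128",    # placeholder proxy
    connection_pool_maxsize=25,
)
client.close()
```

Because everything after the `*` in the stub is keyword-only, a type checker will reject attempts to pass these options positionally, which is exactly what the annotation encodes.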
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/authorizations_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/authorizations_api.pyi new file mode 100644 index 00000000..1e4ef9a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/authorizations_api.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +from influxdb_client import Authorization, Organization, User + +class AuthorizationsApi: + def __init__(self, influxdb_client) -> None: ... + def create_authorization( + self, + org_id: Incomplete | None = ..., + permissions: list[Incomplete] | None = ..., + authorization: Authorization | None = ..., + ) -> Authorization: ... + def find_authorization_by_id(self, auth_id: str) -> Authorization: ... + def find_authorizations(self, **kwargs): ... + def find_authorizations_by_user(self, user: User): ... + def find_authorizations_by_user_id(self, user_id: str): ... + def find_authorizations_by_user_name(self, user_name: str): ... + def find_authorizations_by_org(self, org: Organization): ... + def find_authorizations_by_org_name(self, org_name: str): ... + def find_authorizations_by_org_id(self, org_id: str): ... + def update_authorization(self, auth): ... + def clone_authorization(self, auth) -> Authorization: ... + def delete_authorization(self, auth): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/bucket_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/bucket_api.pyi new file mode 100644 index 00000000..1231020d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/bucket_api.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete + +from influxdb_client import Bucket + +class BucketsApi: + def __init__(self, influxdb_client) -> None: ... + def create_bucket( + self, + bucket: Incomplete | None = ..., + bucket_name: Incomplete | None = ..., + org_id: Incomplete | None = ..., + retention_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + org: Incomplete | None = ..., + ) -> Bucket: ... + def update_bucket(self, bucket: Bucket) -> Bucket: ... + def delete_bucket(self, bucket): ... + def find_bucket_by_id(self, id): ... + def find_bucket_by_name(self, bucket_name): ... + def find_buckets(self, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/delete_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/delete_api.pyi new file mode 100644 index 00000000..ad1c992f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/delete_api.pyi @@ -0,0 +1,10 @@ +from datetime import datetime + +from influxdb_client import Organization +from influxdb_client.client._base import _BaseDeleteApi + +class DeleteApi(_BaseDeleteApi): + def __init__(self, influxdb_client) -> None: ... + def delete( + self, start: str | datetime, stop: str | datetime, predicate: str, bucket: str, org: str | Organization | None = ... + ) -> None: ... 
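Of the API stubs above, `BucketsApi` and `DeleteApi` are the easiest to show end to end. A hedged sketch, assuming a reachable server and treating every identifier below as a placeholder:

```python
# Illustrative sketch, placeholder names throughout; assumes a reachable server.
from influxdb_client import InfluxDBClient

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    buckets_api = client.buckets_api()
    bucket = buckets_api.create_bucket(bucket_name="demo", description="scratch bucket")

    # DeleteApi.delete() is annotated "-> None"; start/stop accept RFC3339
    # strings or datetime objects per the stub above.
    client.delete_api().delete(
        "1970-01-01T00:00:00Z",
        "2100-01-01T00:00:00Z",
        predicate='_measurement="cpu"',
        bucket="demo",
        org="my-org",
    )

    buckets_api.delete_bucket(bucket)
```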
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/delete_api_async.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/delete_api_async.pyi new file mode 100644 index 00000000..803794e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/delete_api_async.pyi @@ -0,0 +1,10 @@ +from datetime import datetime + +from influxdb_client import Organization +from influxdb_client.client._base import _BaseDeleteApi + +class DeleteApiAsync(_BaseDeleteApi): + def __init__(self, influxdb_client) -> None: ... + async def delete( + self, start: str | datetime, stop: str | datetime, predicate: str, bucket: str, org: str | Organization | None = ... + ) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/exceptions.pyi new file mode 100644 index 00000000..c3ec2670 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/exceptions.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +from urllib3 import HTTPResponse + +logger: Incomplete + +class InfluxDBError(Exception): + response: Incomplete + message: Incomplete + retry_after: Incomplete + def __init__(self, response: HTTPResponse | None = ..., message: str | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/flux_csv_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/flux_csv_parser.pyi new file mode 100644 index 00000000..0a96f5f6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/flux_csv_parser.pyi @@ -0,0 +1,69 @@ +from _typeshed import Incomplete +from collections.abc import Generator +from enum import Enum +from typing_extensions import Self + +from influxdb_client.client.flux_table import TableList + +ANNOTATION_DEFAULT: str +ANNOTATION_GROUP: str +ANNOTATION_DATATYPE: str +ANNOTATIONS: Incomplete + +class FluxQueryException(Exception): + message: Incomplete + reference: Incomplete + def __init__(self, message, reference) -> None: ... + +class FluxCsvParserException(Exception): ... + +class FluxSerializationMode(Enum): + tables: int + stream: int + dataFrame: int + +class FluxResponseMetadataMode(Enum): + full: int + only_names: int + +class _FluxCsvParserMetadata: + table_index: int + table_id: int + start_new_table: bool + table: Incomplete + groups: Incomplete + parsing_state_error: bool + def __init__(self) -> None: ... + +class FluxCsvParser: + tables: Incomplete + def __init__( + self, + response, + serialization_mode: FluxSerializationMode, + data_frame_index: list[str] | None = ..., + query_options: Incomplete | None = ..., + response_metadata_mode: FluxResponseMetadataMode = ..., + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: ... + def generator(self) -> Generator[Incomplete, None, None]: ... 
+ def generator_async(self): ... + def parse_record(self, table_index, table, csv): ... + @staticmethod + def add_data_types(table, data_types) -> None: ... + @staticmethod + def add_groups(table, csv) -> None: ... + @staticmethod + def add_default_empty_values(table, default_values) -> None: ... + @staticmethod + def add_column_names_and_tags(table, csv) -> None: ... + def table_list(self) -> TableList: ... + +class _StreamReaderToWithAsyncRead: + response: Incomplete + decoder: Incomplete + def __init__(self, response) -> None: ... + async def read(self, size: int) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/flux_table.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/flux_table.pyi new file mode 100644 index 00000000..4c2ba139 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/flux_table.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete +from collections.abc import Iterator +from http.client import HTTPResponse +from json import JSONEncoder + +class FluxStructure: ... + +class FluxStructureEncoder(JSONEncoder): + def default(self, obj): ... + +class FluxTable(FluxStructure): + columns: Incomplete + records: Incomplete + def __init__(self) -> None: ... + def get_group_key(self): ... + def __iter__(self): ... + +class FluxColumn(FluxStructure): + default_value: Incomplete + group: Incomplete + data_type: Incomplete + label: Incomplete + index: Incomplete + def __init__( + self, + index: Incomplete | None = ..., + label: Incomplete | None = ..., + data_type: Incomplete | None = ..., + group: Incomplete | None = ..., + default_value: Incomplete | None = ..., + ) -> None: ... + +class FluxRecord(FluxStructure): + table: Incomplete + values: Incomplete + row: Incomplete + def __init__(self, table, values: Incomplete | None = ...) -> None: ... + def get_start(self): ... + def get_stop(self): ... + def get_time(self): ... + def get_value(self): ... + def get_field(self): ... + def get_measurement(self): ... + def __getitem__(self, key): ... + def __setitem__(self, key, value): ... + +class TableList(list[FluxTable]): + def to_values(self, columns: list[str] | None = ...) -> list[list[object]]: ... + def to_json(self, columns: list[str] | None = ..., **kwargs) -> str: ... + +class CSVIterator(Iterator[list[str]]): + delegate: Incomplete + def __init__(self, response: HTTPResponse) -> None: ... + def __iter__(self): ... + def __next__(self): ... + def to_values(self) -> list[list[str]]: ... 
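`flux_table.pyi` above declares the result structures a query hands back: a `TableList` of `FluxTable`s whose rows are `FluxRecord`s with `get_time()`/`get_field()`/`get_value()` accessors. A consumption sketch (illustrative only; bucket name and Flux text are placeholders):

```python
# Illustrative sketch: consuming query results through the structures above.
from influxdb_client import InfluxDBClient

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    tables = client.query_api().query('from(bucket: "demo") |> range(start: -1h)')

    # TableList is a list[FluxTable]; each table carries FluxRecord rows.
    for table in tables:
        for record in table.records:
            print(record.get_time(), record.get_field(), record.get_value())

    # TableList also offers flattened views of the same data.
    rows = tables.to_values(columns=["_time", "_field", "_value"])
    as_json = tables.to_json()
```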
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/influxdb_client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/influxdb_client.pyi new file mode 100644 index 00000000..6b525498 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/influxdb_client.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete +from typing_extensions import Self + +from influxdb_client import HealthCheck, InvokableScriptsApi, Ready +from influxdb_client.client._base import _BaseClient +from influxdb_client.client.authorizations_api import AuthorizationsApi +from influxdb_client.client.bucket_api import BucketsApi +from influxdb_client.client.delete_api import DeleteApi +from influxdb_client.client.labels_api import LabelsApi +from influxdb_client.client.organizations_api import OrganizationsApi +from influxdb_client.client.query_api import QueryApi, QueryOptions +from influxdb_client.client.tasks_api import TasksApi +from influxdb_client.client.users_api import UsersApi +from influxdb_client.client.write_api import PointSettings, WriteApi, WriteOptions + +logger: Incomplete + +class InfluxDBClient(_BaseClient): + api_client: Incomplete + def __init__( + self, + url: str, + token: str | None = ..., + debug: bool | None = ..., + timeout: int = ..., + enable_gzip: bool = ..., + org: str | None = ..., + default_tags: dict[Incomplete, Incomplete] | None = ..., + *, + verify_ssl: bool = ..., + ssl_ca_cert: Incomplete | None = ..., + cert_file: Incomplete | None = ..., + cert_key_file: Incomplete | None = ..., + cert_key_password: Incomplete | None = ..., + ssl_context: Incomplete | None = ..., + proxy: Incomplete | None = ..., + proxy_headers: Incomplete | None = ..., + connection_pool_maxsize: int = ..., + username: Incomplete | None = ..., + password: Incomplete | None = ..., + auth_basic: bool = ..., + retries: bool | Incomplete = ..., + profilers: Incomplete | None = ..., + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: object, exc_value: object, traceback: object) -> None: ... + @classmethod + def from_config_file(cls, config_file: str = ..., debug: Incomplete | None = ..., enable_gzip: bool = ..., **kwargs): ... + @classmethod + def from_env_properties(cls, debug: Incomplete | None = ..., enable_gzip: bool = ..., **kwargs): ... + def write_api(self, write_options: WriteOptions = ..., point_settings: PointSettings = ..., **kwargs) -> WriteApi: ... + def query_api(self, query_options: QueryOptions = ...) -> QueryApi: ... + def invokable_scripts_api(self) -> InvokableScriptsApi: ... + def close(self) -> None: ... + def __del__(self) -> None: ... + def buckets_api(self) -> BucketsApi: ... + def authorizations_api(self) -> AuthorizationsApi: ... + def users_api(self) -> UsersApi: ... + def organizations_api(self) -> OrganizationsApi: ... + def tasks_api(self) -> TasksApi: ... + def labels_api(self) -> LabelsApi: ... + def health(self) -> HealthCheck: ... + def ping(self) -> bool: ... + def version(self) -> str: ... + def build(self) -> str: ... + def ready(self) -> Ready: ... + def delete_api(self) -> DeleteApi: ... 
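Besides the main constructor, the `InfluxDBClient` stub above declares the `from_config_file()` and `from_env_properties()` alternate constructors plus the `ping()`/`version()`/`ready()` probes. A sketch of the config-file route; the `[influx2]` layout in the comment follows the client's usual documentation and all values are placeholders:

```python
# Illustrative sketch; config.ini and its values are placeholders. The client's
# docs usually show an [influx2] section like the one in this comment:
#
#   [influx2]
#   url   = http://localhost:8086
#   org   = my-org
#   token = my-token
from influxdb_client import InfluxDBClient

client = InfluxDBClient.from_config_file("config.ini")
try:
    print("reachable:", client.ping())   # annotated "-> bool"
    print("server:", client.version())   # annotated "-> str"
finally:
    client.close()
```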
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/influxdb_client_async.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/influxdb_client_async.pyi new file mode 100644 index 00000000..daad06d7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/influxdb_client_async.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete +from typing_extensions import Self + +from influxdb_client.client._base import _BaseClient +from influxdb_client.client.delete_api_async import DeleteApiAsync +from influxdb_client.client.query_api import QueryOptions +from influxdb_client.client.query_api_async import QueryApiAsync +from influxdb_client.client.write_api import PointSettings +from influxdb_client.client.write_api_async import WriteApiAsync + +logger: Incomplete + +class InfluxDBClientAsync(_BaseClient): + api_client: Incomplete + def __init__( + self, + url: str, + token: str | None = ..., + org: str | None = ..., + debug: bool | None = ..., + timeout: int = ..., + enable_gzip: bool = ..., + *, + verify_ssl: bool = ..., + ssl_ca_cert: Incomplete | None = ..., + cert_file: Incomplete | None = ..., + cert_key_file: Incomplete | None = ..., + cert_key_password: Incomplete | None = ..., + ssl_context: Incomplete | None = ..., + proxy: Incomplete | None = ..., + proxy_headers: Incomplete | None = ..., + connection_pool_maxsize: int = ..., + username: Incomplete | None = ..., + password: Incomplete | None = ..., + auth_basic: bool = ..., + retries: bool | Incomplete = ..., + profilers: Incomplete | None = ..., + ) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, exc_type: object, exc: object, tb: object) -> None: ... + async def close(self) -> None: ... + @classmethod + def from_config_file(cls, config_file: str = ..., debug: Incomplete | None = ..., enable_gzip: bool = ..., **kwargs): ... + @classmethod + def from_env_properties(cls, debug: Incomplete | None = ..., enable_gzip: bool = ..., **kwargs): ... + async def ping(self) -> bool: ... + async def version(self) -> str: ... + async def build(self) -> str: ... + def query_api(self, query_options: QueryOptions = ...) -> QueryApiAsync: ... + def write_api(self, point_settings: PointSettings = ...) -> WriteApiAsync: ... + def delete_api(self) -> DeleteApiAsync: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/invokable_scripts_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/invokable_scripts_api.pyi new file mode 100644 index 00000000..f4b85afb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/invokable_scripts_api.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from collections.abc import Generator, Iterator +from typing import Any + +from influxdb_client import Script, ScriptCreateRequest, ScriptUpdateRequest +from influxdb_client.client._base import _BaseQueryApi +from influxdb_client.client.flux_table import CSVIterator, FluxRecord, TableList + +class InvokableScriptsApi(_BaseQueryApi): + def __init__(self, influxdb_client) -> None: ... + def create_script(self, create_request: ScriptCreateRequest) -> Script: ... 
+ def update_script(self, script_id: str, update_request: ScriptUpdateRequest) -> Script: ... + def delete_script(self, script_id: str) -> None: ... + def find_scripts(self, **kwargs): ... + def invoke_script(self, script_id: str, params: dict[Incomplete, Incomplete] | None = ...) -> TableList: ... + def invoke_script_stream( + self, script_id: str, params: dict[Incomplete, Incomplete] | None = ... + ) -> Generator[FluxRecord, Any, None]: ... + def invoke_script_data_frame( + self, script_id: str, params: dict[Incomplete, Incomplete] | None = ..., data_frame_index: list[str] | None = ... + ): ... + def invoke_script_data_frame_stream( + self, script_id: str, params: dict[Incomplete, Incomplete] | None = ..., data_frame_index: list[str] | None = ... + ): ... + def invoke_script_csv(self, script_id: str, params: dict[Incomplete, Incomplete] | None = ...) -> CSVIterator: ... + def invoke_script_raw(self, script_id: str, params: dict[Incomplete, Incomplete] | None = ...) -> Iterator[list[str]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/labels_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/labels_api.pyi new file mode 100644 index 00000000..01d3e06e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/labels_api.pyi @@ -0,0 +1,11 @@ +from influxdb_client import Label + +class LabelsApi: + def __init__(self, influxdb_client) -> None: ... + def create_label(self, name: str, org_id: str, properties: dict[str, str] | None = ...) -> Label: ... + def update_label(self, label: Label): ... + def delete_label(self, label: str | Label): ... + def clone_label(self, cloned_name: str, label: Label) -> Label: ... + def find_labels(self, **kwargs) -> list[Label]: ... + def find_label_by_id(self, label_id: str): ... + def find_label_by_org(self, org_id) -> list[Label]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/logging_handler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/logging_handler.pyi new file mode 100644 index 00000000..28c8363a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/logging_handler.pyi @@ -0,0 +1,14 @@ +import logging +from _typeshed import Incomplete + +class InfluxLoggingHandler(logging.Handler): + DEFAULT_LOG_RECORD_KEYS: Incomplete + bucket: Incomplete + client: Incomplete + write_api: Incomplete + def __init__( + self, *, url, token, org, bucket, client_args: Incomplete | None = ..., write_api_args: Incomplete | None = ... + ) -> None: ... + def __del__(self) -> None: ... + def close(self) -> None: ... + def emit(self, record: logging.LogRecord) -> None: ... 
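`logging_handler.pyi` above types `InfluxLoggingHandler` as an ordinary `logging.Handler` subclass with keyword-only connection arguments. A wiring sketch, illustrative only, with placeholder credentials; logging line protocol built from a `Point` (declared later in this diff, in `client/write/point.pyi`) keeps the forwarded payload well-formed:

```python
# Illustrative wiring; URL, token, org and bucket are placeholders.
import logging

from influxdb_client import Point
from influxdb_client.client.logging_handler import InfluxLoggingHandler

handler = InfluxLoggingHandler(
    url="http://localhost:8086", token="my-token", org="my-org", bucket="logs"
)
logger = logging.getLogger("app.metrics")
logger.setLevel(logging.INFO)
logger.addHandler(handler)

# The handler forwards the formatted message, so emitting line protocol keeps
# the payload well-formed for the target bucket.
logger.info(Point("login").tag("service", "web").field("count", 1).to_line_protocol())

handler.close()
```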
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/organizations_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/organizations_api.pyi new file mode 100644 index 00000000..873cca7b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/organizations_api.pyi @@ -0,0 +1,10 @@ +from influxdb_client import Organization + +class OrganizationsApi: + def __init__(self, influxdb_client) -> None: ... + def me(self): ... + def find_organization(self, org_id): ... + def find_organizations(self, **kwargs): ... + def create_organization(self, name: str | None = ..., organization: Organization | None = ...) -> Organization: ... + def update_organization(self, organization: Organization) -> Organization: ... + def delete_organization(self, org_id: str): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/query_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/query_api.pyi new file mode 100644 index 00000000..0dc0c97a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/query_api.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Generator +from typing import Any + +from influxdb_client import Dialect +from influxdb_client.client._base import _BaseQueryApi +from influxdb_client.client.flux_table import CSVIterator, FluxRecord, TableList + +class QueryOptions: + profilers: Incomplete + profiler_callback: Incomplete + def __init__(self, profilers: list[str] | None = ..., profiler_callback: Callable[..., Incomplete] | None = ...) -> None: ... + +class QueryApi(_BaseQueryApi): + def __init__(self, influxdb_client, query_options=...) -> None: ... + def query_csv( + self, query: str, org: Incomplete | None = ..., dialect: Dialect = ..., params: dict[Incomplete, Incomplete] | None = ... + ) -> CSVIterator: ... + def query_raw( + self, query: str, org: Incomplete | None = ..., dialect=..., params: dict[Incomplete, Incomplete] | None = ... + ): ... + def query(self, query: str, org: Incomplete | None = ..., params: dict[Incomplete, Incomplete] | None = ...) -> TableList: ... + def query_stream( + self, query: str, org: Incomplete | None = ..., params: dict[Incomplete, Incomplete] | None = ... + ) -> Generator[FluxRecord, Any, None]: ... + def query_data_frame( + self, + query: str, + org: Incomplete | None = ..., + data_frame_index: list[str] | None = ..., + params: dict[Incomplete, Incomplete] | None = ..., + ): ... + def query_data_frame_stream( + self, + query: str, + org: Incomplete | None = ..., + data_frame_index: list[str] | None = ..., + params: dict[Incomplete, Incomplete] | None = ..., + ): ... + def __del__(self) -> None: ... 
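`query_api.pyi` above shows that `QueryOptions` is how Flux profilers are switched on and routed to a callback. A sketch under the usual assumptions (running server; profiler names and query text are placeholders):

```python
# Illustrative sketch; the profiler names and Flux text are placeholders.
from influxdb_client import InfluxDBClient
from influxdb_client.client.query_api import QueryOptions


def on_profiler_record(record):
    # profiler_callback receives the profiler rows the server emits
    print("profiler:", record.values)


with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    query_api = client.query_api(
        query_options=QueryOptions(
            profilers=["query", "operator"], profiler_callback=on_profiler_record
        )
    )
    tables = query_api.query('from(bucket: "demo") |> range(start: -5m) |> last()')
```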
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/query_api_async.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/query_api_async.pyi new file mode 100644 index 00000000..465e8165 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/query_api_async.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete +from collections.abc import AsyncGenerator + +from influxdb_client.client._base import _BaseQueryApi +from influxdb_client.client.flux_table import FluxRecord, TableList + +class QueryApiAsync(_BaseQueryApi): + def __init__(self, influxdb_client, query_options=...) -> None: ... + async def query( + self, query: str, org: Incomplete | None = ..., params: dict[Incomplete, Incomplete] | None = ... + ) -> TableList: ... + async def query_stream( + self, query: str, org: Incomplete | None = ..., params: dict[Incomplete, Incomplete] | None = ... + ) -> AsyncGenerator[FluxRecord, None]: ... + async def query_data_frame( + self, + query: str, + org: Incomplete | None = ..., + data_frame_index: list[str] | None = ..., + params: dict[Incomplete, Incomplete] | None = ..., + ): ... + async def query_data_frame_stream( + self, + query: str, + org: Incomplete | None = ..., + data_frame_index: list[str] | None = ..., + params: dict[Incomplete, Incomplete] | None = ..., + ): ... + async def query_raw( + self, query: str, org: Incomplete | None = ..., dialect=..., params: dict[Incomplete, Incomplete] | None = ... + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/tasks_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/tasks_api.pyi new file mode 100644 index 00000000..97d78972 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/tasks_api.pyi @@ -0,0 +1,32 @@ +from datetime import datetime + +from influxdb_client import LabelResponse, LogEvent, Run, Task, TaskCreateRequest, TaskUpdateRequest + +class TasksApi: + def __init__(self, influxdb_client) -> None: ... + def find_task_by_id(self, task_id) -> Task: ... + def find_tasks(self, **kwargs): ... + def create_task(self, task: Task | None = ..., task_create_request: TaskCreateRequest | None = ...) -> Task: ... + def create_task_every(self, name, flux, every, organization) -> Task: ... + def create_task_cron(self, name: str, flux: str, cron: str, org_id: str) -> Task: ... + def delete_task(self, task_id: str): ... + def update_task(self, task: Task) -> Task: ... + def update_task_request(self, task_id, task_update_request: TaskUpdateRequest) -> Task: ... + def clone_task(self, task: Task) -> Task: ... + def get_labels(self, task_id): ... + def add_label(self, label_id: str, task_id: str) -> LabelResponse: ... + def delete_label(self, label_id: str, task_id: str): ... + def get_members(self, task_id: str): ... + def add_member(self, member_id, task_id): ... + def delete_member(self, member_id, task_id): ... + def get_owners(self, task_id): ... + def add_owner(self, owner_id, task_id): ... + def delete_owner(self, owner_id, task_id): ... + def get_runs(self, task_id, **kwargs) -> list[Run]: ... + def get_run(self, task_id: str, run_id: str) -> Run: ... 
+ def get_run_logs(self, task_id: str, run_id: str) -> list[LogEvent]: ... + def run_manually(self, task_id: str, scheduled_for: datetime | None = ...): ... + def retry_run(self, task_id: str, run_id: str): ... + def cancel_run(self, task_id: str, run_id: str): ... + def get_logs(self, task_id: str) -> list[LogEvent]: ... + def find_tasks_by_user(self, task_user_id): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/users_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/users_api.pyi new file mode 100644 index 00000000..8be06535 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/users_api.pyi @@ -0,0 +1,10 @@ +from influxdb_client import User, UserResponse, Users + +class UsersApi: + def __init__(self, influxdb_client) -> None: ... + def me(self) -> User: ... + def create_user(self, name: str) -> User: ... + def update_user(self, user: User) -> UserResponse: ... + def update_password(self, user: str | User | UserResponse, password: str) -> None: ... + def delete_user(self, user: str | User | UserResponse) -> None: ... + def find_users(self, **kwargs) -> Users: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/date_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/date_utils.pyi new file mode 100644 index 00000000..0f5d8ef0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/date_utils.pyi @@ -0,0 +1,16 @@ +from datetime import datetime, timedelta, tzinfo +from threading import Lock + +date_helper: DateHelper | None +lock_: Lock + +class DateHelper: + timezone: tzinfo + def __init__(self, timezone: tzinfo = ...) -> None: ... + # This returns None in the implementation, but a datetime-compatible + # object is monkey-patched in at runtime. + def parse_date(self, date_string: str) -> datetime: ... + def to_nanoseconds(self, delta: timedelta) -> int: ... + def to_utc(self, value: datetime) -> datetime: ... + +def get_date_helper() -> DateHelper: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/date_utils_pandas.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/date_utils_pandas.pyi new file mode 100644 index 00000000..2cd7570c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/date_utils_pandas.pyi @@ -0,0 +1,5 @@ +from influxdb_client.client.util.date_utils import DateHelper + +class PandasDateTimeHelper(DateHelper): + def parse_date(self, date_string: str): ... + def to_nanoseconds(self, delta): ... 
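`date_utils.pyi` above is small enough to exercise directly; note the stub's own comment that the concrete `parse_date()` implementation is monkey-patched in at runtime. A short sketch:

```python
# Illustrative use of the annotated helpers; no server involved.
from datetime import datetime, timedelta, timezone

from influxdb_client.client.util.date_utils import get_date_helper

helper = get_date_helper()
start = helper.parse_date("2024-01-01T00:00:00Z")     # -> datetime
utc_now = helper.to_utc(datetime.now(timezone.utc))   # -> datetime
nanos = helper.to_nanoseconds(timedelta(minutes=5))   # -> int
```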
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/helpers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/helpers.pyi new file mode 100644 index 00000000..e5e7609b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/helpers.pyi @@ -0,0 +1 @@ +def get_org_query_param(org, client, required_id: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/multiprocessing_helper.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/multiprocessing_helper.pyi new file mode 100644 index 00000000..7bddf572 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/util/multiprocessing_helper.pyi @@ -0,0 +1,22 @@ +import multiprocessing +from _typeshed import Incomplete + +logger: Incomplete + +class _PoisonPill: ... + +class MultiprocessingWriter(multiprocessing.Process): + __started__: bool + __disposed__: bool + kwargs: Incomplete + client: Incomplete + write_api: Incomplete + queue_: Incomplete + def __init__(self, **kwargs) -> None: ... + def write(self, **kwargs) -> None: ... + def run(self) -> None: ... + def start(self) -> None: ... + def terminate(self) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_value, traceback) -> None: ... + def __del__(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/warnings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/warnings.pyi new file mode 100644 index 00000000..e198f507 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/warnings.pyi @@ -0,0 +1,7 @@ +class MissingPivotFunction(UserWarning): + @staticmethod + def print_warning(query: str): ... + +class CloudOnlyWarning(UserWarning): + @staticmethod + def print_warning(api_name: str, doc_url: str): ... 
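The two categories in `warnings.pyi` above are plain `UserWarning` subclasses, so the standard `warnings` filters apply to them unchanged, for example:

```python
# The warning categories above behave like any UserWarning subclass.
import warnings

from influxdb_client.client.warnings import CloudOnlyWarning, MissingPivotFunction

warnings.simplefilter("ignore", MissingPivotFunction)  # e.g. around query_data_frame()
warnings.simplefilter("ignore", CloudOnlyWarning)
```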
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/__init__.pyi new file mode 100644 index 00000000..4285a8aa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/__init__.pyi @@ -0,0 +1,41 @@ +from influxdb_client.service.authorizations_service import AuthorizationsService as AuthorizationsService +from influxdb_client.service.backup_service import BackupService as BackupService +from influxdb_client.service.bucket_schemas_service import BucketSchemasService as BucketSchemasService +from influxdb_client.service.buckets_service import BucketsService as BucketsService +from influxdb_client.service.cells_service import CellsService as CellsService +from influxdb_client.service.checks_service import ChecksService as ChecksService +from influxdb_client.service.config_service import ConfigService as ConfigService +from influxdb_client.service.dashboards_service import DashboardsService as DashboardsService +from influxdb_client.service.dbr_ps_service import DBRPsService as DBRPsService +from influxdb_client.service.delete_service import DeleteService as DeleteService +from influxdb_client.service.health_service import HealthService as HealthService +from influxdb_client.service.invokable_scripts_service import InvokableScriptsService as InvokableScriptsService +from influxdb_client.service.labels_service import LabelsService as LabelsService +from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService as LegacyAuthorizationsService +from influxdb_client.service.metrics_service import MetricsService as MetricsService +from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService as NotificationEndpointsService +from influxdb_client.service.notification_rules_service import NotificationRulesService as NotificationRulesService +from influxdb_client.service.organizations_service import OrganizationsService as OrganizationsService +from influxdb_client.service.ping_service import PingService as PingService +from influxdb_client.service.query_service import QueryService as QueryService +from influxdb_client.service.ready_service import ReadyService as ReadyService +from influxdb_client.service.remote_connections_service import RemoteConnectionsService as RemoteConnectionsService +from influxdb_client.service.replications_service import ReplicationsService as ReplicationsService +from influxdb_client.service.resources_service import ResourcesService as ResourcesService +from influxdb_client.service.restore_service import RestoreService as RestoreService +from influxdb_client.service.routes_service import RoutesService as RoutesService +from influxdb_client.service.rules_service import RulesService as RulesService +from influxdb_client.service.scraper_targets_service import ScraperTargetsService as ScraperTargetsService +from influxdb_client.service.secrets_service import SecretsService as SecretsService +from influxdb_client.service.setup_service import SetupService as SetupService +from influxdb_client.service.signin_service import SigninService as SigninService +from influxdb_client.service.signout_service import SignoutService as SignoutService +from influxdb_client.service.sources_service import SourcesService as SourcesService +from 
influxdb_client.service.tasks_service import TasksService as TasksService +from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService as TelegrafPluginsService +from influxdb_client.service.telegrafs_service import TelegrafsService as TelegrafsService +from influxdb_client.service.templates_service import TemplatesService as TemplatesService +from influxdb_client.service.users_service import UsersService as UsersService +from influxdb_client.service.variables_service import VariablesService as VariablesService +from influxdb_client.service.views_service import ViewsService as ViewsService +from influxdb_client.service.write_service import WriteService as WriteService diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/dataframe_serializer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/dataframe_serializer.pyi new file mode 100644 index 00000000..b5b4b7ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/dataframe_serializer.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +logger: Incomplete + +class DataframeSerializer: + data_frame: Incomplete + f: Incomplete + field_indexes: Incomplete + first_field_maybe_null: Incomplete + chunk_size: Incomplete + def __init__(self, data_frame, point_settings, precision=..., chunk_size: int | None = ..., **kwargs) -> None: ... + def serialize(self, chunk_idx: int | None = ...): ... + def number_of_chunks(self): ... + +def data_frame_to_list_of_points(data_frame, point_settings, precision=..., **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/point.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/point.pyi new file mode 100644 index 00000000..d18ecc28 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/point.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete, SupportsGetItem, SupportsItems +from collections.abc import Iterable +from datetime import datetime, timedelta +from numbers import Integral +from typing import Any +from typing_extensions import Literal, Self, TypeAlias + +from influxdb_client.domain.write_precision import _WritePrecision + +_Value: TypeAlias = Incomplete +_Time: TypeAlias = Integral | str | datetime | timedelta + +EPOCH: datetime +DEFAULT_WRITE_PRECISION: _WritePrecision + +class Point: + @staticmethod + def measurement(measurement: str) -> Point: ... + @staticmethod + def from_dict( + dictionary: SupportsGetItem[str, Any], + write_precision: _WritePrecision = ..., + *, + record_measurement_name: str | None = ..., + record_measurement_key: str = ..., + record_tag_keys: Iterable[str] | None = ..., + record_field_keys: Iterable[str] | None = ..., + record_time_key: str = ..., + fields: SupportsItems[str, Literal["int", "uint", "float"]] = ..., + ) -> Point: ... + def __init__(self, measurement_name: str) -> None: ... + def time(self, time: _Time, write_precision: _WritePrecision = ...) -> Self: ... + def tag(self, key: str, value: _Value) -> Self: ... + def field(self, field: str, value: _Value) -> Self: ... + def to_line_protocol(self, precision: _WritePrecision | None = ...) -> str: ... 
+ @property + def write_precision(self) -> _WritePrecision: ... + @classmethod + def set_str_rep(cls, rep_function: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/retry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/retry.pyi new file mode 100644 index 00000000..7a814a49 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write/retry.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete +from collections.abc import Callable + +from urllib3 import Retry + +logger: Incomplete + +class WritesRetry(Retry): + jitter_interval: Incomplete + total: Incomplete + retry_interval: Incomplete + max_retry_delay: Incomplete + max_retry_time: Incomplete + exponential_base: Incomplete + retry_timeout: Incomplete + retry_callback: Incomplete + def __init__( + self, + jitter_interval: int = ..., + max_retry_delay: int = ..., + exponential_base: int = ..., + max_retry_time: int = ..., + total: int = ..., + retry_interval: int = ..., + retry_callback: Callable[[Exception], int] | None = ..., + **kw, + ) -> None: ... + def new(self, **kw): ... + def is_retry(self, method, status_code, has_retry_after: bool = ...): ... + def get_backoff_time(self): ... + def get_retry_after(self, response): ... + def increment( + self, + method: Incomplete | None = ..., + url: Incomplete | None = ..., + response: Incomplete | None = ..., + error: Incomplete | None = ..., + _pool: Incomplete | None = ..., + _stacktrace: Incomplete | None = ..., + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write_api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write_api.pyi new file mode 100644 index 00000000..a87fde00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write_api.pyi @@ -0,0 +1,104 @@ +from _typeshed import Incomplete +from collections.abc import Iterable +from enum import Enum +from typing import Any +from typing_extensions import TypeAlias + +from influxdb_client.client._base import _BaseWriteApi +from influxdb_client.client.write.point import Point +from influxdb_client.domain.write_precision import _WritePrecision + +_DataClass: TypeAlias = Any # any dataclass +_NamedTuple: TypeAlias = tuple[Any, ...] # any NamedTuple +_Observable: TypeAlias = Any # reactivex.Observable + +logger: Incomplete + +class WriteType(Enum): + batching: int + asynchronous: int + synchronous: int + +class WriteOptions: + write_type: Incomplete + batch_size: Incomplete + flush_interval: Incomplete + jitter_interval: Incomplete + retry_interval: Incomplete + max_retries: Incomplete + max_retry_delay: Incomplete + max_retry_time: Incomplete + exponential_base: Incomplete + write_scheduler: Incomplete + def __init__( + self, + write_type: WriteType = ..., + batch_size: int = ..., + flush_interval: int = ..., + jitter_interval: int = ..., + retry_interval: int = ..., + max_retries: int = ..., + max_retry_delay: int = ..., + max_retry_time: int = ..., + exponential_base: int = ..., + write_scheduler=..., + ) -> None: ... + def to_retry_strategy(self, **kwargs): ... 
+ +SYNCHRONOUS: Incomplete +ASYNCHRONOUS: Incomplete + +class PointSettings: + defaultTags: Incomplete + def __init__(self, **default_tags) -> None: ... + def add_default_tag(self, key, value) -> None: ... + +class _BatchItemKey: + bucket: Incomplete + org: Incomplete + precision: Incomplete + def __init__(self, bucket, org, precision=...) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, o: object) -> bool: ... + +class _BatchItem: + key: Incomplete + data: Incomplete + size: Incomplete + def __init__(self, key: _BatchItemKey, data, size: int = ...) -> None: ... + def to_key_tuple(self) -> tuple[str, str, str]: ... + +class _BatchResponse: + data: Incomplete + exception: Incomplete + def __init__(self, data: _BatchItem, exception: Exception | None = ...) -> None: ... + +class WriteApi(_BaseWriteApi): + def __init__( + self, influxdb_client, write_options: WriteOptions = ..., point_settings: PointSettings = ..., **kwargs + ) -> None: ... + def write( + self, + bucket: str, + org: str | None = ..., + record: str + | Iterable[str] + | Point + | Iterable[Point] + | dict[Incomplete, Incomplete] + | Iterable[dict[Incomplete, Incomplete]] + | bytes + | Iterable[bytes] + | _Observable + | _NamedTuple + | Iterable[_NamedTuple] + | _DataClass + | Iterable[_DataClass] = ..., + write_precision: _WritePrecision = ..., + **kwargs, + ) -> Any: ... + def flush(self) -> None: ... + def close(self) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + def __del__(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write_api_async.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write_api_async.pyi new file mode 100644 index 00000000..f33896d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/client/write_api_async.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import Any +from typing_extensions import TypeAlias + +from influxdb_client.client._base import _BaseWriteApi +from influxdb_client.client.write.point import Point +from influxdb_client.client.write_api import PointSettings +from influxdb_client.domain.write_precision import _WritePrecision + +_DataClass: TypeAlias = Any # any dataclass +_NamedTuple: TypeAlias = tuple[Any, ...] # any NamedTuple + +logger: Incomplete + +class WriteApiAsync(_BaseWriteApi): + def __init__(self, influxdb_client, point_settings: PointSettings = ...) -> None: ... + async def write( + self, + bucket: str, + org: str | None = ..., + record: str + | Iterable[str] + | Point + | Iterable[Point] + | dict[Incomplete, Incomplete] + | Iterable[dict[Incomplete, Incomplete]] + | bytes + | Iterable[bytes] + | _NamedTuple + | Iterable[_NamedTuple] + | _DataClass + | Iterable[_DataClass] = ..., + write_precision: _WritePrecision = ..., + **kwargs, + ) -> bool: ... 
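(Editor's note, not part of the patch: a minimal usage sketch of the write path these vendored stubs describe. Point.measurement/tag/field and WriteApi.write match the stub signatures above; InfluxDBClient, its write_api() factory and close() are assumed from the wider influxdb-client package, whose stubs appear elsewhere in this patch, and the URL, token, org and bucket names are purely hypothetical.)

from influxdb_client import InfluxDBClient, Point  # assumed package-level re-exports
from influxdb_client.client.write_api import SYNCHRONOUS

# Hypothetical connection details; InfluxDBClient and write_api() are assumed
# from the rest of the package, not from the stubs in this hunk.
client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")
write_api = client.write_api(write_options=SYNCHRONOUS)

# Point's fluent builder matches the stubbed signatures:
# measurement() -> Point, tag()/field()/time() -> Self.
point = Point.measurement("cpu").tag("host", "server01").field("usage", 12.5)

# WriteApi.write(bucket: str, org: str | None = ..., record=...) per the stub above.
write_api.write(bucket="my-bucket", org="my-org", record=point)
client.close()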
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/configuration.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/configuration.pyi new file mode 100644 index 00000000..eb52aa69 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/configuration.pyi @@ -0,0 +1,50 @@ +from _typeshed import Incomplete + +class TypeWithDefault(type): + def __init__(cls, name, bases, dct) -> None: ... + def __call__(cls): ... + def set_default(cls, default) -> None: ... + +class Configuration(metaclass=TypeWithDefault): + host: str + temp_folder_path: Incomplete + api_key: Incomplete + api_key_prefix: Incomplete + username: str + password: str + loggers: Incomplete + logger_stream_handler: Incomplete + logger_file_handler: Incomplete + verify_ssl: bool + ssl_ca_cert: Incomplete + cert_file: Incomplete + cert_key_file: Incomplete + cert_key_password: Incomplete + assert_hostname: Incomplete + ssl_context: Incomplete + connection_pool_maxsize: Incomplete + timeout: Incomplete + auth_basic: bool + proxy: Incomplete + proxy_headers: Incomplete + safe_chars_for_path_param: str + logger_formatter: Incomplete + def __init__(self) -> None: ... + @property + def logger_file(self): ... + @logger_file.setter + def logger_file(self, value) -> None: ... + @property + def debug(self): ... + @debug.setter + def debug(self, value): ... + @property + def logger_format(self): ... + @logger_format.setter + def logger_format(self, value) -> None: ... + def get_api_key_with_prefix(self, identifier): ... + def get_basic_auth_token(self): ... + def auth_settings(self): ... + def to_debug_report(self): ... + def update_request_header_params(self, path: str, params: dict[Incomplete, Incomplete]): ... + def update_request_body(self, path: str, body): ... 
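(Editor's note, not part of the patch: configuration.pyi above types a generated Configuration class whose TypeWithDefault metaclass keeps a process-wide default instance. A minimal sketch of the attributes the stub declares follows; configuring the client this way rather than through InfluxDBClient keyword arguments is an assumption for illustration only.)

from influxdb_client.configuration import Configuration

conf = Configuration()      # __init__(self) -> None per the stub
conf.verify_ssl = False     # plain bool attribute declared in the stub
conf.debug = True           # property with a setter in the stub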
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/__init__.pyi new file mode 100644 index 00000000..def5bd65 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/__init__.pyi @@ -0,0 +1,373 @@ +from influxdb_client.domain.add_resource_member_request_body import AddResourceMemberRequestBody as AddResourceMemberRequestBody +from influxdb_client.domain.analyze_query_response import AnalyzeQueryResponse as AnalyzeQueryResponse +from influxdb_client.domain.analyze_query_response_errors import AnalyzeQueryResponseErrors as AnalyzeQueryResponseErrors +from influxdb_client.domain.array_expression import ArrayExpression as ArrayExpression +from influxdb_client.domain.ast_response import ASTResponse as ASTResponse +from influxdb_client.domain.authorization import Authorization as Authorization +from influxdb_client.domain.authorization_post_request import AuthorizationPostRequest as AuthorizationPostRequest +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest as AuthorizationUpdateRequest +from influxdb_client.domain.authorizations import Authorizations as Authorizations +from influxdb_client.domain.axes import Axes as Axes +from influxdb_client.domain.axis import Axis as Axis +from influxdb_client.domain.axis_scale import AxisScale as AxisScale +from influxdb_client.domain.bad_statement import BadStatement as BadStatement +from influxdb_client.domain.band_view_properties import BandViewProperties as BandViewProperties +from influxdb_client.domain.binary_expression import BinaryExpression as BinaryExpression +from influxdb_client.domain.block import Block as Block +from influxdb_client.domain.boolean_literal import BooleanLiteral as BooleanLiteral +from influxdb_client.domain.bucket import Bucket as Bucket +from influxdb_client.domain.bucket_links import BucketLinks as BucketLinks +from influxdb_client.domain.bucket_metadata_manifest import BucketMetadataManifest as BucketMetadataManifest +from influxdb_client.domain.bucket_retention_rules import BucketRetentionRules as BucketRetentionRules +from influxdb_client.domain.bucket_shard_mapping import BucketShardMapping as BucketShardMapping +from influxdb_client.domain.buckets import Buckets as Buckets +from influxdb_client.domain.builder_aggregate_function_type import BuilderAggregateFunctionType as BuilderAggregateFunctionType +from influxdb_client.domain.builder_config import BuilderConfig as BuilderConfig +from influxdb_client.domain.builder_config_aggregate_window import BuilderConfigAggregateWindow as BuilderConfigAggregateWindow +from influxdb_client.domain.builder_functions_type import BuilderFunctionsType as BuilderFunctionsType +from influxdb_client.domain.builder_tags_type import BuilderTagsType as BuilderTagsType +from influxdb_client.domain.builtin_statement import BuiltinStatement as BuiltinStatement +from influxdb_client.domain.call_expression import CallExpression as CallExpression +from influxdb_client.domain.cell import Cell as Cell +from influxdb_client.domain.cell_links import CellLinks as CellLinks +from influxdb_client.domain.cell_update import CellUpdate as CellUpdate +from influxdb_client.domain.cell_with_view_properties import CellWithViewProperties as CellWithViewProperties +from influxdb_client.domain.check 
import Check as Check +from influxdb_client.domain.check_base import CheckBase as CheckBase +from influxdb_client.domain.check_base_links import CheckBaseLinks as CheckBaseLinks +from influxdb_client.domain.check_discriminator import CheckDiscriminator as CheckDiscriminator +from influxdb_client.domain.check_patch import CheckPatch as CheckPatch +from influxdb_client.domain.check_status_level import CheckStatusLevel as CheckStatusLevel +from influxdb_client.domain.check_view_properties import CheckViewProperties as CheckViewProperties +from influxdb_client.domain.checks import Checks as Checks +from influxdb_client.domain.column_data_type import ColumnDataType as ColumnDataType +from influxdb_client.domain.column_semantic_type import ColumnSemanticType as ColumnSemanticType +from influxdb_client.domain.conditional_expression import ConditionalExpression as ConditionalExpression +from influxdb_client.domain.config import Config as Config +from influxdb_client.domain.constant_variable_properties import ConstantVariableProperties as ConstantVariableProperties +from influxdb_client.domain.create_cell import CreateCell as CreateCell +from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest as CreateDashboardRequest +from influxdb_client.domain.custom_check import CustomCheck as CustomCheck +from influxdb_client.domain.dashboard import Dashboard as Dashboard +from influxdb_client.domain.dashboard_color import DashboardColor as DashboardColor +from influxdb_client.domain.dashboard_query import DashboardQuery as DashboardQuery +from influxdb_client.domain.dashboard_with_view_properties import DashboardWithViewProperties as DashboardWithViewProperties +from influxdb_client.domain.dashboards import Dashboards as Dashboards +from influxdb_client.domain.date_time_literal import DateTimeLiteral as DateTimeLiteral +from influxdb_client.domain.dbr_ps import DBRPs as DBRPs +from influxdb_client.domain.dbrp import DBRP as DBRP +from influxdb_client.domain.dbrp_create import DBRPCreate as DBRPCreate +from influxdb_client.domain.dbrp_get import DBRPGet as DBRPGet +from influxdb_client.domain.dbrp_update import DBRPUpdate as DBRPUpdate +from influxdb_client.domain.deadman_check import DeadmanCheck as DeadmanCheck +from influxdb_client.domain.decimal_places import DecimalPlaces as DecimalPlaces +from influxdb_client.domain.delete_predicate_request import DeletePredicateRequest as DeletePredicateRequest +from influxdb_client.domain.dialect import Dialect as Dialect +from influxdb_client.domain.dict_expression import DictExpression as DictExpression +from influxdb_client.domain.dict_item import DictItem as DictItem +from influxdb_client.domain.duration import Duration as Duration +from influxdb_client.domain.duration_literal import DurationLiteral as DurationLiteral +from influxdb_client.domain.error import Error as Error +from influxdb_client.domain.expression import Expression as Expression +from influxdb_client.domain.expression_statement import ExpressionStatement as ExpressionStatement +from influxdb_client.domain.field import Field as Field +from influxdb_client.domain.file import File as File +from influxdb_client.domain.float_literal import FloatLiteral as FloatLiteral +from influxdb_client.domain.flux_response import FluxResponse as FluxResponse +from influxdb_client.domain.flux_suggestion import FluxSuggestion as FluxSuggestion +from influxdb_client.domain.flux_suggestions import FluxSuggestions as FluxSuggestions +from influxdb_client.domain.function_expression import 
FunctionExpression as FunctionExpression +from influxdb_client.domain.gauge_view_properties import GaugeViewProperties as GaugeViewProperties +from influxdb_client.domain.greater_threshold import GreaterThreshold as GreaterThreshold +from influxdb_client.domain.health_check import HealthCheck as HealthCheck +from influxdb_client.domain.heatmap_view_properties import HeatmapViewProperties as HeatmapViewProperties +from influxdb_client.domain.histogram_view_properties import HistogramViewProperties as HistogramViewProperties +from influxdb_client.domain.http_notification_endpoint import HTTPNotificationEndpoint as HTTPNotificationEndpoint +from influxdb_client.domain.http_notification_rule import HTTPNotificationRule as HTTPNotificationRule +from influxdb_client.domain.http_notification_rule_base import HTTPNotificationRuleBase as HTTPNotificationRuleBase +from influxdb_client.domain.identifier import Identifier as Identifier +from influxdb_client.domain.import_declaration import ImportDeclaration as ImportDeclaration +from influxdb_client.domain.index_expression import IndexExpression as IndexExpression +from influxdb_client.domain.integer_literal import IntegerLiteral as IntegerLiteral +from influxdb_client.domain.is_onboarding import IsOnboarding as IsOnboarding +from influxdb_client.domain.label import Label as Label +from influxdb_client.domain.label_create_request import LabelCreateRequest as LabelCreateRequest +from influxdb_client.domain.label_mapping import LabelMapping as LabelMapping +from influxdb_client.domain.label_response import LabelResponse as LabelResponse +from influxdb_client.domain.label_update import LabelUpdate as LabelUpdate +from influxdb_client.domain.labels_response import LabelsResponse as LabelsResponse +from influxdb_client.domain.language_request import LanguageRequest as LanguageRequest +from influxdb_client.domain.legacy_authorization_post_request import ( + LegacyAuthorizationPostRequest as LegacyAuthorizationPostRequest, +) +from influxdb_client.domain.lesser_threshold import LesserThreshold as LesserThreshold +from influxdb_client.domain.line_plus_single_stat_properties import LinePlusSingleStatProperties as LinePlusSingleStatProperties +from influxdb_client.domain.line_protocol_error import LineProtocolError as LineProtocolError +from influxdb_client.domain.line_protocol_length_error import LineProtocolLengthError as LineProtocolLengthError +from influxdb_client.domain.links import Links as Links +from influxdb_client.domain.list_stacks_response import ListStacksResponse as ListStacksResponse +from influxdb_client.domain.log_event import LogEvent as LogEvent +from influxdb_client.domain.logical_expression import LogicalExpression as LogicalExpression +from influxdb_client.domain.logs import Logs as Logs +from influxdb_client.domain.map_variable_properties import MapVariableProperties as MapVariableProperties +from influxdb_client.domain.markdown_view_properties import MarkdownViewProperties as MarkdownViewProperties +from influxdb_client.domain.measurement_schema import MeasurementSchema as MeasurementSchema +from influxdb_client.domain.measurement_schema_column import MeasurementSchemaColumn as MeasurementSchemaColumn +from influxdb_client.domain.measurement_schema_create_request import ( + MeasurementSchemaCreateRequest as MeasurementSchemaCreateRequest, +) +from influxdb_client.domain.measurement_schema_list import MeasurementSchemaList as MeasurementSchemaList +from influxdb_client.domain.measurement_schema_update_request import ( + 
MeasurementSchemaUpdateRequest as MeasurementSchemaUpdateRequest, +) +from influxdb_client.domain.member_assignment import MemberAssignment as MemberAssignment +from influxdb_client.domain.member_expression import MemberExpression as MemberExpression +from influxdb_client.domain.metadata_backup import MetadataBackup as MetadataBackup +from influxdb_client.domain.model_property import ModelProperty as ModelProperty +from influxdb_client.domain.mosaic_view_properties import MosaicViewProperties as MosaicViewProperties +from influxdb_client.domain.node import Node as Node +from influxdb_client.domain.notification_endpoint import NotificationEndpoint as NotificationEndpoint +from influxdb_client.domain.notification_endpoint_base import NotificationEndpointBase as NotificationEndpointBase +from influxdb_client.domain.notification_endpoint_base_links import NotificationEndpointBaseLinks as NotificationEndpointBaseLinks +from influxdb_client.domain.notification_endpoint_discriminator import ( + NotificationEndpointDiscriminator as NotificationEndpointDiscriminator, +) +from influxdb_client.domain.notification_endpoint_type import NotificationEndpointType as NotificationEndpointType +from influxdb_client.domain.notification_endpoint_update import NotificationEndpointUpdate as NotificationEndpointUpdate +from influxdb_client.domain.notification_endpoints import NotificationEndpoints as NotificationEndpoints +from influxdb_client.domain.notification_rule import NotificationRule as NotificationRule +from influxdb_client.domain.notification_rule_base import NotificationRuleBase as NotificationRuleBase +from influxdb_client.domain.notification_rule_base_links import NotificationRuleBaseLinks as NotificationRuleBaseLinks +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator as NotificationRuleDiscriminator +from influxdb_client.domain.notification_rule_update import NotificationRuleUpdate as NotificationRuleUpdate +from influxdb_client.domain.notification_rules import NotificationRules as NotificationRules +from influxdb_client.domain.object_expression import ObjectExpression as ObjectExpression +from influxdb_client.domain.onboarding_request import OnboardingRequest as OnboardingRequest +from influxdb_client.domain.onboarding_response import OnboardingResponse as OnboardingResponse +from influxdb_client.domain.option_statement import OptionStatement as OptionStatement +from influxdb_client.domain.organization import Organization as Organization +from influxdb_client.domain.organization_links import OrganizationLinks as OrganizationLinks +from influxdb_client.domain.organizations import Organizations as Organizations +from influxdb_client.domain.package import Package as Package +from influxdb_client.domain.package_clause import PackageClause as PackageClause +from influxdb_client.domain.pager_duty_notification_endpoint import PagerDutyNotificationEndpoint as PagerDutyNotificationEndpoint +from influxdb_client.domain.pager_duty_notification_rule import PagerDutyNotificationRule as PagerDutyNotificationRule +from influxdb_client.domain.pager_duty_notification_rule_base import ( + PagerDutyNotificationRuleBase as PagerDutyNotificationRuleBase, +) +from influxdb_client.domain.paren_expression import ParenExpression as ParenExpression +from influxdb_client.domain.password_reset_body import PasswordResetBody as PasswordResetBody +from influxdb_client.domain.patch_bucket_request import PatchBucketRequest as PatchBucketRequest +from 
influxdb_client.domain.patch_dashboard_request import PatchDashboardRequest as PatchDashboardRequest +from influxdb_client.domain.patch_organization_request import PatchOrganizationRequest as PatchOrganizationRequest +from influxdb_client.domain.patch_retention_rule import PatchRetentionRule as PatchRetentionRule +from influxdb_client.domain.patch_stack_request import PatchStackRequest as PatchStackRequest +from influxdb_client.domain.patch_stack_request_additional_resources import ( + PatchStackRequestAdditionalResources as PatchStackRequestAdditionalResources, +) +from influxdb_client.domain.permission import Permission as Permission +from influxdb_client.domain.permission_resource import PermissionResource as PermissionResource +from influxdb_client.domain.pipe_expression import PipeExpression as PipeExpression +from influxdb_client.domain.pipe_literal import PipeLiteral as PipeLiteral +from influxdb_client.domain.post_bucket_request import PostBucketRequest as PostBucketRequest +from influxdb_client.domain.post_check import PostCheck as PostCheck +from influxdb_client.domain.post_notification_endpoint import PostNotificationEndpoint as PostNotificationEndpoint +from influxdb_client.domain.post_notification_rule import PostNotificationRule as PostNotificationRule +from influxdb_client.domain.post_organization_request import PostOrganizationRequest as PostOrganizationRequest +from influxdb_client.domain.post_restore_kv_response import PostRestoreKVResponse as PostRestoreKVResponse +from influxdb_client.domain.post_stack_request import PostStackRequest as PostStackRequest +from influxdb_client.domain.property_key import PropertyKey as PropertyKey +from influxdb_client.domain.query import Query as Query +from influxdb_client.domain.query_edit_mode import QueryEditMode as QueryEditMode +from influxdb_client.domain.query_variable_properties import QueryVariableProperties as QueryVariableProperties +from influxdb_client.domain.query_variable_properties_values import QueryVariablePropertiesValues as QueryVariablePropertiesValues +from influxdb_client.domain.range_threshold import RangeThreshold as RangeThreshold +from influxdb_client.domain.ready import Ready as Ready +from influxdb_client.domain.regexp_literal import RegexpLiteral as RegexpLiteral +from influxdb_client.domain.remote_connection import RemoteConnection as RemoteConnection +from influxdb_client.domain.remote_connection_creation_request import ( + RemoteConnectionCreationRequest as RemoteConnectionCreationRequest, +) +from influxdb_client.domain.remote_connection_update_request import RemoteConnectionUpdateRequest as RemoteConnectionUpdateRequest +from influxdb_client.domain.remote_connections import RemoteConnections as RemoteConnections +from influxdb_client.domain.renamable_field import RenamableField as RenamableField +from influxdb_client.domain.replication import Replication as Replication +from influxdb_client.domain.replication_creation_request import ReplicationCreationRequest as ReplicationCreationRequest +from influxdb_client.domain.replication_update_request import ReplicationUpdateRequest as ReplicationUpdateRequest +from influxdb_client.domain.replications import Replications as Replications +from influxdb_client.domain.resource_member import ResourceMember as ResourceMember +from influxdb_client.domain.resource_members import ResourceMembers as ResourceMembers +from influxdb_client.domain.resource_members_links import ResourceMembersLinks as ResourceMembersLinks +from influxdb_client.domain.resource_owner import 
ResourceOwner as ResourceOwner +from influxdb_client.domain.resource_owners import ResourceOwners as ResourceOwners +from influxdb_client.domain.restored_bucket_mappings import RestoredBucketMappings as RestoredBucketMappings +from influxdb_client.domain.retention_policy_manifest import RetentionPolicyManifest as RetentionPolicyManifest +from influxdb_client.domain.return_statement import ReturnStatement as ReturnStatement +from influxdb_client.domain.routes import Routes as Routes +from influxdb_client.domain.routes_external import RoutesExternal as RoutesExternal +from influxdb_client.domain.routes_query import RoutesQuery as RoutesQuery +from influxdb_client.domain.routes_system import RoutesSystem as RoutesSystem +from influxdb_client.domain.rule_status_level import RuleStatusLevel as RuleStatusLevel +from influxdb_client.domain.run import Run as Run +from influxdb_client.domain.run_links import RunLinks as RunLinks +from influxdb_client.domain.run_manually import RunManually as RunManually +from influxdb_client.domain.runs import Runs as Runs +from influxdb_client.domain.scatter_view_properties import ScatterViewProperties as ScatterViewProperties +from influxdb_client.domain.schema_type import SchemaType as SchemaType +from influxdb_client.domain.scraper_target_request import ScraperTargetRequest as ScraperTargetRequest +from influxdb_client.domain.scraper_target_response import ScraperTargetResponse as ScraperTargetResponse +from influxdb_client.domain.scraper_target_responses import ScraperTargetResponses as ScraperTargetResponses +from influxdb_client.domain.script import Script as Script +from influxdb_client.domain.script_create_request import ScriptCreateRequest as ScriptCreateRequest +from influxdb_client.domain.script_invocation_params import ScriptInvocationParams as ScriptInvocationParams +from influxdb_client.domain.script_language import ScriptLanguage as ScriptLanguage +from influxdb_client.domain.script_update_request import ScriptUpdateRequest as ScriptUpdateRequest +from influxdb_client.domain.scripts import Scripts as Scripts +from influxdb_client.domain.secret_keys import SecretKeys as SecretKeys +from influxdb_client.domain.secret_keys_response import SecretKeysResponse as SecretKeysResponse +from influxdb_client.domain.shard_group_manifest import ShardGroupManifest as ShardGroupManifest +from influxdb_client.domain.shard_manifest import ShardManifest as ShardManifest +from influxdb_client.domain.shard_owner import ShardOwner as ShardOwner +from influxdb_client.domain.simple_table_view_properties import SimpleTableViewProperties as SimpleTableViewProperties +from influxdb_client.domain.single_stat_view_properties import SingleStatViewProperties as SingleStatViewProperties +from influxdb_client.domain.slack_notification_endpoint import SlackNotificationEndpoint as SlackNotificationEndpoint +from influxdb_client.domain.slack_notification_rule import SlackNotificationRule as SlackNotificationRule +from influxdb_client.domain.slack_notification_rule_base import SlackNotificationRuleBase as SlackNotificationRuleBase +from influxdb_client.domain.smtp_notification_rule import SMTPNotificationRule as SMTPNotificationRule +from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase as SMTPNotificationRuleBase +from influxdb_client.domain.source import Source as Source +from influxdb_client.domain.source_links import SourceLinks as SourceLinks +from influxdb_client.domain.sources import Sources as Sources +from influxdb_client.domain.stack import 
Stack as Stack +from influxdb_client.domain.stack_associations import StackAssociations as StackAssociations +from influxdb_client.domain.stack_events import StackEvents as StackEvents +from influxdb_client.domain.stack_links import StackLinks as StackLinks +from influxdb_client.domain.stack_resources import StackResources as StackResources +from influxdb_client.domain.statement import Statement as Statement +from influxdb_client.domain.static_legend import StaticLegend as StaticLegend +from influxdb_client.domain.status_rule import StatusRule as StatusRule +from influxdb_client.domain.string_literal import StringLiteral as StringLiteral +from influxdb_client.domain.subscription_manifest import SubscriptionManifest as SubscriptionManifest +from influxdb_client.domain.table_view_properties import TableViewProperties as TableViewProperties +from influxdb_client.domain.table_view_properties_table_options import ( + TableViewPropertiesTableOptions as TableViewPropertiesTableOptions, +) +from influxdb_client.domain.tag_rule import TagRule as TagRule +from influxdb_client.domain.task import Task as Task +from influxdb_client.domain.task_create_request import TaskCreateRequest as TaskCreateRequest +from influxdb_client.domain.task_links import TaskLinks as TaskLinks +from influxdb_client.domain.task_status_type import TaskStatusType as TaskStatusType +from influxdb_client.domain.task_update_request import TaskUpdateRequest as TaskUpdateRequest +from influxdb_client.domain.tasks import Tasks as Tasks +from influxdb_client.domain.telegraf import Telegraf as Telegraf +from influxdb_client.domain.telegraf_plugin import TelegrafPlugin as TelegrafPlugin +from influxdb_client.domain.telegraf_plugin_request import TelegrafPluginRequest as TelegrafPluginRequest +from influxdb_client.domain.telegraf_plugin_request_plugins import TelegrafPluginRequestPlugins as TelegrafPluginRequestPlugins +from influxdb_client.domain.telegraf_plugins import TelegrafPlugins as TelegrafPlugins +from influxdb_client.domain.telegraf_request import TelegrafRequest as TelegrafRequest +from influxdb_client.domain.telegraf_request_metadata import TelegrafRequestMetadata as TelegrafRequestMetadata +from influxdb_client.domain.telegrafs import Telegrafs as Telegrafs +from influxdb_client.domain.telegram_notification_endpoint import TelegramNotificationEndpoint as TelegramNotificationEndpoint +from influxdb_client.domain.telegram_notification_rule import TelegramNotificationRule as TelegramNotificationRule +from influxdb_client.domain.telegram_notification_rule_base import TelegramNotificationRuleBase as TelegramNotificationRuleBase +from influxdb_client.domain.template_apply import TemplateApply as TemplateApply +from influxdb_client.domain.template_apply_remotes import TemplateApplyRemotes as TemplateApplyRemotes +from influxdb_client.domain.template_apply_template import TemplateApplyTemplate as TemplateApplyTemplate +from influxdb_client.domain.template_chart import TemplateChart as TemplateChart +from influxdb_client.domain.template_export_by_id import TemplateExportByID as TemplateExportByID +from influxdb_client.domain.template_export_by_id_org_ids import TemplateExportByIDOrgIDs as TemplateExportByIDOrgIDs +from influxdb_client.domain.template_export_by_id_resource_filters import ( + TemplateExportByIDResourceFilters as TemplateExportByIDResourceFilters, +) +from influxdb_client.domain.template_export_by_id_resources import TemplateExportByIDResources as TemplateExportByIDResources +from 
influxdb_client.domain.template_export_by_name import TemplateExportByName as TemplateExportByName +from influxdb_client.domain.template_export_by_name_resources import ( + TemplateExportByNameResources as TemplateExportByNameResources, +) +from influxdb_client.domain.template_kind import TemplateKind as TemplateKind +from influxdb_client.domain.template_summary import TemplateSummary as TemplateSummary +from influxdb_client.domain.template_summary_diff import TemplateSummaryDiff as TemplateSummaryDiff +from influxdb_client.domain.template_summary_diff_buckets import TemplateSummaryDiffBuckets as TemplateSummaryDiffBuckets +from influxdb_client.domain.template_summary_diff_buckets_new_old import ( + TemplateSummaryDiffBucketsNewOld as TemplateSummaryDiffBucketsNewOld, +) +from influxdb_client.domain.template_summary_diff_checks import TemplateSummaryDiffChecks as TemplateSummaryDiffChecks +from influxdb_client.domain.template_summary_diff_dashboards import TemplateSummaryDiffDashboards as TemplateSummaryDiffDashboards +from influxdb_client.domain.template_summary_diff_dashboards_new_old import ( + TemplateSummaryDiffDashboardsNewOld as TemplateSummaryDiffDashboardsNewOld, +) +from influxdb_client.domain.template_summary_diff_label_mappings import ( + TemplateSummaryDiffLabelMappings as TemplateSummaryDiffLabelMappings, +) +from influxdb_client.domain.template_summary_diff_labels import TemplateSummaryDiffLabels as TemplateSummaryDiffLabels +from influxdb_client.domain.template_summary_diff_labels_new_old import ( + TemplateSummaryDiffLabelsNewOld as TemplateSummaryDiffLabelsNewOld, +) +from influxdb_client.domain.template_summary_diff_notification_endpoints import ( + TemplateSummaryDiffNotificationEndpoints as TemplateSummaryDiffNotificationEndpoints, +) +from influxdb_client.domain.template_summary_diff_notification_rules import ( + TemplateSummaryDiffNotificationRules as TemplateSummaryDiffNotificationRules, +) +from influxdb_client.domain.template_summary_diff_notification_rules_new_old import ( + TemplateSummaryDiffNotificationRulesNewOld as TemplateSummaryDiffNotificationRulesNewOld, +) +from influxdb_client.domain.template_summary_diff_tasks import TemplateSummaryDiffTasks as TemplateSummaryDiffTasks +from influxdb_client.domain.template_summary_diff_tasks_new_old import ( + TemplateSummaryDiffTasksNewOld as TemplateSummaryDiffTasksNewOld, +) +from influxdb_client.domain.template_summary_diff_telegraf_configs import ( + TemplateSummaryDiffTelegrafConfigs as TemplateSummaryDiffTelegrafConfigs, +) +from influxdb_client.domain.template_summary_diff_variables import TemplateSummaryDiffVariables as TemplateSummaryDiffVariables +from influxdb_client.domain.template_summary_diff_variables_new_old import ( + TemplateSummaryDiffVariablesNewOld as TemplateSummaryDiffVariablesNewOld, +) +from influxdb_client.domain.template_summary_errors import TemplateSummaryErrors as TemplateSummaryErrors +from influxdb_client.domain.template_summary_label import TemplateSummaryLabel as TemplateSummaryLabel +from influxdb_client.domain.template_summary_label_properties import ( + TemplateSummaryLabelProperties as TemplateSummaryLabelProperties, +) +from influxdb_client.domain.template_summary_summary import TemplateSummarySummary as TemplateSummarySummary +from influxdb_client.domain.template_summary_summary_buckets import TemplateSummarySummaryBuckets as TemplateSummarySummaryBuckets +from influxdb_client.domain.template_summary_summary_dashboards import ( + TemplateSummarySummaryDashboards as 
TemplateSummarySummaryDashboards, +) +from influxdb_client.domain.template_summary_summary_label_mappings import ( + TemplateSummarySummaryLabelMappings as TemplateSummarySummaryLabelMappings, +) +from influxdb_client.domain.template_summary_summary_notification_rules import ( + TemplateSummarySummaryNotificationRules as TemplateSummarySummaryNotificationRules, +) +from influxdb_client.domain.template_summary_summary_status_rules import ( + TemplateSummarySummaryStatusRules as TemplateSummarySummaryStatusRules, +) +from influxdb_client.domain.template_summary_summary_tag_rules import ( + TemplateSummarySummaryTagRules as TemplateSummarySummaryTagRules, +) +from influxdb_client.domain.template_summary_summary_tasks import TemplateSummarySummaryTasks as TemplateSummarySummaryTasks +from influxdb_client.domain.template_summary_summary_variables import ( + TemplateSummarySummaryVariables as TemplateSummarySummaryVariables, +) +from influxdb_client.domain.test_statement import TestStatement as TestStatement +from influxdb_client.domain.threshold import Threshold as Threshold +from influxdb_client.domain.threshold_base import ThresholdBase as ThresholdBase +from influxdb_client.domain.threshold_check import ThresholdCheck as ThresholdCheck +from influxdb_client.domain.unary_expression import UnaryExpression as UnaryExpression +from influxdb_client.domain.unsigned_integer_literal import UnsignedIntegerLiteral as UnsignedIntegerLiteral +from influxdb_client.domain.user import User as User +from influxdb_client.domain.user_response import UserResponse as UserResponse +from influxdb_client.domain.user_response_links import UserResponseLinks as UserResponseLinks +from influxdb_client.domain.users import Users as Users +from influxdb_client.domain.variable import Variable as Variable +from influxdb_client.domain.variable_assignment import VariableAssignment as VariableAssignment +from influxdb_client.domain.variable_links import VariableLinks as VariableLinks +from influxdb_client.domain.variable_properties import VariableProperties as VariableProperties +from influxdb_client.domain.variables import Variables as Variables +from influxdb_client.domain.view import View as View +from influxdb_client.domain.view_links import ViewLinks as ViewLinks +from influxdb_client.domain.view_properties import ViewProperties as ViewProperties +from influxdb_client.domain.views import Views as Views +from influxdb_client.domain.write_precision import WritePrecision as WritePrecision +from influxdb_client.domain.xy_geom import XYGeom as XYGeom +from influxdb_client.domain.xy_view_properties import XYViewProperties as XYViewProperties diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/add_resource_member_request_body.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/add_resource_member_request_body.pyi new file mode 100644 index 00000000..5632abf9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/add_resource_member_request_body.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class AddResourceMemberRequestBody: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, id: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... 
+ @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/analyze_query_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/analyze_query_response.pyi new file mode 100644 index 00000000..2736b432 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/analyze_query_response.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class AnalyzeQueryResponse: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, errors: Incomplete | None = ...) -> None: ... + @property + def errors(self): ... + @errors.setter + def errors(self, errors) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/analyze_query_response_errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/analyze_query_response_errors.pyi new file mode 100644 index 00000000..6f107832 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/analyze_query_response_errors.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class AnalyzeQueryResponseErrors: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + line: Incomplete | None = ..., + column: Incomplete | None = ..., + character: Incomplete | None = ..., + message: Incomplete | None = ..., + ) -> None: ... + @property + def line(self): ... + @line.setter + def line(self, line) -> None: ... + @property + def column(self): ... + @column.setter + def column(self, column) -> None: ... + @property + def character(self): ... + @character.setter + def character(self, character) -> None: ... + @property + def message(self): ... + @message.setter + def message(self, message) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/array_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/array_expression.pyi new file mode 100644 index 00000000..8d1a6cb1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/array_expression.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class ArrayExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., elements: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def elements(self): ... + @elements.setter + def elements(self, elements) -> None: ... + def to_dict(self): ... + def to_str(self): ... 
+ def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/ast_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/ast_response.pyi new file mode 100644 index 00000000..586efd22 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/ast_response.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class ASTResponse: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, ast: Incomplete | None = ...) -> None: ... + @property + def ast(self): ... + @ast.setter + def ast(self, ast) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorization.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorization.pyi new file mode 100644 index 00000000..b8afa636 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorization.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest + +class Authorization(AuthorizationUpdateRequest): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + org_id: Incomplete | None = ..., + permissions: Incomplete | None = ..., + id: Incomplete | None = ..., + token: Incomplete | None = ..., + user_id: Incomplete | None = ..., + user: Incomplete | None = ..., + org: Incomplete | None = ..., + links: Incomplete | None = ..., + status: str = ..., + description: Incomplete | None = ..., + ) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def permissions(self): ... + @permissions.setter + def permissions(self, permissions) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def token(self): ... + @token.setter + def token(self, token) -> None: ... + @property + def user_id(self): ... + @user_id.setter + def user_id(self, user_id) -> None: ... + @property + def user(self): ... + @user.setter + def user(self, user) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
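(Editor's note, not part of the patch: the domain stubs above and below all follow the same generated-model shape: openapi_types/attribute_map class attributes, an __init__ whose fields are all optional, property getter/setter pairs, and to_dict()/to_str()/__eq__()/__ne__() helpers. A hedged sketch of that surface, using AuthorizationUpdateRequest from the stub above; the status and description values are illustrative.)

from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest

# Construct a model with optional keyword fields, read it back through the
# stubbed properties, and serialise it with to_dict().
req = AuthorizationUpdateRequest(status="inactive", description="rotated token")
assert req.status == "inactive"
payload = req.to_dict()     # plain dict suitable for a REST request body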
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorization_post_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorization_post_request.pyi new file mode 100644 index 00000000..98afba0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorization_post_request.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest + +class AuthorizationPostRequest(AuthorizationUpdateRequest): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + org_id: Incomplete | None = ..., + user_id: Incomplete | None = ..., + permissions: Incomplete | None = ..., + status: str = ..., + description: Incomplete | None = ..., + ) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def user_id(self): ... + @user_id.setter + def user_id(self, user_id) -> None: ... + @property + def permissions(self): ... + @permissions.setter + def permissions(self, permissions) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorization_update_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorization_update_request.pyi new file mode 100644 index 00000000..e6b69b75 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorization_update_request.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class AuthorizationUpdateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, status: str = ..., description: Incomplete | None = ...) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorizations.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorizations.pyi new file mode 100644 index 00000000..1da35137 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/authorizations.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Authorizations: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., authorizations: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def authorizations(self): ... + @authorizations.setter + def authorizations(self, authorizations) -> None: ... + def to_dict(self): ... + def to_str(self): ... 
+ def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/axes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/axes.pyi new file mode 100644 index 00000000..73989df6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/axes.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Axes: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, x: Incomplete | None = ..., y: Incomplete | None = ...) -> None: ... + @property + def x(self): ... + @x.setter + def x(self, x) -> None: ... + @property + def y(self): ... + @y.setter + def y(self, y) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/axis.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/axis.pyi new file mode 100644 index 00000000..0c487a22 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/axis.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class Axis: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + bounds: Incomplete | None = ..., + label: Incomplete | None = ..., + prefix: Incomplete | None = ..., + suffix: Incomplete | None = ..., + base: Incomplete | None = ..., + scale: Incomplete | None = ..., + ) -> None: ... + @property + def bounds(self): ... + @bounds.setter + def bounds(self, bounds) -> None: ... + @property + def label(self): ... + @label.setter + def label(self, label) -> None: ... + @property + def prefix(self): ... + @prefix.setter + def prefix(self, prefix) -> None: ... + @property + def suffix(self): ... + @suffix.setter + def suffix(self, suffix) -> None: ... + @property + def base(self): ... + @base.setter + def base(self, base) -> None: ... + @property + def scale(self): ... + @scale.setter + def scale(self, scale) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/axis_scale.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/axis_scale.pyi new file mode 100644 index 00000000..765352ae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/axis_scale.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +class AxisScale: + LOG: str + LINEAR: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bad_statement.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bad_statement.pyi new file mode 100644 index 00000000..306ee23b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bad_statement.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.statement import Statement + +class BadStatement(Statement): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., text: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def text(self): ... + @text.setter + def text(self, text) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/band_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/band_view_properties.pyi new file mode 100644 index 00000000..d3948591 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/band_view_properties.pyi @@ -0,0 +1,155 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class BandViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + time_format: Incomplete | None = ..., + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + axes: Incomplete | None = ..., + static_legend: Incomplete | None = ..., + x_column: Incomplete | None = ..., + generate_x_axis_ticks: Incomplete | None = ..., + x_total_ticks: Incomplete | None = ..., + x_tick_start: Incomplete | None = ..., + x_tick_step: Incomplete | None = ..., + y_column: Incomplete | None = ..., + generate_y_axis_ticks: Incomplete | None = ..., + y_total_ticks: Incomplete | None = ..., + y_tick_start: Incomplete | None = ..., + y_tick_step: Incomplete | None = ..., + upper_column: Incomplete | None = ..., + main_column: Incomplete | None = ..., + lower_column: Incomplete | None = ..., + hover_dimension: Incomplete | None = ..., + geom: Incomplete | None = ..., + legend_colorize_rows: Incomplete | None = ..., + legend_hide: Incomplete | None = ..., + legend_opacity: Incomplete | None = ..., + legend_orientation_threshold: Incomplete | None = ..., + ) -> None: ... + @property + def time_format(self): ... + @time_format.setter + def time_format(self, time_format) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... 
+ @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def axes(self): ... + @axes.setter + def axes(self, axes) -> None: ... + @property + def static_legend(self): ... + @static_legend.setter + def static_legend(self, static_legend) -> None: ... + @property + def x_column(self): ... + @x_column.setter + def x_column(self, x_column) -> None: ... + @property + def generate_x_axis_ticks(self): ... + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... + @property + def x_total_ticks(self): ... + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks) -> None: ... + @property + def x_tick_start(self): ... + @x_tick_start.setter + def x_tick_start(self, x_tick_start) -> None: ... + @property + def x_tick_step(self): ... + @x_tick_step.setter + def x_tick_step(self, x_tick_step) -> None: ... + @property + def y_column(self): ... + @y_column.setter + def y_column(self, y_column) -> None: ... + @property + def generate_y_axis_ticks(self): ... + @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... + @property + def y_total_ticks(self): ... + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks) -> None: ... + @property + def y_tick_start(self): ... + @y_tick_start.setter + def y_tick_start(self, y_tick_start) -> None: ... + @property + def y_tick_step(self): ... + @y_tick_step.setter + def y_tick_step(self, y_tick_step) -> None: ... + @property + def upper_column(self): ... + @upper_column.setter + def upper_column(self, upper_column) -> None: ... + @property + def main_column(self): ... + @main_column.setter + def main_column(self, main_column) -> None: ... + @property + def lower_column(self): ... + @lower_column.setter + def lower_column(self, lower_column) -> None: ... + @property + def hover_dimension(self): ... + @hover_dimension.setter + def hover_dimension(self, hover_dimension) -> None: ... + @property + def geom(self): ... + @geom.setter + def geom(self, geom) -> None: ... + @property + def legend_colorize_rows(self): ... + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows) -> None: ... + @property + def legend_hide(self): ... + @legend_hide.setter + def legend_hide(self, legend_hide) -> None: ... + @property + def legend_opacity(self): ... + @legend_opacity.setter + def legend_opacity(self, legend_opacity) -> None: ... + @property + def legend_orientation_threshold(self): ... + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/binary_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/binary_expression.pyi new file mode 100644 index 00000000..cfb13da0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/binary_expression.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class BinaryExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + operator: Incomplete | None = ..., + left: Incomplete | None = ..., + right: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def operator(self): ... + @operator.setter + def operator(self, operator) -> None: ... + @property + def left(self): ... + @left.setter + def left(self, left) -> None: ... + @property + def right(self): ... + @right.setter + def right(self, right) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/block.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/block.pyi new file mode 100644 index 00000000..48a29bd2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/block.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.node import Node + +class Block(Node): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., body: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def body(self): ... + @body.setter + def body(self, body) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/boolean_literal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/boolean_literal.pyi new file mode 100644 index 00000000..cea20b46 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/boolean_literal.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class BooleanLiteral(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket.pyi new file mode 100644 index 00000000..1cb79117 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete + +class Bucket: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + links: Incomplete | None = ..., + id: Incomplete | None = ..., + type: str = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + org_id: Incomplete | None = ..., + rp: Incomplete | None = ..., + schema_type: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + retention_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + ) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def rp(self): ... + @rp.setter + def rp(self, rp) -> None: ... + @property + def schema_type(self): ... + @schema_type.setter + def schema_type(self, schema_type) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + @property + def retention_rules(self): ... + @retention_rules.setter + def retention_rules(self, retention_rules) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_links.pyi new file mode 100644 index 00000000..ace2e00d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_links.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete + +class BucketLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + labels: Incomplete | None = ..., + members: Incomplete | None = ..., + org: Incomplete | None = ..., + owners: Incomplete | None = ..., + _self: Incomplete | None = ..., + write: Incomplete | None = ..., + ) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def members(self): ... + @members.setter + def members(self, members) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + @property + def owners(self): ... 
+ @owners.setter + def owners(self, owners) -> None: ... + @property + def write(self): ... + @write.setter + def write(self, write) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_metadata_manifest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_metadata_manifest.pyi new file mode 100644 index 00000000..24cce2c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_metadata_manifest.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class BucketMetadataManifest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + organization_id: Incomplete | None = ..., + organization_name: Incomplete | None = ..., + bucket_id: Incomplete | None = ..., + bucket_name: Incomplete | None = ..., + description: Incomplete | None = ..., + default_retention_policy: Incomplete | None = ..., + retention_policies: Incomplete | None = ..., + ) -> None: ... + @property + def organization_id(self): ... + @organization_id.setter + def organization_id(self, organization_id) -> None: ... + @property + def organization_name(self): ... + @organization_name.setter + def organization_name(self, organization_name) -> None: ... + @property + def bucket_id(self): ... + @bucket_id.setter + def bucket_id(self, bucket_id) -> None: ... + @property + def bucket_name(self): ... + @bucket_name.setter + def bucket_name(self, bucket_name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def default_retention_policy(self): ... + @default_retention_policy.setter + def default_retention_policy(self, default_retention_policy) -> None: ... + @property + def retention_policies(self): ... + @retention_policies.setter + def retention_policies(self, retention_policies) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_retention_rules.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_retention_rules.pyi new file mode 100644 index 00000000..7ddc6748 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_retention_rules.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class BucketRetentionRules: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, type: str = ..., every_seconds: Incomplete | None = ..., shard_group_duration_seconds: Incomplete | None = ... + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def every_seconds(self): ... + @every_seconds.setter + def every_seconds(self, every_seconds) -> None: ... + @property + def shard_group_duration_seconds(self): ... + @shard_group_duration_seconds.setter + def shard_group_duration_seconds(self, shard_group_duration_seconds) -> None: ... + def to_dict(self): ... + def to_str(self): ... 
+ def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_shard_mapping.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_shard_mapping.pyi new file mode 100644 index 00000000..3977aa0c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/bucket_shard_mapping.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class BucketShardMapping: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, old_id: Incomplete | None = ..., new_id: Incomplete | None = ...) -> None: ... + @property + def old_id(self): ... + @old_id.setter + def old_id(self, old_id) -> None: ... + @property + def new_id(self): ... + @new_id.setter + def new_id(self, new_id) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/buckets.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/buckets.pyi new file mode 100644 index 00000000..d230562d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/buckets.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Buckets: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., buckets: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def buckets(self): ... + @buckets.setter + def buckets(self, buckets) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_aggregate_function_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_aggregate_function_type.pyi new file mode 100644 index 00000000..77ebfb9e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_aggregate_function_type.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +class BuilderAggregateFunctionType: + FILTER: str + GROUP: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_config.pyi new file mode 100644 index 00000000..c012e523 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_config.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class BuilderConfig: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + buckets: Incomplete | None = ..., + tags: Incomplete | None = ..., + functions: Incomplete | None = ..., + aggregate_window: Incomplete | None = ..., + ) -> None: ... + @property + def buckets(self): ... + @buckets.setter + def buckets(self, buckets) -> None: ... + @property + def tags(self): ... + @tags.setter + def tags(self, tags) -> None: ... + @property + def functions(self): ... + @functions.setter + def functions(self, functions) -> None: ... + @property + def aggregate_window(self): ... + @aggregate_window.setter + def aggregate_window(self, aggregate_window) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_config_aggregate_window.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_config_aggregate_window.pyi new file mode 100644 index 00000000..74848bc0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_config_aggregate_window.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class BuilderConfigAggregateWindow: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, period: Incomplete | None = ..., fill_values: Incomplete | None = ...) -> None: ... + @property + def period(self): ... + @period.setter + def period(self, period) -> None: ... + @property + def fill_values(self): ... + @fill_values.setter + def fill_values(self, fill_values) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_functions_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_functions_type.pyi new file mode 100644 index 00000000..4b6e331d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_functions_type.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class BuilderFunctionsType: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, name: Incomplete | None = ...) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_tags_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_tags_type.pyi new file mode 100644 index 00000000..580c6280 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builder_tags_type.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class BuilderTagsType: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, key: Incomplete | None = ..., values: Incomplete | None = ..., aggregate_function_type: Incomplete | None = ... + ) -> None: ... + @property + def key(self): ... + @key.setter + def key(self, key) -> None: ... + @property + def values(self): ... + @values.setter + def values(self, values) -> None: ... + @property + def aggregate_function_type(self): ... + @aggregate_function_type.setter + def aggregate_function_type(self, aggregate_function_type) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builtin_statement.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builtin_statement.pyi new file mode 100644 index 00000000..5dba736e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/builtin_statement.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.statement import Statement + +class BuiltinStatement(Statement): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., id: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/call_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/call_expression.pyi new file mode 100644 index 00000000..679705f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/call_expression.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class CallExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, type: Incomplete | None = ..., callee: Incomplete | None = ..., arguments: Incomplete | None = ... + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def callee(self): ... + @callee.setter + def callee(self, callee) -> None: ... + @property + def arguments(self): ... + @arguments.setter + def arguments(self, arguments) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... 
+ def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell.pyi new file mode 100644 index 00000000..9c0be23e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class Cell: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + links: Incomplete | None = ..., + x: Incomplete | None = ..., + y: Incomplete | None = ..., + w: Incomplete | None = ..., + h: Incomplete | None = ..., + view_id: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def x(self): ... + @x.setter + def x(self, x) -> None: ... + @property + def y(self): ... + @y.setter + def y(self, y) -> None: ... + @property + def w(self): ... + @w.setter + def w(self, w) -> None: ... + @property + def h(self): ... + @h.setter + def h(self, h) -> None: ... + @property + def view_id(self): ... + @view_id.setter + def view_id(self, view_id) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell_links.pyi new file mode 100644 index 00000000..90fbc708 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell_links.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class CellLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, _self: Incomplete | None = ..., view: Incomplete | None = ...) -> None: ... + @property + def view(self): ... + @view.setter + def view(self, view) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell_update.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell_update.pyi new file mode 100644 index 00000000..78b5f200 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell_update.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete + +class CellUpdate: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, x: Incomplete | None = ..., y: Incomplete | None = ..., w: Incomplete | None = ..., h: Incomplete | None = ... + ) -> None: ... + @property + def x(self): ... + @x.setter + def x(self, x) -> None: ... + @property + def y(self): ... + @y.setter + def y(self, y) -> None: ... + @property + def w(self): ... + @w.setter + def w(self, w) -> None: ... + @property + def h(self): ... + @h.setter + def h(self, h) -> None: ... 
+ def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell_with_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell_with_view_properties.pyi new file mode 100644 index 00000000..fcb4fa48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/cell_with_view_properties.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.cell import Cell + +class CellWithViewProperties(Cell): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + properties: Incomplete | None = ..., + id: Incomplete | None = ..., + links: Incomplete | None = ..., + x: Incomplete | None = ..., + y: Incomplete | None = ..., + w: Incomplete | None = ..., + h: Incomplete | None = ..., + view_id: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def properties(self): ... + @properties.setter + def properties(self, properties) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check.pyi new file mode 100644 index 00000000..d2013bda --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +class Check: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator_value_class_map: Incomplete + discriminator: str + def __init__(self, type: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def get_real_child_model(self, data): ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_base.pyi new file mode 100644 index 00000000..48c7aa77 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_base.pyi @@ -0,0 +1,88 @@ +from _typeshed import Incomplete + +class CheckBase: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + name: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + query: Incomplete | None = ..., + status: Incomplete | None = ..., + description: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def task_id(self): ... + @task_id.setter + def task_id(self, task_id) -> None: ... + @property + def owner_id(self): ... + @owner_id.setter + def owner_id(self, owner_id) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def latest_completed(self): ... + @latest_completed.setter + def latest_completed(self, latest_completed) -> None: ... + @property + def last_run_status(self): ... + @last_run_status.setter + def last_run_status(self, last_run_status) -> None: ... + @property + def last_run_error(self): ... + @last_run_error.setter + def last_run_error(self, last_run_error) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_base_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_base_links.pyi new file mode 100644 index 00000000..da935fb9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_base_links.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete + +class CheckBaseLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + _self: Incomplete | None = ..., + labels: Incomplete | None = ..., + members: Incomplete | None = ..., + owners: Incomplete | None = ..., + query: Incomplete | None = ..., + ) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def members(self): ... + @members.setter + def members(self, members) -> None: ... + @property + def owners(self): ... + @owners.setter + def owners(self, owners) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_discriminator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_discriminator.pyi new file mode 100644 index 00000000..50e776cb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_discriminator.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.check_base import CheckBase + +class CheckDiscriminator(CheckBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + name: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + query: Incomplete | None = ..., + status: Incomplete | None = ..., + description: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_patch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_patch.pyi new file mode 100644 index 00000000..9354879e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_patch.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class CheckPatch: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., description: Incomplete | None = ..., status: Incomplete | None = ... + ) -> None: ... 
+ @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_status_level.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_status_level.pyi new file mode 100644 index 00000000..9dafd3a6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_status_level.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class CheckStatusLevel: + UNKNOWN: str + OK: str + INFO: str + CRIT: str + WARN: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_view_properties.pyi new file mode 100644 index 00000000..4e207048 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/check_view_properties.pyi @@ -0,0 +1,65 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class CheckViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + shape: Incomplete | None = ..., + check_id: Incomplete | None = ..., + check: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + legend_colorize_rows: Incomplete | None = ..., + legend_hide: Incomplete | None = ..., + legend_opacity: Incomplete | None = ..., + legend_orientation_threshold: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def check_id(self): ... + @check_id.setter + def check_id(self, check_id) -> None: ... + @property + def check(self): ... + @check.setter + def check(self, check) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def legend_colorize_rows(self): ... + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows) -> None: ... + @property + def legend_hide(self): ... + @legend_hide.setter + def legend_hide(self, legend_hide) -> None: ... + @property + def legend_opacity(self): ... + @legend_opacity.setter + def legend_opacity(self, legend_opacity) -> None: ... + @property + def legend_orientation_threshold(self): ... + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... 
+ def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/checks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/checks.pyi new file mode 100644 index 00000000..d9879c8d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/checks.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Checks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, checks: Incomplete | None = ..., links: Incomplete | None = ...) -> None: ... + @property + def checks(self): ... + @checks.setter + def checks(self, checks) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/column_data_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/column_data_type.pyi new file mode 100644 index 00000000..fd59dd0c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/column_data_type.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class ColumnDataType: + INTEGER: str + FLOAT: str + BOOLEAN: str + STRING: str + UNSIGNED: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/column_semantic_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/column_semantic_type.pyi new file mode 100644 index 00000000..feaa3a9e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/column_semantic_type.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +class ColumnSemanticType: + TIMESTAMP: str + TAG: str + FIELD: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/conditional_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/conditional_expression.pyi new file mode 100644 index 00000000..89493f62 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/conditional_expression.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class ConditionalExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + test: Incomplete | None = ..., + alternate: Incomplete | None = ..., + consequent: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def test(self): ... + @test.setter + def test(self, test) -> None: ... + @property + def alternate(self): ... + @alternate.setter + def alternate(self, alternate) -> None: ... + @property + def consequent(self): ... + @consequent.setter + def consequent(self, consequent) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/config.pyi new file mode 100644 index 00000000..d81e64af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/config.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class Config: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, config: Incomplete | None = ...) -> None: ... + @property + def config(self): ... + @config.setter + def config(self, config) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/constant_variable_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/constant_variable_properties.pyi new file mode 100644 index 00000000..cd0c6ced --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/constant_variable_properties.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.variable_properties import VariableProperties + +class ConstantVariableProperties(VariableProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., values: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def values(self): ... + @values.setter + def values(self, values) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/create_cell.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/create_cell.pyi new file mode 100644 index 00000000..ff8b1b42 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/create_cell.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class CreateCell: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + x: Incomplete | None = ..., + y: Incomplete | None = ..., + w: Incomplete | None = ..., + h: Incomplete | None = ..., + using_view: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def x(self): ... + @x.setter + def x(self, x) -> None: ... + @property + def y(self): ... + @y.setter + def y(self, y) -> None: ... + @property + def w(self): ... + @w.setter + def w(self, w) -> None: ... + @property + def h(self): ... + @h.setter + def h(self, h) -> None: ... + @property + def using_view(self): ... + @using_view.setter + def using_view(self, using_view) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/create_dashboard_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/create_dashboard_request.pyi new file mode 100644 index 00000000..70a91578 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/create_dashboard_request.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class CreateDashboardRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, org_id: Incomplete | None = ..., name: Incomplete | None = ..., description: Incomplete | None = ... + ) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/custom_check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/custom_check.pyi new file mode 100644 index 00000000..3734b93c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/custom_check.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.check_discriminator import CheckDiscriminator + +class CustomCheck(CheckDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: str = ..., + id: Incomplete | None = ..., + name: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + query: Incomplete | None = ..., + status: Incomplete | None = ..., + description: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard.pyi new file mode 100644 index 00000000..105115ce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest + +class Dashboard(CreateDashboardRequest): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + links: Incomplete | None = ..., + id: Incomplete | None = ..., + meta: Incomplete | None = ..., + cells: Incomplete | None = ..., + labels: Incomplete | None = ..., + org_id: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + ) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def meta(self): ... + @meta.setter + def meta(self, meta) -> None: ... + @property + def cells(self): ... + @cells.setter + def cells(self, cells) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard_color.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard_color.pyi new file mode 100644 index 00000000..2afe2bea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard_color.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class DashboardColor: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + type: Incomplete | None = ..., + hex: Incomplete | None = ..., + name: Incomplete | None = ..., + value: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def hex(self): ... + @hex.setter + def hex(self, hex) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard_query.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard_query.pyi new file mode 100644 index 00000000..54f9104f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard_query.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class DashboardQuery: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + text: Incomplete | None = ..., + edit_mode: Incomplete | None = ..., + name: Incomplete | None = ..., + builder_config: Incomplete | None = ..., + ) -> None: ... + @property + def text(self): ... + @text.setter + def text(self, text) -> None: ... + @property + def edit_mode(self): ... + @edit_mode.setter + def edit_mode(self, edit_mode) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def builder_config(self): ... + @builder_config.setter + def builder_config(self, builder_config) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard_with_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard_with_view_properties.pyi new file mode 100644 index 00000000..aaeeff0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboard_with_view_properties.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.create_dashboard_request import CreateDashboardRequest + +class DashboardWithViewProperties(CreateDashboardRequest): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + links: Incomplete | None = ..., + id: Incomplete | None = ..., + meta: Incomplete | None = ..., + cells: Incomplete | None = ..., + labels: Incomplete | None = ..., + org_id: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + ) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def meta(self): ... + @meta.setter + def meta(self, meta) -> None: ... + @property + def cells(self): ... + @cells.setter + def cells(self, cells) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboards.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboards.pyi new file mode 100644 index 00000000..56b2a9d0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dashboards.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Dashboards: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., dashboards: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def dashboards(self): ... + @dashboards.setter + def dashboards(self, dashboards) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/date_time_literal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/date_time_literal.pyi new file mode 100644 index 00000000..c1226ad9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/date_time_literal.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class DateTimeLiteral(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + @property + def type(self): ... 
+ @type.setter + def type(self, type) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbr_ps.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbr_ps.pyi new file mode 100644 index 00000000..190351f7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbr_ps.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class DBRPs: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, content: Incomplete | None = ...) -> None: ... + @property + def content(self): ... + @content.setter + def content(self, content) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp.pyi new file mode 100644 index 00000000..23ecea54 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class DBRP: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + bucket_id: Incomplete | None = ..., + database: Incomplete | None = ..., + retention_policy: Incomplete | None = ..., + default: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def bucket_id(self): ... + @bucket_id.setter + def bucket_id(self, bucket_id) -> None: ... + @property + def database(self): ... + @database.setter + def database(self, database) -> None: ... + @property + def retention_policy(self): ... + @retention_policy.setter + def retention_policy(self, retention_policy) -> None: ... + @property + def default(self): ... + @default.setter + def default(self, default) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp_create.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp_create.pyi new file mode 100644 index 00000000..3d462b1b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp_create.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class DBRPCreate: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + org_id: Incomplete | None = ..., + org: Incomplete | None = ..., + bucket_id: Incomplete | None = ..., + database: Incomplete | None = ..., + retention_policy: Incomplete | None = ..., + default: Incomplete | None = ..., + ) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + @property + def bucket_id(self): ... + @bucket_id.setter + def bucket_id(self, bucket_id) -> None: ... + @property + def database(self): ... + @database.setter + def database(self, database) -> None: ... + @property + def retention_policy(self): ... + @retention_policy.setter + def retention_policy(self, retention_policy) -> None: ... + @property + def default(self): ... + @default.setter + def default(self, default) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp_get.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp_get.pyi new file mode 100644 index 00000000..df0e24b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp_get.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class DBRPGet: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, content: Incomplete | None = ...) -> None: ... + @property + def content(self): ... + @content.setter + def content(self, content) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp_update.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp_update.pyi new file mode 100644 index 00000000..e36ca333 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dbrp_update.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class DBRPUpdate: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, retention_policy: Incomplete | None = ..., default: Incomplete | None = ...) -> None: ... + @property + def retention_policy(self): ... + @retention_policy.setter + def retention_policy(self, retention_policy) -> None: ... + @property + def default(self): ... + @default.setter + def default(self, default) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... 
+ def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/deadman_check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/deadman_check.pyi new file mode 100644 index 00000000..7f270cee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/deadman_check.pyi @@ -0,0 +1,75 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.check_discriminator import CheckDiscriminator + +class DeadmanCheck(CheckDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: str = ..., + time_since: Incomplete | None = ..., + stale_time: Incomplete | None = ..., + report_zero: Incomplete | None = ..., + level: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + tags: Incomplete | None = ..., + status_message_template: Incomplete | None = ..., + id: Incomplete | None = ..., + name: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + query: Incomplete | None = ..., + status: Incomplete | None = ..., + description: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def time_since(self): ... + @time_since.setter + def time_since(self, time_since) -> None: ... + @property + def stale_time(self): ... + @stale_time.setter + def stale_time(self, stale_time) -> None: ... + @property + def report_zero(self): ... + @report_zero.setter + def report_zero(self, report_zero) -> None: ... + @property + def level(self): ... + @level.setter + def level(self, level) -> None: ... + @property + def every(self): ... + @every.setter + def every(self, every) -> None: ... + @property + def offset(self): ... + @offset.setter + def offset(self, offset) -> None: ... + @property + def tags(self): ... + @tags.setter + def tags(self, tags) -> None: ... + @property + def status_message_template(self): ... + @status_message_template.setter + def status_message_template(self, status_message_template) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/decimal_places.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/decimal_places.pyi new file mode 100644 index 00000000..15400d4b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/decimal_places.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class DecimalPlaces: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, is_enforced: Incomplete | None = ..., digits: Incomplete | None = ...) -> None: ... + @property + def is_enforced(self): ... 
+ @is_enforced.setter + def is_enforced(self, is_enforced) -> None: ... + @property + def digits(self): ... + @digits.setter + def digits(self, digits) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/delete_predicate_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/delete_predicate_request.pyi new file mode 100644 index 00000000..a4606ae4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/delete_predicate_request.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class DeletePredicateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, start: Incomplete | None = ..., stop: Incomplete | None = ..., predicate: Incomplete | None = ... + ) -> None: ... + @property + def start(self): ... + @start.setter + def start(self, start) -> None: ... + @property + def stop(self): ... + @stop.setter + def stop(self, stop) -> None: ... + @property + def predicate(self): ... + @predicate.setter + def predicate(self, predicate) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dialect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dialect.pyi new file mode 100644 index 00000000..420a1f23 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dialect.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class Dialect: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + header: bool = ..., + delimiter: str = ..., + annotations: Incomplete | None = ..., + comment_prefix: str = ..., + date_time_format: str = ..., + ) -> None: ... + @property + def header(self): ... + @header.setter + def header(self, header) -> None: ... + @property + def delimiter(self): ... + @delimiter.setter + def delimiter(self, delimiter) -> None: ... + @property + def annotations(self): ... + @annotations.setter + def annotations(self, annotations) -> None: ... + @property + def comment_prefix(self): ... + @comment_prefix.setter + def comment_prefix(self, comment_prefix) -> None: ... + @property + def date_time_format(self): ... + @date_time_format.setter + def date_time_format(self, date_time_format) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dict_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dict_expression.pyi new file mode 100644 index 00000000..0200e72c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dict_expression.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class DictExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., elements: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def elements(self): ... + @elements.setter + def elements(self, elements) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dict_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dict_item.pyi new file mode 100644 index 00000000..06ebcad4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/dict_item.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +class DictItem: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., key: Incomplete | None = ..., val: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def key(self): ... + @key.setter + def key(self, key) -> None: ... + @property + def val(self): ... + @val.setter + def val(self, val) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/duration.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/duration.pyi new file mode 100644 index 00000000..4cbf5e98 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/duration.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class Duration: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, type: Incomplete | None = ..., magnitude: Incomplete | None = ..., unit: Incomplete | None = ... + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def magnitude(self): ... + @magnitude.setter + def magnitude(self, magnitude) -> None: ... + @property + def unit(self): ... + @unit.setter + def unit(self, unit) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/duration_literal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/duration_literal.pyi new file mode 100644 index 00000000..783fa7fd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/duration_literal.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class DurationLiteral(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., values: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def values(self): ... + @values.setter + def values(self, values) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/error.pyi new file mode 100644 index 00000000..27d3183b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/error.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class Error: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + code: Incomplete | None = ..., + message: Incomplete | None = ..., + op: Incomplete | None = ..., + err: Incomplete | None = ..., + ) -> None: ... + @property + def code(self): ... + @code.setter + def code(self, code) -> None: ... + @property + def message(self): ... + @message.setter + def message(self, message) -> None: ... + @property + def op(self): ... + @op.setter + def op(self, op) -> None: ... + @property + def err(self): ... + @err.setter + def err(self, err) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/expression.pyi new file mode 100644 index 00000000..0add99c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/expression.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.node import Node + +class Expression(Node): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/expression_statement.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/expression_statement.pyi new file mode 100644 index 00000000..815bfae9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/expression_statement.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.statement import Statement + +class ExpressionStatement(Statement): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., expression: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def expression(self): ... + @expression.setter + def expression(self, expression) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/field.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/field.pyi new file mode 100644 index 00000000..830b1567 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/field.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class Field: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + value: Incomplete | None = ..., + type: Incomplete | None = ..., + alias: Incomplete | None = ..., + args: Incomplete | None = ..., + ) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def alias(self): ... + @alias.setter + def alias(self, alias) -> None: ... + @property + def args(self): ... + @args.setter + def args(self, args) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/file.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/file.pyi new file mode 100644 index 00000000..5a413d6b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/file.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class File: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + name: Incomplete | None = ..., + package: Incomplete | None = ..., + imports: Incomplete | None = ..., + body: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def package(self): ... + @package.setter + def package(self, package) -> None: ... + @property + def imports(self): ... + @imports.setter + def imports(self, imports) -> None: ... 
+ @property + def body(self): ... + @body.setter + def body(self, body) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/float_literal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/float_literal.pyi new file mode 100644 index 00000000..2033f430 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/float_literal.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class FloatLiteral(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/flux_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/flux_response.pyi new file mode 100644 index 00000000..edcc9796 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/flux_response.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class FluxResponse: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, flux: Incomplete | None = ...) -> None: ... + @property + def flux(self): ... + @flux.setter + def flux(self, flux) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/flux_suggestion.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/flux_suggestion.pyi new file mode 100644 index 00000000..0eb66df4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/flux_suggestion.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class FluxSuggestion: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, name: Incomplete | None = ..., params: Incomplete | None = ...) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def params(self): ... + @params.setter + def params(self, params) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/flux_suggestions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/flux_suggestions.pyi new file mode 100644 index 00000000..8518329c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/flux_suggestions.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class FluxSuggestions: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, funcs: Incomplete | None = ...) -> None: ... + @property + def funcs(self): ... + @funcs.setter + def funcs(self, funcs) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/function_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/function_expression.pyi new file mode 100644 index 00000000..5b1fb029 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/function_expression.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class FunctionExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., params: Incomplete | None = ..., body: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def params(self): ... + @params.setter + def params(self, params) -> None: ... + @property + def body(self): ... + @body.setter + def body(self, body) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/gauge_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/gauge_view_properties.pyi new file mode 100644 index 00000000..b128a261 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/gauge_view_properties.pyi @@ -0,0 +1,70 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class GaugeViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + prefix: Incomplete | None = ..., + tick_prefix: Incomplete | None = ..., + suffix: Incomplete | None = ..., + tick_suffix: Incomplete | None = ..., + decimal_places: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... 
+ @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def prefix(self): ... + @prefix.setter + def prefix(self, prefix) -> None: ... + @property + def tick_prefix(self): ... + @tick_prefix.setter + def tick_prefix(self, tick_prefix) -> None: ... + @property + def suffix(self): ... + @suffix.setter + def suffix(self, suffix) -> None: ... + @property + def tick_suffix(self): ... + @tick_suffix.setter + def tick_suffix(self, tick_suffix) -> None: ... + @property + def decimal_places(self): ... + @decimal_places.setter + def decimal_places(self, decimal_places) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/greater_threshold.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/greater_threshold.pyi new file mode 100644 index 00000000..689b53be --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/greater_threshold.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.threshold_base import ThresholdBase + +class GreaterThreshold(ThresholdBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, type: str = ..., value: Incomplete | None = ..., level: Incomplete | None = ..., all_values: Incomplete | None = ... + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/health_check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/health_check.pyi new file mode 100644 index 00000000..c8b1c93f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/health_check.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class HealthCheck: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + message: Incomplete | None = ..., + checks: Incomplete | None = ..., + status: Incomplete | None = ..., + version: Incomplete | None = ..., + commit: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def message(self): ... + @message.setter + def message(self, message) -> None: ... + @property + def checks(self): ... + @checks.setter + def checks(self, checks) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def version(self): ... 
+ @version.setter + def version(self, version) -> None: ... + @property + def commit(self): ... + @commit.setter + def commit(self, commit) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/heatmap_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/heatmap_view_properties.pyi new file mode 100644 index 00000000..4de9504e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/heatmap_view_properties.pyi @@ -0,0 +1,165 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class HeatmapViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + time_format: Incomplete | None = ..., + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + x_column: Incomplete | None = ..., + generate_x_axis_ticks: Incomplete | None = ..., + x_total_ticks: Incomplete | None = ..., + x_tick_start: Incomplete | None = ..., + x_tick_step: Incomplete | None = ..., + y_column: Incomplete | None = ..., + generate_y_axis_ticks: Incomplete | None = ..., + y_total_ticks: Incomplete | None = ..., + y_tick_start: Incomplete | None = ..., + y_tick_step: Incomplete | None = ..., + x_domain: Incomplete | None = ..., + y_domain: Incomplete | None = ..., + x_axis_label: Incomplete | None = ..., + y_axis_label: Incomplete | None = ..., + x_prefix: Incomplete | None = ..., + x_suffix: Incomplete | None = ..., + y_prefix: Incomplete | None = ..., + y_suffix: Incomplete | None = ..., + bin_size: Incomplete | None = ..., + legend_colorize_rows: Incomplete | None = ..., + legend_hide: Incomplete | None = ..., + legend_opacity: Incomplete | None = ..., + legend_orientation_threshold: Incomplete | None = ..., + ) -> None: ... + @property + def time_format(self): ... + @time_format.setter + def time_format(self, time_format) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def x_column(self): ... + @x_column.setter + def x_column(self, x_column) -> None: ... + @property + def generate_x_axis_ticks(self): ... + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... + @property + def x_total_ticks(self): ... + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks) -> None: ... + @property + def x_tick_start(self): ... + @x_tick_start.setter + def x_tick_start(self, x_tick_start) -> None: ... + @property + def x_tick_step(self): ... 
+ @x_tick_step.setter + def x_tick_step(self, x_tick_step) -> None: ... + @property + def y_column(self): ... + @y_column.setter + def y_column(self, y_column) -> None: ... + @property + def generate_y_axis_ticks(self): ... + @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... + @property + def y_total_ticks(self): ... + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks) -> None: ... + @property + def y_tick_start(self): ... + @y_tick_start.setter + def y_tick_start(self, y_tick_start) -> None: ... + @property + def y_tick_step(self): ... + @y_tick_step.setter + def y_tick_step(self, y_tick_step) -> None: ... + @property + def x_domain(self): ... + @x_domain.setter + def x_domain(self, x_domain) -> None: ... + @property + def y_domain(self): ... + @y_domain.setter + def y_domain(self, y_domain) -> None: ... + @property + def x_axis_label(self): ... + @x_axis_label.setter + def x_axis_label(self, x_axis_label) -> None: ... + @property + def y_axis_label(self): ... + @y_axis_label.setter + def y_axis_label(self, y_axis_label) -> None: ... + @property + def x_prefix(self): ... + @x_prefix.setter + def x_prefix(self, x_prefix) -> None: ... + @property + def x_suffix(self): ... + @x_suffix.setter + def x_suffix(self, x_suffix) -> None: ... + @property + def y_prefix(self): ... + @y_prefix.setter + def y_prefix(self, y_prefix) -> None: ... + @property + def y_suffix(self): ... + @y_suffix.setter + def y_suffix(self, y_suffix) -> None: ... + @property + def bin_size(self): ... + @bin_size.setter + def bin_size(self, bin_size) -> None: ... + @property + def legend_colorize_rows(self): ... + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows) -> None: ... + @property + def legend_hide(self): ... + @legend_hide.setter + def legend_hide(self, legend_hide) -> None: ... + @property + def legend_opacity(self): ... + @legend_opacity.setter + def legend_opacity(self, legend_opacity) -> None: ... + @property + def legend_orientation_threshold(self): ... + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/histogram_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/histogram_view_properties.pyi new file mode 100644 index 00000000..54b641b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/histogram_view_properties.pyi @@ -0,0 +1,95 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class HistogramViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + x_column: Incomplete | None = ..., + fill_columns: Incomplete | None = ..., + x_domain: Incomplete | None = ..., + x_axis_label: Incomplete | None = ..., + position: Incomplete | None = ..., + bin_count: Incomplete | None = ..., + legend_colorize_rows: Incomplete | None = ..., + legend_hide: Incomplete | None = ..., + legend_opacity: Incomplete | None = ..., + legend_orientation_threshold: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def x_column(self): ... + @x_column.setter + def x_column(self, x_column) -> None: ... + @property + def fill_columns(self): ... + @fill_columns.setter + def fill_columns(self, fill_columns) -> None: ... + @property + def x_domain(self): ... + @x_domain.setter + def x_domain(self, x_domain) -> None: ... + @property + def x_axis_label(self): ... + @x_axis_label.setter + def x_axis_label(self, x_axis_label) -> None: ... + @property + def position(self): ... + @position.setter + def position(self, position) -> None: ... + @property + def bin_count(self): ... + @bin_count.setter + def bin_count(self, bin_count) -> None: ... + @property + def legend_colorize_rows(self): ... + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows) -> None: ... + @property + def legend_hide(self): ... + @legend_hide.setter + def legend_hide(self, legend_hide) -> None: ... + @property + def legend_opacity(self): ... + @legend_opacity.setter + def legend_opacity(self, legend_opacity) -> None: ... + @property + def legend_orientation_threshold(self): ... + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/http_notification_endpoint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/http_notification_endpoint.pyi new file mode 100644 index 00000000..e0bdf153 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/http_notification_endpoint.pyi @@ -0,0 +1,66 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator + +class HTTPNotificationEndpoint(NotificationEndpointDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + url: Incomplete | None = ..., + username: Incomplete | None = ..., + password: Incomplete | None = ..., + token: Incomplete | None = ..., + method: Incomplete | None = ..., + auth_method: Incomplete | None = ..., + content_template: Incomplete | None = ..., + headers: Incomplete | None = ..., + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + user_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + description: Incomplete | None = ..., + name: Incomplete | None = ..., + status: str = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + type: str = ..., + ) -> None: ... + @property + def url(self): ... + @url.setter + def url(self, url) -> None: ... + @property + def username(self): ... + @username.setter + def username(self, username) -> None: ... + @property + def password(self): ... + @password.setter + def password(self, password) -> None: ... + @property + def token(self): ... + @token.setter + def token(self, token) -> None: ... + @property + def method(self): ... + @method.setter + def method(self, method) -> None: ... + @property + def auth_method(self): ... + @auth_method.setter + def auth_method(self, auth_method) -> None: ... + @property + def content_template(self): ... + @content_template.setter + def content_template(self, content_template) -> None: ... + @property + def headers(self): ... + @headers.setter + def headers(self, headers) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/http_notification_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/http_notification_rule.pyi new file mode 100644 index 00000000..48b75ff8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/http_notification_rule.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.http_notification_rule_base import HTTPNotificationRuleBase + +class HTTPNotificationRule(HTTPNotificationRuleBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: str = ..., + url: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/http_notification_rule_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/http_notification_rule_base.pyi new file mode 100644 index 00000000..94b8b48f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/http_notification_rule_base.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + +class HTTPNotificationRuleBase(NotificationRuleDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + url: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def url(self): ... + @url.setter + def url(self, url) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/identifier.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/identifier.pyi new file mode 100644 index 00000000..9243f8b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/identifier.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.property_key import PropertyKey + +class Identifier(PropertyKey): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/import_declaration.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/import_declaration.pyi new file mode 100644 index 00000000..3312ef86 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/import_declaration.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class ImportDeclaration: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., _as: Incomplete | None = ..., path: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def path(self): ... + @path.setter + def path(self, path) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/index_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/index_expression.pyi new file mode 100644 index 00000000..7fe30f49 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/index_expression.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class IndexExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., array: Incomplete | None = ..., index: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def array(self): ... + @array.setter + def array(self, array) -> None: ... + @property + def index(self): ... + @index.setter + def index(self, index) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/integer_literal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/integer_literal.pyi new file mode 100644 index 00000000..f679b652 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/integer_literal.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class IntegerLiteral(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/is_onboarding.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/is_onboarding.pyi new file mode 100644 index 00000000..f0c9af7d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/is_onboarding.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class IsOnboarding: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, allowed: Incomplete | None = ...) -> None: ... + @property + def allowed(self): ... + @allowed.setter + def allowed(self, allowed) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label.pyi new file mode 100644 index 00000000..913a3cf8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class Label: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + name: Incomplete | None = ..., + properties: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def properties(self): ... + @properties.setter + def properties(self, properties) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_create_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_create_request.pyi new file mode 100644 index 00000000..dd1cb27b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_create_request.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class LabelCreateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, org_id: Incomplete | None = ..., name: Incomplete | None = ..., properties: Incomplete | None = ... + ) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def properties(self): ... + @properties.setter + def properties(self, properties) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_mapping.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_mapping.pyi new file mode 100644 index 00000000..951992ba --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_mapping.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class LabelMapping: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, label_id: Incomplete | None = ...) -> None: ... + @property + def label_id(self): ... + @label_id.setter + def label_id(self, label_id) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_response.pyi new file mode 100644 index 00000000..0d73edee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_response.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class LabelResponse: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, label: Incomplete | None = ..., links: Incomplete | None = ...) -> None: ... + @property + def label(self): ... + @label.setter + def label(self, label) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_update.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_update.pyi new file mode 100644 index 00000000..a7b982fd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/label_update.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class LabelUpdate: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, name: Incomplete | None = ..., properties: Incomplete | None = ...) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def properties(self): ... + @properties.setter + def properties(self, properties) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/labels_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/labels_response.pyi new file mode 100644 index 00000000..1597ef0b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/labels_response.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class LabelsResponse: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, labels: Incomplete | None = ..., links: Incomplete | None = ...) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/language_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/language_request.pyi new file mode 100644 index 00000000..6301bec7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/language_request.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class LanguageRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, query: Incomplete | None = ...) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/legacy_authorization_post_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/legacy_authorization_post_request.pyi new file mode 100644 index 00000000..3a0817d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/legacy_authorization_post_request.pyi @@ -0,0 +1,37 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.authorization_update_request import AuthorizationUpdateRequest + +class LegacyAuthorizationPostRequest(AuthorizationUpdateRequest): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + org_id: Incomplete | None = ..., + user_id: Incomplete | None = ..., + token: Incomplete | None = ..., + permissions: Incomplete | None = ..., + status: str = ..., + description: Incomplete | None = ..., + ) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def user_id(self): ... + @user_id.setter + def user_id(self, user_id) -> None: ... + @property + def token(self): ... + @token.setter + def token(self, token) -> None: ... + @property + def permissions(self): ... + @permissions.setter + def permissions(self, permissions) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/lesser_threshold.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/lesser_threshold.pyi new file mode 100644 index 00000000..626af5b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/lesser_threshold.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.threshold_base import ThresholdBase + +class LesserThreshold(ThresholdBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, type: str = ..., value: Incomplete | None = ..., level: Incomplete | None = ..., all_values: Incomplete | None = ... + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/line_plus_single_stat_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/line_plus_single_stat_properties.pyi new file mode 100644 index 00000000..aded4747 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/line_plus_single_stat_properties.pyi @@ -0,0 +1,160 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class LinePlusSingleStatProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + time_format: Incomplete | None = ..., + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + axes: Incomplete | None = ..., + static_legend: Incomplete | None = ..., + x_column: Incomplete | None = ..., + generate_x_axis_ticks: Incomplete | None = ..., + x_total_ticks: Incomplete | None = ..., + x_tick_start: Incomplete | None = ..., + x_tick_step: Incomplete | None = ..., + y_column: Incomplete | None = ..., + generate_y_axis_ticks: Incomplete | None = ..., + y_total_ticks: Incomplete | None = ..., + y_tick_start: Incomplete | None = ..., + y_tick_step: Incomplete | None = ..., + shade_below: Incomplete | None = ..., + hover_dimension: Incomplete | None = ..., + position: Incomplete | None = ..., + prefix: Incomplete | None = ..., + suffix: Incomplete | None = ..., + decimal_places: Incomplete | None = ..., + legend_colorize_rows: Incomplete | None = ..., + legend_hide: Incomplete | None = ..., + legend_opacity: Incomplete | None = ..., + legend_orientation_threshold: Incomplete | None = ..., + ) -> None: ... + @property + def time_format(self): ... + @time_format.setter + def time_format(self, time_format) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... 
+ @colors.setter + def colors(self, colors) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def axes(self): ... + @axes.setter + def axes(self, axes) -> None: ... + @property + def static_legend(self): ... + @static_legend.setter + def static_legend(self, static_legend) -> None: ... + @property + def x_column(self): ... + @x_column.setter + def x_column(self, x_column) -> None: ... + @property + def generate_x_axis_ticks(self): ... + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... + @property + def x_total_ticks(self): ... + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks) -> None: ... + @property + def x_tick_start(self): ... + @x_tick_start.setter + def x_tick_start(self, x_tick_start) -> None: ... + @property + def x_tick_step(self): ... + @x_tick_step.setter + def x_tick_step(self, x_tick_step) -> None: ... + @property + def y_column(self): ... + @y_column.setter + def y_column(self, y_column) -> None: ... + @property + def generate_y_axis_ticks(self): ... + @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... + @property + def y_total_ticks(self): ... + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks) -> None: ... + @property + def y_tick_start(self): ... + @y_tick_start.setter + def y_tick_start(self, y_tick_start) -> None: ... + @property + def y_tick_step(self): ... + @y_tick_step.setter + def y_tick_step(self, y_tick_step) -> None: ... + @property + def shade_below(self): ... + @shade_below.setter + def shade_below(self, shade_below) -> None: ... + @property + def hover_dimension(self): ... + @hover_dimension.setter + def hover_dimension(self, hover_dimension) -> None: ... + @property + def position(self): ... + @position.setter + def position(self, position) -> None: ... + @property + def prefix(self): ... + @prefix.setter + def prefix(self, prefix) -> None: ... + @property + def suffix(self): ... + @suffix.setter + def suffix(self, suffix) -> None: ... + @property + def decimal_places(self): ... + @decimal_places.setter + def decimal_places(self, decimal_places) -> None: ... + @property + def legend_colorize_rows(self): ... + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows) -> None: ... + @property + def legend_hide(self): ... + @legend_hide.setter + def legend_hide(self, legend_hide) -> None: ... + @property + def legend_opacity(self): ... + @legend_opacity.setter + def legend_opacity(self, legend_opacity) -> None: ... + @property + def legend_orientation_threshold(self): ... + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/line_protocol_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/line_protocol_error.pyi new file mode 100644 index 00000000..f610aed6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/line_protocol_error.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class LineProtocolError: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + code: Incomplete | None = ..., + message: Incomplete | None = ..., + op: Incomplete | None = ..., + err: Incomplete | None = ..., + line: Incomplete | None = ..., + ) -> None: ... + @property + def code(self): ... + @code.setter + def code(self, code) -> None: ... + @property + def message(self): ... + @message.setter + def message(self, message) -> None: ... + @property + def op(self): ... + @op.setter + def op(self, op) -> None: ... + @property + def err(self): ... + @err.setter + def err(self, err) -> None: ... + @property + def line(self): ... + @line.setter + def line(self, line) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/line_protocol_length_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/line_protocol_length_error.pyi new file mode 100644 index 00000000..5a278a79 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/line_protocol_length_error.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class LineProtocolLengthError: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, code: Incomplete | None = ..., message: Incomplete | None = ...) -> None: ... + @property + def code(self): ... + @code.setter + def code(self, code) -> None: ... + @property + def message(self): ... + @message.setter + def message(self, message) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/links.pyi new file mode 100644 index 00000000..a24c7454 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/links.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Links: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, next: Incomplete | None = ..., _self: Incomplete | None = ..., prev: Incomplete | None = ...) -> None: ... + @property + def next(self): ... + @next.setter + def next(self, next) -> None: ... + @property + def prev(self): ... + @prev.setter + def prev(self, prev) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/list_stacks_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/list_stacks_response.pyi new file mode 100644 index 00000000..6b550a0a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/list_stacks_response.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class ListStacksResponse: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, stacks: Incomplete | None = ...) -> None: ... + @property + def stacks(self): ... + @stacks.setter + def stacks(self, stacks) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/log_event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/log_event.pyi new file mode 100644 index 00000000..85d8fade --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/log_event.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class LogEvent: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, time: Incomplete | None = ..., message: Incomplete | None = ..., run_id: Incomplete | None = ... + ) -> None: ... + @property + def time(self): ... + @time.setter + def time(self, time) -> None: ... + @property + def message(self): ... + @message.setter + def message(self, message) -> None: ... + @property + def run_id(self): ... + @run_id.setter + def run_id(self, run_id) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/logical_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/logical_expression.pyi new file mode 100644 index 00000000..154462de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/logical_expression.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class LogicalExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + operator: Incomplete | None = ..., + left: Incomplete | None = ..., + right: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def operator(self): ... + @operator.setter + def operator(self, operator) -> None: ... + @property + def left(self): ... + @left.setter + def left(self, left) -> None: ... + @property + def right(self): ... + @right.setter + def right(self, right) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/logs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/logs.pyi new file mode 100644 index 00000000..a48b6f91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/logs.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class Logs: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, events: Incomplete | None = ...) -> None: ... + @property + def events(self): ... + @events.setter + def events(self, events) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/map_variable_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/map_variable_properties.pyi new file mode 100644 index 00000000..d624ce94 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/map_variable_properties.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.variable_properties import VariableProperties + +class MapVariableProperties(VariableProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., values: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def values(self): ... + @values.setter + def values(self, values) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/markdown_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/markdown_view_properties.pyi new file mode 100644 index 00000000..77fd8035 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/markdown_view_properties.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class MarkdownViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., shape: Incomplete | None = ..., note: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema.pyi new file mode 100644 index 00000000..c6f8d6e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class MeasurementSchema: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + bucket_id: Incomplete | None = ..., + name: Incomplete | None = ..., + columns: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def bucket_id(self): ... + @bucket_id.setter + def bucket_id(self, bucket_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def columns(self): ... + @columns.setter + def columns(self, columns) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_column.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_column.pyi new file mode 100644 index 00000000..b24812dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_column.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class MeasurementSchemaColumn: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., type: Incomplete | None = ..., data_type: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def data_type(self): ... + @data_type.setter + def data_type(self, data_type) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_create_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_create_request.pyi new file mode 100644 index 00000000..ad1dfdcd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_create_request.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class MeasurementSchemaCreateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, name: Incomplete | None = ..., columns: Incomplete | None = ...) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def columns(self): ... + @columns.setter + def columns(self, columns) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_list.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_list.pyi new file mode 100644 index 00000000..f0fafb2a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_list.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class MeasurementSchemaList: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, measurement_schemas: Incomplete | None = ...) -> None: ... + @property + def measurement_schemas(self): ... + @measurement_schemas.setter + def measurement_schemas(self, measurement_schemas) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_update_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_update_request.pyi new file mode 100644 index 00000000..5bdf0260 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/measurement_schema_update_request.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class MeasurementSchemaUpdateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, columns: Incomplete | None = ...) -> None: ... + @property + def columns(self): ... + @columns.setter + def columns(self, columns) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/member_assignment.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/member_assignment.pyi new file mode 100644 index 00000000..68e2590f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/member_assignment.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.statement import Statement + +class MemberAssignment(Statement): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., member: Incomplete | None = ..., init: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def member(self): ... + @member.setter + def member(self, member) -> None: ... + @property + def init(self): ... + @init.setter + def init(self, init) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/member_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/member_expression.pyi new file mode 100644 index 00000000..419a6dcc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/member_expression.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class MemberExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, type: Incomplete | None = ..., object: Incomplete | None = ..., _property: Incomplete | None = ... + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def object(self): ... + @object.setter + def object(self, object) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/metadata_backup.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/metadata_backup.pyi new file mode 100644 index 00000000..3cf8ef9a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/metadata_backup.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +class MetadataBackup: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, kv: Incomplete | None = ..., sql: Incomplete | None = ..., buckets: Incomplete | None = ...) -> None: ... + @property + def kv(self): ... + @kv.setter + def kv(self, kv) -> None: ... + @property + def sql(self): ... + @sql.setter + def sql(self, sql) -> None: ... + @property + def buckets(self): ... + @buckets.setter + def buckets(self, buckets) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/model_property.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/model_property.pyi new file mode 100644 index 00000000..759dc4f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/model_property.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +class ModelProperty: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., key: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def key(self): ... + @key.setter + def key(self, key) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/mosaic_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/mosaic_view_properties.pyi new file mode 100644 index 00000000..fdc39b0a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/mosaic_view_properties.pyi @@ -0,0 +1,160 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class MosaicViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + time_format: Incomplete | None = ..., + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + x_column: Incomplete | None = ..., + generate_x_axis_ticks: Incomplete | None = ..., + x_total_ticks: Incomplete | None = ..., + x_tick_start: Incomplete | None = ..., + x_tick_step: Incomplete | None = ..., + y_label_column_separator: Incomplete | None = ..., + y_label_columns: Incomplete | None = ..., + y_series_columns: Incomplete | None = ..., + fill_columns: Incomplete | None = ..., + x_domain: Incomplete | None = ..., + y_domain: Incomplete | None = ..., + x_axis_label: Incomplete | None = ..., + y_axis_label: Incomplete | None = ..., + x_prefix: Incomplete | None = ..., + x_suffix: Incomplete | None = ..., + y_prefix: Incomplete | None = ..., + y_suffix: Incomplete | None = ..., + hover_dimension: Incomplete | None = ..., + legend_colorize_rows: Incomplete | None = ..., + legend_hide: Incomplete | None = ..., + legend_opacity: Incomplete | None = ..., + legend_orientation_threshold: Incomplete | None = ..., + ) -> None: ... + @property + def time_format(self): ... + @time_format.setter + def time_format(self, time_format) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def shape(self): ... 
+ @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def x_column(self): ... + @x_column.setter + def x_column(self, x_column) -> None: ... + @property + def generate_x_axis_ticks(self): ... + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... + @property + def x_total_ticks(self): ... + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks) -> None: ... + @property + def x_tick_start(self): ... + @x_tick_start.setter + def x_tick_start(self, x_tick_start) -> None: ... + @property + def x_tick_step(self): ... + @x_tick_step.setter + def x_tick_step(self, x_tick_step) -> None: ... + @property + def y_label_column_separator(self): ... + @y_label_column_separator.setter + def y_label_column_separator(self, y_label_column_separator) -> None: ... + @property + def y_label_columns(self): ... + @y_label_columns.setter + def y_label_columns(self, y_label_columns) -> None: ... + @property + def y_series_columns(self): ... + @y_series_columns.setter + def y_series_columns(self, y_series_columns) -> None: ... + @property + def fill_columns(self): ... + @fill_columns.setter + def fill_columns(self, fill_columns) -> None: ... + @property + def x_domain(self): ... + @x_domain.setter + def x_domain(self, x_domain) -> None: ... + @property + def y_domain(self): ... + @y_domain.setter + def y_domain(self, y_domain) -> None: ... + @property + def x_axis_label(self): ... + @x_axis_label.setter + def x_axis_label(self, x_axis_label) -> None: ... + @property + def y_axis_label(self): ... + @y_axis_label.setter + def y_axis_label(self, y_axis_label) -> None: ... + @property + def x_prefix(self): ... + @x_prefix.setter + def x_prefix(self, x_prefix) -> None: ... + @property + def x_suffix(self): ... + @x_suffix.setter + def x_suffix(self, x_suffix) -> None: ... + @property + def y_prefix(self): ... + @y_prefix.setter + def y_prefix(self, y_prefix) -> None: ... + @property + def y_suffix(self): ... + @y_suffix.setter + def y_suffix(self, y_suffix) -> None: ... + @property + def hover_dimension(self): ... + @hover_dimension.setter + def hover_dimension(self, hover_dimension) -> None: ... + @property + def legend_colorize_rows(self): ... + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows) -> None: ... + @property + def legend_hide(self): ... + @legend_hide.setter + def legend_hide(self, legend_hide) -> None: ... + @property + def legend_opacity(self): ... + @legend_opacity.setter + def legend_opacity(self, legend_opacity) -> None: ... + @property + def legend_orientation_threshold(self): ... + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/node.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/node.pyi new file mode 100644 index 00000000..6535e25d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/node.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +class Node: + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint.pyi new file mode 100644 index 00000000..de8ffbdc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +class NotificationEndpoint: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator_value_class_map: Incomplete + discriminator: str + def __init__(self, type: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def get_real_child_model(self, data): ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base.pyi new file mode 100644 index 00000000..bb7a37d4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base.pyi @@ -0,0 +1,68 @@ +from _typeshed import Incomplete + +class NotificationEndpointBase: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + user_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + description: Incomplete | None = ..., + name: Incomplete | None = ..., + status: str = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + type: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def user_id(self): ... + @user_id.setter + def user_id(self, user_id) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def status(self): ... 
+ @status.setter + def status(self, status) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base_links.pyi new file mode 100644 index 00000000..e945e848 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_base_links.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete + +class NotificationEndpointBaseLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + _self: Incomplete | None = ..., + labels: Incomplete | None = ..., + members: Incomplete | None = ..., + owners: Incomplete | None = ..., + ) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def members(self): ... + @members.setter + def members(self, members) -> None: ... + @property + def owners(self): ... + @owners.setter + def owners(self, owners) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_discriminator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_discriminator.pyi new file mode 100644 index 00000000..2315c3d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_discriminator.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_endpoint_base import NotificationEndpointBase + +class NotificationEndpointDiscriminator(NotificationEndpointBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + user_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + description: Incomplete | None = ..., + name: Incomplete | None = ..., + status: str = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + type: Incomplete | None = ..., + ) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_type.pyi new file mode 100644 index 00000000..130fdf44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_type.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +class NotificationEndpointType: + SLACK: str + PAGERDUTY: str + HTTP: str + TELEGRAM: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_update.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_update.pyi new file mode 100644 index 00000000..80f30dff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoint_update.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class NotificationEndpointUpdate: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., description: Incomplete | None = ..., status: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoints.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoints.pyi new file mode 100644 index 00000000..5f3fab0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_endpoints.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class NotificationEndpoints: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, notification_endpoints: Incomplete | None = ..., links: Incomplete | None = ...) -> None: ... + @property + def notification_endpoints(self): ... + @notification_endpoints.setter + def notification_endpoints(self, notification_endpoints) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule.pyi new file mode 100644 index 00000000..8eeca7f6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +class NotificationRule: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator_value_class_map: Incomplete + discriminator: str + def __init__(self, type: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def get_real_child_model(self, data): ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_base.pyi new file mode 100644 index 00000000..636a256f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_base.pyi @@ -0,0 +1,128 @@ +from _typeshed import Incomplete + +class NotificationRuleBase: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def latest_completed(self): ... + @latest_completed.setter + def latest_completed(self, latest_completed) -> None: ... + @property + def last_run_status(self): ... + @last_run_status.setter + def last_run_status(self, last_run_status) -> None: ... + @property + def last_run_error(self): ... + @last_run_error.setter + def last_run_error(self, last_run_error) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def endpoint_id(self): ... + @endpoint_id.setter + def endpoint_id(self, endpoint_id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def task_id(self): ... + @task_id.setter + def task_id(self, task_id) -> None: ... + @property + def owner_id(self): ... + @owner_id.setter + def owner_id(self, owner_id) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... 
+ @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def sleep_until(self): ... + @sleep_until.setter + def sleep_until(self, sleep_until) -> None: ... + @property + def every(self): ... + @every.setter + def every(self, every) -> None: ... + @property + def offset(self): ... + @offset.setter + def offset(self, offset) -> None: ... + @property + def runbook_link(self): ... + @runbook_link.setter + def runbook_link(self, runbook_link) -> None: ... + @property + def limit_every(self): ... + @limit_every.setter + def limit_every(self, limit_every) -> None: ... + @property + def limit(self): ... + @limit.setter + def limit(self, limit) -> None: ... + @property + def tag_rules(self): ... + @tag_rules.setter + def tag_rules(self, tag_rules) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def status_rules(self): ... + @status_rules.setter + def status_rules(self, status_rules) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_base_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_base_links.pyi new file mode 100644 index 00000000..a745181a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_base_links.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete + +class NotificationRuleBaseLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + _self: Incomplete | None = ..., + labels: Incomplete | None = ..., + members: Incomplete | None = ..., + owners: Incomplete | None = ..., + query: Incomplete | None = ..., + ) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def members(self): ... + @members.setter + def members(self, members) -> None: ... + @property + def owners(self): ... + @owners.setter + def owners(self, owners) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_discriminator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_discriminator.pyi new file mode 100644 index 00000000..bbb750af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_discriminator.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_rule_base import NotificationRuleBase + +class NotificationRuleDiscriminator(NotificationRuleBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_update.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_update.pyi new file mode 100644 index 00000000..84062324 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rule_update.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class NotificationRuleUpdate: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., description: Incomplete | None = ..., status: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rules.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rules.pyi new file mode 100644 index 00000000..e1d717b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/notification_rules.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class NotificationRules: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, notification_rules: Incomplete | None = ..., links: Incomplete | None = ...) -> None: ... + @property + def notification_rules(self): ... + @notification_rules.setter + def notification_rules(self, notification_rules) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/object_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/object_expression.pyi new file mode 100644 index 00000000..e251b611 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/object_expression.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class ObjectExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., properties: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def properties(self): ... + @properties.setter + def properties(self, properties) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/onboarding_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/onboarding_request.pyi new file mode 100644 index 00000000..fa70e843 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/onboarding_request.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class OnboardingRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + username: Incomplete | None = ..., + password: Incomplete | None = ..., + org: Incomplete | None = ..., + bucket: Incomplete | None = ..., + retention_period_seconds: Incomplete | None = ..., + retention_period_hrs: Incomplete | None = ..., + token: Incomplete | None = ..., + ) -> None: ... + @property + def username(self): ... + @username.setter + def username(self, username) -> None: ... + @property + def password(self): ... + @password.setter + def password(self, password) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + @property + def bucket(self): ... 
+ @bucket.setter + def bucket(self, bucket) -> None: ... + @property + def retention_period_seconds(self): ... + @retention_period_seconds.setter + def retention_period_seconds(self, retention_period_seconds) -> None: ... + @property + def retention_period_hrs(self): ... + @retention_period_hrs.setter + def retention_period_hrs(self, retention_period_hrs) -> None: ... + @property + def token(self): ... + @token.setter + def token(self, token) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/onboarding_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/onboarding_response.pyi new file mode 100644 index 00000000..9b81563a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/onboarding_response.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class OnboardingResponse: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + user: Incomplete | None = ..., + org: Incomplete | None = ..., + bucket: Incomplete | None = ..., + auth: Incomplete | None = ..., + ) -> None: ... + @property + def user(self): ... + @user.setter + def user(self, user) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + @property + def bucket(self): ... + @bucket.setter + def bucket(self, bucket) -> None: ... + @property + def auth(self): ... + @auth.setter + def auth(self, auth) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/option_statement.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/option_statement.pyi new file mode 100644 index 00000000..a36e898a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/option_statement.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.statement import Statement + +class OptionStatement(Statement): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., assignment: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def assignment(self): ... + @assignment.setter + def assignment(self, assignment) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/organization.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/organization.pyi new file mode 100644 index 00000000..01274bd2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/organization.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class Organization: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + links: Incomplete | None = ..., + id: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: str = ..., + ) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/organization_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/organization_links.pyi new file mode 100644 index 00000000..7f958c5e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/organization_links.pyi @@ -0,0 +1,49 @@ +from _typeshed import Incomplete + +class OrganizationLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + _self: Incomplete | None = ..., + members: Incomplete | None = ..., + owners: Incomplete | None = ..., + labels: Incomplete | None = ..., + secrets: Incomplete | None = ..., + buckets: Incomplete | None = ..., + tasks: Incomplete | None = ..., + dashboards: Incomplete | None = ..., + ) -> None: ... + @property + def members(self): ... + @members.setter + def members(self, members) -> None: ... + @property + def owners(self): ... + @owners.setter + def owners(self, owners) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def secrets(self): ... + @secrets.setter + def secrets(self, secrets) -> None: ... + @property + def buckets(self): ... + @buckets.setter + def buckets(self, buckets) -> None: ... + @property + def tasks(self): ... + @tasks.setter + def tasks(self, tasks) -> None: ... + @property + def dashboards(self): ... + @dashboards.setter + def dashboards(self, dashboards) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/organizations.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/organizations.pyi new file mode 100644 index 00000000..9cfa3535 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/organizations.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Organizations: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., orgs: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def orgs(self): ... + @orgs.setter + def orgs(self, orgs) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/package.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/package.pyi new file mode 100644 index 00000000..2a9b332b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/package.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class Package: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + path: Incomplete | None = ..., + package: Incomplete | None = ..., + files: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def path(self): ... + @path.setter + def path(self, path) -> None: ... + @property + def package(self): ... + @package.setter + def package(self, package) -> None: ... + @property + def files(self): ... + @files.setter + def files(self, files) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/package_clause.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/package_clause.pyi new file mode 100644 index 00000000..4561d412 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/package_clause.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class PackageClause: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_endpoint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_endpoint.pyi new file mode 100644 index 00000000..c3b15a36 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_endpoint.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator + +class PagerDutyNotificationEndpoint(NotificationEndpointDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + client_url: Incomplete | None = ..., + routing_key: Incomplete | None = ..., + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + user_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + description: Incomplete | None = ..., + name: Incomplete | None = ..., + status: str = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + type: str = ..., + ) -> None: ... + @property + def client_url(self): ... + @client_url.setter + def client_url(self, client_url) -> None: ... + @property + def routing_key(self): ... + @routing_key.setter + def routing_key(self, routing_key) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule.pyi new file mode 100644 index 00000000..226f29d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.pager_duty_notification_rule_base import PagerDutyNotificationRuleBase + +class PagerDutyNotificationRule(PagerDutyNotificationRuleBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: str = ..., + message_template: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... 
+ def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule_base.pyi new file mode 100644 index 00000000..5b4e9ac6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pager_duty_notification_rule_base.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + +class PagerDutyNotificationRuleBase(NotificationRuleDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + message_template: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def message_template(self): ... + @message_template.setter + def message_template(self, message_template) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/paren_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/paren_expression.pyi new file mode 100644 index 00000000..bf43111c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/paren_expression.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class ParenExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., expression: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def expression(self): ... + @expression.setter + def expression(self, expression) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/password_reset_body.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/password_reset_body.pyi new file mode 100644 index 00000000..fe33edc2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/password_reset_body.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class PasswordResetBody: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, password: Incomplete | None = ...) -> None: ... + @property + def password(self): ... + @password.setter + def password(self, password) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_bucket_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_bucket_request.pyi new file mode 100644 index 00000000..743ce3d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_bucket_request.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class PatchBucketRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., description: Incomplete | None = ..., retention_rules: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def retention_rules(self): ... + @retention_rules.setter + def retention_rules(self, retention_rules) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_dashboard_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_dashboard_request.pyi new file mode 100644 index 00000000..4101ccf8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_dashboard_request.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class PatchDashboardRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., description: Incomplete | None = ..., cells: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def cells(self): ... + @cells.setter + def cells(self, cells) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_organization_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_organization_request.pyi new file mode 100644 index 00000000..d8297503 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_organization_request.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class PatchOrganizationRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, name: Incomplete | None = ..., description: Incomplete | None = ...) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_retention_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_retention_rule.pyi new file mode 100644 index 00000000..4078c21f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_retention_rule.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class PatchRetentionRule: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, type: str = ..., every_seconds: Incomplete | None = ..., shard_group_duration_seconds: Incomplete | None = ... + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def every_seconds(self): ... + @every_seconds.setter + def every_seconds(self, every_seconds) -> None: ... + @property + def shard_group_duration_seconds(self): ... + @shard_group_duration_seconds.setter + def shard_group_duration_seconds(self, shard_group_duration_seconds) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_stack_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_stack_request.pyi new file mode 100644 index 00000000..f272f1b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_stack_request.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class PatchStackRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + description: Incomplete | None = ..., + template_ur_ls: Incomplete | None = ..., + additional_resources: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def template_ur_ls(self): ... 
+ @template_ur_ls.setter + def template_ur_ls(self, template_ur_ls) -> None: ... + @property + def additional_resources(self): ... + @additional_resources.setter + def additional_resources(self, additional_resources) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_stack_request_additional_resources.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_stack_request_additional_resources.pyi new file mode 100644 index 00000000..9976e39e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/patch_stack_request_additional_resources.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class PatchStackRequestAdditionalResources: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, resource_id: Incomplete | None = ..., kind: Incomplete | None = ..., template_meta_name: Incomplete | None = ... + ) -> None: ... + @property + def resource_id(self): ... + @resource_id.setter + def resource_id(self, resource_id) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/permission.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/permission.pyi new file mode 100644 index 00000000..f755646e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/permission.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Permission: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, action: Incomplete | None = ..., resource: Incomplete | None = ...) -> None: ... + @property + def action(self): ... + @action.setter + def action(self, action) -> None: ... + @property + def resource(self): ... + @resource.setter + def resource(self, resource) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/permission_resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/permission_resource.pyi new file mode 100644 index 00000000..6c86fd5a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/permission_resource.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class PermissionResource: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + id: Incomplete | None = ..., + name: Incomplete | None = ..., + org_id: Incomplete | None = ..., + org: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pipe_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pipe_expression.pyi new file mode 100644 index 00000000..70c9bd86 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pipe_expression.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class PipeExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, type: Incomplete | None = ..., argument: Incomplete | None = ..., call: Incomplete | None = ... + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def argument(self): ... + @argument.setter + def argument(self, argument) -> None: ... + @property + def call(self): ... + @call.setter + def call(self, call) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pipe_literal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pipe_literal.pyi new file mode 100644 index 00000000..10080609 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/pipe_literal.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class PipeLiteral(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... 
+ def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_bucket_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_bucket_request.pyi new file mode 100644 index 00000000..36e452ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_bucket_request.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class PostBucketRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + org_id: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + rp: Incomplete | None = ..., + retention_rules: Incomplete | None = ..., + schema_type: Incomplete | None = ..., + ) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def rp(self): ... + @rp.setter + def rp(self, rp) -> None: ... + @property + def retention_rules(self): ... + @retention_rules.setter + def retention_rules(self, retention_rules) -> None: ... + @property + def schema_type(self): ... + @schema_type.setter + def schema_type(self, schema_type) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_check.pyi new file mode 100644 index 00000000..b2a71ff5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_check.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +class PostCheck: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator_value_class_map: Incomplete + discriminator: str + def __init__(self, type: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def get_real_child_model(self, data): ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_notification_endpoint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_notification_endpoint.pyi new file mode 100644 index 00000000..63916caa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_notification_endpoint.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +class PostNotificationEndpoint: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator_value_class_map: Incomplete + discriminator: str + def __init__(self, type: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def get_real_child_model(self, data): ... 
+ def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_notification_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_notification_rule.pyi new file mode 100644 index 00000000..708f39c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_notification_rule.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +class PostNotificationRule: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator_value_class_map: Incomplete + discriminator: str + def __init__(self, type: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def get_real_child_model(self, data): ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_organization_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_organization_request.pyi new file mode 100644 index 00000000..e5621f0a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_organization_request.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class PostOrganizationRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, name: Incomplete | None = ..., description: Incomplete | None = ...) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_restore_kv_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_restore_kv_response.pyi new file mode 100644 index 00000000..b429b19c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_restore_kv_response.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class PostRestoreKVResponse: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, token: Incomplete | None = ...) -> None: ... + @property + def token(self): ... + @token.setter + def token(self, token) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_stack_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_stack_request.pyi new file mode 100644 index 00000000..91696b89 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/post_stack_request.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class PostStackRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + org_id: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + urls: Incomplete | None = ..., + ) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def urls(self): ... + @urls.setter + def urls(self, urls) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/property_key.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/property_key.pyi new file mode 100644 index 00000000..17673037 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/property_key.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class PropertyKey(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query.pyi new file mode 100644 index 00000000..35f787ec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class Query: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + extern: Incomplete | None = ..., + query: Incomplete | None = ..., + type: Incomplete | None = ..., + params: Incomplete | None = ..., + dialect: Incomplete | None = ..., + now: Incomplete | None = ..., + ) -> None: ... + @property + def extern(self): ... + @extern.setter + def extern(self, extern) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def params(self): ... + @params.setter + def params(self, params) -> None: ... + @property + def dialect(self): ... + @dialect.setter + def dialect(self, dialect) -> None: ... + @property + def now(self): ... + @now.setter + def now(self, now) -> None: ... 
+ def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query_edit_mode.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query_edit_mode.pyi new file mode 100644 index 00000000..60ce7aa3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query_edit_mode.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +class QueryEditMode: + BUILDER: str + ADVANCED: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query_variable_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query_variable_properties.pyi new file mode 100644 index 00000000..008172f1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query_variable_properties.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.variable_properties import VariableProperties + +class QueryVariableProperties(VariableProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., values: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def values(self): ... + @values.setter + def values(self, values) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query_variable_properties_values.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query_variable_properties_values.pyi new file mode 100644 index 00000000..e2194b3d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/query_variable_properties_values.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class QueryVariablePropertiesValues: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, query: Incomplete | None = ..., language: Incomplete | None = ...) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + @property + def language(self): ... + @language.setter + def language(self, language) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/range_threshold.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/range_threshold.pyi new file mode 100644 index 00000000..af6352b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/range_threshold.pyi @@ -0,0 +1,37 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.threshold_base import ThresholdBase + +class RangeThreshold(ThresholdBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: str = ..., + min: Incomplete | None = ..., + max: Incomplete | None = ..., + within: Incomplete | None = ..., + level: Incomplete | None = ..., + all_values: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def min(self): ... + @min.setter + def min(self, min) -> None: ... + @property + def max(self): ... + @max.setter + def max(self, max) -> None: ... + @property + def within(self): ... + @within.setter + def within(self, within) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/ready.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/ready.pyi new file mode 100644 index 00000000..60e4c346 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/ready.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class Ready: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, status: Incomplete | None = ..., started: Incomplete | None = ..., up: Incomplete | None = ... + ) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def started(self): ... + @started.setter + def started(self, started) -> None: ... + @property + def up(self): ... + @up.setter + def up(self, up) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/regexp_literal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/regexp_literal.pyi new file mode 100644 index 00000000..7061ab11 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/regexp_literal.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class RegexpLiteral(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... 
+ def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connection.pyi new file mode 100644 index 00000000..4e18ae18 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connection.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class RemoteConnection: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + name: Incomplete | None = ..., + org_id: Incomplete | None = ..., + description: Incomplete | None = ..., + remote_url: Incomplete | None = ..., + remote_org_id: Incomplete | None = ..., + allow_insecure_tls: bool = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def remote_url(self): ... + @remote_url.setter + def remote_url(self, remote_url) -> None: ... + @property + def remote_org_id(self): ... + @remote_org_id.setter + def remote_org_id(self, remote_org_id) -> None: ... + @property + def allow_insecure_tls(self): ... + @allow_insecure_tls.setter + def allow_insecure_tls(self, allow_insecure_tls) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connection_creation_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connection_creation_request.pyi new file mode 100644 index 00000000..9abbc37c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connection_creation_request.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class RemoteConnectionCreationRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + description: Incomplete | None = ..., + org_id: Incomplete | None = ..., + remote_url: Incomplete | None = ..., + remote_api_token: Incomplete | None = ..., + remote_org_id: Incomplete | None = ..., + allow_insecure_tls: bool = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def remote_url(self): ... + @remote_url.setter + def remote_url(self, remote_url) -> None: ... + @property + def remote_api_token(self): ... + @remote_api_token.setter + def remote_api_token(self, remote_api_token) -> None: ... + @property + def remote_org_id(self): ... + @remote_org_id.setter + def remote_org_id(self, remote_org_id) -> None: ... 
+ @property + def allow_insecure_tls(self): ... + @allow_insecure_tls.setter + def allow_insecure_tls(self, allow_insecure_tls) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connection_update_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connection_update_request.pyi new file mode 100644 index 00000000..54533544 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connection_update_request.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class RemoteConnectionUpdateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + description: Incomplete | None = ..., + remote_url: Incomplete | None = ..., + remote_api_token: Incomplete | None = ..., + remote_org_id: Incomplete | None = ..., + allow_insecure_tls: bool = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def remote_url(self): ... + @remote_url.setter + def remote_url(self, remote_url) -> None: ... + @property + def remote_api_token(self): ... + @remote_api_token.setter + def remote_api_token(self, remote_api_token) -> None: ... + @property + def remote_org_id(self): ... + @remote_org_id.setter + def remote_org_id(self, remote_org_id) -> None: ... + @property + def allow_insecure_tls(self): ... + @allow_insecure_tls.setter + def allow_insecure_tls(self, allow_insecure_tls) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connections.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connections.pyi new file mode 100644 index 00000000..a886cfca --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/remote_connections.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class RemoteConnections: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, remotes: Incomplete | None = ...) -> None: ... + @property + def remotes(self): ... + @remotes.setter + def remotes(self, remotes) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/renamable_field.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/renamable_field.pyi new file mode 100644 index 00000000..1c1cd738 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/renamable_field.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class RenamableField: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, internal_name: Incomplete | None = ..., display_name: Incomplete | None = ..., visible: Incomplete | None = ... + ) -> None: ... + @property + def internal_name(self): ... + @internal_name.setter + def internal_name(self, internal_name) -> None: ... + @property + def display_name(self): ... + @display_name.setter + def display_name(self, display_name) -> None: ... + @property + def visible(self): ... + @visible.setter + def visible(self, visible) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replication.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replication.pyi new file mode 100644 index 00000000..88890532 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replication.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete + +class Replication: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + org_id: Incomplete | None = ..., + remote_id: Incomplete | None = ..., + local_bucket_id: Incomplete | None = ..., + remote_bucket_id: Incomplete | None = ..., + max_queue_size_bytes: Incomplete | None = ..., + current_queue_size_bytes: Incomplete | None = ..., + latest_response_code: Incomplete | None = ..., + latest_error_message: Incomplete | None = ..., + drop_non_retryable_data: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def remote_id(self): ... + @remote_id.setter + def remote_id(self, remote_id) -> None: ... + @property + def local_bucket_id(self): ... + @local_bucket_id.setter + def local_bucket_id(self, local_bucket_id) -> None: ... + @property + def remote_bucket_id(self): ... + @remote_bucket_id.setter + def remote_bucket_id(self, remote_bucket_id) -> None: ... + @property + def max_queue_size_bytes(self): ... + @max_queue_size_bytes.setter + def max_queue_size_bytes(self, max_queue_size_bytes) -> None: ... + @property + def current_queue_size_bytes(self): ... + @current_queue_size_bytes.setter + def current_queue_size_bytes(self, current_queue_size_bytes) -> None: ... + @property + def latest_response_code(self): ... 
+ @latest_response_code.setter + def latest_response_code(self, latest_response_code) -> None: ... + @property + def latest_error_message(self): ... + @latest_error_message.setter + def latest_error_message(self, latest_error_message) -> None: ... + @property + def drop_non_retryable_data(self): ... + @drop_non_retryable_data.setter + def drop_non_retryable_data(self, drop_non_retryable_data) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replication_creation_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replication_creation_request.pyi new file mode 100644 index 00000000..860ce912 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replication_creation_request.pyi @@ -0,0 +1,53 @@ +from _typeshed import Incomplete + +class ReplicationCreationRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + description: Incomplete | None = ..., + org_id: Incomplete | None = ..., + remote_id: Incomplete | None = ..., + local_bucket_id: Incomplete | None = ..., + remote_bucket_id: Incomplete | None = ..., + max_queue_size_bytes: int = ..., + drop_non_retryable_data: bool = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def remote_id(self): ... + @remote_id.setter + def remote_id(self, remote_id) -> None: ... + @property + def local_bucket_id(self): ... + @local_bucket_id.setter + def local_bucket_id(self, local_bucket_id) -> None: ... + @property + def remote_bucket_id(self): ... + @remote_bucket_id.setter + def remote_bucket_id(self, remote_bucket_id) -> None: ... + @property + def max_queue_size_bytes(self): ... + @max_queue_size_bytes.setter + def max_queue_size_bytes(self, max_queue_size_bytes) -> None: ... + @property + def drop_non_retryable_data(self): ... + @drop_non_retryable_data.setter + def drop_non_retryable_data(self, drop_non_retryable_data) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replication_update_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replication_update_request.pyi new file mode 100644 index 00000000..31ef087c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replication_update_request.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class ReplicationUpdateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + description: Incomplete | None = ..., + remote_id: Incomplete | None = ..., + remote_bucket_id: Incomplete | None = ..., + max_queue_size_bytes: Incomplete | None = ..., + drop_non_retryable_data: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def remote_id(self): ... + @remote_id.setter + def remote_id(self, remote_id) -> None: ... + @property + def remote_bucket_id(self): ... + @remote_bucket_id.setter + def remote_bucket_id(self, remote_bucket_id) -> None: ... + @property + def max_queue_size_bytes(self): ... + @max_queue_size_bytes.setter + def max_queue_size_bytes(self, max_queue_size_bytes) -> None: ... + @property + def drop_non_retryable_data(self): ... + @drop_non_retryable_data.setter + def drop_non_retryable_data(self, drop_non_retryable_data) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replications.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replications.pyi new file mode 100644 index 00000000..504ae66d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/replications.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class Replications: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, replications: Incomplete | None = ...) -> None: ... + @property + def replications(self): ... + @replications.setter + def replications(self, replications) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_member.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_member.pyi new file mode 100644 index 00000000..3ca8877e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_member.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.user_response import UserResponse + +class ResourceMember(UserResponse): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + role: str = ..., + id: Incomplete | None = ..., + oauth_id: Incomplete | None = ..., + name: Incomplete | None = ..., + status: str = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def role(self): ... + @role.setter + def role(self, role) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_members.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_members.pyi new file mode 100644 index 00000000..a167c520 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_members.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class ResourceMembers: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., users: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def users(self): ... + @users.setter + def users(self, users) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_members_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_members_links.pyi new file mode 100644 index 00000000..20aa58c4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_members_links.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +class ResourceMembersLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, _self: Incomplete | None = ...) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_owner.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_owner.pyi new file mode 100644 index 00000000..8fd8644b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_owner.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.user_response import UserResponse + +class ResourceOwner(UserResponse): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + role: str = ..., + id: Incomplete | None = ..., + oauth_id: Incomplete | None = ..., + name: Incomplete | None = ..., + status: str = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def role(self): ... + @role.setter + def role(self, role) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_owners.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_owners.pyi new file mode 100644 index 00000000..f1edc5d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/resource_owners.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class ResourceOwners: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., users: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def users(self): ... + @users.setter + def users(self, users) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/restored_bucket_mappings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/restored_bucket_mappings.pyi new file mode 100644 index 00000000..e64d8cf3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/restored_bucket_mappings.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class RestoredBucketMappings: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, id: Incomplete | None = ..., name: Incomplete | None = ..., shard_mappings: Incomplete | None = ... + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def shard_mappings(self): ... + @shard_mappings.setter + def shard_mappings(self, shard_mappings) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/retention_policy_manifest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/retention_policy_manifest.pyi new file mode 100644 index 00000000..735b9669 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/retention_policy_manifest.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class RetentionPolicyManifest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + replica_n: Incomplete | None = ..., + duration: Incomplete | None = ..., + shard_group_duration: Incomplete | None = ..., + shard_groups: Incomplete | None = ..., + subscriptions: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def replica_n(self): ... + @replica_n.setter + def replica_n(self, replica_n) -> None: ... + @property + def duration(self): ... + @duration.setter + def duration(self, duration) -> None: ... + @property + def shard_group_duration(self): ... + @shard_group_duration.setter + def shard_group_duration(self, shard_group_duration) -> None: ... + @property + def shard_groups(self): ... + @shard_groups.setter + def shard_groups(self, shard_groups) -> None: ... + @property + def subscriptions(self): ... + @subscriptions.setter + def subscriptions(self, subscriptions) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/return_statement.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/return_statement.pyi new file mode 100644 index 00000000..c4483197 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/return_statement.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.statement import Statement + +class ReturnStatement(Statement): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., argument: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def argument(self): ... + @argument.setter + def argument(self, argument) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes.pyi new file mode 100644 index 00000000..72c93de2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes.pyi @@ -0,0 +1,103 @@ +from _typeshed import Incomplete + +class Routes: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + authorizations: Incomplete | None = ..., + buckets: Incomplete | None = ..., + dashboards: Incomplete | None = ..., + external: Incomplete | None = ..., + variables: Incomplete | None = ..., + me: Incomplete | None = ..., + flags: Incomplete | None = ..., + orgs: Incomplete | None = ..., + query: Incomplete | None = ..., + setup: Incomplete | None = ..., + signin: Incomplete | None = ..., + signout: Incomplete | None = ..., + sources: Incomplete | None = ..., + system: Incomplete | None = ..., + tasks: Incomplete | None = ..., + telegrafs: Incomplete | None = ..., + users: Incomplete | None = ..., + write: Incomplete | None = ..., + ) -> None: ... + @property + def authorizations(self): ... + @authorizations.setter + def authorizations(self, authorizations) -> None: ... + @property + def buckets(self): ... + @buckets.setter + def buckets(self, buckets) -> None: ... + @property + def dashboards(self): ... + @dashboards.setter + def dashboards(self, dashboards) -> None: ... + @property + def external(self): ... + @external.setter + def external(self, external) -> None: ... + @property + def variables(self): ... + @variables.setter + def variables(self, variables) -> None: ... + @property + def me(self): ... + @me.setter + def me(self, me) -> None: ... + @property + def flags(self): ... + @flags.setter + def flags(self, flags) -> None: ... + @property + def orgs(self): ... + @orgs.setter + def orgs(self, orgs) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + @property + def setup(self): ... + @setup.setter + def setup(self, setup) -> None: ... + @property + def signin(self): ... + @signin.setter + def signin(self, signin) -> None: ... + @property + def signout(self): ... + @signout.setter + def signout(self, signout) -> None: ... + @property + def sources(self): ... + @sources.setter + def sources(self, sources) -> None: ... + @property + def system(self): ... + @system.setter + def system(self, system) -> None: ... + @property + def tasks(self): ... + @tasks.setter + def tasks(self, tasks) -> None: ... + @property + def telegrafs(self): ... + @telegrafs.setter + def telegrafs(self, telegrafs) -> None: ... + @property + def users(self): ... + @users.setter + def users(self, users) -> None: ... + @property + def write(self): ... + @write.setter + def write(self, write) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes_external.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes_external.pyi new file mode 100644 index 00000000..3c942fe2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes_external.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class RoutesExternal: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, status_feed: Incomplete | None = ...) -> None: ... + @property + def status_feed(self): ... + @status_feed.setter + def status_feed(self, status_feed) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes_query.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes_query.pyi new file mode 100644 index 00000000..4341f7fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes_query.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete + +class RoutesQuery: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + _self: Incomplete | None = ..., + ast: Incomplete | None = ..., + analyze: Incomplete | None = ..., + suggestions: Incomplete | None = ..., + ) -> None: ... + @property + def ast(self): ... + @ast.setter + def ast(self, ast) -> None: ... + @property + def analyze(self): ... + @analyze.setter + def analyze(self, analyze) -> None: ... + @property + def suggestions(self): ... + @suggestions.setter + def suggestions(self, suggestions) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes_system.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes_system.pyi new file mode 100644 index 00000000..810c4c3b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/routes_system.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class RoutesSystem: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, metrics: Incomplete | None = ..., debug: Incomplete | None = ..., health: Incomplete | None = ... + ) -> None: ... + @property + def metrics(self): ... + @metrics.setter + def metrics(self, metrics) -> None: ... + @property + def debug(self): ... + @debug.setter + def debug(self, debug) -> None: ... + @property + def health(self): ... + @health.setter + def health(self, health) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/rule_status_level.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/rule_status_level.pyi new file mode 100644 index 00000000..73b580cb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/rule_status_level.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete + +class RuleStatusLevel: + UNKNOWN: str + OK: str + INFO: str + CRIT: str + WARN: str + ANY: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/run.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/run.pyi new file mode 100644 index 00000000..d89f7c0c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/run.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +class Run: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + status: Incomplete | None = ..., + scheduled_for: Incomplete | None = ..., + log: Incomplete | None = ..., + started_at: Incomplete | None = ..., + finished_at: Incomplete | None = ..., + requested_at: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def task_id(self): ... + @task_id.setter + def task_id(self, task_id) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def scheduled_for(self): ... + @scheduled_for.setter + def scheduled_for(self, scheduled_for) -> None: ... + @property + def log(self): ... + @log.setter + def log(self, log) -> None: ... + @property + def started_at(self): ... + @started_at.setter + def started_at(self, started_at) -> None: ... + @property + def finished_at(self): ... + @finished_at.setter + def finished_at(self, finished_at) -> None: ... + @property + def requested_at(self): ... + @requested_at.setter + def requested_at(self, requested_at) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/run_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/run_links.pyi new file mode 100644 index 00000000..0c70c4f9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/run_links.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class RunLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, _self: Incomplete | None = ..., task: Incomplete | None = ..., retry: Incomplete | None = ...) -> None: ... 
+ @property + def task(self): ... + @task.setter + def task(self, task) -> None: ... + @property + def retry(self): ... + @retry.setter + def retry(self, retry) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/run_manually.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/run_manually.pyi new file mode 100644 index 00000000..bd81d470 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/run_manually.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class RunManually: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, scheduled_for: Incomplete | None = ...) -> None: ... + @property + def scheduled_for(self): ... + @scheduled_for.setter + def scheduled_for(self, scheduled_for) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/runs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/runs.pyi new file mode 100644 index 00000000..2d79c00a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/runs.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Runs: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., runs: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def runs(self): ... + @runs.setter + def runs(self, runs) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scatter_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scatter_view_properties.pyi new file mode 100644 index 00000000..72ec314c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scatter_view_properties.pyi @@ -0,0 +1,170 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class ScatterViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + time_format: Incomplete | None = ..., + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + x_column: Incomplete | None = ..., + generate_x_axis_ticks: Incomplete | None = ..., + x_total_ticks: Incomplete | None = ..., + x_tick_start: Incomplete | None = ..., + x_tick_step: Incomplete | None = ..., + y_column: Incomplete | None = ..., + generate_y_axis_ticks: Incomplete | None = ..., + y_total_ticks: Incomplete | None = ..., + y_tick_start: Incomplete | None = ..., + y_tick_step: Incomplete | None = ..., + fill_columns: Incomplete | None = ..., + symbol_columns: Incomplete | None = ..., + x_domain: Incomplete | None = ..., + y_domain: Incomplete | None = ..., + x_axis_label: Incomplete | None = ..., + y_axis_label: Incomplete | None = ..., + x_prefix: Incomplete | None = ..., + x_suffix: Incomplete | None = ..., + y_prefix: Incomplete | None = ..., + y_suffix: Incomplete | None = ..., + legend_colorize_rows: Incomplete | None = ..., + legend_hide: Incomplete | None = ..., + legend_opacity: Incomplete | None = ..., + legend_orientation_threshold: Incomplete | None = ..., + ) -> None: ... + @property + def time_format(self): ... + @time_format.setter + def time_format(self, time_format) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def x_column(self): ... + @x_column.setter + def x_column(self, x_column) -> None: ... + @property + def generate_x_axis_ticks(self): ... + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... + @property + def x_total_ticks(self): ... + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks) -> None: ... + @property + def x_tick_start(self): ... + @x_tick_start.setter + def x_tick_start(self, x_tick_start) -> None: ... + @property + def x_tick_step(self): ... + @x_tick_step.setter + def x_tick_step(self, x_tick_step) -> None: ... + @property + def y_column(self): ... + @y_column.setter + def y_column(self, y_column) -> None: ... + @property + def generate_y_axis_ticks(self): ... 
+ @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... + @property + def y_total_ticks(self): ... + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks) -> None: ... + @property + def y_tick_start(self): ... + @y_tick_start.setter + def y_tick_start(self, y_tick_start) -> None: ... + @property + def y_tick_step(self): ... + @y_tick_step.setter + def y_tick_step(self, y_tick_step) -> None: ... + @property + def fill_columns(self): ... + @fill_columns.setter + def fill_columns(self, fill_columns) -> None: ... + @property + def symbol_columns(self): ... + @symbol_columns.setter + def symbol_columns(self, symbol_columns) -> None: ... + @property + def x_domain(self): ... + @x_domain.setter + def x_domain(self, x_domain) -> None: ... + @property + def y_domain(self): ... + @y_domain.setter + def y_domain(self, y_domain) -> None: ... + @property + def x_axis_label(self): ... + @x_axis_label.setter + def x_axis_label(self, x_axis_label) -> None: ... + @property + def y_axis_label(self): ... + @y_axis_label.setter + def y_axis_label(self, y_axis_label) -> None: ... + @property + def x_prefix(self): ... + @x_prefix.setter + def x_prefix(self, x_prefix) -> None: ... + @property + def x_suffix(self): ... + @x_suffix.setter + def x_suffix(self, x_suffix) -> None: ... + @property + def y_prefix(self): ... + @y_prefix.setter + def y_prefix(self, y_prefix) -> None: ... + @property + def y_suffix(self): ... + @y_suffix.setter + def y_suffix(self, y_suffix) -> None: ... + @property + def legend_colorize_rows(self): ... + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows) -> None: ... + @property + def legend_hide(self): ... + @legend_hide.setter + def legend_hide(self, legend_hide) -> None: ... + @property + def legend_opacity(self): ... + @legend_opacity.setter + def legend_opacity(self, legend_opacity) -> None: ... + @property + def legend_orientation_threshold(self): ... + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/schema_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/schema_type.pyi new file mode 100644 index 00000000..aef84af1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/schema_type.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +class SchemaType: + IMPLICIT: str + EXPLICIT: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scraper_target_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scraper_target_request.pyi new file mode 100644 index 00000000..225d84df --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scraper_target_request.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class ScraperTargetRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + type: Incomplete | None = ..., + url: Incomplete | None = ..., + org_id: Incomplete | None = ..., + bucket_id: Incomplete | None = ..., + allow_insecure: bool = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def url(self): ... + @url.setter + def url(self, url) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def bucket_id(self): ... + @bucket_id.setter + def bucket_id(self, bucket_id) -> None: ... + @property + def allow_insecure(self): ... + @allow_insecure.setter + def allow_insecure(self, allow_insecure) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scraper_target_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scraper_target_response.pyi new file mode 100644 index 00000000..12f3caf1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scraper_target_response.pyi @@ -0,0 +1,41 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.scraper_target_request import ScraperTargetRequest + +class ScraperTargetResponse(ScraperTargetRequest): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org: Incomplete | None = ..., + bucket: Incomplete | None = ..., + links: Incomplete | None = ..., + name: Incomplete | None = ..., + type: Incomplete | None = ..., + url: Incomplete | None = ..., + org_id: Incomplete | None = ..., + bucket_id: Incomplete | None = ..., + allow_insecure: bool = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + @property + def bucket(self): ... + @bucket.setter + def bucket(self, bucket) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scraper_target_responses.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scraper_target_responses.pyi new file mode 100644 index 00000000..cc2b9730 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scraper_target_responses.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class ScraperTargetResponses: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, configurations: Incomplete | None = ...) -> None: ... + @property + def configurations(self): ... + @configurations.setter + def configurations(self, configurations) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script.pyi new file mode 100644 index 00000000..04fc8c0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +class Script: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + org_id: Incomplete | None = ..., + script: Incomplete | None = ..., + language: Incomplete | None = ..., + url: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def script(self): ... + @script.setter + def script(self, script) -> None: ... + @property + def language(self): ... + @language.setter + def language(self, language) -> None: ... + @property + def url(self): ... + @url.setter + def url(self, url) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_create_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_create_request.pyi new file mode 100644 index 00000000..a48f36c7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_create_request.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class ScriptCreateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + description: Incomplete | None = ..., + script: Incomplete | None = ..., + language: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def script(self): ... + @script.setter + def script(self, script) -> None: ... + @property + def language(self): ... + @language.setter + def language(self, language) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_invocation_params.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_invocation_params.pyi new file mode 100644 index 00000000..c2074495 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_invocation_params.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class ScriptInvocationParams: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, params: Incomplete | None = ...) -> None: ... + @property + def params(self): ... + @params.setter + def params(self, params) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_language.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_language.pyi new file mode 100644 index 00000000..8b969d83 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_language.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +class ScriptLanguage: + FLUX: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_update_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_update_request.pyi new file mode 100644 index 00000000..71b5dcab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/script_update_request.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class ScriptUpdateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., description: Incomplete | None = ..., script: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def script(self): ... + @script.setter + def script(self, script) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scripts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scripts.pyi new file mode 100644 index 00000000..72315239 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/scripts.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class Scripts: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, scripts: Incomplete | None = ...) -> None: ... + @property + def scripts(self): ... + @scripts.setter + def scripts(self, scripts) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/secret_keys.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/secret_keys.pyi new file mode 100644 index 00000000..f8c58550 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/secret_keys.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class SecretKeys: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, secrets: Incomplete | None = ...) -> None: ... + @property + def secrets(self): ... + @secrets.setter + def secrets(self, secrets) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/secret_keys_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/secret_keys_response.pyi new file mode 100644 index 00000000..877920fd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/secret_keys_response.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.secret_keys import SecretKeys + +class SecretKeysResponse(SecretKeys): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., secrets: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/shard_group_manifest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/shard_group_manifest.pyi new file mode 100644 index 00000000..7fcab869 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/shard_group_manifest.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class ShardGroupManifest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + start_time: Incomplete | None = ..., + end_time: Incomplete | None = ..., + deleted_at: Incomplete | None = ..., + truncated_at: Incomplete | None = ..., + shards: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def start_time(self): ... + @start_time.setter + def start_time(self, start_time) -> None: ... + @property + def end_time(self): ... + @end_time.setter + def end_time(self, end_time) -> None: ... + @property + def deleted_at(self): ... + @deleted_at.setter + def deleted_at(self, deleted_at) -> None: ... + @property + def truncated_at(self): ... + @truncated_at.setter + def truncated_at(self, truncated_at) -> None: ... + @property + def shards(self): ... + @shards.setter + def shards(self, shards) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/shard_manifest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/shard_manifest.pyi new file mode 100644 index 00000000..1ba7aa6c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/shard_manifest.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class ShardManifest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, id: Incomplete | None = ..., shard_owners: Incomplete | None = ...) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def shard_owners(self): ... 
+ @shard_owners.setter + def shard_owners(self, shard_owners) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/shard_owner.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/shard_owner.pyi new file mode 100644 index 00000000..4bae4158 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/shard_owner.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class ShardOwner: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, node_id: Incomplete | None = ...) -> None: ... + @property + def node_id(self): ... + @node_id.setter + def node_id(self, node_id) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/simple_table_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/simple_table_view_properties.pyi new file mode 100644 index 00000000..4c661057 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/simple_table_view_properties.pyi @@ -0,0 +1,45 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class SimpleTableViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + show_all: Incomplete | None = ..., + queries: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def show_all(self): ... + @show_all.setter + def show_all(self, show_all) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/single_stat_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/single_stat_view_properties.pyi new file mode 100644 index 00000000..320cc4db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/single_stat_view_properties.pyi @@ -0,0 +1,75 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class SingleStatViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + prefix: Incomplete | None = ..., + tick_prefix: Incomplete | None = ..., + suffix: Incomplete | None = ..., + tick_suffix: Incomplete | None = ..., + static_legend: Incomplete | None = ..., + decimal_places: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def prefix(self): ... + @prefix.setter + def prefix(self, prefix) -> None: ... + @property + def tick_prefix(self): ... + @tick_prefix.setter + def tick_prefix(self, tick_prefix) -> None: ... + @property + def suffix(self): ... + @suffix.setter + def suffix(self, suffix) -> None: ... + @property + def tick_suffix(self): ... + @tick_suffix.setter + def tick_suffix(self, tick_suffix) -> None: ... + @property + def static_legend(self): ... + @static_legend.setter + def static_legend(self, static_legend) -> None: ... + @property + def decimal_places(self): ... + @decimal_places.setter + def decimal_places(self, decimal_places) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/slack_notification_endpoint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/slack_notification_endpoint.pyi new file mode 100644 index 00000000..76b00f50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/slack_notification_endpoint.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator + +class SlackNotificationEndpoint(NotificationEndpointDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + url: Incomplete | None = ..., + token: Incomplete | None = ..., + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + user_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + description: Incomplete | None = ..., + name: Incomplete | None = ..., + status: str = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + type: str = ..., + ) -> None: ... + @property + def url(self): ... + @url.setter + def url(self, url) -> None: ... + @property + def token(self): ... + @token.setter + def token(self, token) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule.pyi new file mode 100644 index 00000000..1f53e48f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule.pyi @@ -0,0 +1,41 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.slack_notification_rule_base import SlackNotificationRuleBase + +class SlackNotificationRule(SlackNotificationRuleBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: str = ..., + channel: Incomplete | None = ..., + message_template: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule_base.pyi new file mode 100644 index 00000000..414ed918 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/slack_notification_rule_base.pyi @@ -0,0 +1,53 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + +class SlackNotificationRuleBase(NotificationRuleDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + channel: Incomplete | None = ..., + message_template: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def channel(self): ... + @channel.setter + def channel(self, channel) -> None: ... + @property + def message_template(self): ... + @message_template.setter + def message_template(self, message_template) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule.pyi new file mode 100644 index 00000000..58ef677a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.smtp_notification_rule_base import SMTPNotificationRuleBase + +class SMTPNotificationRule(SMTPNotificationRuleBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: str = ..., + subject_template: Incomplete | None = ..., + body_template: Incomplete | None = ..., + to: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule_base.pyi new file mode 100644 index 00000000..48017fc8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/smtp_notification_rule_base.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + +class SMTPNotificationRuleBase(NotificationRuleDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + subject_template: Incomplete | None = ..., + body_template: Incomplete | None = ..., + to: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def subject_template(self): ... + @subject_template.setter + def subject_template(self, subject_template) -> None: ... + @property + def body_template(self): ... + @body_template.setter + def body_template(self, body_template) -> None: ... + @property + def to(self): ... + @to.setter + def to(self, to) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/source.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/source.pyi new file mode 100644 index 00000000..07ddff9a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/source.pyi @@ -0,0 +1,93 @@ +from _typeshed import Incomplete + +class Source: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + links: Incomplete | None = ..., + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + default: Incomplete | None = ..., + name: Incomplete | None = ..., + type: Incomplete | None = ..., + url: Incomplete | None = ..., + insecure_skip_verify: Incomplete | None = ..., + telegraf: Incomplete | None = ..., + token: Incomplete | None = ..., + username: Incomplete | None = ..., + password: Incomplete | None = ..., + shared_secret: Incomplete | None = ..., + meta_url: Incomplete | None = ..., + default_rp: Incomplete | None = ..., + languages: Incomplete | None = ..., + ) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def default(self): ... + @default.setter + def default(self, default) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def url(self): ... + @url.setter + def url(self, url) -> None: ... + @property + def insecure_skip_verify(self): ... + @insecure_skip_verify.setter + def insecure_skip_verify(self, insecure_skip_verify) -> None: ... + @property + def telegraf(self): ... + @telegraf.setter + def telegraf(self, telegraf) -> None: ... + @property + def token(self): ... + @token.setter + def token(self, token) -> None: ... + @property + def username(self): ... + @username.setter + def username(self, username) -> None: ... + @property + def password(self): ... + @password.setter + def password(self, password) -> None: ... + @property + def shared_secret(self): ... + @shared_secret.setter + def shared_secret(self, shared_secret) -> None: ... + @property + def meta_url(self): ... + @meta_url.setter + def meta_url(self, meta_url) -> None: ... + @property + def default_rp(self): ... + @default_rp.setter + def default_rp(self, default_rp) -> None: ... + @property + def languages(self): ... + @languages.setter + def languages(self, languages) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/source_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/source_links.pyi new file mode 100644 index 00000000..5ce803e5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/source_links.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete + +class SourceLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + _self: Incomplete | None = ..., + query: Incomplete | None = ..., + health: Incomplete | None = ..., + buckets: Incomplete | None = ..., + ) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + @property + def health(self): ... + @health.setter + def health(self, health) -> None: ... + @property + def buckets(self): ... + @buckets.setter + def buckets(self, buckets) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/sources.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/sources.pyi new file mode 100644 index 00000000..287c49c0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/sources.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Sources: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., sources: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def sources(self): ... + @sources.setter + def sources(self, sources) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack.pyi new file mode 100644 index 00000000..36353fe4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class Stack: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + events: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def events(self): ... + @events.setter + def events(self, events) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_associations.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_associations.pyi new file mode 100644 index 00000000..deda169d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_associations.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class StackAssociations: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, kind: Incomplete | None = ..., meta_name: Incomplete | None = ...) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def meta_name(self): ... + @meta_name.setter + def meta_name(self, meta_name) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_events.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_events.pyi new file mode 100644 index 00000000..f89bbd67 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_events.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class StackEvents: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + event_type: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + sources: Incomplete | None = ..., + resources: Incomplete | None = ..., + urls: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + ) -> None: ... + @property + def event_type(self): ... + @event_type.setter + def event_type(self, event_type) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def sources(self): ... + @sources.setter + def sources(self, sources) -> None: ... + @property + def resources(self): ... + @resources.setter + def resources(self, resources) -> None: ... + @property + def urls(self): ... + @urls.setter + def urls(self, urls) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_links.pyi new file mode 100644 index 00000000..972a761d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_links.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +class StackLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, _self: Incomplete | None = ...) -> None: ... + def to_dict(self): ... + def to_str(self): ... 
+ def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_resources.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_resources.pyi new file mode 100644 index 00000000..1dd5b27a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/stack_resources.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class StackResources: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + api_version: Incomplete | None = ..., + resource_id: Incomplete | None = ..., + kind: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + associations: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def api_version(self): ... + @api_version.setter + def api_version(self, api_version) -> None: ... + @property + def resource_id(self): ... + @resource_id.setter + def resource_id(self, resource_id) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def associations(self): ... + @associations.setter + def associations(self, associations) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/statement.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/statement.pyi new file mode 100644 index 00000000..789212db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/statement.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +class Statement: + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/static_legend.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/static_legend.pyi new file mode 100644 index 00000000..21a19ce9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/static_legend.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class StaticLegend: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + colorize_rows: Incomplete | None = ..., + height_ratio: Incomplete | None = ..., + show: Incomplete | None = ..., + opacity: Incomplete | None = ..., + orientation_threshold: Incomplete | None = ..., + value_axis: Incomplete | None = ..., + width_ratio: Incomplete | None = ..., + ) -> None: ... + @property + def colorize_rows(self): ... 
+ @colorize_rows.setter + def colorize_rows(self, colorize_rows) -> None: ... + @property + def height_ratio(self): ... + @height_ratio.setter + def height_ratio(self, height_ratio) -> None: ... + @property + def show(self): ... + @show.setter + def show(self, show) -> None: ... + @property + def opacity(self): ... + @opacity.setter + def opacity(self, opacity) -> None: ... + @property + def orientation_threshold(self): ... + @orientation_threshold.setter + def orientation_threshold(self, orientation_threshold) -> None: ... + @property + def value_axis(self): ... + @value_axis.setter + def value_axis(self, value_axis) -> None: ... + @property + def width_ratio(self): ... + @width_ratio.setter + def width_ratio(self, width_ratio) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/status_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/status_rule.pyi new file mode 100644 index 00000000..75a13b51 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/status_rule.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class StatusRule: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + current_level: Incomplete | None = ..., + previous_level: Incomplete | None = ..., + count: Incomplete | None = ..., + period: Incomplete | None = ..., + ) -> None: ... + @property + def current_level(self): ... + @current_level.setter + def current_level(self, current_level) -> None: ... + @property + def previous_level(self): ... + @previous_level.setter + def previous_level(self, previous_level) -> None: ... + @property + def count(self): ... + @count.setter + def count(self, count) -> None: ... + @property + def period(self): ... + @period.setter + def period(self, period) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/string_literal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/string_literal.pyi new file mode 100644 index 00000000..e261c13a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/string_literal.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.property_key import PropertyKey + +class StringLiteral(PropertyKey): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/subscription_manifest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/subscription_manifest.pyi new file mode 100644 index 00000000..1187b9ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/subscription_manifest.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class SubscriptionManifest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., mode: Incomplete | None = ..., destinations: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def mode(self): ... + @mode.setter + def mode(self, mode) -> None: ... + @property + def destinations(self): ... + @destinations.setter + def destinations(self, destinations) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/table_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/table_view_properties.pyi new file mode 100644 index 00000000..be791e62 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/table_view_properties.pyi @@ -0,0 +1,65 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class TableViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + table_options: Incomplete | None = ..., + field_options: Incomplete | None = ..., + time_format: Incomplete | None = ..., + decimal_places: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def table_options(self): ... + @table_options.setter + def table_options(self, table_options) -> None: ... + @property + def field_options(self): ... + @field_options.setter + def field_options(self, field_options) -> None: ... + @property + def time_format(self): ... + @time_format.setter + def time_format(self, time_format) -> None: ... + @property + def decimal_places(self): ... + @decimal_places.setter + def decimal_places(self, decimal_places) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... 
+ def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/table_view_properties_table_options.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/table_view_properties_table_options.pyi new file mode 100644 index 00000000..847eb8d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/table_view_properties_table_options.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class TableViewPropertiesTableOptions: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + vertical_time_axis: Incomplete | None = ..., + sort_by: Incomplete | None = ..., + wrapping: Incomplete | None = ..., + fix_first_column: Incomplete | None = ..., + ) -> None: ... + @property + def vertical_time_axis(self): ... + @vertical_time_axis.setter + def vertical_time_axis(self, vertical_time_axis) -> None: ... + @property + def sort_by(self): ... + @sort_by.setter + def sort_by(self, sort_by) -> None: ... + @property + def wrapping(self): ... + @wrapping.setter + def wrapping(self, wrapping) -> None: ... + @property + def fix_first_column(self): ... + @fix_first_column.setter + def fix_first_column(self, fix_first_column) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/tag_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/tag_rule.pyi new file mode 100644 index 00000000..dae4d4da --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/tag_rule.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TagRule: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, key: Incomplete | None = ..., value: Incomplete | None = ..., operator: Incomplete | None = ... + ) -> None: ... + @property + def key(self): ... + @key.setter + def key(self, key) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + @property + def operator(self): ... + @operator.setter + def operator(self, operator) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task.pyi new file mode 100644 index 00000000..19995ac5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task.pyi @@ -0,0 +1,113 @@ +from _typeshed import Incomplete + +class Task: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + type: Incomplete | None = ..., + org_id: Incomplete | None = ..., + org: Incomplete | None = ..., + name: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + description: Incomplete | None = ..., + status: Incomplete | None = ..., + labels: Incomplete | None = ..., + authorization_id: Incomplete | None = ..., + flux: Incomplete | None = ..., + every: Incomplete | None = ..., + cron: Incomplete | None = ..., + offset: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def owner_id(self): ... + @owner_id.setter + def owner_id(self, owner_id) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def authorization_id(self): ... + @authorization_id.setter + def authorization_id(self, authorization_id) -> None: ... + @property + def flux(self): ... + @flux.setter + def flux(self, flux) -> None: ... + @property + def every(self): ... + @every.setter + def every(self, every) -> None: ... + @property + def cron(self): ... + @cron.setter + def cron(self, cron) -> None: ... + @property + def offset(self): ... + @offset.setter + def offset(self, offset) -> None: ... + @property + def latest_completed(self): ... + @latest_completed.setter + def latest_completed(self, latest_completed) -> None: ... + @property + def last_run_status(self): ... + @last_run_status.setter + def last_run_status(self, last_run_status) -> None: ... + @property + def last_run_error(self): ... + @last_run_error.setter + def last_run_error(self, last_run_error) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_create_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_create_request.pyi new file mode 100644 index 00000000..49057460 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_create_request.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class TaskCreateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + org_id: Incomplete | None = ..., + org: Incomplete | None = ..., + status: Incomplete | None = ..., + flux: Incomplete | None = ..., + description: Incomplete | None = ..., + ) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def flux(self): ... + @flux.setter + def flux(self, flux) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_links.pyi new file mode 100644 index 00000000..b9bf306a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_links.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete + +class TaskLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + _self: Incomplete | None = ..., + owners: Incomplete | None = ..., + members: Incomplete | None = ..., + runs: Incomplete | None = ..., + logs: Incomplete | None = ..., + labels: Incomplete | None = ..., + ) -> None: ... + @property + def owners(self): ... + @owners.setter + def owners(self, owners) -> None: ... + @property + def members(self): ... + @members.setter + def members(self, members) -> None: ... + @property + def runs(self): ... + @runs.setter + def runs(self, runs) -> None: ... + @property + def logs(self): ... + @logs.setter + def logs(self, logs) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_status_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_status_type.pyi new file mode 100644 index 00000000..936a3e72 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_status_type.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +class TaskStatusType: + ACTIVE: str + INACTIVE: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_update_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_update_request.pyi new file mode 100644 index 00000000..7aa0ad52 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/task_update_request.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class TaskUpdateRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + status: Incomplete | None = ..., + flux: Incomplete | None = ..., + name: Incomplete | None = ..., + every: Incomplete | None = ..., + cron: Incomplete | None = ..., + offset: Incomplete | None = ..., + description: Incomplete | None = ..., + ) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def flux(self): ... + @flux.setter + def flux(self, flux) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def every(self): ... + @every.setter + def every(self, every) -> None: ... + @property + def cron(self): ... + @cron.setter + def cron(self, cron) -> None: ... + @property + def offset(self): ... + @offset.setter + def offset(self, offset) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/tasks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/tasks.pyi new file mode 100644 index 00000000..61ca67a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/tasks.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Tasks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., tasks: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def tasks(self): ... + @tasks.setter + def tasks(self, tasks) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf.pyi new file mode 100644 index 00000000..18db320a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.telegraf_request import TelegrafRequest + +class Telegraf(TelegrafRequest): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + links: Incomplete | None = ..., + labels: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + metadata: Incomplete | None = ..., + config: Incomplete | None = ..., + org_id: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin.pyi new file mode 100644 index 00000000..aee33a0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class TelegrafPlugin: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + config: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def config(self): ... + @config.setter + def config(self, config) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request.pyi new file mode 100644 index 00000000..346a2f5a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TelegrafPluginRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + description: Incomplete | None = ..., + plugins: Incomplete | None = ..., + metadata: Incomplete | None = ..., + config: Incomplete | None = ..., + org_id: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def plugins(self): ... + @plugins.setter + def plugins(self, plugins) -> None: ... + @property + def metadata(self): ... + @metadata.setter + def metadata(self, metadata) -> None: ... + @property + def config(self): ... + @config.setter + def config(self, config) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request_plugins.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request_plugins.pyi new file mode 100644 index 00000000..1566558b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugin_request_plugins.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class TelegrafPluginRequestPlugins: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + name: Incomplete | None = ..., + alias: Incomplete | None = ..., + description: Incomplete | None = ..., + config: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def alias(self): ... + @alias.setter + def alias(self, alias) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def config(self): ... + @config.setter + def config(self, config) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugins.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugins.pyi new file mode 100644 index 00000000..b000a1d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_plugins.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TelegrafPlugins: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, version: Incomplete | None = ..., os: Incomplete | None = ..., plugins: Incomplete | None = ... + ) -> None: ... + @property + def version(self): ... + @version.setter + def version(self, version) -> None: ... + @property + def os(self): ... + @os.setter + def os(self, os) -> None: ... + @property + def plugins(self): ... + @plugins.setter + def plugins(self, plugins) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_request.pyi new file mode 100644 index 00000000..20f62efe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_request.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class TelegrafRequest: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + description: Incomplete | None = ..., + metadata: Incomplete | None = ..., + config: Incomplete | None = ..., + org_id: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def metadata(self): ... + @metadata.setter + def metadata(self, metadata) -> None: ... + @property + def config(self): ... + @config.setter + def config(self, config) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_request_metadata.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_request_metadata.pyi new file mode 100644 index 00000000..684e8985 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegraf_request_metadata.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class TelegrafRequestMetadata: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, buckets: Incomplete | None = ...) -> None: ... + @property + def buckets(self): ... + @buckets.setter + def buckets(self, buckets) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... 
+ def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegrafs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegrafs.pyi new file mode 100644 index 00000000..a53f49e8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegrafs.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class Telegrafs: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, configurations: Incomplete | None = ...) -> None: ... + @property + def configurations(self): ... + @configurations.setter + def configurations(self, configurations) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegram_notification_endpoint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegram_notification_endpoint.pyi new file mode 100644 index 00000000..dbd52d14 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegram_notification_endpoint.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_endpoint_discriminator import NotificationEndpointDiscriminator + +class TelegramNotificationEndpoint(NotificationEndpointDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + token: Incomplete | None = ..., + channel: Incomplete | None = ..., + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + user_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + description: Incomplete | None = ..., + name: Incomplete | None = ..., + status: str = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + type: str = ..., + ) -> None: ... + @property + def token(self): ... + @token.setter + def token(self, token) -> None: ... + @property + def channel(self): ... + @channel.setter + def channel(self, channel) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule.pyi new file mode 100644 index 00000000..47fd76dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.telegram_notification_rule_base import TelegramNotificationRuleBase + +class TelegramNotificationRule(TelegramNotificationRuleBase): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: str = ..., + message_template: Incomplete | None = ..., + parse_mode: Incomplete | None = ..., + disable_web_page_preview: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule_base.pyi new file mode 100644 index 00000000..4d3fee3e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/telegram_notification_rule_base.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.notification_rule_discriminator import NotificationRuleDiscriminator + +class TelegramNotificationRuleBase(NotificationRuleDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + message_template: Incomplete | None = ..., + parse_mode: Incomplete | None = ..., + disable_web_page_preview: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + id: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + status: Incomplete | None = ..., + name: Incomplete | None = ..., + sleep_until: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + runbook_link: Incomplete | None = ..., + limit_every: Incomplete | None = ..., + limit: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + description: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def message_template(self): ... + @message_template.setter + def message_template(self, message_template) -> None: ... + @property + def parse_mode(self): ... + @parse_mode.setter + def parse_mode(self, parse_mode) -> None: ... + @property + def disable_web_page_preview(self): ... + @disable_web_page_preview.setter + def disable_web_page_preview(self, disable_web_page_preview) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_apply.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_apply.pyi new file mode 100644 index 00000000..2b7fbf5f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_apply.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +class TemplateApply: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + dry_run: Incomplete | None = ..., + org_id: Incomplete | None = ..., + stack_id: Incomplete | None = ..., + template: Incomplete | None = ..., + templates: Incomplete | None = ..., + env_refs: Incomplete | None = ..., + secrets: Incomplete | None = ..., + remotes: Incomplete | None = ..., + actions: Incomplete | None = ..., + ) -> None: ... 
+ @property + def dry_run(self): ... + @dry_run.setter + def dry_run(self, dry_run) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def stack_id(self): ... + @stack_id.setter + def stack_id(self, stack_id) -> None: ... + @property + def template(self): ... + @template.setter + def template(self, template) -> None: ... + @property + def templates(self): ... + @templates.setter + def templates(self, templates) -> None: ... + @property + def env_refs(self): ... + @env_refs.setter + def env_refs(self, env_refs) -> None: ... + @property + def secrets(self): ... + @secrets.setter + def secrets(self, secrets) -> None: ... + @property + def remotes(self): ... + @remotes.setter + def remotes(self, remotes) -> None: ... + @property + def actions(self): ... + @actions.setter + def actions(self, actions) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_apply_remotes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_apply_remotes.pyi new file mode 100644 index 00000000..636c4a69 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_apply_remotes.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class TemplateApplyRemotes: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, url: Incomplete | None = ..., content_type: Incomplete | None = ...) -> None: ... + @property + def url(self): ... + @url.setter + def url(self, url) -> None: ... + @property + def content_type(self): ... + @content_type.setter + def content_type(self, content_type) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_apply_template.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_apply_template.pyi new file mode 100644 index 00000000..e4f2dfe0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_apply_template.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TemplateApplyTemplate: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, content_type: Incomplete | None = ..., sources: Incomplete | None = ..., contents: Incomplete | None = ... + ) -> None: ... + @property + def content_type(self): ... + @content_type.setter + def content_type(self, content_type) -> None: ... + @property + def sources(self): ... + @sources.setter + def sources(self, sources) -> None: ... + @property + def contents(self): ... + @contents.setter + def contents(self, contents) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_chart.pyi new file mode 100644 index 00000000..44623230 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_chart.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class TemplateChart: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + x_pos: Incomplete | None = ..., + y_pos: Incomplete | None = ..., + height: Incomplete | None = ..., + width: Incomplete | None = ..., + properties: Incomplete | None = ..., + ) -> None: ... + @property + def x_pos(self): ... + @x_pos.setter + def x_pos(self, x_pos) -> None: ... + @property + def y_pos(self): ... + @y_pos.setter + def y_pos(self, y_pos) -> None: ... + @property + def height(self): ... + @height.setter + def height(self, height) -> None: ... + @property + def width(self): ... + @width.setter + def width(self, width) -> None: ... + @property + def properties(self): ... + @properties.setter + def properties(self, properties) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id.pyi new file mode 100644 index 00000000..102d5282 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TemplateExportByID: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, stack_id: Incomplete | None = ..., org_ids: Incomplete | None = ..., resources: Incomplete | None = ... + ) -> None: ... + @property + def stack_id(self): ... + @stack_id.setter + def stack_id(self, stack_id) -> None: ... + @property + def org_ids(self): ... + @org_ids.setter + def org_ids(self, org_ids) -> None: ... + @property + def resources(self): ... + @resources.setter + def resources(self, resources) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_org_ids.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_org_ids.pyi new file mode 100644 index 00000000..c9f3c80a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_org_ids.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class TemplateExportByIDOrgIDs: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, org_id: Incomplete | None = ..., resource_filters: Incomplete | None = ...) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... 
+ @property + def resource_filters(self): ... + @resource_filters.setter + def resource_filters(self, resource_filters) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resource_filters.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resource_filters.pyi new file mode 100644 index 00000000..42a6a2c2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resource_filters.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class TemplateExportByIDResourceFilters: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, by_label: Incomplete | None = ..., by_resource_kind: Incomplete | None = ...) -> None: ... + @property + def by_label(self): ... + @by_label.setter + def by_label(self, by_label) -> None: ... + @property + def by_resource_kind(self): ... + @by_resource_kind.setter + def by_resource_kind(self, by_resource_kind) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resources.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resources.pyi new file mode 100644 index 00000000..6893e928 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_id_resources.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +class TemplateExportByIDResources: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, id: Incomplete | None = ..., kind: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_name.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_name.pyi new file mode 100644 index 00000000..d7a2159b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_name.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TemplateExportByName: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, stack_id: Incomplete | None = ..., org_ids: Incomplete | None = ..., resources: Incomplete | None = ... + ) -> None: ... + @property + def stack_id(self): ... + @stack_id.setter + def stack_id(self, stack_id) -> None: ... 
+ @property + def org_ids(self): ... + @org_ids.setter + def org_ids(self, org_ids) -> None: ... + @property + def resources(self): ... + @resources.setter + def resources(self, resources) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_name_resources.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_name_resources.pyi new file mode 100644 index 00000000..54607b2c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_export_by_name_resources.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class TemplateExportByNameResources: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, kind: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_kind.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_kind.pyi new file mode 100644 index 00000000..cb375c1c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_kind.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +class TemplateKind: + BUCKET: str + CHECK: str + CHECKDEADMAN: str + CHECKTHRESHOLD: str + DASHBOARD: str + LABEL: str + NOTIFICATIONENDPOINT: str + NOTIFICATIONENDPOINTHTTP: str + NOTIFICATIONENDPOINTPAGERDUTY: str + NOTIFICATIONENDPOINTSLACK: str + NOTIFICATIONRULE: str + TASK: str + TELEGRAF: str + VARIABLE: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary.pyi new file mode 100644 index 00000000..b47f8536 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class TemplateSummary: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + sources: Incomplete | None = ..., + stack_id: Incomplete | None = ..., + summary: Incomplete | None = ..., + diff: Incomplete | None = ..., + errors: Incomplete | None = ..., + ) -> None: ... + @property + def sources(self): ... + @sources.setter + def sources(self, sources) -> None: ... + @property + def stack_id(self): ... + @stack_id.setter + def stack_id(self, stack_id) -> None: ... + @property + def summary(self): ... 
+ @summary.setter + def summary(self, summary) -> None: ... + @property + def diff(self): ... + @diff.setter + def diff(self, diff) -> None: ... + @property + def errors(self): ... + @errors.setter + def errors(self, errors) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff.pyi new file mode 100644 index 00000000..e0105bd3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff.pyi @@ -0,0 +1,63 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiff: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + buckets: Incomplete | None = ..., + checks: Incomplete | None = ..., + dashboards: Incomplete | None = ..., + labels: Incomplete | None = ..., + label_mappings: Incomplete | None = ..., + notification_endpoints: Incomplete | None = ..., + notification_rules: Incomplete | None = ..., + tasks: Incomplete | None = ..., + telegraf_configs: Incomplete | None = ..., + variables: Incomplete | None = ..., + ) -> None: ... + @property + def buckets(self): ... + @buckets.setter + def buckets(self, buckets) -> None: ... + @property + def checks(self): ... + @checks.setter + def checks(self, checks) -> None: ... + @property + def dashboards(self): ... + @dashboards.setter + def dashboards(self, dashboards) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def label_mappings(self): ... + @label_mappings.setter + def label_mappings(self, label_mappings) -> None: ... + @property + def notification_endpoints(self): ... + @notification_endpoints.setter + def notification_endpoints(self, notification_endpoints) -> None: ... + @property + def notification_rules(self): ... + @notification_rules.setter + def notification_rules(self, notification_rules) -> None: ... + @property + def tasks(self): ... + @tasks.setter + def tasks(self, tasks) -> None: ... + @property + def telegraf_configs(self): ... + @telegraf_configs.setter + def telegraf_configs(self, telegraf_configs) -> None: ... + @property + def variables(self): ... + @variables.setter + def variables(self, variables) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets.pyi new file mode 100644 index 00000000..e3ce470e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffBuckets: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + state_status: Incomplete | None = ..., + id: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + new: Incomplete | None = ..., + old: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def state_status(self): ... + @state_status.setter + def state_status(self, state_status) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def new(self): ... + @new.setter + def new(self, new) -> None: ... + @property + def old(self): ... + @old.setter + def old(self, old) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets_new_old.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets_new_old.pyi new file mode 100644 index 00000000..a03004bf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_buckets_new_old.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffBucketsNewOld: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., description: Incomplete | None = ..., retention_rules: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def retention_rules(self): ... + @retention_rules.setter + def retention_rules(self, retention_rules) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_checks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_checks.pyi new file mode 100644 index 00000000..58c77219 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_checks.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffChecks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + state_status: Incomplete | None = ..., + id: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + new: Incomplete | None = ..., + old: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def state_status(self): ... + @state_status.setter + def state_status(self, state_status) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def new(self): ... + @new.setter + def new(self, new) -> None: ... + @property + def old(self): ... + @old.setter + def old(self, old) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards.pyi new file mode 100644 index 00000000..74c8f4dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffDashboards: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + state_status: Incomplete | None = ..., + id: Incomplete | None = ..., + kind: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + new: Incomplete | None = ..., + old: Incomplete | None = ..., + ) -> None: ... + @property + def state_status(self): ... + @state_status.setter + def state_status(self, state_status) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def new(self): ... + @new.setter + def new(self, new) -> None: ... + @property + def old(self): ... + @old.setter + def old(self, old) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards_new_old.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards_new_old.pyi new file mode 100644 index 00000000..68987fe7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_dashboards_new_old.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffDashboardsNewOld: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., description: Incomplete | None = ..., charts: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def charts(self): ... + @charts.setter + def charts(self, charts) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_label_mappings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_label_mappings.pyi new file mode 100644 index 00000000..f5daed3b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_label_mappings.pyi @@ -0,0 +1,53 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffLabelMappings: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + status: Incomplete | None = ..., + resource_type: Incomplete | None = ..., + resource_id: Incomplete | None = ..., + resource_template_meta_name: Incomplete | None = ..., + resource_name: Incomplete | None = ..., + label_id: Incomplete | None = ..., + label_template_meta_name: Incomplete | None = ..., + label_name: Incomplete | None = ..., + ) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def resource_type(self): ... + @resource_type.setter + def resource_type(self, resource_type) -> None: ... + @property + def resource_id(self): ... + @resource_id.setter + def resource_id(self, resource_id) -> None: ... + @property + def resource_template_meta_name(self): ... + @resource_template_meta_name.setter + def resource_template_meta_name(self, resource_template_meta_name) -> None: ... + @property + def resource_name(self): ... + @resource_name.setter + def resource_name(self, resource_name) -> None: ... + @property + def label_id(self): ... + @label_id.setter + def label_id(self, label_id) -> None: ... + @property + def label_template_meta_name(self): ... + @label_template_meta_name.setter + def label_template_meta_name(self, label_template_meta_name) -> None: ... + @property + def label_name(self): ... + @label_name.setter + def label_name(self, label_name) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels.pyi new file mode 100644 index 00000000..386d4374 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffLabels: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + state_status: Incomplete | None = ..., + kind: Incomplete | None = ..., + id: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + new: Incomplete | None = ..., + old: Incomplete | None = ..., + ) -> None: ... + @property + def state_status(self): ... + @state_status.setter + def state_status(self, state_status) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def new(self): ... + @new.setter + def new(self, new) -> None: ... + @property + def old(self): ... + @old.setter + def old(self, old) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels_new_old.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels_new_old.pyi new file mode 100644 index 00000000..8bc87e00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_labels_new_old.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffLabelsNewOld: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., color: Incomplete | None = ..., description: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def color(self): ... + @color.setter + def color(self, color) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_endpoints.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_endpoints.pyi new file mode 100644 index 00000000..58ef6786 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_endpoints.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffNotificationEndpoints: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + state_status: Incomplete | None = ..., + id: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + new: Incomplete | None = ..., + old: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def state_status(self): ... + @state_status.setter + def state_status(self, state_status) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def new(self): ... + @new.setter + def new(self, new) -> None: ... + @property + def old(self): ... + @old.setter + def old(self, old) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules.pyi new file mode 100644 index 00000000..f5d3b530 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffNotificationRules: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + state_status: Incomplete | None = ..., + id: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + new: Incomplete | None = ..., + old: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def state_status(self): ... + @state_status.setter + def state_status(self, state_status) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def new(self): ... + @new.setter + def new(self, new) -> None: ... + @property + def old(self): ... + @old.setter + def old(self, old) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules_new_old.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules_new_old.pyi new file mode 100644 index 00000000..e717dd80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_notification_rules_new_old.pyi @@ -0,0 +1,68 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffNotificationRulesNewOld: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + description: Incomplete | None = ..., + endpoint_name: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + endpoint_type: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + message_template: Incomplete | None = ..., + status: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def endpoint_name(self): ... + @endpoint_name.setter + def endpoint_name(self, endpoint_name) -> None: ... + @property + def endpoint_id(self): ... + @endpoint_id.setter + def endpoint_id(self, endpoint_id) -> None: ... + @property + def endpoint_type(self): ... + @endpoint_type.setter + def endpoint_type(self, endpoint_type) -> None: ... + @property + def every(self): ... + @every.setter + def every(self, every) -> None: ... + @property + def offset(self): ... + @offset.setter + def offset(self, offset) -> None: ... + @property + def message_template(self): ... + @message_template.setter + def message_template(self, message_template) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def status_rules(self): ... + @status_rules.setter + def status_rules(self, status_rules) -> None: ... + @property + def tag_rules(self): ... + @tag_rules.setter + def tag_rules(self, tag_rules) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks.pyi new file mode 100644 index 00000000..a012bb4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffTasks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + state_status: Incomplete | None = ..., + id: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + new: Incomplete | None = ..., + old: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... 
+ @property + def state_status(self): ... + @state_status.setter + def state_status(self, state_status) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def new(self): ... + @new.setter + def new(self, new) -> None: ... + @property + def old(self): ... + @old.setter + def old(self, old) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks_new_old.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks_new_old.pyi new file mode 100644 index 00000000..ee87c2bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_tasks_new_old.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffTasksNewOld: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + cron: Incomplete | None = ..., + description: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + query: Incomplete | None = ..., + status: Incomplete | None = ..., + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def cron(self): ... + @cron.setter + def cron(self, cron) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def every(self): ... + @every.setter + def every(self, every) -> None: ... + @property + def offset(self): ... + @offset.setter + def offset(self, offset) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_telegraf_configs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_telegraf_configs.pyi new file mode 100644 index 00000000..82f0309e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_telegraf_configs.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffTelegrafConfigs: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + state_status: Incomplete | None = ..., + id: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + new: Incomplete | None = ..., + old: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def state_status(self): ... + @state_status.setter + def state_status(self, state_status) -> None: ... 
+ @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def new(self): ... + @new.setter + def new(self, new) -> None: ... + @property + def old(self): ... + @old.setter + def old(self, old) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables.pyi new file mode 100644 index 00000000..f06525fb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffVariables: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + state_status: Incomplete | None = ..., + id: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + new: Incomplete | None = ..., + old: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def state_status(self): ... + @state_status.setter + def state_status(self, state_status) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def new(self): ... + @new.setter + def new(self, new) -> None: ... + @property + def old(self): ... + @old.setter + def old(self, old) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables_new_old.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables_new_old.pyi new file mode 100644 index 00000000..bd55b882 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_diff_variables_new_old.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TemplateSummaryDiffVariablesNewOld: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, name: Incomplete | None = ..., description: Incomplete | None = ..., args: Incomplete | None = ... + ) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def args(self): ... + @args.setter + def args(self, args) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_errors.pyi new file mode 100644 index 00000000..aba5c807 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_errors.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class TemplateSummaryErrors: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + reason: Incomplete | None = ..., + fields: Incomplete | None = ..., + indexes: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def reason(self): ... + @reason.setter + def reason(self, reason) -> None: ... + @property + def fields(self): ... + @fields.setter + def fields(self, fields) -> None: ... + @property + def indexes(self): ... + @indexes.setter + def indexes(self, indexes) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_label.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_label.pyi new file mode 100644 index 00000000..c02353ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_label.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +class TemplateSummaryLabel: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + kind: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + name: Incomplete | None = ..., + properties: Incomplete | None = ..., + env_references: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def properties(self): ... + @properties.setter + def properties(self, properties) -> None: ... + @property + def env_references(self): ... + @env_references.setter + def env_references(self, env_references) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_label_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_label_properties.pyi new file mode 100644 index 00000000..bc4de964 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_label_properties.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class TemplateSummaryLabelProperties: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, color: Incomplete | None = ..., description: Incomplete | None = ...) -> None: ... + @property + def color(self): ... + @color.setter + def color(self, color) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary.pyi new file mode 100644 index 00000000..939179cb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete + +class TemplateSummarySummary: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + buckets: Incomplete | None = ..., + checks: Incomplete | None = ..., + dashboards: Incomplete | None = ..., + labels: Incomplete | None = ..., + label_mappings: Incomplete | None = ..., + missing_env_refs: Incomplete | None = ..., + missing_secrets: Incomplete | None = ..., + notification_endpoints: Incomplete | None = ..., + notification_rules: Incomplete | None = ..., + tasks: Incomplete | None = ..., + telegraf_configs: Incomplete | None = ..., + variables: Incomplete | None = ..., + ) -> None: ... + @property + def buckets(self): ... + @buckets.setter + def buckets(self, buckets) -> None: ... + @property + def checks(self): ... + @checks.setter + def checks(self, checks) -> None: ... + @property + def dashboards(self): ... + @dashboards.setter + def dashboards(self, dashboards) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def label_mappings(self): ... + @label_mappings.setter + def label_mappings(self, label_mappings) -> None: ... + @property + def missing_env_refs(self): ... + @missing_env_refs.setter + def missing_env_refs(self, missing_env_refs) -> None: ... + @property + def missing_secrets(self): ... + @missing_secrets.setter + def missing_secrets(self, missing_secrets) -> None: ... + @property + def notification_endpoints(self): ... + @notification_endpoints.setter + def notification_endpoints(self, notification_endpoints) -> None: ... + @property + def notification_rules(self): ... + @notification_rules.setter + def notification_rules(self, notification_rules) -> None: ... + @property + def tasks(self): ... + @tasks.setter + def tasks(self, tasks) -> None: ... + @property + def telegraf_configs(self): ... 
+ @telegraf_configs.setter + def telegraf_configs(self, telegraf_configs) -> None: ... + @property + def variables(self): ... + @variables.setter + def variables(self, variables) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_buckets.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_buckets.pyi new file mode 100644 index 00000000..cc8184b9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_buckets.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +class TemplateSummarySummaryBuckets: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + kind: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + retention_period: Incomplete | None = ..., + label_associations: Incomplete | None = ..., + env_references: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def retention_period(self): ... + @retention_period.setter + def retention_period(self, retention_period) -> None: ... + @property + def label_associations(self): ... + @label_associations.setter + def label_associations(self, label_associations) -> None: ... + @property + def env_references(self): ... + @env_references.setter + def env_references(self, env_references) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_dashboards.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_dashboards.pyi new file mode 100644 index 00000000..3b616f5a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_dashboards.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +class TemplateSummarySummaryDashboards: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + kind: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + label_associations: Incomplete | None = ..., + charts: Incomplete | None = ..., + env_references: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def label_associations(self): ... + @label_associations.setter + def label_associations(self, label_associations) -> None: ... + @property + def charts(self): ... + @charts.setter + def charts(self, charts) -> None: ... + @property + def env_references(self): ... + @env_references.setter + def env_references(self, env_references) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_label_mappings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_label_mappings.pyi new file mode 100644 index 00000000..bbc3b360 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_label_mappings.pyi @@ -0,0 +1,53 @@ +from _typeshed import Incomplete + +class TemplateSummarySummaryLabelMappings: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + status: Incomplete | None = ..., + resource_template_meta_name: Incomplete | None = ..., + resource_name: Incomplete | None = ..., + resource_id: Incomplete | None = ..., + resource_type: Incomplete | None = ..., + label_template_meta_name: Incomplete | None = ..., + label_name: Incomplete | None = ..., + label_id: Incomplete | None = ..., + ) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def resource_template_meta_name(self): ... + @resource_template_meta_name.setter + def resource_template_meta_name(self, resource_template_meta_name) -> None: ... 
+ @property + def resource_name(self): ... + @resource_name.setter + def resource_name(self, resource_name) -> None: ... + @property + def resource_id(self): ... + @resource_id.setter + def resource_id(self, resource_id) -> None: ... + @property + def resource_type(self): ... + @resource_type.setter + def resource_type(self, resource_type) -> None: ... + @property + def label_template_meta_name(self): ... + @label_template_meta_name.setter + def label_template_meta_name(self, label_template_meta_name) -> None: ... + @property + def label_name(self): ... + @label_name.setter + def label_name(self, label_name) -> None: ... + @property + def label_id(self): ... + @label_id.setter + def label_id(self, label_id) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_notification_rules.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_notification_rules.pyi new file mode 100644 index 00000000..b68a224d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_notification_rules.pyi @@ -0,0 +1,88 @@ +from _typeshed import Incomplete + +class TemplateSummarySummaryNotificationRules: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + endpoint_template_meta_name: Incomplete | None = ..., + endpoint_id: Incomplete | None = ..., + endpoint_type: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + message_template: Incomplete | None = ..., + status: Incomplete | None = ..., + status_rules: Incomplete | None = ..., + tag_rules: Incomplete | None = ..., + label_associations: Incomplete | None = ..., + env_references: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def endpoint_template_meta_name(self): ... + @endpoint_template_meta_name.setter + def endpoint_template_meta_name(self, endpoint_template_meta_name) -> None: ... + @property + def endpoint_id(self): ... + @endpoint_id.setter + def endpoint_id(self, endpoint_id) -> None: ... + @property + def endpoint_type(self): ... + @endpoint_type.setter + def endpoint_type(self, endpoint_type) -> None: ... + @property + def every(self): ... + @every.setter + def every(self, every) -> None: ... + @property + def offset(self): ... + @offset.setter + def offset(self, offset) -> None: ... + @property + def message_template(self): ... + @message_template.setter + def message_template(self, message_template) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def status_rules(self): ... 
+ @status_rules.setter + def status_rules(self, status_rules) -> None: ... + @property + def tag_rules(self): ... + @tag_rules.setter + def tag_rules(self, tag_rules) -> None: ... + @property + def label_associations(self): ... + @label_associations.setter + def label_associations(self, label_associations) -> None: ... + @property + def env_references(self): ... + @env_references.setter + def env_references(self, env_references) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_status_rules.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_status_rules.pyi new file mode 100644 index 00000000..d07be95e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_status_rules.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class TemplateSummarySummaryStatusRules: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, current_level: Incomplete | None = ..., previous_level: Incomplete | None = ...) -> None: ... + @property + def current_level(self): ... + @current_level.setter + def current_level(self, current_level) -> None: ... + @property + def previous_level(self): ... + @previous_level.setter + def previous_level(self, previous_level) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tag_rules.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tag_rules.pyi new file mode 100644 index 00000000..820c085b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tag_rules.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class TemplateSummarySummaryTagRules: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, key: Incomplete | None = ..., value: Incomplete | None = ..., operator: Incomplete | None = ... + ) -> None: ... + @property + def key(self): ... + @key.setter + def key(self, key) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + @property + def operator(self): ... + @operator.setter + def operator(self, operator) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tasks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tasks.pyi new file mode 100644 index 00000000..a2425cb3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_tasks.pyi @@ -0,0 +1,68 @@ +from _typeshed import Incomplete + +class TemplateSummarySummaryTasks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + id: Incomplete | None = ..., + name: Incomplete | None = ..., + cron: Incomplete | None = ..., + description: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + query: Incomplete | None = ..., + status: Incomplete | None = ..., + env_references: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... + @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def cron(self): ... + @cron.setter + def cron(self, cron) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def every(self): ... + @every.setter + def every(self, every) -> None: ... + @property + def offset(self): ... + @offset.setter + def offset(self, offset) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, query) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def env_references(self): ... + @env_references.setter + def env_references(self, env_references) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_variables.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_variables.pyi new file mode 100644 index 00000000..44133a7a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/template_summary_summary_variables.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +class TemplateSummarySummaryVariables: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + kind: Incomplete | None = ..., + template_meta_name: Incomplete | None = ..., + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + arguments: Incomplete | None = ..., + label_associations: Incomplete | None = ..., + env_references: Incomplete | None = ..., + ) -> None: ... + @property + def kind(self): ... + @kind.setter + def kind(self, kind) -> None: ... 
+ @property + def template_meta_name(self): ... + @template_meta_name.setter + def template_meta_name(self, template_meta_name) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def arguments(self): ... + @arguments.setter + def arguments(self, arguments) -> None: ... + @property + def label_associations(self): ... + @label_associations.setter + def label_associations(self, label_associations) -> None: ... + @property + def env_references(self): ... + @env_references.setter + def env_references(self, env_references) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/test_statement.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/test_statement.pyi new file mode 100644 index 00000000..4c93f948 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/test_statement.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.statement import Statement + +class TestStatement(Statement): + def __init__(self, type: Incomplete | None = ..., assignment: Incomplete | None = ...): ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def assignment(self): ... + @assignment.setter + def assignment(self, assignment) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/threshold.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/threshold.pyi new file mode 100644 index 00000000..3633be3c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/threshold.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +class Threshold: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator_value_class_map: Incomplete + discriminator: str + def __init__(self, type: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + def get_real_child_model(self, data): ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/threshold_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/threshold_base.pyi new file mode 100644 index 00000000..e67f3a77 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/threshold_base.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class ThresholdBase: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, level: Incomplete | None = ..., all_values: Incomplete | None = ...) -> None: ... + @property + def level(self): ... + @level.setter + def level(self, level) -> None: ... + @property + def all_values(self): ... + @all_values.setter + def all_values(self, all_values) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/threshold_check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/threshold_check.pyi new file mode 100644 index 00000000..5ba66f08 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/threshold_check.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.check_discriminator import CheckDiscriminator + +class ThresholdCheck(CheckDiscriminator): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + type: str = ..., + thresholds: Incomplete | None = ..., + every: Incomplete | None = ..., + offset: Incomplete | None = ..., + tags: Incomplete | None = ..., + status_message_template: Incomplete | None = ..., + id: Incomplete | None = ..., + name: Incomplete | None = ..., + org_id: Incomplete | None = ..., + task_id: Incomplete | None = ..., + owner_id: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + query: Incomplete | None = ..., + status: Incomplete | None = ..., + description: Incomplete | None = ..., + latest_completed: Incomplete | None = ..., + last_run_status: Incomplete | None = ..., + last_run_error: Incomplete | None = ..., + labels: Incomplete | None = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def thresholds(self): ... + @thresholds.setter + def thresholds(self, thresholds) -> None: ... + @property + def every(self): ... + @every.setter + def every(self, every) -> None: ... + @property + def offset(self): ... + @offset.setter + def offset(self, offset) -> None: ... + @property + def tags(self): ... + @tags.setter + def tags(self, tags) -> None: ... + @property + def status_message_template(self): ... + @status_message_template.setter + def status_message_template(self, status_message_template) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/unary_expression.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/unary_expression.pyi new file mode 100644 index 00000000..30d06beb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/unary_expression.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class UnaryExpression(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, type: Incomplete | None = ..., operator: Incomplete | None = ..., argument: Incomplete | None = ... + ) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def operator(self): ... + @operator.setter + def operator(self, operator) -> None: ... + @property + def argument(self): ... + @argument.setter + def argument(self, argument) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/unsigned_integer_literal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/unsigned_integer_literal.pyi new file mode 100644 index 00000000..397cb61d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/unsigned_integer_literal.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.expression import Expression + +class UnsignedIntegerLiteral(Expression): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/user.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/user.pyi new file mode 100644 index 00000000..13b8516d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/user.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete + +class User: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, id: Incomplete | None = ..., oauth_id: Incomplete | None = ..., name: Incomplete | None = ..., status: str = ... + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def oauth_id(self): ... + @oauth_id.setter + def oauth_id(self, oauth_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + def to_dict(self): ... + def to_str(self): ... 
+ def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/user_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/user_response.pyi new file mode 100644 index 00000000..6b89e187 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/user_response.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +class UserResponse: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + oauth_id: Incomplete | None = ..., + name: Incomplete | None = ..., + status: str = ..., + links: Incomplete | None = ..., + ) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def oauth_id(self): ... + @oauth_id.setter + def oauth_id(self, oauth_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def status(self): ... + @status.setter + def status(self, status) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/user_response_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/user_response_links.pyi new file mode 100644 index 00000000..710d020f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/user_response_links.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +class UserResponseLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, _self: Incomplete | None = ...) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/users.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/users.pyi new file mode 100644 index 00000000..161f35e3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/users.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Users: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., users: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def users(self): ... + @users.setter + def users(self, users) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable.pyi new file mode 100644 index 00000000..4d8a3552 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable.pyi @@ -0,0 +1,63 @@ +from _typeshed import Incomplete + +class Variable: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + links: Incomplete | None = ..., + id: Incomplete | None = ..., + org_id: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + selected: Incomplete | None = ..., + labels: Incomplete | None = ..., + arguments: Incomplete | None = ..., + created_at: Incomplete | None = ..., + updated_at: Incomplete | None = ..., + ) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def org_id(self): ... + @org_id.setter + def org_id(self, org_id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def description(self): ... + @description.setter + def description(self, description) -> None: ... + @property + def selected(self): ... + @selected.setter + def selected(self, selected) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + @property + def arguments(self): ... + @arguments.setter + def arguments(self, arguments) -> None: ... + @property + def created_at(self): ... + @created_at.setter + def created_at(self, created_at) -> None: ... + @property + def updated_at(self): ... + @updated_at.setter + def updated_at(self, updated_at) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable_assignment.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable_assignment.pyi new file mode 100644 index 00000000..5a767f7a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable_assignment.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.statement import Statement + +class VariableAssignment(Statement): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, type: Incomplete | None = ..., id: Incomplete | None = ..., init: Incomplete | None = ...) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def init(self): ... + @init.setter + def init(self, init) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable_links.pyi new file mode 100644 index 00000000..2ee8bb69 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable_links.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class VariableLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, _self: Incomplete | None = ..., org: Incomplete | None = ..., labels: Incomplete | None = ...) -> None: ... + @property + def org(self): ... + @org.setter + def org(self, org) -> None: ... + @property + def labels(self): ... + @labels.setter + def labels(self, labels) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable_properties.pyi new file mode 100644 index 00000000..4554cacf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variable_properties.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +class VariableProperties: + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variables.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variables.pyi new file mode 100644 index 00000000..4dc5281c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/variables.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class Variables: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, variables: Incomplete | None = ...) -> None: ... + @property + def variables(self): ... + @variables.setter + def variables(self, variables) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/view.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/view.pyi new file mode 100644 index 00000000..ecaaa8ca --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/view.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +class View: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + links: Incomplete | None = ..., + id: Incomplete | None = ..., + name: Incomplete | None = ..., + properties: Incomplete | None = ..., + ) -> None: ... + @property + def links(self): ... 
+ @links.setter + def links(self, links) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, id) -> None: ... + @property + def name(self): ... + @name.setter + def name(self, name) -> None: ... + @property + def properties(self): ... + @properties.setter + def properties(self, properties) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/view_links.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/view_links.pyi new file mode 100644 index 00000000..a290c0e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/view_links.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +class ViewLinks: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, _self: Incomplete | None = ...) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/view_properties.pyi new file mode 100644 index 00000000..eef5b89a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/view_properties.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +class ViewProperties: + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/views.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/views.pyi new file mode 100644 index 00000000..86603392 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/views.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +class Views: + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__(self, links: Incomplete | None = ..., views: Incomplete | None = ...) -> None: ... + @property + def links(self): ... + @links.setter + def links(self, links) -> None: ... + @property + def views(self): ... + @views.setter + def views(self, views) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/write_precision.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/write_precision.pyi new file mode 100644 index 00000000..88f73831 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/write_precision.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Final, Literal, TypeAlias + +_WritePrecision: TypeAlias = Literal["ms", "s", "us", "ns"] # noqa: Y047 + +class WritePrecision: + MS: Final = "ms" + S: Final = "s" + US: Final = "us" + NS: Final = "ns" + openapi_types: ClassVar[dict[str, Incomplete]] + attribute_map: ClassVar[dict[str, Incomplete]] + def __init__(self) -> None: ... + def to_dict(self) -> dict[str, Any]: ... + def to_str(self) -> str: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/xy_geom.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/xy_geom.pyi new file mode 100644 index 00000000..e4ab5eba --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/xy_geom.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete + +class XYGeom: + LINE: str + STEP: str + STACKED: str + BAR: str + MONOTONEX: str + STEPBEFORE: str + STEPAFTER: str + openapi_types: Incomplete + attribute_map: Incomplete + def __init__(self) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/xy_view_properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/xy_view_properties.pyi new file mode 100644 index 00000000..b1a3e912 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/domain/xy_view_properties.pyi @@ -0,0 +1,155 @@ +from _typeshed import Incomplete + +from influxdb_client.domain.view_properties import ViewProperties + +class XYViewProperties(ViewProperties): + openapi_types: Incomplete + attribute_map: Incomplete + discriminator: Incomplete + def __init__( + self, + time_format: Incomplete | None = ..., + type: Incomplete | None = ..., + queries: Incomplete | None = ..., + colors: Incomplete | None = ..., + color_mapping: Incomplete | None = ..., + shape: Incomplete | None = ..., + note: Incomplete | None = ..., + show_note_when_empty: Incomplete | None = ..., + axes: Incomplete | None = ..., + static_legend: Incomplete | None = ..., + x_column: Incomplete | None = ..., + generate_x_axis_ticks: Incomplete | None = ..., + x_total_ticks: Incomplete | None = ..., + x_tick_start: Incomplete | None = ..., + x_tick_step: Incomplete | None = ..., + y_column: Incomplete | None = ..., + generate_y_axis_ticks: Incomplete | None = ..., + y_total_ticks: Incomplete | None = ..., + y_tick_start: Incomplete | None = ..., + y_tick_step: Incomplete | None = ..., + shade_below: Incomplete | None = ..., + hover_dimension: Incomplete | None = ..., + position: Incomplete | None = ..., + geom: Incomplete | None = ..., + legend_colorize_rows: Incomplete | None = ..., + legend_hide: Incomplete | None = ..., + legend_opacity: Incomplete | None = ..., + legend_orientation_threshold: Incomplete | None = ..., + ) -> None: ... + @property + def time_format(self): ... + @time_format.setter + def time_format(self, time_format) -> None: ... + @property + def type(self): ... + @type.setter + def type(self, type) -> None: ... + @property + def queries(self): ... + @queries.setter + def queries(self, queries) -> None: ... + @property + def colors(self): ... + @colors.setter + def colors(self, colors) -> None: ... + @property + def color_mapping(self): ... + @color_mapping.setter + def color_mapping(self, color_mapping) -> None: ... + @property + def shape(self): ... + @shape.setter + def shape(self, shape) -> None: ... + @property + def note(self): ... + @note.setter + def note(self, note) -> None: ... + @property + def show_note_when_empty(self): ... + @show_note_when_empty.setter + def show_note_when_empty(self, show_note_when_empty) -> None: ... + @property + def axes(self): ... + @axes.setter + def axes(self, axes) -> None: ... + @property + def static_legend(self): ... + @static_legend.setter + def static_legend(self, static_legend) -> None: ... + @property + def x_column(self): ... + @x_column.setter + def x_column(self, x_column) -> None: ... + @property + def generate_x_axis_ticks(self): ... + @generate_x_axis_ticks.setter + def generate_x_axis_ticks(self, generate_x_axis_ticks) -> None: ... + @property + def x_total_ticks(self): ... + @x_total_ticks.setter + def x_total_ticks(self, x_total_ticks) -> None: ... + @property + def x_tick_start(self): ... + @x_tick_start.setter + def x_tick_start(self, x_tick_start) -> None: ... + @property + def x_tick_step(self): ... 
+ @x_tick_step.setter + def x_tick_step(self, x_tick_step) -> None: ... + @property + def y_column(self): ... + @y_column.setter + def y_column(self, y_column) -> None: ... + @property + def generate_y_axis_ticks(self): ... + @generate_y_axis_ticks.setter + def generate_y_axis_ticks(self, generate_y_axis_ticks) -> None: ... + @property + def y_total_ticks(self): ... + @y_total_ticks.setter + def y_total_ticks(self, y_total_ticks) -> None: ... + @property + def y_tick_start(self): ... + @y_tick_start.setter + def y_tick_start(self, y_tick_start) -> None: ... + @property + def y_tick_step(self): ... + @y_tick_step.setter + def y_tick_step(self, y_tick_step) -> None: ... + @property + def shade_below(self): ... + @shade_below.setter + def shade_below(self, shade_below) -> None: ... + @property + def hover_dimension(self): ... + @hover_dimension.setter + def hover_dimension(self, hover_dimension) -> None: ... + @property + def position(self): ... + @position.setter + def position(self, position) -> None: ... + @property + def geom(self): ... + @geom.setter + def geom(self, geom) -> None: ... + @property + def legend_colorize_rows(self): ... + @legend_colorize_rows.setter + def legend_colorize_rows(self, legend_colorize_rows) -> None: ... + @property + def legend_hide(self): ... + @legend_hide.setter + def legend_hide(self, legend_hide) -> None: ... + @property + def legend_opacity(self): ... + @legend_opacity.setter + def legend_opacity(self, legend_opacity) -> None: ... + @property + def legend_orientation_threshold(self): ... + @legend_orientation_threshold.setter + def legend_orientation_threshold(self, legend_orientation_threshold) -> None: ... + def to_dict(self): ... + def to_str(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/extras.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/extras.pyi new file mode 100644 index 00000000..3cc4ea47 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/extras.pyi @@ -0,0 +1,4 @@ +from typing import Any + +np: Any # numpy module +pd: Any # pandas module diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/rest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/rest.pyi new file mode 100644 index 00000000..b82f6068 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/rest.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +from influxdb_client.client.exceptions import InfluxDBError + +class ApiException(InfluxDBError): + status: Incomplete + reason: Incomplete + body: Incomplete + headers: Incomplete + def __init__( + self, status: Incomplete | None = ..., reason: Incomplete | None = ..., http_resp: Incomplete | None = ... + ) -> None: ... + +class _BaseRESTClient: + logger: Incomplete + @staticmethod + def log_request(method: str, url: str): ... + @staticmethod + def log_response(status: str): ... + @staticmethod + def log_body(body: object, prefix: str): ... + @staticmethod + def log_headers(headers: dict[str, str], prefix: str): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/__init__.pyi new file mode 100644 index 00000000..4285a8aa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/__init__.pyi @@ -0,0 +1,41 @@ +from influxdb_client.service.authorizations_service import AuthorizationsService as AuthorizationsService +from influxdb_client.service.backup_service import BackupService as BackupService +from influxdb_client.service.bucket_schemas_service import BucketSchemasService as BucketSchemasService +from influxdb_client.service.buckets_service import BucketsService as BucketsService +from influxdb_client.service.cells_service import CellsService as CellsService +from influxdb_client.service.checks_service import ChecksService as ChecksService +from influxdb_client.service.config_service import ConfigService as ConfigService +from influxdb_client.service.dashboards_service import DashboardsService as DashboardsService +from influxdb_client.service.dbr_ps_service import DBRPsService as DBRPsService +from influxdb_client.service.delete_service import DeleteService as DeleteService +from influxdb_client.service.health_service import HealthService as HealthService +from influxdb_client.service.invokable_scripts_service import InvokableScriptsService as InvokableScriptsService +from influxdb_client.service.labels_service import LabelsService as LabelsService +from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService as LegacyAuthorizationsService +from influxdb_client.service.metrics_service import MetricsService as MetricsService +from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService as NotificationEndpointsService +from influxdb_client.service.notification_rules_service import NotificationRulesService as NotificationRulesService +from influxdb_client.service.organizations_service import OrganizationsService as OrganizationsService +from influxdb_client.service.ping_service import PingService as PingService +from influxdb_client.service.query_service import QueryService as QueryService +from influxdb_client.service.ready_service import ReadyService as ReadyService +from influxdb_client.service.remote_connections_service import RemoteConnectionsService as RemoteConnectionsService +from influxdb_client.service.replications_service import ReplicationsService as ReplicationsService +from influxdb_client.service.resources_service import ResourcesService as ResourcesService +from influxdb_client.service.restore_service import RestoreService as RestoreService +from influxdb_client.service.routes_service import RoutesService as RoutesService +from influxdb_client.service.rules_service import RulesService as RulesService +from influxdb_client.service.scraper_targets_service import ScraperTargetsService as ScraperTargetsService +from influxdb_client.service.secrets_service import SecretsService as SecretsService +from influxdb_client.service.setup_service import SetupService as SetupService +from influxdb_client.service.signin_service import SigninService as SigninService +from influxdb_client.service.signout_service import SignoutService as SignoutService +from influxdb_client.service.sources_service import SourcesService as SourcesService +from 
influxdb_client.service.tasks_service import TasksService as TasksService +from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService as TelegrafPluginsService +from influxdb_client.service.telegrafs_service import TelegrafsService as TelegrafsService +from influxdb_client.service.templates_service import TemplatesService as TemplatesService +from influxdb_client.service.users_service import UsersService as UsersService +from influxdb_client.service.variables_service import VariablesService as VariablesService +from influxdb_client.service.views_service import ViewsService as ViewsService +from influxdb_client.service.write_service import WriteService as WriteService diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/_base_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/_base_service.pyi new file mode 100644 index 00000000..dc2bed88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/_base_service.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +class _BaseService: + api_client: Incomplete + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def build_type(self) -> str: ... + async def build_type_async(self) -> str: ... + def response_header(self, response, header_name: str = ...) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/authorizations_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/authorizations_service.pyi new file mode 100644 index 00000000..1cb3af2b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/authorizations_service.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class AuthorizationsService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def delete_authorizations_id(self, auth_id, **kwargs): ... + def delete_authorizations_id_with_http_info(self, auth_id, **kwargs): ... + async def delete_authorizations_id_async(self, auth_id, **kwargs): ... + def get_authorizations(self, **kwargs): ... + def get_authorizations_with_http_info(self, **kwargs): ... + async def get_authorizations_async(self, **kwargs): ... + def get_authorizations_id(self, auth_id, **kwargs): ... + def get_authorizations_id_with_http_info(self, auth_id, **kwargs): ... + async def get_authorizations_id_async(self, auth_id, **kwargs): ... + def patch_authorizations_id(self, auth_id, authorization_update_request, **kwargs): ... + def patch_authorizations_id_with_http_info(self, auth_id, authorization_update_request, **kwargs): ... + async def patch_authorizations_id_async(self, auth_id, authorization_update_request, **kwargs): ... + def post_authorizations(self, authorization_post_request, **kwargs): ... + def post_authorizations_with_http_info(self, authorization_post_request, **kwargs): ... + async def post_authorizations_async(self, authorization_post_request, **kwargs): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/backup_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/backup_service.pyi new file mode 100644 index 00000000..96f89ced --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/backup_service.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class BackupService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def get_backup_kv(self, **kwargs): ... + def get_backup_kv_with_http_info(self, **kwargs): ... + async def get_backup_kv_async(self, **kwargs): ... + def get_backup_metadata(self, **kwargs): ... + def get_backup_metadata_with_http_info(self, **kwargs): ... + async def get_backup_metadata_async(self, **kwargs): ... + def get_backup_shard_id(self, shard_id, **kwargs): ... + def get_backup_shard_id_with_http_info(self, shard_id, **kwargs): ... + async def get_backup_shard_id_async(self, shard_id, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/bucket_schemas_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/bucket_schemas_service.pyi new file mode 100644 index 00000000..276cf48b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/bucket_schemas_service.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class BucketSchemasService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def create_measurement_schema(self, bucket_id, measurement_schema_create_request, **kwargs): ... + def create_measurement_schema_with_http_info(self, bucket_id, measurement_schema_create_request, **kwargs): ... + async def create_measurement_schema_async(self, bucket_id, measurement_schema_create_request, **kwargs): ... + def get_measurement_schema(self, bucket_id, measurement_id, **kwargs): ... + def get_measurement_schema_with_http_info(self, bucket_id, measurement_id, **kwargs): ... + async def get_measurement_schema_async(self, bucket_id, measurement_id, **kwargs): ... + def get_measurement_schemas(self, bucket_id, **kwargs): ... + def get_measurement_schemas_with_http_info(self, bucket_id, **kwargs): ... + async def get_measurement_schemas_async(self, bucket_id, **kwargs): ... + def update_measurement_schema(self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs): ... + def update_measurement_schema_with_http_info( + self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs + ): ... + async def update_measurement_schema_async(self, bucket_id, measurement_id, measurement_schema_update_request, **kwargs): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/buckets_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/buckets_service.pyi new file mode 100644 index 00000000..9839754c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/buckets_service.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class BucketsService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def delete_buckets_id(self, bucket_id, **kwargs): ... + def delete_buckets_id_with_http_info(self, bucket_id, **kwargs): ... + async def delete_buckets_id_async(self, bucket_id, **kwargs): ... + def delete_buckets_id_labels_id(self, bucket_id, label_id, **kwargs): ... + def delete_buckets_id_labels_id_with_http_info(self, bucket_id, label_id, **kwargs): ... + async def delete_buckets_id_labels_id_async(self, bucket_id, label_id, **kwargs): ... + def delete_buckets_id_members_id(self, user_id, bucket_id, **kwargs): ... + def delete_buckets_id_members_id_with_http_info(self, user_id, bucket_id, **kwargs): ... + async def delete_buckets_id_members_id_async(self, user_id, bucket_id, **kwargs): ... + def delete_buckets_id_owners_id(self, user_id, bucket_id, **kwargs): ... + def delete_buckets_id_owners_id_with_http_info(self, user_id, bucket_id, **kwargs): ... + async def delete_buckets_id_owners_id_async(self, user_id, bucket_id, **kwargs): ... + def get_buckets(self, **kwargs): ... + def get_buckets_with_http_info(self, **kwargs): ... + async def get_buckets_async(self, **kwargs): ... + def get_buckets_id(self, bucket_id, **kwargs): ... + def get_buckets_id_with_http_info(self, bucket_id, **kwargs): ... + async def get_buckets_id_async(self, bucket_id, **kwargs): ... + def get_buckets_id_labels(self, bucket_id, **kwargs): ... + def get_buckets_id_labels_with_http_info(self, bucket_id, **kwargs): ... + async def get_buckets_id_labels_async(self, bucket_id, **kwargs): ... + def get_buckets_id_members(self, bucket_id, **kwargs): ... + def get_buckets_id_members_with_http_info(self, bucket_id, **kwargs): ... + async def get_buckets_id_members_async(self, bucket_id, **kwargs): ... + def get_buckets_id_owners(self, bucket_id, **kwargs): ... + def get_buckets_id_owners_with_http_info(self, bucket_id, **kwargs): ... + async def get_buckets_id_owners_async(self, bucket_id, **kwargs): ... + def get_sources_id_buckets(self, source_id, **kwargs): ... + def get_sources_id_buckets_with_http_info(self, source_id, **kwargs): ... + async def get_sources_id_buckets_async(self, source_id, **kwargs): ... + def patch_buckets_id(self, bucket_id, patch_bucket_request, **kwargs): ... + def patch_buckets_id_with_http_info(self, bucket_id, patch_bucket_request, **kwargs): ... + async def patch_buckets_id_async(self, bucket_id, patch_bucket_request, **kwargs): ... + def post_buckets(self, post_bucket_request, **kwargs): ... + def post_buckets_with_http_info(self, post_bucket_request, **kwargs): ... + async def post_buckets_async(self, post_bucket_request, **kwargs): ... + def post_buckets_id_labels(self, bucket_id, label_mapping, **kwargs): ... + def post_buckets_id_labels_with_http_info(self, bucket_id, label_mapping, **kwargs): ... + async def post_buckets_id_labels_async(self, bucket_id, label_mapping, **kwargs): ... 
+ def post_buckets_id_members(self, bucket_id, add_resource_member_request_body, **kwargs): ... + def post_buckets_id_members_with_http_info(self, bucket_id, add_resource_member_request_body, **kwargs): ... + async def post_buckets_id_members_async(self, bucket_id, add_resource_member_request_body, **kwargs): ... + def post_buckets_id_owners(self, bucket_id, add_resource_member_request_body, **kwargs): ... + def post_buckets_id_owners_with_http_info(self, bucket_id, add_resource_member_request_body, **kwargs): ... + async def post_buckets_id_owners_async(self, bucket_id, add_resource_member_request_body, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/cells_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/cells_service.pyi new file mode 100644 index 00000000..4135ddc0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/cells_service.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class CellsService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def delete_dashboards_id_cells_id(self, dashboard_id, cell_id, **kwargs): ... + def delete_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, **kwargs): ... + async def delete_dashboards_id_cells_id_async(self, dashboard_id, cell_id, **kwargs): ... + def get_dashboards_id_cells_id_view(self, dashboard_id, cell_id, **kwargs): ... + def get_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, **kwargs): ... + async def get_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, **kwargs): ... + def patch_dashboards_id_cells_id(self, dashboard_id, cell_id, cell_update, **kwargs): ... + def patch_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, cell_update, **kwargs): ... + async def patch_dashboards_id_cells_id_async(self, dashboard_id, cell_id, cell_update, **kwargs): ... + def patch_dashboards_id_cells_id_view(self, dashboard_id, cell_id, view, **kwargs): ... + def patch_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, view, **kwargs): ... + async def patch_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, view, **kwargs): ... + def post_dashboards_id_cells(self, dashboard_id, create_cell, **kwargs): ... + def post_dashboards_id_cells_with_http_info(self, dashboard_id, create_cell, **kwargs): ... + async def post_dashboards_id_cells_async(self, dashboard_id, create_cell, **kwargs): ... + def put_dashboards_id_cells(self, dashboard_id, cell, **kwargs): ... + def put_dashboards_id_cells_with_http_info(self, dashboard_id, cell, **kwargs): ... + async def put_dashboards_id_cells_async(self, dashboard_id, cell, **kwargs): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/checks_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/checks_service.pyi new file mode 100644 index 00000000..fd840989 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/checks_service.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class ChecksService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def create_check(self, post_check, **kwargs): ... + def create_check_with_http_info(self, post_check, **kwargs): ... + async def create_check_async(self, post_check, **kwargs): ... + def delete_checks_id(self, check_id, **kwargs): ... + def delete_checks_id_with_http_info(self, check_id, **kwargs): ... + async def delete_checks_id_async(self, check_id, **kwargs): ... + def delete_checks_id_labels_id(self, check_id, label_id, **kwargs): ... + def delete_checks_id_labels_id_with_http_info(self, check_id, label_id, **kwargs): ... + async def delete_checks_id_labels_id_async(self, check_id, label_id, **kwargs): ... + def get_checks(self, org_id, **kwargs): ... + def get_checks_with_http_info(self, org_id, **kwargs): ... + async def get_checks_async(self, org_id, **kwargs): ... + def get_checks_id(self, check_id, **kwargs): ... + def get_checks_id_with_http_info(self, check_id, **kwargs): ... + async def get_checks_id_async(self, check_id, **kwargs): ... + def get_checks_id_labels(self, check_id, **kwargs): ... + def get_checks_id_labels_with_http_info(self, check_id, **kwargs): ... + async def get_checks_id_labels_async(self, check_id, **kwargs): ... + def get_checks_id_query(self, check_id, **kwargs): ... + def get_checks_id_query_with_http_info(self, check_id, **kwargs): ... + async def get_checks_id_query_async(self, check_id, **kwargs): ... + def patch_checks_id(self, check_id, check_patch, **kwargs): ... + def patch_checks_id_with_http_info(self, check_id, check_patch, **kwargs): ... + async def patch_checks_id_async(self, check_id, check_patch, **kwargs): ... + def post_checks_id_labels(self, check_id, label_mapping, **kwargs): ... + def post_checks_id_labels_with_http_info(self, check_id, label_mapping, **kwargs): ... + async def post_checks_id_labels_async(self, check_id, label_mapping, **kwargs): ... + def put_checks_id(self, check_id, check, **kwargs): ... + def put_checks_id_with_http_info(self, check_id, check, **kwargs): ... + async def put_checks_id_async(self, check_id, check, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/config_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/config_service.pyi new file mode 100644 index 00000000..89aca655 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/config_service.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class ConfigService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def get_config(self, **kwargs): ... + def get_config_with_http_info(self, **kwargs): ... 
+ async def get_config_async(self, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/dashboards_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/dashboards_service.pyi new file mode 100644 index 00000000..507d9438 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/dashboards_service.pyi @@ -0,0 +1,66 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class DashboardsService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def delete_dashboards_id(self, dashboard_id, **kwargs): ... + def delete_dashboards_id_with_http_info(self, dashboard_id, **kwargs): ... + async def delete_dashboards_id_async(self, dashboard_id, **kwargs): ... + def delete_dashboards_id_cells_id(self, dashboard_id, cell_id, **kwargs): ... + def delete_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, **kwargs): ... + async def delete_dashboards_id_cells_id_async(self, dashboard_id, cell_id, **kwargs): ... + def delete_dashboards_id_labels_id(self, dashboard_id, label_id, **kwargs): ... + def delete_dashboards_id_labels_id_with_http_info(self, dashboard_id, label_id, **kwargs): ... + async def delete_dashboards_id_labels_id_async(self, dashboard_id, label_id, **kwargs): ... + def delete_dashboards_id_members_id(self, user_id, dashboard_id, **kwargs): ... + def delete_dashboards_id_members_id_with_http_info(self, user_id, dashboard_id, **kwargs): ... + async def delete_dashboards_id_members_id_async(self, user_id, dashboard_id, **kwargs): ... + def delete_dashboards_id_owners_id(self, user_id, dashboard_id, **kwargs): ... + def delete_dashboards_id_owners_id_with_http_info(self, user_id, dashboard_id, **kwargs): ... + async def delete_dashboards_id_owners_id_async(self, user_id, dashboard_id, **kwargs): ... + def get_dashboards(self, **kwargs): ... + def get_dashboards_with_http_info(self, **kwargs): ... + async def get_dashboards_async(self, **kwargs): ... + def get_dashboards_id(self, dashboard_id, **kwargs): ... + def get_dashboards_id_with_http_info(self, dashboard_id, **kwargs): ... + async def get_dashboards_id_async(self, dashboard_id, **kwargs): ... + def get_dashboards_id_cells_id_view(self, dashboard_id, cell_id, **kwargs): ... + def get_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, **kwargs): ... + async def get_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, **kwargs): ... + def get_dashboards_id_labels(self, dashboard_id, **kwargs): ... + def get_dashboards_id_labels_with_http_info(self, dashboard_id, **kwargs): ... + async def get_dashboards_id_labels_async(self, dashboard_id, **kwargs): ... + def get_dashboards_id_members(self, dashboard_id, **kwargs): ... + def get_dashboards_id_members_with_http_info(self, dashboard_id, **kwargs): ... + async def get_dashboards_id_members_async(self, dashboard_id, **kwargs): ... + def get_dashboards_id_owners(self, dashboard_id, **kwargs): ... + def get_dashboards_id_owners_with_http_info(self, dashboard_id, **kwargs): ... + async def get_dashboards_id_owners_async(self, dashboard_id, **kwargs): ... + def patch_dashboards_id(self, dashboard_id, **kwargs): ... + def patch_dashboards_id_with_http_info(self, dashboard_id, **kwargs): ... 
+ async def patch_dashboards_id_async(self, dashboard_id, **kwargs): ... + def patch_dashboards_id_cells_id(self, dashboard_id, cell_id, cell_update, **kwargs): ... + def patch_dashboards_id_cells_id_with_http_info(self, dashboard_id, cell_id, cell_update, **kwargs): ... + async def patch_dashboards_id_cells_id_async(self, dashboard_id, cell_id, cell_update, **kwargs): ... + def patch_dashboards_id_cells_id_view(self, dashboard_id, cell_id, view, **kwargs): ... + def patch_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, view, **kwargs): ... + async def patch_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, view, **kwargs): ... + def post_dashboards(self, create_dashboard_request, **kwargs): ... + def post_dashboards_with_http_info(self, create_dashboard_request, **kwargs): ... + async def post_dashboards_async(self, create_dashboard_request, **kwargs): ... + def post_dashboards_id_cells(self, dashboard_id, create_cell, **kwargs): ... + def post_dashboards_id_cells_with_http_info(self, dashboard_id, create_cell, **kwargs): ... + async def post_dashboards_id_cells_async(self, dashboard_id, create_cell, **kwargs): ... + def post_dashboards_id_labels(self, dashboard_id, label_mapping, **kwargs): ... + def post_dashboards_id_labels_with_http_info(self, dashboard_id, label_mapping, **kwargs): ... + async def post_dashboards_id_labels_async(self, dashboard_id, label_mapping, **kwargs): ... + def post_dashboards_id_members(self, dashboard_id, add_resource_member_request_body, **kwargs): ... + def post_dashboards_id_members_with_http_info(self, dashboard_id, add_resource_member_request_body, **kwargs): ... + async def post_dashboards_id_members_async(self, dashboard_id, add_resource_member_request_body, **kwargs): ... + def post_dashboards_id_owners(self, dashboard_id, add_resource_member_request_body, **kwargs): ... + def post_dashboards_id_owners_with_http_info(self, dashboard_id, add_resource_member_request_body, **kwargs): ... + async def post_dashboards_id_owners_async(self, dashboard_id, add_resource_member_request_body, **kwargs): ... + def put_dashboards_id_cells(self, dashboard_id, cell, **kwargs): ... + def put_dashboards_id_cells_with_http_info(self, dashboard_id, cell, **kwargs): ... + async def put_dashboards_id_cells_async(self, dashboard_id, cell, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/dbr_ps_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/dbr_ps_service.pyi new file mode 100644 index 00000000..00b189b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/dbr_ps_service.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class DBRPsService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def delete_dbrpid(self, dbrp_id, **kwargs): ... + def delete_dbrpid_with_http_info(self, dbrp_id, **kwargs): ... + async def delete_dbrpid_async(self, dbrp_id, **kwargs): ... + def get_dbr_ps(self, **kwargs): ... + def get_dbr_ps_with_http_info(self, **kwargs): ... + async def get_dbr_ps_async(self, **kwargs): ... + def get_dbr_ps_id(self, dbrp_id, **kwargs): ... + def get_dbr_ps_id_with_http_info(self, dbrp_id, **kwargs): ... + async def get_dbr_ps_id_async(self, dbrp_id, **kwargs): ... 
+ def patch_dbrpid(self, dbrp_id, dbrp_update, **kwargs): ... + def patch_dbrpid_with_http_info(self, dbrp_id, dbrp_update, **kwargs): ... + async def patch_dbrpid_async(self, dbrp_id, dbrp_update, **kwargs): ... + def post_dbrp(self, dbrp_create, **kwargs): ... + def post_dbrp_with_http_info(self, dbrp_create, **kwargs): ... + async def post_dbrp_async(self, dbrp_create, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/delete_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/delete_service.pyi new file mode 100644 index 00000000..50407915 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/delete_service.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class DeleteService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def post_delete(self, delete_predicate_request, **kwargs): ... + def post_delete_with_http_info(self, delete_predicate_request, **kwargs): ... + async def post_delete_async(self, delete_predicate_request, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/health_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/health_service.pyi new file mode 100644 index 00000000..0ade64f9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/health_service.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class HealthService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def get_health(self, **kwargs): ... + def get_health_with_http_info(self, **kwargs): ... + async def get_health_async(self, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/invokable_scripts_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/invokable_scripts_service.pyi new file mode 100644 index 00000000..c5f0e227 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/invokable_scripts_service.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from influxdb_client.service._base_service import _BaseService + +class InvokableScriptsService(_BaseService): + def __init__(self, api_client: Incomplete | None = ...) -> None: ... + def delete_scripts_id(self, script_id, **kwargs): ... + def delete_scripts_id_with_http_info(self, script_id, **kwargs): ... + async def delete_scripts_id_async(self, script_id, **kwargs): ... + def get_scripts(self, **kwargs): ... + def get_scripts_with_http_info(self, **kwargs): ... + async def get_scripts_async(self, **kwargs): ... + def get_scripts_id(self, script_id, **kwargs): ... + def get_scripts_id_with_http_info(self, script_id, **kwargs): ... + async def get_scripts_id_async(self, script_id, **kwargs): ... + def patch_scripts_id(self, script_id, script_update_request, **kwargs): ... 
+    def patch_scripts_id_with_http_info(self, script_id, script_update_request, **kwargs): ...
+    async def patch_scripts_id_async(self, script_id, script_update_request, **kwargs): ...
+    def post_scripts(self, script_create_request, **kwargs): ...
+    def post_scripts_with_http_info(self, script_create_request, **kwargs): ...
+    async def post_scripts_async(self, script_create_request, **kwargs): ...
+    def post_scripts_id_invoke(self, script_id, **kwargs): ...
+    def post_scripts_id_invoke_with_http_info(self, script_id, **kwargs): ...
+    async def post_scripts_id_invoke_async(self, script_id, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/labels_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/labels_service.pyi
new file mode 100644
index 00000000..63e974f5
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/labels_service.pyi
@@ -0,0 +1,21 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class LabelsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_labels_id(self, label_id, **kwargs): ...
+    def delete_labels_id_with_http_info(self, label_id, **kwargs): ...
+    async def delete_labels_id_async(self, label_id, **kwargs): ...
+    def get_labels(self, **kwargs): ...
+    def get_labels_with_http_info(self, **kwargs): ...
+    async def get_labels_async(self, **kwargs): ...
+    def get_labels_id(self, label_id, **kwargs): ...
+    def get_labels_id_with_http_info(self, label_id, **kwargs): ...
+    async def get_labels_id_async(self, label_id, **kwargs): ...
+    def patch_labels_id(self, label_id, label_update, **kwargs): ...
+    def patch_labels_id_with_http_info(self, label_id, label_update, **kwargs): ...
+    async def patch_labels_id_async(self, label_id, label_update, **kwargs): ...
+    def post_labels(self, label_create_request, **kwargs): ...
+    def post_labels_with_http_info(self, label_create_request, **kwargs): ...
+    async def post_labels_async(self, label_create_request, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/legacy_authorizations_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/legacy_authorizations_service.pyi
new file mode 100644
index 00000000..fbec0e37
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/legacy_authorizations_service.pyi
@@ -0,0 +1,24 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class LegacyAuthorizationsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_legacy_authorizations_id(self, auth_id, **kwargs): ...
+    def delete_legacy_authorizations_id_with_http_info(self, auth_id, **kwargs): ...
+    async def delete_legacy_authorizations_id_async(self, auth_id, **kwargs): ...
+    def get_legacy_authorizations(self, **kwargs): ...
+    def get_legacy_authorizations_with_http_info(self, **kwargs): ...
+    async def get_legacy_authorizations_async(self, **kwargs): ...
+    def get_legacy_authorizations_id(self, auth_id, **kwargs): ...
+    def get_legacy_authorizations_id_with_http_info(self, auth_id, **kwargs): ...
+    async def get_legacy_authorizations_id_async(self, auth_id, **kwargs): ...
+    def patch_legacy_authorizations_id(self, auth_id, authorization_update_request, **kwargs): ...
+    def patch_legacy_authorizations_id_with_http_info(self, auth_id, authorization_update_request, **kwargs): ...
+    async def patch_legacy_authorizations_id_async(self, auth_id, authorization_update_request, **kwargs): ...
+    def post_legacy_authorizations(self, legacy_authorization_post_request, **kwargs): ...
+    def post_legacy_authorizations_with_http_info(self, legacy_authorization_post_request, **kwargs): ...
+    async def post_legacy_authorizations_async(self, legacy_authorization_post_request, **kwargs): ...
+    def post_legacy_authorizations_id_password(self, auth_id, password_reset_body, **kwargs): ...
+    def post_legacy_authorizations_id_password_with_http_info(self, auth_id, password_reset_body, **kwargs): ...
+    async def post_legacy_authorizations_id_password_async(self, auth_id, password_reset_body, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/metrics_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/metrics_service.pyi
new file mode 100644
index 00000000..8194e55c
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/metrics_service.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class MetricsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_metrics(self, **kwargs): ...
+    def get_metrics_with_http_info(self, **kwargs): ...
+    async def get_metrics_async(self, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/notification_endpoints_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/notification_endpoints_service.pyi
new file mode 100644
index 00000000..35286fe1
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/notification_endpoints_service.pyi
@@ -0,0 +1,33 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class NotificationEndpointsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def create_notification_endpoint(self, post_notification_endpoint, **kwargs): ...
+    def create_notification_endpoint_with_http_info(self, post_notification_endpoint, **kwargs): ...
+    async def create_notification_endpoint_async(self, post_notification_endpoint, **kwargs): ...
+    def delete_notification_endpoints_id(self, endpoint_id, **kwargs): ...
+    def delete_notification_endpoints_id_with_http_info(self, endpoint_id, **kwargs): ...
+    async def delete_notification_endpoints_id_async(self, endpoint_id, **kwargs): ...
+    def delete_notification_endpoints_id_labels_id(self, endpoint_id, label_id, **kwargs): ...
+    def delete_notification_endpoints_id_labels_id_with_http_info(self, endpoint_id, label_id, **kwargs): ...
+    async def delete_notification_endpoints_id_labels_id_async(self, endpoint_id, label_id, **kwargs): ...
+    def get_notification_endpoints(self, org_id, **kwargs): ...
+    def get_notification_endpoints_with_http_info(self, org_id, **kwargs): ...
+    async def get_notification_endpoints_async(self, org_id, **kwargs): ...
+    def get_notification_endpoints_id(self, endpoint_id, **kwargs): ...
+    def get_notification_endpoints_id_with_http_info(self, endpoint_id, **kwargs): ...
+    async def get_notification_endpoints_id_async(self, endpoint_id, **kwargs): ...
+    def get_notification_endpoints_id_labels(self, endpoint_id, **kwargs): ...
+    def get_notification_endpoints_id_labels_with_http_info(self, endpoint_id, **kwargs): ...
+    async def get_notification_endpoints_id_labels_async(self, endpoint_id, **kwargs): ...
+    def patch_notification_endpoints_id(self, endpoint_id, notification_endpoint_update, **kwargs): ...
+    def patch_notification_endpoints_id_with_http_info(self, endpoint_id, notification_endpoint_update, **kwargs): ...
+    async def patch_notification_endpoints_id_async(self, endpoint_id, notification_endpoint_update, **kwargs): ...
+    def post_notification_endpoint_id_labels(self, endpoint_id, label_mapping, **kwargs): ...
+    def post_notification_endpoint_id_labels_with_http_info(self, endpoint_id, label_mapping, **kwargs): ...
+    async def post_notification_endpoint_id_labels_async(self, endpoint_id, label_mapping, **kwargs): ...
+    def put_notification_endpoints_id(self, endpoint_id, notification_endpoint, **kwargs): ...
+    def put_notification_endpoints_id_with_http_info(self, endpoint_id, notification_endpoint, **kwargs): ...
+    async def put_notification_endpoints_id_async(self, endpoint_id, notification_endpoint, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/notification_rules_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/notification_rules_service.pyi
new file mode 100644
index 00000000..2a449e3f
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/notification_rules_service.pyi
@@ -0,0 +1,33 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class NotificationRulesService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def create_notification_rule(self, post_notification_rule, **kwargs): ...
+    def create_notification_rule_with_http_info(self, post_notification_rule, **kwargs): ...
+    async def create_notification_rule_async(self, post_notification_rule, **kwargs): ...
+    def delete_notification_rules_id(self, rule_id, **kwargs): ...
+    def delete_notification_rules_id_with_http_info(self, rule_id, **kwargs): ...
+    async def delete_notification_rules_id_async(self, rule_id, **kwargs): ...
+    def delete_notification_rules_id_labels_id(self, rule_id, label_id, **kwargs): ...
+    def delete_notification_rules_id_labels_id_with_http_info(self, rule_id, label_id, **kwargs): ...
+    async def delete_notification_rules_id_labels_id_async(self, rule_id, label_id, **kwargs): ...
+    def get_notification_rules(self, org_id, **kwargs): ...
+    def get_notification_rules_with_http_info(self, org_id, **kwargs): ...
+    async def get_notification_rules_async(self, org_id, **kwargs): ...
+    def get_notification_rules_id(self, rule_id, **kwargs): ...
+    def get_notification_rules_id_with_http_info(self, rule_id, **kwargs): ...
+    async def get_notification_rules_id_async(self, rule_id, **kwargs): ...
+    def get_notification_rules_id_labels(self, rule_id, **kwargs): ...
+    def get_notification_rules_id_labels_with_http_info(self, rule_id, **kwargs): ...
+    async def get_notification_rules_id_labels_async(self, rule_id, **kwargs): ...
+    def patch_notification_rules_id(self, rule_id, notification_rule_update, **kwargs): ...
+    def patch_notification_rules_id_with_http_info(self, rule_id, notification_rule_update, **kwargs): ...
+    async def patch_notification_rules_id_async(self, rule_id, notification_rule_update, **kwargs): ...
+    def post_notification_rule_id_labels(self, rule_id, label_mapping, **kwargs): ...
+    def post_notification_rule_id_labels_with_http_info(self, rule_id, label_mapping, **kwargs): ...
+    async def post_notification_rule_id_labels_async(self, rule_id, label_mapping, **kwargs): ...
+    def put_notification_rules_id(self, rule_id, notification_rule, **kwargs): ...
+    def put_notification_rules_id_with_http_info(self, rule_id, notification_rule, **kwargs): ...
+    async def put_notification_rules_id_async(self, rule_id, notification_rule, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/organizations_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/organizations_service.pyi
new file mode 100644
index 00000000..26b75e28
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/organizations_service.pyi
@@ -0,0 +1,39 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class OrganizationsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_orgs_id(self, org_id, **kwargs): ...
+    def delete_orgs_id_with_http_info(self, org_id, **kwargs): ...
+    async def delete_orgs_id_async(self, org_id, **kwargs): ...
+    def delete_orgs_id_members_id(self, user_id, org_id, **kwargs): ...
+    def delete_orgs_id_members_id_with_http_info(self, user_id, org_id, **kwargs): ...
+    async def delete_orgs_id_members_id_async(self, user_id, org_id, **kwargs): ...
+    def delete_orgs_id_owners_id(self, user_id, org_id, **kwargs): ...
+    def delete_orgs_id_owners_id_with_http_info(self, user_id, org_id, **kwargs): ...
+    async def delete_orgs_id_owners_id_async(self, user_id, org_id, **kwargs): ...
+    def get_orgs(self, **kwargs): ...
+    def get_orgs_with_http_info(self, **kwargs): ...
+    async def get_orgs_async(self, **kwargs): ...
+    def get_orgs_id(self, org_id, **kwargs): ...
+    def get_orgs_id_with_http_info(self, org_id, **kwargs): ...
+    async def get_orgs_id_async(self, org_id, **kwargs): ...
+    def get_orgs_id_members(self, org_id, **kwargs): ...
+    def get_orgs_id_members_with_http_info(self, org_id, **kwargs): ...
+    async def get_orgs_id_members_async(self, org_id, **kwargs): ...
+    def get_orgs_id_owners(self, org_id, **kwargs): ...
+    def get_orgs_id_owners_with_http_info(self, org_id, **kwargs): ...
+    async def get_orgs_id_owners_async(self, org_id, **kwargs): ...
+    def patch_orgs_id(self, org_id, patch_organization_request, **kwargs): ...
+    def patch_orgs_id_with_http_info(self, org_id, patch_organization_request, **kwargs): ...
+    async def patch_orgs_id_async(self, org_id, patch_organization_request, **kwargs): ...
+    def post_orgs(self, post_organization_request, **kwargs): ...
+    def post_orgs_with_http_info(self, post_organization_request, **kwargs): ...
+    async def post_orgs_async(self, post_organization_request, **kwargs): ...
+    def post_orgs_id_members(self, org_id, add_resource_member_request_body, **kwargs): ...
+    def post_orgs_id_members_with_http_info(self, org_id, add_resource_member_request_body, **kwargs): ...
+    async def post_orgs_id_members_async(self, org_id, add_resource_member_request_body, **kwargs): ...
+    def post_orgs_id_owners(self, org_id, add_resource_member_request_body, **kwargs): ...
+    def post_orgs_id_owners_with_http_info(self, org_id, add_resource_member_request_body, **kwargs): ...
+    async def post_orgs_id_owners_async(self, org_id, add_resource_member_request_body, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/ping_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/ping_service.pyi
new file mode 100644
index 00000000..fd1b5bfa
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/ping_service.pyi
@@ -0,0 +1,12 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class PingService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_ping(self, **kwargs): ...
+    def get_ping_with_http_info(self, **kwargs): ...
+    async def get_ping_async(self, **kwargs): ...
+    def head_ping(self, **kwargs): ...
+    def head_ping_with_http_info(self, **kwargs): ...
+    async def head_ping_async(self, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/query_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/query_service.pyi
new file mode 100644
index 00000000..409d0070
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/query_service.pyi
@@ -0,0 +1,21 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class QueryService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_query_suggestions(self, **kwargs): ...
+    def get_query_suggestions_with_http_info(self, **kwargs): ...
+    async def get_query_suggestions_async(self, **kwargs): ...
+    def get_query_suggestions_name(self, name, **kwargs): ...
+    def get_query_suggestions_name_with_http_info(self, name, **kwargs): ...
+    async def get_query_suggestions_name_async(self, name, **kwargs): ...
+    def post_query(self, **kwargs): ...
+    def post_query_with_http_info(self, **kwargs): ...
+    async def post_query_async(self, **kwargs): ...
+    def post_query_analyze(self, **kwargs): ...
+    def post_query_analyze_with_http_info(self, **kwargs): ...
+    async def post_query_analyze_async(self, **kwargs): ...
+    def post_query_ast(self, **kwargs): ...
+    def post_query_ast_with_http_info(self, **kwargs): ...
+    async def post_query_ast_async(self, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/ready_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/ready_service.pyi
new file mode 100644
index 00000000..ff3e13da
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/ready_service.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class ReadyService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_ready(self, **kwargs): ...
+    def get_ready_with_http_info(self, **kwargs): ...
+    async def get_ready_async(self, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/remote_connections_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/remote_connections_service.pyi
new file mode 100644
index 00000000..1bfede4c
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/remote_connections_service.pyi
@@ -0,0 +1,21 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class RemoteConnectionsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_remote_connection_by_id(self, remote_id, **kwargs): ...
+    def delete_remote_connection_by_id_with_http_info(self, remote_id, **kwargs): ...
+    async def delete_remote_connection_by_id_async(self, remote_id, **kwargs): ...
+    def get_remote_connection_by_id(self, remote_id, **kwargs): ...
+    def get_remote_connection_by_id_with_http_info(self, remote_id, **kwargs): ...
+    async def get_remote_connection_by_id_async(self, remote_id, **kwargs): ...
+    def get_remote_connections(self, org_id, **kwargs): ...
+    def get_remote_connections_with_http_info(self, org_id, **kwargs): ...
+    async def get_remote_connections_async(self, org_id, **kwargs): ...
+    def patch_remote_connection_by_id(self, remote_id, remote_connection_update_request, **kwargs): ...
+    def patch_remote_connection_by_id_with_http_info(self, remote_id, remote_connection_update_request, **kwargs): ...
+    async def patch_remote_connection_by_id_async(self, remote_id, remote_connection_update_request, **kwargs): ...
+    def post_remote_connection(self, remote_connection_creation_request, **kwargs): ...
+    def post_remote_connection_with_http_info(self, remote_connection_creation_request, **kwargs): ...
+    async def post_remote_connection_async(self, remote_connection_creation_request, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/replications_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/replications_service.pyi
new file mode 100644
index 00000000..96e1056f
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/replications_service.pyi
@@ -0,0 +1,24 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class ReplicationsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_replication_by_id(self, replication_id, **kwargs): ...
+    def delete_replication_by_id_with_http_info(self, replication_id, **kwargs): ...
+    async def delete_replication_by_id_async(self, replication_id, **kwargs): ...
+    def get_replication_by_id(self, replication_id, **kwargs): ...
+    def get_replication_by_id_with_http_info(self, replication_id, **kwargs): ...
+    async def get_replication_by_id_async(self, replication_id, **kwargs): ...
+    def get_replications(self, org_id, **kwargs): ...
+    def get_replications_with_http_info(self, org_id, **kwargs): ...
+    async def get_replications_async(self, org_id, **kwargs): ...
+    def patch_replication_by_id(self, replication_id, replication_update_request, **kwargs): ...
+    def patch_replication_by_id_with_http_info(self, replication_id, replication_update_request, **kwargs): ...
+    async def patch_replication_by_id_async(self, replication_id, replication_update_request, **kwargs): ...
+    def post_replication(self, replication_creation_request, **kwargs): ...
+    def post_replication_with_http_info(self, replication_creation_request, **kwargs): ...
+    async def post_replication_async(self, replication_creation_request, **kwargs): ...
+    def post_validate_replication_by_id(self, replication_id, **kwargs): ...
+    def post_validate_replication_by_id_with_http_info(self, replication_id, **kwargs): ...
+    async def post_validate_replication_by_id_async(self, replication_id, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/resources_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/resources_service.pyi
new file mode 100644
index 00000000..c5894161
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/resources_service.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class ResourcesService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_resources(self, **kwargs): ...
+    def get_resources_with_http_info(self, **kwargs): ...
+    async def get_resources_async(self, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/restore_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/restore_service.pyi
new file mode 100644
index 00000000..c826e5a4
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/restore_service.pyi
@@ -0,0 +1,21 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class RestoreService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def post_restore_bucket_id(self, bucket_id, body, **kwargs): ...
+    def post_restore_bucket_id_with_http_info(self, bucket_id, body, **kwargs): ...
+    async def post_restore_bucket_id_async(self, bucket_id, body, **kwargs): ...
+    def post_restore_bucket_metadata(self, bucket_metadata_manifest, **kwargs): ...
+    def post_restore_bucket_metadata_with_http_info(self, bucket_metadata_manifest, **kwargs): ...
+    async def post_restore_bucket_metadata_async(self, bucket_metadata_manifest, **kwargs): ...
+    def post_restore_kv(self, body, **kwargs): ...
+    def post_restore_kv_with_http_info(self, body, **kwargs): ...
+    async def post_restore_kv_async(self, body, **kwargs): ...
+    def post_restore_shard_id(self, shard_id, body, **kwargs): ...
+    def post_restore_shard_id_with_http_info(self, shard_id, body, **kwargs): ...
+    async def post_restore_shard_id_async(self, shard_id, body, **kwargs): ...
+    def post_restore_sql(self, body, **kwargs): ...
+    def post_restore_sql_with_http_info(self, body, **kwargs): ...
+    async def post_restore_sql_async(self, body, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/routes_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/routes_service.pyi
new file mode 100644
index 00000000..d04bac3a
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/routes_service.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class RoutesService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_routes(self, **kwargs): ...
+    def get_routes_with_http_info(self, **kwargs): ...
+    async def get_routes_async(self, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/rules_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/rules_service.pyi
new file mode 100644
index 00000000..e11bec1b
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/rules_service.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class RulesService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_notification_rules_id_query(self, rule_id, **kwargs): ...
+    def get_notification_rules_id_query_with_http_info(self, rule_id, **kwargs): ...
+    async def get_notification_rules_id_query_async(self, rule_id, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/scraper_targets_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/scraper_targets_service.pyi
new file mode 100644
index 00000000..20876f18
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/scraper_targets_service.pyi
@@ -0,0 +1,48 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class ScraperTargetsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_scrapers_id(self, scraper_target_id, **kwargs): ...
+    def delete_scrapers_id_with_http_info(self, scraper_target_id, **kwargs): ...
+    async def delete_scrapers_id_async(self, scraper_target_id, **kwargs): ...
+    def delete_scrapers_id_labels_id(self, scraper_target_id, label_id, **kwargs): ...
+    def delete_scrapers_id_labels_id_with_http_info(self, scraper_target_id, label_id, **kwargs): ...
+    async def delete_scrapers_id_labels_id_async(self, scraper_target_id, label_id, **kwargs): ...
+    def delete_scrapers_id_members_id(self, user_id, scraper_target_id, **kwargs): ...
+    def delete_scrapers_id_members_id_with_http_info(self, user_id, scraper_target_id, **kwargs): ...
+    async def delete_scrapers_id_members_id_async(self, user_id, scraper_target_id, **kwargs): ...
+    def delete_scrapers_id_owners_id(self, user_id, scraper_target_id, **kwargs): ...
+    def delete_scrapers_id_owners_id_with_http_info(self, user_id, scraper_target_id, **kwargs): ...
+    async def delete_scrapers_id_owners_id_async(self, user_id, scraper_target_id, **kwargs): ...
+    def get_scrapers(self, **kwargs): ...
+    def get_scrapers_with_http_info(self, **kwargs): ...
+    async def get_scrapers_async(self, **kwargs): ...
+    def get_scrapers_id(self, scraper_target_id, **kwargs): ...
+    def get_scrapers_id_with_http_info(self, scraper_target_id, **kwargs): ...
+    async def get_scrapers_id_async(self, scraper_target_id, **kwargs): ...
+    def get_scrapers_id_labels(self, scraper_target_id, **kwargs): ...
+    def get_scrapers_id_labels_with_http_info(self, scraper_target_id, **kwargs): ...
+    async def get_scrapers_id_labels_async(self, scraper_target_id, **kwargs): ...
+    def get_scrapers_id_members(self, scraper_target_id, **kwargs): ...
+    def get_scrapers_id_members_with_http_info(self, scraper_target_id, **kwargs): ...
+    async def get_scrapers_id_members_async(self, scraper_target_id, **kwargs): ...
+    def get_scrapers_id_owners(self, scraper_target_id, **kwargs): ...
+    def get_scrapers_id_owners_with_http_info(self, scraper_target_id, **kwargs): ...
+    async def get_scrapers_id_owners_async(self, scraper_target_id, **kwargs): ...
+    def patch_scrapers_id(self, scraper_target_id, scraper_target_request, **kwargs): ...
+    def patch_scrapers_id_with_http_info(self, scraper_target_id, scraper_target_request, **kwargs): ...
+    async def patch_scrapers_id_async(self, scraper_target_id, scraper_target_request, **kwargs): ...
+    def post_scrapers(self, scraper_target_request, **kwargs): ...
+    def post_scrapers_with_http_info(self, scraper_target_request, **kwargs): ...
+    async def post_scrapers_async(self, scraper_target_request, **kwargs): ...
+    def post_scrapers_id_labels(self, scraper_target_id, label_mapping, **kwargs): ...
+    def post_scrapers_id_labels_with_http_info(self, scraper_target_id, label_mapping, **kwargs): ...
+    async def post_scrapers_id_labels_async(self, scraper_target_id, label_mapping, **kwargs): ...
+    def post_scrapers_id_members(self, scraper_target_id, add_resource_member_request_body, **kwargs): ...
+    def post_scrapers_id_members_with_http_info(self, scraper_target_id, add_resource_member_request_body, **kwargs): ...
+    async def post_scrapers_id_members_async(self, scraper_target_id, add_resource_member_request_body, **kwargs): ...
+    def post_scrapers_id_owners(self, scraper_target_id, add_resource_member_request_body, **kwargs): ...
+    def post_scrapers_id_owners_with_http_info(self, scraper_target_id, add_resource_member_request_body, **kwargs): ...
+    async def post_scrapers_id_owners_async(self, scraper_target_id, add_resource_member_request_body, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/secrets_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/secrets_service.pyi
new file mode 100644
index 00000000..8e5e2c02
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/secrets_service.pyi
@@ -0,0 +1,18 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class SecretsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_orgs_id_secrets_id(self, org_id, secret_id, **kwargs): ...
+    def delete_orgs_id_secrets_id_with_http_info(self, org_id, secret_id, **kwargs): ...
+    async def delete_orgs_id_secrets_id_async(self, org_id, secret_id, **kwargs): ...
+    def get_orgs_id_secrets(self, org_id, **kwargs): ...
+    def get_orgs_id_secrets_with_http_info(self, org_id, **kwargs): ...
+    async def get_orgs_id_secrets_async(self, org_id, **kwargs): ...
+    def patch_orgs_id_secrets(self, org_id, request_body, **kwargs): ...
+    def patch_orgs_id_secrets_with_http_info(self, org_id, request_body, **kwargs): ...
+    async def patch_orgs_id_secrets_async(self, org_id, request_body, **kwargs): ...
+    def post_orgs_id_secrets(self, org_id, secret_keys, **kwargs): ...
+    def post_orgs_id_secrets_with_http_info(self, org_id, secret_keys, **kwargs): ...
+    async def post_orgs_id_secrets_async(self, org_id, secret_keys, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/setup_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/setup_service.pyi
new file mode 100644
index 00000000..a423d9ea
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/setup_service.pyi
@@ -0,0 +1,12 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class SetupService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_setup(self, **kwargs): ...
+    def get_setup_with_http_info(self, **kwargs): ...
+    async def get_setup_async(self, **kwargs): ...
+    def post_setup(self, onboarding_request, **kwargs): ...
+    def post_setup_with_http_info(self, onboarding_request, **kwargs): ...
+    async def post_setup_async(self, onboarding_request, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/signin_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/signin_service.pyi
new file mode 100644
index 00000000..45c33564
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/signin_service.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class SigninService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def post_signin(self, **kwargs): ...
+    def post_signin_with_http_info(self, **kwargs): ...
+    async def post_signin_async(self, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/signout_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/signout_service.pyi
new file mode 100644
index 00000000..99940d20
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/signout_service.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class SignoutService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def post_signout(self, **kwargs): ...
+    def post_signout_with_http_info(self, **kwargs): ...
+    async def post_signout_async(self, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/sources_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/sources_service.pyi
new file mode 100644
index 00000000..8ee48a13
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/sources_service.pyi
@@ -0,0 +1,27 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class SourcesService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_sources_id(self, source_id, **kwargs): ...
+    def delete_sources_id_with_http_info(self, source_id, **kwargs): ...
+    async def delete_sources_id_async(self, source_id, **kwargs): ...
+    def get_sources(self, **kwargs): ...
+    def get_sources_with_http_info(self, **kwargs): ...
+    async def get_sources_async(self, **kwargs): ...
+    def get_sources_id(self, source_id, **kwargs): ...
+    def get_sources_id_with_http_info(self, source_id, **kwargs): ...
+    async def get_sources_id_async(self, source_id, **kwargs): ...
+    def get_sources_id_buckets(self, source_id, **kwargs): ...
+    def get_sources_id_buckets_with_http_info(self, source_id, **kwargs): ...
+    async def get_sources_id_buckets_async(self, source_id, **kwargs): ...
+    def get_sources_id_health(self, source_id, **kwargs): ...
+    def get_sources_id_health_with_http_info(self, source_id, **kwargs): ...
+    async def get_sources_id_health_async(self, source_id, **kwargs): ...
+    def patch_sources_id(self, source_id, source, **kwargs): ...
+    def patch_sources_id_with_http_info(self, source_id, source, **kwargs): ...
+    async def patch_sources_id_async(self, source_id, source, **kwargs): ...
+    def post_sources(self, source, **kwargs): ...
+    def post_sources_with_http_info(self, source, **kwargs): ...
+    async def post_sources_async(self, source, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/tasks_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/tasks_service.pyi
new file mode 100644
index 00000000..218df105
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/tasks_service.pyi
@@ -0,0 +1,69 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class TasksService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_tasks_id(self, task_id, **kwargs): ...
+    def delete_tasks_id_with_http_info(self, task_id, **kwargs): ...
+    async def delete_tasks_id_async(self, task_id, **kwargs): ...
+    def delete_tasks_id_labels_id(self, task_id, label_id, **kwargs): ...
+    def delete_tasks_id_labels_id_with_http_info(self, task_id, label_id, **kwargs): ...
+    async def delete_tasks_id_labels_id_async(self, task_id, label_id, **kwargs): ...
+    def delete_tasks_id_members_id(self, user_id, task_id, **kwargs): ...
+    def delete_tasks_id_members_id_with_http_info(self, user_id, task_id, **kwargs): ...
+    async def delete_tasks_id_members_id_async(self, user_id, task_id, **kwargs): ...
+    def delete_tasks_id_owners_id(self, user_id, task_id, **kwargs): ...
+    def delete_tasks_id_owners_id_with_http_info(self, user_id, task_id, **kwargs): ...
+    async def delete_tasks_id_owners_id_async(self, user_id, task_id, **kwargs): ...
+    def delete_tasks_id_runs_id(self, task_id, run_id, **kwargs): ...
+    def delete_tasks_id_runs_id_with_http_info(self, task_id, run_id, **kwargs): ...
+    async def delete_tasks_id_runs_id_async(self, task_id, run_id, **kwargs): ...
+    def get_tasks(self, **kwargs): ...
+    def get_tasks_with_http_info(self, **kwargs): ...
+    async def get_tasks_async(self, **kwargs): ...
+    def get_tasks_id(self, task_id, **kwargs): ...
+    def get_tasks_id_with_http_info(self, task_id, **kwargs): ...
+    async def get_tasks_id_async(self, task_id, **kwargs): ...
+    def get_tasks_id_labels(self, task_id, **kwargs): ...
+    def get_tasks_id_labels_with_http_info(self, task_id, **kwargs): ...
+    async def get_tasks_id_labels_async(self, task_id, **kwargs): ...
+    def get_tasks_id_logs(self, task_id, **kwargs): ...
+    def get_tasks_id_logs_with_http_info(self, task_id, **kwargs): ...
+    async def get_tasks_id_logs_async(self, task_id, **kwargs): ...
+    def get_tasks_id_members(self, task_id, **kwargs): ...
+    def get_tasks_id_members_with_http_info(self, task_id, **kwargs): ...
+    async def get_tasks_id_members_async(self, task_id, **kwargs): ...
+    def get_tasks_id_owners(self, task_id, **kwargs): ...
+    def get_tasks_id_owners_with_http_info(self, task_id, **kwargs): ...
+    async def get_tasks_id_owners_async(self, task_id, **kwargs): ...
+    def get_tasks_id_runs(self, task_id, **kwargs): ...
+    def get_tasks_id_runs_with_http_info(self, task_id, **kwargs): ...
+    async def get_tasks_id_runs_async(self, task_id, **kwargs): ...
+    def get_tasks_id_runs_id(self, task_id, run_id, **kwargs): ...
+    def get_tasks_id_runs_id_with_http_info(self, task_id, run_id, **kwargs): ...
+    async def get_tasks_id_runs_id_async(self, task_id, run_id, **kwargs): ...
+    def get_tasks_id_runs_id_logs(self, task_id, run_id, **kwargs): ...
+    def get_tasks_id_runs_id_logs_with_http_info(self, task_id, run_id, **kwargs): ...
+    async def get_tasks_id_runs_id_logs_async(self, task_id, run_id, **kwargs): ...
+    def patch_tasks_id(self, task_id, task_update_request, **kwargs): ...
+    def patch_tasks_id_with_http_info(self, task_id, task_update_request, **kwargs): ...
+    async def patch_tasks_id_async(self, task_id, task_update_request, **kwargs): ...
+    def post_tasks(self, task_create_request, **kwargs): ...
+    def post_tasks_with_http_info(self, task_create_request, **kwargs): ...
+    async def post_tasks_async(self, task_create_request, **kwargs): ...
+    def post_tasks_id_labels(self, task_id, label_mapping, **kwargs): ...
+    def post_tasks_id_labels_with_http_info(self, task_id, label_mapping, **kwargs): ...
+    async def post_tasks_id_labels_async(self, task_id, label_mapping, **kwargs): ...
+    def post_tasks_id_members(self, task_id, add_resource_member_request_body, **kwargs): ...
+    def post_tasks_id_members_with_http_info(self, task_id, add_resource_member_request_body, **kwargs): ...
+    async def post_tasks_id_members_async(self, task_id, add_resource_member_request_body, **kwargs): ...
+    def post_tasks_id_owners(self, task_id, add_resource_member_request_body, **kwargs): ...
+    def post_tasks_id_owners_with_http_info(self, task_id, add_resource_member_request_body, **kwargs): ...
+    async def post_tasks_id_owners_async(self, task_id, add_resource_member_request_body, **kwargs): ...
+    def post_tasks_id_runs(self, task_id, **kwargs): ...
+    def post_tasks_id_runs_with_http_info(self, task_id, **kwargs): ...
+    async def post_tasks_id_runs_async(self, task_id, **kwargs): ...
+    def post_tasks_id_runs_id_retry(self, task_id, run_id, **kwargs): ...
+    def post_tasks_id_runs_id_retry_with_http_info(self, task_id, run_id, **kwargs): ...
+    async def post_tasks_id_runs_id_retry_async(self, task_id, run_id, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/telegraf_plugins_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/telegraf_plugins_service.pyi
new file mode 100644
index 00000000..cfa93453
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/telegraf_plugins_service.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class TelegrafPluginsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_telegraf_plugins(self, **kwargs): ...
+    def get_telegraf_plugins_with_http_info(self, **kwargs): ...
+    async def get_telegraf_plugins_async(self, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/telegrafs_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/telegrafs_service.pyi
new file mode 100644
index 00000000..18594458
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/telegrafs_service.pyi
@@ -0,0 +1,48 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class TelegrafsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_telegrafs_id(self, telegraf_id, **kwargs): ...
+    def delete_telegrafs_id_with_http_info(self, telegraf_id, **kwargs): ...
+    async def delete_telegrafs_id_async(self, telegraf_id, **kwargs): ...
+    def delete_telegrafs_id_labels_id(self, telegraf_id, label_id, **kwargs): ...
+    def delete_telegrafs_id_labels_id_with_http_info(self, telegraf_id, label_id, **kwargs): ...
+    async def delete_telegrafs_id_labels_id_async(self, telegraf_id, label_id, **kwargs): ...
+    def delete_telegrafs_id_members_id(self, user_id, telegraf_id, **kwargs): ...
+    def delete_telegrafs_id_members_id_with_http_info(self, user_id, telegraf_id, **kwargs): ...
+    async def delete_telegrafs_id_members_id_async(self, user_id, telegraf_id, **kwargs): ...
+    def delete_telegrafs_id_owners_id(self, user_id, telegraf_id, **kwargs): ...
+    def delete_telegrafs_id_owners_id_with_http_info(self, user_id, telegraf_id, **kwargs): ...
+    async def delete_telegrafs_id_owners_id_async(self, user_id, telegraf_id, **kwargs): ...
+    def get_telegrafs(self, **kwargs): ...
+    def get_telegrafs_with_http_info(self, **kwargs): ...
+    async def get_telegrafs_async(self, **kwargs): ...
+    def get_telegrafs_id(self, telegraf_id, **kwargs): ...
+    def get_telegrafs_id_with_http_info(self, telegraf_id, **kwargs): ...
+    async def get_telegrafs_id_async(self, telegraf_id, **kwargs): ...
+    def get_telegrafs_id_labels(self, telegraf_id, **kwargs): ...
+    def get_telegrafs_id_labels_with_http_info(self, telegraf_id, **kwargs): ...
+    async def get_telegrafs_id_labels_async(self, telegraf_id, **kwargs): ...
+    def get_telegrafs_id_members(self, telegraf_id, **kwargs): ...
+    def get_telegrafs_id_members_with_http_info(self, telegraf_id, **kwargs): ...
+    async def get_telegrafs_id_members_async(self, telegraf_id, **kwargs): ...
+    def get_telegrafs_id_owners(self, telegraf_id, **kwargs): ...
+    def get_telegrafs_id_owners_with_http_info(self, telegraf_id, **kwargs): ...
+    async def get_telegrafs_id_owners_async(self, telegraf_id, **kwargs): ...
+    def post_telegrafs(self, telegraf_plugin_request, **kwargs): ...
+    def post_telegrafs_with_http_info(self, telegraf_plugin_request, **kwargs): ...
+    async def post_telegrafs_async(self, telegraf_plugin_request, **kwargs): ...
+    def post_telegrafs_id_labels(self, telegraf_id, label_mapping, **kwargs): ...
+    def post_telegrafs_id_labels_with_http_info(self, telegraf_id, label_mapping, **kwargs): ...
+    async def post_telegrafs_id_labels_async(self, telegraf_id, label_mapping, **kwargs): ...
+    def post_telegrafs_id_members(self, telegraf_id, add_resource_member_request_body, **kwargs): ...
+    def post_telegrafs_id_members_with_http_info(self, telegraf_id, add_resource_member_request_body, **kwargs): ...
+    async def post_telegrafs_id_members_async(self, telegraf_id, add_resource_member_request_body, **kwargs): ...
+    def post_telegrafs_id_owners(self, telegraf_id, add_resource_member_request_body, **kwargs): ...
+    def post_telegrafs_id_owners_with_http_info(self, telegraf_id, add_resource_member_request_body, **kwargs): ...
+    async def post_telegrafs_id_owners_async(self, telegraf_id, add_resource_member_request_body, **kwargs): ...
+    def put_telegrafs_id(self, telegraf_id, telegraf_plugin_request, **kwargs): ...
+    def put_telegrafs_id_with_http_info(self, telegraf_id, telegraf_plugin_request, **kwargs): ...
+    async def put_telegrafs_id_async(self, telegraf_id, telegraf_plugin_request, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/templates_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/templates_service.pyi
new file mode 100644
index 00000000..199f0fc1
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/templates_service.pyi
@@ -0,0 +1,30 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class TemplatesService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def apply_template(self, template_apply, **kwargs): ...
+    def apply_template_with_http_info(self, template_apply, **kwargs): ...
+    async def apply_template_async(self, template_apply, **kwargs): ...
+    def create_stack(self, **kwargs): ...
+    def create_stack_with_http_info(self, **kwargs): ...
+    async def create_stack_async(self, **kwargs): ...
+    def delete_stack(self, stack_id, org_id, **kwargs): ...
+    def delete_stack_with_http_info(self, stack_id, org_id, **kwargs): ...
+    async def delete_stack_async(self, stack_id, org_id, **kwargs): ...
+    def export_template(self, **kwargs): ...
+    def export_template_with_http_info(self, **kwargs): ...
+    async def export_template_async(self, **kwargs): ...
+    def list_stacks(self, org_id, **kwargs): ...
+    def list_stacks_with_http_info(self, org_id, **kwargs): ...
+    async def list_stacks_async(self, org_id, **kwargs): ...
+    def read_stack(self, stack_id, **kwargs): ...
+    def read_stack_with_http_info(self, stack_id, **kwargs): ...
+    async def read_stack_async(self, stack_id, **kwargs): ...
+    def uninstall_stack(self, stack_id, **kwargs): ...
+    def uninstall_stack_with_http_info(self, stack_id, **kwargs): ...
+    async def uninstall_stack_async(self, stack_id, **kwargs): ...
+    def update_stack(self, stack_id, **kwargs): ...
+    def update_stack_with_http_info(self, stack_id, **kwargs): ...
+    async def update_stack_async(self, stack_id, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/users_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/users_service.pyi
new file mode 100644
index 00000000..b0bbd881
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/users_service.pyi
@@ -0,0 +1,36 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class UsersService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_users_id(self, user_id, **kwargs): ...
+    def delete_users_id_with_http_info(self, user_id, **kwargs): ...
+    async def delete_users_id_async(self, user_id, **kwargs): ...
+    def get_flags(self, **kwargs): ...
+    def get_flags_with_http_info(self, **kwargs): ...
+    async def get_flags_async(self, **kwargs): ...
+    def get_me(self, **kwargs): ...
+    def get_me_with_http_info(self, **kwargs): ...
+    async def get_me_async(self, **kwargs): ...
+    def get_users(self, **kwargs): ...
+    def get_users_with_http_info(self, **kwargs): ...
+    async def get_users_async(self, **kwargs): ...
+    def get_users_id(self, user_id, **kwargs): ...
+    def get_users_id_with_http_info(self, user_id, **kwargs): ...
+    async def get_users_id_async(self, user_id, **kwargs): ...
+    def patch_users_id(self, user_id, user, **kwargs): ...
+    def patch_users_id_with_http_info(self, user_id, user, **kwargs): ...
+    async def patch_users_id_async(self, user_id, user, **kwargs): ...
+    def post_users(self, user, **kwargs): ...
+    def post_users_with_http_info(self, user, **kwargs): ...
+    async def post_users_async(self, user, **kwargs): ...
+    def post_users_id_password(self, user_id, password_reset_body, **kwargs): ...
+    def post_users_id_password_with_http_info(self, user_id, password_reset_body, **kwargs): ...
+    async def post_users_id_password_async(self, user_id, password_reset_body, **kwargs): ...
+    def put_me_password(self, password_reset_body, **kwargs): ...
+    def put_me_password_with_http_info(self, password_reset_body, **kwargs): ...
+    async def put_me_password_async(self, password_reset_body, **kwargs): ...
+    def put_users_id_password(self, user_id, password_reset_body, **kwargs): ...
+    def put_users_id_password_with_http_info(self, user_id, password_reset_body, **kwargs): ...
+    async def put_users_id_password_async(self, user_id, password_reset_body, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/variables_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/variables_service.pyi
new file mode 100644
index 00000000..4a180192
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/variables_service.pyi
@@ -0,0 +1,33 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class VariablesService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def delete_variables_id(self, variable_id, **kwargs): ...
+    def delete_variables_id_with_http_info(self, variable_id, **kwargs): ...
+    async def delete_variables_id_async(self, variable_id, **kwargs): ...
+    def delete_variables_id_labels_id(self, variable_id, label_id, **kwargs): ...
+    def delete_variables_id_labels_id_with_http_info(self, variable_id, label_id, **kwargs): ...
+    async def delete_variables_id_labels_id_async(self, variable_id, label_id, **kwargs): ...
+    def get_variables(self, **kwargs): ...
+    def get_variables_with_http_info(self, **kwargs): ...
+    async def get_variables_async(self, **kwargs): ...
+    def get_variables_id(self, variable_id, **kwargs): ...
+    def get_variables_id_with_http_info(self, variable_id, **kwargs): ...
+    async def get_variables_id_async(self, variable_id, **kwargs): ...
+    def get_variables_id_labels(self, variable_id, **kwargs): ...
+    def get_variables_id_labels_with_http_info(self, variable_id, **kwargs): ...
+    async def get_variables_id_labels_async(self, variable_id, **kwargs): ...
+    def patch_variables_id(self, variable_id, variable, **kwargs): ...
+    def patch_variables_id_with_http_info(self, variable_id, variable, **kwargs): ...
+    async def patch_variables_id_async(self, variable_id, variable, **kwargs): ...
+    def post_variables(self, variable, **kwargs): ...
+    def post_variables_with_http_info(self, variable, **kwargs): ...
+    async def post_variables_async(self, variable, **kwargs): ...
+    def post_variables_id_labels(self, variable_id, label_mapping, **kwargs): ...
+    def post_variables_id_labels_with_http_info(self, variable_id, label_mapping, **kwargs): ...
+    async def post_variables_id_labels_async(self, variable_id, label_mapping, **kwargs): ...
+    def put_variables_id(self, variable_id, variable, **kwargs): ...
+    def put_variables_id_with_http_info(self, variable_id, variable, **kwargs): ...
+    async def put_variables_id_async(self, variable_id, variable, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/views_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/views_service.pyi
new file mode 100644
index 00000000..127b8e5d
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/views_service.pyi
@@ -0,0 +1,12 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class ViewsService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def get_dashboards_id_cells_id_view(self, dashboard_id, cell_id, **kwargs): ...
+    def get_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, **kwargs): ...
+    async def get_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, **kwargs): ...
+    def patch_dashboards_id_cells_id_view(self, dashboard_id, cell_id, view, **kwargs): ...
+    def patch_dashboards_id_cells_id_view_with_http_info(self, dashboard_id, cell_id, view, **kwargs): ...
+    async def patch_dashboards_id_cells_id_view_async(self, dashboard_id, cell_id, view, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/write_service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/write_service.pyi
new file mode 100644
index 00000000..7f1ec2eb
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/service/write_service.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+
+from influxdb_client.service._base_service import _BaseService
+
+class WriteService(_BaseService):
+    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
+    def post_write(self, org, bucket, body, **kwargs): ...
+    def post_write_with_http_info(self, org, bucket, body, **kwargs): ...
+    async def post_write_async(self, org, bucket, body, **kwargs): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/version.pyi new file mode 100644 index 00000000..3acee936 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/influxdb-client/influxdb_client/version.pyi @@ -0,0 +1 @@ +VERSION: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..7ff918e3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/@tests/stubtest_allowlist.txt @@ -0,0 +1,3 @@ +# Metaclass differs: +invoke.parser.ParseMachine +invoke.parser.parser.ParseMachine diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/@tests/test_cases/check_task.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/@tests/test_cases/check_task.py new file mode 100644 index 00000000..a56d1488 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/@tests/test_cases/check_task.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +from invoke import Context, task + +# =========================================== +# This snippet is a regression test for #8936 +# =========================================== + + +@task +def docker_build(context: Context) -> None: + pass + + +@task(docker_build) +def docker_push(context: Context) -> None: + pass diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/METADATA.toml new file mode 100644 index 00000000..d9b8010e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/METADATA.toml @@ -0,0 +1,4 @@ +version = "2.0.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/__init__.pyi new file mode 100644 index 00000000..6bf0e8a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/__init__.pyi @@ -0,0 +1,37 @@ +from typing import Any + +from .collection import Collection as Collection +from .config import Config as Config +from .context import Context as Context, MockContext as MockContext +from .exceptions import ( + AmbiguousEnvVar as AmbiguousEnvVar, + AuthFailure as AuthFailure, + CollectionNotFound as CollectionNotFound, + CommandTimedOut as CommandTimedOut, + Exit as Exit, + Failure as Failure, + ParseError as ParseError, + PlatformError as PlatformError, + ResponseNotAccepted as ResponseNotAccepted, + SubprocessPipeError as SubprocessPipeError, + ThreadException as ThreadException, + UncastableEnvVar as UncastableEnvVar, + UnexpectedExit as UnexpectedExit, + UnknownFileType as UnknownFileType, + UnpicklableConfigMember as UnpicklableConfigMember, + WatcherError as WatcherError, +) +from .executor import Executor as Executor +from .loader import FilesystemLoader as FilesystemLoader +from .parser import Argument as 
Argument, Parser as Parser, ParserContext as ParserContext, ParseResult as ParseResult +from .program import Program as Program +from .runners import Local as Local, Promise as Promise, Result as Result, Runner as Runner +from .tasks import Call as Call, Task as Task, call as call, task as task +from .terminals import pty_size as pty_size +from .watchers import FailingResponder as FailingResponder, Responder as Responder, StreamWatcher as StreamWatcher + +__version_info__: tuple[int, int, int] +__version__: str + +def run(command: str, **kwargs: Any) -> Result: ... +def sudo(command: str, **kwargs: Any) -> Result: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/collection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/collection.pyi new file mode 100644 index 00000000..415b0bfe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/collection.pyi @@ -0,0 +1,24 @@ +from typing import Any + +class Collection: + tasks: Any + collections: Any + default: str | None + name: str | None + loaded_from: Any + auto_dash_names: bool + def __init__(self, *args, **kwargs) -> None: ... + @classmethod + def from_module(cls, module, name=..., config=..., loaded_from=..., auto_dash_names=...): ... + def add_task(self, task, name=..., aliases=..., default=...) -> None: ... + def add_collection(self, coll, name=..., default=...) -> None: ... + def subcollection_from_path(self, path): ... + def task_with_config(self, name): ... + def to_contexts(self, ignore_unknown_help: bool | None = ...): ... + def subtask_name(self, collection_name, task_name): ... + def transform(self, name): ... + @property + def task_names(self): ... + def configuration(self, taskpath=...): ... + def configure(self, options) -> None: ... + def serialized(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/completion/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/completion/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/completion/complete.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/completion/complete.pyi new file mode 100644 index 00000000..58661e89 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/completion/complete.pyi @@ -0,0 +1,11 @@ +from collections.abc import Iterable, Sequence +from typing import NoReturn + +from ..collection import Collection +from ..parser import Parser, ParserContext, ParseResult + +def complete( + names: Iterable[str], core: ParseResult, initial_context: ParserContext, collection: Collection, parser: Parser +) -> NoReturn: ... +def print_task_names(collection: Collection) -> None: ... +def print_completion_script(shell: str, names: Sequence[str]) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/config.pyi new file mode 100644 index 00000000..6e843fde --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/config.pyi @@ -0,0 +1,60 @@ +from typing import Any, ClassVar + +def load_source(name: str, path: str) -> dict[str, Any]: ... + +class DataProxy: + @classmethod + def from_data(cls, data, root=..., keypath=...): ... + def __getattr__(self, key: str): ... + def __setattr__(self, key: str, value) -> None: ... + def __iter__(self): ... + def __eq__(self, other): ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __len__(self) -> int: ... + def __setitem__(self, key, value) -> None: ... + def __getitem__(self, key): ... + def __contains__(self, key): ... + def __delitem__(self, key) -> None: ... + def __delattr__(self, name: str) -> None: ... + def clear(self) -> None: ... + def pop(self, *args): ... + def popitem(self): ... + def setdefault(self, *args): ... + def update(self, *args, **kwargs) -> None: ... + +class Config(DataProxy): + prefix: str + file_prefix: Any + env_prefix: Any + @staticmethod + def global_defaults(): ... + def __init__( + self, + overrides=..., + defaults=..., + system_prefix=..., + user_prefix=..., + project_location=..., + runtime_path=..., + lazy: bool = ..., + ) -> None: ... + def load_base_conf_files(self) -> None: ... + def load_defaults(self, data, merge: bool = ...) -> None: ... + def load_overrides(self, data, merge: bool = ...) -> None: ... + def load_system(self, merge: bool = ...) -> None: ... + def load_user(self, merge: bool = ...) -> None: ... + def load_project(self, merge: bool = ...) -> None: ... + def set_runtime_path(self, path) -> None: ... + def load_runtime(self, merge: bool = ...) -> None: ... + def load_shell_env(self) -> None: ... + def load_collection(self, data, merge: bool = ...) -> None: ... + def set_project_location(self, path) -> None: ... + def merge(self) -> None: ... + def clone(self, into=...): ... + +class AmbiguousMergeError(ValueError): ... + +def merge_dicts(base, updates): ... +def copy_dict(source): ... +def excise(dict_, keypath) -> None: ... +def obliterate(base, deletions) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/context.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/context.pyi new file mode 100644 index 00000000..b8013b10 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/context.pyi @@ -0,0 +1,22 @@ +from contextlib import AbstractContextManager + +from .config import Config, DataProxy + +class Context(DataProxy): + def __init__(self, config: Config | None = ...) -> None: ... + @property + def config(self) -> Config: ... + @config.setter + def config(self, value: Config) -> None: ... + def run(self, command: str, **kwargs): ... + def sudo(self, command: str, *, password: str = ..., user: str = ..., **kwargs): ... + def prefix(self, command: str) -> AbstractContextManager[None]: ... + @property + def cwd(self) -> str: ... + def cd(self, path: str) -> AbstractContextManager[None]: ... + +class MockContext(Context): + def __init__(self, config: Config | None = ..., **kwargs) -> None: ... + def run(self, command: str, *args, **kwargs): ... 
+ def sudo(self, command: str, *args, **kwargs): ... + def set_result_for(self, attname, command, result) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/env.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/env.pyi new file mode 100644 index 00000000..b7feeb47 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/env.pyi @@ -0,0 +1,6 @@ +from typing import Any + +class Environment: + data: dict[str, Any] + def __init__(self, config: dict[str, Any], prefix: str) -> None: ... + def load(self) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/exceptions.pyi new file mode 100644 index 00000000..437cbc93 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/exceptions.pyi @@ -0,0 +1,47 @@ +from typing import Any + +class CollectionNotFound(Exception): + name: Any + start: Any + def __init__(self, name, start) -> None: ... + +class Failure(Exception): + result: Any + reason: Any + def __init__(self, result, reason=...) -> None: ... + def streams_for_display(self): ... + +class UnexpectedExit(Failure): ... + +class CommandTimedOut(Failure): + timeout: Any + def __init__(self, result, timeout) -> None: ... + +class AuthFailure(Failure): + result: Any + prompt: Any + def __init__(self, result, prompt) -> None: ... + +class ParseError(Exception): + context: Any + def __init__(self, msg, context=...) -> None: ... + +class Exit(Exception): + message: Any + def __init__(self, message=..., code=...) -> None: ... + @property + def code(self): ... + +class PlatformError(Exception): ... +class AmbiguousEnvVar(Exception): ... +class UncastableEnvVar(Exception): ... +class UnknownFileType(Exception): ... +class UnpicklableConfigMember(Exception): ... + +class ThreadException(Exception): + exceptions: Any + def __init__(self, exceptions) -> None: ... + +class WatcherError(Exception): ... +class ResponseNotAccepted(WatcherError): ... +class SubprocessPipeError(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/executor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/executor.pyi new file mode 100644 index 00000000..fc9b6c14 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/executor.pyi @@ -0,0 +1,17 @@ +from collections.abc import Iterable +from typing import Any + +from .collection import Collection +from .config import Config +from .parser import ParserContext, ParseResult +from .tasks import Call, Task + +class Executor: + collection: Collection + config: Config + core: ParseResult | None + def __init__(self, collection: Collection, config: Config | None = ..., core: ParseResult | None = ...) -> None: ... + def execute(self, *tasks: str | tuple[str, dict[str, Any]] | ParserContext) -> dict[Task[..., Any], Any]: ... + def normalize(self, tasks: Iterable[str | tuple[str, dict[str, Any]] | ParserContext]): ... + def dedupe(self, calls: Iterable[Call]) -> list[Call]: ... + def expand_calls(self, calls: Iterable[Call | Task[..., Any]]) -> list[Call]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/loader.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/loader.pyi new file mode 100644 index 00000000..b339d5de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/loader.pyi @@ -0,0 +1,15 @@ +from types import ModuleType +from typing import IO, Any + +from . import Config + +class Loader: + config: Config + def __init__(self, config: Config | None = ...) -> None: ... + def find(self, name: str) -> tuple[str, IO[Any], str, tuple[str, str, int]]: ... + def load(self, name: str | None = ...) -> tuple[ModuleType, str]: ... + +class FilesystemLoader(Loader): + def __init__(self, start: str | None = ..., **kwargs: Any) -> None: ... + @property + def start(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/main.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/main.pyi new file mode 100644 index 00000000..d05ea642 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/main.pyi @@ -0,0 +1,3 @@ +from . import Program + +program: Program diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/__init__.pyi new file mode 100644 index 00000000..86a53396 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/__init__.pyi @@ -0,0 +1,3 @@ +from .argument import Argument as Argument +from .context import ParserContext as ParserContext, to_flag as to_flag, translate_underscores as translate_underscores +from .parser import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/argument.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/argument.pyi new file mode 100644 index 00000000..1fd21823 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/argument.pyi @@ -0,0 +1,37 @@ +from typing import Any + +class Argument: + names: Any + kind: Any + raw_value: Any + default: Any + help: Any + positional: Any + optional: Any + incrementable: Any + attr_name: Any + def __init__( + self, + name=..., + names=..., + kind=..., + default=..., + help=..., + positional: bool = ..., + optional: bool = ..., + incrementable: bool = ..., + attr_name=..., + ) -> None: ... + @property + def name(self): ... + @property + def nicknames(self): ... + @property + def takes_value(self): ... + @property + def value(self): ... + @value.setter + def value(self, arg) -> None: ... + def set_value(self, value, cast: bool = ...): ... + @property + def got_value(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/context.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/context.pyi new file mode 100644 index 00000000..80cc56a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/context.pyi @@ -0,0 +1,24 @@ +from typing import Any + +def translate_underscores(name: str) -> str: ... +def to_flag(name: str) -> str: ... 
+def sort_candidate(arg): ... +def flag_key(x): ... + +class ParserContext: + args: Any + positional_args: Any + flags: Any + inverse_flags: Any + name: Any + aliases: Any + def __init__(self, name=..., aliases=..., args=...) -> None: ... + def add_arg(self, *args, **kwargs) -> None: ... + @property + def missing_positional_args(self): ... + @property + def as_kwargs(self): ... + def names_for(self, flag): ... + def help_for(self, flag): ... + def help_tuples(self): ... + def flag_names(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/parser.pyi new file mode 100644 index 00000000..2f836db1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/parser/parser.pyi @@ -0,0 +1,42 @@ +from typing import Any + +from .context import ParserContext + +def is_flag(value: str) -> bool: ... +def is_long_flag(value: str) -> bool: ... + +class Parser: + initial: Any + contexts: Any + ignore_unknown: Any + def __init__(self, contexts=..., initial=..., ignore_unknown: bool = ...) -> None: ... + def parse_argv(self, argv): ... + +class ParseMachine: + initial_state: str + def changing_state(self, from_, to) -> None: ... + ignore_unknown: Any + initial: Any + flag: Any + flag_got_value: bool + result: Any + contexts: Any + def __init__(self, initial, contexts, ignore_unknown) -> None: ... + @property + def waiting_for_flag_value(self): ... + def handle(self, token) -> None: ... + def store_only(self, token) -> None: ... + def complete_context(self) -> None: ... + context: Any + def switch_to_context(self, name) -> None: ... + def complete_flag(self) -> None: ... + def check_ambiguity(self, value): ... + def switch_to_flag(self, flag, inverse: bool = ...) -> None: ... + def see_value(self, value) -> None: ... + def see_positional_arg(self, value) -> None: ... + def error(self, msg) -> None: ... + +class ParseResult(list[ParserContext]): + remainder: str + unparsed: Any + def __init__(self, *args, **kwargs) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/program.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/program.pyi new file mode 100644 index 00000000..28045823 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/program.pyi @@ -0,0 +1,71 @@ +from typing import Any + +class Program: + def core_args(self): ... + def task_args(self): ... + leading_indent_width: int + leading_indent: str + indent_width: int + indent: str + col_padding: int + version: Any + namespace: Any + argv: Any + loader_class: Any + executor_class: Any + config_class: Any + def __init__( + self, + version=..., + namespace=..., + name=..., + binary=..., + loader_class=..., + executor_class=..., + config_class=..., + binary_names=..., + ) -> None: ... + config: Any + def create_config(self) -> None: ... + def update_config(self, merge: bool = ...) -> None: ... + def run(self, argv=..., exit: bool = ...) -> None: ... + def parse_core(self, argv) -> None: ... + collection: Any + list_root: Any + list_depth: Any + list_format: str + scoped_collection: Any + def parse_collection(self) -> None: ... + def parse_cleanup(self) -> None: ... + def no_tasks_given(self) -> None: ... + def execute(self) -> None: ... 
+ def normalize_argv(self, argv) -> None: ... + @property + def name(self): ... + @property + def called_as(self): ... + @property + def binary(self): ... + @property + def binary_names(self): ... + @property + def args(self): ... + @property + def initial_context(self): ... + def print_version(self) -> None: ... + def print_help(self) -> None: ... + core: Any + def parse_core_args(self) -> None: ... + def load_collection(self) -> None: ... + parser: Any + core_via_tasks: Any + tasks: Any + def parse_tasks(self) -> None: ... + def print_task_help(self, name) -> None: ... + def list_tasks(self) -> None: ... + def list_flat(self) -> None: ... + def list_nested(self) -> None: ... + def list_json(self) -> None: ... + def task_list_opener(self, extra: str = ...): ... + def display_with_columns(self, pairs, extra: str = ...) -> None: ... + def print_columns(self, tuples) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/runners.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/runners.pyi new file mode 100644 index 00000000..755b0ea3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/runners.pyi @@ -0,0 +1,200 @@ +from collections.abc import Iterable, Mapping +from typing import Any, TextIO, overload +from typing_extensions import Literal, TypeAlias + +from .watchers import StreamWatcher + +_Hide: TypeAlias = Literal[None, True, False, "out", "stdout", "err", "stderr", "both"] + +class Runner: + read_chunk_size: int + input_sleep: float + context: Any + program_finished: Any + warned_about_pty_fallback: bool + watchers: Any + def __init__(self, context) -> None: ... + # If disown is True (default=False), returns None + @overload + def run( + self, + command: str, + *, + asynchronous: bool = ..., + disown: Literal[True], + dry: bool = ..., + echo: bool = ..., + echo_format: str = ..., + echo_stdin: bool | None = ..., + encoding: str = ..., + err_stream: TextIO | None = ..., + env: Mapping[str, str] = ..., + fallback: bool = ..., + hide: _Hide = ..., + in_stream: TextIO | None | bool = ..., + out_stream: TextIO | None = ..., + pty: bool = ..., + replace_env: bool = ..., + shell: str = ..., + timeout: float | None = ..., + warn: bool = ..., + watchers: Iterable[StreamWatcher] = ..., + ) -> None: ... + # If disown is False (the default), and asynchronous is True (default=False) returns Promise + @overload + def run( + self, + command: str, + *, + asynchronous: Literal[True], + disown: Literal[False] = ..., + dry: bool = ..., + echo: bool = ..., + echo_format: str = ..., + echo_stdin: bool | None = ..., + encoding: str = ..., + err_stream: TextIO | None = ..., + env: Mapping[str, str] = ..., + fallback: bool = ..., + hide: _Hide = ..., + in_stream: TextIO | None | bool = ..., + out_stream: TextIO | None = ..., + pty: bool = ..., + replace_env: bool = ..., + shell: str = ..., + timeout: float | None = ..., + warn: bool = ..., + watchers: Iterable[StreamWatcher] = ..., + ) -> Promise: ... 
+ # If disown and asynchronous are both False (the defaults), returns Result + @overload + def run( + self, + command: str, + *, + asynchronous: Literal[False] = ..., + disown: Literal[False] = ..., + dry: bool = ..., + echo: bool = ..., + echo_format: str = ..., + echo_stdin: bool | None = ..., + encoding: str = ..., + err_stream: TextIO | None = ..., + env: Mapping[str, str] = ..., + fallback: bool = ..., + hide: _Hide = ..., + in_stream: TextIO | None | bool = ..., + out_stream: TextIO | None = ..., + pty: bool = ..., + replace_env: bool = ..., + shell: str = ..., + timeout: float | None = ..., + warn: bool = ..., + watchers: Iterable[StreamWatcher] = ..., + ) -> Result: ... + # Fallback overload: return Any + @overload + def run( + self, + command: str, + *, + asynchronous: bool, + disown: bool, + dry: bool = ..., + echo: bool = ..., + echo_format: str = ..., + echo_stdin: bool | None = ..., + encoding: str = ..., + err_stream: TextIO | None = ..., + env: Mapping[str, str] = ..., + fallback: bool = ..., + hide: _Hide = ..., + in_stream: TextIO | None | bool = ..., + out_stream: TextIO | None = ..., + pty: bool = ..., + replace_env: bool = ..., + shell: str = ..., + timeout: float | None = ..., + warn: bool = ..., + watchers: Iterable[StreamWatcher] = ..., + ) -> Any: ... + def echo(self, command) -> None: ... + def make_promise(self): ... + def create_io_threads(self): ... + def generate_result(self, **kwargs): ... + def read_proc_output(self, reader) -> None: ... + def write_our_output(self, stream, string) -> None: ... + def handle_stdout(self, buffer_, hide, output) -> None: ... + def handle_stderr(self, buffer_, hide, output) -> None: ... + def read_our_stdin(self, input_): ... + def handle_stdin(self, input_, output, echo) -> None: ... + def should_echo_stdin(self, input_, output): ... + def respond(self, buffer_) -> None: ... + def generate_env(self, env, replace_env): ... + def should_use_pty(self, pty, fallback): ... + @property + def has_dead_threads(self): ... + def wait(self) -> None: ... + def write_proc_stdin(self, data) -> None: ... + def decode(self, data): ... + @property + def process_is_finished(self) -> None: ... + def start(self, command, shell, env) -> None: ... + def start_timer(self, timeout) -> None: ... + def read_proc_stdout(self, num_bytes) -> None: ... + def read_proc_stderr(self, num_bytes) -> None: ... + def close_proc_stdin(self) -> None: ... + def default_encoding(self): ... + def send_interrupt(self, interrupt) -> None: ... + def returncode(self) -> None: ... + def stop(self) -> None: ... + def kill(self) -> None: ... + @property + def timed_out(self): ... + +class Local(Runner): + status: Any + def __init__(self, context) -> None: ... + def should_use_pty(self, pty: bool = ..., fallback: bool = ...): ... + process: Any + +class Result: + stdout: str + stderr: str + encoding: str + command: str + shell: Any + env: dict[str, Any] + exited: int + pty: bool + hide: tuple[Literal["stdout", "stderr"], ...] + def __init__( + self, + stdout: str = ..., + stderr: str = ..., + encoding: str | None = ..., + command: str = ..., + shell: str = ..., + env=..., + exited: int = ..., + pty: bool = ..., + hide: tuple[Literal["stdout", "stderr"], ...] = ..., + ) -> None: ... + @property + def return_code(self) -> int: ... + def __bool__(self) -> bool: ... + @property + def ok(self) -> bool: ... + @property + def failed(self) -> bool: ... + def tail(self, stream: Literal["stderr", "stdout"], count: int = ...) -> str: ... 
+ +class Promise(Result): + runner: Any + def __init__(self, runner) -> None: ... + def join(self): ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_value, traceback) -> None: ... + +def normalize_hide(val, out_stream=..., err_stream=...): ... +def default_encoding() -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/tasks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/tasks.pyi new file mode 100644 index 00000000..0240aca9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/tasks.pyi @@ -0,0 +1,115 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Iterable +from typing import Any, Generic, TypeVar, overload +from typing_extensions import ParamSpec, Self + +from .config import Config +from .context import Context +from .parser import Argument + +_P = ParamSpec("_P") +_R_co = TypeVar("_R_co", covariant=True) +_TaskT = TypeVar("_TaskT", bound=Task[..., Any]) + +class Task(Generic[_P, _R_co]): + body: Callable[_P, _R_co] + __doc__: str | None + __name__: str + __module__: str + aliases: tuple[str, ...] + is_default: bool + positional: Iterable[str] + optional: Iterable[str] + iterable: Iterable[str] + incrementable: Iterable[str] + auto_shortflags: bool + help: dict[str, str] + pre: Iterable[Task[..., Any] | Call] + post: Iterable[Task[..., Any] | Call] + times_called: int + autoprint: bool + def __init__( + self, + body: Callable[..., Any], + name: str | None = ..., + aliases: tuple[str, ...] = ..., + positional: Iterable[str] | None = ..., + optional: Iterable[str] = ..., + default: bool = ..., + auto_shortflags: bool = ..., + help: dict[str, str] | None = ..., + pre: Iterable[Task[..., Any] | Call] | None = ..., + post: Iterable[Task[..., Any] | Call] | None = ..., + autoprint: bool = ..., + iterable: Iterable[str] | None = ..., + incrementable: Iterable[str] | None = ..., + ) -> None: ... + @property + def name(self): ... + def __eq__(self, other: Task[Incomplete, Incomplete]) -> bool: ... # type: ignore[override] + def __hash__(self) -> int: ... + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: ... + @property + def called(self) -> bool: ... + def argspec(self, body): ... + def fill_implicit_positionals(self, positional: Iterable[str] | None) -> Iterable[str]: ... + def arg_opts(self, name: str, default: Any, taken_names: Iterable[str]) -> dict[str, Any]: ... + def get_arguments(self, ignore_unknown_help: bool | None = ...) -> list[Argument]: ... + +@overload +def task( + *args: Task[..., Any] | Call, + name: str | None = ..., + aliases: tuple[str, ...] = ..., + positional: Iterable[str] | None = ..., + optional: Iterable[str] = ..., + default: bool = ..., + auto_shortflags: bool = ..., + help: dict[str, str] | None = ..., + pre: list[Task[..., Any] | Call] | None = ..., + post: list[Task[..., Any] | Call] | None = ..., + autoprint: bool = ..., + iterable: Iterable[str] | None = ..., + incrementable: Iterable[str] | None = ..., +) -> Callable[[Callable[_P, _R_co]], Task[_P, _R_co]]: ... +@overload +def task( + *args: Task[..., Any] | Call, + name: str | None = ..., + aliases: tuple[str, ...] 
= ..., + positional: Iterable[str] | None = ..., + optional: Iterable[str] = ..., + default: bool = ..., + auto_shortflags: bool = ..., + help: dict[str, str] | None = ..., + pre: list[Task[..., Any] | Call] | None = ..., + post: list[Task[..., Any] | Call] | None = ..., + autoprint: bool = ..., + iterable: Iterable[str] | None = ..., + incrementable: Iterable[str] | None = ..., + klass: type[_TaskT], +) -> Callable[[Callable[..., Any]], _TaskT]: ... +@overload +def task(__func: Callable[_P, _R_co]) -> Task[_P, _R_co]: ... + +class Call: + task: Task[..., Any] + called_as: str | None + args: tuple[Any, ...] + kwargs: dict[str, Any] + def __init__( + self, + task: Task[..., Any], + called_as: str | None = ..., + args: tuple[Any, ...] | None = ..., + kwargs: dict[str, Any] | None = ..., + ) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __deepcopy__(self, memo: Any) -> Self: ... + def __eq__(self, other: Call) -> bool: ... # type: ignore[override] + def make_context(self, config: Config) -> Context: ... + def clone_data(self): ... + # TODO use overload + def clone(self, into: type[Call] | None = ..., with_: dict[str, Any] | None = ...) -> Call: ... + +def call(task: Task[..., Any], *args: Any, **kwargs: Any) -> Call: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/terminals.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/terminals.pyi new file mode 100644 index 00000000..61195404 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/terminals.pyi @@ -0,0 +1,12 @@ +from contextlib import AbstractContextManager +from io import TextIOWrapper +from typing import Any + +WINDOWS: bool + +def pty_size() -> tuple[int, int]: ... +def stdin_is_foregrounded_tty(stream: Any) -> bool: ... +def cbreak_already_set(stream: TextIOWrapper) -> bool: ... +def character_buffered(stream: TextIOWrapper) -> AbstractContextManager[None]: ... +def ready_for_reading(input_: TextIOWrapper) -> bool: ... +def bytes_to_read(input_: TextIOWrapper) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/util.pyi new file mode 100644 index 00000000..bfd23820 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/util.pyi @@ -0,0 +1,39 @@ +import threading +from collections.abc import Callable, Iterable, Mapping +from contextlib import AbstractContextManager +from logging import Logger +from types import TracebackType +from typing import Any, NamedTuple + +LOG_FORMAT: str + +def enable_logging() -> None: ... + +log: Logger + +def task_name_sort_key(name: str) -> tuple[list[str], str]: ... +def cd(where: str) -> AbstractContextManager[None]: ... +def has_fileno(stream) -> bool: ... +def isatty(stream) -> bool: ... +def helpline(obj: Callable[..., object]) -> str | None: ... + +class ExceptionHandlingThread(threading.Thread): + def __init__( + self, + *, + group: None = ..., + target: Callable[..., object] | None = ..., + name: str | None = ..., + args: Iterable[Any] = ..., + kwargs: Mapping[str, Any] | None = ..., + daemon: bool | None = ..., + ) -> None: ... + def exception(self) -> ExceptionWrapper | None: ... + @property + def is_dead(self) -> bool: ... 
+ +class ExceptionWrapper(NamedTuple): + kwargs: Any + type: type[BaseException] + value: BaseException + traceback: TracebackType diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/watchers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/watchers.pyi new file mode 100644 index 00000000..35e3cb57 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/invoke/invoke/watchers.pyi @@ -0,0 +1,20 @@ +import threading +from collections.abc import Iterable + +class StreamWatcher(threading.local): + def submit(self, stream) -> Iterable[str]: ... + +class Responder(StreamWatcher): + pattern: str + response: str + index: int + def __init__(self, pattern: str, response: str) -> None: ... + def pattern_matches(self, stream: str, pattern: str, index_attr: str) -> Iterable[str]: ... + def submit(self, stream: str) -> Iterable[str]: ... + +class FailingResponder(Responder): + sentinel: str + failure_index: int + tried: bool + def __init__(self, pattern: str, response: str, sentinel: str) -> None: ... + def submit(self, stream: str) -> Iterable[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/METADATA.toml new file mode 100644 index 00000000..f3e83f9c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/METADATA.toml @@ -0,0 +1 @@ +version = "1.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/__init__.pyi new file mode 100644 index 00000000..9406a3dc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/__init__.pyi @@ -0,0 +1,7 @@ +from typing import Any + +from jmespath import parser as parser +from jmespath.visitor import Options as Options + +def compile(expression: str) -> parser.ParsedResult: ... +def search(expression: str, data: Any, options: Options | None = ...) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/ast.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/ast.pyi new file mode 100644 index 00000000..77cc988f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/ast.pyi @@ -0,0 +1,22 @@ +def comparator(name, first, second): ... +def current_node(): ... +def expref(expression): ... +def function_expression(name, args): ... +def field(name): ... +def filter_projection(left, right, comparator): ... +def flatten(node): ... +def identity(): ... +def index(index): ... +def index_expression(children): ... +def key_val_pair(key_name, node): ... +def literal(literal_value): ... +def multi_select_dict(nodes): ... +def multi_select_list(nodes): ... +def or_expression(left, right): ... +def and_expression(left, right): ... +def not_expression(expr): ... +def pipe(left, right): ... +def projection(left, right): ... +def subexpression(children): ... +def slice(start, end, step): ... +def value_projection(left, right): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/compat.pyi new file mode 100644 index 00000000..fdafd152 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/compat.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete +from collections.abc import Generator +from itertools import zip_longest as zip_longest + +text_type = str +string_type = str + +def with_str_method(cls): ... +def with_repr_method(cls): ... +def get_methods(cls) -> Generator[Incomplete, None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/exceptions.pyi new file mode 100644 index 00000000..e09d9425 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/exceptions.pyi @@ -0,0 +1,45 @@ +from collections.abc import Sequence +from typing import Any + +class JMESPathError(ValueError): ... + +class ParseError(JMESPathError): + lex_position: int + token_value: str + token_type: str + msg: str + expression: str | None + def __init__(self, lex_position: int, token_value: str, token_type: str, msg: str = ...) -> None: ... + +class IncompleteExpressionError(ParseError): + # When ParseError is used directly, the token always have a non-null value and type + token_value: str | None # type: ignore[assignment] + token_type: str | None # type: ignore[assignment] + expression: str + def set_expression(self, expression: str) -> None: ... + +class LexerError(ParseError): + lexer_position: int + lexer_value: str + message: str + def __init__(self, lexer_position: int, lexer_value: str, message: str, expression: str | None = ...) -> None: ... + +class ArityError(ParseError): + expected_arity: int + actual_arity: int + function_name: str + def __init__(self, expected, actual, name) -> None: ... + +class VariadictArityError(ArityError): ... + +class JMESPathTypeError(JMESPathError): + function_name: str + current_value: Any + actual_type: str + expected_types: Sequence[str] + def __init__(self, function_name: str, current_value: Any, actual_type: str, expected_types: Sequence[str]) -> None: ... + +class EmptyExpressionError(JMESPathError): + def __init__(self) -> None: ... + +class UnknownFunctionError(JMESPathError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/functions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/functions.pyi new file mode 100644 index 00000000..836f80e3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/functions.pyi @@ -0,0 +1,22 @@ +from collections.abc import Callable, Iterable +from typing import Any, TypeVar +from typing_extensions import NotRequired, TypedDict + +TYPES_MAP: dict[str, str] +REVERSE_TYPES_MAP: dict[str, tuple[str, ...]] + +class _Signature(TypedDict): + types: list[str] + variadic: NotRequired[bool] + +_F = TypeVar("_F", bound=Callable[..., Any]) + +def signature(*arguments: _Signature) -> Callable[[_F], _F]: ... + +class FunctionRegistry(type): + def __init__(cls, name, bases, attrs) -> None: ... 
+ +class Functions(metaclass=FunctionRegistry): + FUNCTION_TABLE: Any + # resolved_args and return value are the *args and return of a function called by name + def call_function(self, function_name: str, resolved_args: Iterable[Any]) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/lexer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/lexer.pyi new file mode 100644 index 00000000..9e44bf80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/lexer.pyi @@ -0,0 +1,19 @@ +from collections.abc import Iterator +from typing import ClassVar +from typing_extensions import TypedDict + +from jmespath.exceptions import EmptyExpressionError as EmptyExpressionError, LexerError as LexerError + +class _LexerTokenizeResult(TypedDict): + type: str + value: str + start: int + end: int + +class Lexer: + START_IDENTIFIER: ClassVar[set[str]] + VALID_IDENTIFIER: ClassVar[set[str]] + VALID_NUMBER: ClassVar[set[str]] + WHITESPACE: ClassVar[set[str]] + SIMPLE_TOKENS: ClassVar[dict[str, str]] + def tokenize(self, expression: str) -> Iterator[_LexerTokenizeResult]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/parser.pyi new file mode 100644 index 00000000..cc75ceed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/parser.pyi @@ -0,0 +1,19 @@ +from collections.abc import Iterator +from typing import Any, ClassVar + +from jmespath.lexer import _LexerTokenizeResult +from jmespath.visitor import Options, _TreeNode + +class Parser: + BINDING_POWER: ClassVar[dict[str, int]] + tokenizer: Iterator[_LexerTokenizeResult] | None + def __init__(self, lookahead: int = ...) -> None: ... + def parse(self, expression: str) -> ParsedResult: ... + @classmethod + def purge(cls) -> None: ... + +class ParsedResult: + expression: str + parsed: _TreeNode + def __init__(self, expression: str, parsed: _TreeNode) -> None: ... + def search(self, value: Any, options: Options | None = ...) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/visitor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/visitor.pyi new file mode 100644 index 00000000..4189200f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jmespath/jmespath/visitor.pyi @@ -0,0 +1,60 @@ +from collections.abc import Callable, MutableMapping +from typing import Any, ClassVar, NoReturn +from typing_extensions import TypedDict + +from jmespath.functions import Functions + +class Options: + dict_cls: Callable[[], MutableMapping[Any, Any]] | None + custom_functions: Functions | None + def __init__( + self, dict_cls: Callable[[], MutableMapping[Any, Any]] | None = ..., custom_functions: Functions | None = ... + ) -> None: ... + +class _Expression: + expression: str + interpreter: Visitor + def __init__(self, expression: str, interpreter: Visitor) -> None: ... + def visit(self, node: _TreeNode, *args, **kwargs) -> Any: ... + +class Visitor: + def __init__(self) -> None: ... + def visit(self, node: _TreeNode, *args, **kwargs) -> Any: ... + def default_visit(self, node: _TreeNode, *args, **kwargs) -> NoReturn: ... 
+ +class _TreeNode(TypedDict): + type: str + value: Any + children: list[_TreeNode] + +class TreeInterpreter(Visitor): + COMPARATOR_FUNC: ClassVar[dict[str, Callable[[Any, Any], Any]]] + MAP_TYPE: ClassVar[Callable[[], MutableMapping[Any, Any]]] + def __init__(self, options: Options | None = ...) -> None: ... + def default_visit(self, node: _TreeNode, *args, **kwargs) -> NoReturn: ... + def visit_subexpression(self, node: _TreeNode, value: Any) -> Any: ... + def visit_field(self, node: _TreeNode, value: Any) -> Any: ... + def visit_comparator(self, node: _TreeNode, value: Any) -> Any: ... + def visit_current(self, node: _TreeNode, value: Any) -> Any: ... + def visit_expref(self, node: _TreeNode, value: Any) -> Any: ... + def visit_function_expression(self, node: _TreeNode, value: Any) -> Any: ... + def visit_filter_projection(self, node: _TreeNode, value: Any) -> Any: ... + def visit_flatten(self, node: _TreeNode, value: Any) -> Any: ... + def visit_identity(self, node: _TreeNode, value: Any) -> Any: ... + def visit_index(self, node: _TreeNode, value: Any) -> Any: ... + def visit_index_expression(self, node: _TreeNode, value: Any) -> Any: ... + def visit_slice(self, node: _TreeNode, value: Any) -> Any: ... + def visit_key_val_pair(self, node: _TreeNode, value: Any) -> Any: ... + def visit_literal(self, node: _TreeNode, value: Any) -> Any: ... + def visit_multi_select_dict(self, node: _TreeNode, value: Any) -> Any: ... + def visit_multi_select_list(self, node: _TreeNode, value: Any) -> Any: ... + def visit_or_expression(self, node: _TreeNode, value: Any) -> Any: ... + def visit_and_expression(self, node: _TreeNode, value: Any) -> Any: ... + def visit_not_expression(self, node: _TreeNode, value: Any) -> Any: ... + def visit_pipe(self, node: _TreeNode, value: Any) -> Any: ... + def visit_projection(self, node: _TreeNode, value: Any) -> Any: ... + def visit_value_projection(self, node: _TreeNode, value: Any) -> Any: ... + +class GraphvizVisitor(Visitor): + def __init__(self) -> None: ... + def visit(self, node: _TreeNode, *args, **kwargs) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/METADATA.toml new file mode 100644 index 00000000..d5dbdb97 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/METADATA.toml @@ -0,0 +1,5 @@ +version = "4.17.*" + +[tool.stubtest] +ignore_missing_stub = true +extras = ["format"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/__init__.pyi new file mode 100644 index 00000000..dcb925bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/__init__.pyi @@ -0,0 +1,28 @@ +from jsonschema._format import ( + FormatChecker as FormatChecker, + draft3_format_checker as draft3_format_checker, + draft4_format_checker as draft4_format_checker, + draft6_format_checker as draft6_format_checker, + draft7_format_checker as draft7_format_checker, + draft201909_format_checker as draft201909_format_checker, + draft202012_format_checker as draft202012_format_checker, +) +from jsonschema._types import TypeChecker as TypeChecker +from jsonschema.exceptions import ( + ErrorTree as ErrorTree, + FormatError as FormatError, + RefResolutionError as RefResolutionError, + SchemaError as SchemaError, + ValidationError as ValidationError, +) +from jsonschema.protocols import Validator as Validator +from jsonschema.validators import ( + Draft3Validator as Draft3Validator, + Draft4Validator as Draft4Validator, + Draft6Validator as Draft6Validator, + Draft7Validator as Draft7Validator, + Draft201909Validator as Draft201909Validator, + Draft202012Validator as Draft202012Validator, + RefResolver as RefResolver, + validate as validate, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_format.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_format.pyi new file mode 100644 index 00000000..9c2b12ef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_format.pyi @@ -0,0 +1,50 @@ +from collections.abc import Callable, Iterable +from typing import TypeVar +from typing_extensions import TypeAlias + +_FormatCheckCallable: TypeAlias = Callable[[object], bool] +_F = TypeVar("_F", bound=_FormatCheckCallable) +_RaisesType: TypeAlias = type[Exception] | tuple[type[Exception], ...] + +class FormatChecker: + checkers: dict[str, tuple[_FormatCheckCallable, _RaisesType]] + + def __init__(self, formats: Iterable[str] | None = ...) -> None: ... + def checks(self, format: str, raises: _RaisesType = ...) -> Callable[[_F], _F]: ... + @classmethod + def cls_checks(cls, format: str, raises: _RaisesType = ...) -> Callable[[_F], _F]: ... + def check(self, instance: object, format: str) -> None: ... + def conforms(self, instance: object, format: str) -> bool: ... + +draft3_format_checker: FormatChecker +draft4_format_checker: FormatChecker +draft6_format_checker: FormatChecker +draft7_format_checker: FormatChecker +draft201909_format_checker: FormatChecker +draft202012_format_checker: FormatChecker + +def is_email(instance: object) -> bool: ... +def is_ipv4(instance: object) -> bool: ... +def is_ipv6(instance: object) -> bool: ... 
+ +# is_host_name is only defined if fqdn is installed. +def is_host_name(instance: object) -> bool: ... +def is_idn_host_name(instance: object) -> bool: ... +def is_uri(instance: object) -> bool: ... +def is_uri_reference(instance: object) -> bool: ... +def is_iri(instance: object) -> bool: ... +def is_iri_reference(instance: object) -> bool: ... +def is_datetime(instance: object) -> bool: ... +def is_time(instance: object) -> bool: ... +def is_regex(instance: object) -> bool: ... +def is_date(instance: object) -> bool: ... +def is_draft3_time(instance: object) -> bool: ... +def is_css_color_code(instance: object) -> bool: ... +def is_css21_color(instance: object) -> bool: ... +def is_json_pointer(instance: object) -> bool: ... +def is_relative_json_pointer(instance: object) -> bool: ... +def is_uri_template(instance: object) -> bool: ... + +# is_duration is only defined if isoduration is installed. +def is_duration(instance: object) -> bool: ... +def is_uuid(instance: object) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_legacy_validators.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_legacy_validators.pyi new file mode 100644 index 00000000..783d3e5b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_legacy_validators.pyi @@ -0,0 +1,16 @@ +from collections.abc import ItemsView +from typing import Any + +def ignore_ref_siblings(schema) -> list[tuple[str, Any]] | ItemsView[str, Any]: ... +def dependencies_draft3(validator, dependencies, instance, schema) -> None: ... +def dependencies_draft4_draft6_draft7(validator, dependencies, instance, schema) -> None: ... +def disallow_draft3(validator, disallow, instance, schema) -> None: ... +def extends_draft3(validator, extends, instance, schema) -> None: ... +def items_draft3_draft4(validator, items, instance, schema) -> None: ... +def items_draft6_draft7_draft201909(validator, items, instance, schema) -> None: ... +def minimum_draft3_draft4(validator, minimum, instance, schema) -> None: ... +def maximum_draft3_draft4(validator, maximum, instance, schema) -> None: ... +def properties_draft3(validator, properties, instance, schema) -> None: ... +def type_draft3(validator, types, instance, schema) -> None: ... +def contains_draft6_draft7(validator, contains, instance, schema) -> None: ... +def recursiveRef(validator, recursiveRef, instance, schema) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_types.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_types.pyi new file mode 100644 index 00000000..2a3641d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_types.pyi @@ -0,0 +1,24 @@ +from collections.abc import Callable, Iterable, Mapping + +def is_array(checker, instance) -> bool: ... +def is_bool(checker, instance) -> bool: ... +def is_integer(checker, instance) -> bool: ... +def is_null(checker, instance) -> bool: ... +def is_number(checker, instance) -> bool: ... +def is_object(checker, instance) -> bool: ... +def is_string(checker, instance) -> bool: ... +def is_any(checker, instance) -> bool: ... + +class TypeChecker: + def __init__(self, type_checkers: Mapping[str, Callable[[object], bool]] = ...) -> None: ... 
+ def is_type(self, instance, type: str) -> bool: ... + def redefine(self, type: str, fn: Callable[..., bool]) -> TypeChecker: ... + def redefine_many(self, definitions=...) -> TypeChecker: ... + def remove(self, *types: Iterable[str]) -> TypeChecker: ... + +draft3_type_checker: TypeChecker +draft4_type_checker: TypeChecker +draft6_type_checker: TypeChecker +draft7_type_checker: TypeChecker +draft201909_type_checker: TypeChecker +draft202012_type_checker: TypeChecker diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_utils.pyi new file mode 100644 index 00000000..7e5720e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_utils.pyi @@ -0,0 +1,26 @@ +from _typeshed import SupportsKeysAndGetItem +from collections.abc import Generator, Iterable, Iterator, Mapping, MutableMapping, Sized +from typing import Any + +class URIDict(MutableMapping[str, str]): + def normalize(self, uri: str) -> str: ... + store: dict[str, str] + def __init__(self, __m: SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]], **kwargs: str) -> None: ... + def __getitem__(self, uri: str) -> str: ... + def __setitem__(self, uri: str, value: str) -> None: ... + def __delitem__(self, uri: str) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + +class Unset: ... + +def load_schema(name): ... +def format_as_index(container: str, indices) -> str: ... +def find_additional_properties(instance: Iterable[Any], schema: Mapping[Any, Any]) -> Generator[Any, None, None]: ... +def extras_msg(extras: Iterable[Any] | Sized) -> str: ... +def ensure_list(thing) -> list[Any]: ... +def equal(one, two) -> bool: ... +def unbool(element, true=..., false=...): ... +def uniq(container) -> bool: ... +def find_evaluated_item_indexes_by_schema(validator, instance, schema) -> list[Any]: ... +def find_evaluated_property_keys_by_schema(validator, instance, schema) -> list[Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_validators.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_validators.pyi new file mode 100644 index 00000000..f6daf126 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/_validators.pyi @@ -0,0 +1,37 @@ +def patternProperties(validator, patternProperties, instance, schema) -> None: ... +def propertyNames(validator, propertyNames, instance, schema) -> None: ... +def additionalProperties(validator, aP, instance, schema) -> None: ... +def items(validator, items, instance, schema) -> None: ... +def additionalItems(validator, aI, instance, schema) -> None: ... +def const(validator, const, instance, schema) -> None: ... +def contains(validator, contains, instance, schema) -> None: ... +def exclusiveMinimum(validator, minimum, instance, schema) -> None: ... +def exclusiveMaximum(validator, maximum, instance, schema) -> None: ... +def minimum(validator, minimum, instance, schema) -> None: ... +def maximum(validator, maximum, instance, schema) -> None: ... +def multipleOf(validator, dB, instance, schema) -> None: ... +def minItems(validator, mI, instance, schema) -> None: ... +def maxItems(validator, mI, instance, schema) -> None: ... 
+def uniqueItems(validator, uI, instance, schema) -> None: ... +def pattern(validator, patrn, instance, schema) -> None: ... +def format(validator, format, instance, schema) -> None: ... +def minLength(validator, mL, instance, schema) -> None: ... +def maxLength(validator, mL, instance, schema) -> None: ... +def dependentRequired(validator, dependentRequired, instance, schema) -> None: ... +def dependentSchemas(validator, dependentSchemas, instance, schema) -> None: ... +def enum(validator, enums, instance, schema) -> None: ... +def ref(validator, ref, instance, schema) -> None: ... +def dynamicRef(validator, dynamicRef, instance, schema) -> None: ... +def type(validator, types, instance, schema) -> None: ... +def properties(validator, properties, instance, schema) -> None: ... +def required(validator, required, instance, schema) -> None: ... +def minProperties(validator, mP, instance, schema) -> None: ... +def maxProperties(validator, mP, instance, schema) -> None: ... +def allOf(validator, allOf, instance, schema) -> None: ... +def anyOf(validator, anyOf, instance, schema) -> None: ... +def oneOf(validator, oneOf, instance, schema) -> None: ... +def not_(validator, not_schema, instance, schema) -> None: ... +def if_(validator, if_schema, instance, schema) -> None: ... +def unevaluatedItems(validator, unevaluatedItems, instance, schema) -> None: ... +def unevaluatedProperties(validator, unevaluatedProperties, instance, schema) -> None: ... +def prefixItems(validator, prefixItems, instance, schema) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/cli.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/cli.pyi new file mode 100644 index 00000000..85c3bd71 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/cli.pyi @@ -0,0 +1,32 @@ +from typing import Any + +class _CannotLoadFile(Exception): ... + +class _Outputter: + def __init__(self, formatter, stdout, stderr): ... + @classmethod + def from_arguments(cls, arguments, stdout, stderr): ... + def load(self, path): ... + def filenotfound_error(self, **kwargs) -> None: ... + def parsing_error(self, **kwargs) -> None: ... + def validation_error(self, **kwargs) -> None: ... + def validation_success(self, **kwargs) -> None: ... + +class _PrettyFormatter: + def filenotfound_error(self, path, exc_info): ... + def parsing_error(self, path, exc_info): ... + def validation_error(self, instance_path, error): ... + def validation_success(self, instance_path): ... + +class _PlainFormatter: + def __init__(self, error_format): ... + def filenotfound_error(self, path, exc_info): ... + def parsing_error(self, path, exc_info): ... + def validation_error(self, instance_path, error): ... + def validation_success(self, instance_path): ... + +parser: Any + +def parse_args(args): ... +def main(args=...) -> None: ... +def run(arguments, stdout=..., stderr=..., stdin=...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/exceptions.pyi new file mode 100644 index 00000000..a61828da --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/exceptions.pyi @@ -0,0 +1,90 @@ +from _typeshed import Incomplete, SupportsRichComparison +from collections import deque +from collections.abc import Callable, Container, Iterable, Sequence +from typing import Any +from typing_extensions import Self, TypeAlias + +from jsonschema import _utils, protocols +from jsonschema._types import TypeChecker + +_RelevanceFuncType: TypeAlias = Callable[[ValidationError], SupportsRichComparison] + +WEAK_MATCHES: frozenset[str] +STRONG_MATCHES: frozenset[str] + +class _Error(Exception): + message: str + path: deque[str | int] + relative_path: deque[str | int] + schema_path: deque[str | int] + relative_schema_path: deque[str | int] + context: list[ValidationError] | None + cause: Exception | None + validator: protocols.Validator | None + validator_value: Any + instance: Any + schema: Any + parent: _Error | None + def __init__( + self, + message: str, + validator: _utils.Unset | None | protocols.Validator = ..., + path: Sequence[str | int] = ..., + cause: Incomplete | None = ..., + context: Sequence[ValidationError] = ..., + validator_value=..., + instance: Any = ..., + schema: Any = ..., + schema_path: Sequence[str | int] = ..., + parent: _Error | None = ..., + type_checker: _utils.Unset | TypeChecker = ..., + ) -> None: ... + @classmethod + def create_from(cls, other: _Error) -> Self: ... + @property + def absolute_path(self) -> Sequence[str | int]: ... + @property + def absolute_schema_path(self) -> Sequence[str | int]: ... + @property + def json_path(self) -> str: ... + # TODO: this type could be made more precise using TypedDict to + # enumerate the types of the members + def _contents(self) -> dict[str, Any]: ... + +class ValidationError(_Error): ... +class SchemaError(_Error): ... + +class RefResolutionError(Exception): + def __init__(self, cause: str) -> None: ... + +class UndefinedTypeCheck(Exception): + type: Any + def __init__(self, type) -> None: ... + +class UnknownType(Exception): + type: Any + instance: Any + schema: Any + def __init__(self, type, instance, schema) -> None: ... + +class FormatError(Exception): + message: Any + cause: Any + def __init__(self, message, cause: Incomplete | None = ...) -> None: ... + +class ErrorTree: + errors: Any + def __init__(self, errors=...) -> None: ... + def __contains__(self, index): ... + def __getitem__(self, index): ... + def __setitem__(self, index, value) -> None: ... + def __iter__(self): ... + def __len__(self) -> int: ... + @property + def total_errors(self): ... + +def by_relevance(weak: Container[str] = ..., strong: Container[str] = ...) -> _RelevanceFuncType: ... + +relevance: _RelevanceFuncType + +def best_match(errors: Iterable[ValidationError], key: _RelevanceFuncType = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/protocols.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/protocols.pyi new file mode 100644 index 00000000..7a3c6e14 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/protocols.pyi @@ -0,0 +1,27 @@ +from collections.abc import Iterator, Mapping, Sequence +from typing import Any, ClassVar, Protocol +from typing_extensions import TypeAlias + +from jsonschema._format import FormatChecker +from jsonschema._types import TypeChecker +from jsonschema.exceptions import ValidationError +from jsonschema.validators import RefResolver + +_JsonParameter: TypeAlias = str | int | float | bool | None | Mapping[str, _JsonParameter] | Sequence[_JsonParameter] + +class Validator(Protocol): + META_SCHEMA: ClassVar[dict[Any, Any]] + VALIDATORS: ClassVar[dict[Any, Any]] + TYPE_CHECKER: ClassVar[TypeChecker] + FORMAT_CHECKER: ClassVar[FormatChecker] + schema: dict[Any, Any] | bool + def __init__( + self, schema: dict[Any, Any] | bool, resolver: RefResolver | None = ..., format_checker: FormatChecker | None = ... + ) -> None: ... + @classmethod + def check_schema(cls, schema: dict[Any, Any]) -> None: ... + def is_type(self, instance: _JsonParameter, type: str) -> bool: ... + def is_valid(self, instance: _JsonParameter) -> bool: ... + def iter_errors(self, instance: _JsonParameter) -> Iterator[ValidationError]: ... + def validate(self, instance: _JsonParameter) -> None: ... + def evolve(self, **kwargs) -> Validator: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/validators.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/validators.pyi new file mode 100644 index 00000000..635d8f4e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/jsonschema/jsonschema/validators.pyi @@ -0,0 +1,104 @@ +from _typeshed import Incomplete, SupportsKeysAndGetItem +from collections.abc import Callable, Generator, Iterable, Iterator, Mapping +from contextlib import contextmanager +from typing import Any, ClassVar +from typing_extensions import TypeAlias + +from ._format import FormatChecker +from ._types import TypeChecker +from ._utils import Unset, URIDict +from .exceptions import ValidationError + +# these type aliases do not exist at runtime, they're only defined here in the stub +_JsonObject: TypeAlias = Mapping[str, Any] +_JsonValue: TypeAlias = _JsonObject | list[Any] | str | int | float | bool | None +_ValidatorCallback: TypeAlias = Callable[[Any, Any, _JsonValue, _JsonObject], Iterator[ValidationError]] + +_Schema: TypeAlias = Mapping[str, Any] + +# This class does not exist at runtime. Compatible classes are created at +# runtime by create(). +class _Validator: + VALIDATORS: ClassVar[dict[Any, Any]] + META_SCHEMA: ClassVar[dict[Any, Any]] + TYPE_CHECKER: ClassVar[Any] + FORMAT_CHECKER: ClassVar[Any] + @staticmethod + def ID_OF(schema: _Schema) -> str: ... + schema: _Schema + resolver: Any + format_checker: Any + evolve: Any + def __init__(self, schema: _Schema, resolver: Incomplete | None = ..., format_checker: Incomplete | None = ...) -> None: ... + @classmethod + def check_schema(cls, schema: _Schema, format_checker: FormatChecker | Unset = ...) -> None: ... + def iter_errors(self, instance, _schema: _Schema | None = ...) 
-> Generator[Any, None, None]: ... + def descend( + self, instance, schema: _Schema, path: Incomplete | None = ..., schema_path: Incomplete | None = ... + ) -> Generator[Any, None, None]: ... + def validate(self, *args, **kwargs) -> None: ... + def is_type(self, instance, type): ... + def is_valid(self, instance, _schema: _Schema | None = ...) -> bool: ... + +def validates(version: str) -> Callable[..., Any]: ... +def create( + meta_schema: _Schema, + validators: Mapping[str, _ValidatorCallback] | tuple[()] = ..., + version: Incomplete | None = ..., + type_checker: TypeChecker = ..., + format_checker: FormatChecker = ..., + id_of: Callable[[_Schema], str] = ..., + applicable_validators: Callable[[_Schema], Iterable[tuple[str, _ValidatorCallback]]] = ..., +) -> type[_Validator]: ... +def extend( + validator, + validators=..., + version: Incomplete | None = ..., + type_checker: Incomplete | None = ..., + format_checker: Incomplete | None = ..., +): ... + +# At runtime these are fields that are assigned the return values of create() calls. +class Draft3Validator(_Validator): ... +class Draft4Validator(_Validator): ... +class Draft6Validator(_Validator): ... +class Draft7Validator(_Validator): ... +class Draft201909Validator(_Validator): ... +class Draft202012Validator(_Validator): ... + +_Handler: TypeAlias = Callable[[str], Any] + +class RefResolver: + referrer: dict[str, Any] + cache_remote: Any + handlers: dict[str, _Handler] + store: URIDict + def __init__( + self, + base_uri: str, + referrer: dict[str, Any], + store: SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]] = ..., + cache_remote: bool = ..., + handlers: SupportsKeysAndGetItem[str, _Handler] | Iterable[tuple[str, _Handler]] = ..., + urljoin_cache: Incomplete | None = ..., + remote_cache: Incomplete | None = ..., + ) -> None: ... + @classmethod + def from_schema(cls, schema: _Schema, id_of=..., *args, **kwargs): ... + def push_scope(self, scope) -> None: ... + def pop_scope(self) -> None: ... + @property + def resolution_scope(self): ... + @property + def base_uri(self): ... + @contextmanager + def in_scope(self, scope) -> Generator[None, None, None]: ... + @contextmanager + def resolving(self, ref) -> Generator[Incomplete, None, None]: ... + def resolve(self, ref): ... + def resolve_from_url(self, url): ... + def resolve_fragment(self, document, fragment): ... + def resolve_remote(self, uri): ... + +def validate(instance: object, schema: _Schema, cls: type[_Validator] | None = ..., *args: Any, **kwargs: Any) -> None: ... +def validator_for(schema: _Schema | bool, default=...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..6fa6de59 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/@tests/stubtest_allowlist.txt @@ -0,0 +1,7 @@ +# scan_code *should* never be None in real use. This is also according to docs. +keyboard.KeyboardEvent.scan_code +keyboard._keyboard_event.KeyboardEvent.scan_code +# Defaults don't align with possible values +keyboard.mouse.wait +# TODO: Should this be allowlisted? 
+keyboard.__main__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/@tests/stubtest_allowlist_darwin.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/@tests/stubtest_allowlist_darwin.txt new file mode 100644 index 00000000..d2dcf88d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/@tests/stubtest_allowlist_darwin.txt @@ -0,0 +1,2 @@ +# Defaults don't align with possible values +keyboard.mouse.on_button diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..d2dcf88d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,2 @@ +# Defaults don't align with possible values +keyboard.mouse.on_button diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/METADATA.toml new file mode 100644 index 00000000..dd666087 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/METADATA.toml @@ -0,0 +1,8 @@ +version = "0.13.*" + +# [tool.stubtest] +# While the stubs slightly differ on Windows vs Linux. +# It's only by possible mouse buttons and event literal types. +# As well as returning a tuple of int/long from keyboard.mouse.get_position +# The "mouse" module is obsoleted by the "mouse" package. +# platforms = diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/__init__.pyi new file mode 100644 index 00000000..d65c0c50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/__init__.pyi @@ -0,0 +1,113 @@ +from collections.abc import Callable, Generator, Iterable, Sequence +from queue import Queue +from threading import Event as _UninterruptibleEvent +from typing_extensions import TypeAlias + +from ._canonical_names import all_modifiers as all_modifiers, sided_modifiers as sided_modifiers +from ._keyboard_event import KEY_DOWN as KEY_DOWN, KEY_UP as KEY_UP, KeyboardEvent as KeyboardEvent + +_Key: TypeAlias = int | str +_ScanCodeList: TypeAlias = list[int] | tuple[int, ...] +_ParseableHotkey: TypeAlias = _Key | list[int | _ScanCodeList] | tuple[int | _ScanCodeList, ...] +_Callback: TypeAlias = Callable[[KeyboardEvent], bool | None] | Callable[[], bool | None] +# mypy doesn't support PEP 646's TypeVarTuple yet: https://github.com/python/mypy/issues/12280 +# _Ts = TypeVarTuple("_Ts") +_Ts: TypeAlias = tuple[object, ...] + +version: str + +class _Event(_UninterruptibleEvent): + def wait(self) -> None: ... # type: ignore[override] # Actual implementation + +def is_modifier(key: _Key | None) -> bool: ... +def key_to_scan_codes(key: _ParseableHotkey, error_if_missing: bool = ...) -> tuple[int, ...]: ... +def parse_hotkey(hotkey: _ParseableHotkey) -> tuple[tuple[tuple[int, ...], ...], ...]: ... +def send(hotkey: _ParseableHotkey, do_press: bool = ..., do_release: bool = ...) -> None: ... 
+ +press_and_release = send + +def press(hotkey: _ParseableHotkey) -> None: ... +def release(hotkey: _ParseableHotkey) -> None: ... + +# is_pressed cannot check multi-step hotkeys, so not using _ParseableHotkey + +def is_pressed(hotkey: _Key | _ScanCodeList) -> bool: ... +def call_later(fn: Callable[..., None], args: _Ts = ..., delay: float = ...) -> None: ... +def hook(callback: _Callback, suppress: bool = ..., on_remove: Callable[[], None] = ...) -> Callable[[], None]: ... +def on_press(callback: _Callback, suppress: bool = ...) -> Callable[[], None]: ... +def on_release(callback: _Callback, suppress: bool = ...) -> Callable[[], None]: ... +def hook_key(key: _ParseableHotkey, callback: _Callback, suppress: bool = ...) -> Callable[[], None]: ... +def on_press_key(key: _ParseableHotkey, callback: _Callback, suppress: bool = ...) -> Callable[[], None]: ... +def on_release_key(key: _ParseableHotkey, callback: _Callback, suppress: bool = ...) -> Callable[[], None]: ... +def unhook(remove: _Callback) -> None: ... + +unhook_key = unhook + +def unhook_all() -> None: ... +def block_key(key: _ParseableHotkey) -> Callable[[], None]: ... + +unblock_key = unhook_key + +def remap_key(src: _ParseableHotkey, dst: _ParseableHotkey) -> Callable[[], None]: ... + +unremap_key = unhook_key + +def parse_hotkey_combinations(hotkey: _ParseableHotkey) -> tuple[tuple[tuple[int, ...], ...], ...]: ... +def add_hotkey( + hotkey: _ParseableHotkey, + callback: Callable[..., bool | None], + args: _Ts = ..., + suppress: bool = ..., + timeout: float = ..., + trigger_on_release: bool = ..., +) -> Callable[[], None]: ... + +register_hotkey = add_hotkey + +def remove_hotkey(hotkey_or_callback: _ParseableHotkey | _Callback) -> None: ... + +unregister_hotkey = remove_hotkey +clear_hotkey = remove_hotkey + +def unhook_all_hotkeys() -> None: ... + +unregister_all_hotkeys = unhook_all_hotkeys +remove_all_hotkeys = unhook_all_hotkeys +clear_all_hotkeys = unhook_all_hotkeys + +def remap_hotkey( + src: _ParseableHotkey, dst: _ParseableHotkey, suppress: bool = ..., trigger_on_release: bool = ... +) -> Callable[[], None]: ... + +unremap_hotkey = remove_hotkey + +def stash_state() -> list[int]: ... +def restore_state(scan_codes: Iterable[int]) -> None: ... +def restore_modifiers(scan_codes: Iterable[int]) -> None: ... +def write(text: str, delay: float = ..., restore_state_after: bool = ..., exact: bool | None = ...) -> None: ... +def wait(hotkey: _ParseableHotkey | None = ..., suppress: bool = ..., trigger_on_release: bool = ...) -> None: ... +def get_hotkey_name(names: Iterable[str] | None = ...) -> str: ... +def read_event(suppress: bool = ...) -> KeyboardEvent: ... +def read_key(suppress: bool = ...) -> _Key: ... +def read_hotkey(suppress: bool = ...) -> str: ... +def get_typed_strings(events: Iterable[KeyboardEvent], allow_backspace: bool = ...) -> Generator[str, None, None]: ... +def start_recording( + recorded_events_queue: Queue[KeyboardEvent] | None = ..., +) -> tuple[Queue[KeyboardEvent], Callable[[], None]]: ... +def stop_recording() -> list[KeyboardEvent]: ... +def record(until: str = ..., suppress: bool = ..., trigger_on_release: bool = ...) -> list[KeyboardEvent]: ... +def play(events: Iterable[KeyboardEvent], speed_factor: float = ...) -> None: ... + +replay = play + +def add_word_listener( + word: str, callback: _Callback, triggers: Sequence[str] = ..., match_suffix: bool = ..., timeout: float = ... +) -> Callable[[], None]: ... +def remove_word_listener(word_or_handler: str | _Callback) -> None: ... 
+def add_abbreviation( + source_text: str, replacement_text: str, match_suffix: bool = ..., timeout: float = ... +) -> Callable[[], None]: ... + +register_word_listener = add_word_listener +register_abbreviation = add_abbreviation +remove_abbreviation = remove_word_listener diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_canonical_names.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_canonical_names.pyi new file mode 100644 index 00000000..8a0c3a00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_canonical_names.pyi @@ -0,0 +1,5 @@ +canonical_names: dict[str, str] +sided_modifiers: set[str] +all_modifiers: set[str] + +def normalize_name(name: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_generic.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_generic.pyi new file mode 100644 index 00000000..21f74b9e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_generic.pyi @@ -0,0 +1,24 @@ +from collections.abc import Callable +from queue import Queue +from threading import Lock, Thread +from typing import ClassVar +from typing_extensions import Literal, TypeAlias + +from ._keyboard_event import KeyboardEvent +from ._mouse_event import _MouseEvent + +_Event: TypeAlias = KeyboardEvent | _MouseEvent + +class GenericListener: + lock: ClassVar[Lock] + handlers: list[Callable[[_Event], bool | None]] + listening: bool + queue: Queue[_Event] + listening_thread: Thread | None + processing_thread: Thread | None + def invoke_handlers(self, event: _Event) -> Literal[1] | None: ... + def start_if_necessary(self) -> None: ... + def pre_process_event(self, event: _Event) -> None: ... + def process(self) -> None: ... + def add_handler(self, handler: Callable[[_Event], bool | None]) -> None: ... + def remove_handler(self, handler: Callable[[_Event], bool | None]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_keyboard_event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_keyboard_event.pyi new file mode 100644 index 00000000..9c511fdc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_keyboard_event.pyi @@ -0,0 +1,28 @@ +from typing_extensions import Literal + +from ._canonical_names import canonical_names as canonical_names, normalize_name as normalize_name + +KEY_DOWN: Literal["down"] +KEY_UP: Literal["up"] + +class KeyboardEvent: + event_type: Literal["down", "up"] | None + scan_code: int + name: str | None + time: float | None + device: str | None + modifiers: tuple[str, ...] | None + is_keypad: bool | None + + def __init__( + self, + event_type: Literal["down", "up"] | None, + scan_code: int, + name: str | None = ..., + time: float | None = ..., + device: str | None = ..., + modifiers: tuple[str, ...] | None = ..., + is_keypad: bool | None = ..., + ) -> None: ... + def to_json(self, ensure_ascii: bool = ...) -> str: ... + def __eq__(self, other: object) -> bool: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_mouse_event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_mouse_event.pyi new file mode 100644 index 00000000..479fe4c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/_mouse_event.pyi @@ -0,0 +1,43 @@ +import sys +from typing import NamedTuple +from typing_extensions import Literal, TypeAlias + +_MouseEvent: TypeAlias = ButtonEvent | WheelEvent | MoveEvent # noqa: Y047 # Used outside + +LEFT: Literal["left"] +RIGHT: Literal["right"] +MIDDLE: Literal["middle"] +X: Literal["x"] +X2: Literal["x2"] + +UP: Literal["up"] +DOWN: Literal["down"] +DOUBLE: Literal["double"] +WHEEL: Literal["wheel"] + +VERTICAL: Literal["vertical"] +HORIZONTAL: Literal["horizontal"] + +if sys.platform == "linux" or sys.platform == "win32": + _MouseButton: TypeAlias = Literal["left", "right", "middle", "x", "x2"] +else: + _MouseButton: TypeAlias = Literal["left", "right", "middle"] + +if sys.platform == "win32": + _MouseEventType: TypeAlias = Literal["up", "down", "double", "wheel"] +else: + _MouseEventType: TypeAlias = Literal["up", "down"] + +class ButtonEvent(NamedTuple): + event_type: _MouseEventType + button: _MouseButton + time: float + +class WheelEvent(NamedTuple): + delta: int + time: float + +class MoveEvent(NamedTuple): + x: int + y: int + time: float diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/mouse.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/mouse.pyi new file mode 100644 index 00000000..48a10d23 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/keyboard/keyboard/mouse.pyi @@ -0,0 +1,78 @@ +import sys +from collections.abc import Callable, Iterable +from ctypes import c_long +from typing import SupportsInt, TypeVar +from typing_extensions import Literal, TypeAlias + +from ._generic import GenericListener as _GenericListener +from ._mouse_event import ( + DOUBLE as DOUBLE, + DOWN as DOWN, + LEFT as LEFT, + MIDDLE as MIDDLE, + RIGHT as RIGHT, + UP as UP, + X2 as X2, + ButtonEvent as ButtonEvent, + MoveEvent as MoveEvent, + WheelEvent as WheelEvent, + X as X, + _MouseButton, + _MouseEvent, + _MouseEventType, +) + +# mypy doesn't support PEP 646's TypeVarTuple yet: https://github.com/python/mypy/issues/12280 +# _Ts = TypeVarTuple("_Ts") +_Ts: TypeAlias = tuple[object, ...] +_Callback: TypeAlias = Callable[[_MouseEvent], bool | None] +_C = TypeVar("_C", bound=_Callback) + +class _MouseListener(_GenericListener): + def init(self) -> None: ... + def pre_process_event( # type: ignore[override] # Mouse specific events and return + self, event: _MouseEvent + ) -> Literal[True]: ... + def listen(self) -> None: ... + +def is_pressed(button: _MouseButton = ...) -> bool: ... +def press(button: _MouseButton = ...) -> None: ... +def release(button: _MouseButton = ...) -> None: ... +def click(button: _MouseButton = ...) -> None: ... +def double_click(button: _MouseButton = ...) -> None: ... +def right_click() -> None: ... +def wheel(delta: int = ...) -> None: ... +def move(x: SupportsInt, y: SupportsInt, absolute: bool = ..., duration: float = ...) -> None: ... +def drag(start_x: int, start_y: int, end_x: int, end_y: int, absolute: bool = ..., duration: float = ...) -> None: ... 
+def on_button( + callback: Callable[..., None], + args: _Ts = ..., + buttons: list[_MouseButton] | tuple[_MouseButton, ...] | _MouseButton = ..., + types: list[_MouseEventType] | tuple[_MouseEventType, ...] | _MouseEventType = ..., +) -> _Callback: ... +def on_click(callback: Callable[..., None], args: _Ts = ...) -> _Callback: ... +def on_double_click(callback: Callable[..., None], args: _Ts = ...) -> _Callback: ... +def on_right_click(callback: Callable[..., None], args: _Ts = ...) -> _Callback: ... +def on_middle_click(callback: Callable[..., None], args: _Ts = ...) -> _Callback: ... +def wait(button: _MouseButton = ..., target_types: tuple[_MouseEventType] = ...) -> None: ... + +if sys.platform == "win32": + def get_position() -> tuple[c_long, c_long]: ... + +else: + def get_position() -> tuple[int, int]: ... + +def hook(callback: _C) -> _C: ... +def unhook(callback: _Callback) -> None: ... +def unhook_all() -> None: ... +def record(button: _MouseButton = ..., target_types: tuple[_MouseEventType] = ...) -> _MouseEvent: ... +def play( + events: Iterable[_MouseEvent], + speed_factor: float = ..., + include_clicks: bool = ..., + include_moves: bool = ..., + include_wheel: bool = ..., +) -> None: ... + +replay = play +hold = press diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/METADATA.toml new file mode 100644 index 00000000..9159fae4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/METADATA.toml @@ -0,0 +1,10 @@ +version = "2.9.*" +requires = ["types-pyasn1"] + +[tool.stubtest] +ignore_missing_stub = true +apt_dependencies = ["libkrb5-dev"] +# No need to install on the CI. Leaving here as information for MacOs/Windows contributors. 
+# brew_dependencies = ["krb5"] +# choco_dependencies = ["mitkerberos"] +stubtest_requirements = ["gssapi"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/__init__.pyi new file mode 100644 index 00000000..4e9d2efd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/__init__.pyi @@ -0,0 +1,104 @@ +from typing import Any +from typing_extensions import Literal + +from .abstract.attrDef import AttrDef as AttrDef +from .abstract.attribute import ( + Attribute as Attribute, + OperationalAttribute as OperationalAttribute, + WritableAttribute as WritableAttribute, +) +from .abstract.cursor import Reader as Reader, Writer as Writer +from .abstract.entry import Entry as Entry, WritableEntry as WritableEntry +from .abstract.objectDef import ObjectDef as ObjectDef +from .core.connection import Connection as Connection +from .core.pooling import ServerPool as ServerPool +from .core.rdns import ReverseDnsSetting as ReverseDnsSetting +from .core.server import Server as Server +from .core.tls import Tls as Tls +from .protocol.rfc4512 import DsaInfo as DsaInfo, SchemaInfo as SchemaInfo +from .utils.config import get_config_parameter as get_config_parameter, set_config_parameter as set_config_parameter +from .version import __description__ as __description__, __status__ as __status__, __url__ as __url__ + +ANONYMOUS: Literal["ANONYMOUS"] +SIMPLE: Literal["SIMPLE"] +SASL: Literal["SASL"] +NTLM: Literal["NTLM"] + +EXTERNAL: Literal["EXTERNAL"] +DIGEST_MD5: Literal["DIGEST-MD5"] +KERBEROS: Literal["GSSAPI"] +GSSAPI: Literal["GSSAPI"] +PLAIN: Literal["PLAIN"] + +AUTO_BIND_DEFAULT: Literal["DEFAULT"] +AUTO_BIND_NONE: Literal["NONE"] +AUTO_BIND_NO_TLS: Literal["NO_TLS"] +AUTO_BIND_TLS_BEFORE_BIND: Literal["TLS_BEFORE_BIND"] +AUTO_BIND_TLS_AFTER_BIND: Literal["TLS_AFTER_BIND"] + +IP_SYSTEM_DEFAULT: Literal["IP_SYSTEM_DEFAULT"] +IP_V4_ONLY: Literal["IP_V4_ONLY"] +IP_V6_ONLY: Literal["IP_V6_ONLY"] +IP_V4_PREFERRED: Literal["IP_V4_PREFERRED"] +IP_V6_PREFERRED: Literal["IP_V6_PREFERRED"] + +BASE: Literal["BASE"] +LEVEL: Literal["LEVEL"] +SUBTREE: Literal["SUBTREE"] + +DEREF_NEVER: Literal["NEVER"] +DEREF_SEARCH: Literal["SEARCH"] +DEREF_BASE: Literal["FINDING_BASE"] +DEREF_ALWAYS: Literal["ALWAYS"] + +ALL_ATTRIBUTES: Literal["*"] +NO_ATTRIBUTES: Literal["1.1"] +ALL_OPERATIONAL_ATTRIBUTES: Literal["+"] + +MODIFY_ADD: Literal["MODIFY_ADD"] +MODIFY_DELETE: Literal["MODIFY_DELETE"] +MODIFY_REPLACE: Literal["MODIFY_REPLACE"] +MODIFY_INCREMENT: Literal["MODIFY_INCREMENT"] + +SYNC: Literal["SYNC"] +SAFE_SYNC: Literal["SAFE_SYNC"] +SAFE_RESTARTABLE: Literal["SAFE_RESTARTABLE"] +ASYNC: Literal["ASYNC"] +LDIF: Literal["LDIF"] +RESTARTABLE: Literal["RESTARTABLE"] +REUSABLE: Literal["REUSABLE"] +MOCK_SYNC: Literal["MOCK_SYNC"] +MOCK_ASYNC: Literal["MOCK_ASYNC"] +ASYNC_STREAM: Literal["ASYNC_STREAM"] + +NONE: Literal["NO_INFO"] +DSA: Literal["DSA"] +SCHEMA: Literal["SCHEMA"] +ALL: Literal["ALL"] + +OFFLINE_EDIR_8_8_8: Literal["EDIR_8_8_8"] +OFFLINE_EDIR_9_1_4: Literal["EDIR_9_1_4"] +OFFLINE_AD_2012_R2: Literal["AD_2012_R2"] +OFFLINE_SLAPD_2_4: Literal["SLAPD_2_4"] +OFFLINE_DS389_1_3_3: Literal["DS389_1_3_3"] + +FIRST: Literal["FIRST"] +ROUND_ROBIN: Literal["ROUND_ROBIN"] +RANDOM: Literal["RANDOM"] + +HASHED_NONE: Literal["PLAIN"] +HASHED_SHA: Literal["SHA"] +HASHED_SHA256: Literal["SHA256"] +HASHED_SHA384: Literal["SHA384"] 
+HASHED_SHA512: Literal["SHA512"] +HASHED_MD5: Literal["MD5"] +HASHED_SALTED_SHA: Literal["SALTED_SHA"] +HASHED_SALTED_SHA256: Literal["SALTED_SHA256"] +HASHED_SALTED_SHA384: Literal["SALTED_SHA384"] +HASHED_SALTED_SHA512: Literal["SALTED_SHA512"] +HASHED_SALTED_MD5: Literal["SALTED_MD5"] + +NUMERIC_TYPES: tuple[type[Any], ...] +INTEGER_TYPES: tuple[type[Any], ...] +STRING_TYPES: tuple[type[Any], ...] +SEQUENCE_TYPES: tuple[type[Any], ...] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/__init__.pyi new file mode 100644 index 00000000..5c2b1bd7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/__init__.pyi @@ -0,0 +1,15 @@ +from typing import Any + +STATUS_INIT: str +STATUS_VIRTUAL: str +STATUS_MANDATORY_MISSING: str +STATUS_READ: str +STATUS_WRITABLE: str +STATUS_PENDING_CHANGES: str +STATUS_COMMITTED: str +STATUS_READY_FOR_DELETION: str +STATUS_READY_FOR_MOVING: str +STATUS_READY_FOR_RENAMING: str +STATUS_DELETED: str +STATUSES: Any +INITIAL_STATUSES: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/attrDef.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/attrDef.pyi new file mode 100644 index 00000000..609123f4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/attrDef.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete +from typing import Any + +class AttrDef: + name: Any + key: Any + validate: Any + pre_query: Any + post_query: Any + default: Any + dereference_dn: Any + description: Any + mandatory: Any + single_value: Any + oid_info: Any + other_names: Any + def __init__( + self, + name, + key: Incomplete | None = ..., + validate: Incomplete | None = ..., + pre_query: Incomplete | None = ..., + post_query: Incomplete | None = ..., + default=..., + dereference_dn: Incomplete | None = ..., + description: Incomplete | None = ..., + mandatory: bool = ..., + single_value: Incomplete | None = ..., + alias: Incomplete | None = ..., + ) -> None: ... + def __eq__(self, other): ... + def __lt__(self, other): ... + def __hash__(self) -> int: ... + def __setattr__(self, key: str, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/attribute.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/attribute.pyi new file mode 100644 index 00000000..d22ff5d2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/attribute.pyi @@ -0,0 +1,34 @@ +from typing import Any + +class Attribute: + key: Any + definition: Any + values: Any + raw_values: Any + response: Any + entry: Any + cursor: Any + other_names: Any + def __init__(self, attr_def, entry, cursor) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __getitem__(self, item): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + @property + def value(self): ... + +class OperationalAttribute(Attribute): ... + +class WritableAttribute(Attribute): + def __iadd__(self, other): ... + def __isub__(self, other): ... + def add(self, values) -> None: ... + def set(self, values) -> None: ... + def delete(self, values) -> None: ... 
+ def remove(self) -> None: ... + def discard(self) -> None: ... + @property + def virtual(self): ... + @property + def changes(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/cursor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/cursor.pyi new file mode 100644 index 00000000..0787f9d2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/cursor.pyi @@ -0,0 +1,110 @@ +from _typeshed import Incomplete +from typing import Any, NamedTuple + +class Operation(NamedTuple): + request: Any + result: Any + response: Any + +class Cursor: + connection: Any + get_operational_attributes: Any + definition: Any + attributes: Any + controls: Any + execution_time: Any + entries: Any + schema: Any + def __init__( + self, + connection, + object_def, + get_operational_attributes: bool = ..., + attributes: Incomplete | None = ..., + controls: Incomplete | None = ..., + auxiliary_class: Incomplete | None = ..., + ) -> None: ... + def __iter__(self): ... + def __getitem__(self, item): ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def match_dn(self, dn): ... + def match(self, attributes, value): ... + def remove(self, entry) -> None: ... + @property + def operations(self): ... + @property + def errors(self): ... + @property + def failed(self): ... + +class Reader(Cursor): + entry_class: Any + attribute_class: Any + entry_initial_status: Any + sub_tree: Any + base: Any + dereference_aliases: Any + validated_query: Any + query_filter: Any + def __init__( + self, + connection, + object_def, + base, + query: str = ..., + components_in_and: bool = ..., + sub_tree: bool = ..., + get_operational_attributes: bool = ..., + attributes: Incomplete | None = ..., + controls: Incomplete | None = ..., + auxiliary_class: Incomplete | None = ..., + ) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, value) -> None: ... + @property + def components_in_and(self): ... + @components_in_and.setter + def components_in_and(self, value) -> None: ... + def clear(self) -> None: ... + execution_time: Any + entries: Any + def reset(self) -> None: ... + def search(self, attributes: Incomplete | None = ...): ... + def search_object(self, entry_dn: Incomplete | None = ..., attributes: Incomplete | None = ...): ... + def search_level(self, attributes: Incomplete | None = ...): ... + def search_subtree(self, attributes: Incomplete | None = ...): ... + def search_paged( + self, paged_size, paged_criticality: bool = ..., generator: bool = ..., attributes: Incomplete | None = ... + ): ... + +class Writer(Cursor): + entry_class: Any + attribute_class: Any + entry_initial_status: Any + @staticmethod + def from_cursor( + cursor, + connection: Incomplete | None = ..., + object_def: Incomplete | None = ..., + custom_validator: Incomplete | None = ..., + ): ... + @staticmethod + def from_response(connection, object_def, response: Incomplete | None = ...): ... + dereference_aliases: Any + def __init__( + self, + connection, + object_def, + get_operational_attributes: bool = ..., + attributes: Incomplete | None = ..., + controls: Incomplete | None = ..., + auxiliary_class: Incomplete | None = ..., + ) -> None: ... + execution_time: Any + def commit(self, refresh: bool = ...): ... + def discard(self) -> None: ... + def new(self, dn): ... + def refresh_entry(self, entry, tries: int = ..., seconds: int = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/entry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/entry.pyi new file mode 100644 index 00000000..ef5929cc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/entry.pyi @@ -0,0 +1,88 @@ +from _typeshed import Incomplete +from typing import Any + +class EntryState: + dn: Any + status: Any + attributes: Any + raw_attributes: Any + response: Any + cursor: Any + origin: Any + read_time: Any + changes: Any + definition: Any + def __init__(self, dn, cursor) -> None: ... + def set_status(self, status) -> None: ... + @property + def entry_raw_attributes(self): ... + +class EntryBase: + def __init__(self, dn, cursor) -> None: ... + def __iter__(self): ... + def __contains__(self, item): ... + def __getattr__(self, item: str): ... + def __setattr__(self, item: str, value) -> None: ... + def __getitem__(self, item): ... + def __eq__(self, other): ... + def __lt__(self, other): ... + @property + def entry_dn(self): ... + @property + def entry_cursor(self): ... + @property + def entry_status(self): ... + @property + def entry_definition(self): ... + @property + def entry_raw_attributes(self): ... + def entry_raw_attribute(self, name): ... + @property + def entry_mandatory_attributes(self): ... + @property + def entry_attributes(self): ... + @property + def entry_attributes_as_dict(self): ... + @property + def entry_read_time(self): ... + def entry_to_json( + self, + raw: bool = ..., + indent: int = ..., + sort: bool = ..., + stream: Incomplete | None = ..., + checked_attributes: bool = ..., + include_empty: bool = ..., + ): ... + def entry_to_ldif( + self, + all_base64: bool = ..., + line_separator: Incomplete | None = ..., + sort_order: Incomplete | None = ..., + stream: Incomplete | None = ..., + ): ... + +class Entry(EntryBase): + def entry_writable( + self, + object_def: Incomplete | None = ..., + writer_cursor: Incomplete | None = ..., + attributes: Incomplete | None = ..., + custom_validator: Incomplete | None = ..., + auxiliary_class: Incomplete | None = ..., + ): ... + +class WritableEntry(EntryBase): + def __setitem__(self, key, value) -> None: ... + def __setattr__(self, item: str, value) -> None: ... + def __getattr__(self, item: str): ... + @property + def entry_virtual_attributes(self): ... + def entry_commit_changes(self, refresh: bool = ..., controls: Incomplete | None = ..., clear_history: bool = ...): ... + def entry_discard_changes(self) -> None: ... + def entry_delete(self) -> None: ... + def entry_refresh(self, tries: int = ..., seconds: int = ...): ... + def entry_move(self, destination_dn) -> None: ... + def entry_rename(self, new_name) -> None: ... + @property + def entry_changes(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/objectDef.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/objectDef.pyi new file mode 100644 index 00000000..f7a449cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/abstract/objectDef.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +class ObjectDef: + def __init__( + self, + object_class: Incomplete | None = ..., + schema: Incomplete | None = ..., + custom_validator: Incomplete | None = ..., + auxiliary_class: Incomplete | None = ..., + ) -> None: ... + def __getitem__(self, item): ... + def __getattr__(self, item: str): ... + def __setattr__(self, key: str, value) -> None: ... + def __iadd__(self, other): ... + def __isub__(self, other): ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def __contains__(self, item): ... + def add_from_schema(self, attribute_name, mandatory: bool = ...) -> None: ... + def add_attribute(self, definition: Incomplete | None = ...) -> None: ... + def remove_attribute(self, item) -> None: ... + def clear_attributes(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/connection.pyi new file mode 100644 index 00000000..0e5b29c7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/connection.pyi @@ -0,0 +1,192 @@ +from _collections_abc import Generator, dict_keys +from _typeshed import Incomplete, ReadableBuffer +from types import TracebackType +from typing_extensions import Literal, Self, TypeAlias + +from pyasn1.type.base import Asn1Item + +from .pooling import ServerPool +from .server import Server + +SASL_AVAILABLE_MECHANISMS: Incomplete +CLIENT_STRATEGIES: Incomplete + +_ServerSequence: TypeAlias = ( + set[Server] | list[Server] | tuple[Server, ...] 
| Generator[Server, None, None] | dict_keys[Server, Incomplete] +) + +class Connection: + connection_lock: Incomplete + last_error: str + strategy_type: Incomplete + user: Incomplete + password: Incomplete + authentication: Incomplete + version: Incomplete + auto_referrals: Incomplete + request: Incomplete + response: Incomplete | None + result: Incomplete + bound: bool + listening: bool + closed: bool + auto_bind: Incomplete + sasl_mechanism: Incomplete + sasl_credentials: Incomplete + socket: Incomplete + tls_started: bool + sasl_in_progress: bool + read_only: Incomplete + lazy: Incomplete + pool_name: Incomplete + pool_size: int | None + cred_store: Incomplete + pool_lifetime: Incomplete + pool_keepalive: Incomplete + starting_tls: bool + check_names: Incomplete + raise_exceptions: Incomplete + auto_range: Incomplete + extend: Incomplete + fast_decoder: Incomplete + receive_timeout: Incomplete + empty_attributes: Incomplete + use_referral_cache: Incomplete + auto_escape: Incomplete + auto_encode: Incomplete + source_address: Incomplete + source_port_list: Incomplete + server_pool: Incomplete | None + server: Incomplete + strategy: Incomplete + send: Incomplete + open: Incomplete + get_response: Incomplete + post_send_single_response: Incomplete + post_send_search: Incomplete + def __init__( + self, + server: Server | str | _ServerSequence | ServerPool, + user: str | None = ..., + password: str | None = ..., + auto_bind: Literal["DEFAULT", "NONE", "NO_TLS", "TLS_BEFORE_BIND", "TLS_AFTER_BIND"] = ..., + version: int = ..., + authentication: Literal["ANONYMOUS", "SIMPLE", "SASL", "NTLM"] | None = ..., + client_strategy: Literal[ + "SYNC", + "SAFE_RESTARTABLE", + "SAFE_SYNC", + "ASYNC", + "LDIF", + "RESTARTABLE", + "REUSABLE", + "MOCK_SYNC", + "MOCK_ASYNC", + "ASYNC_STREAM", + ] = ..., + auto_referrals: bool = ..., + auto_range: bool = ..., + sasl_mechanism: str | None = ..., + sasl_credentials: Incomplete | None = ..., + check_names: bool = ..., + collect_usage: bool = ..., + read_only: bool = ..., + lazy: bool = ..., + raise_exceptions: bool = ..., + pool_name: str | None = ..., + pool_size: int | None = ..., + pool_lifetime: int | None = ..., + cred_store: Incomplete | None = ..., + fast_decoder: bool = ..., + receive_timeout: Incomplete | None = ..., + return_empty_attributes: bool = ..., + use_referral_cache: bool = ..., + auto_escape: bool = ..., + auto_encode: bool = ..., + pool_keepalive: Incomplete | None = ..., + source_address: str | None = ..., + source_port: int | None = ..., + source_port_list: Incomplete | None = ..., + ) -> None: ... + def repr_with_sensitive_data_stripped(self): ... + @property + def stream(self): ... + @stream.setter + def stream(self, value) -> None: ... + @property + def usage(self): ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> Literal[False] | None: ... + def bind(self, read_server_info: bool = ..., controls: Incomplete | None = ...): ... + def rebind( + self, + user: Incomplete | None = ..., + password: Incomplete | None = ..., + authentication: Incomplete | None = ..., + sasl_mechanism: Incomplete | None = ..., + sasl_credentials: Incomplete | None = ..., + read_server_info: bool = ..., + controls: Incomplete | None = ..., + ): ... + def unbind(self, controls: Incomplete | None = ...): ... 
+ def search( + self, + search_base: str, + search_filter: str, + search_scope: Literal["BASE", "LEVEL", "SUBTREE"] = ..., + dereference_aliases: Literal["NEVER", "SEARCH", "FINDING_BASE", "ALWAYS"] = ..., + attributes: Incomplete | None = ..., + size_limit: int = ..., + time_limit: int = ..., + types_only: bool = ..., + get_operational_attributes: bool = ..., + controls: Incomplete | None = ..., + paged_size: int | None = ..., + paged_criticality: bool = ..., + paged_cookie: str | bytes | None = ..., + auto_escape: bool | None = ..., + ): ... + def compare(self, dn, attribute, value, controls: Incomplete | None = ...): ... + def add( + self, dn, object_class: Incomplete | None = ..., attributes: Incomplete | None = ..., controls: Incomplete | None = ... + ): ... + def delete(self, dn, controls: Incomplete | None = ...): ... + def modify(self, dn, changes, controls: Incomplete | None = ...): ... + def modify_dn( + self, dn, relative_dn, delete_old_dn: bool = ..., new_superior: Incomplete | None = ..., controls: Incomplete | None = ... + ): ... + def abandon(self, message_id, controls: Incomplete | None = ...): ... + def extended( + self, + request_name, + request_value: Asn1Item | ReadableBuffer | None = ..., + controls: Incomplete | None = ..., + no_encode: bool | None = ..., + ): ... + def start_tls(self, read_server_info: bool = ...): ... + def do_sasl_bind(self, controls): ... + def do_ntlm_bind(self, controls): ... + def refresh_server_info(self) -> None: ... + def response_to_ldif( + self, + search_result: Incomplete | None = ..., + all_base64: bool = ..., + line_separator: Incomplete | None = ..., + sort_order: Incomplete | None = ..., + stream: Incomplete | None = ..., + ): ... + def response_to_json( + self, + raw: bool = ..., + search_result: Incomplete | None = ..., + indent: int = ..., + sort: bool = ..., + stream: Incomplete | None = ..., + checked_attributes: bool = ..., + include_empty: bool = ..., + ): ... + def response_to_file(self, target, raw: bool = ..., indent: int = ..., sort: bool = ...) -> None: ... + @property + def entries(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/exceptions.pyi new file mode 100644 index 00000000..7da6a70a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/exceptions.pyi @@ -0,0 +1,146 @@ +import socket +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Self + +class LDAPException(Exception): ... + +class LDAPOperationResult(LDAPException): + def __new__( + cls, + result: Incomplete | None = ..., + description: Incomplete | None = ..., + dn: Incomplete | None = ..., + message: Incomplete | None = ..., + response_type: Incomplete | None = ..., + response: Incomplete | None = ..., + ) -> Self: ... + result: Any + description: Any + dn: Any + message: Any + type: Any + response: Any + def __init__( + self, + result: Incomplete | None = ..., + description: Incomplete | None = ..., + dn: Incomplete | None = ..., + message: Incomplete | None = ..., + response_type: Incomplete | None = ..., + response: Incomplete | None = ..., + ) -> None: ... + +class LDAPOperationsErrorResult(LDAPOperationResult): ... +class LDAPProtocolErrorResult(LDAPOperationResult): ... +class LDAPTimeLimitExceededResult(LDAPOperationResult): ... +class LDAPSizeLimitExceededResult(LDAPOperationResult): ... 
+class LDAPAuthMethodNotSupportedResult(LDAPOperationResult): ... +class LDAPStrongerAuthRequiredResult(LDAPOperationResult): ... +class LDAPReferralResult(LDAPOperationResult): ... +class LDAPAdminLimitExceededResult(LDAPOperationResult): ... +class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult): ... +class LDAPConfidentialityRequiredResult(LDAPOperationResult): ... +class LDAPSASLBindInProgressResult(LDAPOperationResult): ... +class LDAPNoSuchAttributeResult(LDAPOperationResult): ... +class LDAPUndefinedAttributeTypeResult(LDAPOperationResult): ... +class LDAPInappropriateMatchingResult(LDAPOperationResult): ... +class LDAPConstraintViolationResult(LDAPOperationResult): ... +class LDAPAttributeOrValueExistsResult(LDAPOperationResult): ... +class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult): ... +class LDAPNoSuchObjectResult(LDAPOperationResult): ... +class LDAPAliasProblemResult(LDAPOperationResult): ... +class LDAPInvalidDNSyntaxResult(LDAPOperationResult): ... +class LDAPAliasDereferencingProblemResult(LDAPOperationResult): ... +class LDAPInappropriateAuthenticationResult(LDAPOperationResult): ... +class LDAPInvalidCredentialsResult(LDAPOperationResult): ... +class LDAPInsufficientAccessRightsResult(LDAPOperationResult): ... +class LDAPBusyResult(LDAPOperationResult): ... +class LDAPUnavailableResult(LDAPOperationResult): ... +class LDAPUnwillingToPerformResult(LDAPOperationResult): ... +class LDAPLoopDetectedResult(LDAPOperationResult): ... +class LDAPNamingViolationResult(LDAPOperationResult): ... +class LDAPObjectClassViolationResult(LDAPOperationResult): ... +class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult): ... +class LDAPNotAllowedOnRDNResult(LDAPOperationResult): ... +class LDAPEntryAlreadyExistsResult(LDAPOperationResult): ... +class LDAPObjectClassModsProhibitedResult(LDAPOperationResult): ... +class LDAPAffectMultipleDSASResult(LDAPOperationResult): ... +class LDAPOtherResult(LDAPOperationResult): ... +class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult): ... +class LDAPLCUPSecurityViolationResult(LDAPOperationResult): ... +class LDAPLCUPInvalidDataResult(LDAPOperationResult): ... +class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult): ... +class LDAPLCUPReloadRequiredResult(LDAPOperationResult): ... +class LDAPCanceledResult(LDAPOperationResult): ... +class LDAPNoSuchOperationResult(LDAPOperationResult): ... +class LDAPTooLateResult(LDAPOperationResult): ... +class LDAPCannotCancelResult(LDAPOperationResult): ... +class LDAPAssertionFailedResult(LDAPOperationResult): ... +class LDAPAuthorizationDeniedResult(LDAPOperationResult): ... +class LDAPESyncRefreshRequiredResult(LDAPOperationResult): ... + +exception_table: Any + +class LDAPExceptionError(LDAPException): ... +class LDAPConfigurationError(LDAPExceptionError): ... +class LDAPUnknownStrategyError(LDAPConfigurationError): ... +class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError): ... +class LDAPSSLConfigurationError(LDAPConfigurationError): ... +class LDAPDefinitionError(LDAPConfigurationError): ... +class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError): ... +class LDAPConfigurationParameterError(LDAPConfigurationError): ... +class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError): ... +class LDAPObjectError(LDAPExceptionError, ValueError): ... +class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError): ... +class LDAPCursorError(LDAPExceptionError): ... +class LDAPCursorAttributeError(LDAPCursorError, AttributeError): ... 
+class LDAPObjectDereferenceError(LDAPExceptionError): ... +class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError): ... +class LDAPInvalidTlsSpecificationError(LDAPExceptionError): ... +class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError): ... +class LDAPSignatureVerificationFailedError(LDAPExceptionError): ... +class LDAPBindError(LDAPExceptionError): ... +class LDAPInvalidServerError(LDAPExceptionError): ... +class LDAPSASLMechanismNotSupportedError(LDAPExceptionError): ... +class LDAPConnectionIsReadOnlyError(LDAPExceptionError): ... +class LDAPChangeError(LDAPExceptionError, ValueError): ... +class LDAPServerPoolError(LDAPExceptionError): ... +class LDAPServerPoolExhaustedError(LDAPExceptionError): ... +class LDAPInvalidPortError(LDAPExceptionError): ... +class LDAPStartTLSError(LDAPExceptionError): ... +class LDAPCertificateError(LDAPExceptionError): ... +class LDAPUserNameNotAllowedError(LDAPExceptionError): ... +class LDAPUserNameIsMandatoryError(LDAPExceptionError): ... +class LDAPPasswordIsMandatoryError(LDAPExceptionError): ... +class LDAPInvalidFilterError(LDAPExceptionError): ... +class LDAPInvalidScopeError(LDAPExceptionError, ValueError): ... +class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError): ... +class LDAPInvalidValueError(LDAPExceptionError, ValueError): ... +class LDAPControlError(LDAPExceptionError, ValueError): ... +class LDAPExtensionError(LDAPExceptionError, ValueError): ... +class LDAPLDIFError(LDAPExceptionError): ... +class LDAPSchemaError(LDAPExceptionError): ... +class LDAPSASLPrepError(LDAPExceptionError): ... +class LDAPSASLBindInProgressError(LDAPExceptionError): ... +class LDAPMetricsError(LDAPExceptionError): ... +class LDAPObjectClassError(LDAPExceptionError): ... +class LDAPInvalidDnError(LDAPExceptionError): ... +class LDAPResponseTimeoutError(LDAPExceptionError): ... +class LDAPTransactionError(LDAPExceptionError): ... +class LDAPInfoError(LDAPExceptionError): ... +class LDAPCommunicationError(LDAPExceptionError): ... +class LDAPSocketOpenError(LDAPCommunicationError): ... +class LDAPSocketCloseError(LDAPCommunicationError): ... +class LDAPSocketReceiveError(LDAPCommunicationError, socket.error): ... +class LDAPSocketSendError(LDAPCommunicationError, socket.error): ... +class LDAPSessionTerminatedByServerError(LDAPCommunicationError): ... +class LDAPUnknownResponseError(LDAPCommunicationError): ... +class LDAPUnknownRequestError(LDAPCommunicationError): ... +class LDAPReferralError(LDAPCommunicationError): ... +class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError): ... +class LDAPConnectionPoolNotStartedError(LDAPExceptionError): ... +class LDAPMaximumRetriesError(LDAPExceptionError): ... + +def communication_exception_factory(exc_to_raise, exc): ... +def start_tls_exception_factory(exc): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/pooling.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/pooling.pyi new file mode 100644 index 00000000..e468fa9c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/pooling.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete +from typing import Any + +POOLING_STRATEGIES: Any + +class ServerState: + server: Any + last_checked_time: Any + available: Any + def __init__(self, server, last_checked_time, available) -> None: ... 
+ +class ServerPoolState: + server_states: Any + strategy: Any + server_pool: Any + last_used_server: int + initialize_time: Any + def __init__(self, server_pool) -> None: ... + def refresh(self) -> None: ... + def get_current_server(self): ... + def get_server(self): ... + def find_active_random_server(self): ... + def find_active_server(self, starting): ... + def __len__(self) -> int: ... + +class ServerPool: + servers: Any + pool_states: Any + active: Any + exhaust: Any + single: Any + strategy: Any + def __init__( + self, + servers: Incomplete | None = ..., + pool_strategy=..., + active: bool = ..., + exhaust: bool = ..., + single_state: bool = ..., + ) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, item): ... + def __iter__(self): ... + def add(self, servers) -> None: ... + def remove(self, server) -> None: ... + def initialize(self, connection) -> None: ... + def get_server(self, connection): ... + def get_current_server(self, connection): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/rdns.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/rdns.pyi new file mode 100644 index 00000000..e712f803 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/rdns.pyi @@ -0,0 +1,12 @@ +from typing import Any + +class ReverseDnsSetting: + OFF: Any + REQUIRE_RESOLVE_ALL_ADDRESSES: Any + REQUIRE_RESOLVE_IP_ADDRESSES_ONLY: Any + OPTIONAL_RESOLVE_ALL_ADDRESSES: Any + OPTIONAL_RESOLVE_IP_ADDRESSES_ONLY: Any + SUPPORTED_VALUES: Any + +def get_hostname_by_addr(addr, success_required: bool = ...): ... +def is_ip_addr(addr): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/results.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/results.pyi new file mode 100644 index 00000000..a2772bd1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/results.pyi @@ -0,0 +1,56 @@ +from typing import Any + +RESULT_SUCCESS: int +RESULT_OPERATIONS_ERROR: int +RESULT_PROTOCOL_ERROR: int +RESULT_TIME_LIMIT_EXCEEDED: int +RESULT_SIZE_LIMIT_EXCEEDED: int +RESULT_COMPARE_FALSE: int +RESULT_COMPARE_TRUE: int +RESULT_AUTH_METHOD_NOT_SUPPORTED: int +RESULT_STRONGER_AUTH_REQUIRED: int +RESULT_RESERVED: int +RESULT_REFERRAL: int +RESULT_ADMIN_LIMIT_EXCEEDED: int +RESULT_UNAVAILABLE_CRITICAL_EXTENSION: int +RESULT_CONFIDENTIALITY_REQUIRED: int +RESULT_SASL_BIND_IN_PROGRESS: int +RESULT_NO_SUCH_ATTRIBUTE: int +RESULT_UNDEFINED_ATTRIBUTE_TYPE: int +RESULT_INAPPROPRIATE_MATCHING: int +RESULT_CONSTRAINT_VIOLATION: int +RESULT_ATTRIBUTE_OR_VALUE_EXISTS: int +RESULT_INVALID_ATTRIBUTE_SYNTAX: int +RESULT_NO_SUCH_OBJECT: int +RESULT_ALIAS_PROBLEM: int +RESULT_INVALID_DN_SYNTAX: int +RESULT_ALIAS_DEREFERENCING_PROBLEM: int +RESULT_INAPPROPRIATE_AUTHENTICATION: int +RESULT_INVALID_CREDENTIALS: int +RESULT_INSUFFICIENT_ACCESS_RIGHTS: int +RESULT_BUSY: int +RESULT_UNAVAILABLE: int +RESULT_UNWILLING_TO_PERFORM: int +RESULT_LOOP_DETECTED: int +RESULT_NAMING_VIOLATION: int +RESULT_OBJECT_CLASS_VIOLATION: int +RESULT_NOT_ALLOWED_ON_NON_LEAF: int +RESULT_NOT_ALLOWED_ON_RDN: int +RESULT_ENTRY_ALREADY_EXISTS: int +RESULT_OBJECT_CLASS_MODS_PROHIBITED: int +RESULT_AFFECT_MULTIPLE_DSAS: int +RESULT_OTHER: int +RESULT_LCUP_RESOURCES_EXHAUSTED: int +RESULT_LCUP_SECURITY_VIOLATION: int +RESULT_LCUP_INVALID_DATA: 
int +RESULT_LCUP_UNSUPPORTED_SCHEME: int +RESULT_LCUP_RELOAD_REQUIRED: int +RESULT_CANCELED: int +RESULT_NO_SUCH_OPERATION: int +RESULT_TOO_LATE: int +RESULT_CANNOT_CANCEL: int +RESULT_ASSERTION_FAILED: int +RESULT_AUTHORIZATION_DENIED: int +RESULT_E_SYNC_REFRESH_REQUIRED: int +RESULT_CODES: Any +DO_NOT_RAISE_EXCEPTIONS: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/server.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/server.pyi new file mode 100644 index 00000000..870ddbab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/server.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Literal + +unix_socket_available: bool + +class Server: + ipc: bool + host: Any + port: Any + allowed_referral_hosts: Any + ssl: Any + tls: Any + name: Any + get_info: Any + dit_lock: Any + custom_formatter: Any + custom_validator: Any + current_address: Any + connect_timeout: Any + mode: Any + def __init__( + self, + host: str, + port: int | None = ..., + use_ssl: bool = ..., + allowed_referral_hosts: Incomplete | None = ..., + get_info: Literal["NO_INFO", "DSA", "SCHEMA", "ALL"] = ..., + tls: Incomplete | None = ..., + formatter: Incomplete | None = ..., + connect_timeout: Incomplete | None = ..., + mode: Literal["IP_SYSTEM_DEFAULT", "IP_V4_ONLY", "IP_V6_ONLY", "IP_V4_PREFERRED", "IP_V6_PREFERRED"] = ..., + validator: Incomplete | None = ..., + ) -> None: ... + @property + def address_info(self): ... + def update_availability(self, address, available) -> None: ... + def reset_availability(self) -> None: ... + def check_availability( + self, + source_address: Incomplete | None = ..., + source_port: Incomplete | None = ..., + source_port_list: Incomplete | None = ..., + ): ... + @staticmethod + def next_message_id(): ... + def get_info_from_server(self, connection) -> None: ... + def attach_dsa_info(self, dsa_info: Incomplete | None = ...) -> None: ... + def attach_schema_info(self, dsa_schema: Incomplete | None = ...) -> None: ... + @property + def info(self): ... + @property + def schema(self): ... + @staticmethod + def from_definition( + host, + dsa_info, + dsa_schema, + port: Incomplete | None = ..., + use_ssl: bool = ..., + formatter: Incomplete | None = ..., + validator: Incomplete | None = ..., + ): ... + def candidate_addresses(self): ... + def has_control(self, control): ... + def has_extension(self, extension): ... + def has_feature(self, feature): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/timezone.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/timezone.pyi new file mode 100644 index 00000000..c6c52b37 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/timezone.pyi @@ -0,0 +1,11 @@ +from datetime import tzinfo +from typing import Any + +class OffsetTzInfo(tzinfo): + offset: Any + name: Any + def __init__(self, offset, name) -> None: ... + def utcoffset(self, dt): ... + def tzname(self, dt): ... + def dst(self, dt): ... + def __getinitargs__(self): ... 
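The Server stub above (server.pyi) mirrors ldap3's public Server class. As a point of reference only, and not part of the vendored typeshed data, here is a minimal usage sketch, assuming the real ldap3 package is installed and that a hypothetical directory at ldap.example.com is reachable; Connection comes from the same package.

from ldap3 import ALL, Connection, Server

# get_info=ALL asks the server for its DSA info and schema at bind time,
# which the stub exposes through the `info` and `schema` properties.
server = Server("ldap.example.com", port=389, use_ssl=False, get_info=ALL)
conn = Connection(server, user="cn=admin,dc=example,dc=com", password="secret", auto_bind=True)

print(server.info)    # DSA info gathered during the bind
print(server.schema)  # parsed schema, used for attribute formatting and validation
conn.unbind()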
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/tls.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/tls.pyi new file mode 100644 index 00000000..240c93b2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/tls.pyi @@ -0,0 +1,37 @@ +from _typeshed import Incomplete +from typing import Any + +use_ssl_context: bool + +class Tls: + ssl_options: Any + validate: Any + ca_certs_file: Any + ca_certs_path: Any + ca_certs_data: Any + private_key_password: Any + version: Any + private_key_file: Any + certificate_file: Any + valid_names: Any + ciphers: Any + sni: Any + def __init__( + self, + local_private_key_file: Incomplete | None = ..., + local_certificate_file: Incomplete | None = ..., + validate=..., + version: Incomplete | None = ..., + ssl_options: Incomplete | None = ..., + ca_certs_file: Incomplete | None = ..., + valid_names: Incomplete | None = ..., + ca_certs_path: Incomplete | None = ..., + ca_certs_data: Incomplete | None = ..., + local_private_key_password: Incomplete | None = ..., + ciphers: Incomplete | None = ..., + sni: Incomplete | None = ..., + ) -> None: ... + def wrap_socket(self, connection, do_handshake: bool = ...) -> None: ... + def start_tls(self, connection): ... + +def check_hostname(sock, server_name, additional_names) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/usage.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/usage.pyi new file mode 100644 index 00000000..ccb805bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/core/usage.pyi @@ -0,0 +1,41 @@ +from typing import Any + +class ConnectionUsage: + open_sockets: int + closed_sockets: int + wrapped_sockets: int + bytes_transmitted: int + bytes_received: int + messages_transmitted: int + messages_received: int + operations: int + abandon_operations: int + add_operations: int + bind_operations: int + compare_operations: int + delete_operations: int + extended_operations: int + modify_operations: int + modify_dn_operations: int + search_operations: int + unbind_operations: int + referrals_received: int + referrals_followed: int + referrals_connections: int + restartable_failures: int + restartable_successes: int + servers_from_pool: int + def reset(self) -> None: ... + initial_connection_start_time: Any + open_socket_start_time: Any + connection_stop_time: Any + last_transmitted_time: Any + last_received_time: Any + def __init__(self) -> None: ... + def __iadd__(self, other): ... + def update_transmitted_message(self, message, length) -> None: ... + def update_received_message(self, length) -> None: ... + def start(self, reset: bool = ...) -> None: ... + def stop(self) -> None: ... + @property + def elapsed_time(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/__init__.pyi new file mode 100644 index 00000000..e87b55ee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/__init__.pyi @@ -0,0 +1,106 @@ +from _typeshed import Incomplete +from typing import Any + +class ExtendedOperationContainer: + def __init__(self, connection) -> None: ... 
+ +class StandardExtendedOperations(ExtendedOperationContainer): + def who_am_i(self, controls: Incomplete | None = ...): ... + def modify_password( + self, + user: Incomplete | None = ..., + old_password: Incomplete | None = ..., + new_password: Incomplete | None = ..., + hash_algorithm: Incomplete | None = ..., + salt: Incomplete | None = ..., + controls: Incomplete | None = ..., + ): ... + def paged_search( + self, + search_base, + search_filter, + search_scope=..., + dereference_aliases=..., + attributes: Incomplete | None = ..., + size_limit: int = ..., + time_limit: int = ..., + types_only: bool = ..., + get_operational_attributes: bool = ..., + controls: Incomplete | None = ..., + paged_size: int = ..., + paged_criticality: bool = ..., + generator: bool = ..., + ): ... + def persistent_search( + self, + search_base: str = ..., + search_filter: str = ..., + search_scope=..., + dereference_aliases=..., + attributes=..., + size_limit: int = ..., + time_limit: int = ..., + controls: Incomplete | None = ..., + changes_only: bool = ..., + show_additions: bool = ..., + show_deletions: bool = ..., + show_modifications: bool = ..., + show_dn_modifications: bool = ..., + notifications: bool = ..., + streaming: bool = ..., + callback: Incomplete | None = ..., + ): ... + def funnel_search( + self, + search_base: str = ..., + search_filter: str = ..., + search_scope=..., + dereference_aliases=..., + attributes=..., + size_limit: int = ..., + time_limit: int = ..., + controls: Incomplete | None = ..., + streaming: bool = ..., + callback: Incomplete | None = ..., + ): ... + +class NovellExtendedOperations(ExtendedOperationContainer): + def get_bind_dn(self, controls: Incomplete | None = ...): ... + def get_universal_password(self, user, controls: Incomplete | None = ...): ... + def set_universal_password(self, user, new_password: Incomplete | None = ..., controls: Incomplete | None = ...): ... + def list_replicas(self, server_dn, controls: Incomplete | None = ...): ... + def partition_entry_count(self, partition_dn, controls: Incomplete | None = ...): ... + def replica_info(self, server_dn, partition_dn, controls: Incomplete | None = ...): ... + def start_transaction(self, controls: Incomplete | None = ...): ... + def end_transaction(self, commit: bool = ..., controls: Incomplete | None = ...): ... + def add_members_to_groups(self, members, groups, fix: bool = ..., transaction: bool = ...): ... + def remove_members_from_groups(self, members, groups, fix: bool = ..., transaction: bool = ...): ... + def check_groups_memberships(self, members, groups, fix: bool = ..., transaction: bool = ...): ... + +class MicrosoftExtendedOperations(ExtendedOperationContainer): + def dir_sync( + self, + sync_base, + sync_filter: str = ..., + attributes=..., + cookie: Incomplete | None = ..., + object_security: bool = ..., + ancestors_first: bool = ..., + public_data_only: bool = ..., + incremental_values: bool = ..., + max_length: int = ..., + hex_guid: bool = ..., + ): ... + def modify_password(self, user, new_password, old_password: Incomplete | None = ..., controls: Incomplete | None = ...): ... + def unlock_account(self, user): ... + def add_members_to_groups(self, members, groups, fix: bool = ...): ... + def remove_members_from_groups(self, members, groups, fix: bool = ...): ... + def persistent_search( + self, search_base: str = ..., search_scope=..., attributes=..., streaming: bool = ..., callback: Incomplete | None = ... + ): ... 
+ +class ExtendedOperationsRoot(ExtendedOperationContainer): + standard: Any + novell: Any + microsoft: Any + def __init__(self, connection) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/addMembersToGroups.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/addMembersToGroups.pyi new file mode 100644 index 00000000..486d084f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/addMembersToGroups.pyi @@ -0,0 +1 @@ +def ad_add_members_to_groups(connection, members_dn, groups_dn, fix: bool = ..., raise_error: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi new file mode 100644 index 00000000..68acbfc5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi @@ -0,0 +1,30 @@ +from typing import Any + +class DirSync: + connection: Any + base: Any + filter: Any + attributes: Any + cookie: Any + object_security: Any + ancestors_first: Any + public_data_only: Any + incremental_values: Any + max_length: Any + hex_guid: Any + more_results: bool + def __init__( + self, + connection, + sync_base, + sync_filter, + attributes, + cookie, + object_security, + ancestors_first, + public_data_only, + incremental_values, + max_length, + hex_guid, + ) -> None: ... + def loop(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi new file mode 100644 index 00000000..454a1bd7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def ad_modify_password(connection, user_dn, new_password, old_password, controls: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi new file mode 100644 index 00000000..18c01eb5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from typing import Any + +class ADPersistentSearch: + connection: Any + message_id: Any + base: Any + scope: Any + attributes: Any + controls: Any + filter: str + def __init__(self, connection, search_base, search_scope, attributes, streaming, callback) -> None: ... + def start(self) -> None: ... + def stop(self, unbind: bool = ...) -> None: ... + def next(self, block: bool = ..., timeout: Incomplete | None = ...): ... 
+ def funnel(self, block: bool = ..., timeout: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/removeMembersFromGroups.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/removeMembersFromGroups.pyi new file mode 100644 index 00000000..915fb9db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/removeMembersFromGroups.pyi @@ -0,0 +1 @@ +def ad_remove_members_from_groups(connection, members_dn, groups_dn, fix, raise_error: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi new file mode 100644 index 00000000..8dd6f01e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def ad_unlock_account(connection, user_dn, controls: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/addMembersToGroups.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/addMembersToGroups.pyi new file mode 100644 index 00000000..5ba4cab9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/addMembersToGroups.pyi @@ -0,0 +1 @@ +def edir_add_members_to_groups(connection, members_dn, groups_dn, fix, transaction): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/checkGroupsMemberships.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/checkGroupsMemberships.pyi new file mode 100644 index 00000000..551636c2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/checkGroupsMemberships.pyi @@ -0,0 +1 @@ +def edir_check_groups_memberships(connection, members_dn, groups_dn, fix, transaction): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi new file mode 100644 index 00000000..d0b07c28 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +from ...extend.operation import ExtendedOperation +from ...protocol.novell import EndGroupTypeRequestValue, EndGroupTypeResponseValue + +class EndTransaction(ExtendedOperation): + request_name: str + response_name: str + request_value: EndGroupTypeRequestValue + asn1_spec: EndGroupTypeResponseValue + def config(self) -> None: ... 
+ def __init__(self, connection, commit: bool = ..., controls: Incomplete | None = ...) -> None: ... + def populate_result(self) -> None: ... + response_value: Incomplete + def set_response(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/getBindDn.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/getBindDn.pyi new file mode 100644 index 00000000..3e9e9c40 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/getBindDn.pyi @@ -0,0 +1,10 @@ +from ...extend.operation import ExtendedOperation +from ...protocol.novell import Identity + +class GetBindDn(ExtendedOperation): + request_name: str + response_name: str + response_attribute: str + asn1_spec: Identity + def config(self) -> None: ... + def populate_result(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi new file mode 100644 index 00000000..ec94538a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +from ...extend.operation import ExtendedOperation +from ...protocol.novell import ReplicaList +from ...protocol.rfc4511 import LDAPDN + +class ListReplicas(ExtendedOperation): + request_name: str + response_name: str + request_value: LDAPDN + asn1_spec: ReplicaList + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, server_dn, controls: Incomplete | None = ...) -> None: ... + def populate_result(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi new file mode 100644 index 00000000..bc467cd5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +from ...extend.operation import ExtendedOperation +from ...protocol.novell import NmasGetUniversalPasswordRequestValue, NmasGetUniversalPasswordResponseValue + +class NmasGetUniversalPassword(ExtendedOperation): + request_name: str + response_name: str + request_value: NmasGetUniversalPasswordRequestValue + asn1_spec: NmasGetUniversalPasswordResponseValue + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, user, controls: Incomplete | None = ...) -> None: ... + def populate_result(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi new file mode 100644 index 00000000..de23d84d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +from ...extend.operation import ExtendedOperation +from ...protocol.novell import NmasSetUniversalPasswordRequestValue, NmasSetUniversalPasswordResponseValue + +class NmasSetUniversalPassword(ExtendedOperation): + request_name: str + response_name: str + request_value: NmasSetUniversalPasswordRequestValue + asn1_spec: NmasSetUniversalPasswordResponseValue + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, user, new_password, controls: Incomplete | None = ...) -> None: ... + def populate_result(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi new file mode 100644 index 00000000..cd2e1c07 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +from ...protocol.rfc4511 import LDAPDN +from ..operation import ExtendedOperation + +class PartitionEntryCount(ExtendedOperation): + request_name: str + response_name: str + request_value: LDAPDN + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, partition_dn, controls: Incomplete | None = ...) -> None: ... + def populate_result(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/removeMembersFromGroups.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/removeMembersFromGroups.pyi new file mode 100644 index 00000000..91a3223c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/removeMembersFromGroups.pyi @@ -0,0 +1 @@ +def edir_remove_members_from_groups(connection, members_dn, groups_dn, fix, transaction): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi new file mode 100644 index 00000000..a9405d21 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +from ...protocol.novell import ReplicaInfoRequestValue +from ..operation import ExtendedOperation + +class ReplicaInfo(ExtendedOperation): + request_name: str + response_name: str + request_value: ReplicaInfoRequestValue + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, server_dn, partition_dn, controls: Incomplete | None = ...) -> None: ... + def populate_result(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi new file mode 100644 index 00000000..e013b81a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +from ...extend.operation import ExtendedOperation +from ...protocol.novell import CreateGroupTypeRequestValue, CreateGroupTypeResponseValue + +class StartTransaction(ExtendedOperation): + request_name: str + response_name: str + request_value: CreateGroupTypeRequestValue + asn1_spec: CreateGroupTypeResponseValue + def config(self) -> None: ... + def __init__(self, connection, controls: Incomplete | None = ...) -> None: ... + def populate_result(self) -> None: ... + response_value: Incomplete + def set_response(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/operation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/operation.pyi new file mode 100644 index 00000000..e07b8142 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/operation.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from pyasn1.type.base import Asn1Type + +class ExtendedOperation: + connection: Incomplete + decoded_response: Incomplete | None + result: Incomplete | None + asn1_spec: Asn1Type | None + request_name: Incomplete | None + response_name: Incomplete | None + request_value: Asn1Type | None + response_value: Incomplete | None + response_attribute: Incomplete | None + controls: Incomplete + def __init__(self, connection, controls: Incomplete | None = ...) -> None: ... + def send(self): ... + def populate_result(self) -> None: ... + def decode_response(self, response: Incomplete | None = ...) -> None: ... + def set_response(self) -> None: ... + def config(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi new file mode 100644 index 00000000..bf75db7e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete + +def paged_search_generator( + connection, + search_base, + search_filter, + search_scope=..., + dereference_aliases=..., + attributes: Incomplete | None = ..., + size_limit: int = ..., + time_limit: int = ..., + types_only: bool = ..., + get_operational_attributes: bool = ..., + controls: Incomplete | None = ..., + paged_size: int = ..., + paged_criticality: bool = ..., +) -> None: ... +def paged_search_accumulator( + connection, + search_base, + search_filter, + search_scope=..., + dereference_aliases=..., + attributes: Incomplete | None = ..., + size_limit: int = ..., + time_limit: int = ..., + types_only: bool = ..., + get_operational_attributes: bool = ..., + controls: Incomplete | None = ..., + paged_size: int = ..., + paged_criticality: bool = ..., +): ... 
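The paged_search_generator and paged_search_accumulator helpers stubbed above back ldap3's simplified paged search, reached through conn.extend.standard.paged_search and driven by the simple paged results control (RFC 2696). A minimal sketch, assuming an already-bound Connection named conn and a hypothetical base DN; illustrative only, not part of the vendored data.

# generator=True yields results page by page instead of accumulating them all.
results = conn.extend.standard.paged_search(
    search_base="dc=example,dc=com",
    search_filter="(objectClass=person)",
    attributes=["cn", "mail"],
    paged_size=100,
    generator=True,
)
for item in results:
    # Each item is a raw response dict; only search entries carry dn/attributes.
    if item.get("type") == "searchResEntry":
        print(item["dn"], item["attributes"])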
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi new file mode 100644 index 00000000..bc3c9c08 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi @@ -0,0 +1,37 @@ +from _typeshed import Incomplete +from typing import Any + +class PersistentSearch: + connection: Any + changes_only: Any + notifications: Any + message_id: Any + base: Any + filter: Any + scope: Any + dereference_aliases: Any + attributes: Any + size_limit: Any + time_limit: Any + controls: Any + def __init__( + self, + connection, + search_base, + search_filter, + search_scope, + dereference_aliases, + attributes, + size_limit, + time_limit, + controls, + changes_only, + events_type, + notifications, + streaming, + callback, + ) -> None: ... + def start(self) -> None: ... + def stop(self, unbind: bool = ...) -> None: ... + def next(self, block: bool = ..., timeout: Incomplete | None = ...): ... + def funnel(self, block: bool = ..., timeout: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi new file mode 100644 index 00000000..37698dce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +from ...extend.operation import ExtendedOperation +from ...protocol.rfc3062 import PasswdModifyRequestValue, PasswdModifyResponseValue + +class ModifyPassword(ExtendedOperation): + request_name: str + request_value: PasswdModifyRequestValue + asn1_spec: PasswdModifyResponseValue + response_attribute: str + def config(self) -> None: ... + def __init__( + self, + connection, + user: Incomplete | None = ..., + old_password: Incomplete | None = ..., + new_password: Incomplete | None = ..., + hash_algorithm: Incomplete | None = ..., + salt: Incomplete | None = ..., + controls: Incomplete | None = ..., + ) -> None: ... + def populate_result(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/whoAmI.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/whoAmI.pyi new file mode 100644 index 00000000..e61b175e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/extend/standard/whoAmI.pyi @@ -0,0 +1,7 @@ +from ...extend.operation import ExtendedOperation + +class WhoAmI(ExtendedOperation): + request_name: str + response_attribute: str + def config(self) -> None: ... + def populate_result(self) -> None: ... 
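WhoAmI and ModifyPassword, stubbed above, are exposed at runtime through conn.extend.standard. A short usage sketch, assuming a bound Connection named conn and a hypothetical user DN; illustrative only.

# RFC 4532 "Who am I?": returns the authorization identity of the current bind.
print(conn.extend.standard.who_am_i())

# RFC 3062 password modify; returns True on success, details land in conn.result.
ok = conn.extend.standard.modify_password(
    user="cn=alice,ou=people,dc=example,dc=com",
    old_password="old-secret",
    new_password="new-secret",
)
print(ok, conn.result)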
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/abandon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/abandon.pyi new file mode 100644 index 00000000..2413c214 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/abandon.pyi @@ -0,0 +1,2 @@ +def abandon_operation(msg_id): ... +def abandon_request_to_dict(request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/add.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/add.pyi new file mode 100644 index 00000000..956aa744 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/add.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +def add_operation( + dn, attributes, auto_encode, schema: Incomplete | None = ..., validator: Incomplete | None = ..., check_names: bool = ... +): ... +def add_request_to_dict(request): ... +def add_response_to_dict(response): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/bind.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/bind.pyi new file mode 100644 index 00000000..d6572b7f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/bind.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +def bind_operation( + version, + authentication, + name: str = ..., + password: Incomplete | None = ..., + sasl_mechanism: Incomplete | None = ..., + sasl_credentials: Incomplete | None = ..., + auto_encode: bool = ..., +): ... +def bind_request_to_dict(request): ... +def bind_response_operation( + result_code, + matched_dn: str = ..., + diagnostic_message: str = ..., + referral: Incomplete | None = ..., + server_sasl_credentials: Incomplete | None = ..., +): ... +def bind_response_to_dict(response): ... +def sicily_bind_response_to_dict(response): ... +def bind_response_to_dict_fast(response): ... +def sicily_bind_response_to_dict_fast(response): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/compare.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/compare.pyi new file mode 100644 index 00000000..16003f1c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/compare.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +def compare_operation( + dn, + attribute, + value, + auto_encode, + schema: Incomplete | None = ..., + validator: Incomplete | None = ..., + check_names: bool = ..., +): ... +def compare_request_to_dict(request): ... +def compare_response_to_dict(response): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/delete.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/delete.pyi new file mode 100644 index 00000000..618c8f41 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/delete.pyi @@ -0,0 +1,3 @@ +def delete_operation(dn): ... +def delete_request_to_dict(request): ... +def delete_response_to_dict(response): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/extended.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/extended.pyi new file mode 100644 index 00000000..470849ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/extended.pyi @@ -0,0 +1,14 @@ +from _typeshed import ReadableBuffer + +from pyasn1.type.base import Asn1Item + +from ..protocol.rfc4511 import ExtendedRequest + +def extended_operation( + request_name, request_value: Asn1Item | ReadableBuffer | None = ..., no_encode: bool | None = ... +) -> ExtendedRequest: ... +def extended_request_to_dict(request): ... +def extended_response_to_dict(response): ... +def intermediate_response_to_dict(response): ... +def extended_response_to_dict_fast(response): ... +def intermediate_response_to_dict_fast(response): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/modify.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/modify.pyi new file mode 100644 index 00000000..5094e52d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/modify.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete +from typing import Any + +change_table: Any + +def modify_operation( + dn, changes, auto_encode, schema: Incomplete | None = ..., validator: Incomplete | None = ..., check_names: bool = ... +): ... +def modify_request_to_dict(request): ... +def modify_response_to_dict(response): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/modifyDn.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/modifyDn.pyi new file mode 100644 index 00000000..acf50577 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/modifyDn.pyi @@ -0,0 +1,5 @@ +from _typeshed import Incomplete + +def modify_dn_operation(dn, new_relative_dn, delete_old_rdn: bool = ..., new_superior: Incomplete | None = ...): ... +def modify_dn_request_to_dict(request): ... +def modify_dn_response_to_dict(response): ... 
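The operation modules stubbed so far (add, bind, compare, delete, extended, modify, modifyDn) build the wire-level requests behind the corresponding Connection methods. A minimal sketch of those Connection-level calls, assuming a bound Connection named conn and hypothetical DNs; not part of the patch content.

from ldap3 import MODIFY_REPLACE

dn = "cn=bob,ou=people,dc=example,dc=com"
conn.add(dn, "inetOrgPerson", {"cn": "bob", "sn": "Builder", "mail": "bob@example.com"})
conn.modify(dn, {"mail": [(MODIFY_REPLACE, ["bob@example.org"])]})  # replace one attribute
conn.modify_dn(dn, "cn=robert")  # rename the entry (modifyDn operation)
conn.delete("cn=robert,ou=people,dc=example,dc=com")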
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/search.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/search.pyi new file mode 100644 index 00000000..166e31db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/search.pyi @@ -0,0 +1,68 @@ +from _typeshed import Incomplete +from typing import Any + +ROOT: int +AND: int +OR: int +NOT: int +MATCH_APPROX: int +MATCH_GREATER_OR_EQUAL: int +MATCH_LESS_OR_EQUAL: int +MATCH_EXTENSIBLE: int +MATCH_PRESENT: int +MATCH_SUBSTRING: int +MATCH_EQUAL: int +SEARCH_OPEN: int +SEARCH_OPEN_OR_CLOSE: int +SEARCH_MATCH_OR_CLOSE: int +SEARCH_MATCH_OR_CONTROL: int + +class FilterNode: + tag: Any + parent: Any + assertion: Any + elements: Any + def __init__(self, tag: Incomplete | None = ..., assertion: Incomplete | None = ...) -> None: ... + def __str__(self, pos: int = ...) -> str: ... + def __repr__(self, pos: int = ...) -> str: ... + def append(self, filter_node): ... + +def evaluate_match(match, schema, auto_escape, auto_encode, validator, check_names): ... +def parse_filter(search_filter, schema, auto_escape, auto_encode, validator, check_names): ... +def compile_filter(filter_node): ... +def build_attribute_selection(attribute_list, schema): ... +def search_operation( + search_base, + search_filter, + search_scope, + dereference_aliases, + attributes, + size_limit, + time_limit, + types_only, + auto_escape, + auto_encode, + schema: Incomplete | None = ..., + validator: Incomplete | None = ..., + check_names: bool = ..., +): ... +def decode_vals(vals): ... +def decode_vals_fast(vals): ... +def attributes_to_dict(attribute_list): ... +def attributes_to_dict_fast(attribute_list): ... +def decode_raw_vals(vals): ... +def decode_raw_vals_fast(vals): ... +def raw_attributes_to_dict(attribute_list): ... +def raw_attributes_to_dict_fast(attribute_list): ... +def checked_attributes_to_dict(attribute_list, schema: Incomplete | None = ..., custom_formatter: Incomplete | None = ...): ... +def checked_attributes_to_dict_fast( + attribute_list, schema: Incomplete | None = ..., custom_formatter: Incomplete | None = ... +): ... +def matching_rule_assertion_to_string(matching_rule_assertion): ... +def filter_to_string(filter_object): ... +def search_request_to_dict(request): ... +def search_result_entry_response_to_dict(response, schema, custom_formatter, check_names): ... +def search_result_done_response_to_dict(response): ... +def search_result_reference_response_to_dict(response): ... +def search_result_entry_response_to_dict_fast(response, schema, custom_formatter, check_names): ... +def search_result_reference_response_to_dict_fast(response): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/unbind.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/unbind.pyi new file mode 100644 index 00000000..0c66a79d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/operation/unbind.pyi @@ -0,0 +1 @@ +def unbind_operation(): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/controls.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/controls.pyi new file mode 100644 index 00000000..7c67b9f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/controls.pyi @@ -0,0 +1 @@ +def build_control(oid, criticality, value, encode_control_value: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/convert.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/convert.pyi new file mode 100644 index 00000000..f57fce20 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/convert.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +def to_str_or_normalized_unicode(val): ... +def attribute_to_dict(attribute): ... +def attributes_to_dict(attributes): ... +def referrals_to_list(referrals): ... +def search_refs_to_list(search_refs): ... +def search_refs_to_list_fast(search_refs): ... +def sasl_to_dict(sasl): ... +def authentication_choice_to_dict(authentication_choice): ... +def partial_attribute_to_dict(modification): ... +def change_to_dict(change): ... +def changes_to_list(changes): ... +def attributes_to_list(attributes): ... +def ava_to_dict(ava): ... +def substring_to_dict(substring): ... +def prepare_changes_for_request(changes): ... +def build_controls_list(controls): ... +def validate_assertion_value(schema, name, value, auto_escape, auto_encode, validator, check_names): ... +def validate_attribute_value(schema, name, value, auto_encode, validator: Incomplete | None = ..., check_names: bool = ...): ... +def prepare_filter_for_sending(raw_string): ... +def prepare_for_sending(raw_string): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi new file mode 100644 index 00000000..69c4e1ae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi @@ -0,0 +1,16 @@ +from typing import Any + +def format_unicode(raw_value): ... +def format_integer(raw_value): ... +def format_binary(raw_value): ... +def format_uuid(raw_value): ... +def format_uuid_le(raw_value): ... +def format_boolean(raw_value): ... +def format_ad_timestamp(raw_value): ... + +time_format: Any + +def format_time(raw_value): ... +def format_ad_timedelta(raw_value): ... +def format_time_with_0_year(raw_value): ... +def format_sid(raw_value): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/standard.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/standard.pyi new file mode 100644 index 00000000..f85dd648 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/standard.pyi @@ -0,0 +1,7 @@ +from typing import Any + +standard_formatter: Any + +def find_attribute_helpers(attr_type, name, custom_formatter): ... +def format_attribute_values(schema, name, values, custom_formatter): ... +def find_attribute_validator(schema, name, custom_validator): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/validators.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/validators.pyi new file mode 100644 index 00000000..b49eee62 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/formatters/validators.pyi @@ -0,0 +1,16 @@ +def check_backslash(value): ... +def check_type(input_value, value_type): ... +def always_valid(input_value): ... +def validate_generic_single_value(input_value): ... +def validate_zero_and_minus_one_and_positive_int(input_value): ... +def validate_integer(input_value): ... +def validate_bytes(input_value): ... +def validate_boolean(input_value): ... +def validate_time_with_0_year(input_value): ... +def validate_time(input_value): ... +def validate_ad_timestamp(input_value): ... +def validate_ad_timedelta(input_value): ... +def validate_guid(input_value): ... +def validate_uuid(input_value): ... +def validate_uuid_le(input_value): ... +def validate_sid(input_value): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/microsoft.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/microsoft.pyi new file mode 100644 index 00000000..c2961a7d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/microsoft.pyi @@ -0,0 +1,25 @@ +from pyasn1.type.namedtype import NamedTypes +from pyasn1.type.tag import TagSet +from pyasn1.type.univ import Sequence + +class SicilyBindResponse(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class DirSyncControlRequestValue(Sequence): + componentType: NamedTypes + +class DirSyncControlResponseValue(Sequence): + componentType: NamedTypes + +class SdFlags(Sequence): + componentType: NamedTypes + +class ExtendedDN(Sequence): + componentType: NamedTypes + +def dir_sync_control(criticality, object_security, ancestors_first, public_data_only, incremental_values, max_length, cookie): ... +def extended_dn_control(criticality: bool = ..., hex_format: bool = ...): ... +def show_deleted_control(criticality: bool = ...): ... +def security_descriptor_control(criticality: bool = ..., sdflags: int = ...): ... +def persistent_search_control(criticality: bool = ...): ... 
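The Active Directory control helpers stubbed just above in microsoft.pyi are normally passed to Connection.search through its controls argument. A sketch under the assumption that security_descriptor_control returns a ready-to-use controls list, as recent ldap3 releases do; conn and the base DN are hypothetical.

from ldap3.protocol.microsoft import security_descriptor_control

# Request only the DACL portion of nTSecurityDescriptor (SDFlags = 0x04).
controls = security_descriptor_control(sdflags=0x04)
conn.search(
    "dc=example,dc=com",
    "(objectClass=user)",
    attributes=["nTSecurityDescriptor"],
    controls=controls,
)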
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/novell.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/novell.pyi new file mode 100644 index 00000000..bfa4a1cd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/novell.pyi @@ -0,0 +1,67 @@ +from pyasn1.type.namedtype import NamedTypes +from pyasn1.type.tag import TagSet +from pyasn1.type.univ import Integer, OctetString, Sequence, SequenceOf + +NMAS_LDAP_EXT_VERSION: int + +class Identity(OctetString): + encoding: str + +class LDAPDN(OctetString): + tagSet: TagSet + encoding: str + +class Password(OctetString): + tagSet: TagSet + encoding: str + +class LDAPOID(OctetString): + tagSet: TagSet + encoding: str + +class GroupCookie(Integer): + tagSet: TagSet + +class NmasVer(Integer): + tagSet: TagSet + +class Error(Integer): + tagSet: TagSet + +class NmasGetUniversalPasswordRequestValue(Sequence): + componentType: NamedTypes + +class NmasGetUniversalPasswordResponseValue(Sequence): + componentType: NamedTypes + +class NmasSetUniversalPasswordRequestValue(Sequence): + componentType: NamedTypes + +class NmasSetUniversalPasswordResponseValue(Sequence): + componentType: NamedTypes + +class ReplicaList(SequenceOf): + componentType: OctetString # type: ignore[assignment] + +class ReplicaInfoRequestValue(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class ReplicaInfoResponseValue(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class CreateGroupTypeRequestValue(Sequence): + componentType: NamedTypes + +class CreateGroupTypeResponseValue(Sequence): + componentType: NamedTypes + +class EndGroupTypeRequestValue(Sequence): + componentType: NamedTypes + +class EndGroupTypeResponseValue(Sequence): + componentType: NamedTypes + +class GroupingControlValue(Sequence): + componentType: NamedTypes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/oid.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/oid.pyi new file mode 100644 index 00000000..77efddad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/oid.pyi @@ -0,0 +1,29 @@ +from typing import Any + +OID_CONTROL: str +OID_EXTENSION: str +OID_FEATURE: str +OID_UNSOLICITED_NOTICE: str +OID_ATTRIBUTE_TYPE: str +OID_DIT_CONTENT_RULE: str +OID_LDAP_URL_EXTENSION: str +OID_FAMILY: str +OID_MATCHING_RULE: str +OID_NAME_FORM: str +OID_OBJECT_CLASS: str +OID_ADMINISTRATIVE_ROLE: str +OID_LDAP_SYNTAX: str +CLASS_STRUCTURAL: str +CLASS_ABSTRACT: str +CLASS_AUXILIARY: str +ATTRIBUTE_USER_APPLICATION: str +ATTRIBUTE_DIRECTORY_OPERATION: str +ATTRIBUTE_DISTRIBUTED_OPERATION: str +ATTRIBUTE_DSA_OPERATION: str + +def constant_to_oid_kind(oid_kind): ... +def decode_oids(sequence): ... +def decode_syntax(syntax): ... +def oid_to_string(oid): ... 
+ +Oids: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/persistentSearch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/persistentSearch.pyi new file mode 100644 index 00000000..c584d407 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/persistentSearch.pyi @@ -0,0 +1,14 @@ +from pyasn1.type.namedtype import NamedTypes +from pyasn1.type.namedval import NamedValues +from pyasn1.type.univ import Enumerated, Sequence + +class PersistentSearchControl(Sequence): + componentType: NamedTypes + +class ChangeType(Enumerated): + namedValues: NamedValues + +class EntryChangeNotificationControl(Sequence): + componentType: NamedTypes + +def persistent_search_control(change_types, changes_only: bool = ..., return_ecs: bool = ..., criticality: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc2696.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc2696.pyi new file mode 100644 index 00000000..28159c27 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc2696.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete +from typing_extensions import Final + +from pyasn1.type.constraint import ConstraintsIntersection, ValueRangeConstraint +from pyasn1.type.namedtype import NamedTypes +from pyasn1.type.univ import Integer, OctetString, Sequence + +MAXINT: Final[Integer] +rangeInt0ToMaxConstraint: ValueRangeConstraint + +class Integer0ToMax(Integer): + subtypeSpec: ConstraintsIntersection + +class Size(Integer0ToMax): ... +class Cookie(OctetString): ... + +class RealSearchControlValue(Sequence): + componentType: NamedTypes + +def paged_search_control(criticality: bool = ..., size: int = ..., cookie: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc2849.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc2849.pyi new file mode 100644 index 00000000..4859b42a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc2849.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete +from typing import Any + +conf_ldif_line_length: Any + +def safe_ldif_string(bytes_value): ... +def add_controls(controls, all_base64): ... +def add_attributes(attributes, all_base64): ... +def sort_ldif_lines(lines, sort_order): ... +def search_response_to_ldif(entries, all_base64, sort_order: Incomplete | None = ...): ... +def add_request_to_ldif(entry, all_base64, sort_order: Incomplete | None = ...): ... +def delete_request_to_ldif(entry, all_base64, sort_order: Incomplete | None = ...): ... +def modify_request_to_ldif(entry, all_base64, sort_order: Incomplete | None = ...): ... +def modify_dn_request_to_ldif(entry, all_base64, sort_order: Incomplete | None = ...): ... +def operation_to_ldif(operation_type, entries, all_base64: bool = ..., sort_order: Incomplete | None = ...): ... +def add_ldif_header(ldif_lines): ... +def ldif_sort(line, sort_order): ... +def decode_persistent_search_control(change): ... +def persistent_search_response_to_ldif(change): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc3062.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc3062.pyi new file mode 100644 index 00000000..4a97a374 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc3062.pyi @@ -0,0 +1,25 @@ +from pyasn1.type.namedtype import NamedTypes +from pyasn1.type.tag import TagSet +from pyasn1.type.univ import OctetString, Sequence + +class UserIdentity(OctetString): + tagSet: TagSet + encoding: str + +class OldPasswd(OctetString): + tagSet: TagSet + encoding: str + +class NewPasswd(OctetString): + tagSet: TagSet + encoding: str + +class GenPasswd(OctetString): + tagSet: TagSet + encoding: str + +class PasswdModifyRequestValue(Sequence): + componentType: NamedTypes + +class PasswdModifyResponseValue(Sequence): + componentType: NamedTypes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc4511.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc4511.pyi new file mode 100644 index 00000000..7dcd3f58 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc4511.pyi @@ -0,0 +1,321 @@ +# Alias the import to avoid name clash with a class called "Final" +from typing_extensions import Final as _Final + +from pyasn1.type.constraint import ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint +from pyasn1.type.namedtype import NamedTypes +from pyasn1.type.namedval import NamedValues +from pyasn1.type.tag import TagSet +from pyasn1.type.univ import Boolean, Choice, Enumerated, Integer, Null, OctetString, Sequence, SequenceOf, SetOf + +LDAP_MAX_INT: _Final[int] +MAXINT: _Final[Integer] +rangeInt0ToMaxConstraint: ValueRangeConstraint +rangeInt1To127Constraint: ValueRangeConstraint +size1ToMaxConstraint: ValueSizeConstraint +responseValueConstraint: SingleValueConstraint +# Custom constraints. They have yet to be implemented so ldap3 keeps them as None. +numericOIDConstraint: None +distinguishedNameConstraint: None +nameComponentConstraint: None +attributeDescriptionConstraint: None +uriConstraint: None +attributeSelectorConstraint: None + +class Integer0ToMax(Integer): + subtypeSpec: ConstraintsIntersection + +class LDAPString(OctetString): + encoding: str + +class MessageID(Integer0ToMax): ... +class LDAPOID(OctetString): ... +class LDAPDN(LDAPString): ... +class RelativeLDAPDN(LDAPString): ... +class AttributeDescription(LDAPString): ... + +class AttributeValue(OctetString): + encoding: str + +class AssertionValue(OctetString): + encoding: str + +class AttributeValueAssertion(Sequence): + componentType: NamedTypes + +class MatchingRuleId(LDAPString): ... 
+ +class Vals(SetOf): + componentType: AttributeValue # type: ignore[assignment] + +class ValsAtLeast1(SetOf): + componentType: AttributeValue # type: ignore[assignment] + subtypeSpec: ConstraintsIntersection + +class PartialAttribute(Sequence): + componentType: NamedTypes + +class Attribute(Sequence): + componentType: NamedTypes + +class AttributeList(SequenceOf): + componentType: Attribute # type: ignore[assignment] + +class Simple(OctetString): + tagSet: TagSet + encoding: str + +class Credentials(OctetString): + encoding: str + +class SaslCredentials(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class SicilyPackageDiscovery(OctetString): + tagSet: TagSet + encoding: str + +class SicilyNegotiate(OctetString): + tagSet: TagSet + encoding: str + +class SicilyResponse(OctetString): + tagSet: TagSet + encoding: str + +class AuthenticationChoice(Choice): + componentType: NamedTypes + +class Version(Integer): + subtypeSpec: ConstraintsIntersection + +class ResultCode(Enumerated): + namedValues: NamedValues + subTypeSpec: ConstraintsIntersection + +class URI(LDAPString): ... + +class Referral(SequenceOf): + tagSet: TagSet + componentType: URI # type: ignore[assignment] + +class ServerSaslCreds(OctetString): + tagSet: TagSet + encoding: str + +class LDAPResult(Sequence): + componentType: NamedTypes + +class Criticality(Boolean): + defaultValue: bool + +class ControlValue(OctetString): + encoding: str + +class Control(Sequence): + componentType: NamedTypes + +class Controls(SequenceOf): + tagSet: TagSet + componentType: Control # type: ignore[assignment] + +class Scope(Enumerated): + namedValues: NamedValues + +class DerefAliases(Enumerated): + namedValues: NamedValues + +class TypesOnly(Boolean): ... +class Selector(LDAPString): ... + +class AttributeSelection(SequenceOf): + componentType: Selector # type: ignore[assignment] + +class MatchingRule(MatchingRuleId): + tagSet: TagSet + +class Type(AttributeDescription): + tagSet: TagSet + +class MatchValue(AssertionValue): + tagSet: TagSet + +class DnAttributes(Boolean): + tagSet: TagSet + defaultValue: Boolean + +class MatchingRuleAssertion(Sequence): + componentType: NamedTypes + +class Initial(AssertionValue): + tagSet: TagSet + +class Any(AssertionValue): + tagSet: TagSet + +class Final(AssertionValue): + tagSet: TagSet + +class Substring(Choice): + componentType: NamedTypes + +class Substrings(SequenceOf): + subtypeSpec: ConstraintsIntersection + componentType: Substring # type: ignore[assignment] + +class SubstringFilter(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class And(SetOf): + tagSet: TagSet + subtypeSpec: ConstraintsIntersection + componentType: Filter # type: ignore[assignment] + +class Or(SetOf): + tagSet: TagSet + subtypeSpec: ConstraintsIntersection + componentType: Filter # type: ignore[assignment] + +class Not(Choice): ... 
+ +class EqualityMatch(AttributeValueAssertion): + tagSet: TagSet + +class GreaterOrEqual(AttributeValueAssertion): + tagSet: TagSet + +class LessOrEqual(AttributeValueAssertion): + tagSet: TagSet + +class Present(AttributeDescription): + tagSet: TagSet + +class ApproxMatch(AttributeValueAssertion): + tagSet: TagSet + +class ExtensibleMatch(MatchingRuleAssertion): + tagSet: TagSet + +class Filter(Choice): + componentType: NamedTypes + +class PartialAttributeList(SequenceOf): + componentType: PartialAttribute # type: ignore[assignment] + +class Operation(Enumerated): + namedValues: NamedValues + +class Change(Sequence): + componentType: NamedTypes + +class Changes(SequenceOf): + componentType: Change # type: ignore[assignment] + +class DeleteOldRDN(Boolean): ... + +class NewSuperior(LDAPDN): + tagSet: TagSet + +class RequestName(LDAPOID): + tagSet: TagSet + +class RequestValue(OctetString): + tagSet: TagSet + encoding: str + +class ResponseName(LDAPOID): + tagSet: TagSet + +class ResponseValue(OctetString): + tagSet: TagSet + encoding: str + +class IntermediateResponseName(LDAPOID): + tagSet: TagSet + +class IntermediateResponseValue(OctetString): + tagSet: TagSet + encoding: str + +class BindRequest(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class BindResponse(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class UnbindRequest(Null): + tagSet: TagSet + +class SearchRequest(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class SearchResultReference(SequenceOf): + tagSet: TagSet + subtypeSpec: ConstraintsIntersection + componentType: URI # type: ignore[assignment] + +class SearchResultEntry(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class SearchResultDone(LDAPResult): + tagSet: TagSet + +class ModifyRequest(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class ModifyResponse(LDAPResult): + tagSet: TagSet + +class AddRequest(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class AddResponse(LDAPResult): + tagSet: TagSet + +class DelRequest(LDAPDN): + tagSet: TagSet + +class DelResponse(LDAPResult): + tagSet: TagSet + +class ModifyDNRequest(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class ModifyDNResponse(LDAPResult): + tagSet: TagSet + +class CompareRequest(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class CompareResponse(LDAPResult): + tagSet: TagSet + +class AbandonRequest(MessageID): + tagSet: TagSet + +class ExtendedRequest(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class ExtendedResponse(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class IntermediateResponse(Sequence): + tagSet: TagSet + componentType: NamedTypes + +class ProtocolOp(Choice): + componentType: NamedTypes + +class LDAPMessage(Sequence): + componentType: NamedTypes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc4512.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc4512.pyi new file mode 100644 index 00000000..c47cdfc1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc4512.pyi @@ -0,0 +1,219 @@ +from _typeshed import Incomplete +from typing import Any + +def constant_to_class_kind(value): ... +def constant_to_attribute_usage(value): ... +def attribute_usage_to_constant(value): ... +def quoted_string_to_list(quoted_string): ... +def oids_string_to_list(oid_string): ... 
+def extension_to_tuple(extension_string): ... +def list_to_string(list_object): ... + +class BaseServerInfo: + raw: Any + def __init__(self, raw_attributes) -> None: ... + @classmethod + def from_json(cls, json_definition, schema: Incomplete | None = ..., custom_formatter: Incomplete | None = ...): ... + @classmethod + def from_file(cls, target, schema: Incomplete | None = ..., custom_formatter: Incomplete | None = ...): ... + def to_file(self, target, indent: int = ..., sort: bool = ...) -> None: ... + def to_json(self, indent: int = ..., sort: bool = ...): ... + +class DsaInfo(BaseServerInfo): + alt_servers: Any + naming_contexts: Any + supported_controls: Any + supported_extensions: Any + supported_features: Any + supported_ldap_versions: Any + supported_sasl_mechanisms: Any + vendor_name: Any + vendor_version: Any + schema_entry: Any + other: Any + def __init__(self, attributes, raw_attributes) -> None: ... + +class SchemaInfo(BaseServerInfo): + schema_entry: Any + create_time_stamp: Any + modify_time_stamp: Any + attribute_types: Any + object_classes: Any + matching_rules: Any + matching_rule_uses: Any + dit_content_rules: Any + dit_structure_rules: Any + name_forms: Any + ldap_syntaxes: Any + other: Any + def __init__(self, schema_entry, attributes, raw_attributes) -> None: ... + def is_valid(self): ... + +class BaseObjectInfo: + oid: Any + name: Any + description: Any + obsolete: Any + extensions: Any + experimental: Any + raw_definition: Any + def __init__( + self, + oid: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + obsolete: bool = ..., + extensions: Incomplete | None = ..., + experimental: Incomplete | None = ..., + definition: Incomplete | None = ..., + ) -> None: ... + @property + def oid_info(self): ... + @classmethod + def from_definition(cls, definitions): ... + +class MatchingRuleInfo(BaseObjectInfo): + syntax: Any + def __init__( + self, + oid: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + obsolete: bool = ..., + syntax: Incomplete | None = ..., + extensions: Incomplete | None = ..., + experimental: Incomplete | None = ..., + definition: Incomplete | None = ..., + ) -> None: ... + +class MatchingRuleUseInfo(BaseObjectInfo): + apply_to: Any + def __init__( + self, + oid: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + obsolete: bool = ..., + apply_to: Incomplete | None = ..., + extensions: Incomplete | None = ..., + experimental: Incomplete | None = ..., + definition: Incomplete | None = ..., + ) -> None: ... + +class ObjectClassInfo(BaseObjectInfo): + superior: Any + kind: Any + must_contain: Any + may_contain: Any + def __init__( + self, + oid: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + obsolete: bool = ..., + superior: Incomplete | None = ..., + kind: Incomplete | None = ..., + must_contain: Incomplete | None = ..., + may_contain: Incomplete | None = ..., + extensions: Incomplete | None = ..., + experimental: Incomplete | None = ..., + definition: Incomplete | None = ..., + ) -> None: ... 
+ +class AttributeTypeInfo(BaseObjectInfo): + superior: Any + equality: Any + ordering: Any + substring: Any + syntax: Any + min_length: Any + single_value: Any + collective: Any + no_user_modification: Any + usage: Any + mandatory_in: Any + optional_in: Any + def __init__( + self, + oid: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + obsolete: bool = ..., + superior: Incomplete | None = ..., + equality: Incomplete | None = ..., + ordering: Incomplete | None = ..., + substring: Incomplete | None = ..., + syntax: Incomplete | None = ..., + min_length: Incomplete | None = ..., + single_value: bool = ..., + collective: bool = ..., + no_user_modification: bool = ..., + usage: Incomplete | None = ..., + extensions: Incomplete | None = ..., + experimental: Incomplete | None = ..., + definition: Incomplete | None = ..., + ) -> None: ... + +class LdapSyntaxInfo(BaseObjectInfo): + def __init__( + self, + oid: Incomplete | None = ..., + description: Incomplete | None = ..., + extensions: Incomplete | None = ..., + experimental: Incomplete | None = ..., + definition: Incomplete | None = ..., + ) -> None: ... + +class DitContentRuleInfo(BaseObjectInfo): + auxiliary_classes: Any + must_contain: Any + may_contain: Any + not_contains: Any + def __init__( + self, + oid: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + obsolete: bool = ..., + auxiliary_classes: Incomplete | None = ..., + must_contain: Incomplete | None = ..., + may_contain: Incomplete | None = ..., + not_contains: Incomplete | None = ..., + extensions: Incomplete | None = ..., + experimental: Incomplete | None = ..., + definition: Incomplete | None = ..., + ) -> None: ... + +class DitStructureRuleInfo(BaseObjectInfo): + superior: Any + name_form: Any + def __init__( + self, + oid: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + obsolete: bool = ..., + name_form: Incomplete | None = ..., + superior: Incomplete | None = ..., + extensions: Incomplete | None = ..., + experimental: Incomplete | None = ..., + definition: Incomplete | None = ..., + ) -> None: ... + +class NameFormInfo(BaseObjectInfo): + object_class: Any + must_contain: Any + may_contain: Any + def __init__( + self, + oid: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + obsolete: bool = ..., + object_class: Incomplete | None = ..., + must_contain: Incomplete | None = ..., + may_contain: Incomplete | None = ..., + extensions: Incomplete | None = ..., + experimental: Incomplete | None = ..., + definition: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc4527.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc4527.pyi new file mode 100644 index 00000000..eacc393c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/rfc4527.pyi @@ -0,0 +1,2 @@ +def pre_read_control(attributes, criticality: bool = ...): ... +def post_read_control(attributes, criticality: bool = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/digestMd5.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/digestMd5.pyi new file mode 100644 index 00000000..433416f2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/digestMd5.pyi @@ -0,0 +1,9 @@ +STATE_KEY: int +STATE_VALUE: int + +def md5_h(value): ... +def md5_kd(k, s): ... +def md5_hex(value): ... +def md5_hmac(k, s): ... +def sasl_digest_md5(connection, controls): ... +def decode_directives(directives_string): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/external.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/external.pyi new file mode 100644 index 00000000..8403ee79 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/external.pyi @@ -0,0 +1 @@ +def sasl_external(connection, controls): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/kerberos.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/kerberos.pyi new file mode 100644 index 00000000..7a795b21 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/kerberos.pyi @@ -0,0 +1,8 @@ +posix_gssapi_unavailable: bool +windows_gssapi_unavailable: bool +NO_SECURITY_LAYER: int +INTEGRITY_PROTECTION: int +CONFIDENTIALITY_PROTECTION: int + +def get_channel_bindings(ssl_socket): ... +def sasl_gssapi(connection, controls): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/plain.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/plain.pyi new file mode 100644 index 00000000..5be879c5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/plain.pyi @@ -0,0 +1 @@ +def sasl_plain(connection, controls): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/sasl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/sasl.pyi new file mode 100644 index 00000000..564c26ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/sasl/sasl.pyi @@ -0,0 +1,5 @@ +def sasl_prep(data): ... +def validate_simple_password(password, accept_empty: bool = ...): ... +def abort_sasl_negotiation(connection, controls): ... +def send_sasl_negotiation(connection, controls, payload): ... +def random_hex_string(size): ... 
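For orientation, the md5_* helpers declared in digestMd5.pyi above correspond to the DIGEST-MD5 primitives from RFC 2831. The sketch below is illustrative only and is not ldap3's actual implementation (which lives in ldap3/protocol/sasl/digestMd5.py and may differ in encoding details); the function names here are shortened for clarity.

# Rough sketch of the RFC 2831 primitives behind the md5_* helpers stubbed above.
import hashlib
import hmac

def h(value: bytes) -> bytes:
    # H(s) = MD5(s), the 16-octet digest
    return hashlib.md5(value).digest()

def kd(k: bytes, s: bytes) -> bytes:
    # KD(k, s) = H(k ":" s)
    return h(k + b":" + s)

def hmac_md5(k: bytes, s: bytes) -> bytes:
    # HMAC-MD5, as used for the DIGEST-MD5 integrity layer
    return hmac.new(k, s, hashlib.md5).digest()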
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/ad2012R2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/ad2012R2.pyi new file mode 100644 index 00000000..3f484293 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/ad2012R2.pyi @@ -0,0 +1,2 @@ +ad_2012_r2_schema: str +ad_2012_r2_dsa_info: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/ds389.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/ds389.pyi new file mode 100644 index 00000000..4d90cdc5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/ds389.pyi @@ -0,0 +1,2 @@ +ds389_1_3_3_schema: str +ds389_1_3_3_dsa_info: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/edir888.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/edir888.pyi new file mode 100644 index 00000000..5b982a48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/edir888.pyi @@ -0,0 +1,2 @@ +edir_8_8_8_schema: str +edir_8_8_8_dsa_info: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/edir914.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/edir914.pyi new file mode 100644 index 00000000..d7c9cf64 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/edir914.pyi @@ -0,0 +1,2 @@ +edir_9_1_4_schema: str +edir_9_1_4_dsa_info: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/slapd24.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/slapd24.pyi new file mode 100644 index 00000000..c080d082 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/protocol/schemas/slapd24.pyi @@ -0,0 +1,2 @@ +slapd_2_4_schema: str +slapd_2_4_dsa_info: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/asyncStream.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/asyncStream.pyi new file mode 100644 index 00000000..b0ed1b4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/asyncStream.pyi @@ -0,0 +1,18 @@ +from typing import Any + +from ..strategy.asynchronous import AsyncStrategy + +class 
AsyncStreamStrategy(AsyncStrategy): + can_stream: bool + line_separator: Any + all_base64: bool + stream: Any + order: Any + persistent_search_message_id: Any + streaming: bool + callback: Any + events: Any + def __init__(self, ldap_connection) -> None: ... + def accumulate_stream(self, message_id, change) -> None: ... + def get_stream(self): ... + def set_stream(self, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/asynchronous.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/asynchronous.pyi new file mode 100644 index 00000000..ecc746f8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/asynchronous.pyi @@ -0,0 +1,27 @@ +from threading import Thread +from typing import Any + +from ..strategy.base import BaseStrategy + +class AsyncStrategy(BaseStrategy): + class ReceiverSocketThread(Thread): + connection: Any + socket_size: Any + def __init__(self, ldap_connection) -> None: ... + def run(self) -> None: ... + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + receiver: Any + async_lock: Any + event_lock: Any + def __init__(self, ldap_connection) -> None: ... + def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... + def close(self) -> None: ... + def set_event_for_message(self, message_id) -> None: ... + def post_send_search(self, message_id): ... + def post_send_single_response(self, message_id): ... + def receiving(self) -> None: ... + def get_stream(self) -> None: ... + def set_stream(self, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/base.pyi new file mode 100644 index 00000000..e324089b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/base.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete +from typing import Any + +unix_socket_available: bool +SESSION_TERMINATED_BY_SERVER: str +TRANSACTION_ERROR: str +RESPONSE_COMPLETE: str + +class BaseStrategy: + connection: Any + sync: Any + no_real_dsa: Any + pooled: Any + can_stream: Any + referral_cache: Any + thread_safe: bool + def __init__(self, ldap_connection) -> None: ... + def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... + def close(self) -> None: ... + def send(self, message_type, request, controls: Incomplete | None = ...): ... + def get_response(self, message_id, timeout: Incomplete | None = ..., get_request: bool = ...): ... + @staticmethod + def compute_ldap_message_size(data): ... + def decode_response(self, ldap_message): ... + def decode_response_fast(self, ldap_message): ... + @staticmethod + def decode_control(control): ... + @staticmethod + def decode_control_fast(control, from_server: bool = ...): ... + @staticmethod + def decode_request(message_type, component, controls: Incomplete | None = ...): ... + def valid_referral_list(self, referrals): ... + def do_next_range_search(self, request, response, attr_name): ... + def do_search_on_auto_range(self, request, response): ... + def create_referral_connection(self, referrals): ... + def do_operation_on_referral(self, request, referrals): ... + def sending(self, ldap_message) -> None: ... + def receiving(self) -> None: ... 
+ def post_send_single_response(self, message_id) -> None: ... + def post_send_search(self, message_id) -> None: ... + def get_stream(self) -> None: ... + def set_stream(self, value) -> None: ... + def unbind_referral_cache(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/ldifProducer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/ldifProducer.pyi new file mode 100644 index 00000000..a6aa78fa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/ldifProducer.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseStrategy + +class LdifProducerStrategy(BaseStrategy): + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + line_separator: Any + all_base64: bool + stream: Any + order: Any + def __init__(self, ldap_connection) -> None: ... + def receiving(self) -> None: ... + def send(self, message_type, request, controls: Incomplete | None = ...): ... + def post_send_single_response(self, message_id): ... + def post_send_search(self, message_id) -> None: ... + def accumulate_stream(self, fragment) -> None: ... + def get_stream(self): ... + def set_stream(self, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/mockAsync.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/mockAsync.pyi new file mode 100644 index 00000000..042511bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/mockAsync.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from typing import Any + +from .asynchronous import AsyncStrategy +from .mockBase import MockBaseStrategy + +class MockAsyncStrategy(MockBaseStrategy, AsyncStrategy): + def __init__(self, ldap_connection) -> None: ... + def post_send_search(self, payload): ... + bound: Any + def post_send_single_response(self, payload): ... + def get_response(self, message_id, timeout: Incomplete | None = ..., get_request: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/mockBase.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/mockBase.pyi new file mode 100644 index 00000000..8daea2cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/mockBase.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete +from typing import Any + +SEARCH_CONTROLS: Any +SERVER_ENCODING: str + +def random_cookie(): ... + +class PagedSearchSet: + size: Any + response: Any + cookie: Any + sent: int + done: bool + def __init__(self, response, size, criticality) -> None: ... + def next(self, size: Incomplete | None = ...): ... + +class MockBaseStrategy: + entries: Any + no_real_dsa: bool + bound: Any + custom_validators: Any + operational_attributes: Any + def __init__(self) -> None: ... + def add_entry(self, dn, attributes, validate: bool = ...): ... + def remove_entry(self, dn): ... + def entries_from_json(self, json_entry_file) -> None: ... + def mock_bind(self, request_message, controls): ... + def mock_delete(self, request_message, controls): ... + def mock_add(self, request_message, controls): ... + def mock_compare(self, request_message, controls): ... 
+ def mock_modify_dn(self, request_message, controls): ... + def mock_modify(self, request_message, controls): ... + def mock_search(self, request_message, controls): ... + def mock_extended(self, request_message, controls): ... + def evaluate_filter_node(self, node, candidates): ... + def equal(self, dn, attribute_type, value_to_check): ... + def send(self, message_type, request, controls: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/mockSync.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/mockSync.pyi new file mode 100644 index 00000000..dcfd12d8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/mockSync.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from .mockBase import MockBaseStrategy +from .sync import SyncStrategy + +class MockSyncStrategy(MockBaseStrategy, SyncStrategy): + def __init__(self, ldap_connection) -> None: ... + def post_send_search(self, payload): ... + bound: Any + def post_send_single_response(self, payload): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/restartable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/restartable.pyi new file mode 100644 index 00000000..e0d02e85 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/restartable.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete +from typing import Any + +from .sync import SyncStrategy + +class RestartableStrategy(SyncStrategy): + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + restartable_sleep_time: Any + restartable_tries: Any + exception_history: Any + def __init__(self, ldap_connection) -> None: ... + def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... + def send(self, message_type, request, controls: Incomplete | None = ...): ... + def post_send_single_response(self, message_id): ... + def post_send_search(self, message_id): ... + def get_stream(self) -> None: ... + def set_stream(self, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/reusable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/reusable.pyi new file mode 100644 index 00000000..cd900b1d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/reusable.pyi @@ -0,0 +1,75 @@ +from _typeshed import Incomplete +from threading import Thread +from typing import Any + +from .base import BaseStrategy + +TERMINATE_REUSABLE: str +BOGUS_BIND: int +BOGUS_UNBIND: int +BOGUS_EXTENDED: int +BOGUS_ABANDON: int + +class ReusableStrategy(BaseStrategy): + pools: Any + def receiving(self) -> None: ... + def get_stream(self) -> None: ... + def set_stream(self, value) -> None: ... + + class ConnectionPool: + def __new__(cls, connection): ... + name: Any + master_connection: Any + workers: Any + pool_size: Any + lifetime: Any + keepalive: Any + request_queue: Any + open_pool: bool + bind_pool: bool + tls_pool: bool + counter: int + terminated_usage: Any + terminated: bool + pool_lock: Any + started: bool + def __init__(self, connection) -> None: ... + def get_info_from_server(self) -> None: ... + def rebind_pool(self) -> None: ... 
+ def start_pool(self): ... + def create_pool(self) -> None: ... + def terminate_pool(self) -> None: ... + + class PooledConnectionThread(Thread): + daemon: bool + worker: Any + master_connection: Any + def __init__(self, worker, master_connection) -> None: ... + def run(self) -> None: ... + + class PooledConnectionWorker: + master_connection: Any + request_queue: Any + running: bool + busy: bool + get_info_from_server: bool + connection: Any + creation_time: Any + task_counter: int + thread: Any + worker_lock: Any + def __init__(self, connection, request_queue) -> None: ... + def new_connection(self) -> None: ... + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + pool: Any + def __init__(self, ldap_connection) -> None: ... + def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... + def terminate(self) -> None: ... + def send(self, message_type, request, controls: Incomplete | None = ...): ... + def validate_bind(self, controls): ... + def get_response(self, counter, timeout: Incomplete | None = ..., get_request: bool = ...): ... + def post_send_single_response(self, counter): ... + def post_send_search(self, counter): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/safeRestartable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/safeRestartable.pyi new file mode 100644 index 00000000..b52aa5a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/safeRestartable.pyi @@ -0,0 +1,5 @@ +from .restartable import RestartableStrategy + +class SafeRestartableStrategy(RestartableStrategy): + thread_safe: bool + def __init__(self, ldap_connection) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/safeSync.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/safeSync.pyi new file mode 100644 index 00000000..2b8b5139 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/safeSync.pyi @@ -0,0 +1,5 @@ +from .sync import SyncStrategy + +class SafeSyncStrategy(SyncStrategy): + thread_safe: bool + def __init__(self, ldap_connection) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/sync.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/sync.pyi new file mode 100644 index 00000000..a1a84609 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/strategy/sync.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from ..strategy.base import BaseStrategy + +LDAP_MESSAGE_TEMPLATE: Any + +class SyncStrategy(BaseStrategy): + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + socket_size: Any + def __init__(self, ldap_connection) -> None: ... + def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... + def receiving(self): ... + def post_send_single_response(self, message_id): ... + def post_send_search(self, message_id): ... + def set_stream(self, value) -> None: ... + def get_stream(self) -> None: ... 
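The strategy classes stubbed above are normally not instantiated directly: ldap3 picks one based on the client_strategy argument of Connection, and the mock strategies are usable without a real DSA because of MockBaseStrategy.add_entry (declared in mockBase.pyi above). A hedged usage sketch, assuming the documented ldap3 mocking API:

# Illustrative usage sketch; not part of the vendored stubs.
from ldap3 import MOCK_SYNC, Connection, Server

server = Server("my_fake_server")
conn = Connection(
    server,
    user="cn=my_user,ou=test,o=lab",
    password="my_password",
    client_strategy=MOCK_SYNC,  # selects MockSyncStrategy instead of the default SyncStrategy
)
# Seed the in-memory DIT so the simple bind below can succeed.
conn.strategy.add_entry(
    "cn=my_user,ou=test,o=lab",
    {"userPassword": "my_password", "sn": "my_user_sn", "revision": 0},
)
conn.bind()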
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/asn1.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/asn1.pyi new file mode 100644 index 00000000..3f4bc709 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/asn1.pyi @@ -0,0 +1,47 @@ +from _typeshed import Incomplete, IndexableBuffer, SliceableBuffer, Unused +from collections.abc import Callable, Mapping +from typing import Any, TypeVar, overload +from typing_extensions import Final, TypeAlias + +from pyasn1.codec.ber.encoder import AbstractItemEncoder + +# Use _typeshed._SupportsGetItemBuffer after PEP 688 +_SupportsGetItemBuffer: TypeAlias = SliceableBuffer | IndexableBuffer +_R = TypeVar("_R") +_B = TypeVar("_B", bound=_SupportsGetItemBuffer) +# The possible return type is a union of all other decode methods, ie: AnyOf[Incomplete | bool] +_AllDecodersReturnType: TypeAlias = Any + +CLASSES: Final[dict[tuple[bool, bool], int]] + +class LDAPBooleanEncoder(AbstractItemEncoder): + supportIndefLenMode: bool + # Requires pyasn1 > 0.3.7 + def encodeValue(self, value: bool | int, asn1Spec: Unused, encodeFun: Unused, **options: Unused): ... + +def compute_ber_size(data): ... +def decode_message_fast(message): ... +@overload +def decode_sequence(message: _B, start: int, stop: int, context_decoders: Mapping[int, Callable[[_B, int, int], _R]]) -> _R: ... +@overload +def decode_sequence( + message: _SupportsGetItemBuffer, start: int, stop: int, context_decoders: None = ... +) -> _AllDecodersReturnType: ... +def decode_integer(message, start: int, stop: int, context_decoders: Unused = ...): ... +def decode_octet_string(message, start: int, stop: int, context_decoders: Unused = ...): ... +def decode_boolean(message, start: int, stop: int, context_decoders: Unused = ...): ... +def decode_bind_response(message, start: int, stop: int, context_decoders: Unused = ...): ... +def decode_extended_response(message, start: int, stop: int, context_decoders: Unused = ...): ... +def decode_intermediate_response(message, start: int, stop: int, context_decoders: Unused = ...): ... +def decode_controls(message, start: int, stop: int, context_decoders: Unused = ...): ... +def ldap_result_to_dict_fast(response): ... +def get_byte(x): ... +def get_bytes(x): ... 
+ +# The possible return type is a union of all other decode methods, ie: AnyOf[Incomplete | bool] +DECODERS: dict[tuple[int, int], Callable[..., _AllDecodersReturnType]] +BIND_RESPONSE_CONTEXT: dict[int, Callable[..., Incomplete]] +EXTENDED_RESPONSE_CONTEXT: dict[int, Callable[..., Incomplete]] +INTERMEDIATE_RESPONSE_CONTEXT: dict[int, Callable[..., Incomplete]] +LDAP_MESSAGE_CONTEXT: dict[int, Callable[..., Incomplete]] +CONTROLS_CONTEXT: dict[int, Callable[..., Incomplete]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/ciDict.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/ciDict.pyi new file mode 100644 index 00000000..15419940 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/ciDict.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete +from collections.abc import MutableMapping +from typing import Generic, TypeVar + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + +class CaseInsensitiveDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + def __init__(self, other: Incomplete | None = ..., **kwargs) -> None: ... + def __contains__(self, item): ... + def __delitem__(self, key) -> None: ... + def __setitem__(self, key, item) -> None: ... + def __getitem__(self, key): ... + def __iter__(self): ... + def __len__(self) -> int: ... + def keys(self): ... + def values(self): ... + def items(self): ... + def __eq__(self, other): ... + def copy(self): ... + +class CaseInsensitiveWithAliasDict(CaseInsensitiveDict[_KT, _VT], Generic[_KT, _VT]): + def __init__(self, other: Incomplete | None = ..., **kwargs) -> None: ... + def aliases(self): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def set_alias(self, key, alias, ignore_duplicates: bool = ...) -> None: ... + def remove_alias(self, alias) -> None: ... + def __getitem__(self, key): ... + def copy(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/config.pyi new file mode 100644 index 00000000..80971d5f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/config.pyi @@ -0,0 +1,6 @@ +from typing import Any + +PARAMETERS: Any + +def get_config_parameter(parameter): ... +def set_config_parameter(parameter, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/conv.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/conv.pyi new file mode 100644 index 00000000..4955a853 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/conv.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +def to_unicode(obj: float | bytes | str, encoding: str | None = ..., from_server: bool = ...) -> str: ... +def to_raw(obj, encoding: str = ...): ... +def escape_filter_chars(text: float | bytes | str, encoding: str | None = ...) -> str: ... +def unescape_filter_chars(text, encoding: Incomplete | None = ...): ... +def escape_bytes(bytes_value: str | bytes) -> str: ... +def prepare_for_stream(value): ... +def json_encode_b64(obj): ... +def check_json_dict(json_dict) -> None: ... +def json_hook(obj): ... +def format_json(obj, iso_format: bool = ...): ... 
+def is_filter_escaped(text): ... +def ldap_escape_to_bytes(text): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/dn.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/dn.pyi new file mode 100644 index 00000000..dab0cdff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/dn.pyi @@ -0,0 +1,11 @@ +STATE_ANY: int +STATE_ESCAPE: int +STATE_ESCAPE_HEX: int + +def to_dn( + iterator, decompose: bool = ..., remove_space: bool = ..., space_around_equal: bool = ..., separate_rdn: bool = ... +): ... +def parse_dn(dn, escape: bool = ..., strip: bool = ...): ... +def safe_dn(dn, decompose: bool = ..., reverse: bool = ...): ... +def safe_rdn(dn, decompose: bool = ...): ... +def escape_rdn(rdn: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/hashed.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/hashed.pyi new file mode 100644 index 00000000..87693219 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/hashed.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete +from typing import Any + +algorithms_table: Any +salted_table: Any + +def hashed(algorithm, value, salt: Incomplete | None = ..., raw: bool = ..., encoding: str = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/log.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/log.pyi new file mode 100644 index 00000000..a5c47f7b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/log.pyi @@ -0,0 +1,25 @@ +from logging import NullHandler as NullHandler +from typing import Any + +OFF: int +ERROR: int +BASIC: int +PROTOCOL: int +NETWORK: int +EXTENDED: int +DETAIL_LEVELS: Any + +def get_detail_level_name(level_name): ... +def log(detail, message, *args) -> None: ... +def log_enabled(detail): ... +def set_library_log_hide_sensitive_data(hide: bool = ...) -> None: ... +def get_library_log_hide_sensitive_data(): ... +def set_library_log_activation_level(logging_level) -> None: ... +def get_library_log_activation_lavel(): ... +def set_library_log_max_line_length(length) -> None: ... +def get_library_log_max_line_length(): ... +def set_library_log_detail_level(detail) -> None: ... +def get_library_log_detail_level(): ... +def format_ldap_message(message, prefix): ... 
+ +logger: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/ntlm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/ntlm.pyi new file mode 100644 index 00000000..6e122f3a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/ntlm.pyi @@ -0,0 +1,117 @@ +from typing import Any + +oem_encoding: Any +NTLM_SIGNATURE: bytes +NTLM_MESSAGE_TYPE_NTLM_NEGOTIATE: int +NTLM_MESSAGE_TYPE_NTLM_CHALLENGE: int +NTLM_MESSAGE_TYPE_NTLM_AUTHENTICATE: int +FLAG_NEGOTIATE_56: int +FLAG_NEGOTIATE_KEY_EXCH: int +FLAG_NEGOTIATE_128: int +FLAG_NEGOTIATE_VERSION: int +FLAG_NEGOTIATE_TARGET_INFO: int +FLAG_REQUEST_NOT_NT_SESSION_KEY: int +FLAG_NEGOTIATE_IDENTIFY: int +FLAG_NEGOTIATE_EXTENDED_SESSIONSECURITY: int +FLAG_TARGET_TYPE_SERVER: int +FLAG_TARGET_TYPE_DOMAIN: int +FLAG_NEGOTIATE_ALWAYS_SIGN: int +FLAG_NEGOTIATE_OEM_WORKSTATION_SUPPLIED: int +FLAG_NEGOTIATE_OEM_DOMAIN_SUPPLIED: int +FLAG_NEGOTIATE_ANONYMOUS: int +FLAG_NEGOTIATE_NTLM: int +FLAG_NEGOTIATE_LM_KEY: int +FLAG_NEGOTIATE_DATAGRAM: int +FLAG_NEGOTIATE_SEAL: int +FLAG_NEGOTIATE_SIGN: int +FLAG_REQUEST_TARGET: int +FLAG_NEGOTIATE_OEM: int +FLAG_NEGOTIATE_UNICODE: int +FLAG_TYPES: Any +AV_END_OF_LIST: int +AV_NETBIOS_COMPUTER_NAME: int +AV_NETBIOS_DOMAIN_NAME: int +AV_DNS_COMPUTER_NAME: int +AV_DNS_DOMAIN_NAME: int +AV_DNS_TREE_NAME: int +AV_FLAGS: int +AV_TIMESTAMP: int +AV_SINGLE_HOST_DATA: int +AV_TARGET_NAME: int +AV_CHANNEL_BINDINGS: int +AV_TYPES: Any +AV_FLAG_CONSTRAINED: int +AV_FLAG_INTEGRITY: int +AV_FLAG_TARGET_SPN_UNTRUSTED: int +AV_FLAG_TYPES: Any + +def pack_windows_version(debug: bool = ...): ... +def unpack_windows_version(version_message): ... + +class NtlmClient: + client_config_flags: int + exported_session_key: Any + negotiated_flags: Any + user_name: Any + user_domain: Any + no_lm_response_ntlm_v1: Any + client_blocked: bool + client_block_exceptions: Any + client_require_128_bit_encryption: Any + max_life_time: Any + client_signing_key: Any + client_sealing_key: Any + sequence_number: Any + server_sealing_key: Any + server_signing_key: Any + integrity: bool + replay_detect: bool + sequence_detect: bool + confidentiality: bool + datagram: bool + identity: bool + client_supplied_target_name: Any + client_channel_binding_unhashed: Any + unverified_target_name: Any + server_challenge: Any + server_target_name: Any + server_target_info: Any + server_version: Any + server_av_netbios_computer_name: Any + server_av_netbios_domain_name: Any + server_av_dns_computer_name: Any + server_av_dns_domain_name: Any + server_av_dns_forest_name: Any + server_av_target_name: Any + server_av_flags: Any + server_av_timestamp: Any + server_av_single_host_data: Any + server_av_channel_bindings: Any + server_av_flag_constrained: Any + server_av_flag_integrity: Any + server_av_flag_target_spn_untrusted: Any + current_encoding: Any + client_challenge: Any + server_target_info_raw: Any + def __init__(self, domain, user_name, password) -> None: ... + def get_client_flag(self, flag): ... + def get_negotiated_flag(self, flag): ... + def get_server_av_flag(self, flag): ... + def set_client_flag(self, flags) -> None: ... + def reset_client_flags(self) -> None: ... + def unset_client_flag(self, flags) -> None: ... + def create_negotiate_message(self): ... + def parse_challenge_message(self, message): ... + def create_authenticate_message(self): ... + @staticmethod + def pack_field(value, offset): ... 
+ @staticmethod + def unpack_field(field_message): ... + @staticmethod + def unpack_av_info(info): ... + @staticmethod + def pack_av_info(avs): ... + @staticmethod + def pack_windows_timestamp(): ... + def compute_nt_response(self): ... + def ntowf_v2(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/port_validators.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/port_validators.pyi new file mode 100644 index 00000000..c120f02b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/port_validators.pyi @@ -0,0 +1,2 @@ +def check_port(port): ... +def check_port_and_port_list(port, port_list): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/repr.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/repr.pyi new file mode 100644 index 00000000..f2c58e20 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/repr.pyi @@ -0,0 +1,5 @@ +from typing import Any + +repr_encoding: Any + +def to_stdout_encoding(value): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/tls_backport.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/tls_backport.pyi new file mode 100644 index 00000000..c2199801 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/tls_backport.pyi @@ -0,0 +1,3 @@ +class CertificateError(ValueError): ... + +def match_hostname(cert, hostname): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/uri.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/uri.pyi new file mode 100644 index 00000000..45ea8bb7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/utils/uri.pyi @@ -0,0 +1 @@ +def parse_uri(uri): ... 
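The ntowf_v2 and compute_nt_response methods in the NtlmClient stub above refer to the NTLMv2 computations specified in MS-NLMP. As a rough sketch of the NTOWFv2 key derivation those names point at (illustrative only, not ldap3's code; MD4 availability depends on the local OpenSSL build):

# Hypothetical sketch of NTOWFv2 per MS-NLMP:
#   NTOWFv2(passwd, user, domain) = HMAC-MD5(MD4(UNICODE(passwd)), UNICODE(UPPER(user) + domain))
import hashlib
import hmac

def ntowf_v2(password: str, user: str, domain: str) -> bytes:
    nt_hash = hashlib.new("md4", password.encode("utf-16-le")).digest()
    return hmac.new(nt_hash, (user.upper() + domain).encode("utf-16-le"), hashlib.md5).digest()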
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/version.pyi new file mode 100644 index 00000000..50be840c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ldap3/ldap3/version.pyi @@ -0,0 +1,3 @@ +__url__: str +__description__: str +__status__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..b9f6f499 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +mock.patch +mock.mock.patch diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/METADATA.toml new file mode 100644 index 00000000..c98db630 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/METADATA.toml @@ -0,0 +1 @@ +version = "5.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/mock/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/mock/__init__.pyi new file mode 100644 index 00000000..9b47e220 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/mock/__init__.pyi @@ -0,0 +1,3 @@ +from .mock import * + +version_info: tuple[int, int, int] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/mock/backports.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/mock/backports.pyi new file mode 100644 index 00000000..3984dd88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/mock/backports.pyi @@ -0,0 +1,11 @@ +import sys + +if sys.version_info >= (3, 8): + from asyncio import iscoroutinefunction as iscoroutinefunction + from unittest import IsolatedAsyncioTestCase as IsolatedAsyncioTestCase +else: + import unittest + + class IsolatedAsyncioTestCase(unittest.TestCase): ... + # It is a typeguard, but its signature is to complex to duplicate. + def iscoroutinefunction(obj: object) -> bool: ... 
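The comment in backports.pyi above notes that iscoroutinefunction acts as a type guard whose full signature is too involved to duplicate, so the stub settles for a plain bool return. A simplified sketch of what a more precise signature could look like, assuming typing_extensions.TypeGuard is available; the helper name here is hypothetical:

# Hypothetical illustration only; the vendored stub deliberately keeps `-> bool`.
import asyncio
from typing import Any, Callable, Coroutine

from typing_extensions import TypeGuard

def iscoroutinefunction_guarded(obj: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]:
    # A True result lets a type checker narrow `obj` to an async callable.
    return asyncio.iscoroutinefunction(obj)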
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/mock/mock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/mock/mock.pyi new file mode 100644 index 00000000..2723619a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mock/mock/mock.pyi @@ -0,0 +1,379 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Coroutine, Mapping, Sequence +from contextlib import AbstractContextManager +from types import TracebackType +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Literal, Self + +_F = TypeVar("_F", bound=Callable[..., Any]) +_AF = TypeVar("_AF", bound=Callable[..., Coroutine[Any, Any, Any]]) +_T = TypeVar("_T") +_TT = TypeVar("_TT", bound=type[Any]) +_R = TypeVar("_R") + +__all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "AsyncMock", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + "seal", +) + +class InvalidSpecError(Exception): ... + +FILTER_DIR: bool + +class _SentinelObject: + def __init__(self, name: str) -> None: ... + name: str + +class _Sentinel: + def __getattr__(self, name: str) -> _SentinelObject: ... + +sentinel: _Sentinel +DEFAULT: _SentinelObject + +class _Call(tuple[Any, ...]): + def __new__( + cls, + value: Any = ..., + name: Incomplete | None = ..., + parent: Incomplete | None = ..., + two: bool = ..., + from_kall: bool = ..., + ) -> Self: ... + name: Any + parent: Any + from_kall: Any + def __init__( + self, + value: Any = ..., + name: Incomplete | None = ..., + parent: Incomplete | None = ..., + two: bool = ..., + from_kall: bool = ..., + ) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... + def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... + def __getattr__(self, attr: str) -> Any: ... + @property + def args(self) -> tuple[Any, ...]: ... + @property + def kwargs(self) -> dict[str, Any]: ... + def call_list(self) -> _CallList: ... + +call: _Call + +class _CallList(list[_Call]): + def __contains__(self, value: Any) -> bool: ... + +class Base: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + +# We subclass with "Any" because mocks are explicitly designed to stand in for other types, +# something that can't be expressed with our static type system. +class NonCallableMock(Base, Any): + def __new__( + cls, + spec: list[str] | object | type[object] | None = ..., + wraps: Incomplete | None = ..., + name: str | None = ..., + spec_set: list[str] | object | type[object] | None = ..., + parent: NonCallableMock | None = ..., + _spec_state: Incomplete | None = ..., + _new_name: str = ..., + _new_parent: NonCallableMock | None = ..., + _spec_as_instance: bool = ..., + _eat_self: bool | None = ..., + unsafe: bool = ..., + **kwargs: Any, + ) -> Self: ... + def __init__( + self, + spec: list[str] | object | type[object] | None = ..., + wraps: Incomplete | None = ..., + name: str | None = ..., + spec_set: list[str] | object | type[object] | None = ..., + parent: NonCallableMock | None = ..., + _spec_state: Incomplete | None = ..., + _new_name: str = ..., + _new_parent: NonCallableMock | None = ..., + _spec_as_instance: bool = ..., + _eat_self: bool | None = ..., + unsafe: bool = ..., + **kwargs: Any, + ) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def _calls_repr(self, prefix: str = ...) -> str: ... 
+ def assert_called_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... + def assert_not_called(_mock_self) -> None: ... + def assert_called_once_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... + def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = ...) -> str: ... + def assert_called(_mock_self) -> None: ... + def assert_called_once(_mock_self) -> None: ... + def reset_mock(self, visited: Any = ..., *, return_value: bool = ..., side_effect: bool = ...) -> None: ... + def _extract_mock_name(self) -> str: ... + def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ... + def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = ...) -> None: ... + def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ... + def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = ..., _eat_self: bool = ...) -> None: ... + def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: ... + def configure_mock(self, **kwargs: Any) -> None: ... + return_value: Any + side_effect: Any + called: bool + call_count: int + call_args: Any + call_args_list: _CallList + mock_calls: _CallList + def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... + def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: ... + def _get_child_mock(self, **kw: Any) -> NonCallableMock: ... + +class CallableMixin(Base): + side_effect: Any + def __init__( + self, + spec: Incomplete | None = ..., + side_effect: Incomplete | None = ..., + return_value: Any = ..., + wraps: Incomplete | None = ..., + name: Incomplete | None = ..., + spec_set: Incomplete | None = ..., + parent: Incomplete | None = ..., + _spec_state: Incomplete | None = ..., + _new_name: Any = ..., + _new_parent: Incomplete | None = ..., + **kwargs: Any, + ) -> None: ... + def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ... + +class Mock(CallableMixin, NonCallableMock): ... + +class _patch(Generic[_T]): + attribute_name: Any + getter: Callable[[], Any] + attribute: str + new: _T + new_callable: Any + spec: Any + create: bool + has_local: Any + spec_set: Any + autospec: Any + kwargs: Mapping[str, Any] + additional_patchers: Any + def __init__( + self: _patch[_T], + getter: Callable[[], Any], + attribute: str, + new: _T, + spec: Incomplete | None, + create: bool, + spec_set: Incomplete | None, + autospec: Incomplete | None, + new_callable: Incomplete | None, + kwargs: Mapping[str, Any], + *, + unsafe: bool = ..., + ) -> None: ... + def copy(self) -> _patch[_T]: ... + def __call__(self, func: Callable[..., _R]) -> Callable[..., _R]: ... + def decorate_class(self, klass: _TT) -> _TT: ... + def decorate_callable(self, func: _F) -> _F: ... + def decorate_async_callable(self, func: _AF) -> _AF: ... + def decoration_helper( + self, patched: Any, args: tuple[Any, ...], keywargs: dict[str, Any] + ) -> AbstractContextManager[tuple[tuple[Any, ...], dict[str, Any]]]: ... + def get_original(self) -> tuple[Any, bool]: ... + target: Any + temp_original: Any + is_local: bool + def __enter__(self) -> _T: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> None: ... + def start(self) -> _T: ... + def stop(self) -> None: ... + +class _patch_dict: + in_dict: Any + values: Any + clear: Any + def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ... + def __call__(self, f: Any) -> Any: ... + def decorate_callable(self, f: _F) -> _F: ... 
+ def decorate_async_callable(self, f: _AF) -> _AF: ... + def decorate_class(self, klass: Any) -> Any: ... + def __enter__(self) -> Any: ... + def __exit__(self, *args: object) -> Any: ... + start: Any + stop: Any + +class _patcher: + TEST_PREFIX: str + dict: type[_patch_dict] + @overload + def __call__( # type: ignore[misc] + self, + target: Any, + *, + spec: Incomplete | None = ..., + create: bool = ..., + spec_set: Incomplete | None = ..., + autospec: Incomplete | None = ..., + new_callable: Incomplete | None = ..., + unsafe: bool = ..., + **kwargs: Any, + ) -> _patch[MagicMock | AsyncMock]: ... + # This overload also covers the case, where new==DEFAULT. In this case, the return type is _patch[Any]. + # Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock], + # but that's impossible with the current type system. + @overload + def __call__( + self, + target: Any, + new: _T, + spec: Incomplete | None = ..., + create: bool = ..., + spec_set: Incomplete | None = ..., + autospec: Incomplete | None = ..., + new_callable: Incomplete | None = ..., + *, + unsafe: bool = ..., + **kwargs: Any, + ) -> _patch[_T]: ... + @overload + def object( # type: ignore[misc] + self, + target: Any, + attribute: str, + *, + spec: Incomplete | None = ..., + create: bool = ..., + spec_set: Incomplete | None = ..., + autospec: Incomplete | None = ..., + new_callable: Incomplete | None = ..., + unsafe: bool = ..., + **kwargs: Any, + ) -> _patch[MagicMock | AsyncMock]: ... + @overload + def object( + self, + target: Any, + attribute: str, + new: _T, + spec: Incomplete | None = ..., + create: bool = ..., + spec_set: Incomplete | None = ..., + autospec: Incomplete | None = ..., + new_callable: Incomplete | None = ..., + *, + unsafe: bool = ..., + **kwargs: Any, + ) -> _patch[_T]: ... + def multiple( + self, + target: Any, + spec: Incomplete | None = ..., + create: bool = ..., + spec_set: Incomplete | None = ..., + autospec: Incomplete | None = ..., + new_callable: Incomplete | None = ..., + *, + unsafe: bool = ..., + **kwargs: _T, + ) -> _patch[_T]: ... + def stopall(self) -> None: ... + +patch: _patcher + +class MagicMixin: + def __init__(self, *args: Any, **kw: Any) -> None: ... + +class NonCallableMagicMock(MagicMixin, NonCallableMock): + def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ... + +class MagicMock(MagicMixin, Mock): + def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ... + +class AsyncMockMixin(Base): + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def assert_awaited(_mock_self) -> None: ... + def assert_awaited_once(_mock_self) -> None: ... + def assert_awaited_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... + def assert_awaited_once_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... + def assert_any_await(_mock_self, *args: Any, **kwargs: Any) -> None: ... + def assert_has_awaits(_mock_self, calls: _CallList, any_order: bool = ...) -> None: ... + def assert_not_awaited(_mock_self) -> None: ... + def reset_mock(self, *args: Any, **kwargs: Any) -> None: ... + await_count: int + await_args: _Call | None + await_args_list: _CallList + __name__: str + __defaults__: tuple[Any, ...] + __kwdefaults__: dict[str, Any] + __annotations__: dict[str, Any] | None # type: ignore[assignment] + +class AsyncMagicMixin(MagicMixin): ... +class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): ... + +class MagicProxy(Base): + name: str + parent: Any + def __init__(self, name: str, parent: Any) -> None: ... 
+ def create_mock(self) -> Any: ... + def __get__(self, obj: Any, _type: Incomplete | None = ...) -> Any: ... + +class _ANY: + def __eq__(self, other: object) -> Literal[True]: ... + def __ne__(self, other: object) -> Literal[False]: ... + +ANY: Any + +def create_autospec( + spec: Any, + spec_set: Any = ..., + instance: Any = ..., + _parent: Incomplete | None = ..., + _name: Incomplete | None = ..., + *, + unsafe: bool = ..., + **kwargs: Any, +) -> Any: ... + +class _SpecState: + spec: Any + ids: Any + spec_set: Any + parent: Any + instance: Any + name: Any + def __init__( + self, + spec: Any, + spec_set: Any = ..., + parent: Incomplete | None = ..., + name: Incomplete | None = ..., + ids: Incomplete | None = ..., + instance: Any = ..., + ) -> None: ... + +def mock_open(mock: Incomplete | None = ..., read_data: Any = ...) -> Any: ... + +class PropertyMock(Mock): + def __get__(self, obj: _T, obj_type: type[_T] | None = ...) -> Self: ... + def __set__(self, obj: Any, value: Any) -> None: ... + +def seal(mock: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mypy-extensions/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mypy-extensions/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..4f48c1c4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mypy-extensions/@tests/stubtest_allowlist.txt @@ -0,0 +1,6 @@ +mypy_extensions.FlexibleAlias +mypy_extensions.TypedDict +mypy_extensions.i64.* +mypy_extensions.i32.* +mypy_extensions.i16.* +mypy_extensions.u8.* diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mypy-extensions/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mypy-extensions/METADATA.toml new file mode 100644 index 00000000..f3e83f9c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mypy-extensions/METADATA.toml @@ -0,0 +1 @@ +version = "1.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mypy-extensions/mypy_extensions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mypy-extensions/mypy_extensions.pyi new file mode 100644 index 00000000..8565793c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mypy-extensions/mypy_extensions.pyi @@ -0,0 +1,78 @@ +import abc +import sys +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import IdentityFunction, Unused +from collections.abc import Mapping +from typing import Any, ClassVar, Generic, TypeVar, overload, type_check_only +from typing_extensions import Never, Self + +_T = TypeVar("_T") +_U = TypeVar("_U") + +# Internal mypy fallback type for all typed dicts (does not exist at runtime) +# N.B. Keep this mostly in sync with typing(_extensions)._TypedDict +@type_check_only +class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): + __total__: ClassVar[bool] + # Unlike typing(_extensions).TypedDict, + # subclasses of mypy_extensions.TypedDict do NOT have the __required_keys__ and __optional_keys__ ClassVars + def copy(self) -> Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. 
+ def pop(self, k: Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def update(self, __m: Self) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + def __delitem__(self, k: Never) -> None: ... + if sys.version_info >= (3, 9): + def __or__(self, __other: Self) -> Self: ... + def __ior__(self, __other: Self) -> Self: ... + +def TypedDict(typename: str, fields: dict[str, type[Any]], total: bool = ...) -> type[dict[str, Any]]: ... +@overload +def Arg(type: _T, name: str | None = ...) -> _T: ... +@overload +def Arg(*, name: str | None = ...) -> Any: ... +@overload +def DefaultArg(type: _T, name: str | None = ...) -> _T: ... +@overload +def DefaultArg(*, name: str | None = ...) -> Any: ... +@overload +def NamedArg(type: _T, name: str | None = ...) -> _T: ... +@overload +def NamedArg(*, name: str | None = ...) -> Any: ... +@overload +def DefaultNamedArg(type: _T, name: str | None = ...) -> _T: ... +@overload +def DefaultNamedArg(*, name: str | None = ...) -> Any: ... +@overload +def VarArg(type: _T) -> _T: ... +@overload +def VarArg() -> Any: ... +@overload +def KwArg(type: _T) -> _T: ... +@overload +def KwArg() -> Any: ... + +# Return type that indicates a function does not return. +# Deprecated: Use typing.NoReturn instead. +class NoReturn: ... + +# This is consistent with implementation. Usage intends for this as +# a class decorator, but mypy does not support type[_T] for abstract +# classes until this issue is resolved, https://github.com/python/mypy/issues/4717. +def trait(cls: _T) -> _T: ... +def mypyc_attr(*attrs: str, **kwattrs: Unused) -> IdentityFunction: ... + +class FlexibleAlias(Generic[_T, _U]): ... + +# Mypy and mypyc treat these native int types as different from 'int', but this is +# a non-standard extension. For other tools, aliasing these to 'int' allows them +# to mostly do the right thing with these types. 
+i64 = int +i32 = int +i16 = int +u8 = int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..fbc91acd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +MySQLdb.Connection diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/METADATA.toml new file mode 100644 index 00000000..acdc22b5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/METADATA.toml @@ -0,0 +1 @@ +version = "2.1.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/__init__.pyi new file mode 100644 index 00000000..87fbcf2b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/__init__.pyi @@ -0,0 +1,51 @@ +from typing import Any + +from MySQLdb import connections as connections, constants as constants, converters as converters, cursors as cursors +from MySQLdb._mysql import ( + DatabaseError as DatabaseError, + DataError as DataError, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + MySQLError as MySQLError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + ProgrammingError as ProgrammingError, + Warning as Warning, + debug as debug, + get_client_info as get_client_info, + string_literal as string_literal, +) +from MySQLdb.connections import Connection as Connection +from MySQLdb.constants import FIELD_TYPE as FIELD_TYPE +from MySQLdb.release import version_info as version_info +from MySQLdb.times import ( + Date as Date, + DateFromTicks as DateFromTicks, + Time as Time, + TimeFromTicks as TimeFromTicks, + Timestamp as Timestamp, + TimestampFromTicks as TimestampFromTicks, +) + +threadsafety: int +apilevel: str +paramstyle: str + +class DBAPISet(frozenset[Any]): + def __eq__(self, other): ... + +STRING: Any +BINARY: Any +NUMBER: Any +DATE: Any +TIME: Any +TIMESTAMP: Any +DATETIME: Any +ROWID: Any + +def Binary(x): ... +def Connect(*args, **kwargs): ... + +connect = Connect diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/_exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/_exceptions.pyi new file mode 100644 index 00000000..37c3112b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/_exceptions.pyi @@ -0,0 +1,13 @@ +import builtins + +class MySQLError(Exception): ... +class Warning(builtins.Warning, MySQLError): ... +class Error(MySQLError): ... +class InterfaceError(Error): ... +class DatabaseError(Error): ... +class DataError(DatabaseError): ... +class OperationalError(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InternalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... +class NotSupportedError(DatabaseError): ... 
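A minimal usage sketch of how the DB-API exception hierarchy declared in MySQLdb/_exceptions.pyi above composes with the re-exports and the `connect = Connect` alias in MySQLdb/__init__.pyi; this is an illustration only, not part of the vendored stub data, and the connection keyword arguments are assumed placeholders:

import MySQLdb

try:
    # connect = Connect in the stub; host/user/passwd/db here are placeholder values.
    conn = MySQLdb.connect(host="localhost", user="example", passwd="example", db="test")
except MySQLdb.OperationalError as exc:
    # Per _exceptions.pyi: OperationalError -> DatabaseError -> Error -> MySQLError,
    # so the broader classes re-exported from MySQLdb also catch this failure.
    assert isinstance(exc, MySQLdb.DatabaseError)
    assert isinstance(exc, MySQLdb.Error)
    print("connection failed:", exc)
else:
    conn.close()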
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/_mysql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/_mysql.pyi new file mode 100644 index 00000000..b1dded22 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/_mysql.pyi @@ -0,0 +1,87 @@ +import builtins +from typing import Any + +import MySQLdb._exceptions + +version_info: tuple[Any, ...] + +class DataError(MySQLdb._exceptions.DatabaseError): ... +class DatabaseError(MySQLdb._exceptions.Error): ... +class Error(MySQLdb._exceptions.MySQLError): ... +class IntegrityError(MySQLdb._exceptions.DatabaseError): ... +class InterfaceError(MySQLdb._exceptions.Error): ... +class InternalError(MySQLdb._exceptions.DatabaseError): ... +class MySQLError(Exception): ... +class NotSupportedError(MySQLdb._exceptions.DatabaseError): ... +class OperationalError(MySQLdb._exceptions.DatabaseError): ... +class ProgrammingError(MySQLdb._exceptions.DatabaseError): ... +class Warning(builtins.Warning, MySQLdb._exceptions.MySQLError): ... + +class connection: + client_flag: Any + converter: Any + open: Any + port: Any + server_capabilities: Any + def __init__(self, *args, **kwargs) -> None: ... + def _get_native_connection(self, *args, **kwargs) -> Any: ... + def affected_rows(self, *args, **kwargs) -> Any: ... + def autocommit(self, on) -> Any: ... + def change_user(self, *args, **kwargs) -> Any: ... + def character_set_name(self, *args, **kwargs) -> Any: ... + def close(self, *args, **kwargs) -> Any: ... + def commit(self, *args, **kwargs) -> Any: ... + def dump_debug_info(self, *args, **kwargs) -> Any: ... + def errno(self, *args, **kwargs) -> Any: ... + def error(self, *args, **kwargs) -> Any: ... + def escape(self, obj, dict) -> Any: ... + def escape_string(self, s) -> Any: ... + def field_count(self, *args, **kwargs) -> Any: ... + def fileno(self, *args, **kwargs) -> Any: ... + def get_autocommit(self, *args, **kwargs) -> Any: ... + def get_character_set_info(self, *args, **kwargs) -> Any: ... + def get_host_info(self, *args, **kwargs) -> Any: ... + def get_proto_info(self, *args, **kwargs) -> Any: ... + def get_server_info(self, *args, **kwargs) -> Any: ... + def info(self, *args, **kwargs) -> Any: ... + def insert_id(self, *args, **kwargs) -> Any: ... + def kill(self, *args, **kwargs) -> Any: ... + def next_result(self) -> Any: ... + def ping(self) -> Any: ... + def query(self, query) -> Any: ... + def read_query_result(self, *args, **kwargs) -> Any: ... + def rollback(self, *args, **kwargs) -> Any: ... + def select_db(self, *args, **kwargs) -> Any: ... + def send_query(self, *args, **kwargs) -> Any: ... + def set_character_set(self, charset) -> Any: ... + def set_server_option(self, option) -> Any: ... + def shutdown(self, *args, **kwargs) -> Any: ... + def sqlstate(self, *args, **kwargs) -> Any: ... + def stat(self, *args, **kwargs) -> Any: ... + def store_result(self, *args, **kwargs) -> Any: ... + def string_literal(self, obj) -> Any: ... + def thread_id(self, *args, **kwargs) -> Any: ... + def use_result(self, *args, **kwargs) -> Any: ... + def warning_count(self, *args, **kwargs) -> Any: ... + def __delattr__(self, __name: str) -> None: ... + def __setattr__(self, __name: str, __value) -> None: ... + +class result: + converter: Any + has_next: Any + def __init__(self, *args, **kwargs) -> None: ... + def data_seek(self, n) -> Any: ... 
+ def describe(self, *args, **kwargs) -> Any: ... + def fetch_row(self, *args, **kwargs) -> Any: ... + def field_flags(self, *args, **kwargs) -> Any: ... + def num_fields(self, *args, **kwargs) -> Any: ... + def num_rows(self, *args, **kwargs) -> Any: ... + def __delattr__(self, __name: str) -> None: ... + def __setattr__(self, __name: str, __value) -> None: ... + +def connect(*args, **kwargs) -> Any: ... +def debug(*args, **kwargs) -> Any: ... +def escape(obj, dict) -> Any: ... +def escape_string(s) -> Any: ... +def get_client_info() -> Any: ... +def string_literal(obj) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/connections.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/connections.pyi new file mode 100644 index 00000000..d1034859 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/connections.pyi @@ -0,0 +1,52 @@ +from types import TracebackType +from typing import Any +from typing_extensions import Self + +from . import _mysql, cursors +from ._exceptions import ( + DatabaseError as DatabaseError, + DataError as DataError, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + ProgrammingError as ProgrammingError, + Warning as Warning, +) + +re_numeric_part: Any + +def numeric_part(s): ... + +class Connection(_mysql.connection): + default_cursor: type[cursors.Cursor] + cursorclass: type[cursors.BaseCursor] + encoders: Any + encoding: str + messages: Any + def __init__(self, *args, **kwargs) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def autocommit(self, on: bool) -> None: ... + def cursor(self, cursorclass: type[cursors.BaseCursor] | None = ...): ... + def query(self, query) -> None: ... + def literal(self, o): ... + def begin(self) -> None: ... + def warning_count(self): ... + def set_character_set(self, charset) -> None: ... + def set_sql_mode(self, sql_mode) -> None: ... + def show_warnings(self): ... 
+ Warning: type[BaseException] + Error: type[BaseException] + InterfaceError: type[BaseException] + DatabaseError: type[BaseException] + DataError: type[BaseException] + OperationalError: type[BaseException] + IntegrityError: type[BaseException] + InternalError: type[BaseException] + ProgrammingError: type[BaseException] + NotSupportedError: type[BaseException] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/CLIENT.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/CLIENT.pyi new file mode 100644 index 00000000..66cb6302 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/CLIENT.pyi @@ -0,0 +1,18 @@ +LONG_PASSWORD: int +FOUND_ROWS: int +LONG_FLAG: int +CONNECT_WITH_DB: int +NO_SCHEMA: int +COMPRESS: int +ODBC: int +LOCAL_FILES: int +IGNORE_SPACE: int +CHANGE_USER: int +INTERACTIVE: int +SSL: int +IGNORE_SIGPIPE: int +TRANSACTIONS: int +RESERVED: int +SECURE_CONNECTION: int +MULTI_STATEMENTS: int +MULTI_RESULTS: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/CR.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/CR.pyi new file mode 100644 index 00000000..192117a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/CR.pyi @@ -0,0 +1,69 @@ +ERROR_FIRST: int +MIN_ERROR: int +UNKNOWN_ERROR: int +SOCKET_CREATE_ERROR: int +CONNECTION_ERROR: int +CONN_HOST_ERROR: int +IPSOCK_ERROR: int +UNKNOWN_HOST: int +SERVER_GONE_ERROR: int +VERSION_ERROR: int +OUT_OF_MEMORY: int +WRONG_HOST_INFO: int +LOCALHOST_CONNECTION: int +TCP_CONNECTION: int +SERVER_HANDSHAKE_ERR: int +SERVER_LOST: int +COMMANDS_OUT_OF_SYNC: int +NAMEDPIPE_CONNECTION: int +NAMEDPIPEWAIT_ERROR: int +NAMEDPIPEOPEN_ERROR: int +NAMEDPIPESETSTATE_ERROR: int +CANT_READ_CHARSET: int +NET_PACKET_TOO_LARGE: int +EMBEDDED_CONNECTION: int +PROBE_SLAVE_STATUS: int +PROBE_SLAVE_HOSTS: int +PROBE_SLAVE_CONNECT: int +PROBE_MASTER_CONNECT: int +SSL_CONNECTION_ERROR: int +MALFORMED_PACKET: int +WRONG_LICENSE: int +NULL_POINTER: int +NO_PREPARE_STMT: int +PARAMS_NOT_BOUND: int +DATA_TRUNCATED: int +NO_PARAMETERS_EXISTS: int +INVALID_PARAMETER_NO: int +INVALID_BUFFER_USE: int +UNSUPPORTED_PARAM_TYPE: int +SHARED_MEMORY_CONNECTION: int +SHARED_MEMORY_CONNECT_REQUEST_ERROR: int +SHARED_MEMORY_CONNECT_ANSWER_ERROR: int +SHARED_MEMORY_CONNECT_FILE_MAP_ERROR: int +SHARED_MEMORY_CONNECT_MAP_ERROR: int +SHARED_MEMORY_FILE_MAP_ERROR: int +SHARED_MEMORY_MAP_ERROR: int +SHARED_MEMORY_EVENT_ERROR: int +SHARED_MEMORY_CONNECT_ABANDONED_ERROR: int +SHARED_MEMORY_CONNECT_SET_ERROR: int +CONN_UNKNOW_PROTOCOL: int +INVALID_CONN_HANDLE: int +UNUSED_1: int +FETCH_CANCELED: int +NO_DATA: int +NO_STMT_METADATA: int +NO_RESULT_SET: int +NOT_IMPLEMENTED: int +SERVER_LOST_EXTENDED: int +STMT_CLOSED: int +NEW_STMT_METADATA: int +ALREADY_CONNECTED: int +AUTH_PLUGIN_CANNOT_LOAD: int +DUPLICATE_CONNECTION_ATTR: int +AUTH_PLUGIN_ERR: int +INSECURE_API_ERR: int +FILE_NAME_TOO_LONG: int +SSL_FIPS_MODE_ERR: int +MAX_ERROR: int +ERROR_LAST: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/ER.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/ER.pyi new file mode 
100644 index 00000000..207a097a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/ER.pyi @@ -0,0 +1,790 @@ +ERROR_FIRST: int +NO: int +YES: int +CANT_CREATE_FILE: int +CANT_CREATE_TABLE: int +CANT_CREATE_DB: int +DB_CREATE_EXISTS: int +DB_DROP_EXISTS: int +DB_DROP_RMDIR: int +CANT_FIND_SYSTEM_REC: int +CANT_GET_STAT: int +CANT_LOCK: int +CANT_OPEN_FILE: int +FILE_NOT_FOUND: int +CANT_READ_DIR: int +CHECKREAD: int +DUP_KEY: int +ERROR_ON_READ: int +ERROR_ON_RENAME: int +ERROR_ON_WRITE: int +FILE_USED: int +FILSORT_ABORT: int +GET_ERRNO: int +ILLEGAL_HA: int +KEY_NOT_FOUND: int +NOT_FORM_FILE: int +NOT_KEYFILE: int +OLD_KEYFILE: int +OPEN_AS_READONLY: int +OUTOFMEMORY: int +OUT_OF_SORTMEMORY: int +CON_COUNT_ERROR: int +OUT_OF_RESOURCES: int +BAD_HOST_ERROR: int +HANDSHAKE_ERROR: int +DBACCESS_DENIED_ERROR: int +ACCESS_DENIED_ERROR: int +NO_DB_ERROR: int +UNKNOWN_COM_ERROR: int +BAD_NULL_ERROR: int +BAD_DB_ERROR: int +TABLE_EXISTS_ERROR: int +BAD_TABLE_ERROR: int +NON_UNIQ_ERROR: int +SERVER_SHUTDOWN: int +BAD_FIELD_ERROR: int +WRONG_FIELD_WITH_GROUP: int +WRONG_GROUP_FIELD: int +WRONG_SUM_SELECT: int +WRONG_VALUE_COUNT: int +TOO_LONG_IDENT: int +DUP_FIELDNAME: int +DUP_KEYNAME: int +DUP_ENTRY: int +WRONG_FIELD_SPEC: int +PARSE_ERROR: int +EMPTY_QUERY: int +NONUNIQ_TABLE: int +INVALID_DEFAULT: int +MULTIPLE_PRI_KEY: int +TOO_MANY_KEYS: int +TOO_MANY_KEY_PARTS: int +TOO_LONG_KEY: int +KEY_COLUMN_DOES_NOT_EXITS: int +BLOB_USED_AS_KEY: int +TOO_BIG_FIELDLENGTH: int +WRONG_AUTO_KEY: int +READY: int +SHUTDOWN_COMPLETE: int +FORCING_CLOSE: int +IPSOCK_ERROR: int +NO_SUCH_INDEX: int +WRONG_FIELD_TERMINATORS: int +BLOBS_AND_NO_TERMINATED: int +TEXTFILE_NOT_READABLE: int +FILE_EXISTS_ERROR: int +LOAD_INFO: int +ALTER_INFO: int +WRONG_SUB_KEY: int +CANT_REMOVE_ALL_FIELDS: int +CANT_DROP_FIELD_OR_KEY: int +INSERT_INFO: int +UPDATE_TABLE_USED: int +NO_SUCH_THREAD: int +KILL_DENIED_ERROR: int +NO_TABLES_USED: int +TOO_BIG_SET: int +NO_UNIQUE_LOGFILE: int +TABLE_NOT_LOCKED_FOR_WRITE: int +TABLE_NOT_LOCKED: int +BLOB_CANT_HAVE_DEFAULT: int +WRONG_DB_NAME: int +WRONG_TABLE_NAME: int +TOO_BIG_SELECT: int +UNKNOWN_ERROR: int +UNKNOWN_PROCEDURE: int +WRONG_PARAMCOUNT_TO_PROCEDURE: int +WRONG_PARAMETERS_TO_PROCEDURE: int +UNKNOWN_TABLE: int +FIELD_SPECIFIED_TWICE: int +INVALID_GROUP_FUNC_USE: int +UNSUPPORTED_EXTENSION: int +TABLE_MUST_HAVE_COLUMNS: int +RECORD_FILE_FULL: int +UNKNOWN_CHARACTER_SET: int +TOO_MANY_TABLES: int +TOO_MANY_FIELDS: int +TOO_BIG_ROWSIZE: int +STACK_OVERRUN: int +WRONG_OUTER_JOIN_UNUSED: int +NULL_COLUMN_IN_INDEX: int +CANT_FIND_UDF: int +CANT_INITIALIZE_UDF: int +UDF_NO_PATHS: int +UDF_EXISTS: int +CANT_OPEN_LIBRARY: int +CANT_FIND_DL_ENTRY: int +FUNCTION_NOT_DEFINED: int +HOST_IS_BLOCKED: int +HOST_NOT_PRIVILEGED: int +PASSWORD_ANONYMOUS_USER: int +PASSWORD_NOT_ALLOWED: int +PASSWORD_NO_MATCH: int +UPDATE_INFO: int +CANT_CREATE_THREAD: int +WRONG_VALUE_COUNT_ON_ROW: int +CANT_REOPEN_TABLE: int +INVALID_USE_OF_NULL: int +REGEXP_ERROR: int +MIX_OF_GROUP_FUNC_AND_FIELDS: int +NONEXISTING_GRANT: int +TABLEACCESS_DENIED_ERROR: int +COLUMNACCESS_DENIED_ERROR: int +ILLEGAL_GRANT_FOR_TABLE: int +GRANT_WRONG_HOST_OR_USER: int +NO_SUCH_TABLE: int +NONEXISTING_TABLE_GRANT: int +NOT_ALLOWED_COMMAND: int +SYNTAX_ERROR: int +ABORTING_CONNECTION: int +NET_PACKET_TOO_LARGE: int +NET_READ_ERROR_FROM_PIPE: int +NET_FCNTL_ERROR: int +NET_PACKETS_OUT_OF_ORDER: int +NET_UNCOMPRESS_ERROR: int +NET_READ_ERROR: int 
+NET_READ_INTERRUPTED: int +NET_ERROR_ON_WRITE: int +NET_WRITE_INTERRUPTED: int +TOO_LONG_STRING: int +TABLE_CANT_HANDLE_BLOB: int +TABLE_CANT_HANDLE_AUTO_INCREMENT: int +WRONG_COLUMN_NAME: int +WRONG_KEY_COLUMN: int +WRONG_MRG_TABLE: int +DUP_UNIQUE: int +BLOB_KEY_WITHOUT_LENGTH: int +PRIMARY_CANT_HAVE_NULL: int +TOO_MANY_ROWS: int +REQUIRES_PRIMARY_KEY: int +UPDATE_WITHOUT_KEY_IN_SAFE_MODE: int +KEY_DOES_NOT_EXITS: int +CHECK_NO_SUCH_TABLE: int +CHECK_NOT_IMPLEMENTED: int +CANT_DO_THIS_DURING_AN_TRANSACTION: int +ERROR_DURING_COMMIT: int +ERROR_DURING_ROLLBACK: int +ERROR_DURING_FLUSH_LOGS: int +NEW_ABORTING_CONNECTION: int +MASTER: int +MASTER_NET_READ: int +MASTER_NET_WRITE: int +FT_MATCHING_KEY_NOT_FOUND: int +LOCK_OR_ACTIVE_TRANSACTION: int +UNKNOWN_SYSTEM_VARIABLE: int +CRASHED_ON_USAGE: int +CRASHED_ON_REPAIR: int +WARNING_NOT_COMPLETE_ROLLBACK: int +TRANS_CACHE_FULL: int +SLAVE_NOT_RUNNING: int +BAD_SLAVE: int +MASTER_INFO: int +SLAVE_THREAD: int +TOO_MANY_USER_CONNECTIONS: int +SET_CONSTANTS_ONLY: int +LOCK_WAIT_TIMEOUT: int +LOCK_TABLE_FULL: int +READ_ONLY_TRANSACTION: int +WRONG_ARGUMENTS: int +NO_PERMISSION_TO_CREATE_USER: int +LOCK_DEADLOCK: int +TABLE_CANT_HANDLE_FT: int +CANNOT_ADD_FOREIGN: int +NO_REFERENCED_ROW: int +ROW_IS_REFERENCED: int +CONNECT_TO_MASTER: int +ERROR_WHEN_EXECUTING_COMMAND: int +WRONG_USAGE: int +WRONG_NUMBER_OF_COLUMNS_IN_SELECT: int +CANT_UPDATE_WITH_READLOCK: int +MIXING_NOT_ALLOWED: int +DUP_ARGUMENT: int +USER_LIMIT_REACHED: int +SPECIFIC_ACCESS_DENIED_ERROR: int +LOCAL_VARIABLE: int +GLOBAL_VARIABLE: int +NO_DEFAULT: int +WRONG_VALUE_FOR_VAR: int +WRONG_TYPE_FOR_VAR: int +VAR_CANT_BE_READ: int +CANT_USE_OPTION_HERE: int +NOT_SUPPORTED_YET: int +MASTER_FATAL_ERROR_READING_BINLOG: int +SLAVE_IGNORED_TABLE: int +INCORRECT_GLOBAL_LOCAL_VAR: int +WRONG_FK_DEF: int +KEY_REF_DO_NOT_MATCH_TABLE_REF: int +OPERAND_COLUMNS: int +SUBQUERY_NO_1_ROW: int +UNKNOWN_STMT_HANDLER: int +CORRUPT_HELP_DB: int +AUTO_CONVERT: int +ILLEGAL_REFERENCE: int +DERIVED_MUST_HAVE_ALIAS: int +SELECT_REDUCED: int +TABLENAME_NOT_ALLOWED_HERE: int +NOT_SUPPORTED_AUTH_MODE: int +SPATIAL_CANT_HAVE_NULL: int +COLLATION_CHARSET_MISMATCH: int +TOO_BIG_FOR_UNCOMPRESS: int +ZLIB_Z_MEM_ERROR: int +ZLIB_Z_BUF_ERROR: int +ZLIB_Z_DATA_ERROR: int +CUT_VALUE_GROUP_CONCAT: int +WARN_TOO_FEW_RECORDS: int +WARN_TOO_MANY_RECORDS: int +WARN_NULL_TO_NOTNULL: int +WARN_DATA_OUT_OF_RANGE: int +WARN_DATA_TRUNCATED: int +WARN_USING_OTHER_HANDLER: int +CANT_AGGREGATE_2COLLATIONS: int +REVOKE_GRANTS: int +CANT_AGGREGATE_3COLLATIONS: int +CANT_AGGREGATE_NCOLLATIONS: int +VARIABLE_IS_NOT_STRUCT: int +UNKNOWN_COLLATION: int +SLAVE_IGNORED_SSL_PARAMS: int +SERVER_IS_IN_SECURE_AUTH_MODE: int +WARN_FIELD_RESOLVED: int +BAD_SLAVE_UNTIL_COND: int +MISSING_SKIP_SLAVE: int +UNTIL_COND_IGNORED: int +WRONG_NAME_FOR_INDEX: int +WRONG_NAME_FOR_CATALOG: int +BAD_FT_COLUMN: int +UNKNOWN_KEY_CACHE: int +WARN_HOSTNAME_WONT_WORK: int +UNKNOWN_STORAGE_ENGINE: int +WARN_DEPRECATED_SYNTAX: int +NON_UPDATABLE_TABLE: int +FEATURE_DISABLED: int +OPTION_PREVENTS_STATEMENT: int +DUPLICATED_VALUE_IN_TYPE: int +TRUNCATED_WRONG_VALUE: int +INVALID_ON_UPDATE: int +UNSUPPORTED_PS: int +GET_ERRMSG: int +GET_TEMPORARY_ERRMSG: int +UNKNOWN_TIME_ZONE: int +WARN_INVALID_TIMESTAMP: int +INVALID_CHARACTER_STRING: int +WARN_ALLOWED_PACKET_OVERFLOWED: int +CONFLICTING_DECLARATIONS: int +SP_NO_RECURSIVE_CREATE: int +SP_ALREADY_EXISTS: int +SP_DOES_NOT_EXIST: int +SP_DROP_FAILED: int +SP_STORE_FAILED: int +SP_LILABEL_MISMATCH: int 
+SP_LABEL_REDEFINE: int +SP_LABEL_MISMATCH: int +SP_UNINIT_VAR: int +SP_BADSELECT: int +SP_BADRETURN: int +SP_BADSTATEMENT: int +UPDATE_LOG_DEPRECATED_IGNORED: int +UPDATE_LOG_DEPRECATED_TRANSLATED: int +QUERY_INTERRUPTED: int +SP_WRONG_NO_OF_ARGS: int +SP_COND_MISMATCH: int +SP_NORETURN: int +SP_NORETURNEND: int +SP_BAD_CURSOR_QUERY: int +SP_BAD_CURSOR_SELECT: int +SP_CURSOR_MISMATCH: int +SP_CURSOR_ALREADY_OPEN: int +SP_CURSOR_NOT_OPEN: int +SP_UNDECLARED_VAR: int +SP_WRONG_NO_OF_FETCH_ARGS: int +SP_FETCH_NO_DATA: int +SP_DUP_PARAM: int +SP_DUP_VAR: int +SP_DUP_COND: int +SP_DUP_CURS: int +SP_CANT_ALTER: int +SP_SUBSELECT_NYI: int +STMT_NOT_ALLOWED_IN_SF_OR_TRG: int +SP_VARCOND_AFTER_CURSHNDLR: int +SP_CURSOR_AFTER_HANDLER: int +SP_CASE_NOT_FOUND: int +FPARSER_TOO_BIG_FILE: int +FPARSER_BAD_HEADER: int +FPARSER_EOF_IN_COMMENT: int +FPARSER_ERROR_IN_PARAMETER: int +FPARSER_EOF_IN_UNKNOWN_PARAMETER: int +VIEW_NO_EXPLAIN: int +WRONG_OBJECT: int +NONUPDATEABLE_COLUMN: int +VIEW_SELECT_CLAUSE: int +VIEW_SELECT_VARIABLE: int +VIEW_SELECT_TMPTABLE: int +VIEW_WRONG_LIST: int +WARN_VIEW_MERGE: int +WARN_VIEW_WITHOUT_KEY: int +VIEW_INVALID: int +SP_NO_DROP_SP: int +TRG_ALREADY_EXISTS: int +TRG_DOES_NOT_EXIST: int +TRG_ON_VIEW_OR_TEMP_TABLE: int +TRG_CANT_CHANGE_ROW: int +TRG_NO_SUCH_ROW_IN_TRG: int +NO_DEFAULT_FOR_FIELD: int +DIVISION_BY_ZERO: int +TRUNCATED_WRONG_VALUE_FOR_FIELD: int +ILLEGAL_VALUE_FOR_TYPE: int +VIEW_NONUPD_CHECK: int +VIEW_CHECK_FAILED: int +PROCACCESS_DENIED_ERROR: int +RELAY_LOG_FAIL: int +UNKNOWN_TARGET_BINLOG: int +IO_ERR_LOG_INDEX_READ: int +BINLOG_PURGE_PROHIBITED: int +FSEEK_FAIL: int +BINLOG_PURGE_FATAL_ERR: int +LOG_IN_USE: int +LOG_PURGE_UNKNOWN_ERR: int +RELAY_LOG_INIT: int +NO_BINARY_LOGGING: int +RESERVED_SYNTAX: int +PS_MANY_PARAM: int +KEY_PART_0: int +VIEW_CHECKSUM: int +VIEW_MULTIUPDATE: int +VIEW_NO_INSERT_FIELD_LIST: int +VIEW_DELETE_MERGE_VIEW: int +CANNOT_USER: int +XAER_NOTA: int +XAER_INVAL: int +XAER_RMFAIL: int +XAER_OUTSIDE: int +XAER_RMERR: int +XA_RBROLLBACK: int +NONEXISTING_PROC_GRANT: int +PROC_AUTO_GRANT_FAIL: int +PROC_AUTO_REVOKE_FAIL: int +DATA_TOO_LONG: int +SP_BAD_SQLSTATE: int +STARTUP: int +LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR: int +CANT_CREATE_USER_WITH_GRANT: int +WRONG_VALUE_FOR_TYPE: int +TABLE_DEF_CHANGED: int +SP_DUP_HANDLER: int +SP_NOT_VAR_ARG: int +SP_NO_RETSET: int +CANT_CREATE_GEOMETRY_OBJECT: int +BINLOG_UNSAFE_ROUTINE: int +BINLOG_CREATE_ROUTINE_NEED_SUPER: int +STMT_HAS_NO_OPEN_CURSOR: int +COMMIT_NOT_ALLOWED_IN_SF_OR_TRG: int +NO_DEFAULT_FOR_VIEW_FIELD: int +SP_NO_RECURSION: int +TOO_BIG_SCALE: int +TOO_BIG_PRECISION: int +M_BIGGER_THAN_D: int +WRONG_LOCK_OF_SYSTEM_TABLE: int +CONNECT_TO_FOREIGN_DATA_SOURCE: int +QUERY_ON_FOREIGN_DATA_SOURCE: int +FOREIGN_DATA_SOURCE_DOESNT_EXIST: int +FOREIGN_DATA_STRING_INVALID_CANT_CREATE: int +FOREIGN_DATA_STRING_INVALID: int +TRG_IN_WRONG_SCHEMA: int +STACK_OVERRUN_NEED_MORE: int +TOO_LONG_BODY: int +WARN_CANT_DROP_DEFAULT_KEYCACHE: int +TOO_BIG_DISPLAYWIDTH: int +XAER_DUPID: int +DATETIME_FUNCTION_OVERFLOW: int +CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG: int +VIEW_PREVENT_UPDATE: int +PS_NO_RECURSION: int +SP_CANT_SET_AUTOCOMMIT: int +VIEW_FRM_NO_USER: int +VIEW_OTHER_USER: int +NO_SUCH_USER: int +FORBID_SCHEMA_CHANGE: int +ROW_IS_REFERENCED_2: int +NO_REFERENCED_ROW_2: int +SP_BAD_VAR_SHADOW: int +TRG_NO_DEFINER: int +OLD_FILE_FORMAT: int +SP_RECURSION_LIMIT: int +SP_WRONG_NAME: int +TABLE_NEEDS_UPGRADE: int +SP_NO_AGGREGATE: int +MAX_PREPARED_STMT_COUNT_REACHED: int +VIEW_RECURSIVE: int 
+NON_GROUPING_FIELD_USED: int +TABLE_CANT_HANDLE_SPKEYS: int +NO_TRIGGERS_ON_SYSTEM_SCHEMA: int +REMOVED_SPACES: int +AUTOINC_READ_FAILED: int +USERNAME: int +HOSTNAME: int +WRONG_STRING_LENGTH: int +NON_INSERTABLE_TABLE: int +ADMIN_WRONG_MRG_TABLE: int +TOO_HIGH_LEVEL_OF_NESTING_FOR_SELECT: int +NAME_BECOMES_EMPTY: int +AMBIGUOUS_FIELD_TERM: int +FOREIGN_SERVER_EXISTS: int +FOREIGN_SERVER_DOESNT_EXIST: int +ILLEGAL_HA_CREATE_OPTION: int +PARTITION_REQUIRES_VALUES_ERROR: int +PARTITION_WRONG_VALUES_ERROR: int +PARTITION_MAXVALUE_ERROR: int +PARTITION_WRONG_NO_PART_ERROR: int +PARTITION_WRONG_NO_SUBPART_ERROR: int +WRONG_EXPR_IN_PARTITION_FUNC_ERROR: int +FIELD_NOT_FOUND_PART_ERROR: int +INCONSISTENT_PARTITION_INFO_ERROR: int +PARTITION_FUNC_NOT_ALLOWED_ERROR: int +PARTITIONS_MUST_BE_DEFINED_ERROR: int +RANGE_NOT_INCREASING_ERROR: int +INCONSISTENT_TYPE_OF_FUNCTIONS_ERROR: int +MULTIPLE_DEF_CONST_IN_LIST_PART_ERROR: int +PARTITION_ENTRY_ERROR: int +MIX_HANDLER_ERROR: int +PARTITION_NOT_DEFINED_ERROR: int +TOO_MANY_PARTITIONS_ERROR: int +SUBPARTITION_ERROR: int +CANT_CREATE_HANDLER_FILE: int +BLOB_FIELD_IN_PART_FUNC_ERROR: int +UNIQUE_KEY_NEED_ALL_FIELDS_IN_PF: int +NO_PARTS_ERROR: int +PARTITION_MGMT_ON_NONPARTITIONED: int +FOREIGN_KEY_ON_PARTITIONED: int +DROP_PARTITION_NON_EXISTENT: int +DROP_LAST_PARTITION: int +COALESCE_ONLY_ON_HASH_PARTITION: int +REORG_HASH_ONLY_ON_SAME_NO: int +REORG_NO_PARAM_ERROR: int +ONLY_ON_RANGE_LIST_PARTITION: int +ADD_PARTITION_SUBPART_ERROR: int +ADD_PARTITION_NO_NEW_PARTITION: int +COALESCE_PARTITION_NO_PARTITION: int +REORG_PARTITION_NOT_EXIST: int +SAME_NAME_PARTITION: int +NO_BINLOG_ERROR: int +CONSECUTIVE_REORG_PARTITIONS: int +REORG_OUTSIDE_RANGE: int +PARTITION_FUNCTION_FAILURE: int +LIMITED_PART_RANGE: int +PLUGIN_IS_NOT_LOADED: int +WRONG_VALUE: int +NO_PARTITION_FOR_GIVEN_VALUE: int +FILEGROUP_OPTION_ONLY_ONCE: int +CREATE_FILEGROUP_FAILED: int +DROP_FILEGROUP_FAILED: int +TABLESPACE_AUTO_EXTEND_ERROR: int +WRONG_SIZE_NUMBER: int +SIZE_OVERFLOW_ERROR: int +ALTER_FILEGROUP_FAILED: int +BINLOG_ROW_LOGGING_FAILED: int +EVENT_ALREADY_EXISTS: int +EVENT_DOES_NOT_EXIST: int +EVENT_INTERVAL_NOT_POSITIVE_OR_TOO_BIG: int +EVENT_ENDS_BEFORE_STARTS: int +EVENT_EXEC_TIME_IN_THE_PAST: int +EVENT_SAME_NAME: int +DROP_INDEX_FK: int +WARN_DEPRECATED_SYNTAX_WITH_VER: int +CANT_LOCK_LOG_TABLE: int +FOREIGN_DUPLICATE_KEY_OLD_UNUSED: int +COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE: int +TEMP_TABLE_PREVENTS_SWITCH_OUT_OF_RBR: int +STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_FORMAT: int +PARTITION_NO_TEMPORARY: int +PARTITION_CONST_DOMAIN_ERROR: int +PARTITION_FUNCTION_IS_NOT_ALLOWED: int +NULL_IN_VALUES_LESS_THAN: int +WRONG_PARTITION_NAME: int +CANT_CHANGE_TX_CHARACTERISTICS: int +DUP_ENTRY_AUTOINCREMENT_CASE: int +EVENT_SET_VAR_ERROR: int +PARTITION_MERGE_ERROR: int +BASE64_DECODE_ERROR: int +EVENT_RECURSION_FORBIDDEN: int +ONLY_INTEGERS_ALLOWED: int +UNSUPORTED_LOG_ENGINE: int +BAD_LOG_STATEMENT: int +CANT_RENAME_LOG_TABLE: int +WRONG_PARAMCOUNT_TO_NATIVE_FCT: int +WRONG_PARAMETERS_TO_NATIVE_FCT: int +WRONG_PARAMETERS_TO_STORED_FCT: int +NATIVE_FCT_NAME_COLLISION: int +DUP_ENTRY_WITH_KEY_NAME: int +BINLOG_PURGE_EMFILE: int +EVENT_CANNOT_CREATE_IN_THE_PAST: int +EVENT_CANNOT_ALTER_IN_THE_PAST: int +NO_PARTITION_FOR_GIVEN_VALUE_SILENT: int +BINLOG_UNSAFE_STATEMENT: int +BINLOG_FATAL_ERROR: int +BINLOG_LOGGING_IMPOSSIBLE: int +VIEW_NO_CREATION_CTX: int +VIEW_INVALID_CREATION_CTX: int +TRG_CORRUPTED_FILE: int +TRG_NO_CREATION_CTX: int +TRG_INVALID_CREATION_CTX: int 
+EVENT_INVALID_CREATION_CTX: int +TRG_CANT_OPEN_TABLE: int +NO_FORMAT_DESCRIPTION_EVENT_BEFORE_BINLOG_STATEMENT: int +SLAVE_CORRUPT_EVENT: int +LOG_PURGE_NO_FILE: int +XA_RBTIMEOUT: int +XA_RBDEADLOCK: int +NEED_REPREPARE: int +WARN_NO_MASTER_INFO: int +WARN_OPTION_IGNORED: int +PLUGIN_DELETE_BUILTIN: int +WARN_PLUGIN_BUSY: int +VARIABLE_IS_READONLY: int +WARN_ENGINE_TRANSACTION_ROLLBACK: int +SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE: int +NDB_REPLICATION_SCHEMA_ERROR: int +CONFLICT_FN_PARSE_ERROR: int +EXCEPTIONS_WRITE_ERROR: int +TOO_LONG_TABLE_COMMENT: int +TOO_LONG_FIELD_COMMENT: int +FUNC_INEXISTENT_NAME_COLLISION: int +DATABASE_NAME: int +TABLE_NAME: int +PARTITION_NAME: int +SUBPARTITION_NAME: int +TEMPORARY_NAME: int +RENAMED_NAME: int +TOO_MANY_CONCURRENT_TRXS: int +WARN_NON_ASCII_SEPARATOR_NOT_IMPLEMENTED: int +DEBUG_SYNC_TIMEOUT: int +DEBUG_SYNC_HIT_LIMIT: int +DUP_SIGNAL_SET: int +SIGNAL_WARN: int +SIGNAL_NOT_FOUND: int +SIGNAL_EXCEPTION: int +RESIGNAL_WITHOUT_ACTIVE_HANDLER: int +SIGNAL_BAD_CONDITION_TYPE: int +WARN_COND_ITEM_TRUNCATED: int +COND_ITEM_TOO_LONG: int +UNKNOWN_LOCALE: int +SLAVE_IGNORE_SERVER_IDS: int +SAME_NAME_PARTITION_FIELD: int +PARTITION_COLUMN_LIST_ERROR: int +WRONG_TYPE_COLUMN_VALUE_ERROR: int +TOO_MANY_PARTITION_FUNC_FIELDS_ERROR: int +MAXVALUE_IN_VALUES_IN: int +TOO_MANY_VALUES_ERROR: int +ROW_SINGLE_PARTITION_FIELD_ERROR: int +FIELD_TYPE_NOT_ALLOWED_AS_PARTITION_FIELD: int +PARTITION_FIELDS_TOO_LONG: int +BINLOG_ROW_ENGINE_AND_STMT_ENGINE: int +BINLOG_ROW_MODE_AND_STMT_ENGINE: int +BINLOG_UNSAFE_AND_STMT_ENGINE: int +BINLOG_ROW_INJECTION_AND_STMT_ENGINE: int +BINLOG_STMT_MODE_AND_ROW_ENGINE: int +BINLOG_ROW_INJECTION_AND_STMT_MODE: int +BINLOG_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE: int +BINLOG_UNSAFE_LIMIT: int +BINLOG_UNSAFE_SYSTEM_TABLE: int +BINLOG_UNSAFE_AUTOINC_COLUMNS: int +BINLOG_UNSAFE_UDF: int +BINLOG_UNSAFE_SYSTEM_VARIABLE: int +BINLOG_UNSAFE_SYSTEM_FUNCTION: int +BINLOG_UNSAFE_NONTRANS_AFTER_TRANS: int +MESSAGE_AND_STATEMENT: int +SLAVE_CANT_CREATE_CONVERSION: int +INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_FORMAT: int +PATH_LENGTH: int +WARN_DEPRECATED_SYNTAX_NO_REPLACEMENT: int +WRONG_NATIVE_TABLE_STRUCTURE: int +WRONG_PERFSCHEMA_USAGE: int +WARN_I_S_SKIPPED_TABLE: int +INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_DIRECT: int +STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_DIRECT: int +SPATIAL_MUST_HAVE_GEOM_COL: int +TOO_LONG_INDEX_COMMENT: int +LOCK_ABORTED: int +DATA_OUT_OF_RANGE: int +WRONG_SPVAR_TYPE_IN_LIMIT: int +BINLOG_UNSAFE_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE: int +BINLOG_UNSAFE_MIXED_STATEMENT: int +INSIDE_TRANSACTION_PREVENTS_SWITCH_SQL_LOG_BIN: int +STORED_FUNCTION_PREVENTS_SWITCH_SQL_LOG_BIN: int +FAILED_READ_FROM_PAR_FILE: int +VALUES_IS_NOT_INT_TYPE_ERROR: int +ACCESS_DENIED_NO_PASSWORD_ERROR: int +SET_PASSWORD_AUTH_PLUGIN: int +TRUNCATE_ILLEGAL_FK: int +PLUGIN_IS_PERMANENT: int +SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MIN: int +SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MAX: int +STMT_CACHE_FULL: int +MULTI_UPDATE_KEY_CONFLICT: int +TABLE_NEEDS_REBUILD: int +WARN_OPTION_BELOW_LIMIT: int +INDEX_COLUMN_TOO_LONG: int +ERROR_IN_TRIGGER_BODY: int +ERROR_IN_UNKNOWN_TRIGGER_BODY: int +INDEX_CORRUPT: int +UNDO_RECORD_TOO_BIG: int +BINLOG_UNSAFE_INSERT_IGNORE_SELECT: int +BINLOG_UNSAFE_INSERT_SELECT_UPDATE: int +BINLOG_UNSAFE_REPLACE_SELECT: int +BINLOG_UNSAFE_CREATE_IGNORE_SELECT: int +BINLOG_UNSAFE_CREATE_REPLACE_SELECT: int +BINLOG_UNSAFE_UPDATE_IGNORE: int +PLUGIN_NO_UNINSTALL: int +PLUGIN_NO_INSTALL: int +BINLOG_UNSAFE_WRITE_AUTOINC_SELECT: int 
+BINLOG_UNSAFE_CREATE_SELECT_AUTOINC: int +BINLOG_UNSAFE_INSERT_TWO_KEYS: int +TABLE_IN_FK_CHECK: int +UNSUPPORTED_ENGINE: int +BINLOG_UNSAFE_AUTOINC_NOT_FIRST: int +CANNOT_LOAD_FROM_TABLE_V2: int +MASTER_DELAY_VALUE_OUT_OF_RANGE: int +ONLY_FD_AND_RBR_EVENTS_ALLOWED_IN_BINLOG_STATEMENT: int +PARTITION_EXCHANGE_DIFFERENT_OPTION: int +PARTITION_EXCHANGE_PART_TABLE: int +PARTITION_EXCHANGE_TEMP_TABLE: int +PARTITION_INSTEAD_OF_SUBPARTITION: int +UNKNOWN_PARTITION: int +TABLES_DIFFERENT_METADATA: int +ROW_DOES_NOT_MATCH_PARTITION: int +BINLOG_CACHE_SIZE_GREATER_THAN_MAX: int +WARN_INDEX_NOT_APPLICABLE: int +PARTITION_EXCHANGE_FOREIGN_KEY: int +RPL_INFO_DATA_TOO_LONG: int +BINLOG_STMT_CACHE_SIZE_GREATER_THAN_MAX: int +CANT_UPDATE_TABLE_IN_CREATE_TABLE_SELECT: int +PARTITION_CLAUSE_ON_NONPARTITIONED: int +ROW_DOES_NOT_MATCH_GIVEN_PARTITION_SET: int +CHANGE_RPL_INFO_REPOSITORY_FAILURE: int +WARNING_NOT_COMPLETE_ROLLBACK_WITH_CREATED_TEMP_TABLE: int +WARNING_NOT_COMPLETE_ROLLBACK_WITH_DROPPED_TEMP_TABLE: int +MTS_FEATURE_IS_NOT_SUPPORTED: int +MTS_UPDATED_DBS_GREATER_MAX: int +MTS_CANT_PARALLEL: int +MTS_INCONSISTENT_DATA: int +FULLTEXT_NOT_SUPPORTED_WITH_PARTITIONING: int +DA_INVALID_CONDITION_NUMBER: int +INSECURE_PLAIN_TEXT: int +INSECURE_CHANGE_MASTER: int +FOREIGN_DUPLICATE_KEY_WITH_CHILD_INFO: int +FOREIGN_DUPLICATE_KEY_WITHOUT_CHILD_INFO: int +SQLTHREAD_WITH_SECURE_SLAVE: int +TABLE_HAS_NO_FT: int +VARIABLE_NOT_SETTABLE_IN_SF_OR_TRIGGER: int +VARIABLE_NOT_SETTABLE_IN_TRANSACTION: int +SET_STATEMENT_CANNOT_INVOKE_FUNCTION: int +GTID_NEXT_CANT_BE_AUTOMATIC_IF_GTID_NEXT_LIST_IS_NON_NULL: int +MALFORMED_GTID_SET_SPECIFICATION: int +MALFORMED_GTID_SET_ENCODING: int +MALFORMED_GTID_SPECIFICATION: int +GNO_EXHAUSTED: int +BAD_SLAVE_AUTO_POSITION: int +AUTO_POSITION_REQUIRES_GTID_MODE_NOT_OFF: int +CANT_DO_IMPLICIT_COMMIT_IN_TRX_WHEN_GTID_NEXT_IS_SET: int +GTID_MODE_ON_REQUIRES_ENFORCE_GTID_CONSISTENCY_ON: int +CANT_SET_GTID_NEXT_TO_GTID_WHEN_GTID_MODE_IS_OFF: int +CANT_SET_GTID_NEXT_TO_ANONYMOUS_WHEN_GTID_MODE_IS_ON: int +CANT_SET_GTID_NEXT_LIST_TO_NON_NULL_WHEN_GTID_MODE_IS_OFF: int +GTID_UNSAFE_NON_TRANSACTIONAL_TABLE: int +GTID_UNSAFE_CREATE_SELECT: int +GTID_UNSAFE_CREATE_DROP_TEMPORARY_TABLE_IN_TRANSACTION: int +GTID_MODE_CAN_ONLY_CHANGE_ONE_STEP_AT_A_TIME: int +MASTER_HAS_PURGED_REQUIRED_GTIDS: int +CANT_SET_GTID_NEXT_WHEN_OWNING_GTID: int +UNKNOWN_EXPLAIN_FORMAT: int +CANT_EXECUTE_IN_READ_ONLY_TRANSACTION: int +TOO_LONG_TABLE_PARTITION_COMMENT: int +SLAVE_CONFIGURATION: int +INNODB_FT_LIMIT: int +INNODB_NO_FT_TEMP_TABLE: int +INNODB_FT_WRONG_DOCID_COLUMN: int +INNODB_FT_WRONG_DOCID_INDEX: int +INNODB_ONLINE_LOG_TOO_BIG: int +UNKNOWN_ALTER_ALGORITHM: int +UNKNOWN_ALTER_LOCK: int +MTS_CHANGE_MASTER_CANT_RUN_WITH_GAPS: int +MTS_RECOVERY_FAILURE: int +MTS_RESET_WORKERS: int +COL_COUNT_DOESNT_MATCH_CORRUPTED_V2: int +SLAVE_SILENT_RETRY_TRANSACTION: int +DISCARD_FK_CHECKS_RUNNING: int +TABLE_SCHEMA_MISMATCH: int +TABLE_IN_SYSTEM_TABLESPACE: int +IO_READ_ERROR: int +IO_WRITE_ERROR: int +TABLESPACE_MISSING: int +TABLESPACE_EXISTS: int +TABLESPACE_DISCARDED: int +INTERNAL_ERROR: int +INNODB_IMPORT_ERROR: int +INNODB_INDEX_CORRUPT: int +INVALID_YEAR_COLUMN_LENGTH: int +NOT_VALID_PASSWORD: int +MUST_CHANGE_PASSWORD: int +FK_NO_INDEX_CHILD: int +FK_NO_INDEX_PARENT: int +FK_FAIL_ADD_SYSTEM: int +FK_CANNOT_OPEN_PARENT: int +FK_INCORRECT_OPTION: int +FK_DUP_NAME: int +PASSWORD_FORMAT: int +FK_COLUMN_CANNOT_DROP: int +FK_COLUMN_CANNOT_DROP_CHILD: int +FK_COLUMN_NOT_NULL: int +DUP_INDEX: int 
+FK_COLUMN_CANNOT_CHANGE: int +FK_COLUMN_CANNOT_CHANGE_CHILD: int +MALFORMED_PACKET: int +READ_ONLY_MODE: int +GTID_NEXT_TYPE_UNDEFINED_GTID: int +VARIABLE_NOT_SETTABLE_IN_SP: int +CANT_SET_GTID_PURGED_WHEN_GTID_EXECUTED_IS_NOT_EMPTY: int +CANT_SET_GTID_PURGED_WHEN_OWNED_GTIDS_IS_NOT_EMPTY: int +GTID_PURGED_WAS_CHANGED: int +GTID_EXECUTED_WAS_CHANGED: int +BINLOG_STMT_MODE_AND_NO_REPL_TABLES: int +ALTER_OPERATION_NOT_SUPPORTED: int +ALTER_OPERATION_NOT_SUPPORTED_REASON: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_COPY: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_PARTITION: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_RENAME: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_COLUMN_TYPE: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_CHECK: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_NOPK: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_AUTOINC: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_HIDDEN_FTS: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_CHANGE_FTS: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_FTS: int +SQL_SLAVE_SKIP_COUNTER_NOT_SETTABLE_IN_GTID_MODE: int +DUP_UNKNOWN_IN_INDEX: int +IDENT_CAUSES_TOO_LONG_PATH: int +ALTER_OPERATION_NOT_SUPPORTED_REASON_NOT_NULL: int +MUST_CHANGE_PASSWORD_LOGIN: int +ROW_IN_WRONG_PARTITION: int +MTS_EVENT_BIGGER_PENDING_JOBS_SIZE_MAX: int +BINLOG_LOGICAL_CORRUPTION: int +WARN_PURGE_LOG_IN_USE: int +WARN_PURGE_LOG_IS_ACTIVE: int +AUTO_INCREMENT_CONFLICT: int +WARN_ON_BLOCKHOLE_IN_RBR: int +SLAVE_MI_INIT_REPOSITORY: int +SLAVE_RLI_INIT_REPOSITORY: int +ACCESS_DENIED_CHANGE_USER_ERROR: int +INNODB_READ_ONLY: int +STOP_SLAVE_SQL_THREAD_TIMEOUT: int +STOP_SLAVE_IO_THREAD_TIMEOUT: int +TABLE_CORRUPT: int +TEMP_FILE_WRITE_FAILURE: int +INNODB_FT_AUX_NOT_HEX_ID: int +OLD_TEMPORALS_UPGRADED: int +INNODB_FORCED_RECOVERY: int +AES_INVALID_IV: int +PLUGIN_CANNOT_BE_UNINSTALLED: int +GTID_UNSAFE_BINLOG_SPLITTABLE_STATEMENT_AND_ASSIGNED_GTID: int +SLAVE_HAS_MORE_GTIDS_THAN_MASTER: int +MISSING_KEY: int +ERROR_LAST: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/FIELD_TYPE.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/FIELD_TYPE.pyi new file mode 100644 index 00000000..26f61055 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/FIELD_TYPE.pyi @@ -0,0 +1,29 @@ +DECIMAL: int +TINY: int +SHORT: int +LONG: int +FLOAT: int +DOUBLE: int +NULL: int +TIMESTAMP: int +LONGLONG: int +INT24: int +DATE: int +TIME: int +DATETIME: int +YEAR: int +VARCHAR: int +BIT: int +JSON: int +NEWDECIMAL: int +ENUM: int +SET: int +TINY_BLOB: int +MEDIUM_BLOB: int +LONG_BLOB: int +BLOB: int +VAR_STRING: int +STRING: int +GEOMETRY: int +CHAR: int +INTERVAL: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/FLAG.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/FLAG.pyi new file mode 100644 index 00000000..9fe6c7a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/FLAG.pyi @@ -0,0 +1,16 @@ +NOT_NULL: int +PRI_KEY: int +UNIQUE_KEY: int +MULTIPLE_KEY: int +BLOB: int +UNSIGNED: int +ZEROFILL: int +BINARY: int +ENUM: int +AUTO_INCREMENT: int +TIMESTAMP: int +SET: int +NUM: int +PART_KEY: int +GROUP: int +UNIQUE: int diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/__init__.pyi new file mode 100644 index 00000000..3eaad111 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/constants/__init__.pyi @@ -0,0 +1 @@ +from . import CLIENT as CLIENT, CR as CR, ER as ER, FIELD_TYPE as FIELD_TYPE, FLAG as FLAG diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/converters.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/converters.pyi new file mode 100644 index 00000000..d350ee38 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/converters.pyi @@ -0,0 +1,30 @@ +import array +from typing import Any + +from MySQLdb._exceptions import ProgrammingError as ProgrammingError +from MySQLdb._mysql import string_literal as string_literal +from MySQLdb.constants import FIELD_TYPE as FIELD_TYPE, FLAG as FLAG +from MySQLdb.times import ( + Date as Date, + Date_or_None as Date_or_None, + DateTime2literal as DateTime2literal, + DateTime_or_None as DateTime_or_None, + DateTimeDelta2literal as DateTimeDelta2literal, + DateTimeDeltaType as DateTimeDeltaType, + DateTimeType as DateTimeType, + TimeDelta_or_None as TimeDelta_or_None, +) + +NoneType: Any +ArrayType = array.array + +def Bool2Str(s, d): ... +def Set2Str(s, d): ... +def Thing2Str(s, d): ... +def Float2Str(o, d): ... +def None2NULL(o, d): ... +def Thing2Literal(o, d): ... +def Decimal2Literal(o, d): ... +def array2Str(o, d): ... + +conversions: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/cursors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/cursors.pyi new file mode 100644 index 00000000..72957eed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/cursors.pyi @@ -0,0 +1,63 @@ +from _typeshed import Incomplete +from typing import Any + +RE_INSERT_VALUES: Any + +class BaseCursor: + from ._exceptions import ( + DatabaseError as DatabaseError, + DataError as DataError, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + MySQLError as MySQLError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + ProgrammingError as ProgrammingError, + Warning as Warning, + ) + + max_stmt_length: Any + connection: Any + description: Any + description_flags: Any + rowcount: int + arraysize: int + lastrowid: Any + rownumber: Any + def __init__(self, connection) -> None: ... + def close(self) -> None: ... + def __enter__(self): ... + def __exit__(self, *exc_info) -> None: ... + def nextset(self): ... + def setinputsizes(self, *args) -> None: ... + def setoutputsizes(self, *args) -> None: ... + def execute(self, query, args: Incomplete | None = ...): ... + def executemany(self, query: str, args: list[Any]) -> int: ... + def callproc(self, procname, args=...): ... + def __iter__(self): ... + +class CursorStoreResultMixIn: + rownumber: Any + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... + def scroll(self, value, mode: str = ...) -> None: ... 
+ def __iter__(self): ... + +class CursorUseResultMixIn: + rownumber: Any + def fetchone(self): ... + def fetchmany(self, size: Incomplete | None = ...): ... + def fetchall(self): ... + def __iter__(self): ... + def next(self): ... + __next__: Any + +class CursorTupleRowsMixIn: ... +class CursorDictRowsMixIn: ... +class Cursor(CursorStoreResultMixIn, CursorTupleRowsMixIn, BaseCursor): ... +class DictCursor(CursorStoreResultMixIn, CursorDictRowsMixIn, BaseCursor): ... +class SSCursor(CursorUseResultMixIn, CursorTupleRowsMixIn, BaseCursor): ... +class SSDictCursor(CursorUseResultMixIn, CursorDictRowsMixIn, BaseCursor): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/release.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/release.pyi new file mode 100644 index 00000000..a78eb9f6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/release.pyi @@ -0,0 +1 @@ +version_info: tuple[int, int, int, str, int] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/times.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/times.pyi new file mode 100644 index 00000000..21a14cfc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/mysqlclient/MySQLdb/times.pyi @@ -0,0 +1,27 @@ +from _typeshed import Unused +from datetime import date, datetime, time, timedelta + +from MySQLdb._mysql import string_literal as string_literal + +Date = date +Time = time +TimeDelta = timedelta +Timestamp = datetime +DateTimeDeltaType = timedelta +DateTimeType = datetime + +def DateFromTicks(ticks: float | None) -> date: ... +def TimeFromTicks(ticks: float | None) -> time: ... +def TimestampFromTicks(ticks: float | None) -> datetime: ... + +format_TIME = str +format_DATE = str + +def format_TIMEDELTA(v: timedelta) -> str: ... +def format_TIMESTAMP(d: datetime) -> str: ... +def DateTime_or_None(s: str) -> datetime | None: ... +def TimeDelta_or_None(s: str) -> timedelta | None: ... +def Time_or_None(s: str) -> time | None: ... +def Date_or_None(s: str) -> date | None: ... +def DateTime2literal(d: datetime, c: Unused) -> str: ... +def DateTimeDelta2literal(d: datetime, c: Unused) -> str: ... 
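A minimal sketch of the MySQLdb.times helpers stubbed above, matching the signatures shown in times.pyi; the "YYYY-MM-DD HH:MM:SS" literal passed to DateTime_or_None is an assumption about the usual MySQL DATETIME text form, not something taken from this patch:

from datetime import datetime

from MySQLdb.times import DateTime_or_None, TimestampFromTicks, format_TIMESTAMP

# TimestampFromTicks maps a Unix timestamp to a datetime (Timestamp is an alias of datetime).
epoch = TimestampFromTicks(0)
assert isinstance(epoch, datetime)

# DateTime_or_None returns None when the string cannot be parsed.
parsed = DateTime_or_None("2023-05-01 12:34:56")
print(format_TIMESTAMP(parsed) if parsed is not None else "unparseable")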
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..a77ef49f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/@tests/stubtest_allowlist.txt @@ -0,0 +1,7 @@ +# Error: is not present in stub +# ============================= +netaddr.core.a # This is a temporary module attribute used to detect python version + +# Error: is not present at runtime +# ================================ +netaddr.ip.iana.XMLRecordParser.__getattr__ # __init__ has `self.__dict__.update(kwargs)` diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/METADATA.toml new file mode 100644 index 00000000..29511ee7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/METADATA.toml @@ -0,0 +1 @@ +version = "0.8.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/__init__.pyi new file mode 100644 index 00000000..194e6052 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/__init__.pyi @@ -0,0 +1,75 @@ +from netaddr.contrib.subnet_splitter import SubnetSplitter as SubnetSplitter +from netaddr.core import ( + INET_PTON as INET_PTON, + NOHOST as NOHOST, + ZEROFILL as ZEROFILL, + AddrConversionError as AddrConversionError, + AddrFormatError as AddrFormatError, + N as N, + NotRegisteredError as NotRegisteredError, + P as P, + Z as Z, +) +from netaddr.eui import EUI as EUI, IAB as IAB, OUI as OUI +from netaddr.ip import ( + IPAddress as IPAddress, + IPNetwork as IPNetwork, + IPRange as IPRange, + all_matching_cidrs as all_matching_cidrs, + cidr_abbrev_to_verbose as cidr_abbrev_to_verbose, + cidr_exclude as cidr_exclude, + cidr_merge as cidr_merge, + iprange_to_cidrs as iprange_to_cidrs, + iter_iprange as iter_iprange, + iter_unique_ips as iter_unique_ips, + largest_matching_cidr as largest_matching_cidr, + smallest_matching_cidr as smallest_matching_cidr, + spanning_cidr as spanning_cidr, +) +from netaddr.ip.glob import ( + IPGlob as IPGlob, + cidr_to_glob as cidr_to_glob, + glob_to_cidrs as glob_to_cidrs, + glob_to_iprange as glob_to_iprange, + glob_to_iptuple as glob_to_iptuple, + iprange_to_globs as iprange_to_globs, + valid_glob as valid_glob, +) +from netaddr.ip.nmap import iter_nmap_range as iter_nmap_range, valid_nmap_range as valid_nmap_range +from netaddr.ip.rfc1924 import base85_to_ipv6 as base85_to_ipv6, ipv6_to_base85 as ipv6_to_base85 +from netaddr.ip.sets import IPSet as IPSet +from netaddr.strategy.eui48 import ( + mac_bare as mac_bare, + mac_cisco as mac_cisco, + mac_eui48 as mac_eui48, + mac_pgsql as mac_pgsql, + mac_unix as mac_unix, + mac_unix_expanded as mac_unix_expanded, + valid_str as __eui48_valid_str, +) +from netaddr.strategy.eui64 import ( + eui64_bare as eui64_bare, + eui64_base as eui64_base, + eui64_cisco as eui64_cisco, + eui64_unix as eui64_unix, + eui64_unix_expanded as eui64_unix_expanded, + valid_str as __eui64_valid_str, +) +from netaddr.strategy.ipv4 import valid_str as __ipv4_valid_str +from netaddr.strategy.ipv6 import ( + 
ipv6_compact as ipv6_compact, + ipv6_full as ipv6_full, + ipv6_verbose as ipv6_verbose, + valid_str as __ipv6_valid_str, +) + +# These are reexported with different names +valid_ipv4 = __ipv4_valid_str +valid_ipv6 = __ipv6_valid_str +valid_mac = __eui48_valid_str +valid_eui64 = __eui64_valid_str + +# Module constants +__version__: str +VERSION: tuple[int, ...] +STATUS: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/cli.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/cli.pyi new file mode 100644 index 00000000..3c7c5b20 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/cli.pyi @@ -0,0 +1,3 @@ +from netaddr import * + +def main() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/compat.pyi new file mode 100644 index 00000000..fd612883 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/compat.pyi @@ -0,0 +1,2 @@ +# Python 2 compatibility module +# All members are prefixed with "_", nothing to declare. diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/contrib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/contrib/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/contrib/subnet_splitter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/contrib/subnet_splitter.pyi new file mode 100644 index 00000000..2e420006 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/contrib/subnet_splitter.pyi @@ -0,0 +1,7 @@ +from netaddr.ip import IPNetwork, _IPAddressAddr + +class SubnetSplitter: + def __init__(self, base_cidr: _IPAddressAddr) -> None: ... + def extract_subnet(self, prefix: int, count: int | None = ...) -> list[IPNetwork]: ... + def available_subnets(self) -> list[IPNetwork]: ... + def remove_subnet(self, ip_network: IPNetwork) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/core.pyi new file mode 100644 index 00000000..da4d657f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/core.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Iterator, Mapping +from typing_extensions import Final + +BIG_ENDIAN_PLATFORM: bool +P: Final = 1 +INET_PTON: Final = 1 +Z: Final = 2 +ZEROFILL: Final = 2 +N: Final = 4 +NOHOST: Final = 4 + +class AddrFormatError(Exception): ... +class AddrConversionError(Exception): ... +class NotRegisteredError(Exception): ... + +def num_bits(int_val: int) -> int: ... + +class Subscriber: + def update(self, data: Incomplete) -> None: ... + +class PrettyPrinter(Subscriber): + fh: SupportsWrite[str] + write_eol: bool + def __init__(self, fh: SupportsWrite[str] = ..., write_eol: bool = ...) -> None: ... + def update(self, data: object) -> None: ... 
+ +class Publisher: + subscribers: list[Subscriber] + def __init__(self) -> None: ... + def attach(self, subscriber: Subscriber) -> None: ... + def detach(self, subscriber: Subscriber) -> None: ... + def notify(self, data: object) -> None: ... + +class DictDotLookup: + def __init__(self, d: Mapping[str, object]) -> None: ... + def __getitem__(self, name: str) -> object: ... + def __iter__(self) -> Iterator[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/eui/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/eui/__init__.pyi new file mode 100644 index 00000000..9d2b963e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/eui/__init__.pyi @@ -0,0 +1,84 @@ +from typing import ClassVar, SupportsInt, overload +from typing_extensions import Literal, Self, SupportsIndex + +from netaddr.core import DictDotLookup +from netaddr.ip import IPAddress +from netaddr.strategy.eui48 import mac_eui48 +from netaddr.strategy.eui64 import eui64_base + +class BaseIdentifier: + def __init__(self) -> None: ... + def __int__(self) -> int: ... + def __long__(self) -> int: ... + def __oct__(self) -> str: ... + def __hex__(self) -> str: ... + def __index__(self) -> int: ... + +class OUI(BaseIdentifier): + records: list[dict[str, object]] + def __init__(self, oui: str | int) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + @property + def reg_count(self) -> int: ... + def registration(self, index: int = ...) -> DictDotLookup: ... + +class IAB(BaseIdentifier): + IAB_EUI_VALUES: ClassVar[tuple[int, int]] + @classmethod + def split_iab_mac(cls, eui_int: int, strict: bool = ...) -> tuple[int, int]: ... + record: dict[str, object] + def __init__(self, iab: str | int, strict: bool = ...) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def registration(self) -> DictDotLookup: ... + +class EUI(BaseIdentifier): + def __init__( + self, addr: EUI | int | str, version: int | None = ..., dialect: type[mac_eui48] | type[eui64_base] | None = ... + ) -> None: ... + @property + def value(self) -> int: ... + @value.setter + def value(self, value: str | SupportsInt | SupportsIndex) -> None: ... + @property + def dialect(self) -> type[mac_eui48] | type[eui64_base]: ... + @dialect.setter + def dialect(self, value: type[mac_eui48] | type[eui64_base] | None) -> None: ... + @property + def oui(self) -> OUI: ... + @property + def ei(self) -> str: ... + def is_iab(self) -> bool: ... + @property + def iab(self) -> IAB | None: ... + @property + def version(self) -> Literal[48, 64]: ... + @overload + def __getitem__(self, idx: int) -> int: ... + @overload + def __getitem__(self, idx: slice) -> list[int]: ... + @overload + def __getitem__(self, idx: int | slice) -> int | list[int]: ... + def __setitem__(self, idx: int, value: int) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __lt__(self, other: EUI | int | str) -> bool: ... + def __le__(self, other: EUI | int | str) -> bool: ... + def __gt__(self, other: EUI | int | str) -> bool: ... + def __ge__(self, other: EUI | int | str) -> bool: ... + def bits(self, word_sep: str | None = ...) -> str: ... + @property + def packed(self) -> bytes: ... + @property + def words(self) -> tuple[int, ...]: ... 
+ @property + def bin(self) -> str: ... + def eui64(self) -> Self: ... + def modified_eui64(self) -> Self: ... + def ipv6(self, prefix: str | SupportsInt | SupportsIndex) -> IPAddress: ... + def ipv6_link_local(self) -> IPAddress: ... + @property + def info(self) -> DictDotLookup: ... + def format(self, dialect: type[mac_eui48] | type[eui64_base] | None = ...) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/eui/ieee.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/eui/ieee.pyi new file mode 100644 index 00000000..9adf6459 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/eui/ieee.pyi @@ -0,0 +1,33 @@ +import _csv +from _typeshed import FileDescriptorOrPath, StrOrBytesPath +from collections.abc import Iterable +from typing import Any, BinaryIO, TextIO +from typing_extensions import TypeAlias + +from netaddr.core import Publisher, Subscriber + +_INDEX: TypeAlias = dict[int, list[tuple[int, int]]] +OUI_INDEX: _INDEX +IAB_INDEX: _INDEX + +class FileIndexer(Subscriber): + writer: _csv._writer + def __init__(self, index_file: TextIO | FileDescriptorOrPath) -> None: ... + def update(self, data: Iterable[Any]) -> None: ... + +class OUIIndexParser(Publisher): + fh: BinaryIO + def __init__(self, ieee_file: BinaryIO | FileDescriptorOrPath) -> None: ... + def parse(self) -> None: ... + +class IABIndexParser(Publisher): + fh: BinaryIO + def __init__(self, ieee_file: BinaryIO | FileDescriptorOrPath) -> None: ... + def parse(self) -> None: ... + +def create_index_from_registry( + registry_fh: BinaryIO | FileDescriptorOrPath, index_path: StrOrBytesPath, parser: type[OUIIndexParser] | type[IABIndexParser] +) -> None: ... +def create_indices() -> None: ... +def load_index(index: _INDEX, fp: Iterable[bytes]) -> None: ... +def load_indices() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/fbsocket.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/fbsocket.pyi new file mode 100644 index 00000000..e924e757 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/fbsocket.pyi @@ -0,0 +1,8 @@ +from typing_extensions import Literal + +AF_INET: Literal[2] +AF_INET6: Literal[10] + +def inet_ntoa(packed_ip: bytes) -> str: ... +def inet_ntop(af: int, packed_ip: bytes) -> str: ... +def inet_pton(af: int, ip_string: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/__init__.pyi new file mode 100644 index 00000000..91651b73 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/__init__.pyi @@ -0,0 +1,174 @@ +from _typeshed import Incomplete, Unused +from abc import abstractmethod +from collections.abc import Iterable, Iterator +from typing import SupportsInt, overload +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias + +from netaddr.core import DictDotLookup +from netaddr.strategy.ipv6 import ipv6_verbose + +class BaseIP: + def __init__(self) -> None: ... + @property + def value(self) -> int | None: ... + @value.setter + def value(self, value: int) -> None: ... + @abstractmethod + def key(self) -> tuple[int, ...]: ... 
+ @abstractmethod + def sort_key(self) -> tuple[int, ...]: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __lt__(self, other: BaseIP) -> bool: ... + def __le__(self, other: BaseIP) -> bool: ... + def __gt__(self, other: BaseIP) -> bool: ... + def __ge__(self, other: BaseIP) -> bool: ... + def is_unicast(self) -> bool: ... + def is_multicast(self) -> bool: ... + def is_loopback(self) -> bool: ... + def is_private(self) -> bool: ... + def is_link_local(self) -> bool: ... + def is_reserved(self) -> bool: ... + def is_ipv4_mapped(self) -> bool: ... + def is_ipv4_compat(self) -> bool: ... + @property + def info(self) -> DictDotLookup: ... + @property + def version(self) -> Literal[4, 6]: ... + +_IPAddressAddr: TypeAlias = BaseIP | int | str +_IPNetworkAddr: TypeAlias = IPNetwork | IPAddress | tuple[int, int] | str + +class IPAddress(BaseIP): + def __init__(self, addr: _IPAddressAddr, version: Literal[4, 6] | None = ..., flags: int = ...) -> None: ... + def netmask_bits(self) -> int: ... + def is_hostmask(self) -> bool: ... + def is_netmask(self) -> bool: ... + def __iadd__(self, num: int) -> Self: ... + def __isub__(self, num: int) -> Self: ... + def __add__(self, num: int) -> Self: ... + __radd__ = __add__ + def __sub__(self, num: int) -> Self: ... + def __rsub__(self, num: int) -> Self: ... + def key(self) -> tuple[int, ...]: ... + def sort_key(self) -> tuple[int, ...]: ... + def __int__(self) -> int: ... + def __long__(self) -> int: ... + def __oct__(self) -> str: ... + def __hex__(self) -> str: ... + def __index__(self) -> int: ... + def __bytes__(self) -> bytes: ... + def bits(self, word_sep: str | None = ...) -> str: ... + @property + def packed(self) -> bytes: ... + @property + def words(self) -> tuple[int, ...]: ... + @property + def bin(self) -> str: ... + @property + def reverse_dns(self) -> str: ... + def ipv4(self) -> Self: ... + def ipv6(self, ipv4_compatible: bool = ...) -> Self: ... + def format(self, dialect: type[ipv6_verbose] | None = ...) -> str: ... + def __or__(self, other: str | SupportsInt | SupportsIndex) -> Self: ... + def __and__(self, other: str | SupportsInt | SupportsIndex) -> Self: ... + def __xor__(self, other: str | SupportsInt | SupportsIndex) -> Self: ... + def __lshift__(self, numbits: int) -> Self: ... + def __rshift__(self, numbits: int) -> Self: ... + def __bool__(self) -> bool: ... + +class IPListMixin: + def __iter__(self) -> Iterator[IPAddress]: ... + @property + def size(self) -> int: ... + def __len__(self) -> int: ... + @overload + def __getitem__(self, index: SupportsIndex) -> IPAddress: ... + @overload + def __getitem__(self, index: slice) -> Iterator[IPAddress]: ... + @overload + def __getitem__(self, index: SupportsIndex | slice) -> IPAddress | Iterator[IPAddress]: ... + def __contains__(self, other: BaseIP | _IPAddressAddr) -> bool: ... + def __bool__(self) -> Literal[True]: ... + +def parse_ip_network( + module: Incomplete, addr: tuple[int, int] | str, implicit_prefix: bool = ..., flags: int = ... +) -> tuple[int, int]: ... + +class IPNetwork(BaseIP, IPListMixin): + def __init__( + self, addr: _IPNetworkAddr, implicit_prefix: bool = ..., version: Literal[4, 6] | None = ..., flags: int = ... + ) -> None: ... + @property + def prefixlen(self) -> int: ... + @prefixlen.setter + def prefixlen(self, value: int) -> None: ... + @property + def ip(self) -> IPAddress: ... + @property + def network(self) -> IPAddress: ... 
+ @property + def broadcast(self) -> IPAddress | None: ... + @property + def first(self) -> int: ... + @property + def last(self) -> int: ... + @property + def netmask(self) -> IPAddress: ... + @netmask.setter + def netmask(self, value: _IPAddressAddr) -> None: ... + @property + def hostmask(self) -> IPAddress: ... + @property + def cidr(self) -> IPNetwork: ... + def __iadd__(self, num: int) -> Self: ... + def __isub__(self, num: int) -> Self: ... + # runtime overrides __contains__ with incompatible type for "other" + def __contains__(self, other: BaseIP | _IPNetworkAddr) -> bool: ... # type: ignore[override] + def key(self) -> tuple[int, ...]: ... + def sort_key(self) -> tuple[int, ...]: ... + def ipv4(self) -> Self: ... + def ipv6(self, ipv4_compatible: bool = ...) -> Self: ... + def previous(self, step: int = ...) -> Self: ... + def next(self, step: int = ...) -> Self: ... + def supernet(self, prefixlen: int = ...) -> list[IPNetwork]: ... + def subnet(self, prefixlen: int, count: int | None = ..., fmt: Unused = None) -> Iterator[Self]: ... + def iter_hosts(self) -> Iterator[IPAddress]: ... + +class IPRange(BaseIP, IPListMixin): + def __init__(self, start: _IPAddressAddr, end: _IPAddressAddr, flags: int = ...) -> None: ... + def __contains__(self, other: BaseIP | _IPAddressAddr) -> bool: ... + @property + def first(self) -> int: ... + @property + def last(self) -> int: ... + def key(self) -> tuple[int, ...]: ... + def sort_key(self) -> tuple[int, ...]: ... + def cidrs(self) -> list[IPNetwork]: ... + +def iter_unique_ips(*args: IPRange | _IPNetworkAddr) -> Iterator[IPAddress]: ... +def cidr_abbrev_to_verbose(abbrev_cidr: str | SupportsInt | SupportsIndex) -> str: ... +def cidr_merge(ip_addrs: Iterable[IPRange | _IPNetworkAddr]) -> list[IPNetwork]: ... +def cidr_exclude(target: _IPNetworkAddr, exclude: _IPNetworkAddr) -> list[IPNetwork]: ... +def cidr_partition( + target: _IPNetworkAddr, exclude: _IPNetworkAddr +) -> tuple[list[IPNetwork], list[IPNetwork], list[IPNetwork]]: ... +def spanning_cidr(ip_addrs: Iterable[_IPNetworkAddr]) -> IPNetwork: ... +def iter_iprange(start: _IPAddressAddr, end: _IPAddressAddr, step: SupportsInt | SupportsIndex = ...) -> Iterator[IPAddress]: ... +def iprange_to_cidrs(start: _IPNetworkAddr, end: _IPNetworkAddr) -> list[IPNetwork]: ... +def smallest_matching_cidr(ip: _IPAddressAddr, cidrs: Iterable[_IPNetworkAddr]) -> IPNetwork | None: ... +def largest_matching_cidr(ip: _IPAddressAddr, cidrs: Iterable[_IPNetworkAddr]) -> IPNetwork | None: ... +def all_matching_cidrs(ip: _IPAddressAddr, cidrs: Iterable[_IPNetworkAddr]) -> list[IPNetwork]: ... + +IPV4_LOOPBACK: IPNetwork +IPV4_PRIVATE: tuple[IPNetwork | IPRange, ...] +IPV4_LINK_LOCAL: IPNetwork +IPV4_MULTICAST: IPNetwork +IPV4_6TO4: IPNetwork +IPV4_RESERVED: tuple[IPNetwork | IPRange, ...] +IPV6_LOOPBACK: IPAddress +IPV6_PRIVATE: tuple[IPNetwork, ...] +IPV6_LINK_LOCAL: IPNetwork +IPV6_MULTICAST: IPNetwork +IPV6_RESERVED: tuple[IPNetwork, ...] 
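Reviewer aside (illustrative only, not part of the vendored typeshed data): a minimal sketch of the netaddr.ip API that the stub above describes, assuming netaddr 0.8 is installed and using RFC 5737 example addresses.

from netaddr import IPAddress, IPNetwork, cidr_merge

net = IPNetwork("192.0.2.0/24")
print(net.network, net.broadcast, net.netmask)         # 192.0.2.0 192.0.2.255 255.255.255.0
print(IPAddress("192.0.2.17") in net)                  # True  (IPNetwork.__contains__)
print(net[1])                                          # 192.0.2.1  (IPListMixin.__getitem__)
print(cidr_merge(["192.0.2.0/25", "192.0.2.128/25"]))  # [IPNetwork('192.0.2.0/24')]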
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/glob.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/glob.pyi new file mode 100644 index 00000000..34660723 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/glob.pyi @@ -0,0 +1,17 @@ +from typing_extensions import TypeGuard + +from netaddr.ip import IPAddress, IPNetwork, IPRange, _IPAddressAddr, _IPNetworkAddr + +def valid_glob(ipglob: object) -> TypeGuard[str]: ... +def glob_to_iptuple(ipglob: str) -> tuple[IPAddress, IPAddress]: ... +def glob_to_iprange(ipglob: str) -> IPRange: ... +def iprange_to_globs(start: _IPAddressAddr, end: _IPAddressAddr) -> list[str]: ... +def glob_to_cidrs(ipglob: str) -> list[IPNetwork]: ... +def cidr_to_glob(cidr: _IPNetworkAddr) -> str: ... + +class IPGlob(IPRange): + def __init__(self, ipglob: str) -> None: ... + @property + def glob(self) -> str: ... + @glob.setter + def glob(self, value: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/iana.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/iana.pyi new file mode 100644 index 00000000..ef3657aa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/iana.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Callable, Mapping, MutableMapping +from typing import Any +from typing_extensions import TypeAlias +from xml.sax import handler +from xml.sax.xmlreader import XMLReader + +from netaddr.core import Publisher, Subscriber +from netaddr.ip import IPAddress, IPNetwork, IPRange + +_IanaInfoKey: TypeAlias = IPAddress | IPNetwork | IPRange + +IANA_INFO: dict[str, dict[_IanaInfoKey, dict[str, str]]] + +class SaxRecordParser(handler.ContentHandler): + def __init__(self, callback: Callable[[Mapping[str, object] | None], object] | None = ...) -> None: ... + def startElement(self, name: str, attrs: Mapping[str, object]) -> None: ... + def endElement(self, name: str) -> None: ... + def characters(self, content: str) -> None: ... + +class XMLRecordParser(Publisher): + xmlparser: XMLReader + fh: Incomplete + def __init__(self, fh: Incomplete, **kwargs: object) -> None: ... + def process_record(self, rec: Mapping[str, object]) -> dict[str, str] | None: ... + def consume_record(self, rec: object) -> None: ... + def parse(self) -> None: ... + # Arbitrary attributes are set in __init__ with `self.__dict__.update(kwargs)` + def __getattr__(self, __name: str) -> Any: ... + +class IPv4Parser(XMLRecordParser): + def process_record(self, rec: Mapping[str, object]) -> dict[str, str]: ... + +class IPv6Parser(XMLRecordParser): + def process_record(self, rec: Mapping[str, object]) -> dict[str, str]: ... + +class IPv6UnicastParser(XMLRecordParser): + def process_record(self, rec: Mapping[str, object]) -> dict[str, str]: ... + +class MulticastParser(XMLRecordParser): + def normalise_addr(self, addr: str) -> str: ... + +class DictUpdater(Subscriber): + dct: MutableMapping[_IanaInfoKey, Incomplete] + topic: str + unique_key: str + def __init__(self, dct: MutableMapping[_IanaInfoKey, Incomplete], topic: str, unique_key: str) -> None: ... + def update(self, data: Incomplete) -> None: ... + +def load_info() -> None: ... +def pprint_info(fh: SupportsWrite[str] | None = ...) -> None: ... 
+def query(ip_addr: IPAddress) -> dict[str, list[dict[str, str]]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/nmap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/nmap.pyi new file mode 100644 index 00000000..b63826b5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/nmap.pyi @@ -0,0 +1,6 @@ +from collections.abc import Iterator + +from netaddr.ip import IPAddress + +def valid_nmap_range(target_spec: str) -> bool: ... +def iter_nmap_range(*nmap_target_spec: str) -> Iterator[IPAddress]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/rfc1924.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/rfc1924.pyi new file mode 100644 index 00000000..6a4199bf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/rfc1924.pyi @@ -0,0 +1,9 @@ +from netaddr.ip import _IPAddressAddr + +def chr_range(low: str, high: str) -> list[str]: ... + +BASE_85: list[str] +BASE_85_DICT: dict[str, int] + +def ipv6_to_base85(addr: _IPAddressAddr) -> str: ... +def base85_to_ipv6(addr: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/sets.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/sets.pyi new file mode 100644 index 00000000..ac6e2cb1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/ip/sets.pyi @@ -0,0 +1,45 @@ +from collections.abc import Iterable, Iterator +from typing import NoReturn +from typing_extensions import Self, TypeAlias + +from netaddr.ip import IPAddress, IPNetwork, IPRange, _IPNetworkAddr + +_IPIterable: TypeAlias = IPNetwork | IPRange | IPSet | Iterable[_IPNetworkAddr | IPRange | int] + +class IPSet: + def __init__(self, iterable: _IPIterable | None = ..., flags: int = ...) -> None: ... + def compact(self) -> None: ... + def __hash__(self) -> NoReturn: ... + def __contains__(self, ip: _IPNetworkAddr) -> bool: ... + def __bool__(self) -> bool: ... + def __iter__(self) -> Iterator[IPAddress]: ... + def iter_cidrs(self) -> list[IPNetwork]: ... + def add(self, addr: IPRange | _IPNetworkAddr | int, flags: int = ...) -> None: ... + def remove(self, addr: IPRange | _IPNetworkAddr | int, flags: int = ...) -> None: ... + def pop(self) -> IPNetwork: ... + def isdisjoint(self, other: IPSet) -> bool: ... + def copy(self) -> Self: ... + def update(self, iterable: _IPIterable, flags: int = ...) -> None: ... + def clear(self) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __lt__(self, other: IPSet) -> bool: ... + def issubset(self, other: IPSet) -> bool: ... + __le__ = issubset + def __gt__(self, other: IPSet) -> bool: ... + def issuperset(self, other: IPSet) -> bool: ... + __ge__ = issuperset + def union(self, other: IPSet) -> Self: ... + __or__ = union + def intersection(self, other: IPSet) -> IPSet: ... + __and__ = intersection + def symmetric_difference(self, other: IPSet) -> IPSet: ... + __xor__ = symmetric_difference + def difference(self, other: IPSet) -> IPSet: ... + __sub__ = difference + def __len__(self) -> int: ... + @property + def size(self) -> int: ... + def iscontiguous(self) -> bool: ... 
+ def iprange(self) -> IPRange | None: ... + def iter_ipranges(self) -> Iterator[IPRange]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/__init__.pyi new file mode 100644 index 00000000..d2daa0f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/__init__.pyi @@ -0,0 +1,15 @@ +from collections.abc import Iterable, Sequence + +def bytes_to_bits() -> list[str]: ... + +BYTES_TO_BITS: list[str] + +def valid_words(words: Iterable[int], word_size: int, num_words: int) -> bool: ... +def int_to_words(int_val: int, word_size: int, num_words: int) -> tuple[int, ...]: ... +def words_to_int(words: Sequence[int], word_size: int, num_words: int) -> int: ... +def valid_bits(bits: str, width: int, word_sep: str = ...) -> bool: ... +def bits_to_int(bits: str, width: int, word_sep: str = ...) -> int: ... +def int_to_bits(int_val: int, word_size: int, num_words: int, word_sep: str = ...) -> str: ... +def valid_bin(bin_val: str, width: int) -> bool: ... +def int_to_bin(int_val: int, width: int) -> str: ... +def bin_to_int(bin_val: str, width: int) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/eui48.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/eui48.pyi new file mode 100644 index 00000000..aa7dcb24 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/eui48.pyi @@ -0,0 +1,43 @@ +from collections.abc import Iterable, Sequence +from re import Pattern +from typing import ClassVar +from typing_extensions import Literal + +AF_LINK: Literal[48] +width: Literal[48] +family: Literal[48] +family_name: Literal["MAC"] +version: Literal[48] +max_int: int + +class mac_eui48: + word_size: ClassVar[int] + num_words: ClassVar[int] + max_word: ClassVar[int] + word_sep: ClassVar[str] + word_fmt: ClassVar[str] + word_base: ClassVar[int] + +class mac_unix(mac_eui48): ... +class mac_unix_expanded(mac_unix): ... +class mac_cisco(mac_eui48): ... +class mac_bare(mac_eui48): ... +class mac_pgsql(mac_eui48): ... + +DEFAULT_DIALECT: type[mac_eui48] +RE_MAC_FORMATS: list[Pattern[str]] + +def valid_str(addr: str) -> bool: ... +def str_to_int(addr: str) -> int: ... +def int_to_str(int_val: int, dialect: type[mac_eui48] | None = ...) -> str: ... +def int_to_packed(int_val: int) -> bytes: ... +def packed_to_int(packed_int: bytes) -> int: ... +def valid_words(words: Iterable[int], dialect: type[mac_eui48] | None = ...) -> bool: ... +def int_to_words(int_val: int, dialect: type[mac_eui48] | None = ...) -> tuple[int, ...]: ... +def words_to_int(words: Sequence[int], dialect: type[mac_eui48] | None = ...) -> int: ... +def valid_bits(bits: str, dialect: type[mac_eui48] | None = ...) -> bool: ... +def bits_to_int(bits: str, dialect: type[mac_eui48] | None = ...) -> int: ... +def int_to_bits(int_val: int, dialect: type[mac_eui48] | None = ...) -> str: ... +def valid_bin(bin_val: str, dialect: type[mac_eui48] | None = ...) -> bool: ... +def int_to_bin(int_val: int) -> str: ... +def bin_to_int(bin_val: str) -> int: ... 
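Reviewer aside (illustrative only, not part of the patch): the EUI/dialect API stubbed above, assuming netaddr 0.8; the MAC value is just an example.

from netaddr import EUI, mac_unix_expanded

mac = EUI("00-1B-77-49-54-FD")
print(mac.oui, int(mac))        # 00-1B-77 117965411581
mac.dialect = mac_unix_expanded
print(mac)                      # 00:1b:77:49:54:fd
print(mac.ipv6_link_local())    # fe80::21b:77ff:fe49:54fd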
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/eui64.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/eui64.pyi new file mode 100644 index 00000000..c5e456cc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/eui64.pyi @@ -0,0 +1,42 @@ +from collections.abc import Iterable, Sequence +from re import Pattern +from typing import ClassVar +from typing_extensions import Literal + +AF_EUI64: Literal[64] +width: Literal[64] +family: Literal[64] +family_name: Literal["EUI-64"] +version: Literal[64] +max_int: int + +class eui64_base: + word_size: ClassVar[int] + num_words: ClassVar[int] + max_word: ClassVar[int] + word_sep: ClassVar[str] + word_fmt: ClassVar[str] + word_base: ClassVar[int] + +class eui64_unix(eui64_base): ... +class eui64_unix_expanded(eui64_unix): ... +class eui64_cisco(eui64_base): ... +class eui64_bare(eui64_base): ... + +DEFAULT_EUI64_DIALECT: type[eui64_base] +RE_EUI64_FORMATS: list[Pattern[str]] + +def valid_str(addr: str) -> bool: ... +def str_to_int(addr: str) -> int: ... +def int_to_str(int_val: int, dialect: type[eui64_base] | None = ...) -> str: ... +def int_to_packed(int_val: int) -> bytes: ... +def packed_to_int(packed_int: bytes) -> int: ... +def valid_words(words: Iterable[int], dialect: type[eui64_base] | None = ...) -> bool: ... +def int_to_words(int_val: int, dialect: type[eui64_base] | None = ...) -> tuple[int, ...]: ... +def words_to_int(words: Sequence[int], dialect: type[eui64_base] | None = ...) -> int: ... +def valid_bits(bits: str, dialect: type[eui64_base] | None = ...) -> bool: ... +def bits_to_int(bits: str, dialect: type[eui64_base] | None = ...) -> int: ... +def int_to_bits(int_val: int, dialect: type[eui64_base] | None = ...) -> str: ... +def valid_bin(bin_val: str, dialect: type[eui64_base] | None = ...) -> bool: ... +def int_to_bin(int_val: int) -> str: ... +def bin_to_int(bin_val: str) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/ipv4.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/ipv4.pyi new file mode 100644 index 00000000..763a858c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/ipv4.pyi @@ -0,0 +1,39 @@ +from _typeshed import Unused +from collections.abc import Iterable, Sequence +from socket import AddressFamily +from typing_extensions import Literal + +from netaddr.core import INET_PTON as INET_PTON, ZEROFILL as ZEROFILL + +width: Literal[32] +word_size: Literal[8] +word_fmt: Literal["%d"] +word_sep: Literal["."] +family: Literal[AddressFamily.AF_INET] +family_name: Literal["IPv4"] +version: Literal[4] +word_base: Literal[10] +max_int: int +num_words: Literal[4] +max_word: int +prefix_to_netmask: dict[int, int] +netmask_to_prefix: dict[int, int] +prefix_to_hostmask: dict[int, int] +hostmask_to_prefix: dict[int, int] + +def valid_str(addr: str, flags: int = ...) -> bool: ... +def str_to_int(addr: str, flags: int = ...) -> int: ... +def int_to_str(int_val: int, dialect: Unused = None) -> str: ... +def int_to_arpa(int_val: int) -> str: ... +def int_to_packed(int_val: int) -> bytes: ... +def packed_to_int(packed_int: bytes) -> int: ... +def valid_words(words: Iterable[int]) -> bool: ... +def int_to_words(int_val: int) -> tuple[int, ...]: ... 
+def words_to_int(words: Sequence[int]) -> int: ... +def valid_bits(bits: str) -> bool: ... +def bits_to_int(bits: str) -> int: ... +def int_to_bits(int_val: int, word_sep: str | None = ...) -> str: ... +def valid_bin(bin_val: str) -> bool: ... +def int_to_bin(int_val: int) -> str: ... +def bin_to_int(bin_val: str) -> int: ... +def expand_partial_address(addr: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/ipv6.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/ipv6.pyi new file mode 100644 index 00000000..d96f42ef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/netaddr/netaddr/strategy/ipv6.pyi @@ -0,0 +1,44 @@ +from collections.abc import Iterable, Sequence +from typing import ClassVar +from typing_extensions import Final, Literal + +from netaddr.fbsocket import AF_INET6 + +OPT_IMPORTS: bool +width: Literal[128] +word_size: Literal[16] +word_sep: Literal[":"] +family: Final = AF_INET6 +family_name: Literal["IPv6"] +version: Literal[6] +word_base: Literal[16] +max_int: int +num_words: Literal[8] +max_word: int +prefix_to_netmask: dict[int, int] +netmask_to_prefix: dict[int, int] +prefix_to_hostmask: dict[int, int] +hostmask_to_prefix: dict[int, int] + +class ipv6_compact: + word_fmt: ClassVar[str] + compact: ClassVar[bool] + +class ipv6_full(ipv6_compact): ... +class ipv6_verbose(ipv6_compact): ... + +def valid_str(addr: str, flags: int = ...) -> bool: ... +def str_to_int(addr: str, flags: int = ...) -> int: ... +def int_to_str(int_val: int, dialect: type[ipv6_compact] | None = ...) -> str: ... +def int_to_arpa(int_val: int) -> str: ... +def int_to_packed(int_val: int) -> bytes: ... +def packed_to_int(packed_int: bytes) -> int: ... +def valid_words(words: Iterable[int]) -> bool: ... +def int_to_words(int_val: int, num_words: int | None = ..., word_size: int | None = ...) -> tuple[int, ...]: ... +def words_to_int(words: Sequence[int]) -> int: ... +def valid_bits(bits: str) -> bool: ... +def bits_to_int(bits: str) -> int: ... +def int_to_bits(int_val: int, word_sep: str | None = ...) -> str: ... +def valid_bin(bin_val: str) -> bool: ... +def int_to_bin(int_val: int) -> str: ... +def bin_to_int(bin_val: str) -> int: ... 
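Reviewer aside (illustrative only, not part of the patch): IPSet, as typed in netaddr/ip/sets.pyi above, behaves like a set of addresses backed by merged CIDRs (netaddr 0.8 assumed).

from netaddr import IPAddress, IPSet

s = IPSet(["192.0.2.0/25", "192.0.2.128/25"])
print(s.iter_cidrs())                              # [IPNetwork('192.0.2.0/24')] -- adjacent CIDRs are compacted
print(IPAddress("192.0.2.77") in s)                # True
print((s & IPSet(["192.0.2.0/26"])).iter_cidrs())  # [IPNetwork('192.0.2.0/26')]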
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..7159ad88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +oauthlib.oauth1.rfc5849.parameters.prepare_headers diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/METADATA.toml new file mode 100644 index 00000000..ab10fefc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/METADATA.toml @@ -0,0 +1,4 @@ +version = "3.2.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/__init__.pyi new file mode 100644 index 00000000..3440b7c1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/__init__.pyi @@ -0,0 +1,2 @@ +def set_debug(debug_val) -> None: ... +def get_debug(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/common.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/common.pyi new file mode 100644 index 00000000..6dd0f731 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/common.pyi @@ -0,0 +1,59 @@ +from _typeshed import Incomplete +from typing import Any + +UNICODE_ASCII_CHARACTER_SET: str +CLIENT_ID_CHARACTER_SET: str +SANITIZE_PATTERN: Any +INVALID_HEX_PATTERN: Any +always_safe: str +log: Any + +def quote(s, safe: bytes = ...): ... +def unquote(s): ... +def urlencode(params): ... +def encode_params_utf8(params): ... +def decode_params_utf8(params): ... + +urlencoded: Any + +def urldecode(query): ... +def extract_params(raw): ... +def generate_nonce(): ... +def generate_timestamp(): ... +def generate_token(length: int = ..., chars=...): ... +def generate_signed_token(private_pem, request): ... +def verify_signed_token(public_pem, token): ... +def generate_client_id(length: int = ..., chars=...): ... +def add_params_to_qs(query, params): ... +def add_params_to_uri(uri, params, fragment: bool = ...): ... +def safe_string_equals(a, b): ... +def to_unicode(data, encoding: str = ...): ... + +class CaseInsensitiveDict(dict[Any, Any]): + proxy: Any + def __init__(self, data) -> None: ... + def __contains__(self, k): ... + def __delitem__(self, k) -> None: ... + def __getitem__(self, k): ... + def get(self, k, default: Incomplete | None = ...): ... + def __setitem__(self, k, v) -> None: ... + def update(self, *args, **kwargs) -> None: ... + +class Request: + uri: Any + http_method: Any + headers: Any + body: Any + decoded_body: Any + oauth_params: Any + validator_log: Any + def __init__( + self, uri, http_method: str = ..., body: Incomplete | None = ..., headers: Incomplete | None = ..., encoding: str = ... + ): ... + def __getattr__(self, name: str): ... + @property + def uri_query(self): ... + @property + def uri_query_params(self): ... + @property + def duplicate_params(self): ... 
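Reviewer aside (illustrative only, not part of the patch): most oauthlib signatures above are intentionally loose (Any/Incomplete); a small sketch of the oauthlib.common helpers, assuming oauthlib 3.2 and a placeholder callback URL.

from oauthlib.common import add_params_to_uri, generate_nonce, generate_token, urldecode

print(len(generate_token(length=20)))          # 20
uri = add_params_to_uri("https://example.com/cb", [("state", "xyz"), ("nonce", generate_nonce())])
print(uri)                                     # https://example.com/cb?state=xyz&nonce=...
print(urldecode("state=xyz&code=abc"))         # [('state', 'xyz'), ('code', 'abc')]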
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/__init__.pyi new file mode 100644 index 00000000..1d6a88dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/__init__.pyi @@ -0,0 +1,31 @@ +from .rfc5849 import ( + SIGNATURE_HMAC as SIGNATURE_HMAC, + SIGNATURE_HMAC_SHA1 as SIGNATURE_HMAC_SHA1, + SIGNATURE_HMAC_SHA256 as SIGNATURE_HMAC_SHA256, + SIGNATURE_HMAC_SHA512 as SIGNATURE_HMAC_SHA512, + SIGNATURE_PLAINTEXT as SIGNATURE_PLAINTEXT, + SIGNATURE_RSA as SIGNATURE_RSA, + SIGNATURE_RSA_SHA1 as SIGNATURE_RSA_SHA1, + SIGNATURE_RSA_SHA256 as SIGNATURE_RSA_SHA256, + SIGNATURE_RSA_SHA512 as SIGNATURE_RSA_SHA512, + SIGNATURE_TYPE_AUTH_HEADER as SIGNATURE_TYPE_AUTH_HEADER, + SIGNATURE_TYPE_BODY as SIGNATURE_TYPE_BODY, + SIGNATURE_TYPE_QUERY as SIGNATURE_TYPE_QUERY, + Client as Client, +) +from .rfc5849.endpoints import ( + AccessTokenEndpoint as AccessTokenEndpoint, + AuthorizationEndpoint as AuthorizationEndpoint, + RequestTokenEndpoint as RequestTokenEndpoint, + ResourceEndpoint as ResourceEndpoint, + SignatureOnlyEndpoint as SignatureOnlyEndpoint, + WebApplicationServer as WebApplicationServer, +) +from .rfc5849.errors import ( + InsecureTransportError as InsecureTransportError, + InvalidClientError as InvalidClientError, + InvalidRequestError as InvalidRequestError, + InvalidSignatureMethodError as InvalidSignatureMethodError, + OAuth1Error as OAuth1Error, +) +from .rfc5849.request_validator import RequestValidator as RequestValidator diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi new file mode 100644 index 00000000..3ee040db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/__init__.pyi @@ -0,0 +1,64 @@ +from _typeshed import Incomplete +from typing import Any + +log: Any +SIGNATURE_HMAC_SHA1: str +SIGNATURE_HMAC_SHA256: str +SIGNATURE_HMAC_SHA512: str +SIGNATURE_HMAC: str +SIGNATURE_RSA_SHA1: str +SIGNATURE_RSA_SHA256: str +SIGNATURE_RSA_SHA512: str +SIGNATURE_RSA: str +SIGNATURE_PLAINTEXT: str +SIGNATURE_METHODS: Any +SIGNATURE_TYPE_AUTH_HEADER: str +SIGNATURE_TYPE_QUERY: str +SIGNATURE_TYPE_BODY: str +CONTENT_TYPE_FORM_URLENCODED: str + +class Client: + SIGNATURE_METHODS: Any + @classmethod + def register_signature_method(cls, method_name, method_callback) -> None: ... + client_key: Any + client_secret: Any + resource_owner_key: Any + resource_owner_secret: Any + signature_method: Any + signature_type: Any + callback_uri: Any + rsa_key: Any + verifier: Any + realm: Any + encoding: Any + decoding: Any + nonce: Any + timestamp: Any + def __init__( + self, + client_key, + client_secret: Incomplete | None = ..., + resource_owner_key: Incomplete | None = ..., + resource_owner_secret: Incomplete | None = ..., + callback_uri: Incomplete | None = ..., + signature_method=..., + signature_type=..., + rsa_key: Incomplete | None = ..., + verifier: Incomplete | None = ..., + realm: Incomplete | None = ..., + encoding: str = ..., + decoding: Incomplete | None = ..., + nonce: Incomplete | None = ..., + timestamp: Incomplete | None = ..., + ): ... 
+ def get_oauth_signature(self, request): ... + def get_oauth_params(self, request): ... + def sign( + self, + uri, + http_method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + realm: Incomplete | None = ..., + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/__init__.pyi new file mode 100644 index 00000000..d9678f18 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/__init__.pyi @@ -0,0 +1,7 @@ +from .access_token import AccessTokenEndpoint as AccessTokenEndpoint +from .authorization import AuthorizationEndpoint as AuthorizationEndpoint +from .base import BaseEndpoint as BaseEndpoint +from .pre_configured import WebApplicationServer as WebApplicationServer +from .request_token import RequestTokenEndpoint as RequestTokenEndpoint +from .resource import ResourceEndpoint as ResourceEndpoint +from .signature_only import SignatureOnlyEndpoint as SignatureOnlyEndpoint diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi new file mode 100644 index 00000000..8cd0094c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/access_token.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class AccessTokenEndpoint(BaseEndpoint): + def create_access_token(self, request, credentials): ... + def create_access_token_response( + self, + uri, + http_method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + credentials: Incomplete | None = ..., + ): ... + def validate_access_token_request(self, request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/authorization.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/authorization.pyi new file mode 100644 index 00000000..8e4ec020 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/authorization.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete + +from .base import BaseEndpoint as BaseEndpoint + +class AuthorizationEndpoint(BaseEndpoint): + def create_verifier(self, request, credentials): ... + def create_authorization_response( + self, + uri, + http_method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + realms: Incomplete | None = ..., + credentials: Incomplete | None = ..., + ): ... + def get_realms_and_credentials( + self, uri, http_method: str = ..., body: Incomplete | None = ..., headers: Incomplete | None = ... + ): ... 
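Reviewer aside (illustrative only, not part of the patch): the oauth1 Client.sign() call whose loose signature appears above, assuming oauthlib 3.2; the key, secret, and URL are placeholders.

from oauthlib.oauth1 import SIGNATURE_HMAC_SHA256, Client

client = Client("my-key", client_secret="my-secret", signature_method=SIGNATURE_HMAC_SHA256)
uri, headers, body = client.sign("https://api.example.com/photos?size=large")
print(headers["Authorization"])   # OAuth oauth_nonce="...", oauth_signature="...", ...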
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/base.pyi new file mode 100644 index 00000000..467b40c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/base.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete +from typing import Any + +class BaseEndpoint: + request_validator: Any + token_generator: Any + def __init__(self, request_validator, token_generator: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/pre_configured.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/pre_configured.pyi new file mode 100644 index 00000000..fcc6a398 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/pre_configured.pyi @@ -0,0 +1,9 @@ +from . import ( + AccessTokenEndpoint as AccessTokenEndpoint, + AuthorizationEndpoint as AuthorizationEndpoint, + RequestTokenEndpoint as RequestTokenEndpoint, + ResourceEndpoint as ResourceEndpoint, +) + +class WebApplicationServer(RequestTokenEndpoint, AuthorizationEndpoint, AccessTokenEndpoint, ResourceEndpoint): + def __init__(self, request_validator) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi new file mode 100644 index 00000000..a383e70e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/request_token.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class RequestTokenEndpoint(BaseEndpoint): + def create_request_token(self, request, credentials): ... + def create_request_token_response( + self, + uri, + http_method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + credentials: Incomplete | None = ..., + ): ... + def validate_request_token_request(self, request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi new file mode 100644 index 00000000..9d7efb83 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/resource.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class ResourceEndpoint(BaseEndpoint): + def validate_protected_resource_request( + self, + uri, + http_method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + realms: Incomplete | None = ..., + ): ... 
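Reviewer aside (illustrative only, not part of the patch): a sketch of how the endpoint classes above are wired to a RequestValidator subclass (oauthlib 3.2 assumed); a real validator would override the remaining validate_*/get_*/save_* hooks.

from oauthlib.oauth1 import RequestValidator, WebApplicationServer

class ExampleValidator(RequestValidator):
    @property
    def enforce_ssl(self):
        return False  # example override only; production validators should keep SSL enforcement

server = WebApplicationServer(ExampleValidator())
print(type(server).__mro__[1:5])  # RequestTokenEndpoint, AuthorizationEndpoint, AccessTokenEndpoint, ResourceEndpoint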
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi new file mode 100644 index 00000000..a8b612bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/endpoints/signature_only.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class SignatureOnlyEndpoint(BaseEndpoint): + def validate_request(self, uri, http_method: str = ..., body: Incomplete | None = ..., headers: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/errors.pyi new file mode 100644 index 00000000..311dbff7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/errors.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from typing import Any + +class OAuth1Error(Exception): + error: Any + description: str + uri: Any + status_code: Any + def __init__( + self, + description: Incomplete | None = ..., + uri: Incomplete | None = ..., + status_code: int = ..., + request: Incomplete | None = ..., + ) -> None: ... + def in_uri(self, uri): ... + @property + def twotuples(self): ... + @property + def urlencoded(self): ... + +class InsecureTransportError(OAuth1Error): + error: str + description: str + +class InvalidSignatureMethodError(OAuth1Error): + error: str + +class InvalidRequestError(OAuth1Error): + error: str + +class InvalidClientError(OAuth1Error): + error: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/parameters.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/parameters.pyi new file mode 100644 index 00000000..82f078da --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/parameters.pyi @@ -0,0 +1,5 @@ +from _typeshed import Incomplete + +def prepare_headers(oauth_params, headers: Incomplete | None = ..., realm: Incomplete | None = ...): ... +def prepare_form_encoded_body(oauth_params, body): ... +def prepare_request_uri_query(oauth_params, uri): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/request_validator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/request_validator.pyi new file mode 100644 index 00000000..0fba878f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/request_validator.pyi @@ -0,0 +1,61 @@ +from _typeshed import Incomplete + +class RequestValidator: + def __init__(self) -> None: ... + @property + def allowed_signature_methods(self): ... + @property + def safe_characters(self): ... + @property + def client_key_length(self): ... + @property + def request_token_length(self): ... + @property + def access_token_length(self): ... + @property + def timestamp_lifetime(self): ... 
+ @property + def nonce_length(self): ... + @property + def verifier_length(self): ... + @property + def realms(self): ... + @property + def enforce_ssl(self): ... + def check_client_key(self, client_key): ... + def check_request_token(self, request_token): ... + def check_access_token(self, request_token): ... + def check_nonce(self, nonce): ... + def check_verifier(self, verifier): ... + def check_realms(self, realms): ... + @property + def dummy_client(self) -> None: ... + @property + def dummy_request_token(self) -> None: ... + @property + def dummy_access_token(self) -> None: ... + def get_client_secret(self, client_key, request) -> None: ... + def get_request_token_secret(self, client_key, token, request) -> None: ... + def get_access_token_secret(self, client_key, token, request) -> None: ... + def get_default_realms(self, client_key, request) -> None: ... + def get_realms(self, token, request) -> None: ... + def get_redirect_uri(self, token, request) -> None: ... + def get_rsa_key(self, client_key, request) -> None: ... + def invalidate_request_token(self, client_key, request_token, request) -> None: ... + def validate_client_key(self, client_key, request) -> None: ... + def validate_request_token(self, client_key, token, request) -> None: ... + def validate_access_token(self, client_key, token, request) -> None: ... + def validate_timestamp_and_nonce( + self, client_key, timestamp, nonce, request, request_token: Incomplete | None = ..., access_token: Incomplete | None = ... + ) -> None: ... + def validate_redirect_uri(self, client_key, redirect_uri, request) -> None: ... + def validate_requested_realms(self, client_key, realms, request) -> None: ... + def validate_realms( + self, client_key, token, request, uri: Incomplete | None = ..., realms: Incomplete | None = ... + ) -> None: ... + def validate_verifier(self, client_key, token, verifier, request) -> None: ... + def verify_request_token(self, token, request) -> None: ... + def verify_realms(self, token, realms, request) -> None: ... + def save_access_token(self, token, request) -> None: ... + def save_request_token(self, token, request) -> None: ... + def save_verifier(self, token, verifier, request) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi new file mode 100644 index 00000000..1b7c8df8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/signature.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from typing import Any + +log: Any + +def signature_base_string(http_method: str, base_str_uri: str, normalized_encoded_request_parameters: str) -> str: ... +def base_string_uri(uri: str, host: str | None = ...) -> str: ... +def collect_parameters( + uri_query: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + exclude_oauth_signature: bool = ..., + with_realm: bool = ..., +): ... +def normalize_parameters(params) -> str: ... +def sign_hmac_sha1_with_client(sig_base_str, client): ... +def verify_hmac_sha1(request, client_secret: Incomplete | None = ..., resource_owner_secret: Incomplete | None = ...): ... +def sign_hmac_sha1(base_string, client_secret, resource_owner_secret): ... +def sign_hmac_sha256_with_client(sig_base_str, client): ... 
+def verify_hmac_sha256(request, client_secret: Incomplete | None = ..., resource_owner_secret: Incomplete | None = ...): ... +def sign_hmac_sha256(base_string, client_secret, resource_owner_secret): ... +def sign_hmac_sha512_with_client(sig_base_str: str, client): ... +def verify_hmac_sha512(request, client_secret: str | None = ..., resource_owner_secret: str | None = ...): ... +def sign_rsa_sha1_with_client(sig_base_str, client): ... +def verify_rsa_sha1(request, rsa_public_key: str): ... +def sign_rsa_sha1(base_string, rsa_private_key): ... +def sign_rsa_sha256_with_client(sig_base_str: str, client): ... +def verify_rsa_sha256(request, rsa_public_key: str): ... +def sign_rsa_sha512_with_client(sig_base_str: str, client): ... +def verify_rsa_sha512(request, rsa_public_key: str): ... +def sign_plaintext_with_client(_signature_base_string, client): ... +def sign_plaintext(client_secret, resource_owner_secret): ... +def verify_plaintext(request, client_secret: Incomplete | None = ..., resource_owner_secret: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/utils.pyi new file mode 100644 index 00000000..cce8ec23 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth1/rfc5849/utils.pyi @@ -0,0 +1,9 @@ +UNICODE_ASCII_CHARACTER_SET: str + +def filter_params(target): ... +def filter_oauth_params(params): ... +def escape(u): ... +def unescape(u): ... +def parse_keqv_list(l): ... +def parse_http_list(u): ... +def parse_authorization_header(authorization_header): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/__init__.pyi new file mode 100644 index 00000000..c56170ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/__init__.pyi @@ -0,0 +1,59 @@ +from .rfc6749.clients import ( + BackendApplicationClient as BackendApplicationClient, + Client as Client, + LegacyApplicationClient as LegacyApplicationClient, + MobileApplicationClient as MobileApplicationClient, + ServiceApplicationClient as ServiceApplicationClient, + WebApplicationClient as WebApplicationClient, +) +from .rfc6749.endpoints import ( + AuthorizationEndpoint as AuthorizationEndpoint, + BackendApplicationServer as BackendApplicationServer, + IntrospectEndpoint as IntrospectEndpoint, + LegacyApplicationServer as LegacyApplicationServer, + MetadataEndpoint as MetadataEndpoint, + MobileApplicationServer as MobileApplicationServer, + ResourceEndpoint as ResourceEndpoint, + RevocationEndpoint as RevocationEndpoint, + Server as Server, + TokenEndpoint as TokenEndpoint, + WebApplicationServer as WebApplicationServer, +) +from .rfc6749.errors import ( + AccessDeniedError as AccessDeniedError, + FatalClientError as FatalClientError, + InsecureTransportError as InsecureTransportError, + InvalidClientError as InvalidClientError, + InvalidClientIdError as InvalidClientIdError, + InvalidGrantError as InvalidGrantError, + InvalidRedirectURIError as InvalidRedirectURIError, + InvalidRequestError as InvalidRequestError, + InvalidRequestFatalError as InvalidRequestFatalError, + InvalidScopeError as InvalidScopeError, + 
MismatchingRedirectURIError as MismatchingRedirectURIError, + MismatchingStateError as MismatchingStateError, + MissingClientIdError as MissingClientIdError, + MissingCodeError as MissingCodeError, + MissingRedirectURIError as MissingRedirectURIError, + MissingResponseTypeError as MissingResponseTypeError, + MissingTokenError as MissingTokenError, + MissingTokenTypeError as MissingTokenTypeError, + OAuth2Error as OAuth2Error, + ServerError as ServerError, + TemporarilyUnavailableError as TemporarilyUnavailableError, + TokenExpiredError as TokenExpiredError, + UnauthorizedClientError as UnauthorizedClientError, + UnsupportedGrantTypeError as UnsupportedGrantTypeError, + UnsupportedResponseTypeError as UnsupportedResponseTypeError, + UnsupportedTokenTypeError as UnsupportedTokenTypeError, +) +from .rfc6749.grant_types import ( + AuthorizationCodeGrant as AuthorizationCodeGrant, + ClientCredentialsGrant as ClientCredentialsGrant, + ImplicitGrant as ImplicitGrant, + RefreshTokenGrant as RefreshTokenGrant, + ResourceOwnerPasswordCredentialsGrant as ResourceOwnerPasswordCredentialsGrant, +) +from .rfc6749.request_validator import RequestValidator as RequestValidator +from .rfc6749.tokens import BearerToken as BearerToken, OAuth2Token as OAuth2Token +from .rfc6749.utils import is_secure_transport as is_secure_transport diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/__init__.pyi new file mode 100644 index 00000000..efc812cc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/__init__.pyi @@ -0,0 +1,11 @@ +from typing import Any + +from .endpoints.base import BaseEndpoint as BaseEndpoint, catch_errors_and_unavailability as catch_errors_and_unavailability +from .errors import ( + FatalClientError as FatalClientError, + OAuth2Error as OAuth2Error, + ServerError as ServerError, + TemporarilyUnavailableError as TemporarilyUnavailableError, +) + +log: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/__init__.pyi new file mode 100644 index 00000000..a3b9711c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/__init__.pyi @@ -0,0 +1,6 @@ +from .backend_application import BackendApplicationClient as BackendApplicationClient +from .base import AUTH_HEADER as AUTH_HEADER, BODY as BODY, URI_QUERY as URI_QUERY, Client as Client +from .legacy_application import LegacyApplicationClient as LegacyApplicationClient +from .mobile_application import MobileApplicationClient as MobileApplicationClient +from .service_application import ServiceApplicationClient as ServiceApplicationClient +from .web_application import WebApplicationClient as WebApplicationClient diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi new file mode 100644 index 00000000..c94f999a --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +from .base import Client as Client + +class BackendApplicationClient(Client): + grant_type: str + def prepare_request_body(self, body: str = ..., scope: Incomplete | None = ..., include_client_id: bool = ..., **kwargs): ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi new file mode 100644 index 00000000..ff329d44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/base.pyi @@ -0,0 +1,91 @@ +from _typeshed import Incomplete +from typing import Any + +AUTH_HEADER: str +URI_QUERY: str +BODY: str +FORM_ENC_HEADERS: Any + +class Client: + refresh_token_key: str + client_id: Any + default_token_placement: Any + token_type: Any + access_token: Any + refresh_token: Any + mac_key: Any + mac_algorithm: Any + token: Any + scope: Any + state_generator: Any + state: Any + redirect_url: Any + code: Any + expires_in: Any + code_verifier: str + code_challenge: str + code_challenge_method: str + def __init__( + self, + client_id, + default_token_placement=..., + token_type: str = ..., + access_token: Incomplete | None = ..., + refresh_token: Incomplete | None = ..., + mac_key: Incomplete | None = ..., + mac_algorithm: Incomplete | None = ..., + token: Incomplete | None = ..., + scope: Incomplete | None = ..., + state: Incomplete | None = ..., + redirect_url: Incomplete | None = ..., + state_generator=..., + code_verifier: str | None = ..., + code_challenge: str | None = ..., + code_challenge_method: str | None = ..., + **kwargs, + ) -> None: ... + @property + def token_types(self): ... + def prepare_request_uri(self, *args, **kwargs) -> None: ... + def prepare_request_body(self, *args, **kwargs) -> None: ... + def parse_request_uri_response(self, *args, **kwargs) -> None: ... + def add_token( + self, + uri, + http_method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + token_placement: Incomplete | None = ..., + **kwargs, + ): ... + def prepare_authorization_request( + self, + authorization_url, + state: Incomplete | None = ..., + redirect_url: Incomplete | None = ..., + scope: Incomplete | None = ..., + **kwargs, + ): ... + def prepare_token_request( + self, + token_url, + authorization_response: Incomplete | None = ..., + redirect_url: Incomplete | None = ..., + state: Incomplete | None = ..., + body: str = ..., + **kwargs, + ): ... + def prepare_refresh_token_request( + self, token_url, refresh_token: Incomplete | None = ..., body: str = ..., scope: Incomplete | None = ..., **kwargs + ): ... + def prepare_token_revocation_request( + self, revocation_url, token, token_type_hint: str = ..., body: str = ..., callback: Incomplete | None = ..., **kwargs + ): ... + def parse_request_body_response(self, body, scope: Incomplete | None = ..., **kwargs): ... + def prepare_refresh_body( + self, body: str = ..., refresh_token: Incomplete | None = ..., scope: Incomplete | None = ..., **kwargs + ): ... + def create_code_verifier(self, length: int) -> str: ... + def create_code_challenge(self, code_verifier: str, code_challenge_method: str | None = ...) -> str: ... 
+ def populate_code_attributes(self, response) -> None: ... + def populate_token_attributes(self, response) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi new file mode 100644 index 00000000..48fa83f8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +from .base import Client as Client + +class LegacyApplicationClient(Client): + grant_type: str + def __init__(self, client_id, **kwargs) -> None: ... + def prepare_request_body( # type: ignore[override] + self, username, password, body: str = ..., scope: Incomplete | None = ..., include_client_id: bool = ..., **kwargs + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi new file mode 100644 index 00000000..7b1223aa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import Client as Client + +class MobileApplicationClient(Client): + response_type: str + def prepare_request_uri( # type: ignore[override] + self, uri, redirect_uri: Incomplete | None = ..., scope: Incomplete | None = ..., state: Incomplete | None = ..., **kwargs + ): ... + token: Any + def parse_request_uri_response(self, uri, state: Incomplete | None = ..., scope: Incomplete | None = ...): ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi new file mode 100644 index 00000000..fdbfee5a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import Client as Client + +class ServiceApplicationClient(Client): + grant_type: str + private_key: Any + subject: Any + issuer: Any + audience: Any + def __init__( + self, + client_id, + private_key: Incomplete | None = ..., + subject: Incomplete | None = ..., + issuer: Incomplete | None = ..., + audience: Incomplete | None = ..., + **kwargs, + ) -> None: ... + def prepare_request_body( # type: ignore[override] + self, + private_key: Incomplete | None = ..., + subject: Incomplete | None = ..., + issuer: Incomplete | None = ..., + audience: Incomplete | None = ..., + expires_at: Incomplete | None = ..., + issued_at: Incomplete | None = ..., + extra_claims: Incomplete | None = ..., + body: str = ..., + scope: Incomplete | None = ..., + include_client_id: bool = ..., + **kwargs, + ): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi new file mode 100644 index 00000000..0198e28a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import Client as Client + +class WebApplicationClient(Client): + grant_type: str + code: Any + def __init__(self, client_id, code: Incomplete | None = ..., **kwargs) -> None: ... + def prepare_request_uri( # type: ignore[override] + self, + uri, + redirect_uri: Incomplete | None = ..., + scope: Incomplete | None = ..., + state: Incomplete | None = ..., + code_challenge: str | None = ..., + code_challenge_method: str | None = ..., + **kwargs, + ): ... + def prepare_request_body( # type: ignore[override] + self, + code: Incomplete | None = ..., + redirect_uri: Incomplete | None = ..., + body: str = ..., + include_client_id: bool = ..., + code_verifier: str | None = ..., + **kwargs, + ): ... + def parse_request_uri_response(self, uri, state: Incomplete | None = ...): ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/__init__.pyi new file mode 100644 index 00000000..d0582823 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/__init__.pyi @@ -0,0 +1,13 @@ +from .authorization import AuthorizationEndpoint as AuthorizationEndpoint +from .introspect import IntrospectEndpoint as IntrospectEndpoint +from .metadata import MetadataEndpoint as MetadataEndpoint +from .pre_configured import ( + BackendApplicationServer as BackendApplicationServer, + LegacyApplicationServer as LegacyApplicationServer, + MobileApplicationServer as MobileApplicationServer, + Server as Server, + WebApplicationServer as WebApplicationServer, +) +from .resource import ResourceEndpoint as ResourceEndpoint +from .revocation import RevocationEndpoint as RevocationEndpoint +from .token import TokenEndpoint as TokenEndpoint diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi new file mode 100644 index 00000000..35d45519 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/authorization.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class AuthorizationEndpoint(BaseEndpoint): + def __init__(self, default_response_type, default_token_type, response_types) -> None: ... + @property + def response_types(self): ... + @property + def default_response_type(self): ... + @property + def default_response_type_handler(self): ... + @property + def default_token_type(self): ... 
+ def create_authorization_response( + self, + uri, + http_method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + scopes: Incomplete | None = ..., + credentials: Incomplete | None = ..., + ): ... + def validate_authorization_request( + self, uri, http_method: str = ..., body: Incomplete | None = ..., headers: Incomplete | None = ... + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/base.pyi new file mode 100644 index 00000000..a7d18663 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/base.pyi @@ -0,0 +1,20 @@ +from typing import Any + +log: Any + +class BaseEndpoint: + def __init__(self) -> None: ... + @property + def valid_request_methods(self): ... + @valid_request_methods.setter + def valid_request_methods(self, valid_request_methods) -> None: ... + @property + def available(self): ... + @available.setter + def available(self, available) -> None: ... + @property + def catch_errors(self): ... + @catch_errors.setter + def catch_errors(self, catch_errors) -> None: ... + +def catch_errors_and_unavailability(f): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi new file mode 100644 index 00000000..40a148ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/introspect.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class IntrospectEndpoint(BaseEndpoint): + valid_token_types: Any + valid_request_methods: Any + request_validator: Any + supported_token_types: Any + def __init__(self, request_validator, supported_token_types: Incomplete | None = ...) -> None: ... + def create_introspect_response( + self, uri, http_method: str = ..., body: Incomplete | None = ..., headers: Incomplete | None = ... + ): ... + def validate_introspect_request(self, request) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi new file mode 100644 index 00000000..0c3176e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/metadata.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class MetadataEndpoint(BaseEndpoint): + raise_errors: Any + endpoints: Any + initial_claims: Any + claims: Any + def __init__(self, endpoints, claims=..., raise_errors: bool = ...) -> None: ... + def create_metadata_response( + self, uri, http_method: str = ..., body: Incomplete | None = ..., headers: Incomplete | None = ... + ): ... + def validate_metadata( + self, array, key, is_required: bool = ..., is_list: bool = ..., is_url: bool = ..., is_issuer: bool = ... + ) -> None: ... 
+ def validate_metadata_token(self, claims, endpoint) -> None: ... + def validate_metadata_authorization(self, claims, endpoint): ... + def validate_metadata_revocation(self, claims, endpoint) -> None: ... + def validate_metadata_introspection(self, claims, endpoint) -> None: ... + def validate_metadata_server(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi new file mode 100644 index 00000000..d33ea10a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/pre_configured.pyi @@ -0,0 +1,75 @@ +from _typeshed import Incomplete +from typing import Any + +from .authorization import AuthorizationEndpoint as AuthorizationEndpoint +from .introspect import IntrospectEndpoint as IntrospectEndpoint +from .resource import ResourceEndpoint as ResourceEndpoint +from .revocation import RevocationEndpoint as RevocationEndpoint +from .token import TokenEndpoint as TokenEndpoint + +class Server(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, ResourceEndpoint, RevocationEndpoint): + auth_grant: Any + implicit_grant: Any + password_grant: Any + credentials_grant: Any + refresh_grant: Any + bearer: Any + def __init__( + self, + request_validator, + token_expires_in: Incomplete | None = ..., + token_generator: Incomplete | None = ..., + refresh_token_generator: Incomplete | None = ..., + *args, + **kwargs, + ) -> None: ... + +class WebApplicationServer(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, ResourceEndpoint, RevocationEndpoint): + auth_grant: Any + refresh_grant: Any + bearer: Any + def __init__( + self, + request_validator, + token_generator: Incomplete | None = ..., + token_expires_in: Incomplete | None = ..., + refresh_token_generator: Incomplete | None = ..., + **kwargs, + ) -> None: ... + +class MobileApplicationServer(AuthorizationEndpoint, IntrospectEndpoint, ResourceEndpoint, RevocationEndpoint): + implicit_grant: Any + bearer: Any + def __init__( + self, + request_validator, + token_generator: Incomplete | None = ..., + token_expires_in: Incomplete | None = ..., + refresh_token_generator: Incomplete | None = ..., + **kwargs, + ) -> None: ... + +class LegacyApplicationServer(TokenEndpoint, IntrospectEndpoint, ResourceEndpoint, RevocationEndpoint): + password_grant: Any + refresh_grant: Any + bearer: Any + def __init__( + self, + request_validator, + token_generator: Incomplete | None = ..., + token_expires_in: Incomplete | None = ..., + refresh_token_generator: Incomplete | None = ..., + **kwargs, + ) -> None: ... + +class BackendApplicationServer(TokenEndpoint, IntrospectEndpoint, ResourceEndpoint, RevocationEndpoint): + credentials_grant: Any + bearer: Any + def __init__( + self, + request_validator, + token_generator: Incomplete | None = ..., + token_expires_in: Incomplete | None = ..., + refresh_token_generator: Incomplete | None = ..., + **kwargs, + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi new file mode 100644 index 00000000..19e06ae7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/resource.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class ResourceEndpoint(BaseEndpoint): + def __init__(self, default_token, token_types) -> None: ... + @property + def default_token(self): ... + @property + def default_token_type_handler(self): ... + @property + def tokens(self): ... + def verify_request( + self, + uri, + http_method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + scopes: Incomplete | None = ..., + ): ... + def find_token_type(self, request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi new file mode 100644 index 00000000..d9963434 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/revocation.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class RevocationEndpoint(BaseEndpoint): + valid_token_types: Any + valid_request_methods: Any + request_validator: Any + supported_token_types: Any + enable_jsonp: Any + def __init__(self, request_validator, supported_token_types: Incomplete | None = ..., enable_jsonp: bool = ...) -> None: ... + def create_revocation_response( + self, uri, http_method: str = ..., body: Incomplete | None = ..., headers: Incomplete | None = ... + ): ... + def validate_revocation_request(self, request) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi new file mode 100644 index 00000000..f1a474e9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/endpoints/token.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import BaseEndpoint as BaseEndpoint + +log: Any + +class TokenEndpoint(BaseEndpoint): + valid_request_methods: Any + def __init__(self, default_grant_type, default_token_type, grant_types) -> None: ... + @property + def grant_types(self): ... + @property + def default_grant_type(self): ... + @property + def default_grant_type_handler(self): ... + @property + def default_token_type(self): ... + def create_token_response( + self, + uri, + http_method: str = ..., + body: Incomplete | None = ..., + headers: Incomplete | None = ..., + credentials: Incomplete | None = ..., + grant_type_for_scope: Incomplete | None = ..., + claims: Incomplete | None = ..., + ): ... + def validate_token_request(self, request) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi new file mode 100644 index 00000000..1ca35366 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/errors.pyi @@ -0,0 +1,141 @@ +from _typeshed import Incomplete +from typing import Any + +class OAuth2Error(Exception): + error: Any + status_code: int + description: str + uri: Any + state: Any + redirect_uri: Any + client_id: Any + scopes: Any + response_type: Any + response_mode: Any + grant_type: Any + def __init__( + self, + description: Incomplete | None = ..., + uri: Incomplete | None = ..., + state: Incomplete | None = ..., + status_code: Incomplete | None = ..., + request: Incomplete | None = ..., + ) -> None: ... + def in_uri(self, uri): ... + @property + def twotuples(self): ... + @property + def urlencoded(self): ... + @property + def json(self): ... + @property + def headers(self): ... + +class TokenExpiredError(OAuth2Error): + error: str + +class InsecureTransportError(OAuth2Error): + error: str + description: str + +class MismatchingStateError(OAuth2Error): + error: str + description: str + +class MissingCodeError(OAuth2Error): + error: str + +class MissingTokenError(OAuth2Error): + error: str + +class MissingTokenTypeError(OAuth2Error): + error: str + +class FatalClientError(OAuth2Error): ... + +class InvalidRequestFatalError(FatalClientError): + error: str + +class InvalidRedirectURIError(InvalidRequestFatalError): + description: str + +class MissingRedirectURIError(InvalidRequestFatalError): + description: str + +class MismatchingRedirectURIError(InvalidRequestFatalError): + description: str + +class InvalidClientIdError(InvalidRequestFatalError): + description: str + +class MissingClientIdError(InvalidRequestFatalError): + description: str + +class InvalidRequestError(OAuth2Error): + error: str + +class MissingResponseTypeError(InvalidRequestError): + description: str + +class MissingCodeChallengeError(InvalidRequestError): + description: str + +class MissingCodeVerifierError(InvalidRequestError): + description: str + +class AccessDeniedError(OAuth2Error): + error: str + +class UnsupportedResponseTypeError(OAuth2Error): + error: str + +class UnsupportedCodeChallengeMethodError(InvalidRequestError): + description: str + +class InvalidScopeError(OAuth2Error): + error: str + +class ServerError(OAuth2Error): + error: str + +class TemporarilyUnavailableError(OAuth2Error): + error: str + +class InvalidClientError(FatalClientError): + error: str + status_code: int + +class InvalidGrantError(OAuth2Error): + error: str + status_code: int + +class UnauthorizedClientError(OAuth2Error): + error: str + +class UnsupportedGrantTypeError(OAuth2Error): + error: str + +class UnsupportedTokenTypeError(OAuth2Error): + error: str + +class InvalidTokenError(OAuth2Error): + error: str + status_code: int + description: str + +class InsufficientScopeError(OAuth2Error): + error: str + status_code: int + description: str + +class ConsentRequired(OAuth2Error): + error: str + +class LoginRequired(OAuth2Error): + error: str + +class CustomOAuth2Error(OAuth2Error): + error: Any + def __init__(self, error, *args, **kwargs) -> None: ... + +def raise_from_error(error, params: Incomplete | None = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/__init__.pyi new file mode 100644 index 00000000..d18b0495 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/__init__.pyi @@ -0,0 +1,5 @@ +from .authorization_code import AuthorizationCodeGrant as AuthorizationCodeGrant +from .client_credentials import ClientCredentialsGrant as ClientCredentialsGrant +from .implicit import ImplicitGrant as ImplicitGrant +from .refresh_token import RefreshTokenGrant as RefreshTokenGrant +from .resource_owner_password_credentials import ResourceOwnerPasswordCredentialsGrant as ResourceOwnerPasswordCredentialsGrant diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.pyi new file mode 100644 index 00000000..28cd3018 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/authorization_code.pyi @@ -0,0 +1,18 @@ +from typing import Any + +from .base import GrantTypeBase as GrantTypeBase + +log: Any + +def code_challenge_method_s256(verifier, challenge): ... +def code_challenge_method_plain(verifier, challenge): ... + +class AuthorizationCodeGrant(GrantTypeBase): + default_response_mode: str + response_types: Any + def create_authorization_code(self, request): ... + def create_authorization_response(self, request, token_handler): ... + def create_token_response(self, request, token_handler): ... + def validate_authorization_request(self, request): ... + def validate_token_request(self, request) -> None: ... + def validate_code_challenge(self, challenge, challenge_method, verifier): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/base.pyi new file mode 100644 index 00000000..f4412521 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/base.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete +from typing import Any + +log: Any + +class ValidatorsContainer: + pre_auth: Any + post_auth: Any + pre_token: Any + post_token: Any + def __init__(self, post_auth, post_token, pre_auth, pre_token) -> None: ... + @property + def all_pre(self): ... + @property + def all_post(self): ... + +class GrantTypeBase: + error_uri: Any + request_validator: Any + default_response_mode: str + refresh_token: bool + response_types: Any + def __init__(self, request_validator: Incomplete | None = ..., **kwargs) -> None: ... + def register_response_type(self, response_type) -> None: ... + def register_code_modifier(self, modifier) -> None: ... + def register_token_modifier(self, modifier) -> None: ... + def create_authorization_response(self, request, token_handler) -> None: ... + def create_token_response(self, request, token_handler) -> None: ... + def add_token(self, token, token_handler, request): ... 
+ def validate_grant_type(self, request) -> None: ... + def validate_scopes(self, request) -> None: ... + def prepare_authorization_response(self, request, token, headers, body, status): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/client_credentials.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/client_credentials.pyi new file mode 100644 index 00000000..0d33d829 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/client_credentials.pyi @@ -0,0 +1,9 @@ +from typing import Any + +from .base import GrantTypeBase as GrantTypeBase + +log: Any + +class ClientCredentialsGrant(GrantTypeBase): + def create_token_response(self, request, token_handler): ... + def validate_token_request(self, request) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/implicit.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/implicit.pyi new file mode 100644 index 00000000..613ac0a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/implicit.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from .base import GrantTypeBase as GrantTypeBase + +log: Any + +class ImplicitGrant(GrantTypeBase): + response_types: Any + grant_allows_refresh_token: bool + def create_authorization_response(self, request, token_handler): ... + def create_token_response(self, request, token_handler): ... + def validate_authorization_request(self, request): ... + def validate_token_request(self, request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.pyi new file mode 100644 index 00000000..c1d6c179 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/refresh_token.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import GrantTypeBase as GrantTypeBase + +log: Any + +class RefreshTokenGrant(GrantTypeBase): + def __init__(self, request_validator: Incomplete | None = ..., issue_new_refresh_tokens: bool = ..., **kwargs) -> None: ... + def create_token_response(self, request, token_handler): ... + def validate_token_request(self, request) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.pyi new file mode 100644 index 00000000..347fa456 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/grant_types/resource_owner_password_credentials.pyi @@ -0,0 +1,9 @@ +from typing import Any + +from .base import GrantTypeBase as GrantTypeBase + +log: Any + +class ResourceOwnerPasswordCredentialsGrant(GrantTypeBase): + def create_token_response(self, request, token_handler): ... + def validate_token_request(self, request) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi new file mode 100644 index 00000000..2e9fe127 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/parameters.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +def prepare_grant_uri( + uri, + client_id, + response_type, + redirect_uri: Incomplete | None = ..., + scope: Incomplete | None = ..., + state: Incomplete | None = ..., + code_challenge: str | None = ..., + code_challenge_method: str | None = ..., + **kwargs, +): ... +def prepare_token_request( + grant_type, body: str = ..., include_client_id: bool = ..., code_verifier: str | None = ..., **kwargs +): ... +def prepare_token_revocation_request( + url, token, token_type_hint: str = ..., callback: Incomplete | None = ..., body: str = ..., **kwargs +): ... +def parse_authorization_code_response(uri, state: Incomplete | None = ...): ... +def parse_implicit_response(uri, state: Incomplete | None = ..., scope: Incomplete | None = ...): ... +def parse_token_response(body, scope: Incomplete | None = ...): ... +def validate_token_parameters(params) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/request_validator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/request_validator.pyi new file mode 100644 index 00000000..447fd075 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/request_validator.pyi @@ -0,0 +1,32 @@ +from typing import Any + +log: Any + +class RequestValidator: + def client_authentication_required(self, request, *args, **kwargs): ... + def authenticate_client(self, request, *args, **kwargs) -> None: ... + def authenticate_client_id(self, client_id, request, *args, **kwargs) -> None: ... + def confirm_redirect_uri(self, client_id, code, redirect_uri, client, request, *args, **kwargs) -> None: ... + def get_default_redirect_uri(self, client_id, request, *args, **kwargs) -> None: ... + def get_default_scopes(self, client_id, request, *args, **kwargs) -> None: ... + def get_original_scopes(self, refresh_token, request, *args, **kwargs) -> None: ... + def is_within_original_scope(self, request_scopes, refresh_token, request, *args, **kwargs): ... + def introspect_token(self, token, token_type_hint, request, *args, **kwargs) -> None: ... 
+ def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs) -> None: ... + def revoke_token(self, token, token_type_hint, request, *args, **kwargs) -> None: ... + def rotate_refresh_token(self, request): ... + def save_authorization_code(self, client_id, code, request, *args, **kwargs) -> None: ... + def save_token(self, token, request, *args, **kwargs): ... + def save_bearer_token(self, token, request, *args, **kwargs) -> None: ... + def validate_bearer_token(self, token, scopes, request) -> None: ... + def validate_client_id(self, client_id, request, *args, **kwargs) -> None: ... + def validate_code(self, client_id, code, client, request, *args, **kwargs) -> None: ... + def validate_grant_type(self, client_id, grant_type, client, request, *args, **kwargs) -> None: ... + def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs) -> None: ... + def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs) -> None: ... + def validate_response_type(self, client_id, response_type, client, request, *args, **kwargs) -> None: ... + def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs) -> None: ... + def validate_user(self, username, password, client, request, *args, **kwargs) -> None: ... + def is_pkce_required(self, client_id, request): ... + def get_code_challenge(self, code, request) -> None: ... + def get_code_challenge_method(self, code, request) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi new file mode 100644 index 00000000..4fded89a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete +from typing import Any + +class OAuth2Token(dict[Any, Any]): + def __init__(self, params, old_scope: Incomplete | None = ...) -> None: ... + @property + def scope_changed(self): ... + @property + def old_scope(self): ... + @property + def old_scopes(self): ... + @property + def scope(self): ... + @property + def scopes(self): ... + @property + def missing_scopes(self): ... + @property + def additional_scopes(self): ... + +def prepare_mac_header( + token, + uri, + key, + http_method, + nonce: Incomplete | None = ..., + headers: Incomplete | None = ..., + body: Incomplete | None = ..., + ext: str = ..., + hash_algorithm: str = ..., + issue_time: Incomplete | None = ..., + draft: int = ..., +): ... +def prepare_bearer_uri(token, uri): ... +def prepare_bearer_headers(token, headers: Incomplete | None = ...): ... +def prepare_bearer_body(token, body: str = ...): ... +def random_token_generator(request, refresh_token: bool = ...): ... +def signed_token_generator(private_pem, **kwargs): ... +def get_token_from_header(request): ... + +class TokenBase: + def __call__(self, request, refresh_token: bool = ...) -> None: ... + def validate_request(self, request) -> None: ... + def estimate_type(self, request) -> None: ... + +class BearerToken(TokenBase): + request_validator: Any + token_generator: Any + refresh_token_generator: Any + expires_in: Any + def __init__( + self, + request_validator: Incomplete | None = ..., + token_generator: Incomplete | None = ..., + expires_in: Incomplete | None = ..., + refresh_token_generator: Incomplete | None = ..., + ) -> None: ... 
+ def create_token(self, request, refresh_token: bool = ..., **kwargs): ... + def validate_request(self, request): ... + def estimate_type(self, request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/utils.pyi new file mode 100644 index 00000000..dc660b79 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/oauth2/rfc6749/utils.pyi @@ -0,0 +1,7 @@ +def list_to_scope(scope): ... +def scope_to_list(scope): ... +def params_from_uri(uri): ... +def host_from_uri(uri): ... +def escape(u): ... +def generate_age(issue_time): ... +def is_secure_transport(uri): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/__init__.pyi new file mode 100644 index 00000000..e3f1f18f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/__init__.pyi @@ -0,0 +1,2 @@ +from .connect.core.endpoints import Server as Server, UserInfoEndpoint as UserInfoEndpoint +from .connect.core.request_validator import RequestValidator as RequestValidator diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/__init__.pyi new file mode 100644 index 00000000..2886b423 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/__init__.pyi @@ -0,0 +1,2 @@ +from .pre_configured import Server as Server +from .userinfo import UserInfoEndpoint as UserInfoEndpoint diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi new file mode 100644 index 00000000..c01840db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/pre_configured.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete +from typing import Any + +from oauthlib.oauth2.rfc6749.endpoints import ( + AuthorizationEndpoint as AuthorizationEndpoint, + IntrospectEndpoint as IntrospectEndpoint, + ResourceEndpoint as ResourceEndpoint, + RevocationEndpoint as RevocationEndpoint, + TokenEndpoint as TokenEndpoint, +) + +from .userinfo import UserInfoEndpoint as UserInfoEndpoint + 
+class Server(AuthorizationEndpoint, IntrospectEndpoint, TokenEndpoint, ResourceEndpoint, RevocationEndpoint, UserInfoEndpoint): + auth_grant: Any + implicit_grant: Any + password_grant: Any + credentials_grant: Any + refresh_grant: Any + openid_connect_auth: Any + openid_connect_implicit: Any + openid_connect_hybrid: Any + bearer: Any + jwt: Any + auth_grant_choice: Any + implicit_grant_choice: Any + token_grant_choice: Any + def __init__( + self, + request_validator, + token_expires_in: Incomplete | None = ..., + token_generator: Incomplete | None = ..., + refresh_token_generator: Incomplete | None = ..., + *args, + **kwargs, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi new file mode 100644 index 00000000..90bda604 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/endpoints/userinfo.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from typing import Any + +from oauthlib.oauth2.rfc6749.endpoints.base import BaseEndpoint as BaseEndpoint + +log: Any + +class UserInfoEndpoint(BaseEndpoint): + bearer: Any + request_validator: Any + def __init__(self, request_validator) -> None: ... + def create_userinfo_response( + self, uri, http_method: str = ..., body: Incomplete | None = ..., headers: Incomplete | None = ... + ): ... + def validate_userinfo_request(self, request) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/exceptions.pyi new file mode 100644 index 00000000..7976462b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/exceptions.pyi @@ -0,0 +1,53 @@ +from _typeshed import Incomplete + +from oauthlib.oauth2.rfc6749.errors import FatalClientError as FatalClientError, OAuth2Error as OAuth2Error + +class FatalOpenIDClientError(FatalClientError): ... +class OpenIDClientError(OAuth2Error): ... + +class InteractionRequired(OpenIDClientError): + error: str + status_code: int + +class LoginRequired(OpenIDClientError): + error: str + status_code: int + +class AccountSelectionRequired(OpenIDClientError): + error: str + +class ConsentRequired(OpenIDClientError): + error: str + status_code: int + +class InvalidRequestURI(OpenIDClientError): + error: str + description: str + +class InvalidRequestObject(OpenIDClientError): + error: str + description: str + +class RequestNotSupported(OpenIDClientError): + error: str + description: str + +class RequestURINotSupported(OpenIDClientError): + error: str + description: str + +class RegistrationNotSupported(OpenIDClientError): + error: str + description: str + +class InvalidTokenError(OAuth2Error): + error: str + status_code: int + description: str + +class InsufficientScopeError(OAuth2Error): + error: str + status_code: int + description: str + +def raise_from_error(error, params: Incomplete | None = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/__init__.pyi new file mode 100644 index 00000000..1d5a9ac6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/__init__.pyi @@ -0,0 +1,9 @@ +from .authorization_code import AuthorizationCodeGrant as AuthorizationCodeGrant +from .base import GrantTypeBase as GrantTypeBase +from .dispatchers import ( + AuthorizationCodeGrantDispatcher as AuthorizationCodeGrantDispatcher, + AuthorizationTokenGrantDispatcher as AuthorizationTokenGrantDispatcher, + ImplicitTokenGrantDispatcher as ImplicitTokenGrantDispatcher, +) +from .hybrid import HybridGrant as HybridGrant +from .implicit import ImplicitGrant as ImplicitGrant diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi new file mode 100644 index 00000000..d9f4eb63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/authorization_code.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import GrantTypeBase as GrantTypeBase + +log: Any + +class AuthorizationCodeGrant(GrantTypeBase): + proxy_target: Any + def __init__(self, request_validator: Incomplete | None = ..., **kwargs) -> None: ... + def add_id_token(self, token, token_handler, request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi new file mode 100644 index 00000000..8cd66324 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/base.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +log: Any + +class GrantTypeBase: + def __getattr__(self, attr: str): ... + def __setattr__(self, attr: str, value) -> None: ... + def validate_authorization_request(self, request): ... + def id_token_hash(self, value, hashfunc=...): ... + def add_id_token(self, token, token_handler, request, nonce: Incomplete | None = ...): ... + def openid_authorization_validator(self, request): ... 
+ +OpenIDConnectBase = GrantTypeBase diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi new file mode 100644 index 00000000..aa541de7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/dispatchers.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete +from typing import Any + +log: Any + +class Dispatcher: + default_grant: Any + oidc_grant: Any + +class AuthorizationCodeGrantDispatcher(Dispatcher): + default_grant: Any + oidc_grant: Any + def __init__(self, default_grant: Incomplete | None = ..., oidc_grant: Incomplete | None = ...) -> None: ... + def create_authorization_response(self, request, token_handler): ... + def validate_authorization_request(self, request): ... + +class ImplicitTokenGrantDispatcher(Dispatcher): + default_grant: Any + oidc_grant: Any + def __init__(self, default_grant: Incomplete | None = ..., oidc_grant: Incomplete | None = ...) -> None: ... + def create_authorization_response(self, request, token_handler): ... + def validate_authorization_request(self, request): ... + +class AuthorizationTokenGrantDispatcher(Dispatcher): + default_grant: Any + oidc_grant: Any + request_validator: Any + def __init__( + self, request_validator, default_grant: Incomplete | None = ..., oidc_grant: Incomplete | None = ... + ) -> None: ... + def create_token_response(self, request, token_handler): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi new file mode 100644 index 00000000..555f185c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/hybrid.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from typing import Any + +from oauthlib.oauth2.rfc6749.errors import InvalidRequestError as InvalidRequestError + +from ..request_validator import RequestValidator as RequestValidator +from .base import GrantTypeBase as GrantTypeBase + +log: Any + +class HybridGrant(GrantTypeBase): + request_validator: Any + proxy_target: Any + def __init__(self, request_validator: Incomplete | None = ..., **kwargs) -> None: ... + def add_id_token(self, token, token_handler, request): ... + def openid_authorization_validator(self, request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi new file mode 100644 index 00000000..b99659c1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/grant_types/implicit.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import GrantTypeBase as GrantTypeBase + +log: Any + +class ImplicitGrant(GrantTypeBase): + proxy_target: Any + def __init__(self, request_validator: Incomplete | None = ..., **kwargs) -> None: ... 
+ def add_id_token(self, token, token_handler, request): ... + def openid_authorization_validator(self, request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi new file mode 100644 index 00000000..6790cf45 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/request_validator.pyi @@ -0,0 +1,18 @@ +from typing import Any + +from oauthlib.oauth2.rfc6749.request_validator import RequestValidator as OAuth2RequestValidator + +log: Any + +class RequestValidator(OAuth2RequestValidator): + def get_authorization_code_scopes(self, client_id, code, redirect_uri, request) -> None: ... + def get_authorization_code_nonce(self, client_id, code, redirect_uri, request) -> None: ... + def get_jwt_bearer_token(self, token, token_handler, request) -> None: ... + def get_id_token(self, token, token_handler, request) -> None: ... + def finalize_id_token(self, id_token, token, token_handler, request) -> None: ... + def validate_jwt_bearer_token(self, token, scopes, request) -> None: ... + def validate_id_token(self, token, scopes, request) -> None: ... + def validate_silent_authorization(self, request) -> None: ... + def validate_silent_login(self, request) -> None: ... + def validate_user_match(self, id_token_hint, scopes, claims, request) -> None: ... + def get_userinfo_claims(self, request) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/tokens.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/tokens.pyi new file mode 100644 index 00000000..efbc471a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/openid/connect/core/tokens.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete +from typing import Any + +from oauthlib.oauth2.rfc6749.tokens import TokenBase as TokenBase + +class JWTToken(TokenBase): + request_validator: Any + token_generator: Any + refresh_token_generator: Any + expires_in: Any + def __init__( + self, + request_validator: Incomplete | None = ..., + token_generator: Incomplete | None = ..., + expires_in: Incomplete | None = ..., + refresh_token_generator: Incomplete | None = ..., + ) -> None: ... + def create_token(self, request, refresh_token: bool = ...): ... + def validate_request(self, request): ... + def estimate_type(self, request): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/signals.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/signals.pyi new file mode 100644 index 00000000..d06f0633 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/signals.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from typing import Any + +signals_available: bool + +class Namespace: + def signal(self, name, doc: Incomplete | None = ...): ... + +class _FakeSignal: + name: Any + __doc__: Any + def __init__(self, name, doc: Incomplete | None = ...) -> None: ... 
+ send: Any + connect: Any + disconnect: Any + has_receivers_for: Any + receivers_for: Any + temporarily_connected_to: Any + connected_to: Any + +scope_changed: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/uri_validate.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/uri_validate.pyi new file mode 100644 index 00000000..de635c86 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/oauthlib/oauthlib/uri_validate.pyi @@ -0,0 +1,43 @@ +from typing import Any + +DIGIT: str +ALPHA: str +HEXDIG: str +pct_encoded: Any +unreserved: Any +gen_delims: str +sub_delims: str +pchar: Any +reserved: Any +scheme: Any +dec_octet: Any +IPv4address: Any +IPv6address: str +IPvFuture: Any +IP_literal: Any +reg_name: Any +userinfo: Any +host: Any +port: Any +authority: Any +segment: Any +segment_nz: Any +segment_nz_nc: Any +path_abempty: Any +path_absolute: Any +path_noscheme: Any +path_rootless: Any +path_empty: str +path: Any +query: Any +fragment: Any +hier_part: Any +relative_part: Any +relative_ref: Any +URI: Any +URI_reference: Any +absolute_URI: Any + +def is_uri(uri): ... +def is_uri_reference(uri): ... +def is_absolute_uri(uri): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..3592ee37 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# "cls" argument has wrong name in implementation. 
+openpyxl.descriptors.slots.AutoSlotProperties.__new__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/METADATA.toml new file mode 100644 index 00000000..7f11ff38 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/METADATA.toml @@ -0,0 +1,4 @@ +version = "3.0.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/__init__.pyi new file mode 100644 index 00000000..c252e6d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/__init__.pyi @@ -0,0 +1,15 @@ +from openpyxl.compat.numbers import NUMPY as NUMPY +from openpyxl.reader.excel import load_workbook as load_workbook +from openpyxl.workbook import Workbook as Workbook +from openpyxl.xml import DEFUSEDXML as DEFUSEDXML, LXML as LXML + +from ._constants import ( + __author__ as __author__, + __author_email__ as __author_email__, + __license__ as __license__, + __maintainer_email__ as __maintainer_email__, + __url__ as __url__, + __version__ as __version__, +) + +open = load_workbook diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/_constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/_constants.pyi new file mode 100644 index 00000000..ed593cc2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/_constants.pyi @@ -0,0 +1,7 @@ +__author__: str +__author_email__: str +__license__: str +__maintainer_email__: str +__url__: str +__version__: str +__python__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/__init__.pyi new file mode 100644 index 00000000..cc5d82bf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/__init__.pyi @@ -0,0 +1,2 @@ +from .cell import Cell as Cell, MergedCell as MergedCell, WriteOnlyCell as WriteOnlyCell +from .read_only import ReadOnlyCell as ReadOnlyCell diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/_writer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/_writer.pyi new file mode 100644 index 00000000..ab9092eb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/_writer.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +def etree_write_cell(xf, worksheet, cell, styled: Incomplete | None = ...) -> None: ... +def lxml_write_cell(xf, worksheet, cell, styled: bool = ...) -> None: ... 
+ +write_cell = lxml_write_cell +write_cell = etree_write_cell diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/cell.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/cell.pyi new file mode 100644 index 00000000..ce7c7df3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/cell.pyi @@ -0,0 +1,83 @@ +from _typeshed import Incomplete +from datetime import datetime + +from openpyxl.comments.comments import Comment +from openpyxl.styles.cell_style import StyleArray +from openpyxl.styles.styleable import StyleableObject +from openpyxl.worksheet.hyperlink import Hyperlink +from openpyxl.worksheet.worksheet import Worksheet + +__docformat__: str +TIME_TYPES: Incomplete +TIME_FORMATS: Incomplete +STRING_TYPES: Incomplete +KNOWN_TYPES: Incomplete +ILLEGAL_CHARACTERS_RE: Incomplete +ERROR_CODES: Incomplete +TYPE_STRING: str +TYPE_FORMULA: str +TYPE_NUMERIC: str +TYPE_BOOL: str +TYPE_NULL: str +TYPE_INLINE: str +TYPE_ERROR: str +TYPE_FORMULA_CACHE_STRING: str +VALID_TYPES: Incomplete + +def get_type(t: type, value: object) -> str | None: ... +def get_time_format(t: datetime) -> str: ... + +class Cell(StyleableObject): + row: int + column: int + data_type: str + def __init__( + self, + worksheet: Worksheet, + row: int | None = ..., + column: int | None = ..., + value: str | float | datetime | None = ..., + style_array: StyleArray | None = ..., + ) -> None: ... + @property + def coordinate(self) -> str: ... + @property + def col_idx(self) -> int: ... + @property + def column_letter(self) -> str: ... + @property + def encoding(self) -> str: ... + @property + def base_date(self) -> datetime: ... + def check_string(self, value: str): ... + def check_error(self, value: object) -> str: ... + @property + def value(self) -> str | float | datetime | None: ... + @value.setter + def value(self, value: str | float | datetime | None) -> None: ... + @property + def internal_value(self) -> str | float | datetime | None: ... + @property + def hyperlink(self) -> Hyperlink | None: ... + @hyperlink.setter + def hyperlink(self, val: Hyperlink | str | None) -> None: ... + @property + def is_date(self) -> bool: ... + def offset(self, row: int = ..., column: int = ...) -> Cell: ... + @property + def comment(self) -> Comment | None: ... + @comment.setter + def comment(self, value: Comment | None) -> None: ... + +class MergedCell(StyleableObject): + data_type: str + comment: Comment | None + hyperlink: Hyperlink | None + row: int + column: int + def __init__(self, worksheet: Worksheet, row: int | None = ..., column: int | None = ...) -> None: ... + @property + def coordinate(self) -> str: ... + value: str | float | int | datetime | None + +def WriteOnlyCell(ws: Worksheet | None = ..., value: str | float | datetime | None = ...) -> Cell: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/read_only.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/read_only.pyi new file mode 100644 index 00000000..97004344 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/read_only.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete + +class ReadOnlyCell: + parent: Incomplete + row: Incomplete + column: Incomplete + data_type: Incomplete + def __init__(self, sheet, row, column, value, data_type: str = ..., style_id: int = ...) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + # defined twice in the implementation + @property + def coordinate(self): ... + @property + def column_letter(self): ... + @property + def style_array(self): ... + @property + def has_style(self): ... + @property + def number_format(self): ... + @property + def font(self): ... + @property + def fill(self): ... + @property + def border(self): ... + @property + def alignment(self): ... + @property + def protection(self): ... + @property + def is_date(self): ... + @property + def internal_value(self): ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + +class EmptyCell: + value: Incomplete + is_date: bool + font: Incomplete + border: Incomplete + fill: Incomplete + number_format: Incomplete + alignment: Incomplete + data_type: str + +EMPTY_CELL: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/text.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/text.pyi new file mode 100644 index 00000000..92771f02 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/cell/text.pyi @@ -0,0 +1,82 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable +from openpyxl.styles.fonts import Font + +class PhoneticProperties(Serialisable): + tagname: str + fontId: Incomplete + type: Incomplete + alignment: Incomplete + def __init__( + self, fontId: Incomplete | None = ..., type: Incomplete | None = ..., alignment: Incomplete | None = ... + ) -> None: ... + +class PhoneticText(Serialisable): + tagname: str + sb: Incomplete + eb: Incomplete + t: Incomplete + text: Incomplete + def __init__(self, sb: Incomplete | None = ..., eb: Incomplete | None = ..., t: Incomplete | None = ...) -> None: ... + +class InlineFont(Font): + tagname: str + rFont: Incomplete + charset: Incomplete + family: Incomplete + b: Incomplete + i: Incomplete + strike: Incomplete + outline: Incomplete + shadow: Incomplete + condense: Incomplete + extend: Incomplete + color: Incomplete + sz: Incomplete + u: Incomplete + vertAlign: Incomplete + scheme: Incomplete + __elements__: Incomplete + def __init__( + self, + rFont: Incomplete | None = ..., + charset: Incomplete | None = ..., + family: Incomplete | None = ..., + b: Incomplete | None = ..., + i: Incomplete | None = ..., + strike: Incomplete | None = ..., + outline: Incomplete | None = ..., + shadow: Incomplete | None = ..., + condense: Incomplete | None = ..., + extend: Incomplete | None = ..., + color: Incomplete | None = ..., + sz: Incomplete | None = ..., + u: Incomplete | None = ..., + vertAlign: Incomplete | None = ..., + scheme: Incomplete | None = ..., + ) -> None: ... 
+ +class RichText(Serialisable): + tagname: str + rPr: Incomplete + font: Incomplete + t: Incomplete + text: Incomplete + __elements__: Incomplete + def __init__(self, rPr: Incomplete | None = ..., t: Incomplete | None = ...) -> None: ... + +class Text(Serialisable): + tagname: str + t: Incomplete + plain: Incomplete + r: Incomplete + formatted: Incomplete + rPh: Incomplete + phonetic: Incomplete + phoneticPr: Incomplete + PhoneticProperties: Incomplete + __elements__: Incomplete + def __init__(self, t: Incomplete | None = ..., r=..., rPh=..., phoneticPr: Incomplete | None = ...) -> None: ... + @property + def content(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/_3d.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/_3d.pyi new file mode 100644 index 00000000..174a349c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/_3d.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class View3D(Serialisable): + tagname: str + rotX: Incomplete + x_rotation: Incomplete + hPercent: Incomplete + height_percent: Incomplete + rotY: Incomplete + y_rotation: Incomplete + depthPercent: Incomplete + rAngAx: Incomplete + right_angle_axes: Incomplete + perspective: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + rotX: int = ..., + hPercent: Incomplete | None = ..., + rotY: int = ..., + depthPercent: Incomplete | None = ..., + rAngAx: bool = ..., + perspective: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class Surface(Serialisable): + tagname: str + thickness: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + pictureOptions: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + thickness: Incomplete | None = ..., + spPr: Incomplete | None = ..., + pictureOptions: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class _3DBase(Serialisable): + tagname: str + view3D: Incomplete + floor: Incomplete + sideWall: Incomplete + backWall: Incomplete + def __init__( + self, + view3D: Incomplete | None = ..., + floor: Incomplete | None = ..., + sideWall: Incomplete | None = ..., + backWall: Incomplete | None = ..., + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/__init__.pyi new file mode 100644 index 00000000..e0d73d19 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/__init__.pyi @@ -0,0 +1,15 @@ +from .area_chart import AreaChart as AreaChart, AreaChart3D as AreaChart3D +from .bar_chart import BarChart as BarChart, BarChart3D as BarChart3D +from .bubble_chart import BubbleChart as BubbleChart +from .line_chart import LineChart as LineChart, LineChart3D as LineChart3D +from .pie_chart import ( + DoughnutChart as DoughnutChart, + PieChart as PieChart, + PieChart3D as PieChart3D, + ProjectedPieChart as ProjectedPieChart, +) +from .radar_chart import RadarChart as RadarChart +from .reference import Reference as Reference +from .scatter_chart import ScatterChart as ScatterChart +from .stock_chart import StockChart as StockChart +from .surface_chart import SurfaceChart as SurfaceChart, SurfaceChart3D as SurfaceChart3D diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/_chart.pyi new file mode 100644 index 00000000..fb9f3880 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/_chart.pyi @@ -0,0 +1,44 @@ +from _typeshed import Incomplete +from abc import abstractmethod + +from openpyxl.descriptors.serialisable import Serialisable + +class AxId(Serialisable): # type: ignore[misc] + val: Incomplete + def __init__(self, val) -> None: ... + +def PlotArea(): ... + +class ChartBase(Serialisable): + legend: Incomplete + layout: Incomplete + roundedCorners: Incomplete + axId: Incomplete + visible_cells_only: Incomplete + display_blanks: Incomplete + ser: Incomplete + series: Incomplete + title: Incomplete + anchor: str + width: int + height: float + style: Incomplete + mime_type: str + graphical_properties: Incomplete + __elements__: Incomplete + plot_area: Incomplete + pivotSource: Incomplete + pivotFormats: Incomplete + idx_base: int + def __init__(self, axId=..., **kw) -> None: ... + def __hash__(self) -> int: ... + def __iadd__(self, other): ... + def to_tree(self, namespace: Incomplete | None = ..., tagname: Incomplete | None = ..., idx: Incomplete | None = ...): ... # type: ignore[override] + def set_categories(self, labels) -> None: ... + def add_data(self, data, from_rows: bool = ..., titles_from_data: bool = ...) -> None: ... + def append(self, value) -> None: ... + @property + def path(self): ... + @property + @abstractmethod + def tagname(self) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/area_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/area_chart.pyi new file mode 100644 index 00000000..dc40da13 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/area_chart.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete +from abc import abstractmethod + +from ._chart import ChartBase + +class _AreaChartBase(ChartBase): + grouping: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dataLabels: Incomplete + dropLines: Incomplete + __elements__: Incomplete + def __init__( + self, + grouping: str = ..., + varyColors: Incomplete | None = ..., + ser=..., + dLbls: Incomplete | None = ..., + dropLines: Incomplete | None = ..., + ) -> None: ... + @property + @abstractmethod + def tagname(self) -> str: ... + +class AreaChart(_AreaChartBase): + tagname: str + grouping: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dropLines: Incomplete + x_axis: Incomplete + y_axis: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, axId: Incomplete | None = ..., extLst: Incomplete | None = ..., **kw) -> None: ... + +class AreaChart3D(AreaChart): + tagname: str + grouping: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dropLines: Incomplete + gapDepth: Incomplete + x_axis: Incomplete + y_axis: Incomplete + z_axis: Incomplete + __elements__: Incomplete + def __init__(self, gapDepth: Incomplete | None = ..., **kw) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/axis.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/axis.pyi new file mode 100644 index 00000000..2bf227bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/axis.pyi @@ -0,0 +1,244 @@ +from _typeshed import Incomplete +from abc import abstractmethod + +from openpyxl.descriptors.serialisable import Serialisable + +class ChartLines(Serialisable): + tagname: str + spPr: Incomplete + graphicalProperties: Incomplete + def __init__(self, spPr: Incomplete | None = ...) -> None: ... + +class Scaling(Serialisable): + tagname: str + logBase: Incomplete + orientation: Incomplete + max: Incomplete + min: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + logBase: Incomplete | None = ..., + orientation: str = ..., + max: Incomplete | None = ..., + min: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
+ +class _BaseAxis(Serialisable): + axId: Incomplete + scaling: Incomplete + delete: Incomplete + axPos: Incomplete + majorGridlines: Incomplete + minorGridlines: Incomplete + title: Incomplete + numFmt: Incomplete + number_format: Incomplete + majorTickMark: Incomplete + minorTickMark: Incomplete + tickLblPos: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + txPr: Incomplete + textProperties: Incomplete + crossAx: Incomplete + crosses: Incomplete + crossesAt: Incomplete + __elements__: Incomplete + def __init__( + self, + axId: Incomplete | None = ..., + scaling: Incomplete | None = ..., + delete: Incomplete | None = ..., + axPos: str = ..., + majorGridlines: Incomplete | None = ..., + minorGridlines: Incomplete | None = ..., + title: Incomplete | None = ..., + numFmt: Incomplete | None = ..., + majorTickMark: Incomplete | None = ..., + minorTickMark: Incomplete | None = ..., + tickLblPos: Incomplete | None = ..., + spPr: Incomplete | None = ..., + txPr: Incomplete | None = ..., + crossAx: Incomplete | None = ..., + crosses: Incomplete | None = ..., + crossesAt: Incomplete | None = ..., + ) -> None: ... + @property + @abstractmethod + def tagname(self) -> str: ... + +class DisplayUnitsLabel(Serialisable): + tagname: str + layout: Incomplete + tx: Incomplete + text: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + txPr: Incomplete + textPropertes: Incomplete + __elements__: Incomplete + def __init__( + self, + layout: Incomplete | None = ..., + tx: Incomplete | None = ..., + spPr: Incomplete | None = ..., + txPr: Incomplete | None = ..., + ) -> None: ... + +class DisplayUnitsLabelList(Serialisable): + tagname: str + custUnit: Incomplete + builtInUnit: Incomplete + dispUnitsLbl: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + custUnit: Incomplete | None = ..., + builtInUnit: Incomplete | None = ..., + dispUnitsLbl: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class NumericAxis(_BaseAxis): + tagname: str + axId: Incomplete + scaling: Incomplete + delete: Incomplete + axPos: Incomplete + majorGridlines: Incomplete + minorGridlines: Incomplete + title: Incomplete + numFmt: Incomplete + majorTickMark: Incomplete + minorTickMark: Incomplete + tickLblPos: Incomplete + spPr: Incomplete + txPr: Incomplete + crossAx: Incomplete + crosses: Incomplete + crossesAt: Incomplete + crossBetween: Incomplete + majorUnit: Incomplete + minorUnit: Incomplete + dispUnits: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + crossBetween: Incomplete | None = ..., + majorUnit: Incomplete | None = ..., + minorUnit: Incomplete | None = ..., + dispUnits: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... + @classmethod + def from_tree(cls, node): ... 
+ +class TextAxis(_BaseAxis): + tagname: str + axId: Incomplete + scaling: Incomplete + delete: Incomplete + axPos: Incomplete + majorGridlines: Incomplete + minorGridlines: Incomplete + title: Incomplete + numFmt: Incomplete + majorTickMark: Incomplete + minorTickMark: Incomplete + tickLblPos: Incomplete + spPr: Incomplete + txPr: Incomplete + crossAx: Incomplete + crosses: Incomplete + crossesAt: Incomplete + auto: Incomplete + lblAlgn: Incomplete + lblOffset: Incomplete + tickLblSkip: Incomplete + tickMarkSkip: Incomplete + noMultiLvlLbl: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + auto: Incomplete | None = ..., + lblAlgn: Incomplete | None = ..., + lblOffset: int = ..., + tickLblSkip: Incomplete | None = ..., + tickMarkSkip: Incomplete | None = ..., + noMultiLvlLbl: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... + +class DateAxis(TextAxis): + tagname: str + axId: Incomplete + scaling: Incomplete + delete: Incomplete + axPos: Incomplete + majorGridlines: Incomplete + minorGridlines: Incomplete + title: Incomplete + numFmt: Incomplete + majorTickMark: Incomplete + minorTickMark: Incomplete + tickLblPos: Incomplete + spPr: Incomplete + txPr: Incomplete + crossAx: Incomplete + crosses: Incomplete + crossesAt: Incomplete + auto: Incomplete + lblOffset: Incomplete + baseTimeUnit: Incomplete + majorUnit: Incomplete + majorTimeUnit: Incomplete + minorUnit: Incomplete + minorTimeUnit: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + auto: Incomplete | None = ..., + lblOffset: Incomplete | None = ..., + baseTimeUnit: Incomplete | None = ..., + majorUnit: Incomplete | None = ..., + majorTimeUnit: Incomplete | None = ..., + minorUnit: Incomplete | None = ..., + minorTimeUnit: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... + +class SeriesAxis(_BaseAxis): + tagname: str + axId: Incomplete + scaling: Incomplete + delete: Incomplete + axPos: Incomplete + majorGridlines: Incomplete + minorGridlines: Incomplete + title: Incomplete + numFmt: Incomplete + majorTickMark: Incomplete + minorTickMark: Incomplete + tickLblPos: Incomplete + spPr: Incomplete + txPr: Incomplete + crossAx: Incomplete + crosses: Incomplete + crossesAt: Incomplete + tickLblSkip: Incomplete + tickMarkSkip: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, tickLblSkip: Incomplete | None = ..., tickMarkSkip: Incomplete | None = ..., extLst: Incomplete | None = ..., **kw + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/bar_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/bar_chart.pyi new file mode 100644 index 00000000..04cc873c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/bar_chart.pyi @@ -0,0 +1,81 @@ +from _typeshed import Incomplete +from abc import abstractmethod + +from ._3d import _3DBase +from ._chart import ChartBase + +class _BarChartBase(ChartBase): + barDir: Incomplete + type: Incomplete + grouping: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dataLabels: Incomplete + __elements__: Incomplete + def __init__( + self, + barDir: str = ..., + grouping: str = ..., + varyColors: Incomplete | None = ..., + ser=..., + dLbls: Incomplete | None = ..., + **kw, + ) -> None: ... 
+ @property + @abstractmethod + def tagname(self) -> str: ... + +class BarChart(_BarChartBase): + tagname: str + barDir: Incomplete + grouping: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + gapWidth: Incomplete + overlap: Incomplete + serLines: Incomplete + extLst: Incomplete + x_axis: Incomplete + y_axis: Incomplete + __elements__: Incomplete + legend: Incomplete + def __init__( + self, + gapWidth: int = ..., + overlap: Incomplete | None = ..., + serLines: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... + +class BarChart3D(_BarChartBase, _3DBase): + tagname: str + barDir: Incomplete + grouping: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + view3D: Incomplete + floor: Incomplete + sideWall: Incomplete + backWall: Incomplete + gapWidth: Incomplete + gapDepth: Incomplete + shape: Incomplete + serLines: Incomplete + extLst: Incomplete + x_axis: Incomplete + y_axis: Incomplete + z_axis: Incomplete + __elements__: Incomplete + def __init__( + self, + gapWidth: int = ..., + gapDepth: int = ..., + shape: Incomplete | None = ..., + serLines: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/bubble_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/bubble_chart.pyi new file mode 100644 index 00000000..aea9771c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/bubble_chart.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete + +from ._chart import ChartBase + +class BubbleChart(ChartBase): + tagname: str + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dataLabels: Incomplete + bubble3D: Incomplete + bubbleScale: Incomplete + showNegBubbles: Incomplete + sizeRepresents: Incomplete + extLst: Incomplete + x_axis: Incomplete + y_axis: Incomplete + __elements__: Incomplete + def __init__( + self, + varyColors: Incomplete | None = ..., + ser=..., + dLbls: Incomplete | None = ..., + bubble3D: Incomplete | None = ..., + bubbleScale: Incomplete | None = ..., + showNegBubbles: Incomplete | None = ..., + sizeRepresents: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/chartspace.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/chartspace.pyi new file mode 100644 index 00000000..5d062853 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/chartspace.pyi @@ -0,0 +1,97 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class ChartContainer(Serialisable): + tagname: str + title: Incomplete + autoTitleDeleted: Incomplete + pivotFmts: Incomplete + view3D: Incomplete + floor: Incomplete + sideWall: Incomplete + backWall: Incomplete + plotArea: Incomplete + legend: Incomplete + plotVisOnly: Incomplete + dispBlanksAs: Incomplete + showDLblsOverMax: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + title: Incomplete | None = ..., + autoTitleDeleted: Incomplete | None = ..., + pivotFmts=..., + view3D: Incomplete | None = ..., + floor: Incomplete | None = ..., + sideWall: Incomplete | None = ..., + backWall: Incomplete | None = ..., + plotArea: Incomplete | None = ..., + legend: Incomplete | None = ..., + plotVisOnly: bool = ..., + dispBlanksAs: str = ..., + showDLblsOverMax: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class Protection(Serialisable): + tagname: str + chartObject: Incomplete + data: Incomplete + formatting: Incomplete + selection: Incomplete + userInterface: Incomplete + __elements__: Incomplete + def __init__( + self, + chartObject: Incomplete | None = ..., + data: Incomplete | None = ..., + formatting: Incomplete | None = ..., + selection: Incomplete | None = ..., + userInterface: Incomplete | None = ..., + ) -> None: ... + +class ExternalData(Serialisable): + tagname: str + autoUpdate: Incomplete + id: Incomplete + def __init__(self, autoUpdate: Incomplete | None = ..., id: Incomplete | None = ...) -> None: ... + +class ChartSpace(Serialisable): + tagname: str + date1904: Incomplete + lang: Incomplete + roundedCorners: Incomplete + style: Incomplete + clrMapOvr: Incomplete + pivotSource: Incomplete + protection: Incomplete + chart: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + txPr: Incomplete + textProperties: Incomplete + externalData: Incomplete + printSettings: Incomplete + userShapes: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + date1904: Incomplete | None = ..., + lang: Incomplete | None = ..., + roundedCorners: Incomplete | None = ..., + style: Incomplete | None = ..., + clrMapOvr: Incomplete | None = ..., + pivotSource: Incomplete | None = ..., + protection: Incomplete | None = ..., + chart: Incomplete | None = ..., + spPr: Incomplete | None = ..., + txPr: Incomplete | None = ..., + externalData: Incomplete | None = ..., + printSettings: Incomplete | None = ..., + userShapes: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + def to_tree(self, tagname: Incomplete | None = ..., idx: Incomplete | None = ..., namespace: Incomplete | None = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/data_source.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/data_source.pyi new file mode 100644 index 00000000..b71a03dc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/data_source.pyi @@ -0,0 +1,109 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.nested import NestedText +from openpyxl.descriptors.serialisable import Serialisable + +class NumFmt(Serialisable): # type: ignore[misc] + formatCode: Incomplete + sourceLinked: Incomplete + def __init__(self, formatCode: Incomplete | None = ..., sourceLinked: bool = ...) -> None: ... + +class NumberValueDescriptor(NestedText): + allow_none: bool + expected_type: Incomplete + def __set__(self, instance, value) -> None: ... + +class NumVal(Serialisable): # type: ignore[misc] + idx: Incomplete + formatCode: Incomplete + v: Incomplete + def __init__(self, idx: Incomplete | None = ..., formatCode: Incomplete | None = ..., v: Incomplete | None = ...) -> None: ... + +class NumData(Serialisable): # type: ignore[misc] + formatCode: Incomplete + ptCount: Incomplete + pt: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, formatCode: Incomplete | None = ..., ptCount: Incomplete | None = ..., pt=..., extLst: Incomplete | None = ... + ) -> None: ... + +class NumRef(Serialisable): # type: ignore[misc] + f: Incomplete + ref: Incomplete + numCache: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, f: Incomplete | None = ..., numCache: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... + +class StrVal(Serialisable): + tagname: str + idx: Incomplete + v: Incomplete + def __init__(self, idx: int = ..., v: Incomplete | None = ...) -> None: ... + +class StrData(Serialisable): + tagname: str + ptCount: Incomplete + pt: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, ptCount: Incomplete | None = ..., pt=..., extLst: Incomplete | None = ...) -> None: ... + +class StrRef(Serialisable): + tagname: str + f: Incomplete + strCache: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, f: Incomplete | None = ..., strCache: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... + +class NumDataSource(Serialisable): # type: ignore[misc] + numRef: Incomplete + numLit: Incomplete + def __init__(self, numRef: Incomplete | None = ..., numLit: Incomplete | None = ...) -> None: ... + +class Level(Serialisable): + tagname: str + pt: Incomplete + __elements__: Incomplete + def __init__(self, pt=...) -> None: ... + +class MultiLevelStrData(Serialisable): + tagname: str + ptCount: Incomplete + lvl: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, ptCount: Incomplete | None = ..., lvl=..., extLst: Incomplete | None = ...) -> None: ... + +class MultiLevelStrRef(Serialisable): + tagname: str + f: Incomplete + multiLvlStrCache: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, f: Incomplete | None = ..., multiLvlStrCache: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... 
+ +class AxDataSource(Serialisable): + tagname: str + numRef: Incomplete + numLit: Incomplete + strRef: Incomplete + strLit: Incomplete + multiLvlStrRef: Incomplete + def __init__( + self, + numRef: Incomplete | None = ..., + numLit: Incomplete | None = ..., + strRef: Incomplete | None = ..., + strLit: Incomplete | None = ..., + multiLvlStrRef: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/descriptors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/descriptors.pyi new file mode 100644 index 00000000..4c8407af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/descriptors.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import Typed +from openpyxl.descriptors.nested import NestedMinMax + +class NestedGapAmount(NestedMinMax): + allow_none: bool + min: int + max: int + +class NestedOverlap(NestedMinMax): + allow_none: bool + min: int + max: int + +class NumberFormatDescriptor(Typed): + expected_type: Incomplete + allow_none: bool + def __set__(self, instance, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/error_bar.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/error_bar.pyi new file mode 100644 index 00000000..102fd176 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/error_bar.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class ErrorBars(Serialisable): + tagname: str + errDir: Incomplete + direction: Incomplete + errBarType: Incomplete + style: Incomplete + errValType: Incomplete + size: Incomplete + noEndCap: Incomplete + plus: Incomplete + minus: Incomplete + val: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + errDir: Incomplete | None = ..., + errBarType: str = ..., + errValType: str = ..., + noEndCap: Incomplete | None = ..., + plus: Incomplete | None = ..., + minus: Incomplete | None = ..., + val: Incomplete | None = ..., + spPr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/label.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/label.pyi new file mode 100644 index 00000000..b5bb682f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/label.pyi @@ -0,0 +1,81 @@ +from _typeshed import Incomplete +from abc import abstractmethod + +from openpyxl.descriptors.serialisable import Serialisable as Serialisable + +class _DataLabelBase(Serialisable): + numFmt: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + txPr: Incomplete + textProperties: Incomplete + dLblPos: Incomplete + position: Incomplete + showLegendKey: Incomplete + showVal: Incomplete + showCatName: Incomplete + showSerName: Incomplete + showPercent: Incomplete + showBubbleSize: Incomplete + showLeaderLines: Incomplete + separator: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + numFmt: Incomplete | None = ..., + spPr: Incomplete | None = ..., + txPr: Incomplete | None = ..., + dLblPos: Incomplete | None = ..., + showLegendKey: Incomplete | None = ..., + showVal: Incomplete | None = ..., + showCatName: Incomplete | None = ..., + showSerName: Incomplete | None = ..., + showPercent: Incomplete | None = ..., + showBubbleSize: Incomplete | None = ..., + showLeaderLines: Incomplete | None = ..., + separator: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + @property + @abstractmethod + def tagname(self) -> str: ... + +class DataLabel(_DataLabelBase): + tagname: str + idx: Incomplete + numFmt: Incomplete + spPr: Incomplete + txPr: Incomplete + dLblPos: Incomplete + showLegendKey: Incomplete + showVal: Incomplete + showCatName: Incomplete + showSerName: Incomplete + showPercent: Incomplete + showBubbleSize: Incomplete + showLeaderLines: Incomplete + separator: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, idx: int = ..., **kw) -> None: ... + +class DataLabelList(_DataLabelBase): + tagname: str + dLbl: Incomplete + delete: Incomplete + numFmt: Incomplete + spPr: Incomplete + txPr: Incomplete + dLblPos: Incomplete + showLegendKey: Incomplete + showVal: Incomplete + showCatName: Incomplete + showSerName: Incomplete + showPercent: Incomplete + showBubbleSize: Incomplete + showLeaderLines: Incomplete + separator: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, dLbl=..., delete: Incomplete | None = ..., **kw) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/layout.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/layout.pyi new file mode 100644 index 00000000..1bb85f85 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/layout.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class ManualLayout(Serialisable): + tagname: str + layoutTarget: Incomplete + xMode: Incomplete + yMode: Incomplete + wMode: Incomplete + hMode: Incomplete + x: Incomplete + y: Incomplete + w: Incomplete + width: Incomplete + h: Incomplete + height: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + layoutTarget: Incomplete | None = ..., + xMode: Incomplete | None = ..., + yMode: Incomplete | None = ..., + wMode: str = ..., + hMode: str = ..., + x: Incomplete | None = ..., + y: Incomplete | None = ..., + w: Incomplete | None = ..., + h: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class Layout(Serialisable): + tagname: str + manualLayout: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, manualLayout: Incomplete | None = ..., extLst: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/legend.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/legend.pyi new file mode 100644 index 00000000..42e7543e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/legend.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class LegendEntry(Serialisable): + tagname: str + idx: Incomplete + delete: Incomplete + txPr: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, idx: int = ..., delete: bool = ..., txPr: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... + +class Legend(Serialisable): + tagname: str + legendPos: Incomplete + position: Incomplete + legendEntry: Incomplete + layout: Incomplete + overlay: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + txPr: Incomplete + textProperties: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + legendPos: str = ..., + legendEntry=..., + layout: Incomplete | None = ..., + overlay: Incomplete | None = ..., + spPr: Incomplete | None = ..., + txPr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/line_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/line_chart.pyi new file mode 100644 index 00000000..4df38afd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/line_chart.pyi @@ -0,0 +1,77 @@ +from _typeshed import Incomplete +from abc import abstractmethod + +from ._chart import ChartBase + +class _LineChartBase(ChartBase): + grouping: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dataLabels: Incomplete + dropLines: Incomplete + __elements__: Incomplete + def __init__( + self, + grouping: str = ..., + varyColors: Incomplete | None = ..., + ser=..., + dLbls: Incomplete | None = ..., + dropLines: Incomplete | None = ..., + **kw, + ) -> None: ... + @property + @abstractmethod + def tagname(self) -> str: ... + +class LineChart(_LineChartBase): + tagname: str + grouping: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dropLines: Incomplete + hiLowLines: Incomplete + upDownBars: Incomplete + marker: Incomplete + smooth: Incomplete + extLst: Incomplete + x_axis: Incomplete + y_axis: Incomplete + __elements__: Incomplete + def __init__( + self, + hiLowLines: Incomplete | None = ..., + upDownBars: Incomplete | None = ..., + marker: Incomplete | None = ..., + smooth: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... + +class LineChart3D(_LineChartBase): + tagname: str + grouping: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dropLines: Incomplete + gapDepth: Incomplete + hiLowLines: Incomplete + upDownBars: Incomplete + marker: Incomplete + smooth: Incomplete + extLst: Incomplete + x_axis: Incomplete + y_axis: Incomplete + z_axis: Incomplete + __elements__: Incomplete + def __init__( + self, + gapDepth: Incomplete | None = ..., + hiLowLines: Incomplete | None = ..., + upDownBars: Incomplete | None = ..., + marker: Incomplete | None = ..., + smooth: Incomplete | None = ..., + **kw, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/marker.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/marker.pyi new file mode 100644 index 00000000..56825794 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/marker.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Marker(Serialisable): + tagname: str + symbol: Incomplete + size: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + symbol: Incomplete | None = ..., + size: Incomplete | None = ..., + spPr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
+ +class DataPoint(Serialisable): + tagname: str + idx: Incomplete + invertIfNegative: Incomplete + marker: Incomplete + bubble3D: Incomplete + explosion: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + pictureOptions: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + idx: Incomplete | None = ..., + invertIfNegative: Incomplete | None = ..., + marker: Incomplete | None = ..., + bubble3D: Incomplete | None = ..., + explosion: Incomplete | None = ..., + spPr: Incomplete | None = ..., + pictureOptions: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/picture.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/picture.pyi new file mode 100644 index 00000000..5ec11607 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/picture.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class PictureOptions(Serialisable): + tagname: str + applyToFront: Incomplete + applyToSides: Incomplete + applyToEnd: Incomplete + pictureFormat: Incomplete + pictureStackUnit: Incomplete + __elements__: Incomplete + def __init__( + self, + applyToFront: Incomplete | None = ..., + applyToSides: Incomplete | None = ..., + applyToEnd: Incomplete | None = ..., + pictureFormat: Incomplete | None = ..., + pictureStackUnit: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/pie_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/pie_chart.pyi new file mode 100644 index 00000000..839e60d2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/pie_chart.pyi @@ -0,0 +1,81 @@ +from _typeshed import Incomplete +from abc import abstractmethod + +from openpyxl.descriptors.serialisable import Serialisable + +from ._chart import ChartBase + +class _PieChartBase(ChartBase): + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dataLabels: Incomplete + __elements__: Incomplete + def __init__(self, varyColors: bool = ..., ser=..., dLbls: Incomplete | None = ...) -> None: ... + @property + @abstractmethod + def tagname(self) -> str: ... + +class PieChart(_PieChartBase): + tagname: str + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + firstSliceAng: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, firstSliceAng: int = ..., extLst: Incomplete | None = ..., **kw) -> None: ... + +class PieChart3D(_PieChartBase): + tagname: str + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + extLst: Incomplete + __elements__: Incomplete + +class DoughnutChart(_PieChartBase): + tagname: str + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + firstSliceAng: Incomplete + holeSize: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, firstSliceAng: int = ..., holeSize: int = ..., extLst: Incomplete | None = ..., **kw) -> None: ... + +class CustomSplit(Serialisable): + tagname: str + secondPiePt: Incomplete + __elements__: Incomplete + def __init__(self, secondPiePt=...) -> None: ... 
+ +class ProjectedPieChart(_PieChartBase): + tagname: str + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + ofPieType: Incomplete + type: Incomplete + gapWidth: Incomplete + splitType: Incomplete + splitPos: Incomplete + custSplit: Incomplete + secondPieSize: Incomplete + serLines: Incomplete + join_lines: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + ofPieType: str = ..., + gapWidth: Incomplete | None = ..., + splitType: str = ..., + splitPos: Incomplete | None = ..., + custSplit: Incomplete | None = ..., + secondPieSize: int = ..., + serLines: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/pivot.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/pivot.pyi new file mode 100644 index 00000000..e5e86d23 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/pivot.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class PivotSource(Serialisable): + tagname: str + name: Incomplete + fmtId: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, name: Incomplete | None = ..., fmtId: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... + +class PivotFormat(Serialisable): + tagname: str + idx: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + txPr: Incomplete + TextBody: Incomplete + marker: Incomplete + dLbl: Incomplete + DataLabel: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + idx: int = ..., + spPr: Incomplete | None = ..., + txPr: Incomplete | None = ..., + marker: Incomplete | None = ..., + dLbl: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/plotarea.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/plotarea.pyi new file mode 100644 index 00000000..2a9f7f31 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/plotarea.pyi @@ -0,0 +1,66 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class DataTable(Serialisable): + tagname: str + showHorzBorder: Incomplete + showVertBorder: Incomplete + showOutline: Incomplete + showKeys: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + txPr: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + showHorzBorder: Incomplete | None = ..., + showVertBorder: Incomplete | None = ..., + showOutline: Incomplete | None = ..., + showKeys: Incomplete | None = ..., + spPr: Incomplete | None = ..., + txPr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
+ +class PlotArea(Serialisable): + tagname: str + layout: Incomplete + dTable: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + extLst: Incomplete + areaChart: Incomplete + area3DChart: Incomplete + lineChart: Incomplete + line3DChart: Incomplete + stockChart: Incomplete + radarChart: Incomplete + scatterChart: Incomplete + pieChart: Incomplete + pie3DChart: Incomplete + doughnutChart: Incomplete + barChart: Incomplete + bar3DChart: Incomplete + ofPieChart: Incomplete + surfaceChart: Incomplete + surface3DChart: Incomplete + bubbleChart: Incomplete + valAx: Incomplete + catAx: Incomplete + dateAx: Incomplete + serAx: Incomplete + __elements__: Incomplete + def __init__( + self, + layout: Incomplete | None = ..., + dTable: Incomplete | None = ..., + spPr: Incomplete | None = ..., + _charts=..., + _axes=..., + extLst: Incomplete | None = ..., + ) -> None: ... + def to_tree(self, tagname: Incomplete | None = ..., idx: Incomplete | None = ..., namespace: Incomplete | None = ...): ... + @classmethod + def from_tree(cls, node): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/print_settings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/print_settings.pyi new file mode 100644 index 00000000..3a1effc4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/print_settings.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class PageMargins(Serialisable): + tagname: str + l: Incomplete + left: Incomplete + r: Incomplete + right: Incomplete + t: Incomplete + top: Incomplete + b: Incomplete + bottom: Incomplete + header: Incomplete + footer: Incomplete + def __init__( + self, l: float = ..., r: float = ..., t: int = ..., b: int = ..., header: float = ..., footer: float = ... + ) -> None: ... + +class PrintSettings(Serialisable): + tagname: str + headerFooter: Incomplete + pageMargins: Incomplete + pageSetup: Incomplete + __elements__: Incomplete + def __init__( + self, headerFooter: Incomplete | None = ..., pageMargins: Incomplete | None = ..., pageSetup: Incomplete | None = ... + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/radar_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/radar_chart.pyi new file mode 100644 index 00000000..21d1ea66 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/radar_chart.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from ._chart import ChartBase + +class RadarChart(ChartBase): + tagname: str + radarStyle: Incomplete + type: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dataLabels: Incomplete + extLst: Incomplete + x_axis: Incomplete + y_axis: Incomplete + __elements__: Incomplete + def __init__( + self, + radarStyle: str = ..., + varyColors: Incomplete | None = ..., + ser=..., + dLbls: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/reader.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/reader.pyi new file mode 100644 index 00000000..0e58541f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/reader.pyi @@ -0,0 +1 @@ +def read_chart(chartspace): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/reference.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/reference.pyi new file mode 100644 index 00000000..b27c3d2a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/reference.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +from openpyxl.descriptors import Strict + +class DummyWorksheet: + title: Incomplete + def __init__(self, title) -> None: ... + +class Reference(Strict): + min_row: Incomplete + max_row: Incomplete + min_col: Incomplete + max_col: Incomplete + range_string: Incomplete + worksheet: Incomplete + def __init__( + self, + worksheet: Incomplete | None = ..., + min_col: Incomplete | None = ..., + min_row: Incomplete | None = ..., + max_col: Incomplete | None = ..., + max_row: Incomplete | None = ..., + range_string: Incomplete | None = ..., + ) -> None: ... + def __len__(self) -> int: ... + def __eq__(self, other): ... + @property + def rows(self) -> Generator[Incomplete, None, None]: ... + @property + def cols(self) -> Generator[Incomplete, None, None]: ... + def pop(self): ... + @property + def sheetname(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/scatter_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/scatter_chart.pyi new file mode 100644 index 00000000..7d293645 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/scatter_chart.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from ._chart import ChartBase as ChartBase + +class ScatterChart(ChartBase): + tagname: str + scatterStyle: Incomplete + varyColors: Incomplete + ser: Incomplete + dLbls: Incomplete + dataLabels: Incomplete + extLst: Incomplete + x_axis: Incomplete + y_axis: Incomplete + __elements__: Incomplete + def __init__( + self, + scatterStyle: Incomplete | None = ..., + varyColors: Incomplete | None = ..., + ser=..., + dLbls: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/series.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/series.pyi new file mode 100644 index 00000000..06167b63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/series.pyi @@ -0,0 +1,86 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +attribute_mapping: Incomplete + +class SeriesLabel(Serialisable): + tagname: str + strRef: Incomplete + v: Incomplete + value: Incomplete + __elements__: Incomplete + def __init__(self, strRef: Incomplete | None = ..., v: Incomplete | None = ...) 
-> None: ... + +class Series(Serialisable): + tagname: str + idx: Incomplete + order: Incomplete + tx: Incomplete + title: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + pictureOptions: Incomplete + dPt: Incomplete + data_points: Incomplete + dLbls: Incomplete + labels: Incomplete + trendline: Incomplete + errBars: Incomplete + cat: Incomplete + identifiers: Incomplete + val: Incomplete + extLst: Incomplete + invertIfNegative: Incomplete + shape: Incomplete + xVal: Incomplete + yVal: Incomplete + bubbleSize: Incomplete + zVal: Incomplete + bubble3D: Incomplete + marker: Incomplete + smooth: Incomplete + explosion: Incomplete + __elements__: Incomplete + def __init__( + self, + idx: int = ..., + order: int = ..., + tx: Incomplete | None = ..., + spPr: Incomplete | None = ..., + pictureOptions: Incomplete | None = ..., + dPt=..., + dLbls: Incomplete | None = ..., + trendline: Incomplete | None = ..., + errBars: Incomplete | None = ..., + cat: Incomplete | None = ..., + val: Incomplete | None = ..., + invertIfNegative: Incomplete | None = ..., + shape: Incomplete | None = ..., + xVal: Incomplete | None = ..., + yVal: Incomplete | None = ..., + bubbleSize: Incomplete | None = ..., + bubble3D: Incomplete | None = ..., + marker: Incomplete | None = ..., + smooth: Incomplete | None = ..., + explosion: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + def to_tree(self, tagname: Incomplete | None = ..., idx: Incomplete | None = ...): ... # type: ignore[override] + +class XYSeries(Series): + idx: Incomplete + order: Incomplete + tx: Incomplete + spPr: Incomplete + dPt: Incomplete + dLbls: Incomplete + trendline: Incomplete + errBars: Incomplete + xVal: Incomplete + yVal: Incomplete + invertIfNegative: Incomplete + bubbleSize: Incomplete + bubble3D: Incomplete + marker: Incomplete + smooth: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/series_factory.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/series_factory.pyi new file mode 100644 index 00000000..53243ea2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/series_factory.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +def SeriesFactory( + values, + xvalues: Incomplete | None = ..., + zvalues: Incomplete | None = ..., + title: Incomplete | None = ..., + title_from_data: bool = ..., +): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/shapes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/shapes.pyi new file mode 100644 index 00000000..debb37a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/shapes.pyi @@ -0,0 +1,37 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class GraphicalProperties(Serialisable): + tagname: str + bwMode: Incomplete + xfrm: Incomplete + transform: Incomplete + custGeom: Incomplete + prstGeom: Incomplete + noFill: Incomplete + solidFill: Incomplete + gradFill: Incomplete + pattFill: Incomplete + ln: Incomplete + line: Incomplete + scene3d: Incomplete + sp3d: Incomplete + shape3D: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + bwMode: Incomplete | None = ..., + xfrm: Incomplete | None = ..., + noFill: Incomplete | None = ..., + solidFill: Incomplete | None = ..., + gradFill: Incomplete | None = ..., + pattFill: Incomplete | None = ..., + ln: Incomplete | None = ..., + scene3d: Incomplete | None = ..., + custGeom: Incomplete | None = ..., + prstGeom: Incomplete | None = ..., + sp3d: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/stock_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/stock_chart.pyi new file mode 100644 index 00000000..41318473 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/stock_chart.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete + +from ._chart import ChartBase + +class StockChart(ChartBase): + tagname: str + ser: Incomplete + dLbls: Incomplete + dataLabels: Incomplete + dropLines: Incomplete + hiLowLines: Incomplete + upDownBars: Incomplete + extLst: Incomplete + x_axis: Incomplete + y_axis: Incomplete + __elements__: Incomplete + def __init__( + self, + ser=..., + dLbls: Incomplete | None = ..., + dropLines: Incomplete | None = ..., + hiLowLines: Incomplete | None = ..., + upDownBars: Incomplete | None = ..., + extLst: Incomplete | None = ..., + **kw, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/surface_chart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/surface_chart.pyi new file mode 100644 index 00000000..69734122 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/surface_chart.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete +from abc import abstractmethod + +from openpyxl.descriptors.serialisable import Serialisable + +from ._3d import _3DBase +from ._chart import ChartBase + +class BandFormat(Serialisable): + tagname: str + idx: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + __elements__: Incomplete + def __init__(self, idx: int = ..., spPr: Incomplete | None = ...) -> None: ... + +class BandFormatList(Serialisable): + tagname: str + bandFmt: Incomplete + __elements__: Incomplete + def __init__(self, bandFmt=...) -> None: ... 
+ +class _SurfaceChartBase(ChartBase): + wireframe: Incomplete + ser: Incomplete + bandFmts: Incomplete + __elements__: Incomplete + def __init__(self, wireframe: Incomplete | None = ..., ser=..., bandFmts: Incomplete | None = ..., **kw) -> None: ... + @property + @abstractmethod + def tagname(self) -> str: ... + +class SurfaceChart3D(_SurfaceChartBase, _3DBase): + tagname: str + wireframe: Incomplete + ser: Incomplete + bandFmts: Incomplete + extLst: Incomplete + x_axis: Incomplete + y_axis: Incomplete + z_axis: Incomplete + __elements__: Incomplete + def __init__(self, **kw) -> None: ... + +class SurfaceChart(SurfaceChart3D): + tagname: str + wireframe: Incomplete + ser: Incomplete + bandFmts: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, **kw) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/text.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/text.pyi new file mode 100644 index 00000000..c4420217 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/text.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class RichText(Serialisable): + tagname: str + bodyPr: Incomplete + properties: Incomplete + lstStyle: Incomplete + p: Incomplete + paragraphs: Incomplete + __elements__: Incomplete + def __init__( + self, bodyPr: Incomplete | None = ..., lstStyle: Incomplete | None = ..., p: Incomplete | None = ... + ) -> None: ... + +class Text(Serialisable): + tagname: str + strRef: Incomplete + rich: Incomplete + __elements__: Incomplete + def __init__(self, strRef: Incomplete | None = ..., rich: Incomplete | None = ...) -> None: ... + def to_tree(self, tagname: Incomplete | None = ..., idx: Incomplete | None = ..., namespace: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/title.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/title.pyi new file mode 100644 index 00000000..d12c2900 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/title.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import Typed +from openpyxl.descriptors.serialisable import Serialisable + +class Title(Serialisable): + tagname: str + tx: Incomplete + text: Incomplete + layout: Incomplete + overlay: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + txPr: Incomplete + body: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + tx: Incomplete | None = ..., + layout: Incomplete | None = ..., + overlay: Incomplete | None = ..., + spPr: Incomplete | None = ..., + txPr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +def title_maker(text): ... + +class TitleDescriptor(Typed): + expected_type: Incomplete + allow_none: bool + def __set__(self, instance, value) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/trendline.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/trendline.pyi new file mode 100644 index 00000000..5e5e21a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/trendline.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class TrendlineLabel(Serialisable): + tagname: str + layout: Incomplete + tx: Incomplete + numFmt: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + txPr: Incomplete + textProperties: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + layout: Incomplete | None = ..., + tx: Incomplete | None = ..., + numFmt: Incomplete | None = ..., + spPr: Incomplete | None = ..., + txPr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class Trendline(Serialisable): + tagname: str + name: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + trendlineType: Incomplete + order: Incomplete + period: Incomplete + forward: Incomplete + backward: Incomplete + intercept: Incomplete + dispRSqr: Incomplete + dispEq: Incomplete + trendlineLbl: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + spPr: Incomplete | None = ..., + trendlineType: str = ..., + order: Incomplete | None = ..., + period: Incomplete | None = ..., + forward: Incomplete | None = ..., + backward: Incomplete | None = ..., + intercept: Incomplete | None = ..., + dispRSqr: Incomplete | None = ..., + dispEq: Incomplete | None = ..., + trendlineLbl: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/updown_bars.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/updown_bars.pyi new file mode 100644 index 00000000..89e21c27 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chart/updown_bars.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class UpDownBars(Serialisable): + tagname: str + gapWidth: Incomplete + upBars: Incomplete + downBars: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + gapWidth: int = ..., + upBars: Incomplete | None = ..., + downBars: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/__init__.pyi new file mode 100644 index 00000000..59dba6f1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/__init__.pyi @@ -0,0 +1 @@ +from .chartsheet import Chartsheet as Chartsheet diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/chartsheet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/chartsheet.pyi new file mode 100644 index 00000000..9423f1cc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/chartsheet.pyi @@ -0,0 +1,44 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable +from openpyxl.workbook.child import _WorkbookChild + +class Chartsheet(_WorkbookChild, Serialisable): + tagname: str + mime_type: str + sheetPr: Incomplete + sheetViews: Incomplete + sheetProtection: Incomplete + customSheetViews: Incomplete + pageMargins: Incomplete + pageSetup: Incomplete + drawing: Incomplete + drawingHF: Incomplete + picture: Incomplete + webPublishItems: Incomplete + extLst: Incomplete + sheet_state: Incomplete + headerFooter: Incomplete + HeaderFooter: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__( + self, + sheetPr: Incomplete | None = ..., + sheetViews: Incomplete | None = ..., + sheetProtection: Incomplete | None = ..., + customSheetViews: Incomplete | None = ..., + pageMargins: Incomplete | None = ..., + pageSetup: Incomplete | None = ..., + headerFooter: Incomplete | None = ..., + drawing: Incomplete | None = ..., + drawingHF: Incomplete | None = ..., + picture: Incomplete | None = ..., + webPublishItems: Incomplete | None = ..., + extLst: Incomplete | None = ..., + parent: Incomplete | None = ..., + title: str = ..., + sheet_state: str = ..., + ) -> None: ... + def add_chart(self, chart) -> None: ... + def to_tree(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/custom.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/custom.pyi new file mode 100644 index 00000000..88a26bd8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/custom.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class CustomChartsheetView(Serialisable): + tagname: str + guid: Incomplete + scale: Incomplete + state: Incomplete + zoomToFit: Incomplete + pageMargins: Incomplete + pageSetup: Incomplete + headerFooter: Incomplete + __elements__: Incomplete + def __init__( + self, + guid: Incomplete | None = ..., + scale: Incomplete | None = ..., + state: str = ..., + zoomToFit: Incomplete | None = ..., + pageMargins: Incomplete | None = ..., + pageSetup: Incomplete | None = ..., + headerFooter: Incomplete | None = ..., + ) -> None: ... + +class CustomChartsheetViews(Serialisable): + tagname: str + customSheetView: Incomplete + __elements__: Incomplete + def __init__(self, customSheetView: Incomplete | None = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/properties.pyi new file mode 100644 index 00000000..ecfa22fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/properties.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable as Serialisable + +class ChartsheetProperties(Serialisable): + tagname: str + published: Incomplete + codeName: Incomplete + tabColor: Incomplete + __elements__: Incomplete + def __init__( + self, published: Incomplete | None = ..., codeName: Incomplete | None = ..., tabColor: Incomplete | None = ... + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/protection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/protection.pyi new file mode 100644 index 00000000..1565a57d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/protection.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable +from openpyxl.worksheet.protection import _Protected + +class ChartsheetProtection(Serialisable, _Protected): + tagname: str + algorithmName: Incomplete + hashValue: Incomplete + saltValue: Incomplete + spinCount: Incomplete + content: Incomplete + objects: Incomplete + __attrs__: Incomplete + password: Incomplete + def __init__( + self, + content: Incomplete | None = ..., + objects: Incomplete | None = ..., + hashValue: Incomplete | None = ..., + spinCount: Incomplete | None = ..., + saltValue: Incomplete | None = ..., + algorithmName: Incomplete | None = ..., + password: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/publish.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/publish.pyi new file mode 100644 index 00000000..deb8575e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/publish.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class WebPublishItem(Serialisable): + tagname: str + id: Incomplete + divId: Incomplete + sourceType: Incomplete + sourceRef: Incomplete + sourceObject: Incomplete + destinationFile: Incomplete + title: Incomplete + autoRepublish: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + divId: Incomplete | None = ..., + sourceType: Incomplete | None = ..., + sourceRef: Incomplete | None = ..., + sourceObject: Incomplete | None = ..., + destinationFile: Incomplete | None = ..., + title: Incomplete | None = ..., + autoRepublish: Incomplete | None = ..., + ) -> None: ... + +class WebPublishItems(Serialisable): + tagname: str + count: Incomplete + webPublishItem: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., webPublishItem: Incomplete | None = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/relation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/relation.pyi new file mode 100644 index 00000000..5c4cf80d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/relation.pyi @@ -0,0 +1,69 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class SheetBackgroundPicture(Serialisable): + tagname: str + id: Incomplete + def __init__(self, id) -> None: ... + +class DrawingHF(Serialisable): + id: Incomplete + lho: Incomplete + leftHeaderOddPages: Incomplete + lhe: Incomplete + leftHeaderEvenPages: Incomplete + lhf: Incomplete + leftHeaderFirstPage: Incomplete + cho: Incomplete + centerHeaderOddPages: Incomplete + che: Incomplete + centerHeaderEvenPages: Incomplete + chf: Incomplete + centerHeaderFirstPage: Incomplete + rho: Incomplete + rightHeaderOddPages: Incomplete + rhe: Incomplete + rightHeaderEvenPages: Incomplete + rhf: Incomplete + rightHeaderFirstPage: Incomplete + lfo: Incomplete + leftFooterOddPages: Incomplete + lfe: Incomplete + leftFooterEvenPages: Incomplete + lff: Incomplete + leftFooterFirstPage: Incomplete + cfo: Incomplete + centerFooterOddPages: Incomplete + cfe: Incomplete + centerFooterEvenPages: Incomplete + cff: Incomplete + centerFooterFirstPage: Incomplete + rfo: Incomplete + rightFooterOddPages: Incomplete + rfe: Incomplete + rightFooterEvenPages: Incomplete + rff: Incomplete + rightFooterFirstPage: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + lho: Incomplete | None = ..., + lhe: Incomplete | None = ..., + lhf: Incomplete | None = ..., + cho: Incomplete | None = ..., + che: Incomplete | None = ..., + chf: Incomplete | None = ..., + rho: Incomplete | None = ..., + rhe: Incomplete | None = ..., + rhf: Incomplete | None = ..., + lfo: Incomplete | None = ..., + lfe: Incomplete | None = ..., + lff: Incomplete | None = ..., + cfo: Incomplete | None = ..., + cfe: Incomplete | None = ..., + cff: Incomplete | None = ..., + rfo: Incomplete | None = ..., + rfe: Incomplete | None = ..., + rff: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/views.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/views.pyi new file mode 100644 index 00000000..772090b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/chartsheet/views.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class ChartsheetView(Serialisable): + tagname: str + tabSelected: Incomplete + zoomScale: Incomplete + workbookViewId: Incomplete + zoomToFit: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + tabSelected: Incomplete | None = ..., + zoomScale: Incomplete | None = ..., + workbookViewId: int = ..., + zoomToFit: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class ChartsheetViewList(Serialisable): + tagname: str + sheetView: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, sheetView: Incomplete | None = ..., extLst: Incomplete | None = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/__init__.pyi new file mode 100644 index 00000000..86ce8fc2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/__init__.pyi @@ -0,0 +1 @@ +from .comments import Comment as Comment diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/author.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/author.pyi new file mode 100644 index 00000000..e80cfa91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/author.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class AuthorList(Serialisable): + tagname: str + author: Incomplete + authors: Incomplete + def __init__(self, author=...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/comment_sheet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/comment_sheet.pyi new file mode 100644 index 00000000..50dcef57 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/comment_sheet.pyi @@ -0,0 +1,90 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +from openpyxl.descriptors.serialisable import Serialisable + +class Properties(Serialisable): + locked: Incomplete + defaultSize: Incomplete + disabled: Incomplete + uiObject: Incomplete + autoFill: Incomplete + autoLine: Incomplete + altText: Incomplete + textHAlign: Incomplete + textVAlign: Incomplete + lockText: Incomplete + justLastX: Incomplete + autoScale: Incomplete + rowHidden: Incomplete + colHidden: Incomplete + __elements__: Incomplete + anchor: Incomplete + def __init__( + self, + locked: Incomplete | None = ..., + defaultSize: Incomplete | None = ..., + _print: Incomplete | None = ..., + disabled: Incomplete | None = ..., + uiObject: Incomplete | None = ..., + autoFill: Incomplete | None = ..., + autoLine: Incomplete | None = ..., + altText: Incomplete | None = ..., + textHAlign: Incomplete | None = ..., + textVAlign: Incomplete | None = ..., + lockText: Incomplete | None = ..., + justLastX: Incomplete | None = ..., + autoScale: Incomplete | None = ..., + rowHidden: Incomplete | None = ..., + colHidden: Incomplete | None = ..., + anchor: Incomplete | None = ..., + ) -> None: ... + +class CommentRecord(Serialisable): + tagname: str + ref: Incomplete + authorId: Incomplete + guid: Incomplete + shapeId: Incomplete + text: Incomplete + commentPr: Incomplete + author: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + height: Incomplete + width: Incomplete + def __init__( + self, + ref: str = ..., + authorId: int = ..., + guid: Incomplete | None = ..., + shapeId: int = ..., + text: Incomplete | None = ..., + commentPr: Incomplete | None = ..., + author: Incomplete | None = ..., + height: int = ..., + width: int = ..., + ) -> None: ... + @classmethod + def from_cell(cls, cell): ... + @property + def content(self): ... 
+ +class CommentSheet(Serialisable): + tagname: str + authors: Incomplete + commentList: Incomplete + extLst: Incomplete + mime_type: str + __elements__: Incomplete + def __init__( + self, authors: Incomplete | None = ..., commentList: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... + def to_tree(self): ... + @property + def comments(self) -> Generator[Incomplete, None, None]: ... + @classmethod + def from_comments(cls, comments): ... + def write_shapes(self, vml: Incomplete | None = ...): ... + @property + def path(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/comments.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/comments.pyi new file mode 100644 index 00000000..b751879a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/comments.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete + +class Comment: + content: Incomplete + author: Incomplete + height: Incomplete + width: Incomplete + def __init__(self, text, author, height: int = ..., width: int = ...) -> None: ... + @property + def parent(self): ... + def __eq__(self, other): ... + def __copy__(self): ... + def bind(self, cell) -> None: ... + def unbind(self) -> None: ... + @property + def text(self): ... + @text.setter + def text(self, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/shape_writer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/shape_writer.pyi new file mode 100644 index 00000000..df71fe9d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/comments/shape_writer.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +vmlns: str +officens: str +excelns: str + +class ShapeWriter: + vml: Incomplete + vml_path: Incomplete + comments: Incomplete + def __init__(self, comments) -> None: ... + def add_comment_shapetype(self, root) -> None: ... + def add_comment_shape(self, root, idx, coord, height, width) -> None: ... + def write(self, root): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/__init__.pyi new file mode 100644 index 00000000..5c4b7cd1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/__init__.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +from .numbers import NUMERIC_TYPES as NUMERIC_TYPES +from .strings import safe_string as safe_string + +class DummyCode: ... + +string_types: Incomplete + +def deprecated(reason): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/abc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/abc.pyi new file mode 100644 index 00000000..5beda934 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/abc.pyi @@ -0,0 +1 @@ +from abc import ABC as ABC diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/numbers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/numbers.pyi new file mode 100644 index 00000000..2d5d36fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/numbers.pyi @@ -0,0 +1,4 @@ +from _typeshed import Incomplete + +NUMERIC_TYPES: Incomplete +NUMPY: bool diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/product.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/product.pyi new file mode 100644 index 00000000..60e7b298 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/product.pyi @@ -0,0 +1,3 @@ +def product(sequence): ... + +prod = product diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/singleton.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/singleton.pyi new file mode 100644 index 00000000..65403a66 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/singleton.pyi @@ -0,0 +1,7 @@ +class Singleton(type): + def __init__(self, *args, **kw) -> None: ... + def __call__(self, *args, **kw): ... + +class Cached(type): + def __init__(self, *args, **kw) -> None: ... + def __call__(self, *args): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/strings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/strings.pyi new file mode 100644 index 00000000..0e3080eb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/compat/strings.pyi @@ -0,0 +1,5 @@ +from _typeshed import Incomplete + +VER: Incomplete + +def safe_string(value): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/__init__.pyi new file mode 100644 index 00000000..bf852331 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/__init__.pyi @@ -0,0 +1,11 @@ +from .base import * +from .sequence import Sequence as Sequence + +class MetaStrict(type): + def __new__(cls, clsname, bases, methods): ... + +class MetaSerialisable(type): + def __new__(cls, clsname, bases, methods): ... + +class Strict(metaclass=MetaStrict): ... +class _Serialiasable(metaclass=MetaSerialisable): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/base.pyi new file mode 100644 index 00000000..c61b95e8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/base.pyi @@ -0,0 +1,85 @@ +from _typeshed import Incomplete + +class Descriptor: + name: Incomplete + def __init__(self, name: Incomplete | None = ..., **kw) -> None: ... + def __set__(self, instance, value) -> None: ... + +class Typed(Descriptor): + expected_type: Incomplete + allow_none: bool + nested: bool + __doc__: Incomplete + def __init__(self, *args, **kw) -> None: ... + def __set__(self, instance, value) -> None: ... + +class Convertible(Typed): + def __set__(self, instance, value) -> None: ... + +class Max(Convertible): + expected_type: Incomplete + allow_none: bool + def __init__(self, **kw) -> None: ... + def __set__(self, instance, value) -> None: ... + +class Min(Convertible): + expected_type: Incomplete + allow_none: bool + def __init__(self, **kw) -> None: ... + def __set__(self, instance, value) -> None: ... + +class MinMax(Min, Max): ... + +class Set(Descriptor): + __doc__: Incomplete + def __init__(self, name: Incomplete | None = ..., **kw) -> None: ... + def __set__(self, instance, value) -> None: ... + +class NoneSet(Set): + def __init__(self, name: Incomplete | None = ..., **kw) -> None: ... + def __set__(self, instance, value) -> None: ... + +class Integer(Convertible): + expected_type: Incomplete + +class Float(Convertible): + expected_type: Incomplete + +class Bool(Convertible): + expected_type: Incomplete + def __set__(self, instance, value) -> None: ... + +class String(Typed): + expected_type: Incomplete + +class Text(String, Convertible): ... + +class ASCII(Typed): + expected_type: Incomplete + +class Tuple(Typed): + expected_type: Incomplete + +class Length(Descriptor): + def __init__(self, name: Incomplete | None = ..., **kw) -> None: ... + def __set__(self, instance, value) -> None: ... + +class Default(Typed): + def __init__(self, name: Incomplete | None = ..., **kw) -> None: ... + def __call__(self): ... + +class Alias(Descriptor): + alias: Incomplete + def __init__(self, alias) -> None: ... + def __set__(self, instance, value) -> None: ... + def __get__(self, instance, cls): ... + +class MatchPattern(Descriptor): + allow_none: bool + test_pattern: Incomplete + def __init__(self, name: Incomplete | None = ..., **kw) -> None: ... + def __set__(self, instance, value) -> None: ... + +class DateTime(Typed): + expected_type: Incomplete + def __set__(self, instance, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/excel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/excel.pyi new file mode 100644 index 00000000..1d7c80f6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/excel.pyi @@ -0,0 +1,46 @@ +from _typeshed import Incomplete + +from . 
import Integer, MatchPattern, MinMax, String +from .serialisable import Serialisable + +class HexBinary(MatchPattern): + pattern: str + +class UniversalMeasure(MatchPattern): + pattern: str + +class TextPoint(MinMax): + expected_type: Incomplete + min: int + max: int + +Coordinate = Integer + +class Percentage(MinMax): + pattern: str + min: int + max: int + def __set__(self, instance, value) -> None: ... + +class Extension(Serialisable): + uri: Incomplete + def __init__(self, uri: Incomplete | None = ...) -> None: ... + +class ExtensionList(Serialisable): + ext: Incomplete + def __init__(self, ext=...) -> None: ... + +class Relation(String): + namespace: Incomplete + allow_none: bool + +class Base64Binary(MatchPattern): + pattern: str + +class Guid(MatchPattern): + pattern: str + +class CellRange(MatchPattern): + pattern: str + allow_none: bool + def __set__(self, instance, value) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/namespace.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/namespace.pyi new file mode 100644 index 00000000..2d64ce84 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/namespace.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def namespaced(obj, tagname, namespace: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/nested.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/nested.pyi new file mode 100644 index 00000000..f4902e65 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/nested.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete + +from .base import Bool, Convertible, Descriptor, Float, Integer, MinMax, NoneSet, Set, String + +class Nested(Descriptor): + nested: bool + attribute: str + def __set__(self, instance, value) -> None: ... + def from_tree(self, node): ... + def to_tree(self, tagname: Incomplete | None = ..., value: Incomplete | None = ..., namespace: Incomplete | None = ...): ... + +class NestedValue(Nested, Convertible): ... + +class NestedText(NestedValue): + def from_tree(self, node): ... + def to_tree(self, tagname: Incomplete | None = ..., value: Incomplete | None = ..., namespace: Incomplete | None = ...): ... + +class NestedFloat(NestedValue, Float): ... +class NestedInteger(NestedValue, Integer): ... +class NestedString(NestedValue, String): ... + +class NestedBool(NestedValue, Bool): + def from_tree(self, node): ... + +class NestedNoneSet(Nested, NoneSet): ... +class NestedSet(Nested, Set): ... +class NestedMinMax(Nested, MinMax): ... + +class EmptyTag(Nested, Bool): + def from_tree(self, node): ... + def to_tree(self, tagname: Incomplete | None = ..., value: Incomplete | None = ..., namespace: Incomplete | None = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/sequence.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/sequence.pyi new file mode 100644 index 00000000..81259a9d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/sequence.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +from .base import Alias, Descriptor + +class Sequence(Descriptor): + expected_type: Incomplete + seq_types: Incomplete + idx_base: int + unique: bool + def __set__(self, instance, seq) -> None: ... + def to_tree(self, tagname, obj, namespace: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ... + +class ValueSequence(Sequence): + attribute: str + def to_tree(self, tagname, obj, namespace: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ... + def from_tree(self, node): ... + +class NestedSequence(Sequence): + count: bool + def to_tree(self, tagname, obj, namespace: Incomplete | None = ...): ... + def from_tree(self, node): ... + +class MultiSequence(Sequence): + def __set__(self, instance, seq) -> None: ... + def to_tree(self, tagname, obj, namespace: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ... + +class MultiSequencePart(Alias): + expected_type: Incomplete + store: Incomplete + def __init__(self, expected_type, store) -> None: ... + def __set__(self, instance, value) -> None: ... + def __get__(self, instance, cls): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/serialisable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/serialisable.pyi new file mode 100644 index 00000000..f309f6a4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/serialisable.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +from . import _Serialiasable + +KEYWORDS: Incomplete +seq_types: Incomplete + +class Serialisable(_Serialiasable): + __attrs__: Incomplete + __nested__: Incomplete + __elements__: Incomplete + __namespaced__: Incomplete + idx_base: int + @property + # TODO: needs overrides in many sub-classes + # @abstractmethod + def tagname(self) -> str: ... + namespace: Incomplete + @classmethod + def from_tree(cls, node): ... + def to_tree(self, tagname: Incomplete | None = ..., idx: Incomplete | None = ..., namespace: Incomplete | None = ...): ... + def __iter__(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... + def __add__(self, other): ... + def __copy__(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/slots.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/slots.pyi new file mode 100644 index 00000000..87f6a075 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/descriptors/slots.pyi @@ -0,0 +1,2 @@ +class AutoSlotProperties(type): + def __new__(cls, classname, bases, dictionary): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/__init__.pyi new file mode 100644 index 00000000..a41cad04 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/__init__.pyi @@ -0,0 +1 @@ +from .drawing import Drawing as Drawing diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/colors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/colors.pyi new file mode 100644 index 00000000..aaf77fd0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/colors.pyi @@ -0,0 +1,216 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import Typed +from openpyxl.descriptors.serialisable import Serialisable + +PRESET_COLORS: Incomplete +SCHEME_COLORS: Incomplete + +class Transform(Serialisable): ... + +class SystemColor(Serialisable): + tagname: str + namespace: Incomplete + tint: Incomplete + shade: Incomplete + comp: Incomplete + inv: Incomplete + gray: Incomplete + alpha: Incomplete + alphaOff: Incomplete + alphaMod: Incomplete + hue: Incomplete + hueOff: Incomplete + hueMod: Incomplete + sat: Incomplete + satOff: Incomplete + satMod: Incomplete + lum: Incomplete + lumOff: Incomplete + lumMod: Incomplete + red: Incomplete + redOff: Incomplete + redMod: Incomplete + green: Incomplete + greenOff: Incomplete + greenMod: Incomplete + blue: Incomplete + blueOff: Incomplete + blueMod: Incomplete + gamma: Incomplete + invGamma: Incomplete + val: Incomplete + lastClr: Incomplete + __elements__: Incomplete + def __init__( + self, + val: str = ..., + lastClr: Incomplete | None = ..., + tint: Incomplete | None = ..., + shade: Incomplete | None = ..., + comp: Incomplete | None = ..., + inv: Incomplete | None = ..., + gray: Incomplete | None = ..., + alpha: Incomplete | None = ..., + alphaOff: Incomplete | None = ..., + alphaMod: Incomplete | None = ..., + hue: Incomplete | None = ..., + hueOff: Incomplete | None = ..., + hueMod: Incomplete | None = ..., + sat: Incomplete | None = ..., + satOff: Incomplete | None = ..., + satMod: Incomplete | None = ..., + lum: Incomplete | None = ..., + lumOff: Incomplete | None = ..., + lumMod: Incomplete | None = ..., + red: Incomplete | None = ..., + redOff: Incomplete | None = ..., + redMod: Incomplete | None = ..., + green: Incomplete | None = ..., + greenOff: Incomplete | None = ..., + greenMod: Incomplete | None = ..., + blue: Incomplete | None = ..., + blueOff: Incomplete | None = ..., + blueMod: Incomplete | None = ..., + gamma: Incomplete | None = ..., + invGamma: Incomplete | None = ..., + ) -> None: ... + +class HSLColor(Serialisable): + tagname: str + hue: Incomplete + sat: Incomplete + lum: Incomplete + def __init__(self, hue: Incomplete | None = ..., sat: Incomplete | None = ..., lum: Incomplete | None = ...) -> None: ... + +class RGBPercent(Serialisable): + tagname: str + r: Incomplete + g: Incomplete + b: Incomplete + def __init__(self, r: Incomplete | None = ..., g: Incomplete | None = ..., b: Incomplete | None = ...) -> None: ... 
+ +class SchemeColor(Serialisable): + tagname: str + namespace: Incomplete + tint: Incomplete + shade: Incomplete + comp: Incomplete + inv: Incomplete + gray: Incomplete + alpha: Incomplete + alphaOff: Incomplete + alphaMod: Incomplete + hue: Incomplete + hueOff: Incomplete + hueMod: Incomplete + sat: Incomplete + satOff: Incomplete + satMod: Incomplete + lum: Incomplete + lumOff: Incomplete + lumMod: Incomplete + red: Incomplete + redOff: Incomplete + redMod: Incomplete + green: Incomplete + greenOff: Incomplete + greenMod: Incomplete + blue: Incomplete + blueOff: Incomplete + blueMod: Incomplete + gamma: Incomplete + invGamma: Incomplete + val: Incomplete + __elements__: Incomplete + def __init__( + self, + tint: Incomplete | None = ..., + shade: Incomplete | None = ..., + comp: Incomplete | None = ..., + inv: Incomplete | None = ..., + gray: Incomplete | None = ..., + alpha: Incomplete | None = ..., + alphaOff: Incomplete | None = ..., + alphaMod: Incomplete | None = ..., + hue: Incomplete | None = ..., + hueOff: Incomplete | None = ..., + hueMod: Incomplete | None = ..., + sat: Incomplete | None = ..., + satOff: Incomplete | None = ..., + satMod: Incomplete | None = ..., + lum: Incomplete | None = ..., + lumOff: Incomplete | None = ..., + lumMod: Incomplete | None = ..., + red: Incomplete | None = ..., + redOff: Incomplete | None = ..., + redMod: Incomplete | None = ..., + green: Incomplete | None = ..., + greenOff: Incomplete | None = ..., + greenMod: Incomplete | None = ..., + blue: Incomplete | None = ..., + blueOff: Incomplete | None = ..., + blueMod: Incomplete | None = ..., + gamma: Incomplete | None = ..., + invGamma: Incomplete | None = ..., + val: Incomplete | None = ..., + ) -> None: ... + +class ColorChoice(Serialisable): + tagname: str + namespace: Incomplete + scrgbClr: Incomplete + RGBPercent: Incomplete + srgbClr: Incomplete + RGB: Incomplete + hslClr: Incomplete + sysClr: Incomplete + schemeClr: Incomplete + prstClr: Incomplete + __elements__: Incomplete + def __init__( + self, + scrgbClr: Incomplete | None = ..., + srgbClr: Incomplete | None = ..., + hslClr: Incomplete | None = ..., + sysClr: Incomplete | None = ..., + schemeClr: Incomplete | None = ..., + prstClr: Incomplete | None = ..., + ) -> None: ... + +class ColorMapping(Serialisable): + tagname: str + bg1: Incomplete + tx1: Incomplete + bg2: Incomplete + tx2: Incomplete + accent1: Incomplete + accent2: Incomplete + accent3: Incomplete + accent4: Incomplete + accent5: Incomplete + accent6: Incomplete + hlink: Incomplete + folHlink: Incomplete + extLst: Incomplete + def __init__( + self, + bg1: str = ..., + tx1: str = ..., + bg2: str = ..., + tx2: str = ..., + accent1: str = ..., + accent2: str = ..., + accent3: str = ..., + accent4: str = ..., + accent5: str = ..., + accent6: str = ..., + hlink: str = ..., + folHlink: str = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class ColorChoiceDescriptor(Typed): + expected_type: Incomplete + allow_none: bool + def __set__(self, instance, value) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/connector.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/connector.pyi new file mode 100644 index 00000000..926b416c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/connector.pyi @@ -0,0 +1,76 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Connection(Serialisable): + id: Incomplete + idx: Incomplete + def __init__(self, id: Incomplete | None = ..., idx: Incomplete | None = ...) -> None: ... + +class ConnectorLocking(Serialisable): + extLst: Incomplete + def __init__(self, extLst: Incomplete | None = ...) -> None: ... + +class NonVisualConnectorProperties(Serialisable): + cxnSpLocks: Incomplete + stCxn: Incomplete + endCxn: Incomplete + extLst: Incomplete + def __init__( + self, + cxnSpLocks: Incomplete | None = ..., + stCxn: Incomplete | None = ..., + endCxn: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class ConnectorNonVisual(Serialisable): + cNvPr: Incomplete + cNvCxnSpPr: Incomplete + __elements__: Incomplete + def __init__(self, cNvPr: Incomplete | None = ..., cNvCxnSpPr: Incomplete | None = ...) -> None: ... + +class ConnectorShape(Serialisable): + tagname: str + nvCxnSpPr: Incomplete + spPr: Incomplete + style: Incomplete + macro: Incomplete + fPublished: Incomplete + def __init__( + self, + nvCxnSpPr: Incomplete | None = ..., + spPr: Incomplete | None = ..., + style: Incomplete | None = ..., + macro: Incomplete | None = ..., + fPublished: Incomplete | None = ..., + ) -> None: ... + +class ShapeMeta(Serialisable): + tagname: str + cNvPr: Incomplete + cNvSpPr: Incomplete + def __init__(self, cNvPr: Incomplete | None = ..., cNvSpPr: Incomplete | None = ...) -> None: ... + +class Shape(Serialisable): + macro: Incomplete + textlink: Incomplete + fPublished: Incomplete + fLocksText: Incomplete + nvSpPr: Incomplete + meta: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + style: Incomplete + txBody: Incomplete + def __init__( + self, + macro: Incomplete | None = ..., + textlink: Incomplete | None = ..., + fPublished: Incomplete | None = ..., + fLocksText: Incomplete | None = ..., + nvSpPr: Incomplete | None = ..., + spPr: Incomplete | None = ..., + style: Incomplete | None = ..., + txBody: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/drawing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/drawing.pyi new file mode 100644 index 00000000..805c8d2a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/drawing.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +class Drawing: + count: int + name: str + description: str + coordinates: Incomplete + left: int + top: int + resize_proportional: bool + rotation: int + anchortype: str + anchorcol: int + anchorrow: int + def __init__(self) -> None: ... + @property + def width(self): ... + @width.setter + def width(self, w) -> None: ... + @property + def height(self): ... + @height.setter + def height(self, h) -> None: ... + def set_dimension(self, w: int = ..., h: int = ...) -> None: ... + def get_emu_dimensions(self): ... + @property + def anchor(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/effect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/effect.pyi new file mode 100644 index 00000000..58b6e09e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/effect.pyi @@ -0,0 +1,208 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +from .colors import ColorChoice + +class TintEffect(Serialisable): + tagname: str + hue: Incomplete + amt: Incomplete + def __init__(self, hue: int = ..., amt: int = ...) -> None: ... + +class LuminanceEffect(Serialisable): + tagname: str + bright: Incomplete + contrast: Incomplete + def __init__(self, bright: int = ..., contrast: int = ...) -> None: ... + +class HSLEffect(Serialisable): + hue: Incomplete + sat: Incomplete + lum: Incomplete + def __init__(self, hue: Incomplete | None = ..., sat: Incomplete | None = ..., lum: Incomplete | None = ...) -> None: ... + +class GrayscaleEffect(Serialisable): + tagname: str + +class FillOverlayEffect(Serialisable): + blend: Incomplete + def __init__(self, blend: Incomplete | None = ...) -> None: ... + +class DuotoneEffect(Serialisable): ... +class ColorReplaceEffect(Serialisable): ... +class Color(Serialisable): ... + +class ColorChangeEffect(Serialisable): + useA: Incomplete + clrFrom: Incomplete + clrTo: Incomplete + def __init__( + self, useA: Incomplete | None = ..., clrFrom: Incomplete | None = ..., clrTo: Incomplete | None = ... + ) -> None: ... + +class BlurEffect(Serialisable): + rad: Incomplete + grow: Incomplete + def __init__(self, rad: Incomplete | None = ..., grow: Incomplete | None = ...) -> None: ... + +class BiLevelEffect(Serialisable): + thresh: Incomplete + def __init__(self, thresh: Incomplete | None = ...) -> None: ... + +class AlphaReplaceEffect(Serialisable): + a: Incomplete + def __init__(self, a: Incomplete | None = ...) -> None: ... + +class AlphaModulateFixedEffect(Serialisable): + amt: Incomplete + def __init__(self, amt: Incomplete | None = ...) -> None: ... + +class EffectContainer(Serialisable): + type: Incomplete + name: Incomplete + def __init__(self, type: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + +class AlphaModulateEffect(Serialisable): + cont: Incomplete + def __init__(self, cont: Incomplete | None = ...) -> None: ... + +class AlphaInverseEffect(Serialisable): ... +class AlphaFloorEffect(Serialisable): ... +class AlphaCeilingEffect(Serialisable): ... + +class AlphaBiLevelEffect(Serialisable): + thresh: Incomplete + def __init__(self, thresh: Incomplete | None = ...) -> None: ... + +class GlowEffect(ColorChoice): + rad: Incomplete + scrgbClr: Incomplete + srgbClr: Incomplete + hslClr: Incomplete + sysClr: Incomplete + schemeClr: Incomplete + prstClr: Incomplete + __elements__: Incomplete + def __init__(self, rad: Incomplete | None = ..., **kw) -> None: ... + +class InnerShadowEffect(ColorChoice): + blurRad: Incomplete + dist: Incomplete + dir: Incomplete + scrgbClr: Incomplete + srgbClr: Incomplete + hslClr: Incomplete + sysClr: Incomplete + schemeClr: Incomplete + prstClr: Incomplete + __elements__: Incomplete + def __init__( + self, blurRad: Incomplete | None = ..., dist: Incomplete | None = ..., dir: Incomplete | None = ..., **kw + ) -> None: ... 
+ +class OuterShadow(ColorChoice): + tagname: str + blurRad: Incomplete + dist: Incomplete + dir: Incomplete + sx: Incomplete + sy: Incomplete + kx: Incomplete + ky: Incomplete + algn: Incomplete + rotWithShape: Incomplete + scrgbClr: Incomplete + srgbClr: Incomplete + hslClr: Incomplete + sysClr: Incomplete + schemeClr: Incomplete + prstClr: Incomplete + __elements__: Incomplete + def __init__( + self, + blurRad: Incomplete | None = ..., + dist: Incomplete | None = ..., + dir: Incomplete | None = ..., + sx: Incomplete | None = ..., + sy: Incomplete | None = ..., + kx: Incomplete | None = ..., + ky: Incomplete | None = ..., + algn: Incomplete | None = ..., + rotWithShape: Incomplete | None = ..., + **kw, + ) -> None: ... + +class PresetShadowEffect(ColorChoice): + prst: Incomplete + dist: Incomplete + dir: Incomplete + scrgbClr: Incomplete + srgbClr: Incomplete + hslClr: Incomplete + sysClr: Incomplete + schemeClr: Incomplete + prstClr: Incomplete + __elements__: Incomplete + def __init__( + self, prst: Incomplete | None = ..., dist: Incomplete | None = ..., dir: Incomplete | None = ..., **kw + ) -> None: ... + +class ReflectionEffect(Serialisable): + blurRad: Incomplete + stA: Incomplete + stPos: Incomplete + endA: Incomplete + endPos: Incomplete + dist: Incomplete + dir: Incomplete + fadeDir: Incomplete + sx: Incomplete + sy: Incomplete + kx: Incomplete + ky: Incomplete + algn: Incomplete + rotWithShape: Incomplete + def __init__( + self, + blurRad: Incomplete | None = ..., + stA: Incomplete | None = ..., + stPos: Incomplete | None = ..., + endA: Incomplete | None = ..., + endPos: Incomplete | None = ..., + dist: Incomplete | None = ..., + dir: Incomplete | None = ..., + fadeDir: Incomplete | None = ..., + sx: Incomplete | None = ..., + sy: Incomplete | None = ..., + kx: Incomplete | None = ..., + ky: Incomplete | None = ..., + algn: Incomplete | None = ..., + rotWithShape: Incomplete | None = ..., + ) -> None: ... + +class SoftEdgesEffect(Serialisable): + rad: Incomplete + def __init__(self, rad: Incomplete | None = ...) -> None: ... + +class EffectList(Serialisable): + blur: Incomplete + fillOverlay: Incomplete + glow: Incomplete + innerShdw: Incomplete + outerShdw: Incomplete + prstShdw: Incomplete + reflection: Incomplete + softEdge: Incomplete + __elements__: Incomplete + def __init__( + self, + blur: Incomplete | None = ..., + fillOverlay: Incomplete | None = ..., + glow: Incomplete | None = ..., + innerShdw: Incomplete | None = ..., + outerShdw: Incomplete | None = ..., + prstShdw: Incomplete | None = ..., + reflection: Incomplete | None = ..., + softEdge: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/fill.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/fill.pyi new file mode 100644 index 00000000..88b2ce16 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/fill.pyi @@ -0,0 +1,223 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class PatternFillProperties(Serialisable): + tagname: str + namespace: Incomplete + prst: Incomplete + preset: Incomplete + fgClr: Incomplete + foreground: Incomplete + bgClr: Incomplete + background: Incomplete + __elements__: Incomplete + def __init__(self, prst: Incomplete | None = ..., fgClr: Incomplete | None = ..., bgClr: Incomplete | None = ...) -> None: ... 
+ +class RelativeRect(Serialisable): + tagname: str + namespace: Incomplete + l: Incomplete + left: Incomplete + t: Incomplete + top: Incomplete + r: Incomplete + right: Incomplete + b: Incomplete + bottom: Incomplete + def __init__( + self, l: Incomplete | None = ..., t: Incomplete | None = ..., r: Incomplete | None = ..., b: Incomplete | None = ... + ) -> None: ... + +class StretchInfoProperties(Serialisable): + tagname: str + namespace: Incomplete + fillRect: Incomplete + def __init__(self, fillRect=...) -> None: ... + +class GradientStop(Serialisable): + tagname: str + namespace: Incomplete + pos: Incomplete + scrgbClr: Incomplete + RGBPercent: Incomplete + srgbClr: Incomplete + RGB: Incomplete + hslClr: Incomplete + sysClr: Incomplete + schemeClr: Incomplete + prstClr: Incomplete + __elements__: Incomplete + def __init__( + self, + pos: Incomplete | None = ..., + scrgbClr: Incomplete | None = ..., + srgbClr: Incomplete | None = ..., + hslClr: Incomplete | None = ..., + sysClr: Incomplete | None = ..., + schemeClr: Incomplete | None = ..., + prstClr: Incomplete | None = ..., + ) -> None: ... + +class LinearShadeProperties(Serialisable): + tagname: str + namespace: Incomplete + ang: Incomplete + scaled: Incomplete + def __init__(self, ang: Incomplete | None = ..., scaled: Incomplete | None = ...) -> None: ... + +class PathShadeProperties(Serialisable): + tagname: str + namespace: Incomplete + path: Incomplete + fillToRect: Incomplete + def __init__(self, path: Incomplete | None = ..., fillToRect: Incomplete | None = ...) -> None: ... + +class GradientFillProperties(Serialisable): + tagname: str + namespace: Incomplete + flip: Incomplete + rotWithShape: Incomplete + gsLst: Incomplete + stop_list: Incomplete + lin: Incomplete + linear: Incomplete + path: Incomplete + tileRect: Incomplete + __elements__: Incomplete + def __init__( + self, + flip: Incomplete | None = ..., + rotWithShape: Incomplete | None = ..., + gsLst=..., + lin: Incomplete | None = ..., + path: Incomplete | None = ..., + tileRect: Incomplete | None = ..., + ) -> None: ... + +class SolidColorFillProperties(Serialisable): + tagname: str + scrgbClr: Incomplete + RGBPercent: Incomplete + srgbClr: Incomplete + RGB: Incomplete + hslClr: Incomplete + sysClr: Incomplete + schemeClr: Incomplete + prstClr: Incomplete + __elements__: Incomplete + def __init__( + self, + scrgbClr: Incomplete | None = ..., + srgbClr: Incomplete | None = ..., + hslClr: Incomplete | None = ..., + sysClr: Incomplete | None = ..., + schemeClr: Incomplete | None = ..., + prstClr: Incomplete | None = ..., + ) -> None: ... 
+ +class Blip(Serialisable): + tagname: str + namespace: Incomplete + cstate: Incomplete + embed: Incomplete + link: Incomplete + noGrp: Incomplete + noSelect: Incomplete + noRot: Incomplete + noChangeAspect: Incomplete + noMove: Incomplete + noResize: Incomplete + noEditPoints: Incomplete + noAdjustHandles: Incomplete + noChangeArrowheads: Incomplete + noChangeShapeType: Incomplete + extLst: Incomplete + alphaBiLevel: Incomplete + alphaCeiling: Incomplete + alphaFloor: Incomplete + alphaInv: Incomplete + alphaMod: Incomplete + alphaModFix: Incomplete + alphaRepl: Incomplete + biLevel: Incomplete + blur: Incomplete + clrChange: Incomplete + clrRepl: Incomplete + duotone: Incomplete + fillOverlay: Incomplete + grayscl: Incomplete + hsl: Incomplete + lum: Incomplete + tint: Incomplete + __elements__: Incomplete + def __init__( + self, + cstate: Incomplete | None = ..., + embed: Incomplete | None = ..., + link: Incomplete | None = ..., + noGrp: Incomplete | None = ..., + noSelect: Incomplete | None = ..., + noRot: Incomplete | None = ..., + noChangeAspect: Incomplete | None = ..., + noMove: Incomplete | None = ..., + noResize: Incomplete | None = ..., + noEditPoints: Incomplete | None = ..., + noAdjustHandles: Incomplete | None = ..., + noChangeArrowheads: Incomplete | None = ..., + noChangeShapeType: Incomplete | None = ..., + extLst: Incomplete | None = ..., + alphaBiLevel: Incomplete | None = ..., + alphaCeiling: Incomplete | None = ..., + alphaFloor: Incomplete | None = ..., + alphaInv: Incomplete | None = ..., + alphaMod: Incomplete | None = ..., + alphaModFix: Incomplete | None = ..., + alphaRepl: Incomplete | None = ..., + biLevel: Incomplete | None = ..., + blur: Incomplete | None = ..., + clrChange: Incomplete | None = ..., + clrRepl: Incomplete | None = ..., + duotone: Incomplete | None = ..., + fillOverlay: Incomplete | None = ..., + grayscl: Incomplete | None = ..., + hsl: Incomplete | None = ..., + lum: Incomplete | None = ..., + tint: Incomplete | None = ..., + ) -> None: ... + +class TileInfoProperties(Serialisable): + tx: Incomplete + ty: Incomplete + sx: Incomplete + sy: Incomplete + flip: Incomplete + algn: Incomplete + def __init__( + self, + tx: Incomplete | None = ..., + ty: Incomplete | None = ..., + sx: Incomplete | None = ..., + sy: Incomplete | None = ..., + flip: Incomplete | None = ..., + algn: Incomplete | None = ..., + ) -> None: ... + +class BlipFillProperties(Serialisable): + tagname: str + dpi: Incomplete + rotWithShape: Incomplete + blip: Incomplete + srcRect: Incomplete + tile: Incomplete + stretch: Incomplete + __elements__: Incomplete + def __init__( + self, + dpi: Incomplete | None = ..., + rotWithShape: Incomplete | None = ..., + blip: Incomplete | None = ..., + tile: Incomplete | None = ..., + stretch=..., + srcRect: Incomplete | None = ..., + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/geometry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/geometry.pyi new file mode 100644 index 00000000..f5ed079b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/geometry.pyi @@ -0,0 +1,259 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Point2D(Serialisable): + tagname: str + namespace: Incomplete + x: Incomplete + y: Incomplete + def __init__(self, x: Incomplete | None = ..., y: Incomplete | None = ...) -> None: ... + +class PositiveSize2D(Serialisable): + tagname: str + namespace: Incomplete + cx: Incomplete + width: Incomplete + cy: Incomplete + height: Incomplete + def __init__(self, cx: Incomplete | None = ..., cy: Incomplete | None = ...) -> None: ... + +class Transform2D(Serialisable): + tagname: str + namespace: Incomplete + rot: Incomplete + flipH: Incomplete + flipV: Incomplete + off: Incomplete + ext: Incomplete + chOff: Incomplete + chExt: Incomplete + __elements__: Incomplete + def __init__( + self, + rot: Incomplete | None = ..., + flipH: Incomplete | None = ..., + flipV: Incomplete | None = ..., + off: Incomplete | None = ..., + ext: Incomplete | None = ..., + chOff: Incomplete | None = ..., + chExt: Incomplete | None = ..., + ) -> None: ... + +class GroupTransform2D(Serialisable): + tagname: str + namespace: Incomplete + rot: Incomplete + flipH: Incomplete + flipV: Incomplete + off: Incomplete + ext: Incomplete + chOff: Incomplete + chExt: Incomplete + __elements__: Incomplete + def __init__( + self, + rot: int = ..., + flipH: Incomplete | None = ..., + flipV: Incomplete | None = ..., + off: Incomplete | None = ..., + ext: Incomplete | None = ..., + chOff: Incomplete | None = ..., + chExt: Incomplete | None = ..., + ) -> None: ... + +class SphereCoords(Serialisable): + tagname: str + lat: Incomplete + lon: Incomplete + rev: Incomplete + def __init__(self, lat: Incomplete | None = ..., lon: Incomplete | None = ..., rev: Incomplete | None = ...) -> None: ... + +class Camera(Serialisable): + tagname: str + prst: Incomplete + fov: Incomplete + zoom: Incomplete + rot: Incomplete + def __init__( + self, + prst: Incomplete | None = ..., + fov: Incomplete | None = ..., + zoom: Incomplete | None = ..., + rot: Incomplete | None = ..., + ) -> None: ... + +class LightRig(Serialisable): + tagname: str + rig: Incomplete + dir: Incomplete + rot: Incomplete + def __init__(self, rig: Incomplete | None = ..., dir: Incomplete | None = ..., rot: Incomplete | None = ...) -> None: ... + +class Vector3D(Serialisable): + tagname: str + dx: Incomplete + dy: Incomplete + dz: Incomplete + def __init__(self, dx: Incomplete | None = ..., dy: Incomplete | None = ..., dz: Incomplete | None = ...) -> None: ... + +class Point3D(Serialisable): + tagname: str + x: Incomplete + y: Incomplete + z: Incomplete + def __init__(self, x: Incomplete | None = ..., y: Incomplete | None = ..., z: Incomplete | None = ...) -> None: ... + +class Backdrop(Serialisable): + anchor: Incomplete + norm: Incomplete + up: Incomplete + extLst: Incomplete + def __init__( + self, + anchor: Incomplete | None = ..., + norm: Incomplete | None = ..., + up: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
+ +class Scene3D(Serialisable): + camera: Incomplete + lightRig: Incomplete + backdrop: Incomplete + extLst: Incomplete + def __init__( + self, + camera: Incomplete | None = ..., + lightRig: Incomplete | None = ..., + backdrop: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class Bevel(Serialisable): + tagname: str + w: Incomplete + h: Incomplete + prst: Incomplete + def __init__(self, w: Incomplete | None = ..., h: Incomplete | None = ..., prst: Incomplete | None = ...) -> None: ... + +class Shape3D(Serialisable): + namespace: Incomplete + z: Incomplete + extrusionH: Incomplete + contourW: Incomplete + prstMaterial: Incomplete + bevelT: Incomplete + bevelB: Incomplete + extrusionClr: Incomplete + contourClr: Incomplete + extLst: Incomplete + def __init__( + self, + z: Incomplete | None = ..., + extrusionH: Incomplete | None = ..., + contourW: Incomplete | None = ..., + prstMaterial: Incomplete | None = ..., + bevelT: Incomplete | None = ..., + bevelB: Incomplete | None = ..., + extrusionClr: Incomplete | None = ..., + contourClr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class Path2D(Serialisable): + w: Incomplete + h: Incomplete + fill: Incomplete + stroke: Incomplete + extrusionOk: Incomplete + def __init__( + self, + w: Incomplete | None = ..., + h: Incomplete | None = ..., + fill: Incomplete | None = ..., + stroke: Incomplete | None = ..., + extrusionOk: Incomplete | None = ..., + ) -> None: ... + +class Path2DList(Serialisable): + path: Incomplete + def __init__(self, path: Incomplete | None = ...) -> None: ... + +class GeomRect(Serialisable): + l: Incomplete + t: Incomplete + r: Incomplete + b: Incomplete + def __init__( + self, l: Incomplete | None = ..., t: Incomplete | None = ..., r: Incomplete | None = ..., b: Incomplete | None = ... + ) -> None: ... + +class AdjPoint2D(Serialisable): + x: Incomplete + y: Incomplete + def __init__(self, x: Incomplete | None = ..., y: Incomplete | None = ...) -> None: ... + +class ConnectionSite(Serialisable): + ang: Incomplete + pos: Incomplete + def __init__(self, ang: Incomplete | None = ..., pos: Incomplete | None = ...) -> None: ... + +class ConnectionSiteList(Serialisable): + cxn: Incomplete + def __init__(self, cxn: Incomplete | None = ...) -> None: ... + +class AdjustHandleList(Serialisable): ... + +class GeomGuide(Serialisable): + name: Incomplete + fmla: Incomplete + def __init__(self, name: Incomplete | None = ..., fmla: Incomplete | None = ...) -> None: ... + +class GeomGuideList(Serialisable): + gd: Incomplete + def __init__(self, gd: Incomplete | None = ...) -> None: ... + +class CustomGeometry2D(Serialisable): + avLst: Incomplete + gdLst: Incomplete + ahLst: Incomplete + cxnLst: Incomplete + pathLst: Incomplete + rect: Incomplete + def __init__( + self, + avLst: Incomplete | None = ..., + gdLst: Incomplete | None = ..., + ahLst: Incomplete | None = ..., + cxnLst: Incomplete | None = ..., + rect: Incomplete | None = ..., + pathLst: Incomplete | None = ..., + ) -> None: ... + +class PresetGeometry2D(Serialisable): + namespace: Incomplete + prst: Incomplete + avLst: Incomplete + def __init__(self, prst: Incomplete | None = ..., avLst: Incomplete | None = ...) -> None: ... + +class FontReference(Serialisable): + idx: Incomplete + def __init__(self, idx: Incomplete | None = ...) -> None: ... + +class StyleMatrixReference(Serialisable): + idx: Incomplete + def __init__(self, idx: Incomplete | None = ...) -> None: ... 
+ +class ShapeStyle(Serialisable): + lnRef: Incomplete + fillRef: Incomplete + effectRef: Incomplete + fontRef: Incomplete + def __init__( + self, + lnRef: Incomplete | None = ..., + fillRef: Incomplete | None = ..., + effectRef: Incomplete | None = ..., + fontRef: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/graphic.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/graphic.pyi new file mode 100644 index 00000000..c30ac13f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/graphic.pyi @@ -0,0 +1,76 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class GraphicFrameLocking(Serialisable): + noGrp: Incomplete + noDrilldown: Incomplete + noSelect: Incomplete + noChangeAspect: Incomplete + noMove: Incomplete + noResize: Incomplete + extLst: Incomplete + def __init__( + self, + noGrp: Incomplete | None = ..., + noDrilldown: Incomplete | None = ..., + noSelect: Incomplete | None = ..., + noChangeAspect: Incomplete | None = ..., + noMove: Incomplete | None = ..., + noResize: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class NonVisualGraphicFrameProperties(Serialisable): + tagname: str + graphicFrameLocks: Incomplete + extLst: Incomplete + def __init__(self, graphicFrameLocks: Incomplete | None = ..., extLst: Incomplete | None = ...) -> None: ... + +class NonVisualGraphicFrame(Serialisable): + tagname: str + cNvPr: Incomplete + cNvGraphicFramePr: Incomplete + __elements__: Incomplete + def __init__(self, cNvPr: Incomplete | None = ..., cNvGraphicFramePr: Incomplete | None = ...) -> None: ... + +class GraphicData(Serialisable): + tagname: str + namespace: Incomplete + uri: Incomplete + chart: Incomplete + def __init__(self, uri=..., chart: Incomplete | None = ...) -> None: ... + +class GraphicObject(Serialisable): + tagname: str + namespace: Incomplete + graphicData: Incomplete + def __init__(self, graphicData: Incomplete | None = ...) -> None: ... + +class GraphicFrame(Serialisable): + tagname: str + nvGraphicFramePr: Incomplete + xfrm: Incomplete + graphic: Incomplete + macro: Incomplete + fPublished: Incomplete + __elements__: Incomplete + def __init__( + self, + nvGraphicFramePr: Incomplete | None = ..., + xfrm: Incomplete | None = ..., + graphic: Incomplete | None = ..., + macro: Incomplete | None = ..., + fPublished: Incomplete | None = ..., + ) -> None: ... + +class GroupShape(Serialisable): + nvGrpSpPr: Incomplete + nonVisualProperties: Incomplete + grpSpPr: Incomplete + visualProperties: Incomplete + pic: Incomplete + __elements__: Incomplete + def __init__( + self, nvGrpSpPr: Incomplete | None = ..., grpSpPr: Incomplete | None = ..., pic: Incomplete | None = ... + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/image.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/image.pyi new file mode 100644 index 00000000..3864d30d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/image.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +class Image: + anchor: str + ref: Incomplete + format: Incomplete + def __init__(self, img) -> None: ... + @property + def path(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/line.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/line.pyi new file mode 100644 index 00000000..0c8ac33a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/line.pyi @@ -0,0 +1,66 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class LineEndProperties(Serialisable): + tagname: str + namespace: Incomplete + type: Incomplete + w: Incomplete + len: Incomplete + def __init__(self, type: Incomplete | None = ..., w: Incomplete | None = ..., len: Incomplete | None = ...) -> None: ... + +class DashStop(Serialisable): + tagname: str + namespace: Incomplete + d: Incomplete + length: Incomplete + sp: Incomplete + space: Incomplete + def __init__(self, d: int = ..., sp: int = ...) -> None: ... + +class DashStopList(Serialisable): + ds: Incomplete + def __init__(self, ds: Incomplete | None = ...) -> None: ... + +class LineProperties(Serialisable): + tagname: str + namespace: Incomplete + w: Incomplete + width: Incomplete + cap: Incomplete + cmpd: Incomplete + algn: Incomplete + noFill: Incomplete + solidFill: Incomplete + gradFill: Incomplete + pattFill: Incomplete + prstDash: Incomplete + dashStyle: Incomplete + custDash: Incomplete + round: Incomplete + bevel: Incomplete + miter: Incomplete + headEnd: Incomplete + tailEnd: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + w: Incomplete | None = ..., + cap: Incomplete | None = ..., + cmpd: Incomplete | None = ..., + algn: Incomplete | None = ..., + noFill: Incomplete | None = ..., + solidFill: Incomplete | None = ..., + gradFill: Incomplete | None = ..., + pattFill: Incomplete | None = ..., + prstDash: Incomplete | None = ..., + custDash: Incomplete | None = ..., + round: Incomplete | None = ..., + bevel: Incomplete | None = ..., + miter: Incomplete | None = ..., + headEnd: Incomplete | None = ..., + tailEnd: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/picture.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/picture.pyi new file mode 100644 index 00000000..9f916d77 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/picture.pyi @@ -0,0 +1,72 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class PictureLocking(Serialisable): + tagname: str + namespace: Incomplete + noCrop: Incomplete + noGrp: Incomplete + noSelect: Incomplete + noRot: Incomplete + noChangeAspect: Incomplete + noMove: Incomplete + noResize: Incomplete + noEditPoints: Incomplete + noAdjustHandles: Incomplete + noChangeArrowheads: Incomplete + noChangeShapeType: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + noCrop: Incomplete | None = ..., + noGrp: Incomplete | None = ..., + noSelect: Incomplete | None = ..., + noRot: Incomplete | None = ..., + noChangeAspect: Incomplete | None = ..., + noMove: Incomplete | None = ..., + noResize: Incomplete | None = ..., + noEditPoints: Incomplete | None = ..., + noAdjustHandles: Incomplete | None = ..., + noChangeArrowheads: Incomplete | None = ..., + noChangeShapeType: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class NonVisualPictureProperties(Serialisable): + tagname: str + preferRelativeResize: Incomplete + picLocks: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, preferRelativeResize: Incomplete | None = ..., picLocks: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... + +class PictureNonVisual(Serialisable): + tagname: str + cNvPr: Incomplete + cNvPicPr: Incomplete + __elements__: Incomplete + def __init__(self, cNvPr: Incomplete | None = ..., cNvPicPr: Incomplete | None = ...) -> None: ... + +class PictureFrame(Serialisable): + tagname: str + macro: Incomplete + fPublished: Incomplete + nvPicPr: Incomplete + blipFill: Incomplete + spPr: Incomplete + graphicalProperties: Incomplete + style: Incomplete + __elements__: Incomplete + def __init__( + self, + macro: Incomplete | None = ..., + fPublished: Incomplete | None = ..., + nvPicPr: Incomplete | None = ..., + blipFill: Incomplete | None = ..., + spPr: Incomplete | None = ..., + style: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/properties.pyi new file mode 100644 index 00000000..a7fdff46 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/properties.pyi @@ -0,0 +1,97 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class GroupShapeProperties(Serialisable): + tagname: str + bwMode: Incomplete + xfrm: Incomplete + scene3d: Incomplete + extLst: Incomplete + def __init__( + self, + bwMode: Incomplete | None = ..., + xfrm: Incomplete | None = ..., + scene3d: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
+ +class GroupLocking(Serialisable): + tagname: str + namespace: Incomplete + noGrp: Incomplete + noUngrp: Incomplete + noSelect: Incomplete + noRot: Incomplete + noChangeAspect: Incomplete + noMove: Incomplete + noResize: Incomplete + noChangeArrowheads: Incomplete + noEditPoints: Incomplete + noAdjustHandles: Incomplete + noChangeShapeType: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + noGrp: Incomplete | None = ..., + noUngrp: Incomplete | None = ..., + noSelect: Incomplete | None = ..., + noRot: Incomplete | None = ..., + noChangeAspect: Incomplete | None = ..., + noChangeArrowheads: Incomplete | None = ..., + noMove: Incomplete | None = ..., + noResize: Incomplete | None = ..., + noEditPoints: Incomplete | None = ..., + noAdjustHandles: Incomplete | None = ..., + noChangeShapeType: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class NonVisualGroupDrawingShapeProps(Serialisable): + tagname: str + grpSpLocks: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, grpSpLocks: Incomplete | None = ..., extLst: Incomplete | None = ...) -> None: ... + +class NonVisualDrawingShapeProps(Serialisable): + tagname: str + spLocks: Incomplete + txBax: Incomplete + extLst: Incomplete + __elements__: Incomplete + txBox: Incomplete + def __init__( + self, spLocks: Incomplete | None = ..., txBox: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... + +class NonVisualDrawingProps(Serialisable): + tagname: str + id: Incomplete + name: Incomplete + descr: Incomplete + hidden: Incomplete + title: Incomplete + hlinkClick: Incomplete + hlinkHover: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + name: Incomplete | None = ..., + descr: Incomplete | None = ..., + hidden: Incomplete | None = ..., + title: Incomplete | None = ..., + hlinkClick: Incomplete | None = ..., + hlinkHover: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class NonVisualGroupShape(Serialisable): + tagname: str + cNvPr: Incomplete + cNvGrpSpPr: Incomplete + __elements__: Incomplete + def __init__(self, cNvPr: Incomplete | None = ..., cNvGrpSpPr: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/relation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/relation.pyi new file mode 100644 index 00000000..05c831ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/relation.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class ChartRelation(Serialisable): + tagname: str + namespace: Incomplete + id: Incomplete + def __init__(self, id) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/spreadsheet_drawing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/spreadsheet_drawing.pyi new file mode 100644 index 00000000..3e1de73b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/spreadsheet_drawing.pyi @@ -0,0 +1,98 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class AnchorClientData(Serialisable): + fLocksWithSheet: Incomplete + fPrintsWithSheet: Incomplete + def __init__(self, fLocksWithSheet: Incomplete | None = ..., fPrintsWithSheet: Incomplete | None = ...) -> None: ... + +class AnchorMarker(Serialisable): + tagname: str + col: Incomplete + colOff: Incomplete + row: Incomplete + rowOff: Incomplete + def __init__(self, col: int = ..., colOff: int = ..., row: int = ..., rowOff: int = ...) -> None: ... + +class _AnchorBase(Serialisable): + sp: Incomplete + shape: Incomplete + grpSp: Incomplete + groupShape: Incomplete + graphicFrame: Incomplete + cxnSp: Incomplete + connectionShape: Incomplete + pic: Incomplete + contentPart: Incomplete + clientData: Incomplete + __elements__: Incomplete + def __init__( + self, + clientData: Incomplete | None = ..., + sp: Incomplete | None = ..., + grpSp: Incomplete | None = ..., + graphicFrame: Incomplete | None = ..., + cxnSp: Incomplete | None = ..., + pic: Incomplete | None = ..., + contentPart: Incomplete | None = ..., + ) -> None: ... + +class AbsoluteAnchor(_AnchorBase): + tagname: str + pos: Incomplete + ext: Incomplete + sp: Incomplete + grpSp: Incomplete + graphicFrame: Incomplete + cxnSp: Incomplete + pic: Incomplete + contentPart: Incomplete + clientData: Incomplete + __elements__: Incomplete + def __init__(self, pos: Incomplete | None = ..., ext: Incomplete | None = ..., **kw) -> None: ... + +class OneCellAnchor(_AnchorBase): + tagname: str + ext: Incomplete + sp: Incomplete + grpSp: Incomplete + graphicFrame: Incomplete + cxnSp: Incomplete + pic: Incomplete + contentPart: Incomplete + clientData: Incomplete + __elements__: Incomplete + def __init__(self, _from: Incomplete | None = ..., ext: Incomplete | None = ..., **kw) -> None: ... + +class TwoCellAnchor(_AnchorBase): + tagname: str + editAs: Incomplete + to: Incomplete + sp: Incomplete + grpSp: Incomplete + graphicFrame: Incomplete + cxnSp: Incomplete + pic: Incomplete + contentPart: Incomplete + clientData: Incomplete + __elements__: Incomplete + def __init__( + self, editAs: Incomplete | None = ..., _from: Incomplete | None = ..., to: Incomplete | None = ..., **kw + ) -> None: ... + +class SpreadsheetDrawing(Serialisable): + tagname: str + mime_type: str + PartName: str + twoCellAnchor: Incomplete + oneCellAnchor: Incomplete + absoluteAnchor: Incomplete + __elements__: Incomplete + charts: Incomplete + images: Incomplete + def __init__(self, twoCellAnchor=..., oneCellAnchor=..., absoluteAnchor=...) -> None: ... + def __hash__(self) -> int: ... + def __bool__(self) -> bool: ... + @property + def path(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/text.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/text.pyi new file mode 100644 index 00000000..8ace8780 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/text.pyi @@ -0,0 +1,386 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class EmbeddedWAVAudioFile(Serialisable): # type: ignore[misc] + name: Incomplete + def __init__(self, name: Incomplete | None = ...) -> None: ... + +class Hyperlink(Serialisable): + tagname: str + namespace: Incomplete + invalidUrl: Incomplete + action: Incomplete + tgtFrame: Incomplete + tooltip: Incomplete + history: Incomplete + highlightClick: Incomplete + endSnd: Incomplete + snd: Incomplete + extLst: Incomplete + id: Incomplete + __elements__: Incomplete + def __init__( + self, + invalidUrl: Incomplete | None = ..., + action: Incomplete | None = ..., + tgtFrame: Incomplete | None = ..., + tooltip: Incomplete | None = ..., + history: Incomplete | None = ..., + highlightClick: Incomplete | None = ..., + endSnd: Incomplete | None = ..., + snd: Incomplete | None = ..., + extLst: Incomplete | None = ..., + id: Incomplete | None = ..., + ) -> None: ... + +class Font(Serialisable): + tagname: str + namespace: Incomplete + typeface: Incomplete + panose: Incomplete + pitchFamily: Incomplete + charset: Incomplete + def __init__( + self, + typeface: Incomplete | None = ..., + panose: Incomplete | None = ..., + pitchFamily: Incomplete | None = ..., + charset: Incomplete | None = ..., + ) -> None: ... + +class CharacterProperties(Serialisable): + tagname: str + namespace: Incomplete + kumimoji: Incomplete + lang: Incomplete + altLang: Incomplete + sz: Incomplete + b: Incomplete + i: Incomplete + u: Incomplete + strike: Incomplete + kern: Incomplete + cap: Incomplete + spc: Incomplete + normalizeH: Incomplete + baseline: Incomplete + noProof: Incomplete + dirty: Incomplete + err: Incomplete + smtClean: Incomplete + smtId: Incomplete + bmk: Incomplete + ln: Incomplete + highlight: Incomplete + latin: Incomplete + ea: Incomplete + cs: Incomplete + sym: Incomplete + hlinkClick: Incomplete + hlinkMouseOver: Incomplete + rtl: Incomplete + extLst: Incomplete + noFill: Incomplete + solidFill: Incomplete + gradFill: Incomplete + blipFill: Incomplete + pattFill: Incomplete + grpFill: Incomplete + effectLst: Incomplete + effectDag: Incomplete + uLnTx: Incomplete + uLn: Incomplete + uFillTx: Incomplete + uFill: Incomplete + __elements__: Incomplete + def __init__( + self, + kumimoji: Incomplete | None = ..., + lang: Incomplete | None = ..., + altLang: Incomplete | None = ..., + sz: Incomplete | None = ..., + b: Incomplete | None = ..., + i: Incomplete | None = ..., + u: Incomplete | None = ..., + strike: Incomplete | None = ..., + kern: Incomplete | None = ..., + cap: Incomplete | None = ..., + spc: Incomplete | None = ..., + normalizeH: Incomplete | None = ..., + baseline: Incomplete | None = ..., + noProof: Incomplete | None = ..., + dirty: Incomplete | None = ..., + err: Incomplete | None = ..., + smtClean: Incomplete | None = ..., + smtId: Incomplete | None = ..., + bmk: Incomplete | None = ..., + ln: Incomplete | None = ..., + highlight: Incomplete | None = ..., + latin: Incomplete | None = ..., + ea: Incomplete | None = ..., + cs: Incomplete | None = ..., + sym: Incomplete | None = ..., 
+ hlinkClick: Incomplete | None = ..., + hlinkMouseOver: Incomplete | None = ..., + rtl: Incomplete | None = ..., + extLst: Incomplete | None = ..., + noFill: Incomplete | None = ..., + solidFill: Incomplete | None = ..., + gradFill: Incomplete | None = ..., + blipFill: Incomplete | None = ..., + pattFill: Incomplete | None = ..., + grpFill: Incomplete | None = ..., + effectLst: Incomplete | None = ..., + effectDag: Incomplete | None = ..., + uLnTx: Incomplete | None = ..., + uLn: Incomplete | None = ..., + uFillTx: Incomplete | None = ..., + uFill: Incomplete | None = ..., + ) -> None: ... + +class TabStop(Serialisable): # type: ignore[misc] + pos: Incomplete + algn: Incomplete + def __init__(self, pos: Incomplete | None = ..., algn: Incomplete | None = ...) -> None: ... + +class TabStopList(Serialisable): # type: ignore[misc] + tab: Incomplete + def __init__(self, tab: Incomplete | None = ...) -> None: ... + +class Spacing(Serialisable): + spcPct: Incomplete + spcPts: Incomplete + __elements__: Incomplete + def __init__(self, spcPct: Incomplete | None = ..., spcPts: Incomplete | None = ...) -> None: ... + +class AutonumberBullet(Serialisable): + type: Incomplete + startAt: Incomplete + def __init__(self, type: Incomplete | None = ..., startAt: Incomplete | None = ...) -> None: ... + +class ParagraphProperties(Serialisable): + tagname: str + namespace: Incomplete + marL: Incomplete + marR: Incomplete + lvl: Incomplete + indent: Incomplete + algn: Incomplete + defTabSz: Incomplete + rtl: Incomplete + eaLnBrk: Incomplete + fontAlgn: Incomplete + latinLnBrk: Incomplete + hangingPunct: Incomplete + lnSpc: Incomplete + spcBef: Incomplete + spcAft: Incomplete + tabLst: Incomplete + defRPr: Incomplete + extLst: Incomplete + buClrTx: Incomplete + buClr: Incomplete + buSzTx: Incomplete + buSzPct: Incomplete + buSzPts: Incomplete + buFontTx: Incomplete + buFont: Incomplete + buNone: Incomplete + buAutoNum: Incomplete + buChar: Incomplete + buBlip: Incomplete + __elements__: Incomplete + def __init__( + self, + marL: Incomplete | None = ..., + marR: Incomplete | None = ..., + lvl: Incomplete | None = ..., + indent: Incomplete | None = ..., + algn: Incomplete | None = ..., + defTabSz: Incomplete | None = ..., + rtl: Incomplete | None = ..., + eaLnBrk: Incomplete | None = ..., + fontAlgn: Incomplete | None = ..., + latinLnBrk: Incomplete | None = ..., + hangingPunct: Incomplete | None = ..., + lnSpc: Incomplete | None = ..., + spcBef: Incomplete | None = ..., + spcAft: Incomplete | None = ..., + tabLst: Incomplete | None = ..., + defRPr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + buClrTx: Incomplete | None = ..., + buClr: Incomplete | None = ..., + buSzTx: Incomplete | None = ..., + buSzPct: Incomplete | None = ..., + buSzPts: Incomplete | None = ..., + buFontTx: Incomplete | None = ..., + buFont: Incomplete | None = ..., + buNone: Incomplete | None = ..., + buAutoNum: Incomplete | None = ..., + buChar: Incomplete | None = ..., + buBlip: Incomplete | None = ..., + ) -> None: ... 
+ +class ListStyle(Serialisable): + tagname: str + namespace: Incomplete + defPPr: Incomplete + lvl1pPr: Incomplete + lvl2pPr: Incomplete + lvl3pPr: Incomplete + lvl4pPr: Incomplete + lvl5pPr: Incomplete + lvl6pPr: Incomplete + lvl7pPr: Incomplete + lvl8pPr: Incomplete + lvl9pPr: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + defPPr: Incomplete | None = ..., + lvl1pPr: Incomplete | None = ..., + lvl2pPr: Incomplete | None = ..., + lvl3pPr: Incomplete | None = ..., + lvl4pPr: Incomplete | None = ..., + lvl5pPr: Incomplete | None = ..., + lvl6pPr: Incomplete | None = ..., + lvl7pPr: Incomplete | None = ..., + lvl8pPr: Incomplete | None = ..., + lvl9pPr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class RegularTextRun(Serialisable): + tagname: str + namespace: Incomplete + rPr: Incomplete + properties: Incomplete + t: Incomplete + value: Incomplete + __elements__: Incomplete + def __init__(self, rPr: Incomplete | None = ..., t: str = ...) -> None: ... + +class LineBreak(Serialisable): + tagname: str + namespace: Incomplete + rPr: Incomplete + __elements__: Incomplete + def __init__(self, rPr: Incomplete | None = ...) -> None: ... + +class TextField(Serialisable): + id: Incomplete + type: Incomplete + rPr: Incomplete + pPr: Incomplete + t: Incomplete + __elements__: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + type: Incomplete | None = ..., + rPr: Incomplete | None = ..., + pPr: Incomplete | None = ..., + t: Incomplete | None = ..., + ) -> None: ... + +class Paragraph(Serialisable): + tagname: str + namespace: Incomplete + pPr: Incomplete + properties: Incomplete + endParaRPr: Incomplete + r: Incomplete + text: Incomplete + br: Incomplete + fld: Incomplete + __elements__: Incomplete + def __init__( + self, + pPr: Incomplete | None = ..., + endParaRPr: Incomplete | None = ..., + r: Incomplete | None = ..., + br: Incomplete | None = ..., + fld: Incomplete | None = ..., + ) -> None: ... + +class GeomGuide(Serialisable): + name: Incomplete + fmla: Incomplete + def __init__(self, name: Incomplete | None = ..., fmla: Incomplete | None = ...) -> None: ... + +class GeomGuideList(Serialisable): + gd: Incomplete + def __init__(self, gd: Incomplete | None = ...) -> None: ... + +class PresetTextShape(Serialisable): + prst: Incomplete + avLst: Incomplete + def __init__(self, prst: Incomplete | None = ..., avLst: Incomplete | None = ...) -> None: ... + +class TextNormalAutofit(Serialisable): + fontScale: Incomplete + lnSpcReduction: Incomplete + def __init__(self, fontScale: Incomplete | None = ..., lnSpcReduction: Incomplete | None = ...) -> None: ... 
+ +class RichTextProperties(Serialisable): + tagname: str + namespace: Incomplete + rot: Incomplete + spcFirstLastPara: Incomplete + vertOverflow: Incomplete + horzOverflow: Incomplete + vert: Incomplete + wrap: Incomplete + lIns: Incomplete + tIns: Incomplete + rIns: Incomplete + bIns: Incomplete + numCol: Incomplete + spcCol: Incomplete + rtlCol: Incomplete + fromWordArt: Incomplete + anchor: Incomplete + anchorCtr: Incomplete + forceAA: Incomplete + upright: Incomplete + compatLnSpc: Incomplete + prstTxWarp: Incomplete + scene3d: Incomplete + extLst: Incomplete + noAutofit: Incomplete + normAutofit: Incomplete + spAutoFit: Incomplete + flatTx: Incomplete + __elements__: Incomplete + def __init__( + self, + rot: Incomplete | None = ..., + spcFirstLastPara: Incomplete | None = ..., + vertOverflow: Incomplete | None = ..., + horzOverflow: Incomplete | None = ..., + vert: Incomplete | None = ..., + wrap: Incomplete | None = ..., + lIns: Incomplete | None = ..., + tIns: Incomplete | None = ..., + rIns: Incomplete | None = ..., + bIns: Incomplete | None = ..., + numCol: Incomplete | None = ..., + spcCol: Incomplete | None = ..., + rtlCol: Incomplete | None = ..., + fromWordArt: Incomplete | None = ..., + anchor: Incomplete | None = ..., + anchorCtr: Incomplete | None = ..., + forceAA: Incomplete | None = ..., + upright: Incomplete | None = ..., + compatLnSpc: Incomplete | None = ..., + prstTxWarp: Incomplete | None = ..., + scene3d: Incomplete | None = ..., + extLst: Incomplete | None = ..., + noAutofit: Incomplete | None = ..., + normAutofit: Incomplete | None = ..., + spAutoFit: Incomplete | None = ..., + flatTx: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/xdr.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/xdr.pyi new file mode 100644 index 00000000..c77a2b7e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/drawing/xdr.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +from .geometry import Point2D, PositiveSize2D, Transform2D + +class XDRPoint2D(Point2D): + namespace: Incomplete + x: Incomplete + y: Incomplete + +class XDRPositiveSize2D(PositiveSize2D): + namespace: Incomplete + cx: Incomplete + cy: Incomplete + +class XDRTransform2D(Transform2D): + namespace: Incomplete + rot: Incomplete + flipH: Incomplete + flipV: Incomplete + off: Incomplete + ext: Incomplete + chOff: Incomplete + chExt: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formatting/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formatting/__init__.pyi new file mode 100644 index 00000000..c4e04ce6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formatting/__init__.pyi @@ -0,0 +1 @@ +from .rule import Rule as Rule diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formatting/formatting.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formatting/formatting.pyi new file mode 100644 index 00000000..e4dfa0bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formatting/formatting.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete + +from 
openpyxl.descriptors.serialisable import Serialisable + +class ConditionalFormatting(Serialisable): + tagname: str + sqref: Incomplete + cells: Incomplete + pivot: Incomplete + cfRule: Incomplete + rules: Incomplete + def __init__(self, sqref=..., pivot: Incomplete | None = ..., cfRule=..., extLst: Incomplete | None = ...) -> None: ... + def __eq__(self, other): ... + def __hash__(self) -> int: ... + def __contains__(self, coord): ... + +class ConditionalFormattingList: + max_priority: int + def __init__(self) -> None: ... + def add(self, range_string, cfRule) -> None: ... + def __bool__(self) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def __getitem__(self, key): ... + def __delitem__(self, key) -> None: ... + def __setitem__(self, key, rule) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formatting/rule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formatting/rule.pyi new file mode 100644 index 00000000..5807e1f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formatting/rule.pyi @@ -0,0 +1,154 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import Float +from openpyxl.descriptors.serialisable import Serialisable + +class ValueDescriptor(Float): + expected_type: Incomplete + def __set__(self, instance, value) -> None: ... + +class FormatObject(Serialisable): + tagname: str + type: Incomplete + val: Incomplete + gte: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, type, val: Incomplete | None = ..., gte: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... + +class RuleType(Serialisable): # type: ignore[misc] + cfvo: Incomplete + +class IconSet(RuleType): + tagname: str + iconSet: Incomplete + showValue: Incomplete + percent: Incomplete + reverse: Incomplete + __elements__: Incomplete + cfvo: Incomplete + def __init__( + self, + iconSet: Incomplete | None = ..., + showValue: Incomplete | None = ..., + percent: Incomplete | None = ..., + reverse: Incomplete | None = ..., + cfvo: Incomplete | None = ..., + ) -> None: ... + +class DataBar(RuleType): + tagname: str + minLength: Incomplete + maxLength: Incomplete + showValue: Incomplete + color: Incomplete + __elements__: Incomplete + cfvo: Incomplete + def __init__( + self, + minLength: Incomplete | None = ..., + maxLength: Incomplete | None = ..., + showValue: Incomplete | None = ..., + cfvo: Incomplete | None = ..., + color: Incomplete | None = ..., + ) -> None: ... + +class ColorScale(RuleType): + tagname: str + color: Incomplete + __elements__: Incomplete + cfvo: Incomplete + def __init__(self, cfvo: Incomplete | None = ..., color: Incomplete | None = ...) -> None: ... 
+ +class Rule(Serialisable): + tagname: str + type: Incomplete + dxfId: Incomplete + priority: Incomplete + stopIfTrue: Incomplete + aboveAverage: Incomplete + percent: Incomplete + bottom: Incomplete + operator: Incomplete + text: Incomplete + timePeriod: Incomplete + rank: Incomplete + stdDev: Incomplete + equalAverage: Incomplete + formula: Incomplete + colorScale: Incomplete + dataBar: Incomplete + iconSet: Incomplete + extLst: Incomplete + dxf: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__( + self, + type, + dxfId: Incomplete | None = ..., + priority: int = ..., + stopIfTrue: Incomplete | None = ..., + aboveAverage: Incomplete | None = ..., + percent: Incomplete | None = ..., + bottom: Incomplete | None = ..., + operator: Incomplete | None = ..., + text: Incomplete | None = ..., + timePeriod: Incomplete | None = ..., + rank: Incomplete | None = ..., + stdDev: Incomplete | None = ..., + equalAverage: Incomplete | None = ..., + formula=..., + colorScale: Incomplete | None = ..., + dataBar: Incomplete | None = ..., + iconSet: Incomplete | None = ..., + extLst: Incomplete | None = ..., + dxf: Incomplete | None = ..., + ) -> None: ... + +def ColorScaleRule( + start_type: Incomplete | None = ..., + start_value: Incomplete | None = ..., + start_color: Incomplete | None = ..., + mid_type: Incomplete | None = ..., + mid_value: Incomplete | None = ..., + mid_color: Incomplete | None = ..., + end_type: Incomplete | None = ..., + end_value: Incomplete | None = ..., + end_color: Incomplete | None = ..., +): ... +def FormulaRule( + formula: Incomplete | None = ..., + stopIfTrue: Incomplete | None = ..., + font: Incomplete | None = ..., + border: Incomplete | None = ..., + fill: Incomplete | None = ..., +): ... +def CellIsRule( + operator: Incomplete | None = ..., + formula: Incomplete | None = ..., + stopIfTrue: Incomplete | None = ..., + font: Incomplete | None = ..., + border: Incomplete | None = ..., + fill: Incomplete | None = ..., +): ... +def IconSetRule( + icon_style: Incomplete | None = ..., + type: Incomplete | None = ..., + values: Incomplete | None = ..., + showValue: Incomplete | None = ..., + percent: Incomplete | None = ..., + reverse: Incomplete | None = ..., +): ... +def DataBarRule( + start_type: Incomplete | None = ..., + start_value: Incomplete | None = ..., + end_type: Incomplete | None = ..., + end_value: Incomplete | None = ..., + color: Incomplete | None = ..., + showValue: Incomplete | None = ..., + minLength: Incomplete | None = ..., + maxLength: Incomplete | None = ..., +): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formula/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formula/__init__.pyi new file mode 100644 index 00000000..c2decb33 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formula/__init__.pyi @@ -0,0 +1 @@ +from .tokenizer import Tokenizer as Tokenizer diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formula/tokenizer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formula/tokenizer.pyi new file mode 100644 index 00000000..16129f95 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formula/tokenizer.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete + +class TokenizerError(Exception): ... + +class Tokenizer: + SN_RE: Incomplete + WSPACE_RE: Incomplete + STRING_REGEXES: Incomplete + ERROR_CODES: Incomplete + TOKEN_ENDERS: str + formula: Incomplete + items: Incomplete + token_stack: Incomplete + offset: int + token: Incomplete + def __init__(self, formula) -> None: ... + def check_scientific_notation(self): ... + def assert_empty_token(self, can_follow=...) -> None: ... + def save_token(self) -> None: ... + def render(self): ... + +class Token: + LITERAL: str + OPERAND: str + FUNC: str + ARRAY: str + PAREN: str + SEP: str + OP_PRE: str + OP_IN: str + OP_POST: str + WSPACE: str + value: Incomplete + type: Incomplete + subtype: Incomplete + def __init__(self, value, type_, subtype: str = ...) -> None: ... + TEXT: str + NUMBER: str + LOGICAL: str + ERROR: str + RANGE: str + @classmethod + def make_operand(cls, value): ... + OPEN: str + CLOSE: str + @classmethod + def make_subexp(cls, value, func: bool = ...): ... + def get_closer(self): ... + ARG: str + ROW: str + @classmethod + def make_separator(cls, value): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formula/translate.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formula/translate.pyi new file mode 100644 index 00000000..0904e1e6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/formula/translate.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete + +class TranslatorError(Exception): ... + +class Translator: + tokenizer: Incomplete + def __init__(self, formula, origin) -> None: ... + def get_tokens(self): ... + ROW_RANGE_RE: Incomplete + COL_RANGE_RE: Incomplete + CELL_REF_RE: Incomplete + @staticmethod + def translate_row(row_str, rdelta): ... + @staticmethod + def translate_col(col_str, cdelta): ... + @staticmethod + def strip_ws_name(range_str): ... + @classmethod + def translate_range(cls, range_str, rdelta, cdelta): ... + def translate_formula(self, dest: Incomplete | None = ..., row_delta: int = ..., col_delta: int = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/core.pyi new file mode 100644 index 00000000..0fb87a72 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/core.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import DateTime +from openpyxl.descriptors.nested import NestedText +from openpyxl.descriptors.serialisable import Serialisable + +class NestedDateTime(DateTime, NestedText): + expected_type: Incomplete + def to_tree(self, tagname: Incomplete | None = ..., value: Incomplete | None = ..., namespace: Incomplete | None = ...): ... + +class QualifiedDateTime(NestedDateTime): + def to_tree(self, tagname: Incomplete | None = ..., value: Incomplete | None = ..., namespace: Incomplete | None = ...): ... + +class DocumentProperties(Serialisable): + tagname: str + namespace: Incomplete + category: Incomplete + contentStatus: Incomplete + keywords: Incomplete + lastModifiedBy: Incomplete + lastPrinted: Incomplete + revision: Incomplete + version: Incomplete + last_modified_by: Incomplete + subject: Incomplete + title: Incomplete + creator: Incomplete + description: Incomplete + identifier: Incomplete + language: Incomplete + created: Incomplete + modified: Incomplete + __elements__: Incomplete + def __init__( + self, + category: Incomplete | None = ..., + contentStatus: Incomplete | None = ..., + keywords: Incomplete | None = ..., + lastModifiedBy: Incomplete | None = ..., + lastPrinted: Incomplete | None = ..., + revision: Incomplete | None = ..., + version: Incomplete | None = ..., + created=..., + creator: str = ..., + description: Incomplete | None = ..., + identifier: Incomplete | None = ..., + language: Incomplete | None = ..., + modified=..., + subject: Incomplete | None = ..., + title: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/extended.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/extended.pyi new file mode 100644 index 00000000..644279d1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/extended.pyi @@ -0,0 +1,79 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +def get_version(): ... 
+ +class DigSigBlob(Serialisable): # type: ignore[misc] + __elements__: Incomplete + __attrs__: Incomplete + +class VectorLpstr(Serialisable): # type: ignore[misc] + __elements__: Incomplete + __attrs__: Incomplete + +class VectorVariant(Serialisable): # type: ignore[misc] + __elements__: Incomplete + __attrs__: Incomplete + +class ExtendedProperties(Serialisable): + tagname: str + Template: Incomplete + Manager: Incomplete + Company: Incomplete + Pages: Incomplete + Words: Incomplete + Characters: Incomplete + PresentationFormat: Incomplete + Lines: Incomplete + Paragraphs: Incomplete + Slides: Incomplete + Notes: Incomplete + TotalTime: Incomplete + HiddenSlides: Incomplete + MMClips: Incomplete + ScaleCrop: Incomplete + HeadingPairs: Incomplete + TitlesOfParts: Incomplete + LinksUpToDate: Incomplete + CharactersWithSpaces: Incomplete + SharedDoc: Incomplete + HyperlinkBase: Incomplete + HLinks: Incomplete + HyperlinksChanged: Incomplete + DigSig: Incomplete + Application: Incomplete + AppVersion: Incomplete + DocSecurity: Incomplete + __elements__: Incomplete + def __init__( + self, + Template: Incomplete | None = ..., + Manager: Incomplete | None = ..., + Company: Incomplete | None = ..., + Pages: Incomplete | None = ..., + Words: Incomplete | None = ..., + Characters: Incomplete | None = ..., + PresentationFormat: Incomplete | None = ..., + Lines: Incomplete | None = ..., + Paragraphs: Incomplete | None = ..., + Slides: Incomplete | None = ..., + Notes: Incomplete | None = ..., + TotalTime: Incomplete | None = ..., + HiddenSlides: Incomplete | None = ..., + MMClips: Incomplete | None = ..., + ScaleCrop: Incomplete | None = ..., + HeadingPairs: Incomplete | None = ..., + TitlesOfParts: Incomplete | None = ..., + LinksUpToDate: Incomplete | None = ..., + CharactersWithSpaces: Incomplete | None = ..., + SharedDoc: Incomplete | None = ..., + HyperlinkBase: Incomplete | None = ..., + HLinks: Incomplete | None = ..., + HyperlinksChanged: Incomplete | None = ..., + DigSig: Incomplete | None = ..., + Application: str = ..., + AppVersion: Incomplete | None = ..., + DocSecurity: Incomplete | None = ..., + ) -> None: ... + def to_tree(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/interface.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/interface.pyi new file mode 100644 index 00000000..d000905f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/interface.pyi @@ -0,0 +1,6 @@ +from abc import ABC, ABCMeta, abstractmethod + +class ISerialisableFile(ABC, metaclass=ABCMeta): + @property + @abstractmethod + def id(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/manifest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/manifest.pyi new file mode 100644 index 00000000..78ee8678 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/manifest.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +from openpyxl.descriptors.serialisable import Serialisable + +mimetypes: Incomplete + +class FileExtension(Serialisable): + tagname: str + Extension: Incomplete + ContentType: Incomplete + def __init__(self, Extension, ContentType) -> None: ... 
+ +class Override(Serialisable): + tagname: str + PartName: Incomplete + ContentType: Incomplete + def __init__(self, PartName, ContentType) -> None: ... + +DEFAULT_TYPES: Incomplete +DEFAULT_OVERRIDE: Incomplete + +class Manifest(Serialisable): + tagname: str + Default: Incomplete + Override: Incomplete + path: str + __elements__: Incomplete + def __init__(self, Default=..., Override=...) -> None: ... + @property + def filenames(self): ... + @property + def extensions(self): ... + def to_tree(self): ... + def __contains__(self, content_type): ... + def find(self, content_type): ... + def findall(self, content_type) -> Generator[Incomplete, None, None]: ... + def append(self, obj) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/relationship.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/relationship.pyi new file mode 100644 index 00000000..c8447aa9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/relationship.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +from openpyxl.descriptors.serialisable import Serialisable + +class Relationship(Serialisable): + tagname: str + Type: Incomplete + Target: Incomplete + target: Incomplete + TargetMode: Incomplete + Id: Incomplete + id: Incomplete + def __init__( + self, + Id: Incomplete | None = ..., + Type: Incomplete | None = ..., + type: Incomplete | None = ..., + Target: Incomplete | None = ..., + TargetMode: Incomplete | None = ..., + ) -> None: ... + +class RelationshipList(Serialisable): + tagname: str + Relationship: Incomplete + def __init__(self, Relationship=...) -> None: ... + def append(self, value) -> None: ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def find(self, content_type) -> Generator[Incomplete, None, None]: ... + def __getitem__(self, key): ... + def to_tree(self): ... + +def get_rels_path(path): ... +def get_dependents(archive, filename): ... +def get_rel(archive, deps, id: Incomplete | None = ..., cls: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/workbook.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/workbook.pyi new file mode 100644 index 00000000..37bc20d1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/packaging/workbook.pyi @@ -0,0 +1,88 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class FileRecoveryProperties(Serialisable): + tagname: str + autoRecover: Incomplete + crashSave: Incomplete + dataExtractLoad: Incomplete + repairLoad: Incomplete + def __init__( + self, + autoRecover: Incomplete | None = ..., + crashSave: Incomplete | None = ..., + dataExtractLoad: Incomplete | None = ..., + repairLoad: Incomplete | None = ..., + ) -> None: ... + +class ChildSheet(Serialisable): + tagname: str + name: Incomplete + sheetId: Incomplete + state: Incomplete + id: Incomplete + def __init__( + self, name: Incomplete | None = ..., sheetId: Incomplete | None = ..., state: str = ..., id: Incomplete | None = ... + ) -> None: ... 
+ +class PivotCache(Serialisable): + tagname: str + cacheId: Incomplete + id: Incomplete + def __init__(self, cacheId: Incomplete | None = ..., id: Incomplete | None = ...) -> None: ... + +class WorkbookPackage(Serialisable): + tagname: str + conformance: Incomplete + fileVersion: Incomplete + fileSharing: Incomplete + workbookPr: Incomplete + properties: Incomplete + workbookProtection: Incomplete + bookViews: Incomplete + sheets: Incomplete + functionGroups: Incomplete + externalReferences: Incomplete + definedNames: Incomplete + calcPr: Incomplete + oleSize: Incomplete + customWorkbookViews: Incomplete + pivotCaches: Incomplete + smartTagPr: Incomplete + smartTagTypes: Incomplete + webPublishing: Incomplete + fileRecoveryPr: Incomplete + webPublishObjects: Incomplete + extLst: Incomplete + Ignorable: Incomplete + __elements__: Incomplete + def __init__( + self, + conformance: Incomplete | None = ..., + fileVersion: Incomplete | None = ..., + fileSharing: Incomplete | None = ..., + workbookPr: Incomplete | None = ..., + workbookProtection: Incomplete | None = ..., + bookViews=..., + sheets=..., + functionGroups: Incomplete | None = ..., + externalReferences=..., + definedNames: Incomplete | None = ..., + calcPr: Incomplete | None = ..., + oleSize: Incomplete | None = ..., + customWorkbookViews=..., + pivotCaches=..., + smartTagPr: Incomplete | None = ..., + smartTagTypes: Incomplete | None = ..., + webPublishing: Incomplete | None = ..., + fileRecoveryPr: Incomplete | None = ..., + webPublishObjects: Incomplete | None = ..., + extLst: Incomplete | None = ..., + Ignorable: Incomplete | None = ..., + ) -> None: ... + def to_tree(self): ... + @property + def active(self): ... + @property + def pivot_caches(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/cache.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/cache.pyi new file mode 100644 index 00000000..3c490640 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/cache.pyi @@ -0,0 +1,596 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class MeasureDimensionMap(Serialisable): + tagname: str + measureGroup: Incomplete + dimension: Incomplete + def __init__(self, measureGroup: Incomplete | None = ..., dimension: Incomplete | None = ...) -> None: ... + +class MeasureGroup(Serialisable): + tagname: str + name: Incomplete + caption: Incomplete + def __init__(self, name: Incomplete | None = ..., caption: Incomplete | None = ...) -> None: ... + +class PivotDimension(Serialisable): + tagname: str + measure: Incomplete + name: Incomplete + uniqueName: Incomplete + caption: Incomplete + def __init__( + self, + measure: Incomplete | None = ..., + name: Incomplete | None = ..., + uniqueName: Incomplete | None = ..., + caption: Incomplete | None = ..., + ) -> None: ... 
+ +class CalculatedMember(Serialisable): + tagname: str + name: Incomplete + mdx: Incomplete + memberName: Incomplete + hierarchy: Incomplete + parent: Incomplete + solveOrder: Incomplete + set: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + mdx: Incomplete | None = ..., + memberName: Incomplete | None = ..., + hierarchy: Incomplete | None = ..., + parent: Incomplete | None = ..., + solveOrder: Incomplete | None = ..., + set: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class CalculatedItem(Serialisable): + tagname: str + field: Incomplete + formula: Incomplete + pivotArea: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + field: Incomplete | None = ..., + formula: Incomplete | None = ..., + pivotArea: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class ServerFormat(Serialisable): + tagname: str + culture: Incomplete + format: Incomplete + def __init__(self, culture: Incomplete | None = ..., format: Incomplete | None = ...) -> None: ... + +class ServerFormatList(Serialisable): + tagname: str + serverFormat: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., serverFormat: Incomplete | None = ...) -> None: ... + @property + def count(self): ... + +class Query(Serialisable): + tagname: str + mdx: Incomplete + tpls: Incomplete + __elements__: Incomplete + def __init__(self, mdx: Incomplete | None = ..., tpls: Incomplete | None = ...) -> None: ... + +class QueryCache(Serialisable): + tagname: str + count: Incomplete + query: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., query: Incomplete | None = ...) -> None: ... + +class OLAPSet(Serialisable): + tagname: str + count: Incomplete + maxRank: Incomplete + setDefinition: Incomplete + sortType: Incomplete + queryFailed: Incomplete + tpls: Incomplete + sortByTuple: Incomplete + __elements__: Incomplete + def __init__( + self, + count: Incomplete | None = ..., + maxRank: Incomplete | None = ..., + setDefinition: Incomplete | None = ..., + sortType: Incomplete | None = ..., + queryFailed: Incomplete | None = ..., + tpls: Incomplete | None = ..., + sortByTuple: Incomplete | None = ..., + ) -> None: ... + +class OLAPSets(Serialisable): # type: ignore[misc] + count: Incomplete + set: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., set: Incomplete | None = ...) -> None: ... + +class PCDSDTCEntries(Serialisable): + tagname: str + count: Incomplete + m: Incomplete + n: Incomplete + e: Incomplete + s: Incomplete + __elements__: Incomplete + def __init__( + self, + count: Incomplete | None = ..., + m: Incomplete | None = ..., + n: Incomplete | None = ..., + e: Incomplete | None = ..., + s: Incomplete | None = ..., + ) -> None: ... + +class TupleCache(Serialisable): + tagname: str + entries: Incomplete + sets: Incomplete + queryCache: Incomplete + serverFormats: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + entries: Incomplete | None = ..., + sets: Incomplete | None = ..., + queryCache: Incomplete | None = ..., + serverFormats: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
+ +class PCDKPI(Serialisable): + tagname: str + uniqueName: Incomplete + caption: Incomplete + displayFolder: Incomplete + measureGroup: Incomplete + parent: Incomplete + value: Incomplete + goal: Incomplete + status: Incomplete + trend: Incomplete + weight: Incomplete + time: Incomplete + def __init__( + self, + uniqueName: Incomplete | None = ..., + caption: Incomplete | None = ..., + displayFolder: Incomplete | None = ..., + measureGroup: Incomplete | None = ..., + parent: Incomplete | None = ..., + value: Incomplete | None = ..., + goal: Incomplete | None = ..., + status: Incomplete | None = ..., + trend: Incomplete | None = ..., + weight: Incomplete | None = ..., + time: Incomplete | None = ..., + ) -> None: ... + +class GroupMember(Serialisable): + tagname: str + uniqueName: Incomplete + group: Incomplete + def __init__(self, uniqueName: Incomplete | None = ..., group: Incomplete | None = ...) -> None: ... + +class GroupMembers(Serialisable): # type: ignore[misc] + count: Incomplete + groupMember: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., groupMember: Incomplete | None = ...) -> None: ... + +class LevelGroup(Serialisable): + tagname: str + name: Incomplete + uniqueName: Incomplete + caption: Incomplete + uniqueParent: Incomplete + id: Incomplete + groupMembers: Incomplete + __elements__: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + uniqueName: Incomplete | None = ..., + caption: Incomplete | None = ..., + uniqueParent: Incomplete | None = ..., + id: Incomplete | None = ..., + groupMembers: Incomplete | None = ..., + ) -> None: ... + +class Groups(Serialisable): + tagname: str + count: Incomplete + group: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., group: Incomplete | None = ...) -> None: ... + +class GroupLevel(Serialisable): + tagname: str + uniqueName: Incomplete + caption: Incomplete + user: Incomplete + customRollUp: Incomplete + groups: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + uniqueName: Incomplete | None = ..., + caption: Incomplete | None = ..., + user: Incomplete | None = ..., + customRollUp: Incomplete | None = ..., + groups: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class GroupLevels(Serialisable): # type: ignore[misc] + count: Incomplete + groupLevel: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., groupLevel: Incomplete | None = ...) -> None: ... + +class FieldUsage(Serialisable): + tagname: str + x: Incomplete + def __init__(self, x: Incomplete | None = ...) -> None: ... + +class FieldsUsage(Serialisable): # type: ignore[misc] + count: Incomplete + fieldUsage: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., fieldUsage: Incomplete | None = ...) -> None: ... 
+ +class CacheHierarchy(Serialisable): + tagname: str + uniqueName: Incomplete + caption: Incomplete + measure: Incomplete + set: Incomplete + parentSet: Incomplete + iconSet: Incomplete + attribute: Incomplete + time: Incomplete + keyAttribute: Incomplete + defaultMemberUniqueName: Incomplete + allUniqueName: Incomplete + allCaption: Incomplete + dimensionUniqueName: Incomplete + displayFolder: Incomplete + measureGroup: Incomplete + measures: Incomplete + count: Incomplete + oneField: Incomplete + memberValueDatatype: Incomplete + unbalanced: Incomplete + unbalancedGroup: Incomplete + hidden: Incomplete + fieldsUsage: Incomplete + groupLevels: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + uniqueName: str = ..., + caption: Incomplete | None = ..., + measure: Incomplete | None = ..., + set: Incomplete | None = ..., + parentSet: Incomplete | None = ..., + iconSet: int = ..., + attribute: Incomplete | None = ..., + time: Incomplete | None = ..., + keyAttribute: Incomplete | None = ..., + defaultMemberUniqueName: Incomplete | None = ..., + allUniqueName: Incomplete | None = ..., + allCaption: Incomplete | None = ..., + dimensionUniqueName: Incomplete | None = ..., + displayFolder: Incomplete | None = ..., + measureGroup: Incomplete | None = ..., + measures: Incomplete | None = ..., + count: Incomplete | None = ..., + oneField: Incomplete | None = ..., + memberValueDatatype: Incomplete | None = ..., + unbalanced: Incomplete | None = ..., + unbalancedGroup: Incomplete | None = ..., + hidden: Incomplete | None = ..., + fieldsUsage: Incomplete | None = ..., + groupLevels: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class GroupItems(Serialisable): + tagname: str + m: Incomplete + n: Incomplete + b: Incomplete + e: Incomplete + s: Incomplete + d: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., m=..., n=..., b=..., e=..., s=..., d=...) -> None: ... + @property + def count(self): ... + +class DiscretePr(Serialisable): + tagname: str + count: Incomplete + x: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., x: Incomplete | None = ...) -> None: ... + +class RangePr(Serialisable): + tagname: str + autoStart: Incomplete + autoEnd: Incomplete + groupBy: Incomplete + startNum: Incomplete + endNum: Incomplete + startDate: Incomplete + endDate: Incomplete + groupInterval: Incomplete + def __init__( + self, + autoStart: bool = ..., + autoEnd: bool = ..., + groupBy: str = ..., + startNum: Incomplete | None = ..., + endNum: Incomplete | None = ..., + startDate: Incomplete | None = ..., + endDate: Incomplete | None = ..., + groupInterval: int = ..., + ) -> None: ... + +class FieldGroup(Serialisable): + tagname: str + par: Incomplete + base: Incomplete + rangePr: Incomplete + discretePr: Incomplete + groupItems: Incomplete + __elements__: Incomplete + def __init__( + self, + par: Incomplete | None = ..., + base: Incomplete | None = ..., + rangePr: Incomplete | None = ..., + discretePr: Incomplete | None = ..., + groupItems: Incomplete | None = ..., + ) -> None: ... 
+ +class SharedItems(Serialisable): + tagname: str + m: Incomplete + n: Incomplete + b: Incomplete + e: Incomplete + s: Incomplete + d: Incomplete + containsSemiMixedTypes: Incomplete + containsNonDate: Incomplete + containsDate: Incomplete + containsString: Incomplete + containsBlank: Incomplete + containsMixedTypes: Incomplete + containsNumber: Incomplete + containsInteger: Incomplete + minValue: Incomplete + maxValue: Incomplete + minDate: Incomplete + maxDate: Incomplete + longText: Incomplete + __attrs__: Incomplete + def __init__( + self, + _fields=..., + containsSemiMixedTypes: Incomplete | None = ..., + containsNonDate: Incomplete | None = ..., + containsDate: Incomplete | None = ..., + containsString: Incomplete | None = ..., + containsBlank: Incomplete | None = ..., + containsMixedTypes: Incomplete | None = ..., + containsNumber: Incomplete | None = ..., + containsInteger: Incomplete | None = ..., + minValue: Incomplete | None = ..., + maxValue: Incomplete | None = ..., + minDate: Incomplete | None = ..., + maxDate: Incomplete | None = ..., + count: Incomplete | None = ..., + longText: Incomplete | None = ..., + ) -> None: ... + @property + def count(self): ... + +class CacheField(Serialisable): + tagname: str + sharedItems: Incomplete + fieldGroup: Incomplete + mpMap: Incomplete + extLst: Incomplete + name: Incomplete + caption: Incomplete + propertyName: Incomplete + serverField: Incomplete + uniqueList: Incomplete + numFmtId: Incomplete + formula: Incomplete + sqlType: Incomplete + hierarchy: Incomplete + level: Incomplete + databaseField: Incomplete + mappingCount: Incomplete + memberPropertyField: Incomplete + __elements__: Incomplete + def __init__( + self, + sharedItems: Incomplete | None = ..., + fieldGroup: Incomplete | None = ..., + mpMap: Incomplete | None = ..., + extLst: Incomplete | None = ..., + name: Incomplete | None = ..., + caption: Incomplete | None = ..., + propertyName: Incomplete | None = ..., + serverField: Incomplete | None = ..., + uniqueList: bool = ..., + numFmtId: Incomplete | None = ..., + formula: Incomplete | None = ..., + sqlType: int = ..., + hierarchy: int = ..., + level: int = ..., + databaseField: bool = ..., + mappingCount: Incomplete | None = ..., + memberPropertyField: Incomplete | None = ..., + ) -> None: ... + +class RangeSet(Serialisable): + tagname: str + i1: Incomplete + i2: Incomplete + i3: Incomplete + i4: Incomplete + ref: Incomplete + name: Incomplete + sheet: Incomplete + def __init__( + self, + i1: Incomplete | None = ..., + i2: Incomplete | None = ..., + i3: Incomplete | None = ..., + i4: Incomplete | None = ..., + ref: Incomplete | None = ..., + name: Incomplete | None = ..., + sheet: Incomplete | None = ..., + ) -> None: ... + +class PageItem(Serialisable): + tagname: str + name: Incomplete + def __init__(self, name: Incomplete | None = ...) -> None: ... + +class Page(Serialisable): + tagname: str + pageItem: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., pageItem: Incomplete | None = ...) -> None: ... + @property + def count(self): ... + +class Consolidation(Serialisable): + tagname: str + autoPage: Incomplete + pages: Incomplete + rangeSets: Incomplete + __elements__: Incomplete + def __init__(self, autoPage: Incomplete | None = ..., pages=..., rangeSets=...) -> None: ... 
+ +class WorksheetSource(Serialisable): + tagname: str + ref: Incomplete + name: Incomplete + sheet: Incomplete + def __init__(self, ref: Incomplete | None = ..., name: Incomplete | None = ..., sheet: Incomplete | None = ...) -> None: ... + +class CacheSource(Serialisable): + tagname: str + type: Incomplete + connectionId: Incomplete + worksheetSource: Incomplete + consolidation: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + connectionId: Incomplete | None = ..., + worksheetSource: Incomplete | None = ..., + consolidation: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class CacheDefinition(Serialisable): + mime_type: str + rel_type: str + records: Incomplete + tagname: str + invalid: Incomplete + saveData: Incomplete + refreshOnLoad: Incomplete + optimizeMemory: Incomplete + enableRefresh: Incomplete + refreshedBy: Incomplete + refreshedDate: Incomplete + refreshedDateIso: Incomplete + backgroundQuery: Incomplete + missingItemsLimit: Incomplete + createdVersion: Incomplete + refreshedVersion: Incomplete + minRefreshableVersion: Incomplete + recordCount: Incomplete + upgradeOnRefresh: Incomplete + tupleCache: Incomplete + supportSubquery: Incomplete + supportAdvancedDrill: Incomplete + cacheSource: Incomplete + cacheFields: Incomplete + cacheHierarchies: Incomplete + kpis: Incomplete + calculatedItems: Incomplete + calculatedMembers: Incomplete + dimensions: Incomplete + measureGroups: Incomplete + maps: Incomplete + extLst: Incomplete + id: Incomplete + __elements__: Incomplete + def __init__( + self, + invalid: Incomplete | None = ..., + saveData: Incomplete | None = ..., + refreshOnLoad: Incomplete | None = ..., + optimizeMemory: Incomplete | None = ..., + enableRefresh: Incomplete | None = ..., + refreshedBy: Incomplete | None = ..., + refreshedDate: Incomplete | None = ..., + refreshedDateIso: Incomplete | None = ..., + backgroundQuery: Incomplete | None = ..., + missingItemsLimit: Incomplete | None = ..., + createdVersion: Incomplete | None = ..., + refreshedVersion: Incomplete | None = ..., + minRefreshableVersion: Incomplete | None = ..., + recordCount: Incomplete | None = ..., + upgradeOnRefresh: Incomplete | None = ..., + tupleCache: Incomplete | None = ..., + supportSubquery: Incomplete | None = ..., + supportAdvancedDrill: Incomplete | None = ..., + cacheSource: Incomplete | None = ..., + cacheFields=..., + cacheHierarchies=..., + kpis=..., + calculatedItems=..., + calculatedMembers=..., + dimensions=..., + measureGroups=..., + maps=..., + extLst: Incomplete | None = ..., + id: Incomplete | None = ..., + ) -> None: ... + def to_tree(self): ... + @property + def path(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/fields.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/fields.pyi new file mode 100644 index 00000000..b3eb3158 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/fields.pyi @@ -0,0 +1,192 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Index(Serialisable): + tagname: str + v: Incomplete + def __init__(self, v: int = ...) -> None: ... 
+ +class Tuple(Serialisable): # type: ignore[misc] + fld: Incomplete + hier: Incomplete + item: Incomplete + def __init__(self, fld: Incomplete | None = ..., hier: Incomplete | None = ..., item: Incomplete | None = ...) -> None: ... + +class TupleList(Serialisable): # type: ignore[misc] + c: Incomplete + tpl: Incomplete + __elements__: Incomplete + def __init__(self, c: Incomplete | None = ..., tpl: Incomplete | None = ...) -> None: ... + +class Missing(Serialisable): + tagname: str + tpls: Incomplete + x: Incomplete + u: Incomplete + f: Incomplete + c: Incomplete + cp: Incomplete + bc: Incomplete + fc: Incomplete + i: Incomplete + un: Incomplete + st: Incomplete + b: Incomplete + __elements__: Incomplete + def __init__( + self, + tpls=..., + x=..., + u: Incomplete | None = ..., + f: Incomplete | None = ..., + c: Incomplete | None = ..., + cp: Incomplete | None = ..., + _in: Incomplete | None = ..., + bc: Incomplete | None = ..., + fc: Incomplete | None = ..., + i: Incomplete | None = ..., + un: Incomplete | None = ..., + st: Incomplete | None = ..., + b: Incomplete | None = ..., + ) -> None: ... + +class Number(Serialisable): + tagname: str + tpls: Incomplete + x: Incomplete + v: Incomplete + u: Incomplete + f: Incomplete + c: Incomplete + cp: Incomplete + bc: Incomplete + fc: Incomplete + i: Incomplete + un: Incomplete + st: Incomplete + b: Incomplete + __elements__: Incomplete + def __init__( + self, + tpls=..., + x=..., + v: Incomplete | None = ..., + u: Incomplete | None = ..., + f: Incomplete | None = ..., + c: Incomplete | None = ..., + cp: Incomplete | None = ..., + _in: Incomplete | None = ..., + bc: Incomplete | None = ..., + fc: Incomplete | None = ..., + i: Incomplete | None = ..., + un: Incomplete | None = ..., + st: Incomplete | None = ..., + b: Incomplete | None = ..., + ) -> None: ... + +class Error(Serialisable): + tagname: str + tpls: Incomplete + x: Incomplete + v: Incomplete + u: Incomplete + f: Incomplete + c: Incomplete + cp: Incomplete + bc: Incomplete + fc: Incomplete + i: Incomplete + un: Incomplete + st: Incomplete + b: Incomplete + __elements__: Incomplete + def __init__( + self, + tpls: Incomplete | None = ..., + x=..., + v: Incomplete | None = ..., + u: Incomplete | None = ..., + f: Incomplete | None = ..., + c: Incomplete | None = ..., + cp: Incomplete | None = ..., + _in: Incomplete | None = ..., + bc: Incomplete | None = ..., + fc: Incomplete | None = ..., + i: Incomplete | None = ..., + un: Incomplete | None = ..., + st: Incomplete | None = ..., + b: Incomplete | None = ..., + ) -> None: ... + +class Boolean(Serialisable): + tagname: str + x: Incomplete + v: Incomplete + u: Incomplete + f: Incomplete + c: Incomplete + cp: Incomplete + __elements__: Incomplete + def __init__( + self, + x=..., + v: Incomplete | None = ..., + u: Incomplete | None = ..., + f: Incomplete | None = ..., + c: Incomplete | None = ..., + cp: Incomplete | None = ..., + ) -> None: ... 
+ +class Text(Serialisable): + tagname: str + tpls: Incomplete + x: Incomplete + v: Incomplete + u: Incomplete + f: Incomplete + c: Incomplete + cp: Incomplete + bc: Incomplete + fc: Incomplete + i: Incomplete + un: Incomplete + st: Incomplete + b: Incomplete + __elements__: Incomplete + def __init__( + self, + tpls=..., + x=..., + v: Incomplete | None = ..., + u: Incomplete | None = ..., + f: Incomplete | None = ..., + c: Incomplete | None = ..., + cp: Incomplete | None = ..., + _in: Incomplete | None = ..., + bc: Incomplete | None = ..., + fc: Incomplete | None = ..., + i: Incomplete | None = ..., + un: Incomplete | None = ..., + st: Incomplete | None = ..., + b: Incomplete | None = ..., + ) -> None: ... + +class DateTimeField(Serialisable): + tagname: str + x: Incomplete + v: Incomplete + u: Incomplete + f: Incomplete + c: Incomplete + cp: Incomplete + __elements__: Incomplete + def __init__( + self, + x=..., + v: Incomplete | None = ..., + u: Incomplete | None = ..., + f: Incomplete | None = ..., + c: Incomplete | None = ..., + cp: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/record.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/record.pyi new file mode 100644 index 00000000..3406068b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/record.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Record(Serialisable): + tagname: str + m: Incomplete + n: Incomplete + b: Incomplete + e: Incomplete + s: Incomplete + d: Incomplete + x: Incomplete + def __init__( + self, + _fields=..., + m: Incomplete | None = ..., + n: Incomplete | None = ..., + b: Incomplete | None = ..., + e: Incomplete | None = ..., + s: Incomplete | None = ..., + d: Incomplete | None = ..., + x: Incomplete | None = ..., + ) -> None: ... + +class RecordList(Serialisable): + mime_type: str + rel_type: str + tagname: str + r: Incomplete + extLst: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., r=..., extLst: Incomplete | None = ...) -> None: ... + @property + def count(self): ... + def to_tree(self): ... + @property + def path(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/table.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/table.pyi new file mode 100644 index 00000000..16a7b289 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/pivot/table.pyi @@ -0,0 +1,680 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class HierarchyUsage(Serialisable): + tagname: str + hierarchyUsage: Incomplete + def __init__(self, hierarchyUsage: Incomplete | None = ...) -> None: ... + +class ColHierarchiesUsage(Serialisable): + tagname: str + colHierarchyUsage: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., colHierarchyUsage=...) -> None: ... + @property + def count(self): ... 
+ +class RowHierarchiesUsage(Serialisable): + tagname: str + rowHierarchyUsage: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., rowHierarchyUsage=...) -> None: ... + @property + def count(self): ... + +class PivotFilter(Serialisable): + tagname: str + fld: Incomplete + mpFld: Incomplete + type: Incomplete + evalOrder: Incomplete + id: Incomplete + iMeasureHier: Incomplete + iMeasureFld: Incomplete + name: Incomplete + description: Incomplete + stringValue1: Incomplete + stringValue2: Incomplete + autoFilter: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + fld: Incomplete | None = ..., + mpFld: Incomplete | None = ..., + type: Incomplete | None = ..., + evalOrder: Incomplete | None = ..., + id: Incomplete | None = ..., + iMeasureHier: Incomplete | None = ..., + iMeasureFld: Incomplete | None = ..., + name: Incomplete | None = ..., + description: Incomplete | None = ..., + stringValue1: Incomplete | None = ..., + stringValue2: Incomplete | None = ..., + autoFilter: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class PivotFilters(Serialisable): # type: ignore[misc] + count: Incomplete + filter: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., filter: Incomplete | None = ...) -> None: ... + +class PivotTableStyle(Serialisable): + tagname: str + name: Incomplete + showRowHeaders: Incomplete + showColHeaders: Incomplete + showRowStripes: Incomplete + showColStripes: Incomplete + showLastColumn: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + showRowHeaders: Incomplete | None = ..., + showColHeaders: Incomplete | None = ..., + showRowStripes: Incomplete | None = ..., + showColStripes: Incomplete | None = ..., + showLastColumn: Incomplete | None = ..., + ) -> None: ... + +class MemberList(Serialisable): + tagname: str + level: Incomplete + member: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., level: Incomplete | None = ..., member=...) -> None: ... + @property + def count(self): ... + +class MemberProperty(Serialisable): + tagname: str + name: Incomplete + showCell: Incomplete + showTip: Incomplete + showAsCaption: Incomplete + nameLen: Incomplete + pPos: Incomplete + pLen: Incomplete + level: Incomplete + field: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + showCell: Incomplete | None = ..., + showTip: Incomplete | None = ..., + showAsCaption: Incomplete | None = ..., + nameLen: Incomplete | None = ..., + pPos: Incomplete | None = ..., + pLen: Incomplete | None = ..., + level: Incomplete | None = ..., + field: Incomplete | None = ..., + ) -> None: ... 
+ +class PivotHierarchy(Serialisable): + tagname: str + outline: Incomplete + multipleItemSelectionAllowed: Incomplete + subtotalTop: Incomplete + showInFieldList: Incomplete + dragToRow: Incomplete + dragToCol: Incomplete + dragToPage: Incomplete + dragToData: Incomplete + dragOff: Incomplete + includeNewItemsInFilter: Incomplete + caption: Incomplete + mps: Incomplete + members: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + outline: Incomplete | None = ..., + multipleItemSelectionAllowed: Incomplete | None = ..., + subtotalTop: Incomplete | None = ..., + showInFieldList: Incomplete | None = ..., + dragToRow: Incomplete | None = ..., + dragToCol: Incomplete | None = ..., + dragToPage: Incomplete | None = ..., + dragToData: Incomplete | None = ..., + dragOff: Incomplete | None = ..., + includeNewItemsInFilter: Incomplete | None = ..., + caption: Incomplete | None = ..., + mps=..., + members: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class Reference(Serialisable): + tagname: str + field: Incomplete + selected: Incomplete + byPosition: Incomplete + relative: Incomplete + defaultSubtotal: Incomplete + sumSubtotal: Incomplete + countASubtotal: Incomplete + avgSubtotal: Incomplete + maxSubtotal: Incomplete + minSubtotal: Incomplete + productSubtotal: Incomplete + countSubtotal: Incomplete + stdDevSubtotal: Incomplete + stdDevPSubtotal: Incomplete + varSubtotal: Incomplete + varPSubtotal: Incomplete + x: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + field: Incomplete | None = ..., + count: Incomplete | None = ..., + selected: Incomplete | None = ..., + byPosition: Incomplete | None = ..., + relative: Incomplete | None = ..., + defaultSubtotal: Incomplete | None = ..., + sumSubtotal: Incomplete | None = ..., + countASubtotal: Incomplete | None = ..., + avgSubtotal: Incomplete | None = ..., + maxSubtotal: Incomplete | None = ..., + minSubtotal: Incomplete | None = ..., + productSubtotal: Incomplete | None = ..., + countSubtotal: Incomplete | None = ..., + stdDevSubtotal: Incomplete | None = ..., + stdDevPSubtotal: Incomplete | None = ..., + varSubtotal: Incomplete | None = ..., + varPSubtotal: Incomplete | None = ..., + x: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + @property + def count(self): ... + +class PivotArea(Serialisable): + tagname: str + references: Incomplete + extLst: Incomplete + field: Incomplete + type: Incomplete + dataOnly: Incomplete + labelOnly: Incomplete + grandRow: Incomplete + grandCol: Incomplete + cacheIndex: Incomplete + outline: Incomplete + offset: Incomplete + collapsedLevelsAreSubtotals: Incomplete + axis: Incomplete + fieldPosition: Incomplete + __elements__: Incomplete + def __init__( + self, + references=..., + extLst: Incomplete | None = ..., + field: Incomplete | None = ..., + type: str = ..., + dataOnly: bool = ..., + labelOnly: Incomplete | None = ..., + grandRow: Incomplete | None = ..., + grandCol: Incomplete | None = ..., + cacheIndex: Incomplete | None = ..., + outline: bool = ..., + offset: Incomplete | None = ..., + collapsedLevelsAreSubtotals: Incomplete | None = ..., + axis: Incomplete | None = ..., + fieldPosition: Incomplete | None = ..., + ) -> None: ... 
+ +class ChartFormat(Serialisable): + tagname: str + chart: Incomplete + format: Incomplete + series: Incomplete + pivotArea: Incomplete + __elements__: Incomplete + def __init__( + self, + chart: Incomplete | None = ..., + format: Incomplete | None = ..., + series: Incomplete | None = ..., + pivotArea: Incomplete | None = ..., + ) -> None: ... + +class ConditionalFormat(Serialisable): + tagname: str + scope: Incomplete + type: Incomplete + priority: Incomplete + pivotAreas: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + scope: Incomplete | None = ..., + type: Incomplete | None = ..., + priority: Incomplete | None = ..., + pivotAreas=..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class Format(Serialisable): + tagname: str + action: Incomplete + dxfId: Incomplete + pivotArea: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + action: str = ..., + dxfId: Incomplete | None = ..., + pivotArea: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class DataField(Serialisable): + tagname: str + name: Incomplete + fld: Incomplete + subtotal: Incomplete + showDataAs: Incomplete + baseField: Incomplete + baseItem: Incomplete + numFmtId: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + fld: Incomplete | None = ..., + subtotal: str = ..., + showDataAs: str = ..., + baseField: int = ..., + baseItem: int = ..., + numFmtId: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class PageField(Serialisable): + tagname: str + fld: Incomplete + item: Incomplete + hier: Incomplete + name: Incomplete + cap: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + fld: Incomplete | None = ..., + item: Incomplete | None = ..., + hier: Incomplete | None = ..., + name: Incomplete | None = ..., + cap: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class RowColItem(Serialisable): + tagname: str + t: Incomplete + r: Incomplete + i: Incomplete + x: Incomplete + __elements__: Incomplete + def __init__(self, t: str = ..., r: int = ..., i: int = ..., x=...) -> None: ... + +class RowColField(Serialisable): + tagname: str + x: Incomplete + def __init__(self, x: Incomplete | None = ...) -> None: ... + +class AutoSortScope(Serialisable): # type: ignore[misc] + pivotArea: Incomplete + __elements__: Incomplete + def __init__(self, pivotArea: Incomplete | None = ...) -> None: ... + +class FieldItem(Serialisable): + tagname: str + n: Incomplete + t: Incomplete + h: Incomplete + s: Incomplete + sd: Incomplete + f: Incomplete + m: Incomplete + c: Incomplete + x: Incomplete + d: Incomplete + e: Incomplete + def __init__( + self, + n: Incomplete | None = ..., + t: str = ..., + h: Incomplete | None = ..., + s: Incomplete | None = ..., + sd: bool = ..., + f: Incomplete | None = ..., + m: Incomplete | None = ..., + c: Incomplete | None = ..., + x: Incomplete | None = ..., + d: Incomplete | None = ..., + e: Incomplete | None = ..., + ) -> None: ... 
+ +class PivotField(Serialisable): + tagname: str + items: Incomplete + autoSortScope: Incomplete + extLst: Incomplete + name: Incomplete + axis: Incomplete + dataField: Incomplete + subtotalCaption: Incomplete + showDropDowns: Incomplete + hiddenLevel: Incomplete + uniqueMemberProperty: Incomplete + compact: Incomplete + allDrilled: Incomplete + numFmtId: Incomplete + outline: Incomplete + subtotalTop: Incomplete + dragToRow: Incomplete + dragToCol: Incomplete + multipleItemSelectionAllowed: Incomplete + dragToPage: Incomplete + dragToData: Incomplete + dragOff: Incomplete + showAll: Incomplete + insertBlankRow: Incomplete + serverField: Incomplete + insertPageBreak: Incomplete + autoShow: Incomplete + topAutoShow: Incomplete + hideNewItems: Incomplete + measureFilter: Incomplete + includeNewItemsInFilter: Incomplete + itemPageCount: Incomplete + sortType: Incomplete + dataSourceSort: Incomplete + nonAutoSortDefault: Incomplete + rankBy: Incomplete + defaultSubtotal: Incomplete + sumSubtotal: Incomplete + countASubtotal: Incomplete + avgSubtotal: Incomplete + maxSubtotal: Incomplete + minSubtotal: Incomplete + productSubtotal: Incomplete + countSubtotal: Incomplete + stdDevSubtotal: Incomplete + stdDevPSubtotal: Incomplete + varSubtotal: Incomplete + varPSubtotal: Incomplete + showPropCell: Incomplete + showPropTip: Incomplete + showPropAsCaption: Incomplete + defaultAttributeDrillState: Incomplete + __elements__: Incomplete + def __init__( + self, + items=..., + autoSortScope: Incomplete | None = ..., + name: Incomplete | None = ..., + axis: Incomplete | None = ..., + dataField: Incomplete | None = ..., + subtotalCaption: Incomplete | None = ..., + showDropDowns: bool = ..., + hiddenLevel: Incomplete | None = ..., + uniqueMemberProperty: Incomplete | None = ..., + compact: bool = ..., + allDrilled: Incomplete | None = ..., + numFmtId: Incomplete | None = ..., + outline: bool = ..., + subtotalTop: bool = ..., + dragToRow: bool = ..., + dragToCol: bool = ..., + multipleItemSelectionAllowed: Incomplete | None = ..., + dragToPage: bool = ..., + dragToData: bool = ..., + dragOff: bool = ..., + showAll: bool = ..., + insertBlankRow: Incomplete | None = ..., + serverField: Incomplete | None = ..., + insertPageBreak: Incomplete | None = ..., + autoShow: Incomplete | None = ..., + topAutoShow: bool = ..., + hideNewItems: Incomplete | None = ..., + measureFilter: Incomplete | None = ..., + includeNewItemsInFilter: Incomplete | None = ..., + itemPageCount: int = ..., + sortType: str = ..., + dataSourceSort: Incomplete | None = ..., + nonAutoSortDefault: Incomplete | None = ..., + rankBy: Incomplete | None = ..., + defaultSubtotal: bool = ..., + sumSubtotal: Incomplete | None = ..., + countASubtotal: Incomplete | None = ..., + avgSubtotal: Incomplete | None = ..., + maxSubtotal: Incomplete | None = ..., + minSubtotal: Incomplete | None = ..., + productSubtotal: Incomplete | None = ..., + countSubtotal: Incomplete | None = ..., + stdDevSubtotal: Incomplete | None = ..., + stdDevPSubtotal: Incomplete | None = ..., + varSubtotal: Incomplete | None = ..., + varPSubtotal: Incomplete | None = ..., + showPropCell: Incomplete | None = ..., + showPropTip: Incomplete | None = ..., + showPropAsCaption: Incomplete | None = ..., + defaultAttributeDrillState: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
+ +class Location(Serialisable): + tagname: str + ref: Incomplete + firstHeaderRow: Incomplete + firstDataRow: Incomplete + firstDataCol: Incomplete + rowPageCount: Incomplete + colPageCount: Incomplete + def __init__( + self, + ref: Incomplete | None = ..., + firstHeaderRow: Incomplete | None = ..., + firstDataRow: Incomplete | None = ..., + firstDataCol: Incomplete | None = ..., + rowPageCount: Incomplete | None = ..., + colPageCount: Incomplete | None = ..., + ) -> None: ... + +class TableDefinition(Serialisable): + mime_type: str + rel_type: str + tagname: str + cache: Incomplete + name: Incomplete + cacheId: Incomplete + dataOnRows: Incomplete + dataPosition: Incomplete + dataCaption: Incomplete + grandTotalCaption: Incomplete + errorCaption: Incomplete + showError: Incomplete + missingCaption: Incomplete + showMissing: Incomplete + pageStyle: Incomplete + pivotTableStyle: Incomplete + vacatedStyle: Incomplete + tag: Incomplete + updatedVersion: Incomplete + minRefreshableVersion: Incomplete + asteriskTotals: Incomplete + showItems: Incomplete + editData: Incomplete + disableFieldList: Incomplete + showCalcMbrs: Incomplete + visualTotals: Incomplete + showMultipleLabel: Incomplete + showDataDropDown: Incomplete + showDrill: Incomplete + printDrill: Incomplete + showMemberPropertyTips: Incomplete + showDataTips: Incomplete + enableWizard: Incomplete + enableDrill: Incomplete + enableFieldProperties: Incomplete + preserveFormatting: Incomplete + useAutoFormatting: Incomplete + pageWrap: Incomplete + pageOverThenDown: Incomplete + subtotalHiddenItems: Incomplete + rowGrandTotals: Incomplete + colGrandTotals: Incomplete + fieldPrintTitles: Incomplete + itemPrintTitles: Incomplete + mergeItem: Incomplete + showDropZones: Incomplete + createdVersion: Incomplete + indent: Incomplete + showEmptyRow: Incomplete + showEmptyCol: Incomplete + showHeaders: Incomplete + compact: Incomplete + outline: Incomplete + outlineData: Incomplete + compactData: Incomplete + published: Incomplete + gridDropZones: Incomplete + immersive: Incomplete + multipleFieldFilters: Incomplete + chartFormat: Incomplete + rowHeaderCaption: Incomplete + colHeaderCaption: Incomplete + fieldListSortAscending: Incomplete + mdxSubqueries: Incomplete + customListSort: Incomplete + autoFormatId: Incomplete + applyNumberFormats: Incomplete + applyBorderFormats: Incomplete + applyFontFormats: Incomplete + applyPatternFormats: Incomplete + applyAlignmentFormats: Incomplete + applyWidthHeightFormats: Incomplete + location: Incomplete + pivotFields: Incomplete + rowFields: Incomplete + rowItems: Incomplete + colFields: Incomplete + colItems: Incomplete + pageFields: Incomplete + dataFields: Incomplete + formats: Incomplete + conditionalFormats: Incomplete + chartFormats: Incomplete + pivotHierarchies: Incomplete + pivotTableStyleInfo: Incomplete + filters: Incomplete + rowHierarchiesUsage: Incomplete + colHierarchiesUsage: Incomplete + extLst: Incomplete + id: Incomplete + __elements__: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + cacheId: Incomplete | None = ..., + dataOnRows: bool = ..., + dataPosition: Incomplete | None = ..., + dataCaption: Incomplete | None = ..., + grandTotalCaption: Incomplete | None = ..., + errorCaption: Incomplete | None = ..., + showError: bool = ..., + missingCaption: Incomplete | None = ..., + showMissing: bool = ..., + pageStyle: Incomplete | None = ..., + pivotTableStyle: Incomplete | None = ..., + vacatedStyle: Incomplete | None = ..., + tag: Incomplete | None = ..., + 
updatedVersion: int = ..., + minRefreshableVersion: int = ..., + asteriskTotals: bool = ..., + showItems: bool = ..., + editData: bool = ..., + disableFieldList: bool = ..., + showCalcMbrs: bool = ..., + visualTotals: bool = ..., + showMultipleLabel: bool = ..., + showDataDropDown: bool = ..., + showDrill: bool = ..., + printDrill: bool = ..., + showMemberPropertyTips: bool = ..., + showDataTips: bool = ..., + enableWizard: bool = ..., + enableDrill: bool = ..., + enableFieldProperties: bool = ..., + preserveFormatting: bool = ..., + useAutoFormatting: bool = ..., + pageWrap: int = ..., + pageOverThenDown: bool = ..., + subtotalHiddenItems: bool = ..., + rowGrandTotals: bool = ..., + colGrandTotals: bool = ..., + fieldPrintTitles: bool = ..., + itemPrintTitles: bool = ..., + mergeItem: bool = ..., + showDropZones: bool = ..., + createdVersion: int = ..., + indent: int = ..., + showEmptyRow: bool = ..., + showEmptyCol: bool = ..., + showHeaders: bool = ..., + compact: bool = ..., + outline: bool = ..., + outlineData: bool = ..., + compactData: bool = ..., + published: bool = ..., + gridDropZones: bool = ..., + immersive: bool = ..., + multipleFieldFilters: Incomplete | None = ..., + chartFormat: int = ..., + rowHeaderCaption: Incomplete | None = ..., + colHeaderCaption: Incomplete | None = ..., + fieldListSortAscending: Incomplete | None = ..., + mdxSubqueries: Incomplete | None = ..., + customListSort: Incomplete | None = ..., + autoFormatId: Incomplete | None = ..., + applyNumberFormats: bool = ..., + applyBorderFormats: bool = ..., + applyFontFormats: bool = ..., + applyPatternFormats: bool = ..., + applyAlignmentFormats: bool = ..., + applyWidthHeightFormats: bool = ..., + location: Incomplete | None = ..., + pivotFields=..., + rowFields=..., + rowItems=..., + colFields=..., + colItems=..., + pageFields=..., + dataFields=..., + formats=..., + conditionalFormats=..., + chartFormats=..., + pivotHierarchies=..., + pivotTableStyleInfo: Incomplete | None = ..., + filters=..., + rowHierarchiesUsage: Incomplete | None = ..., + colHierarchiesUsage: Incomplete | None = ..., + extLst: Incomplete | None = ..., + id: Incomplete | None = ..., + ) -> None: ... + def to_tree(self): ... + @property + def path(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/drawings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/drawings.pyi new file mode 100644 index 00000000..4334877f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/drawings.pyi @@ -0,0 +1 @@ +def find_images(archive, path): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/excel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/excel.pyi new file mode 100644 index 00000000..28a99d45 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/excel.pyi @@ -0,0 +1,38 @@ +from _typeshed import Incomplete, StrPath, SupportsRead + +from openpyxl.chartsheet.chartsheet import Chartsheet +from openpyxl.packaging.relationship import Relationship +from openpyxl.workbook.workbook import Workbook + +SUPPORTED_FORMATS: Incomplete + +class ExcelReader: + archive: Incomplete + valid_files: Incomplete + read_only: Incomplete + keep_vba: Incomplete + data_only: Incomplete + keep_links: Incomplete + shared_strings: Incomplete + def __init__( + self, fn: SupportsRead[bytes] | str, read_only: bool = ..., keep_vba=..., data_only: bool = ..., keep_links: bool = ... + ) -> None: ... + package: Incomplete + def read_manifest(self) -> None: ... + def read_strings(self) -> None: ... + parser: Incomplete + wb: Incomplete + def read_workbook(self) -> None: ... + def read_properties(self) -> None: ... + def read_theme(self) -> None: ... + def read_chartsheet(self, sheet: Chartsheet, rel: Relationship) -> None: ... + def read_worksheets(self) -> None: ... + def read(self) -> None: ... + +def load_workbook( + filename: SupportsRead[bytes] | StrPath, + read_only: bool = ..., + keep_vba: bool = ..., + data_only: bool = ..., + keep_links: bool = ..., +) -> Workbook: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/strings.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/strings.pyi new file mode 100644 index 00000000..932a8b6f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/strings.pyi @@ -0,0 +1 @@ +def read_string_table(xml_source): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/workbook.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/workbook.pyi new file mode 100644 index 00000000..bec29155 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/reader/workbook.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +class WorkbookParser: + archive: Incomplete + workbook_part_name: Incomplete + wb: Incomplete + keep_links: Incomplete + sheets: Incomplete + def __init__(self, archive, workbook_part_name, keep_links: bool = ...) -> None: ... + @property + def rels(self): ... + caches: Incomplete + def parse(self) -> None: ... + def find_sheets(self) -> Generator[Incomplete, None, None]: ... + def assign_names(self) -> None: ... + @property + def pivot_caches(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/__init__.pyi new file mode 100644 index 00000000..946d3a30 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/__init__.pyi @@ -0,0 +1,8 @@ +from .alignment import Alignment as Alignment +from .borders import Border as Border, Side as Side +from .colors import Color as Color +from .fills import Fill as Fill, GradientFill as GradientFill, PatternFill as PatternFill +from .fonts import DEFAULT_FONT as DEFAULT_FONT, Font as Font +from .named_styles import NamedStyle as NamedStyle +from .numbers import NumberFormatDescriptor as NumberFormatDescriptor, is_builtin as is_builtin, is_date_format as is_date_format +from .protection import Protection as Protection diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/alignment.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/alignment.pyi new file mode 100644 index 00000000..6914145b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/alignment.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +horizontal_alignments: Incomplete +vertical_aligments: Incomplete + +class Alignment(Serialisable): + tagname: str + __fields__: Incomplete + horizontal: Incomplete + vertical: Incomplete + textRotation: Incomplete + text_rotation: Incomplete + wrapText: Incomplete + wrap_text: Incomplete + shrinkToFit: Incomplete + shrink_to_fit: Incomplete + indent: Incomplete + relativeIndent: Incomplete + justifyLastLine: Incomplete + readingOrder: Incomplete + def __init__( + self, + horizontal: Incomplete | None = ..., + vertical: Incomplete | None = ..., + textRotation: int = ..., + wrapText: Incomplete | None = ..., + shrinkToFit: Incomplete | None = ..., + indent: int = ..., + relativeIndent: int = ..., + justifyLastLine: Incomplete | None = ..., + readingOrder: int = ..., + text_rotation: Incomplete | None = ..., + wrap_text: Incomplete | None = ..., + shrink_to_fit: Incomplete | None = ..., + mergeCell: Incomplete | None = ..., + ) -> None: ... + def __iter__(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/borders.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/borders.pyi new file mode 100644 index 00000000..257e1c68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/borders.pyi @@ -0,0 +1,64 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +BORDER_NONE: Incomplete +BORDER_DASHDOT: str +BORDER_DASHDOTDOT: str +BORDER_DASHED: str +BORDER_DOTTED: str +BORDER_DOUBLE: str +BORDER_HAIR: str +BORDER_MEDIUM: str +BORDER_MEDIUMDASHDOT: str +BORDER_MEDIUMDASHDOTDOT: str +BORDER_MEDIUMDASHED: str +BORDER_SLANTDASHDOT: str +BORDER_THICK: str +BORDER_THIN: str + +class Side(Serialisable): # type: ignore[misc] + __fields__: Incomplete + color: Incomplete + style: Incomplete + border_style: Incomplete + def __init__( + self, style: Incomplete | None = ..., color: Incomplete | None = ..., border_style: Incomplete | None = ... + ) -> None: ... + +class Border(Serialisable): + tagname: str + __fields__: Incomplete + __elements__: Incomplete + start: Incomplete + end: Incomplete + left: Incomplete + right: Incomplete + top: Incomplete + bottom: Incomplete + diagonal: Incomplete + vertical: Incomplete + horizontal: Incomplete + outline: Incomplete + diagonalUp: Incomplete + diagonalDown: Incomplete + diagonal_direction: Incomplete + def __init__( + self, + left: Incomplete | None = ..., + right: Incomplete | None = ..., + top: Incomplete | None = ..., + bottom: Incomplete | None = ..., + diagonal: Incomplete | None = ..., + diagonal_direction: Incomplete | None = ..., + vertical: Incomplete | None = ..., + horizontal: Incomplete | None = ..., + diagonalUp: bool = ..., + diagonalDown: bool = ..., + outline: bool = ..., + start: Incomplete | None = ..., + end: Incomplete | None = ..., + ) -> None: ... + def __iter__(self): ... 
+ +DEFAULT_BORDER: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/builtins.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/builtins.pyi new file mode 100644 index 00000000..aabe58d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/builtins.pyi @@ -0,0 +1,53 @@ +from _typeshed import Incomplete + +normal: str +comma: str +comma_0: str +currency: str +currency_0: str +percent: str +hyperlink: str +followed_hyperlink: str +title: str +headline_1: str +headline_2: str +headline_3: str +headline_4: str +good: str +bad: str +neutral: str +input: str +output: str +calculation: str +linked_cell: str +check_cell: str +warning: str +note: str +explanatory: str +total: str +accent_1: str +accent_1_20: str +accent_1_40: str +accent_1_60: str +accent_2: str +accent_2_20: str +accent_2_40: str +accent_2_60: str +accent_3: str +accent_3_20: str +accent_3_40: str +accent_3_60: str +accent_4: str +accent_4_20: str +accent_4_40: str +accent_4_60: str +accent_5: str +accent_5_20: str +accent_5_40: str +accent_5_60: str +accent_6: str +accent_6_20: str +accent_6_40: str +accent_6_60: str +pandas_highlight: str +styles: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/cell_style.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/cell_style.pyi new file mode 100644 index 00000000..32355ec9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/cell_style.pyi @@ -0,0 +1,88 @@ +from _typeshed import Incomplete +from array import array + +from openpyxl.descriptors.serialisable import Serialisable + +class ArrayDescriptor: + key: Incomplete + def __init__(self, key) -> None: ... + def __get__(self, instance, cls): ... + def __set__(self, instance, value) -> None: ... + +class StyleArray(array[Incomplete]): + tagname: str + fontId: Incomplete + fillId: Incomplete + borderId: Incomplete + numFmtId: Incomplete + protectionId: Incomplete + alignmentId: Incomplete + pivotButton: Incomplete + quotePrefix: Incomplete + xfId: Incomplete + def __new__(cls, args=...): ... + def __hash__(self) -> int: ... + def __copy__(self): ... + def __deepcopy__(self, memo): ... 
+ +class CellStyle(Serialisable): + tagname: str + numFmtId: Incomplete + fontId: Incomplete + fillId: Incomplete + borderId: Incomplete + xfId: Incomplete + quotePrefix: Incomplete + pivotButton: Incomplete + applyNumberFormat: Incomplete + applyFont: Incomplete + applyFill: Incomplete + applyBorder: Incomplete + # Overwritten by properties below + # applyAlignment: Bool + # applyProtection: Bool + alignment: Incomplete + protection: Incomplete + extLst: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__( + self, + numFmtId: int = ..., + fontId: int = ..., + fillId: int = ..., + borderId: int = ..., + xfId: Incomplete | None = ..., + quotePrefix: Incomplete | None = ..., + pivotButton: Incomplete | None = ..., + applyNumberFormat: Incomplete | None = ..., + applyFont: Incomplete | None = ..., + applyFill: Incomplete | None = ..., + applyBorder: Incomplete | None = ..., + applyAlignment: Incomplete | None = ..., + applyProtection: Incomplete | None = ..., + alignment: Incomplete | None = ..., + protection: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + def to_array(self): ... + @classmethod + def from_array(cls, style): ... + @property + def applyProtection(self): ... + @property + def applyAlignment(self): ... + +class CellStyleList(Serialisable): + tagname: str + __attrs__: Incomplete + # Overwritten by property below + # count: Integer + xf: Incomplete + alignment: Incomplete + protection: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., xf=...) -> None: ... + @property + def count(self): ... + def __getitem__(self, idx): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/colors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/colors.pyi new file mode 100644 index 00000000..4cb7974e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/colors.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import Typed +from openpyxl.descriptors.serialisable import Serialisable + +COLOR_INDEX: Incomplete +BLACK: Incomplete +WHITE: Incomplete +BLUE: Incomplete +aRGB_REGEX: Incomplete + +class RGB(Typed): + expected_type: Incomplete + def __set__(self, instance, value) -> None: ... + +class Color(Serialisable): + tagname: str + rgb: Incomplete + indexed: Incomplete + auto: Incomplete + theme: Incomplete + tint: Incomplete + type: Incomplete + def __init__( + self, + rgb=..., + indexed: Incomplete | None = ..., + auto: Incomplete | None = ..., + theme: Incomplete | None = ..., + tint: float = ..., + index: Incomplete | None = ..., + type: str = ..., + ) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, value) -> None: ... + def __iter__(self): ... + @property + def index(self): ... + def __add__(self, other): ... + +class ColorDescriptor(Typed): + expected_type: Incomplete + def __set__(self, instance, value) -> None: ... + +class RgbColor(Serialisable): + tagname: str + rgb: Incomplete + def __init__(self, rgb: Incomplete | None = ...) -> None: ... + +class ColorList(Serialisable): + tagname: str + indexedColors: Incomplete + mruColors: Incomplete + __elements__: Incomplete + def __init__(self, indexedColors=..., mruColors=...) -> None: ... + def __bool__(self) -> bool: ... + @property + def index(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/differential.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/differential.pyi new file mode 100644 index 00000000..4e0be87b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/differential.pyi @@ -0,0 +1,37 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class DifferentialStyle(Serialisable): + tagname: str + __elements__: Incomplete + font: Incomplete + numFmt: Incomplete + fill: Incomplete + alignment: Incomplete + border: Incomplete + protection: Incomplete + extLst: Incomplete + def __init__( + self, + font: Incomplete | None = ..., + numFmt: Incomplete | None = ..., + fill: Incomplete | None = ..., + alignment: Incomplete | None = ..., + border: Incomplete | None = ..., + protection: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class DifferentialStyleList(Serialisable): + tagname: str + dxf: Incomplete + styles: Incomplete + __attrs__: Incomplete + def __init__(self, dxf=..., count: Incomplete | None = ...) -> None: ... + def append(self, dxf) -> None: ... + def add(self, dxf): ... + def __bool__(self) -> bool: ... + def __getitem__(self, idx): ... + @property + def count(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/fills.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/fills.pyi new file mode 100644 index 00000000..8afca121 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/fills.pyi @@ -0,0 +1,79 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import Sequence +from openpyxl.descriptors.serialisable import Serialisable + +FILL_NONE: str +FILL_SOLID: str +FILL_PATTERN_DARKDOWN: str +FILL_PATTERN_DARKGRAY: str +FILL_PATTERN_DARKGRID: str +FILL_PATTERN_DARKHORIZONTAL: str +FILL_PATTERN_DARKTRELLIS: str +FILL_PATTERN_DARKUP: str +FILL_PATTERN_DARKVERTICAL: str +FILL_PATTERN_GRAY0625: str +FILL_PATTERN_GRAY125: str +FILL_PATTERN_LIGHTDOWN: str +FILL_PATTERN_LIGHTGRAY: str +FILL_PATTERN_LIGHTGRID: str +FILL_PATTERN_LIGHTHORIZONTAL: str +FILL_PATTERN_LIGHTTRELLIS: str +FILL_PATTERN_LIGHTUP: str +FILL_PATTERN_LIGHTVERTICAL: str +FILL_PATTERN_MEDIUMGRAY: str +fills: Incomplete + +class Fill(Serialisable): + tagname: str + @classmethod + def from_tree(cls, el): ... + +class PatternFill(Fill): + tagname: str + __elements__: Incomplete + patternType: Incomplete + fill_type: Incomplete + fgColor: Incomplete + start_color: Incomplete + bgColor: Incomplete + end_color: Incomplete + def __init__( + self, + patternType: Incomplete | None = ..., + fgColor=..., + bgColor=..., + fill_type: Incomplete | None = ..., + start_color: Incomplete | None = ..., + end_color: Incomplete | None = ..., + ) -> None: ... + def to_tree(self, tagname: Incomplete | None = ..., idx: Incomplete | None = ...): ... # type: ignore[override] + +DEFAULT_EMPTY_FILL: Incomplete +DEFAULT_GRAY_FILL: Incomplete + +class Stop(Serialisable): + tagname: str + position: Incomplete + color: Incomplete + def __init__(self, color, position) -> None: ... + +class StopList(Sequence): + expected_type: Incomplete + def __set__(self, obj, values) -> None: ... 
+ +class GradientFill(Fill): + tagname: str + type: Incomplete + fill_type: Incomplete + degree: Incomplete + left: Incomplete + right: Incomplete + top: Incomplete + bottom: Incomplete + stop: Incomplete + def __init__( + self, type: str = ..., degree: int = ..., left: int = ..., right: int = ..., top: int = ..., bottom: int = ..., stop=... + ) -> None: ... + def __iter__(self): ... + def to_tree(self, tagname: Incomplete | None = ..., namespace: Incomplete | None = ..., idx: Incomplete | None = ...): ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/fonts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/fonts.pyi new file mode 100644 index 00000000..cb565d64 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/fonts.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Font(Serialisable): + UNDERLINE_DOUBLE: str + UNDERLINE_DOUBLE_ACCOUNTING: str + UNDERLINE_SINGLE: str + UNDERLINE_SINGLE_ACCOUNTING: str + name: Incomplete + charset: Incomplete + family: Incomplete + sz: Incomplete + size: Incomplete + b: Incomplete + bold: Incomplete + i: Incomplete + italic: Incomplete + strike: Incomplete + strikethrough: Incomplete + outline: Incomplete + shadow: Incomplete + condense: Incomplete + extend: Incomplete + u: Incomplete + underline: Incomplete + vertAlign: Incomplete + color: Incomplete + scheme: Incomplete + tagname: str + __elements__: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + sz: Incomplete | None = ..., + b: Incomplete | None = ..., + i: Incomplete | None = ..., + charset: Incomplete | None = ..., + u: Incomplete | None = ..., + strike: Incomplete | None = ..., + color: Incomplete | None = ..., + scheme: Incomplete | None = ..., + family: Incomplete | None = ..., + size: Incomplete | None = ..., + bold: Incomplete | None = ..., + italic: Incomplete | None = ..., + strikethrough: Incomplete | None = ..., + underline: Incomplete | None = ..., + vertAlign: Incomplete | None = ..., + outline: Incomplete | None = ..., + shadow: Incomplete | None = ..., + condense: Incomplete | None = ..., + extend: Incomplete | None = ..., + ) -> None: ... + @classmethod + def from_tree(cls, node): ... 
+ +DEFAULT_FONT: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/named_styles.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/named_styles.pyi new file mode 100644 index 00000000..d848629a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/named_styles.pyi @@ -0,0 +1,76 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class NamedStyle(Serialisable): # type: ignore[misc] + font: Incomplete + fill: Incomplete + border: Incomplete + alignment: Incomplete + number_format: Incomplete + protection: Incomplete + builtinId: Incomplete + hidden: Incomplete + # Overwritten by property below + # xfId: Integer + name: Incomplete + def __init__( + self, + name: str = ..., + font=..., + fill=..., + border=..., + alignment=..., + number_format: Incomplete | None = ..., + protection=..., + builtinId: Incomplete | None = ..., + hidden: bool = ..., + xfId: Incomplete | None = ..., + ) -> None: ... + def __setattr__(self, attr: str, value) -> None: ... + def __iter__(self): ... + @property + def xfId(self): ... + def bind(self, wb) -> None: ... + def as_tuple(self): ... + def as_xf(self): ... + def as_name(self): ... + +class NamedStyleList(list[Incomplete]): + @property + def names(self): ... + def __getitem__(self, key): ... + def append(self, style) -> None: ... + +class _NamedCellStyle(Serialisable): + tagname: str + name: Incomplete + xfId: Incomplete + builtinId: Incomplete + iLevel: Incomplete + hidden: Incomplete + customBuiltin: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + xfId: Incomplete | None = ..., + builtinId: Incomplete | None = ..., + iLevel: Incomplete | None = ..., + hidden: Incomplete | None = ..., + customBuiltin: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class _NamedCellStyleList(Serialisable): + tagname: str + # Overwritten by property below + # count: Integer + cellStyle: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., cellStyle=...) -> None: ... + @property + def count(self): ... + @property + def names(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/numbers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/numbers.pyi new file mode 100644 index 00000000..2762b586 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/numbers.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import String +from openpyxl.descriptors.serialisable import Serialisable + +BUILTIN_FORMATS: Incomplete +BUILTIN_FORMATS_MAX_SIZE: int +BUILTIN_FORMATS_REVERSE: Incomplete +FORMAT_GENERAL: Incomplete +FORMAT_TEXT: Incomplete +FORMAT_NUMBER: Incomplete +FORMAT_NUMBER_00: Incomplete +FORMAT_NUMBER_COMMA_SEPARATED1: Incomplete +FORMAT_NUMBER_COMMA_SEPARATED2: str +FORMAT_PERCENTAGE: Incomplete +FORMAT_PERCENTAGE_00: Incomplete +FORMAT_DATE_YYYYMMDD2: str +FORMAT_DATE_YYMMDD: str +FORMAT_DATE_DDMMYY: str +FORMAT_DATE_DMYSLASH: str +FORMAT_DATE_DMYMINUS: str +FORMAT_DATE_DMMINUS: str +FORMAT_DATE_MYMINUS: str +FORMAT_DATE_XLSX14: Incomplete +FORMAT_DATE_XLSX15: Incomplete +FORMAT_DATE_XLSX16: Incomplete +FORMAT_DATE_XLSX17: Incomplete +FORMAT_DATE_XLSX22: Incomplete +FORMAT_DATE_DATETIME: str +FORMAT_DATE_TIME1: Incomplete +FORMAT_DATE_TIME2: Incomplete +FORMAT_DATE_TIME3: Incomplete +FORMAT_DATE_TIME4: Incomplete +FORMAT_DATE_TIME5: Incomplete +FORMAT_DATE_TIME6: Incomplete +FORMAT_DATE_TIME7: str +FORMAT_DATE_TIME8: str +FORMAT_DATE_TIMEDELTA: str +FORMAT_DATE_YYMMDDSLASH: str +FORMAT_CURRENCY_USD_SIMPLE: str +FORMAT_CURRENCY_USD: str +FORMAT_CURRENCY_EUR_SIMPLE: str +COLORS: str +LITERAL_GROUP: str +LOCALE_GROUP: str +STRIP_RE: Incomplete +TIMEDELTA_RE: Incomplete + +def is_date_format(fmt): ... +def is_timedelta_format(fmt): ... +def is_datetime(fmt): ... +def is_builtin(fmt): ... +def builtin_format_code(index): ... +def builtin_format_id(fmt): ... + +class NumberFormatDescriptor(String): + def __set__(self, instance, value) -> None: ... + +class NumberFormat(Serialisable): # type: ignore[misc] + numFmtId: Incomplete + formatCode: Incomplete + def __init__(self, numFmtId: Incomplete | None = ..., formatCode: Incomplete | None = ...) -> None: ... + +class NumberFormatList(Serialisable): # type: ignore[misc] + # Overwritten by property below + # count: Integer + numFmt: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., numFmt=...) -> None: ... + @property + def count(self): ... + def __getitem__(self, idx): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/protection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/protection.pyi new file mode 100644 index 00000000..0aae25a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/protection.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Protection(Serialisable): + tagname: str + locked: Incomplete + hidden: Incomplete + def __init__(self, locked: bool = ..., hidden: bool = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/proxy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/proxy.pyi new file mode 100644 index 00000000..6e9d3d30 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/proxy.pyi @@ -0,0 +1,9 @@ +class StyleProxy: + def __init__(self, target) -> None: ... + def __getattr__(self, attr: str): ... + def __setattr__(self, attr: str, value) -> None: ... + def __copy__(self): ... + def __add__(self, other): ... + def copy(self, **kw): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/styleable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/styleable.pyi new file mode 100644 index 00000000..15e2a4bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/styleable.pyi @@ -0,0 +1,44 @@ +from _typeshed import Incomplete +from warnings import warn as warn + +class StyleDescriptor: + collection: Incomplete + key: Incomplete + def __init__(self, collection, key) -> None: ... + def __set__(self, instance, value) -> None: ... + def __get__(self, instance, cls): ... + +class NumberFormatDescriptor: + key: str + collection: str + def __set__(self, instance, value) -> None: ... + def __get__(self, instance, cls): ... + +class NamedStyleDescriptor: + key: str + collection: str + def __set__(self, instance, value) -> None: ... + def __get__(self, instance, cls): ... + +class StyleArrayDescriptor: + key: Incomplete + def __init__(self, key) -> None: ... + def __set__(self, instance, value) -> None: ... + def __get__(self, instance, cls): ... + +class StyleableObject: + font: Incomplete + fill: Incomplete + border: Incomplete + number_format: Incomplete + protection: Incomplete + alignment: Incomplete + style: Incomplete + quotePrefix: Incomplete + pivotButton: Incomplete + parent: Incomplete + def __init__(self, sheet, style_array: Incomplete | None = ...) -> None: ... + @property + def style_id(self): ... + @property + def has_style(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/stylesheet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/stylesheet.pyi new file mode 100644 index 00000000..3af63a89 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/stylesheet.pyi @@ -0,0 +1,45 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Stylesheet(Serialisable): + tagname: str + numFmts: Incomplete + fonts: Incomplete + fills: Incomplete + borders: Incomplete + cellStyleXfs: Incomplete + cellXfs: Incomplete + cellStyles: Incomplete + dxfs: Incomplete + tableStyles: Incomplete + colors: Incomplete + extLst: Incomplete + __elements__: Incomplete + number_formats: Incomplete + cell_styles: Incomplete + alignments: Incomplete + protections: Incomplete + named_styles: Incomplete + def __init__( + self, + numFmts: Incomplete | None = ..., + fonts=..., + fills=..., + borders=..., + cellStyleXfs: Incomplete | None = ..., + cellXfs: Incomplete | None = ..., + cellStyles: Incomplete | None = ..., + dxfs=..., + tableStyles: Incomplete | None = ..., + colors: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + @classmethod + def from_tree(cls, node): ... + @property + def custom_formats(self): ... + def to_tree(self, tagname: Incomplete | None = ..., idx: Incomplete | None = ..., namespace: Incomplete | None = ...): ... + +def apply_stylesheet(archive, wb): ... +def write_stylesheet(wb): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/table.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/table.pyi new file mode 100644 index 00000000..eae5d385 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/styles/table.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class TableStyleElement(Serialisable): + tagname: str + type: Incomplete + size: Incomplete + dxfId: Incomplete + def __init__(self, type: Incomplete | None = ..., size: Incomplete | None = ..., dxfId: Incomplete | None = ...) -> None: ... + +class TableStyle(Serialisable): + tagname: str + name: Incomplete + pivot: Incomplete + table: Incomplete + count: Incomplete + tableStyleElement: Incomplete + __elements__: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + pivot: Incomplete | None = ..., + table: Incomplete | None = ..., + count: Incomplete | None = ..., + tableStyleElement=..., + ) -> None: ... + +class TableStyleList(Serialisable): + tagname: str + defaultTableStyle: Incomplete + defaultPivotStyle: Incomplete + tableStyle: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__( + self, count: Incomplete | None = ..., defaultTableStyle: str = ..., defaultPivotStyle: str = ..., tableStyle=... + ) -> None: ... + @property + def count(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/__init__.pyi new file mode 100644 index 00000000..ba459395 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/__init__.pyi @@ -0,0 +1,13 @@ +from .cell import ( + absolute_coordinate as absolute_coordinate, + cols_from_range as cols_from_range, + column_index_from_string as column_index_from_string, + coordinate_to_tuple as coordinate_to_tuple, + get_column_interval as get_column_interval, + get_column_letter as get_column_letter, + quote_sheetname as quote_sheetname, + range_boundaries as range_boundaries, + range_to_tuple as range_to_tuple, + rows_from_range as rows_from_range, +) +from .formulas import FORMULAE as FORMULAE diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/bound_dictionary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/bound_dictionary.pyi new file mode 100644 index 00000000..71292228 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/bound_dictionary.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete +from collections import defaultdict + +class BoundDictionary(defaultdict[Incomplete, Incomplete]): + reference: Incomplete + def __init__(self, reference: Incomplete | None = ..., *args, **kw) -> None: ... + def __getitem__(self, key): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/cell.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/cell.pyi new file mode 100644 index 00000000..bddf718b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/cell.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +COORD_RE: Incomplete +COL_RANGE: str +ROW_RANGE: str +RANGE_EXPR: str +ABSOLUTE_RE: Incomplete +SHEET_TITLE: str +SHEETRANGE_RE: Incomplete + +def get_column_interval(start: str | int, end: str | int) -> list[str]: ... +def coordinate_from_string(coord_string: str) -> tuple[str, int]: ... +def absolute_coordinate(coord_string: str) -> str: ... + +col: Incomplete + +def get_column_letter(idx: int) -> str: ... +def column_index_from_string(str_col: str) -> int: ... +def range_boundaries(range_string: str) -> tuple[int, int, int, int]: ... +def rows_from_range(range_string) -> Generator[Incomplete, None, None]: ... +def cols_from_range(range_string) -> Generator[Incomplete, None, None]: ... +def coordinate_to_tuple(coordinate: str) -> tuple[int, int]: ... +def range_to_tuple(range_string: str) -> tuple[str, tuple[int, int, int, int]]: ... +def quote_sheetname(sheetname: str) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/dataframe.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/dataframe.pyi new file mode 100644 index 00000000..2f2938db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/dataframe.pyi @@ -0,0 +1,5 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +def dataframe_to_rows(df, index: bool = ..., header: bool = ...) -> Generator[Incomplete, None, None]: ... +def expand_index(index, header: bool = ...) -> Generator[Incomplete, None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/datetime.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/datetime.pyi new file mode 100644 index 00000000..1fc8bec4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/datetime.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +MAC_EPOCH: Incomplete +WINDOWS_EPOCH: Incomplete +# The following two constants are defined twice in the implementation. +CALENDAR_WINDOWS_1900 = WINDOWS_EPOCH # noqa: F821 +CALENDAR_MAC_1904 = MAC_EPOCH # noqa: F821 +SECS_PER_DAY: int +ISO_FORMAT: str +ISO_REGEX: Incomplete +ISO_DURATION: Incomplete + +def to_ISO8601(dt): ... +def from_ISO8601(formatted_string): ... +def to_excel(dt, epoch=...): ... +def from_excel(value, epoch=..., timedelta: bool = ...): ... +def time_to_days(value): ... +def timedelta_to_days(value): ... +def days_to_time(value): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/escape.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/escape.pyi new file mode 100644 index 00000000..3e182f45 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/escape.pyi @@ -0,0 +1,2 @@ +def escape(value): ... +def unescape(value): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/exceptions.pyi new file mode 100644 index 00000000..5e9ebead --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/exceptions.pyi @@ -0,0 +1,7 @@ +class CellCoordinatesException(Exception): ... +class IllegalCharacterError(Exception): ... +class NamedRangeException(Exception): ... +class SheetTitleException(Exception): ... +class InvalidFileException(Exception): ... +class ReadOnlyWorkbookException(Exception): ... +class WorkbookAlreadySaved(Exception): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/formulas.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/formulas.pyi new file mode 100644 index 00000000..8e49ec3a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/formulas.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +FORMULAE: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/indexed_list.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/indexed_list.pyi new file mode 100644 index 00000000..00420455 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/indexed_list.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +class IndexedList(list[Incomplete]): + clean: bool + def __init__(self, iterable: Incomplete | None = ...) -> None: ... + def __contains__(self, value): ... + def index(self, value): ... + def append(self, value) -> None: ... + def add(self, value): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/inference.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/inference.pyi new file mode 100644 index 00000000..b8411c80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/inference.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +PERCENT_REGEX: Incomplete +TIME_REGEX: Incomplete +NUMBER_REGEX: Incomplete + +def cast_numeric(value): ... +def cast_percentage(value): ... +def cast_time(value): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/protection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/protection.pyi new file mode 100644 index 00000000..28eb69b5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/protection.pyi @@ -0,0 +1 @@ +def hash_password(plaintext_password: str = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/units.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/units.pyi new file mode 100644 index 00000000..dde20506 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/utils/units.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +DEFAULT_ROW_HEIGHT: float +BASE_COL_WIDTH: int +DEFAULT_COLUMN_WIDTH: Incomplete +DEFAULT_LEFT_MARGIN: float +DEFAULT_TOP_MARGIN: float +DEFAULT_HEADER: float + +def inch_to_dxa(value): ... +def dxa_to_inch(value): ... +def dxa_to_cm(value): ... +def cm_to_dxa(value): ... +def pixels_to_EMU(value): ... +def EMU_to_pixels(value): ... +def cm_to_EMU(value): ... +def EMU_to_cm(value): ... +def inch_to_EMU(value): ... +def EMU_to_inch(value): ... +def pixels_to_points(value, dpi: int = ...): ... +def points_to_pixels(value, dpi: int = ...): ... +def degrees_to_angle(value): ... +def angle_to_degrees(value): ... +def short_color(color): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/__init__.pyi new file mode 100644 index 00000000..af0aa0b8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/__init__.pyi @@ -0,0 +1 @@ +from .workbook import Workbook as Workbook diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/_writer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/_writer.pyi new file mode 100644 index 00000000..4789be9f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/_writer.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete + +def get_active_sheet(wb): ... + +class WorkbookWriter: + wb: Incomplete + rels: Incomplete + package: Incomplete + def __init__(self, wb) -> None: ... + def write_properties(self) -> None: ... + def write_worksheets(self) -> None: ... + def write_refs(self) -> None: ... + def write_names(self) -> None: ... + def write_pivots(self) -> None: ... + def write_views(self) -> None: ... + def write(self): ... + def write_rels(self): ... + def write_root_rels(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/child.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/child.pyi new file mode 100644 index 00000000..385ac4cd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/child.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +INVALID_TITLE_REGEX: Incomplete + +def avoid_duplicate_name(names, value): ... + +class _WorkbookChild: + HeaderFooter: Incomplete + def __init__(self, parent: Incomplete | None = ..., title: Incomplete | None = ...) -> None: ... + @property + def parent(self): ... + @property + def encoding(self): ... + @property + def title(self): ... + @title.setter + def title(self, value) -> None: ... + @property + def oddHeader(self): ... + @oddHeader.setter + def oddHeader(self, value) -> None: ... + @property + def oddFooter(self): ... + @oddFooter.setter + def oddFooter(self, value) -> None: ... + @property + def evenHeader(self): ... + @evenHeader.setter + def evenHeader(self, value) -> None: ... + @property + def evenFooter(self): ... + @evenFooter.setter + def evenFooter(self, value) -> None: ... + @property + def firstHeader(self): ... + @firstHeader.setter + def firstHeader(self, value) -> None: ... + @property + def firstFooter(self): ... + @firstFooter.setter + def firstFooter(self, value) -> None: ... + @property + def path(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/defined_name.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/defined_name.pyi new file mode 100644 index 00000000..d58b6980 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/defined_name.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +from openpyxl.descriptors.serialisable import Serialisable + +RESERVED: Incomplete +RESERVED_REGEX: Incomplete +COL_RANGE: str +COL_RANGE_RE: Incomplete +ROW_RANGE: str +ROW_RANGE_RE: Incomplete +TITLES_REGEX: Incomplete + +class DefinedName(Serialisable): + tagname: str + name: Incomplete + comment: Incomplete + customMenu: Incomplete + description: Incomplete + help: Incomplete + statusBar: Incomplete + localSheetId: Incomplete + hidden: Incomplete + function: Incomplete + vbProcedure: Incomplete + xlm: Incomplete + functionGroupId: Incomplete + shortcutKey: Incomplete + publishToServer: Incomplete + workbookParameter: Incomplete + attr_text: Incomplete + value: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + comment: Incomplete | None = ..., + customMenu: Incomplete | None = ..., + description: Incomplete | None = ..., + help: Incomplete | None = ..., + statusBar: Incomplete | None = ..., + localSheetId: Incomplete | None = ..., + hidden: Incomplete | None = ..., + function: Incomplete | None = ..., + vbProcedure: Incomplete | None = ..., + xlm: Incomplete | None = ..., + functionGroupId: Incomplete | None = ..., + shortcutKey: Incomplete | None = ..., + publishToServer: Incomplete | None = ..., + workbookParameter: Incomplete | None = ..., + attr_text: Incomplete | None = ..., + ) -> None: ... + @property + def type(self): ... + @property + def destinations(self) -> Generator[Incomplete, None, None]: ... + @property + def is_reserved(self): ... + @property + def is_external(self): ... + def __iter__(self): ... + +class DefinedNameList(Serialisable): + tagname: str + definedName: Incomplete + def __init__(self, definedName=...) -> None: ... + def append(self, defn) -> None: ... + def __len__(self) -> int: ... + def __contains__(self, name): ... + def __getitem__(self, name): ... + def get(self, name, scope: Incomplete | None = ...): ... + def __delitem__(self, name) -> None: ... + def delete(self, name, scope: Incomplete | None = ...): ... + def localnames(self, scope): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/external_link/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/external_link/__init__.pyi new file mode 100644 index 00000000..37ef9372 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/external_link/__init__.pyi @@ -0,0 +1 @@ +from .external import ExternalLink as ExternalLink diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/external_link/external.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/external_link/external.pyi new file mode 100644 index 00000000..49590a3a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/external_link/external.pyi @@ -0,0 +1,78 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class ExternalCell(Serialisable): # type: ignore[misc] + r: Incomplete + t: Incomplete + vm: Incomplete + v: Incomplete + def __init__( + self, r: Incomplete | None = ..., t: Incomplete | None = ..., vm: Incomplete | None = ..., v: Incomplete | None = ... + ) -> None: ... + +class ExternalRow(Serialisable): # type: ignore[misc] + r: Incomplete + cell: Incomplete + __elements__: Incomplete + def __init__(self, r=..., cell: Incomplete | None = ...) -> None: ... + +class ExternalSheetData(Serialisable): # type: ignore[misc] + sheetId: Incomplete + refreshError: Incomplete + row: Incomplete + __elements__: Incomplete + def __init__(self, sheetId: Incomplete | None = ..., refreshError: Incomplete | None = ..., row=...) -> None: ... + +class ExternalSheetDataSet(Serialisable): # type: ignore[misc] + sheetData: Incomplete + __elements__: Incomplete + def __init__(self, sheetData: Incomplete | None = ...) -> None: ... + +class ExternalSheetNames(Serialisable): # type: ignore[misc] + sheetName: Incomplete + __elements__: Incomplete + def __init__(self, sheetName=...) -> None: ... + +class ExternalDefinedName(Serialisable): + tagname: str + name: Incomplete + refersTo: Incomplete + sheetId: Incomplete + def __init__( + self, name: Incomplete | None = ..., refersTo: Incomplete | None = ..., sheetId: Incomplete | None = ... + ) -> None: ... + +class ExternalBook(Serialisable): + tagname: str + sheetNames: Incomplete + definedNames: Incomplete + sheetDataSet: Incomplete + id: Incomplete + __elements__: Incomplete + def __init__( + self, + sheetNames: Incomplete | None = ..., + definedNames=..., + sheetDataSet: Incomplete | None = ..., + id: Incomplete | None = ..., + ) -> None: ... + +class ExternalLink(Serialisable): + tagname: str + mime_type: str + externalBook: Incomplete + file_link: Incomplete + __elements__: Incomplete + def __init__( + self, + externalBook: Incomplete | None = ..., + ddeLink: Incomplete | None = ..., + oleLink: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + def to_tree(self): ... + @property + def path(self): ... + +def read_external_link(archive, book_path): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/external_reference.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/external_reference.pyi new file mode 100644 index 00000000..507b7acd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/external_reference.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class ExternalReference(Serialisable): + tagname: str + id: Incomplete + def __init__(self, id) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/function_group.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/function_group.pyi new file mode 100644 index 00000000..861dbf40 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/function_group.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class FunctionGroup(Serialisable): + tagname: str + name: Incomplete + def __init__(self, name: Incomplete | None = ...) -> None: ... + +class FunctionGroupList(Serialisable): + tagname: str + builtInGroupCount: Incomplete + functionGroup: Incomplete + __elements__: Incomplete + def __init__(self, builtInGroupCount: int = ..., functionGroup=...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/properties.pyi new file mode 100644 index 00000000..91cc69fa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/properties.pyi @@ -0,0 +1,95 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class WorkbookProperties(Serialisable): + tagname: str + date1904: Incomplete + dateCompatibility: Incomplete + showObjects: Incomplete + showBorderUnselectedTables: Incomplete + filterPrivacy: Incomplete + promptedSolutions: Incomplete + showInkAnnotation: Incomplete + backupFile: Incomplete + saveExternalLinkValues: Incomplete + updateLinks: Incomplete + codeName: Incomplete + hidePivotFieldList: Incomplete + showPivotChartFilter: Incomplete + allowRefreshQuery: Incomplete + publishItems: Incomplete + checkCompatibility: Incomplete + autoCompressPictures: Incomplete + refreshAllConnections: Incomplete + defaultThemeVersion: Incomplete + def __init__( + self, + date1904: Incomplete | None = ..., + dateCompatibility: Incomplete | None = ..., + showObjects: Incomplete | None = ..., + showBorderUnselectedTables: Incomplete | None = ..., + filterPrivacy: Incomplete | None = ..., + promptedSolutions: Incomplete | None = ..., + showInkAnnotation: Incomplete | None = ..., + backupFile: Incomplete | None = ..., + saveExternalLinkValues: Incomplete | None = ..., + updateLinks: Incomplete | None = ..., + codeName: Incomplete | None = ..., + hidePivotFieldList: Incomplete | None = ..., + showPivotChartFilter: Incomplete | None = ..., + allowRefreshQuery: Incomplete | None = ..., + publishItems: Incomplete | None = ..., + checkCompatibility: Incomplete | None = ..., + autoCompressPictures: Incomplete | 
None = ..., + refreshAllConnections: Incomplete | None = ..., + defaultThemeVersion: Incomplete | None = ..., + ) -> None: ... + +class CalcProperties(Serialisable): + tagname: str + calcId: Incomplete + calcMode: Incomplete + fullCalcOnLoad: Incomplete + refMode: Incomplete + iterate: Incomplete + iterateCount: Incomplete + iterateDelta: Incomplete + fullPrecision: Incomplete + calcCompleted: Incomplete + calcOnSave: Incomplete + concurrentCalc: Incomplete + concurrentManualCount: Incomplete + forceFullCalc: Incomplete + def __init__( + self, + calcId: int = ..., + calcMode: Incomplete | None = ..., + fullCalcOnLoad: bool = ..., + refMode: Incomplete | None = ..., + iterate: Incomplete | None = ..., + iterateCount: Incomplete | None = ..., + iterateDelta: Incomplete | None = ..., + fullPrecision: Incomplete | None = ..., + calcCompleted: Incomplete | None = ..., + calcOnSave: Incomplete | None = ..., + concurrentCalc: Incomplete | None = ..., + concurrentManualCount: Incomplete | None = ..., + forceFullCalc: Incomplete | None = ..., + ) -> None: ... + +class FileVersion(Serialisable): + tagname: str + appName: Incomplete + lastEdited: Incomplete + lowestEdited: Incomplete + rupBuild: Incomplete + codeName: Incomplete + def __init__( + self, + appName: Incomplete | None = ..., + lastEdited: Incomplete | None = ..., + lowestEdited: Incomplete | None = ..., + rupBuild: Incomplete | None = ..., + codeName: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/protection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/protection.pyi new file mode 100644 index 00000000..44b18bf2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/protection.pyi @@ -0,0 +1,77 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class WorkbookProtection(Serialisable): + tagname: str + workbook_password: Incomplete + workbookPasswordCharacterSet: Incomplete + revision_password: Incomplete + revisionsPasswordCharacterSet: Incomplete + lockStructure: Incomplete + lock_structure: Incomplete + lockWindows: Incomplete + lock_windows: Incomplete + lockRevision: Incomplete + lock_revision: Incomplete + revisionsAlgorithmName: Incomplete + revisionsHashValue: Incomplete + revisionsSaltValue: Incomplete + revisionsSpinCount: Incomplete + workbookAlgorithmName: Incomplete + workbookHashValue: Incomplete + workbookSaltValue: Incomplete + workbookSpinCount: Incomplete + __attrs__: Incomplete + def __init__( + self, + workbookPassword: Incomplete | None = ..., + workbookPasswordCharacterSet: Incomplete | None = ..., + revisionsPassword: Incomplete | None = ..., + revisionsPasswordCharacterSet: Incomplete | None = ..., + lockStructure: Incomplete | None = ..., + lockWindows: Incomplete | None = ..., + lockRevision: Incomplete | None = ..., + revisionsAlgorithmName: Incomplete | None = ..., + revisionsHashValue: Incomplete | None = ..., + revisionsSaltValue: Incomplete | None = ..., + revisionsSpinCount: Incomplete | None = ..., + workbookAlgorithmName: Incomplete | None = ..., + workbookHashValue: Incomplete | None = ..., + workbookSaltValue: Incomplete | None = ..., + workbookSpinCount: Incomplete | None = ..., + ) -> None: ... + def set_workbook_password(self, value: str = ..., already_hashed: bool = ...) -> None: ... 
+ @property + def workbookPassword(self): ... + @workbookPassword.setter + def workbookPassword(self, value) -> None: ... + def set_revisions_password(self, value: str = ..., already_hashed: bool = ...) -> None: ... + @property + def revisionsPassword(self): ... + @revisionsPassword.setter + def revisionsPassword(self, value) -> None: ... + @classmethod + def from_tree(cls, node): ... + +DocumentSecurity = WorkbookProtection + +class FileSharing(Serialisable): + tagname: str + readOnlyRecommended: Incomplete + userName: Incomplete + reservationPassword: Incomplete + algorithmName: Incomplete + hashValue: Incomplete + saltValue: Incomplete + spinCount: Incomplete + def __init__( + self, + readOnlyRecommended: Incomplete | None = ..., + userName: Incomplete | None = ..., + reservationPassword: Incomplete | None = ..., + algorithmName: Incomplete | None = ..., + hashValue: Incomplete | None = ..., + saltValue: Incomplete | None = ..., + spinCount: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/smart_tags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/smart_tags.pyi new file mode 100644 index 00000000..d5be770c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/smart_tags.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class SmartTag(Serialisable): + tagname: str + namespaceUri: Incomplete + name: Incomplete + url: Incomplete + def __init__( + self, namespaceUri: Incomplete | None = ..., name: Incomplete | None = ..., url: Incomplete | None = ... + ) -> None: ... + +class SmartTagList(Serialisable): + tagname: str + smartTagType: Incomplete + __elements__: Incomplete + def __init__(self, smartTagType=...) -> None: ... + +class SmartTagProperties(Serialisable): + tagname: str + embed: Incomplete + show: Incomplete + def __init__(self, embed: Incomplete | None = ..., show: Incomplete | None = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/views.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/views.pyi new file mode 100644 index 00000000..1a6e8e0b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/views.pyi @@ -0,0 +1,95 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class BookView(Serialisable): + tagname: str + visibility: Incomplete + minimized: Incomplete + showHorizontalScroll: Incomplete + showVerticalScroll: Incomplete + showSheetTabs: Incomplete + xWindow: Incomplete + yWindow: Incomplete + windowWidth: Incomplete + windowHeight: Incomplete + tabRatio: Incomplete + firstSheet: Incomplete + activeTab: Incomplete + autoFilterDateGrouping: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + visibility: str = ..., + minimized: bool = ..., + showHorizontalScroll: bool = ..., + showVerticalScroll: bool = ..., + showSheetTabs: bool = ..., + xWindow: Incomplete | None = ..., + yWindow: Incomplete | None = ..., + windowWidth: Incomplete | None = ..., + windowHeight: Incomplete | None = ..., + tabRatio: int = ..., + firstSheet: int = ..., + activeTab: int = ..., + autoFilterDateGrouping: bool = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class CustomWorkbookView(Serialisable): + tagname: str + name: Incomplete + guid: Incomplete + autoUpdate: Incomplete + mergeInterval: Incomplete + changesSavedWin: Incomplete + onlySync: Incomplete + personalView: Incomplete + includePrintSettings: Incomplete + includeHiddenRowCol: Incomplete + maximized: Incomplete + minimized: Incomplete + showHorizontalScroll: Incomplete + showVerticalScroll: Incomplete + showSheetTabs: Incomplete + xWindow: Incomplete + yWindow: Incomplete + windowWidth: Incomplete + windowHeight: Incomplete + tabRatio: Incomplete + activeSheetId: Incomplete + showFormulaBar: Incomplete + showStatusbar: Incomplete + showComments: Incomplete + showObjects: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + guid: Incomplete | None = ..., + autoUpdate: Incomplete | None = ..., + mergeInterval: Incomplete | None = ..., + changesSavedWin: Incomplete | None = ..., + onlySync: Incomplete | None = ..., + personalView: Incomplete | None = ..., + includePrintSettings: Incomplete | None = ..., + includeHiddenRowCol: Incomplete | None = ..., + maximized: Incomplete | None = ..., + minimized: Incomplete | None = ..., + showHorizontalScroll: Incomplete | None = ..., + showVerticalScroll: Incomplete | None = ..., + showSheetTabs: Incomplete | None = ..., + xWindow: Incomplete | None = ..., + yWindow: Incomplete | None = ..., + windowWidth: Incomplete | None = ..., + windowHeight: Incomplete | None = ..., + tabRatio: Incomplete | None = ..., + activeSheetId: Incomplete | None = ..., + showFormulaBar: Incomplete | None = ..., + showStatusbar: Incomplete | None = ..., + showComments: str = ..., + showObjects: str = ..., + extLst: Incomplete | None = ..., + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/web.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/web.pyi new file mode 100644 index 00000000..0f80700a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/web.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class WebPublishObject(Serialisable): + tagname: str + id: Incomplete + divId: Incomplete + sourceObject: Incomplete + destinationFile: Incomplete + title: Incomplete + autoRepublish: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + divId: Incomplete | None = ..., + sourceObject: Incomplete | None = ..., + destinationFile: Incomplete | None = ..., + title: Incomplete | None = ..., + autoRepublish: Incomplete | None = ..., + ) -> None: ... + +class WebPublishObjectList(Serialisable): + tagname: str + # Overwritten by property below + # count: Integer + webPublishObject: Incomplete + __elements__: Incomplete + def __init__(self, count: Incomplete | None = ..., webPublishObject=...) -> None: ... + @property + def count(self): ... + +class WebPublishing(Serialisable): + tagname: str + css: Incomplete + thicket: Incomplete + longFileNames: Incomplete + vml: Incomplete + allowPng: Incomplete + targetScreenSize: Incomplete + dpi: Incomplete + codePage: Incomplete + characterSet: Incomplete + def __init__( + self, + css: Incomplete | None = ..., + thicket: Incomplete | None = ..., + longFileNames: Incomplete | None = ..., + vml: Incomplete | None = ..., + allowPng: Incomplete | None = ..., + targetScreenSize: str = ..., + dpi: Incomplete | None = ..., + codePage: Incomplete | None = ..., + characterSet: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/workbook.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/workbook.pyi new file mode 100644 index 00000000..5b5543e6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/workbook/workbook.pyi @@ -0,0 +1,83 @@ +from _typeshed import Incomplete, StrPath +from collections.abc import Iterator +from datetime import datetime +from typing import IO + +from openpyxl.chartsheet.chartsheet import Chartsheet +from openpyxl.styles.named_styles import NamedStyle +from openpyxl.workbook.child import _WorkbookChild +from openpyxl.workbook.defined_name import DefinedName +from openpyxl.worksheet._write_only import WriteOnlyWorksheet +from openpyxl.worksheet.worksheet import Worksheet + +INTEGER_TYPES: Incomplete + +class Workbook: + template: bool + path: str + defined_names: Incomplete + properties: Incomplete + security: Incomplete + shared_strings: Incomplete + loaded_theme: Incomplete + vba_archive: Incomplete + is_template: bool + code_name: Incomplete + encoding: str + iso_dates: Incomplete + rels: Incomplete + calculation: Incomplete + views: Incomplete + def __init__(self, write_only: bool = ..., iso_dates: bool = ...) -> None: ... + @property + def epoch(self) -> datetime: ... + @epoch.setter + def epoch(self, value: datetime) -> None: ... + @property + def read_only(self) -> bool: ... + @property + def data_only(self) -> bool: ... + @property + def write_only(self) -> bool: ... 
+ @property + def excel_base_date(self) -> datetime: ... + @property + def active(self) -> _WorkbookChild | None: ... + @active.setter + def active(self, value: _WorkbookChild | int) -> None: ... + def create_sheet(self, title: str | None = ..., index: int | None = ...): ... + def move_sheet(self, sheet: Worksheet | str, offset: int = ...) -> None: ... + def remove(self, worksheet: Worksheet) -> None: ... + def remove_sheet(self, worksheet: Worksheet) -> None: ... + def create_chartsheet(self, title: str | None = ..., index: int | None = ...) -> Chartsheet: ... + def get_sheet_by_name(self, name: str) -> Worksheet: ... + def __contains__(self, key: str) -> bool: ... + def index(self, worksheet: Worksheet) -> int: ... + def get_index(self, worksheet: Worksheet) -> int: ... + def __getitem__(self, key: str) -> Worksheet: ... + def __delitem__(self, key: str) -> None: ... + def __iter__(self) -> Iterator[Worksheet]: ... + def get_sheet_names(self) -> list[Worksheet]: ... + @property + def worksheets(self) -> list[Worksheet]: ... + @property + def chartsheets(self) -> list[Chartsheet]: ... + @property + def sheetnames(self) -> list[str]: ... + def create_named_range( + self, name: str, worksheet: Worksheet | None = ..., value: str | Incomplete | None = ..., scope: Incomplete | None = ... + ) -> None: ... + def add_named_style(self, style: NamedStyle) -> None: ... + @property + def named_styles(self) -> list[str]: ... + def get_named_ranges(self) -> list[DefinedName] | tuple[DefinedName, ...]: ... + def add_named_range(self, named_range: DefinedName) -> None: ... + def get_named_range(self, name: str) -> DefinedName: ... + def remove_named_range(self, named_range: DefinedName) -> None: ... + @property + def mime_type(self) -> str: ... + def save(self, filename: StrPath | IO[bytes]) -> None: ... + @property + def style_names(self) -> list[str]: ... + def copy_worksheet(self, from_worksheet: Worksheet) -> Worksheet | WriteOnlyWorksheet: ... + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_read_only.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_read_only.pyi new file mode 100644 index 00000000..7be20376 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_read_only.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +def read_dimension(source): ... + +class ReadOnlyWorksheet: + cell: Incomplete + iter_rows: Incomplete + @property + def values(self): ... + @property + def rows(self): ... + __getitem__: Incomplete + __iter__: Incomplete + parent: Incomplete + title: Incomplete + sheet_state: str + def __init__(self, parent_workbook, title, worksheet_path, shared_strings) -> None: ... + def calculate_dimension(self, force: bool = ...): ... + def reset_dimensions(self) -> None: ... + @property + def min_row(self): ... + @property + def max_row(self): ... + @property + def min_column(self): ... + @property + def max_column(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_reader.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_reader.pyi new file mode 100644 index 00000000..08286065 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_reader.pyi @@ -0,0 +1,88 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +CELL_TAG: Incomplete +VALUE_TAG: Incomplete +FORMULA_TAG: Incomplete +MERGE_TAG: Incomplete +INLINE_STRING: Incomplete +COL_TAG: Incomplete +ROW_TAG: Incomplete +CF_TAG: Incomplete +LEGACY_TAG: Incomplete +PROT_TAG: Incomplete +EXT_TAG: Incomplete +HYPERLINK_TAG: Incomplete +TABLE_TAG: Incomplete +PRINT_TAG: Incomplete +MARGINS_TAG: Incomplete +PAGE_TAG: Incomplete +HEADER_TAG: Incomplete +FILTER_TAG: Incomplete +VALIDATION_TAG: Incomplete +PROPERTIES_TAG: Incomplete +VIEWS_TAG: Incomplete +FORMAT_TAG: Incomplete +ROW_BREAK_TAG: Incomplete +COL_BREAK_TAG: Incomplete +SCENARIOS_TAG: Incomplete +DATA_TAG: Incomplete +DIMENSION_TAG: Incomplete +CUSTOM_VIEWS_TAG: Incomplete + +class WorkSheetParser: + min_row: Incomplete + epoch: Incomplete + source: Incomplete + shared_strings: Incomplete + data_only: Incomplete + shared_formulae: Incomplete + array_formulae: Incomplete + row_counter: int + tables: Incomplete + date_formats: Incomplete + timedelta_formats: Incomplete + row_dimensions: Incomplete + column_dimensions: Incomplete + number_formats: Incomplete + keep_vba: bool + hyperlinks: Incomplete + formatting: Incomplete + legacy_drawing: Incomplete + merged_cells: Incomplete + row_breaks: Incomplete + col_breaks: Incomplete + def __init__( + self, src, shared_strings, data_only: bool = ..., epoch=..., date_formats=..., timedelta_formats=... + ) -> None: ... + def parse(self) -> Generator[Incomplete, None, None]: ... + def parse_dimensions(self): ... + col_counter: Incomplete + def parse_cell(self, element): ... + def parse_formula(self, element): ... + def parse_column_dimensions(self, col) -> None: ... + def parse_row(self, row): ... + def parse_formatting(self, element) -> None: ... + protection: Incomplete + def parse_sheet_protection(self, element) -> None: ... + def parse_extensions(self, element) -> None: ... + def parse_legacy(self, element) -> None: ... + def parse_row_breaks(self, element) -> None: ... + def parse_col_breaks(self, element) -> None: ... + def parse_custom_views(self, element) -> None: ... + +class WorksheetReader: + ws: Incomplete + parser: Incomplete + tables: Incomplete + def __init__(self, ws, xml_source, shared_strings, data_only) -> None: ... + def bind_cells(self) -> None: ... + def bind_formatting(self) -> None: ... + def bind_tables(self) -> None: ... + def bind_merged_cells(self) -> None: ... + def bind_hyperlinks(self) -> None: ... + def normalize_merged_cell_link(self, coord): ... + def bind_col_dimensions(self) -> None: ... + def bind_row_dimensions(self) -> None: ... + def bind_properties(self) -> None: ... + def bind_all(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_write_only.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_write_only.pyi new file mode 100644 index 00000000..d2c5195e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_write_only.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from openpyxl.workbook.child import _WorkbookChild + +class WriteOnlyWorksheet(_WorkbookChild): + mime_type: Incomplete + add_chart: Incomplete + add_image: Incomplete + add_table: Incomplete + @property + def tables(self): ... + @property + def print_titles(self): ... + print_title_cols: Incomplete + print_title_rows: Incomplete + freeze_panes: Incomplete + print_area: Incomplete + @property + def sheet_view(self): ... + def __init__(self, parent, title) -> None: ... + @property + def closed(self): ... + def close(self) -> None: ... + def append(self, row) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_writer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_writer.pyi new file mode 100644 index 00000000..f5cc760b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/_writer.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +ALL_TEMP_FILES: Incomplete + +def create_temporary_file(suffix: str = ...): ... + +class WorksheetWriter: + ws: Incomplete + out: Incomplete + xf: Incomplete + def __init__(self, ws, out: Incomplete | None = ...) -> None: ... + def write_properties(self) -> None: ... + def write_dimensions(self) -> None: ... + def write_format(self) -> None: ... + def write_views(self) -> None: ... + def write_cols(self) -> None: ... + def write_top(self) -> None: ... + def rows(self): ... + def write_rows(self) -> None: ... + def write_row(self, xf, row, row_idx) -> None: ... + def write_protection(self) -> None: ... + def write_scenarios(self) -> None: ... + def write_filter(self) -> None: ... + def write_sort(self) -> None: ... + def write_merged_cells(self) -> None: ... + def write_formatting(self) -> None: ... + def write_validations(self) -> None: ... + def write_hyperlinks(self) -> None: ... + def write_print(self) -> None: ... + def write_margins(self) -> None: ... + def write_page(self) -> None: ... + def write_header(self) -> None: ... + def write_breaks(self) -> None: ... + def write_drawings(self) -> None: ... + def write_legacy(self) -> None: ... + def write_tables(self) -> None: ... + def get_stream(self) -> Generator[Incomplete, Incomplete, None]: ... + def write_tail(self) -> None: ... + def write(self) -> None: ... + def close(self) -> None: ... + def read(self): ... + def cleanup(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/cell_range.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/cell_range.pyi new file mode 100644 index 00000000..72edc3e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/cell_range.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +from openpyxl.descriptors import Strict +from openpyxl.descriptors.serialisable import Serialisable + +class CellRange(Serialisable): # type: ignore[misc] + min_col: Incomplete + min_row: Incomplete + max_col: Incomplete + max_row: Incomplete + title: Incomplete + def __init__( + self, + range_string: Incomplete | None = ..., + min_col: Incomplete | None = ..., + min_row: Incomplete | None = ..., + max_col: Incomplete | None = ..., + max_row: Incomplete | None = ..., + title: Incomplete | None = ..., + ) -> None: ... + @property + def bounds(self): ... + @property + def coord(self): ... + @property + def rows(self) -> Generator[Incomplete, None, None]: ... + @property + def cols(self) -> Generator[Incomplete, None, None]: ... + @property + def cells(self): ... + def __copy__(self): ... + def shift(self, col_shift: int = ..., row_shift: int = ...) -> None: ... + def __ne__(self, other): ... + def __eq__(self, other): ... + def issubset(self, other): ... + __le__: Incomplete + def __lt__(self, other): ... + def issuperset(self, other): ... + __ge__: Incomplete + def __contains__(self, coord): ... + def __gt__(self, other): ... + def isdisjoint(self, other): ... + def intersection(self, other): ... + __and__: Incomplete + def union(self, other): ... + __or__: Incomplete + def __iter__(self): ... + def expand(self, right: int = ..., down: int = ..., left: int = ..., up: int = ...) -> None: ... + def shrink(self, right: int = ..., bottom: int = ..., left: int = ..., top: int = ...) -> None: ... + @property + def size(self): ... + @property + def top(self): ... + @property + def bottom(self): ... + @property + def left(self): ... + @property + def right(self): ... + +class MultiCellRange(Strict): + ranges: Incomplete + def __init__(self, ranges=...) -> None: ... + def __contains__(self, coord): ... + def add(self, coord) -> None: ... + def __iadd__(self, coord): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __bool__(self) -> bool: ... + def remove(self, coord) -> None: ... + def __iter__(self): ... + def __copy__(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/cell_watch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/cell_watch.pyi new file mode 100644 index 00000000..35ebbda0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/cell_watch.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class CellWatch(Serialisable): + tagname: str + r: Incomplete + def __init__(self, r: Incomplete | None = ...) -> None: ... + +class CellWatches(Serialisable): + tagname: str + cellWatch: Incomplete + __elements__: Incomplete + def __init__(self, cellWatch=...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/controls.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/controls.pyi new file mode 100644 index 00000000..f56d3ae3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/controls.pyi @@ -0,0 +1,57 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class ControlProperty(Serialisable): + tagname: str + anchor: Incomplete + locked: Incomplete + defaultSize: Incomplete + disabled: Incomplete + recalcAlways: Incomplete + uiObject: Incomplete + autoFill: Incomplete + autoLine: Incomplete + autoPict: Incomplete + macro: Incomplete + altText: Incomplete + linkedCell: Incomplete + listFillRange: Incomplete + cf: Incomplete + id: Incomplete + __elements__: Incomplete + def __init__( + self, + anchor: Incomplete | None = ..., + locked: bool = ..., + defaultSize: bool = ..., + _print: bool = ..., + disabled: bool = ..., + recalcAlways: bool = ..., + uiObject: bool = ..., + autoFill: bool = ..., + autoLine: bool = ..., + autoPict: bool = ..., + macro: Incomplete | None = ..., + altText: Incomplete | None = ..., + linkedCell: Incomplete | None = ..., + listFillRange: Incomplete | None = ..., + cf: str = ..., + id: Incomplete | None = ..., + ) -> None: ... + +class Control(Serialisable): + tagname: str + controlPr: Incomplete + shapeId: Incomplete + name: Incomplete + __elements__: Incomplete + def __init__( + self, controlPr: Incomplete | None = ..., shapeId: Incomplete | None = ..., name: Incomplete | None = ... + ) -> None: ... + +class Controls(Serialisable): + tagname: str + control: Incomplete + __elements__: Incomplete + def __init__(self, control=...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/copier.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/copier.pyi new file mode 100644 index 00000000..5e8886fd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/copier.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +class WorksheetCopy: + source: Incomplete + target: Incomplete + def __init__(self, source_worksheet, target_worksheet) -> None: ... + def copy_worksheet(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/custom.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/custom.pyi new file mode 100644 index 00000000..ba95ab03 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/custom.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class CustomProperty(Serialisable): + tagname: str + name: Incomplete + def __init__(self, name: Incomplete | None = ...) -> None: ... + +class CustomProperties(Serialisable): + tagname: str + customPr: Incomplete + __elements__: Incomplete + def __init__(self, customPr=...) -> None: ... 
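WorksheetCopy (copier.pyi above) is used by Workbook.copy_worksheet for same-workbook copies. A small sketch of that documented entry point, assuming openpyxl is installed:

from openpyxl import Workbook

wb = Workbook()
src = wb.active
src["A1"] = "hello"
dup = wb.copy_worksheet(src)          # copies values and styles within the workbook
print(dup.title, dup["A1"].value)     # e.g. "Sheet Copy" hello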
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/datavalidation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/datavalidation.pyi new file mode 100644 index 00000000..51bc50b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/datavalidation.pyi @@ -0,0 +1,72 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +def collapse_cell_addresses(cells, input_ranges=...): ... +def expand_cell_ranges(range_string): ... + +class DataValidation(Serialisable): + tagname: str + sqref: Incomplete + cells: Incomplete + ranges: Incomplete + showErrorMessage: Incomplete + showDropDown: Incomplete + hide_drop_down: Incomplete + showInputMessage: Incomplete + allowBlank: Incomplete + allow_blank: Incomplete + errorTitle: Incomplete + error: Incomplete + promptTitle: Incomplete + prompt: Incomplete + formula1: Incomplete + formula2: Incomplete + type: Incomplete + errorStyle: Incomplete + imeMode: Incomplete + operator: Incomplete + validation_type: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + formula1: Incomplete | None = ..., + formula2: Incomplete | None = ..., + showErrorMessage: bool = ..., + showInputMessage: bool = ..., + showDropDown: Incomplete | None = ..., + allowBlank: Incomplete | None = ..., + sqref=..., + promptTitle: Incomplete | None = ..., + errorStyle: Incomplete | None = ..., + error: Incomplete | None = ..., + prompt: Incomplete | None = ..., + errorTitle: Incomplete | None = ..., + imeMode: Incomplete | None = ..., + operator: Incomplete | None = ..., + allow_blank: Incomplete | None = ..., + ) -> None: ... + def add(self, cell) -> None: ... + def __contains__(self, cell): ... + +class DataValidationList(Serialisable): + tagname: str + disablePrompts: Incomplete + xWindow: Incomplete + yWindow: Incomplete + dataValidation: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__( + self, + disablePrompts: Incomplete | None = ..., + xWindow: Incomplete | None = ..., + yWindow: Incomplete | None = ..., + count: Incomplete | None = ..., + dataValidation=..., + ) -> None: ... + @property + def count(self): ... + def __len__(self) -> int: ... + def append(self, dv) -> None: ... + def to_tree(self, tagname: Incomplete | None = ...): ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/dimensions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/dimensions.pyi new file mode 100644 index 00000000..8193a77b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/dimensions.pyi @@ -0,0 +1,123 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import Strict +from openpyxl.descriptors.serialisable import Serialisable +from openpyxl.styles.styleable import StyleableObject +from openpyxl.utils.bound_dictionary import BoundDictionary + +class Dimension(Strict, StyleableObject): + __fields__: Incomplete + index: Incomplete + hidden: Incomplete + outlineLevel: Incomplete + outline_level: Incomplete + collapsed: Incomplete + style: Incomplete + def __init__( + self, index, hidden, outlineLevel, collapsed, worksheet, visible: bool = ..., style: Incomplete | None = ... + ) -> None: ... 
+ def __iter__(self): ... + def __copy__(self): ... + +class RowDimension(Dimension): + __fields__: Incomplete + r: Incomplete + s: Incomplete + ht: Incomplete + height: Incomplete + thickBot: Incomplete + thickTop: Incomplete + def __init__( + self, + worksheet, + index: int = ..., + ht: Incomplete | None = ..., + customHeight: Incomplete | None = ..., + s: Incomplete | None = ..., + customFormat: Incomplete | None = ..., + hidden: bool = ..., + outlineLevel: int = ..., + outline_level: Incomplete | None = ..., + collapsed: bool = ..., + visible: Incomplete | None = ..., + height: Incomplete | None = ..., + r: Incomplete | None = ..., + spans: Incomplete | None = ..., + thickBot: Incomplete | None = ..., + thickTop: Incomplete | None = ..., + **kw, + ) -> None: ... + @property + def customFormat(self): ... + @property + def customHeight(self): ... + +class ColumnDimension(Dimension): + width: Incomplete + bestFit: Incomplete + auto_size: Incomplete + index: Incomplete + min: Incomplete + max: Incomplete + collapsed: Incomplete + __fields__: Incomplete + def __init__( + self, + worksheet, + index: str = ..., + width=..., + bestFit: bool = ..., + hidden: bool = ..., + outlineLevel: int = ..., + outline_level: Incomplete | None = ..., + collapsed: bool = ..., + style: Incomplete | None = ..., + min: Incomplete | None = ..., + max: Incomplete | None = ..., + customWidth: bool = ..., + visible: Incomplete | None = ..., + auto_size: Incomplete | None = ..., + ) -> None: ... + @property + def customWidth(self): ... + def reindex(self) -> None: ... + def to_tree(self): ... + +class DimensionHolder(BoundDictionary): + worksheet: Incomplete + max_outline: Incomplete + default_factory: Incomplete + def __init__(self, worksheet, reference: str = ..., default_factory: Incomplete | None = ...) -> None: ... + def group(self, start, end: Incomplete | None = ..., outline_level: int = ..., hidden: bool = ...) -> None: ... + def to_tree(self): ... + +class SheetFormatProperties(Serialisable): + tagname: str + baseColWidth: Incomplete + defaultColWidth: Incomplete + defaultRowHeight: Incomplete + customHeight: Incomplete + zeroHeight: Incomplete + thickTop: Incomplete + thickBottom: Incomplete + outlineLevelRow: Incomplete + outlineLevelCol: Incomplete + def __init__( + self, + baseColWidth: int = ..., + defaultColWidth: Incomplete | None = ..., + defaultRowHeight: int = ..., + customHeight: Incomplete | None = ..., + zeroHeight: Incomplete | None = ..., + thickTop: Incomplete | None = ..., + thickBottom: Incomplete | None = ..., + outlineLevelRow: Incomplete | None = ..., + outlineLevelCol: Incomplete | None = ..., + ) -> None: ... + +class SheetDimension(Serialisable): + tagname: str + ref: Incomplete + def __init__(self, ref: Incomplete | None = ...) -> None: ... + @property + def boundaries(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/drawing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/drawing.pyi new file mode 100644 index 00000000..25de5747 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/drawing.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Drawing(Serialisable): + tagname: str + id: Incomplete + def __init__(self, id: Incomplete | None = ...) -> None: ... 
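The datavalidation and dimensions stubs above map to everyday worksheet tweaks. A minimal sketch; the list values and sizes are arbitrary:

from openpyxl import Workbook
from openpyxl.worksheet.datavalidation import DataValidation

wb = Workbook()
ws = wb.active
dv = DataValidation(type="list", formula1='"red,green,blue"', allow_blank=True)
ws.add_data_validation(dv)            # register the rule with the sheet
dv.add("B1:B10")                      # then attach it to a range
ws.column_dimensions["B"].width = 18  # ColumnDimension
ws.row_dimensions[1].height = 24      # RowDimension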
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/errors.pyi new file mode 100644 index 00000000..0481d766 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/errors.pyi @@ -0,0 +1,47 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Extension(Serialisable): + tagname: str + uri: Incomplete + def __init__(self, uri: Incomplete | None = ...) -> None: ... + +class ExtensionList(Serialisable): + tagname: str + ext: Incomplete + __elements__: Incomplete + def __init__(self, ext=...) -> None: ... + +class IgnoredError(Serialisable): + tagname: str + sqref: Incomplete + evalError: Incomplete + twoDigitTextYear: Incomplete + numberStoredAsText: Incomplete + formula: Incomplete + formulaRange: Incomplete + unlockedFormula: Incomplete + emptyCellReference: Incomplete + listDataValidation: Incomplete + calculatedColumn: Incomplete + def __init__( + self, + sqref: Incomplete | None = ..., + evalError: bool = ..., + twoDigitTextYear: bool = ..., + numberStoredAsText: bool = ..., + formula: bool = ..., + formulaRange: bool = ..., + unlockedFormula: bool = ..., + emptyCellReference: bool = ..., + listDataValidation: bool = ..., + calculatedColumn: bool = ..., + ) -> None: ... + +class IgnoredErrors(Serialisable): + tagname: str + ignoredError: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, ignoredError=..., extLst: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/filters.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/filters.pyi new file mode 100644 index 00000000..7526971d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/filters.pyi @@ -0,0 +1,172 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class SortCondition(Serialisable): + tagname: str + descending: Incomplete + sortBy: Incomplete + ref: Incomplete + customList: Incomplete + dxfId: Incomplete + iconSet: Incomplete + iconId: Incomplete + def __init__( + self, + ref: Incomplete | None = ..., + descending: Incomplete | None = ..., + sortBy: Incomplete | None = ..., + customList: Incomplete | None = ..., + dxfId: Incomplete | None = ..., + iconSet: Incomplete | None = ..., + iconId: Incomplete | None = ..., + ) -> None: ... + +class SortState(Serialisable): + tagname: str + columnSort: Incomplete + caseSensitive: Incomplete + sortMethod: Incomplete + ref: Incomplete + sortCondition: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + columnSort: Incomplete | None = ..., + caseSensitive: Incomplete | None = ..., + sortMethod: Incomplete | None = ..., + ref: Incomplete | None = ..., + sortCondition=..., + extLst: Incomplete | None = ..., + ) -> None: ... + def __bool__(self) -> bool: ... + +class IconFilter(Serialisable): + tagname: str + iconSet: Incomplete + iconId: Incomplete + def __init__(self, iconSet: Incomplete | None = ..., iconId: Incomplete | None = ...) -> None: ... 
+ +class ColorFilter(Serialisable): + tagname: str + dxfId: Incomplete + cellColor: Incomplete + def __init__(self, dxfId: Incomplete | None = ..., cellColor: Incomplete | None = ...) -> None: ... + +class DynamicFilter(Serialisable): + tagname: str + type: Incomplete + val: Incomplete + valIso: Incomplete + maxVal: Incomplete + maxValIso: Incomplete + def __init__( + self, + type: Incomplete | None = ..., + val: Incomplete | None = ..., + valIso: Incomplete | None = ..., + maxVal: Incomplete | None = ..., + maxValIso: Incomplete | None = ..., + ) -> None: ... + +class CustomFilter(Serialisable): + tagname: str + operator: Incomplete + val: Incomplete + def __init__(self, operator: Incomplete | None = ..., val: Incomplete | None = ...) -> None: ... + +class CustomFilters(Serialisable): + tagname: str + customFilter: Incomplete + __elements__: Incomplete + def __init__(self, _and: Incomplete | None = ..., customFilter=...) -> None: ... + +class Top10(Serialisable): + tagname: str + top: Incomplete + percent: Incomplete + val: Incomplete + filterVal: Incomplete + def __init__( + self, + top: Incomplete | None = ..., + percent: Incomplete | None = ..., + val: Incomplete | None = ..., + filterVal: Incomplete | None = ..., + ) -> None: ... + +class DateGroupItem(Serialisable): + tagname: str + year: Incomplete + month: Incomplete + day: Incomplete + hour: Incomplete + minute: Incomplete + second: Incomplete + dateTimeGrouping: Incomplete + def __init__( + self, + year: Incomplete | None = ..., + month: Incomplete | None = ..., + day: Incomplete | None = ..., + hour: Incomplete | None = ..., + minute: Incomplete | None = ..., + second: Incomplete | None = ..., + dateTimeGrouping: Incomplete | None = ..., + ) -> None: ... + +class Filters(Serialisable): + tagname: str + blank: Incomplete + calendarType: Incomplete + filter: Incomplete + dateGroupItem: Incomplete + __elements__: Incomplete + def __init__( + self, blank: Incomplete | None = ..., calendarType: Incomplete | None = ..., filter=..., dateGroupItem=... + ) -> None: ... + +class FilterColumn(Serialisable): + tagname: str + colId: Incomplete + col_id: Incomplete + hiddenButton: Incomplete + showButton: Incomplete + filters: Incomplete + top10: Incomplete + customFilters: Incomplete + dynamicFilter: Incomplete + colorFilter: Incomplete + iconFilter: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + colId: Incomplete | None = ..., + hiddenButton: Incomplete | None = ..., + showButton: Incomplete | None = ..., + filters: Incomplete | None = ..., + top10: Incomplete | None = ..., + customFilters: Incomplete | None = ..., + dynamicFilter: Incomplete | None = ..., + colorFilter: Incomplete | None = ..., + iconFilter: Incomplete | None = ..., + extLst: Incomplete | None = ..., + blank: Incomplete | None = ..., + vals: Incomplete | None = ..., + ) -> None: ... + +class AutoFilter(Serialisable): + tagname: str + ref: Incomplete + filterColumn: Incomplete + sortState: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, ref: Incomplete | None = ..., filterColumn=..., sortState: Incomplete | None = ..., extLst: Incomplete | None = ... + ) -> None: ... + def __bool__(self) -> bool: ... + def add_filter_column(self, col_id, vals, blank: bool = ...) -> None: ... + def add_sort_condition(self, ref, descending: bool = ...) -> None: ... 
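AutoFilter, FilterColumn and SortState above correspond to the documented ws.auto_filter API. A sketch, with placeholder data and filename; note openpyxl only records the filter and sort definitions, Excel applies them when the file is opened:

from openpyxl import Workbook

wb = Workbook()
ws = wb.active
ws.append(["fruit", "qty"])
ws.append(["apple", 4])
ws.append(["pear", 2])
ws.auto_filter.ref = "A1:B3"                    # area the filter applies to
ws.auto_filter.add_filter_column(0, ["apple"])  # FilterColumn with explicit values
ws.auto_filter.add_sort_condition("B2:B3")      # SortState / SortCondition
wb.save("filtered.xlsx")                        # placeholder path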
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/header_footer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/header_footer.pyi new file mode 100644 index 00000000..3c476770 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/header_footer.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import Strict +from openpyxl.descriptors.serialisable import Serialisable + +FONT_PATTERN: str +COLOR_PATTERN: str +SIZE_REGEX: str +FORMAT_REGEX: Incomplete + +class _HeaderFooterPart(Strict): + text: Incomplete + font: Incomplete + size: Incomplete + RGB: str + color: Incomplete + def __init__( + self, + text: Incomplete | None = ..., + font: Incomplete | None = ..., + size: Incomplete | None = ..., + color: Incomplete | None = ..., + ) -> None: ... + def __bool__(self) -> bool: ... + @classmethod + def from_str(cls, text): ... + +class HeaderFooterItem(Strict): + left: Incomplete + center: Incomplete + centre: Incomplete + right: Incomplete + def __init__( + self, left: Incomplete | None = ..., right: Incomplete | None = ..., center: Incomplete | None = ... + ) -> None: ... + def __bool__(self) -> bool: ... + def to_tree(self, tagname): ... + @classmethod + def from_tree(cls, node): ... + +class HeaderFooter(Serialisable): + tagname: str + differentOddEven: Incomplete + differentFirst: Incomplete + scaleWithDoc: Incomplete + alignWithMargins: Incomplete + oddHeader: Incomplete + oddFooter: Incomplete + evenHeader: Incomplete + evenFooter: Incomplete + firstHeader: Incomplete + firstFooter: Incomplete + __elements__: Incomplete + def __init__( + self, + differentOddEven: Incomplete | None = ..., + differentFirst: Incomplete | None = ..., + scaleWithDoc: Incomplete | None = ..., + alignWithMargins: Incomplete | None = ..., + oddHeader: Incomplete | None = ..., + oddFooter: Incomplete | None = ..., + evenHeader: Incomplete | None = ..., + evenFooter: Incomplete | None = ..., + firstHeader: Incomplete | None = ..., + firstFooter: Incomplete | None = ..., + ) -> None: ... + def __bool__(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/hyperlink.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/hyperlink.pyi new file mode 100644 index 00000000..ebf821ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/hyperlink.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Hyperlink(Serialisable): + tagname: str + ref: Incomplete + location: Incomplete + tooltip: Incomplete + display: Incomplete + id: Incomplete + target: Incomplete + __attrs__: Incomplete + def __init__( + self, + ref: Incomplete | None = ..., + location: Incomplete | None = ..., + tooltip: Incomplete | None = ..., + display: Incomplete | None = ..., + id: Incomplete | None = ..., + target: Incomplete | None = ..., + ) -> None: ... + +class HyperlinkList(Serialisable): + tagname: str + hyperlink: Incomplete + def __init__(self, hyperlink=...) -> None: ... + def __bool__(self) -> bool: ... + def __len__(self) -> int: ... + def append(self, value) -> None: ... 
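A quick sketch tying the header_footer and hyperlink stubs above to their worksheet-level API; URL and header text are placeholders:

from openpyxl import Workbook

wb = Workbook()
ws = wb.active
ws["A1"] = "openpyxl docs"
ws["A1"].hyperlink = "https://openpyxl.readthedocs.io"  # stored as a Hyperlink
ws["A1"].style = "Hyperlink"                            # built-in named style
ws.oddHeader.center.text = "Quarterly report"           # _HeaderFooterPart.text
ws.oddFooter.right.text = "Page &[Page] of &N"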
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/merge.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/merge.pyi new file mode 100644 index 00000000..2c7ca0a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/merge.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +from .cell_range import CellRange + +class MergeCell(CellRange): + tagname: str + @property + def ref(self): ... + __attrs__: Incomplete + def __init__(self, ref: Incomplete | None = ...) -> None: ... + def __copy__(self): ... + +class MergeCells(Serialisable): + tagname: str + # Overwritten by property below + # count: Integer + mergeCell: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., mergeCell=...) -> None: ... + @property + def count(self): ... + +class MergedCellRange(CellRange): + ws: Incomplete + start_cell: Incomplete + def __init__(self, worksheet, coord) -> None: ... + def format(self) -> None: ... + def __contains__(self, coord): ... + def __copy__(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/ole.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/ole.pyi new file mode 100644 index 00000000..ce36b517 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/ole.pyi @@ -0,0 +1,75 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class ObjectAnchor(Serialisable): + tagname: str + to: Incomplete + moveWithCells: Incomplete + sizeWithCells: Incomplete + z_order: Incomplete + def __init__( + self, + _from: Incomplete | None = ..., + to: Incomplete | None = ..., + moveWithCells: bool = ..., + sizeWithCells: bool = ..., + z_order: Incomplete | None = ..., + ) -> None: ... + +class ObjectPr(Serialisable): + tagname: str + anchor: Incomplete + locked: Incomplete + defaultSize: Incomplete + disabled: Incomplete + uiObject: Incomplete + autoFill: Incomplete + autoLine: Incomplete + autoPict: Incomplete + macro: Incomplete + altText: Incomplete + dde: Incomplete + __elements__: Incomplete + def __init__( + self, + anchor: Incomplete | None = ..., + locked: bool = ..., + defaultSize: bool = ..., + _print: bool = ..., + disabled: bool = ..., + uiObject: bool = ..., + autoFill: bool = ..., + autoLine: bool = ..., + autoPict: bool = ..., + macro: Incomplete | None = ..., + altText: Incomplete | None = ..., + dde: bool = ..., + ) -> None: ... + +class OleObject(Serialisable): + tagname: str + objectPr: Incomplete + progId: Incomplete + dvAspect: Incomplete + link: Incomplete + oleUpdate: Incomplete + autoLoad: Incomplete + shapeId: Incomplete + __elements__: Incomplete + def __init__( + self, + objectPr: Incomplete | None = ..., + progId: Incomplete | None = ..., + dvAspect: str = ..., + link: Incomplete | None = ..., + oleUpdate: Incomplete | None = ..., + autoLoad: bool = ..., + shapeId: Incomplete | None = ..., + ) -> None: ... + +class OleObjects(Serialisable): + tagname: str + oleObject: Incomplete + __elements__: Incomplete + def __init__(self, oleObject=...) -> None: ... 
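MergeCells and MergedCellRange above back the merge_cells / unmerge_cells methods. A minimal sketch:

from openpyxl import Workbook

wb = Workbook()
ws = wb.active
ws.merge_cells("A1:C2")                         # creates a MergedCellRange
ws["A1"] = "merged title"
print([str(r) for r in ws.merged_cells.ranges]) # ['A1:C2']
ws.unmerge_cells("A1:C2")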
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/page.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/page.pyi new file mode 100644 index 00000000..c81b5f28 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/page.pyi @@ -0,0 +1,90 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class PrintPageSetup(Serialisable): + tagname: str + orientation: Incomplete + paperSize: Incomplete + scale: Incomplete + fitToHeight: Incomplete + fitToWidth: Incomplete + firstPageNumber: Incomplete + useFirstPageNumber: Incomplete + paperHeight: Incomplete + paperWidth: Incomplete + pageOrder: Incomplete + usePrinterDefaults: Incomplete + blackAndWhite: Incomplete + draft: Incomplete + cellComments: Incomplete + errors: Incomplete + horizontalDpi: Incomplete + verticalDpi: Incomplete + copies: Incomplete + id: Incomplete + def __init__( + self, + worksheet: Incomplete | None = ..., + orientation: Incomplete | None = ..., + paperSize: Incomplete | None = ..., + scale: Incomplete | None = ..., + fitToHeight: Incomplete | None = ..., + fitToWidth: Incomplete | None = ..., + firstPageNumber: Incomplete | None = ..., + useFirstPageNumber: Incomplete | None = ..., + paperHeight: Incomplete | None = ..., + paperWidth: Incomplete | None = ..., + pageOrder: Incomplete | None = ..., + usePrinterDefaults: Incomplete | None = ..., + blackAndWhite: Incomplete | None = ..., + draft: Incomplete | None = ..., + cellComments: Incomplete | None = ..., + errors: Incomplete | None = ..., + horizontalDpi: Incomplete | None = ..., + verticalDpi: Incomplete | None = ..., + copies: Incomplete | None = ..., + id: Incomplete | None = ..., + ) -> None: ... + def __bool__(self) -> bool: ... + @property + def sheet_properties(self): ... + @property + def fitToPage(self): ... + @fitToPage.setter + def fitToPage(self, value) -> None: ... + @property + def autoPageBreaks(self): ... + @autoPageBreaks.setter + def autoPageBreaks(self, value) -> None: ... + @classmethod + def from_tree(cls, node): ... + +class PrintOptions(Serialisable): + tagname: str + horizontalCentered: Incomplete + verticalCentered: Incomplete + headings: Incomplete + gridLines: Incomplete + gridLinesSet: Incomplete + def __init__( + self, + horizontalCentered: Incomplete | None = ..., + verticalCentered: Incomplete | None = ..., + headings: Incomplete | None = ..., + gridLines: Incomplete | None = ..., + gridLinesSet: Incomplete | None = ..., + ) -> None: ... + def __bool__(self) -> bool: ... + +class PageMargins(Serialisable): + tagname: str + left: Incomplete + right: Incomplete + top: Incomplete + bottom: Incomplete + header: Incomplete + footer: Incomplete + def __init__( + self, left: float = ..., right: float = ..., top: int = ..., bottom: int = ..., header: float = ..., footer: float = ... + ) -> None: ... 
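PrintPageSetup, PrintOptions and PageMargins above are exposed on the worksheet. A sketch of the usual print-layout settings; the margin values are arbitrary:

from openpyxl import Workbook

wb = Workbook()
ws = wb.active
ws.page_setup.orientation = ws.ORIENTATION_LANDSCAPE  # PrintPageSetup
ws.page_setup.fitToWidth = 1
ws.print_options.horizontalCentered = True            # PrintOptions
ws.page_margins.left = 0.75                           # PageMargins
ws.page_margins.top = 1.0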
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/pagebreak.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/pagebreak.pyi new file mode 100644 index 00000000..c1e6420f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/pagebreak.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Break(Serialisable): + tagname: str + id: Incomplete + min: Incomplete + max: Incomplete + man: Incomplete + pt: Incomplete + def __init__(self, id: int = ..., min: int = ..., max: int = ..., man: bool = ..., pt: Incomplete | None = ...) -> None: ... + +class RowBreak(Serialisable): + tagname: str + # Overwritten by properties below + # count: Integer + # manualBreakCount: Integer + brk: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., manualBreakCount: Incomplete | None = ..., brk=...) -> None: ... + def __bool__(self) -> bool: ... + def __len__(self) -> int: ... + @property + def count(self): ... + @property + def manualBreakCount(self): ... + def append(self, brk: Incomplete | None = ...) -> None: ... + +PageBreak = RowBreak + +class ColBreak(RowBreak): + tagname: str + @property + def count(self): ... + @property + def manualBreakCount(self): ... + brk: Incomplete + __attrs__: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/picture.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/picture.pyi new file mode 100644 index 00000000..71b3f134 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/picture.pyi @@ -0,0 +1,4 @@ +from openpyxl.descriptors.serialisable import Serialisable + +class SheetBackgroundPicture(Serialisable): + tagname: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/properties.pyi new file mode 100644 index 00000000..37cbdac1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/properties.pyi @@ -0,0 +1,54 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Outline(Serialisable): + tagname: str + applyStyles: Incomplete + summaryBelow: Incomplete + summaryRight: Incomplete + showOutlineSymbols: Incomplete + def __init__( + self, + applyStyles: Incomplete | None = ..., + summaryBelow: Incomplete | None = ..., + summaryRight: Incomplete | None = ..., + showOutlineSymbols: Incomplete | None = ..., + ) -> None: ... + +class PageSetupProperties(Serialisable): + tagname: str + autoPageBreaks: Incomplete + fitToPage: Incomplete + def __init__(self, autoPageBreaks: Incomplete | None = ..., fitToPage: Incomplete | None = ...) -> None: ... 
+ +class WorksheetProperties(Serialisable): + tagname: str + codeName: Incomplete + enableFormatConditionsCalculation: Incomplete + filterMode: Incomplete + published: Incomplete + syncHorizontal: Incomplete + syncRef: Incomplete + syncVertical: Incomplete + transitionEvaluation: Incomplete + transitionEntry: Incomplete + tabColor: Incomplete + outlinePr: Incomplete + pageSetUpPr: Incomplete + __elements__: Incomplete + def __init__( + self, + codeName: Incomplete | None = ..., + enableFormatConditionsCalculation: Incomplete | None = ..., + filterMode: Incomplete | None = ..., + published: Incomplete | None = ..., + syncHorizontal: Incomplete | None = ..., + syncRef: Incomplete | None = ..., + syncVertical: Incomplete | None = ..., + transitionEvaluation: Incomplete | None = ..., + transitionEntry: Incomplete | None = ..., + tabColor: Incomplete | None = ..., + outlinePr: Incomplete | None = ..., + pageSetUpPr: Incomplete | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/protection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/protection.pyi new file mode 100644 index 00000000..8bd46afc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/protection.pyi @@ -0,0 +1,64 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class _Protected: + def set_password(self, value: str = ..., already_hashed: bool = ...) -> None: ... + @property + def password(self): ... + @password.setter + def password(self, value) -> None: ... + +class SheetProtection(Serialisable, _Protected): + tagname: str + sheet: Incomplete + enabled: Incomplete + objects: Incomplete + scenarios: Incomplete + formatCells: Incomplete + formatColumns: Incomplete + formatRows: Incomplete + insertColumns: Incomplete + insertRows: Incomplete + insertHyperlinks: Incomplete + deleteColumns: Incomplete + deleteRows: Incomplete + selectLockedCells: Incomplete + selectUnlockedCells: Incomplete + sort: Incomplete + autoFilter: Incomplete + pivotTables: Incomplete + saltValue: Incomplete + spinCount: Incomplete + algorithmName: Incomplete + hashValue: Incomplete + __attrs__: Incomplete + password: Incomplete + def __init__( + self, + sheet: bool = ..., + objects: bool = ..., + scenarios: bool = ..., + formatCells: bool = ..., + formatRows: bool = ..., + formatColumns: bool = ..., + insertColumns: bool = ..., + insertRows: bool = ..., + insertHyperlinks: bool = ..., + deleteColumns: bool = ..., + deleteRows: bool = ..., + selectLockedCells: bool = ..., + selectUnlockedCells: bool = ..., + sort: bool = ..., + autoFilter: bool = ..., + pivotTables: bool = ..., + password: Incomplete | None = ..., + algorithmName: Incomplete | None = ..., + saltValue: Incomplete | None = ..., + spinCount: Incomplete | None = ..., + hashValue: Incomplete | None = ..., + ) -> None: ... + def set_password(self, value: str = ..., already_hashed: bool = ...) -> None: ... + def enable(self) -> None: ... + def disable(self) -> None: ... + def __bool__(self) -> bool: ... 
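WorksheetProperties and SheetProtection above are reachable as ws.sheet_properties and ws.protection. A short sketch; the colour and password are placeholders:

from openpyxl import Workbook

wb = Workbook()
ws = wb.active
ws.sheet_properties.tabColor = "1072BA"  # WorksheetProperties
ws.protection.set_password("secret")     # hashes and stores the password
ws.protection.sheet = True               # turn protection on
ws.protection.formatCells = False        # still allow cell formatting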
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/related.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/related.pyi new file mode 100644 index 00000000..89252ae6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/related.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Related(Serialisable): # type: ignore[misc] + id: Incomplete + def __init__(self, id: Incomplete | None = ...) -> None: ... + def to_tree(self, tagname, idx: Incomplete | None = ...): ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/scenario.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/scenario.pyi new file mode 100644 index 00000000..2d287b95 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/scenario.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class InputCells(Serialisable): + tagname: str + r: Incomplete + deleted: Incomplete + undone: Incomplete + val: Incomplete + numFmtId: Incomplete + def __init__( + self, + r: Incomplete | None = ..., + deleted: bool = ..., + undone: bool = ..., + val: Incomplete | None = ..., + numFmtId: Incomplete | None = ..., + ) -> None: ... + +class Scenario(Serialisable): + tagname: str + inputCells: Incomplete + name: Incomplete + locked: Incomplete + hidden: Incomplete + user: Incomplete + comment: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__( + self, + inputCells=..., + name: Incomplete | None = ..., + locked: bool = ..., + hidden: bool = ..., + count: Incomplete | None = ..., + user: Incomplete | None = ..., + comment: Incomplete | None = ..., + ) -> None: ... + @property + def count(self): ... + +class ScenarioList(Serialisable): + tagname: str + scenario: Incomplete + current: Incomplete + show: Incomplete + sqref: Incomplete + __elements__: Incomplete + def __init__( + self, scenario=..., current: Incomplete | None = ..., show: Incomplete | None = ..., sqref: Incomplete | None = ... + ) -> None: ... + def append(self, scenario) -> None: ... + def __bool__(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/smart_tag.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/smart_tag.pyi new file mode 100644 index 00000000..682d7791 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/smart_tag.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class CellSmartTagPr(Serialisable): + tagname: str + key: Incomplete + val: Incomplete + def __init__(self, key: Incomplete | None = ..., val: Incomplete | None = ...) -> None: ... + +class CellSmartTag(Serialisable): + tagname: str + cellSmartTagPr: Incomplete + type: Incomplete + deleted: Incomplete + xmlBased: Incomplete + __elements__: Incomplete + def __init__(self, cellSmartTagPr=..., type: Incomplete | None = ..., deleted: bool = ..., xmlBased: bool = ...) -> None: ... 
+ +class CellSmartTags(Serialisable): + tagname: str + cellSmartTag: Incomplete + r: Incomplete + __elements__: Incomplete + def __init__(self, cellSmartTag=..., r: Incomplete | None = ...) -> None: ... + +class SmartTags(Serialisable): + tagname: str + cellSmartTags: Incomplete + __elements__: Incomplete + def __init__(self, cellSmartTags=...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/table.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/table.pyi new file mode 100644 index 00000000..c5eca03e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/table.pyi @@ -0,0 +1,177 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors import String +from openpyxl.descriptors.serialisable import Serialisable + +TABLESTYLES: Incomplete +PIVOTSTYLES: Incomplete + +class TableStyleInfo(Serialisable): + tagname: str + name: Incomplete + showFirstColumn: Incomplete + showLastColumn: Incomplete + showRowStripes: Incomplete + showColumnStripes: Incomplete + def __init__( + self, + name: Incomplete | None = ..., + showFirstColumn: Incomplete | None = ..., + showLastColumn: Incomplete | None = ..., + showRowStripes: Incomplete | None = ..., + showColumnStripes: Incomplete | None = ..., + ) -> None: ... + +class XMLColumnProps(Serialisable): + tagname: str + mapId: Incomplete + xpath: Incomplete + denormalized: Incomplete + xmlDataType: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + mapId: Incomplete | None = ..., + xpath: Incomplete | None = ..., + denormalized: Incomplete | None = ..., + xmlDataType: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + +class TableFormula(Serialisable): + tagname: str + array: Incomplete + attr_text: Incomplete + text: Incomplete + def __init__(self, array: Incomplete | None = ..., attr_text: Incomplete | None = ...) -> None: ... + +class TableColumn(Serialisable): + tagname: str + id: Incomplete + uniqueName: Incomplete + name: Incomplete + totalsRowFunction: Incomplete + totalsRowLabel: Incomplete + queryTableFieldId: Incomplete + headerRowDxfId: Incomplete + dataDxfId: Incomplete + totalsRowDxfId: Incomplete + headerRowCellStyle: Incomplete + dataCellStyle: Incomplete + totalsRowCellStyle: Incomplete + calculatedColumnFormula: Incomplete + totalsRowFormula: Incomplete + xmlColumnPr: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + id: Incomplete | None = ..., + uniqueName: Incomplete | None = ..., + name: Incomplete | None = ..., + totalsRowFunction: Incomplete | None = ..., + totalsRowLabel: Incomplete | None = ..., + queryTableFieldId: Incomplete | None = ..., + headerRowDxfId: Incomplete | None = ..., + dataDxfId: Incomplete | None = ..., + totalsRowDxfId: Incomplete | None = ..., + headerRowCellStyle: Incomplete | None = ..., + dataCellStyle: Incomplete | None = ..., + totalsRowCellStyle: Incomplete | None = ..., + calculatedColumnFormula: Incomplete | None = ..., + totalsRowFormula: Incomplete | None = ..., + xmlColumnPr: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + def __iter__(self): ... + @classmethod + def from_tree(cls, node): ... + +class TableNameDescriptor(String): + def __set__(self, instance, value) -> None: ... 
+ +class Table(Serialisable): + mime_type: str + tagname: str + id: Incomplete + name: Incomplete + displayName: Incomplete + comment: Incomplete + ref: Incomplete + tableType: Incomplete + headerRowCount: Incomplete + insertRow: Incomplete + insertRowShift: Incomplete + totalsRowCount: Incomplete + totalsRowShown: Incomplete + published: Incomplete + headerRowDxfId: Incomplete + dataDxfId: Incomplete + totalsRowDxfId: Incomplete + headerRowBorderDxfId: Incomplete + tableBorderDxfId: Incomplete + totalsRowBorderDxfId: Incomplete + headerRowCellStyle: Incomplete + dataCellStyle: Incomplete + totalsRowCellStyle: Incomplete + connectionId: Incomplete + autoFilter: Incomplete + sortState: Incomplete + tableColumns: Incomplete + tableStyleInfo: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__( + self, + id: int = ..., + displayName: Incomplete | None = ..., + ref: Incomplete | None = ..., + name: Incomplete | None = ..., + comment: Incomplete | None = ..., + tableType: Incomplete | None = ..., + headerRowCount: int = ..., + insertRow: Incomplete | None = ..., + insertRowShift: Incomplete | None = ..., + totalsRowCount: Incomplete | None = ..., + totalsRowShown: Incomplete | None = ..., + published: Incomplete | None = ..., + headerRowDxfId: Incomplete | None = ..., + dataDxfId: Incomplete | None = ..., + totalsRowDxfId: Incomplete | None = ..., + headerRowBorderDxfId: Incomplete | None = ..., + tableBorderDxfId: Incomplete | None = ..., + totalsRowBorderDxfId: Incomplete | None = ..., + headerRowCellStyle: Incomplete | None = ..., + dataCellStyle: Incomplete | None = ..., + totalsRowCellStyle: Incomplete | None = ..., + connectionId: Incomplete | None = ..., + autoFilter: Incomplete | None = ..., + sortState: Incomplete | None = ..., + tableColumns=..., + tableStyleInfo: Incomplete | None = ..., + extLst: Incomplete | None = ..., + ) -> None: ... + def to_tree(self): ... + @property + def path(self): ... + @property + def column_names(self): ... + +class TablePartList(Serialisable): + tagname: str + # Overwritten by property below + # count: Integer + tablePart: Incomplete + __elements__: Incomplete + __attrs__: Incomplete + def __init__(self, count: Incomplete | None = ..., tablePart=...) -> None: ... + def append(self, part) -> None: ... + @property + def count(self): ... + def __bool__(self) -> bool: ... + +class TableList(dict[Incomplete, Incomplete]): + def add(self, table) -> None: ... + def get(self, name: Incomplete | None = ..., table_range: Incomplete | None = ...): ... + def items(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/views.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/views.pyi new file mode 100644 index 00000000..0abd81b9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/views.pyi @@ -0,0 +1,84 @@ +from _typeshed import Incomplete + +from openpyxl.descriptors.serialisable import Serialisable + +class Pane(Serialisable): # type: ignore[misc] + xSplit: Incomplete + ySplit: Incomplete + topLeftCell: Incomplete + activePane: Incomplete + state: Incomplete + def __init__( + self, + xSplit: Incomplete | None = ..., + ySplit: Incomplete | None = ..., + topLeftCell: Incomplete | None = ..., + activePane: str = ..., + state: str = ..., + ) -> None: ... 
+ +class Selection(Serialisable): # type: ignore[misc] + pane: Incomplete + activeCell: Incomplete + activeCellId: Incomplete + sqref: Incomplete + def __init__( + self, pane: Incomplete | None = ..., activeCell: str = ..., activeCellId: Incomplete | None = ..., sqref: str = ... + ) -> None: ... + +class SheetView(Serialisable): + tagname: str + windowProtection: Incomplete + showFormulas: Incomplete + showGridLines: Incomplete + showRowColHeaders: Incomplete + showZeros: Incomplete + rightToLeft: Incomplete + tabSelected: Incomplete + showRuler: Incomplete + showOutlineSymbols: Incomplete + defaultGridColor: Incomplete + showWhiteSpace: Incomplete + view: Incomplete + topLeftCell: Incomplete + colorId: Incomplete + zoomScale: Incomplete + zoomScaleNormal: Incomplete + zoomScaleSheetLayoutView: Incomplete + zoomScalePageLayoutView: Incomplete + zoomToFit: Incomplete + workbookViewId: Incomplete + selection: Incomplete + pane: Incomplete + def __init__( + self, + windowProtection: Incomplete | None = ..., + showFormulas: Incomplete | None = ..., + showGridLines: Incomplete | None = ..., + showRowColHeaders: Incomplete | None = ..., + showZeros: Incomplete | None = ..., + rightToLeft: Incomplete | None = ..., + tabSelected: Incomplete | None = ..., + showRuler: Incomplete | None = ..., + showOutlineSymbols: Incomplete | None = ..., + defaultGridColor: Incomplete | None = ..., + showWhiteSpace: Incomplete | None = ..., + view: Incomplete | None = ..., + topLeftCell: Incomplete | None = ..., + colorId: Incomplete | None = ..., + zoomScale: Incomplete | None = ..., + zoomScaleNormal: Incomplete | None = ..., + zoomScaleSheetLayoutView: Incomplete | None = ..., + zoomScalePageLayoutView: Incomplete | None = ..., + zoomToFit: Incomplete | None = ..., + workbookViewId: int = ..., + selection: Incomplete | None = ..., + pane: Incomplete | None = ..., + ) -> None: ... + +class SheetViewList(Serialisable): + tagname: str + sheetView: Incomplete + extLst: Incomplete + __elements__: Incomplete + def __init__(self, sheetView: Incomplete | None = ..., extLst: Incomplete | None = ...) -> None: ... 
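Table, TableStyleInfo and the SheetView/Pane stubs above cover worksheet tables and frozen panes. A sketch with placeholder data, names and filename:

from openpyxl import Workbook
from openpyxl.worksheet.table import Table, TableStyleInfo

wb = Workbook()
ws = wb.active
ws.append(["item", "qty", "price"])
ws.append(["apple", 3, 0.5])
tab = Table(displayName="Inventory", ref="A1:C2")
tab.tableStyleInfo = TableStyleInfo(name="TableStyleMedium9", showRowStripes=True)
ws.add_table(tab)
ws.freeze_panes = "A2"    # Pane: keep the header row visible while scrolling
wb.save("table.xlsx")     # placeholder path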
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/worksheet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/worksheet.pyi new file mode 100644 index 00000000..a5e49fe5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/worksheet/worksheet.pyi @@ -0,0 +1,199 @@ +from _typeshed import Incomplete +from collections.abc import Generator, Iterable, Iterator +from datetime import datetime +from typing import overload +from typing_extensions import Literal + +from openpyxl.cell.cell import Cell +from openpyxl.workbook.child import _WorkbookChild +from openpyxl.workbook.workbook import Workbook +from openpyxl.worksheet.cell_range import CellRange +from openpyxl.worksheet.datavalidation import DataValidation +from openpyxl.worksheet.pagebreak import ColBreak, RowBreak +from openpyxl.worksheet.table import Table, TableList +from openpyxl.worksheet.views import SheetView + +class Worksheet(_WorkbookChild): + mime_type: str + BREAK_NONE: int + BREAK_ROW: int + BREAK_COLUMN: int + SHEETSTATE_VISIBLE: str + SHEETSTATE_HIDDEN: str + SHEETSTATE_VERYHIDDEN: str + PAPERSIZE_LETTER: str + PAPERSIZE_LETTER_SMALL: str + PAPERSIZE_TABLOID: str + PAPERSIZE_LEDGER: str + PAPERSIZE_LEGAL: str + PAPERSIZE_STATEMENT: str + PAPERSIZE_EXECUTIVE: str + PAPERSIZE_A3: str + PAPERSIZE_A4: str + PAPERSIZE_A4_SMALL: str + PAPERSIZE_A5: str + ORIENTATION_PORTRAIT: str + ORIENTATION_LANDSCAPE: str + def __init__(self, parent: Workbook, title: str | None = ...) -> None: ... + @property + def sheet_view(self) -> SheetView: ... + @property + def selected_cell(self) -> Cell: ... + @property + def active_cell(self) -> Cell: ... + @property + def page_breaks(self) -> tuple[RowBreak, ColBreak]: ... + @property + def show_gridlines(self) -> bool: ... + @property + def show_summary_below(self) -> bool: ... + @property + def show_summary_right(self) -> bool: ... + @property + def freeze_panes(self) -> str | None: ... + @freeze_panes.setter + def freeze_panes(self, topLeftCell: Incomplete | None = ...) -> None: ... + def cell(self, row: int, column: int, value: str | None = ...) -> Cell: ... + def __getitem__(self, key: str | int | slice) -> Cell | tuple[Cell, ...]: ... + def __setitem__(self, key: str, value: str) -> None: ... + def __iter__(self) -> Iterator[Cell]: ... + def __delitem__(self, key: str) -> None: ... + @property + def min_row(self) -> int: ... + @property + def max_row(self) -> int: ... + @property + def min_column(self) -> int: ... + @property + def max_column(self) -> int: ... + def calculate_dimension(self) -> str: ... + @property + def dimensions(self) -> str: ... + @overload + def iter_rows( + self, min_row: int | None, max_row: int | None, min_col: int | None, max_col: int | None, values_only: Literal[True] + ) -> Generator[tuple[str | float | datetime | None, ...], None, None]: ... + @overload + def iter_rows( + self, + min_row: int | None = None, + max_row: int | None = None, + min_col: int | None = None, + max_col: int | None = None, + *, + values_only: Literal[True], + ) -> Generator[tuple[str | float | datetime | None, ...], None, None]: ... + @overload + def iter_rows( + self, + min_row: int | None = ..., + max_row: int | None = ..., + min_col: int | None = ..., + max_col: int | None = ..., + values_only: Literal[False] = False, + ) -> Generator[tuple[Cell, ...], None, None]: ... 
+ @overload + def iter_rows( + self, min_row: int | None, max_row: int | None, min_col: int | None, max_col: int | None, values_only: bool + ) -> Generator[tuple[Cell | str | float | datetime | None, ...], None, None]: ... + @overload + def iter_rows( + self, + min_row: int | None = None, + max_row: int | None = None, + min_col: int | None = None, + max_col: int | None = None, + *, + values_only: bool, + ) -> Generator[tuple[Cell | str | float | datetime | None, ...], None, None]: ... + @property + def rows(self) -> Generator[Cell, None, None]: ... + @property + def values(self) -> Generator[str | float | datetime | None, None, None]: ... + @overload + def iter_cols( + self, min_col: int | None, max_col: int | None, min_row: int | None, max_row: int | None, values_only: Literal[True] + ) -> Generator[tuple[str | float | datetime | None, ...], None, None]: ... + @overload + def iter_cols( + self, + min_col: int | None = None, + max_col: int | None = None, + min_row: int | None = None, + max_row: int | None = None, + *, + values_only: Literal[True], + ) -> Generator[tuple[str | float | datetime | None, ...], None, None]: ... + @overload + def iter_cols( + self, + min_col: int | None = ..., + max_col: int | None = ..., + min_row: int | None = ..., + max_row: int | None = ..., + values_only: Literal[False] = False, + ) -> Generator[tuple[Cell, ...], None, None]: ... + @overload + def iter_cols( + self, min_col: int | None, max_col: int | None, min_row: int | None, max_row: int | None, values_only: bool + ) -> Generator[tuple[Cell | str | float | datetime | None, ...], None, None]: ... + @overload + def iter_cols( + self, + min_col: int | None = None, + max_col: int | None = None, + min_row: int | None = None, + max_row: int | None = None, + *, + values_only: bool, + ) -> Generator[tuple[Cell | str | float | datetime | None, ...], None, None]: ... + @property + def columns(self) -> Generator[Cell, None, None]: ... + def set_printer_settings( + self, paper_size: int | None, orientation: None | Literal["default", "portrait", "landscape"] + ) -> None: ... + def add_data_validation(self, data_validation: DataValidation) -> None: ... + def add_chart(self, chart, anchor: Incomplete | None = ...) -> None: ... + def add_image(self, img, anchor: Incomplete | None = ...) -> None: ... + def add_table(self, table: Table) -> None: ... + @property + def tables(self) -> TableList: ... + def add_pivot(self, pivot) -> None: ... + def merge_cells( + self, + range_string: str | None = ..., + start_row: int | None = ..., + start_column: int | None = ..., + end_row: int | None = ..., + end_column: int | None = ..., + ) -> None: ... + @property + def merged_cell_ranges(self) -> list[CellRange]: ... + def unmerge_cells( + self, + range_string: str | None = ..., + start_row: int | None = ..., + start_column: int | None = ..., + end_row: int | None = ..., + end_column: int | None = ..., + ) -> None: ... + def append(self, iterable: Iterable[Incomplete]) -> None: ... + def insert_rows(self, idx: int, amount: int = ...) -> None: ... + def insert_cols(self, idx: int, amount: int = ...) -> None: ... + def delete_rows(self, idx: int, amount: int = ...) -> None: ... + def delete_cols(self, idx: int, amount: int = ...) -> None: ... + def move_range(self, cell_range: CellRange | str, rows: int = ..., cols: int = ..., translate: bool = ...) -> None: ... + @property + def print_title_rows(self) -> str | None: ... + @print_title_rows.setter + def print_title_rows(self, rows: str | None) -> None: ... 
+ @property + def print_title_cols(self) -> str | None: ... + @print_title_cols.setter + def print_title_cols(self, cols: str | None) -> None: ... + @property + def print_titles(self) -> str | None: ... + @property + def print_area(self) -> list[str]: ... + @print_area.setter + def print_area(self, value: str | Iterable[str]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/writer/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/writer/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/writer/excel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/writer/excel.pyi new file mode 100644 index 00000000..3864e2b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/writer/excel.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete + +class ExcelWriter: + workbook: Incomplete + manifest: Incomplete + vba_modified: Incomplete + def __init__(self, workbook, archive) -> None: ... + def write_data(self) -> None: ... + def write_worksheet(self, ws) -> None: ... + def save(self) -> None: ... + +def save_workbook(workbook, filename): ... +def save_virtual_workbook(workbook): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/writer/theme.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/writer/theme.pyi new file mode 100644 index 00000000..079c7d65 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/writer/theme.pyi @@ -0,0 +1,3 @@ +theme_xml: str + +def write_theme(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/xml/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/xml/__init__.pyi new file mode 100644 index 00000000..9c467008 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/xml/__init__.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +def lxml_available(): ... +def lxml_env_set(): ... + +LXML: Incomplete + +def defusedxml_available(): ... +def defusedxml_env_set(): ... 
+ +DEFUSEDXML: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/xml/constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/xml/constants.pyi new file mode 100644 index 00000000..63217f41 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/xml/constants.pyi @@ -0,0 +1,74 @@ +from _typeshed import Incomplete + +MIN_ROW: int +MIN_COLUMN: int +MAX_COLUMN: int +MAX_ROW: int +PACKAGE_PROPS: str +PACKAGE_XL: str +PACKAGE_RELS: str +PACKAGE_THEME: Incomplete +PACKAGE_WORKSHEETS: Incomplete +PACKAGE_CHARTSHEETS: Incomplete +PACKAGE_DRAWINGS: Incomplete +PACKAGE_CHARTS: Incomplete +PACKAGE_IMAGES: Incomplete +PACKAGE_WORKSHEET_RELS: Incomplete +PACKAGE_CHARTSHEETS_RELS: Incomplete +PACKAGE_PIVOT_TABLE: Incomplete +PACKAGE_PIVOT_CACHE: Incomplete +ARC_CONTENT_TYPES: str +ARC_ROOT_RELS: Incomplete +ARC_WORKBOOK_RELS: Incomplete +ARC_CORE: Incomplete +ARC_APP: Incomplete +ARC_WORKBOOK: Incomplete +ARC_STYLE: Incomplete +ARC_THEME: Incomplete +ARC_SHARED_STRINGS: Incomplete +ARC_CUSTOM_UI: str +XML_NS: str +DCORE_NS: str +DCTERMS_NS: str +DCTERMS_PREFIX: str +DOC_NS: str +REL_NS: Incomplete +COMMENTS_NS: Incomplete +IMAGE_NS: Incomplete +VML_NS: Incomplete +VTYPES_NS: Incomplete +XPROPS_NS: Incomplete +EXTERNAL_LINK_NS: Incomplete +PKG_NS: str +PKG_REL_NS: Incomplete +COREPROPS_NS: Incomplete +CONTYPES_NS: Incomplete +XSI_NS: str +SHEET_MAIN_NS: str +CHART_NS: str +DRAWING_NS: str +SHEET_DRAWING_NS: str +CHART_DRAWING_NS: str +CUSTOMUI_NS: str +NAMESPACES: Incomplete +WORKBOOK_MACRO: str +WORKBOOK: str +SPREADSHEET: str +SHARED_STRINGS: Incomplete +EXTERNAL_LINK: Incomplete +WORKSHEET_TYPE: Incomplete +COMMENTS_TYPE: Incomplete +STYLES_TYPE: Incomplete +CHARTSHEET_TYPE: Incomplete +DRAWING_TYPE: str +CHART_TYPE: str +CHARTSHAPE_TYPE: str +THEME_TYPE: str +XLTM: Incomplete +XLSM: Incomplete +XLTX: Incomplete +XLSX: Incomplete +EXT_TYPES: Incomplete +CTRL: str +ACTIVEX: str +VBA: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/xml/functions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/xml/functions.pyi new file mode 100644 index 00000000..2a6bab56 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/openpyxl/openpyxl/xml/functions.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +NS_REGEX: Incomplete + +def localname(node): ... +def whitespace(node) -> None: ... 
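The Worksheet stub above models iter_rows with overloads so that values_only=True yields plain values and the default yields Cell tuples. A sketch showing both shapes:

from datetime import datetime
from openpyxl import Workbook

wb = Workbook()
ws = wb.active
ws.append(["when", "value"])
ws.append([datetime(2023, 1, 1), 42])

# values_only=True selects the tuple-of-values overload
for row in ws.iter_rows(min_row=2, max_col=2, values_only=True):
    print(row)                        # plain values, no Cell objects

# default: tuples of Cell objects
for row in ws.iter_rows(min_row=1, max_row=1):
    print([c.value for c in row])     # ['when', 'value']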
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..7c96a340 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/@tests/stubtest_allowlist.txt @@ -0,0 +1,4 @@ +opentracing.harness.api_check +opentracing.harness.scope_check +opentracing.scope_managers.gevent +opentracing.scope_managers.tornado diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/METADATA.toml new file mode 100644 index 00000000..d00ec051 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/METADATA.toml @@ -0,0 +1,4 @@ +version = "2.4.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/__init__.pyi new file mode 100644 index 00000000..69d6e6b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/__init__.pyi @@ -0,0 +1,24 @@ +from .propagation import ( + Format as Format, + InvalidCarrierException as InvalidCarrierException, + SpanContextCorruptedException as SpanContextCorruptedException, + UnsupportedFormatException as UnsupportedFormatException, +) +from .scope import Scope as Scope +from .scope_manager import ScopeManager as ScopeManager +from .span import Span as Span, SpanContext as SpanContext +from .tracer import ( + Reference as Reference, + ReferenceType as ReferenceType, + Tracer as Tracer, + child_of as child_of, + follows_from as follows_from, + start_child_span as start_child_span, +) + +tracer: Tracer +is_tracer_registered: bool + +def global_tracer() -> Tracer: ... +def set_global_tracer(value: Tracer) -> None: ... +def is_global_tracer_registered() -> bool: ... 
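The opentracing/__init__.pyi stub above types the module-level tracer registry (global_tracer, set_global_tracer, is_global_tracer_registered). A minimal sketch of that surface, using the no-op Tracer that the opentracing package itself provides:

from opentracing import Tracer, global_tracer, set_global_tracer

# Register a tracer once, early in process start-up.
set_global_tracer(Tracer())

# Any other module can retrieve it without importing the concrete tracer.
tracer = global_tracer()

# start_active_span() returns a Scope; closing the scope finishes the span.
with tracer.start_active_span("load-config") as scope:
    scope.span.set_tag("component", "example")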
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/ext/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/ext/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/ext/tags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/ext/tags.pyi new file mode 100644 index 00000000..08687e50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/ext/tags.pyi @@ -0,0 +1 @@ +from ..tags import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/harness/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/harness/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/harness/api_check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/harness/api_check.pyi new file mode 100644 index 00000000..dcc97348 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/harness/api_check.pyi @@ -0,0 +1,34 @@ +from opentracing.span import Span + +from ..tracer import Tracer + +class APICompatibilityCheckMixin: + def tracer(self) -> Tracer: ... + def check_baggage_values(self) -> bool: ... + def check_scope_manager(self) -> bool: ... + def is_parent(self, parent: Span, span: Span) -> bool: ... + def test_active_span(self) -> None: ... + def test_start_active_span(self) -> None: ... + def test_start_active_span_parent(self) -> None: ... + def test_start_active_span_ignore_active_span(self) -> None: ... + def test_start_active_span_not_finish_on_close(self) -> None: ... + def test_start_active_span_finish_on_close(self) -> None: ... + def test_start_active_span_default_finish_on_close(self) -> None: ... + def test_start_span(self) -> None: ... + def test_start_span_propagation(self) -> None: ... + def test_start_span_propagation_ignore_active_span(self) -> None: ... + def test_start_span_with_parent(self) -> None: ... + def test_start_child_span(self) -> None: ... + def test_set_operation_name(self) -> None: ... + def test_span_as_context_manager(self) -> None: ... + def test_span_tag_value_types(self) -> None: ... + def test_span_tags_with_chaining(self) -> None: ... + def test_span_logs(self) -> None: ... + def test_span_baggage(self) -> None: ... + def test_context_baggage(self) -> None: ... + def test_text_propagation(self) -> None: ... + def test_binary_propagation(self) -> None: ... + def test_mandatory_formats(self) -> None: ... + def test_unknown_format(self) -> None: ... + def test_tracer_start_active_span_scope(self) -> None: ... + def test_tracer_start_span_scope(self) -> None: ... 
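The harness/api_check.pyi stub above types opentracing's conformance mixin. The intended pattern, sketched here with the MockTracer whose stubs are added later in this diff (the test-class name is illustrative):

from opentracing.harness.api_check import APICompatibilityCheckMixin
from opentracing.mocktracer import MockTracer

class TestMockTracerCompliance(APICompatibilityCheckMixin):
    # The mixin's test_* methods call tracer() to obtain the
    # implementation under test.
    def tracer(self) -> MockTracer:
        return MockTracer()

    # Opt in to the baggage-related checks, which MockTracer supports.
    def check_baggage_values(self) -> bool:
        return True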
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/harness/scope_check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/harness/scope_check.pyi new file mode 100644 index 00000000..10b5ce11 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/harness/scope_check.pyi @@ -0,0 +1,15 @@ +from collections.abc import Callable + +from ..scope_manager import ScopeManager + +class ScopeCompatibilityCheckMixin: + def scope_manager(self) -> ScopeManager: ... + def run_test(self, test_fn: Callable[[], object]) -> None: ... + def test_missing_active_external(self) -> None: ... + def test_missing_active(self) -> None: ... + def test_activate(self) -> None: ... + def test_activate_external(self) -> None: ... + def test_activate_finish_on_close(self) -> None: ... + def test_activate_nested(self) -> None: ... + def test_activate_finish_on_close_nested(self) -> None: ... + def test_close_wrong_order(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/logs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/logs.pyi new file mode 100644 index 00000000..15cda689 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/logs.pyi @@ -0,0 +1,5 @@ +ERROR_KIND: str +ERROR_OBJECT: str +EVENT: str +MESSAGE: str +STACK: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/__init__.pyi new file mode 100644 index 00000000..85fa2ac9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/__init__.pyi @@ -0,0 +1,2 @@ +from .propagator import Propagator as Propagator +from .tracer import MockTracer as MockTracer diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/binary_propagator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/binary_propagator.pyi new file mode 100644 index 00000000..33f94f4b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/binary_propagator.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from .context import SpanContext +from .propagator import Propagator + +class BinaryPropagator(Propagator): + def inject(self, span_context: SpanContext, carrier: dict[Any, Any]) -> None: ... + def extract(self, carrier: dict[Any, Any]) -> SpanContext: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/context.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/context.pyi new file mode 100644 index 00000000..3264dd82 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/context.pyi @@ -0,0 +1,11 @@ +from typing_extensions import Self + +import opentracing + +class SpanContext(opentracing.SpanContext): + trace_id: int | None + span_id: int | None + def __init__(self, trace_id: int | None = ..., span_id: int | None = ..., baggage: dict[str, str] | None = ...) -> None: ... + @property + def baggage(self) -> dict[str, str]: ... + def with_baggage_item(self, key: str, value: str) -> Self: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/propagator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/propagator.pyi new file mode 100644 index 00000000..0a2ffb44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/propagator.pyi @@ -0,0 +1,7 @@ +from typing import Any + +from .context import SpanContext + +class Propagator: + def inject(self, span_context: SpanContext, carrier: dict[Any, Any]) -> None: ... + def extract(self, carrier: dict[Any, Any]) -> SpanContext: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/span.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/span.pyi new file mode 100644 index 00000000..9d310d5b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/span.pyi @@ -0,0 +1,38 @@ +from typing import Any +from typing_extensions import Self + +from ..span import Span +from ..tracer import Tracer +from .context import SpanContext +from .tracer import MockTracer + +class MockSpan(Span): + operation_name: str | None + start_time: Any + parent_id: int | None + tags: dict[str, Any] + finish_time: float + finished: bool + logs: list[LogData] + def __init__( + self, + tracer: Tracer, + operation_name: str | None = ..., + context: SpanContext | None = ..., + parent_id: int | None = ..., + tags: dict[str, Any] | None = ..., + start_time: float | None = ..., + ) -> None: ... + @property + def tracer(self) -> MockTracer: ... + @property + def context(self) -> SpanContext: ... + def set_operation_name(self, operation_name: str) -> Self: ... + def set_tag(self, key: str, value: str | bool | float) -> Self: ... + def log_kv(self, key_values: dict[str, Any], timestamp: float | None = ...) -> Self: ... + def set_baggage_item(self, key: str, value: str) -> Self: ... + +class LogData: + key_values: dict[str, Any] + timestamp: float | None + def __init__(self, key_values: dict[str, Any], timestamp: float | None = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/text_propagator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/text_propagator.pyi new file mode 100644 index 00000000..d828fe2f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/text_propagator.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from .context import SpanContext +from .propagator import Propagator + +prefix_tracer_state: str +prefix_baggage: str +field_name_trace_id: str +field_name_span_id: str +field_count: int + +class TextPropagator(Propagator): + def inject(self, span_context: SpanContext, carrier: dict[Any, Any]) -> None: ... + def extract(self, carrier: dict[Any, Any]) -> SpanContext: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/tracer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/tracer.pyi new file mode 100644 index 00000000..9336c1a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/mocktracer/tracer.pyi @@ -0,0 +1,26 @@ +from typing import Any + +from ..scope_manager import ScopeManager +from ..span import Span +from ..tracer import Reference, Tracer +from .context import SpanContext +from .propagator import Propagator +from .span import MockSpan + +class MockTracer(Tracer): + def __init__(self, scope_manager: ScopeManager | None = ...) -> None: ... + @property + def active_span(self) -> MockSpan | None: ... + def register_propagator(self, format: str, propagator: Propagator) -> None: ... + def finished_spans(self) -> list[MockSpan]: ... + def reset(self) -> None: ... + def start_span( # type: ignore[override] + self, + operation_name: str | None = ..., + child_of: Span | SpanContext | None = ..., + references: list[Reference] | None = ..., + tags: dict[Any, Any] | None = ..., + start_time: float | None = ..., + ignore_active_span: bool = ..., + ) -> MockSpan: ... + def extract(self, format: str, carrier: dict[Any, Any]) -> SpanContext: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/propagation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/propagation.pyi new file mode 100644 index 00000000..81307c63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/propagation.pyi @@ -0,0 +1,8 @@ +class UnsupportedFormatException(Exception): ... +class InvalidCarrierException(Exception): ... +class SpanContextCorruptedException(Exception): ... 
+ +class Format: + BINARY: str + TEXT_MAP: str + HTTP_HEADERS: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope.pyi new file mode 100644 index 00000000..e312df14 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope.pyi @@ -0,0 +1,17 @@ +from types import TracebackType +from typing_extensions import Self + +from .scope_manager import ScopeManager +from .span import Span + +class Scope: + def __init__(self, manager: ScopeManager, span: Span) -> None: ... + @property + def span(self) -> Span: ... + @property + def manager(self) -> ScopeManager: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_manager.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_manager.pyi new file mode 100644 index 00000000..cd074754 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_manager.pyi @@ -0,0 +1,8 @@ +from .scope import Scope +from .span import Span + +class ScopeManager: + def __init__(self) -> None: ... + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/__init__.pyi new file mode 100644 index 00000000..2b0f720c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/__init__.pyi @@ -0,0 +1,9 @@ +from ..scope import Scope +from ..scope_manager import ScopeManager +from ..span import Span + +class ThreadLocalScopeManager(ScopeManager): + def __init__(self) -> None: ... + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/asyncio.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/asyncio.pyi new file mode 100644 index 00000000..4b96d788 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/asyncio.pyi @@ -0,0 +1,8 @@ +from ..scope import Scope +from ..scope_managers import ThreadLocalScopeManager +from ..span import Span + +class AsyncioScopeManager(ThreadLocalScopeManager): + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... 
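With Format, Scope and the scope managers typed above, context propagation can be exercised end to end. A hedged sketch using MockTracer, which (in the real opentracing-python package) registers a text-map propagator by default:

from opentracing.mocktracer import MockTracer
from opentracing.propagation import Format

tracer = MockTracer()
carrier: dict[str, str] = {}

with tracer.start_active_span("outbound-request") as scope:
    # Serialise the active span context into a plain dict carrier ...
    tracer.inject(scope.span.context, Format.TEXT_MAP, carrier)

# ... and rebuild it on the receiving side to parent a new span.
remote_context = tracer.extract(Format.TEXT_MAP, carrier)
span = tracer.start_span("inbound-handler", child_of=remote_context)
span.finish()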
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/constants.pyi new file mode 100644 index 00000000..0a791982 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/constants.pyi @@ -0,0 +1 @@ +ACTIVE_ATTR: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/contextvars.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/contextvars.pyi new file mode 100644 index 00000000..990045ee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/contextvars.pyi @@ -0,0 +1,10 @@ +from ..scope import Scope +from ..scope_manager import ScopeManager +from ..span import Span + +class ContextVarsScopeManager(ScopeManager): + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... + +def no_parent_scope() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/gevent.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/gevent.pyi new file mode 100644 index 00000000..6b835cd9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/gevent.pyi @@ -0,0 +1,8 @@ +from ..scope import Scope +from ..scope_manager import ScopeManager +from ..span import Span + +class GeventScopeManager(ScopeManager): + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/tornado.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/tornado.pyi new file mode 100644 index 00000000..59b1cab9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/scope_managers/tornado.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from ..scope import Scope +from ..scope_managers import ThreadLocalScopeManager +from ..span import Span + +class TornadoScopeManager(ThreadLocalScopeManager): + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... + +class ThreadSafeStackContext: + contexts: Any + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + +def tracer_stack_context() -> ThreadSafeStackContext: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/span.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/span.pyi new file mode 100644 index 00000000..74a6dc5d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/span.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete +from types import TracebackType +from typing import Any +from typing_extensions import Self + +from .tracer import Tracer + +class SpanContext: + EMPTY_BAGGAGE: dict[str, str] + @property + def baggage(self) -> dict[str, str]: ... + +class Span: + def __init__(self, tracer: Tracer, context: SpanContext) -> None: ... + @property + def context(self) -> SpanContext: ... + @property + def tracer(self) -> Tracer: ... + def set_operation_name(self, operation_name: str) -> Self: ... + def finish(self, finish_time: float | None = ...) -> None: ... + def set_tag(self, key: str, value: str | bool | float) -> Self: ... + def log_kv(self, key_values: dict[str, Any], timestamp: float | None = ...) -> Self: ... + def set_baggage_item(self, key: str, value: str) -> Self: ... + def get_baggage_item(self, key: str) -> str | None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def log_event(self, event: Any, payload: Incomplete | None = ...) -> Self: ... + def log(self, **kwargs: Any) -> Self: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/tags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/tags.pyi new file mode 100644 index 00000000..c48c210b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/tags.pyi @@ -0,0 +1,23 @@ +SPAN_KIND: str +SPAN_KIND_RPC_CLIENT: str +SPAN_KIND_RPC_SERVER: str +SPAN_KIND_CONSUMER: str +SPAN_KIND_PRODUCER: str +SERVICE: str +ERROR: str +COMPONENT: str +SAMPLING_PRIORITY: str +PEER_SERVICE: str +PEER_HOSTNAME: str +PEER_ADDRESS: str +PEER_HOST_IPV4: str +PEER_HOST_IPV6: str +PEER_PORT: str +HTTP_URL: str +HTTP_METHOD: str +HTTP_STATUS_CODE: str +DATABASE_INSTANCE: str +DATABASE_STATEMENT: str +DATABASE_TYPE: str +DATABASE_USER: str +MESSAGE_BUS_DESTINATION: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/tracer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/tracer.pyi new file mode 100644 index 00000000..6dcc47cd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/opentracing/opentracing/tracer.pyi @@ -0,0 +1,47 @@ +from typing import Any, NamedTuple + +from .scope import Scope +from .scope_manager import ScopeManager +from .span import Span, SpanContext + +class Tracer: + def __init__(self, scope_manager: ScopeManager | None = ...) -> None: ... + @property + def scope_manager(self) -> ScopeManager: ... + @property + def active_span(self) -> Span | None: ... + def start_active_span( + self, + operation_name: str, + child_of: Span | SpanContext | None = ..., + references: list[Reference] | None = ..., + tags: dict[Any, Any] | None = ..., + start_time: float | None = ..., + ignore_active_span: bool = ..., + finish_on_close: bool = ..., + ) -> Scope: ... 
+ def start_span( + self, + operation_name: str | None = ..., + child_of: Span | SpanContext | None = ..., + references: list[Reference] | None = ..., + tags: dict[Any, Any] | None = ..., + start_time: float | None = ..., + ignore_active_span: bool = ..., + ) -> Span: ... + def inject(self, span_context: SpanContext, format: str, carrier: dict[Any, Any]) -> None: ... + def extract(self, format: str, carrier: dict[Any, Any]) -> SpanContext: ... + +class ReferenceType: + CHILD_OF: str + FOLLOWS_FROM: str + +class Reference(NamedTuple): + type: str + referenced_context: SpanContext | None + +def child_of(referenced_context: SpanContext | None = ...) -> Reference: ... +def follows_from(referenced_context: SpanContext | None = ...) -> Reference: ... +def start_child_span( + parent_span: Span, operation_name: str, tags: dict[Any, Any] | None = ..., start_time: float | None = ... +) -> Span: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/METADATA.toml new file mode 100644 index 00000000..6cf9fae4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/METADATA.toml @@ -0,0 +1 @@ +version = "1.6.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/__init__.pyi new file mode 100644 index 00000000..ce4f3fba --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/__init__.pyi @@ -0,0 +1 @@ +class MQTTException(Exception): ... 
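Back in the opentracing stubs, tracer.pyi (completed just above, before the paho-mqtt stubs begin) also types the reference helpers child_of, follows_from and start_child_span. A short sketch with the package's no-op Tracer:

from opentracing import Tracer, follows_from, start_child_span

tracer = Tracer()  # no-op reference tracer

parent = tracer.start_span("produce-message")

# start_child_span() is shorthand for start_span(..., child_of=parent).
child = start_child_span(parent, "serialize-payload")
child.finish()

# References can also be passed explicitly, e.g. for FOLLOWS_FROM links.
consumer = tracer.start_span(
    "consume-message", references=[follows_from(parent.context)]
)
consumer.finish()
parent.finish()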
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/client.pyi new file mode 100644 index 00000000..2cbe2380 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/client.pyi @@ -0,0 +1,321 @@ +import logging +import socket as _socket +import ssl as _ssl +import time +import types +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from typing import Any, TypeVar +from typing_extensions import TypeAlias + +from .matcher import MQTTMatcher as MQTTMatcher +from .properties import Properties as Properties +from .reasoncodes import ReasonCodes as ReasonCodes +from .subscribeoptions import SubscribeOptions as SubscribeOptions + +ssl: types.ModuleType | None +socks: types.ModuleType | None +time_func = time.monotonic +HAVE_DNS: bool +EAGAIN: int | Incomplete +MQTTv31: int +MQTTv311: int +MQTTv5: int +unicode = str +basestring = str +CONNECT: int +CONNACK: int +PUBLISH: int +PUBACK: int +PUBREC: int +PUBREL: int +PUBCOMP: int +SUBSCRIBE: int +SUBACK: int +UNSUBSCRIBE: int +UNSUBACK: int +PINGREQ: int +PINGRESP: int +DISCONNECT: int +AUTH: int +MQTT_LOG_INFO: int +MQTT_LOG_NOTICE: int +MQTT_LOG_WARNING: int +MQTT_LOG_ERR: int +MQTT_LOG_DEBUG: int +LOGGING_LEVEL: dict[int, int] +CONNACK_ACCEPTED: int +CONNACK_REFUSED_PROTOCOL_VERSION: int +CONNACK_REFUSED_IDENTIFIER_REJECTED: int +CONNACK_REFUSED_SERVER_UNAVAILABLE: int +CONNACK_REFUSED_BAD_USERNAME_PASSWORD: int +CONNACK_REFUSED_NOT_AUTHORIZED: int +mqtt_cs_new: int +mqtt_cs_connected: int +mqtt_cs_disconnecting: int +mqtt_cs_connect_async: int +mqtt_ms_invalid: int +mqtt_ms_publish: int +mqtt_ms_wait_for_puback: int +mqtt_ms_wait_for_pubrec: int +mqtt_ms_resend_pubrel: int +mqtt_ms_wait_for_pubrel: int +mqtt_ms_resend_pubcomp: int +mqtt_ms_wait_for_pubcomp: int +mqtt_ms_send_pubrec: int +mqtt_ms_queued: int +MQTT_ERR_AGAIN: int +MQTT_ERR_SUCCESS: int +MQTT_ERR_NOMEM: int +MQTT_ERR_PROTOCOL: int +MQTT_ERR_INVAL: int +MQTT_ERR_NO_CONN: int +MQTT_ERR_CONN_REFUSED: int +MQTT_ERR_NOT_FOUND: int +MQTT_ERR_CONN_LOST: int +MQTT_ERR_TLS: int +MQTT_ERR_PAYLOAD_SIZE: int +MQTT_ERR_NOT_SUPPORTED: int +MQTT_ERR_AUTH: int +MQTT_ERR_ACL_DENIED: int +MQTT_ERR_UNKNOWN: int +MQTT_ERR_ERRNO: int +MQTT_ERR_QUEUE_SIZE: int +MQTT_ERR_KEEPALIVE: int +MQTT_CLIENT: int +MQTT_BRIDGE: int +MQTT_CLEAN_START_FIRST_ONLY: int +sockpair_data: bytes +_UserData: TypeAlias = Any +_Socket: TypeAlias = _socket.socket | _ssl.SSLSocket | Incomplete +_Payload: TypeAlias = str | bytes | bytearray | float +_ExtraHeader: TypeAlias = dict[str, str] | Callable[[dict[str, str]], dict[str, str]] +_OnLog: TypeAlias = Callable[[Client, _UserData, int, str], object] +_OnConnect: TypeAlias = Callable[[Client, _UserData, dict[str, int], int], object] +_OnConnectV5: TypeAlias = Callable[[Client, _UserData, dict[str, int], ReasonCodes, Properties | None], object] +_TOnConnect = TypeVar("_TOnConnect", _OnConnect, _OnConnectV5) +_OnConnectFail: TypeAlias = Callable[[Client, _UserData], object] +_OnSubscribe: TypeAlias = Callable[[Client, _UserData, int, tuple[int]], object] +_OnSubscribeV5: TypeAlias = Callable[[Client, _UserData, int, list[ReasonCodes], Properties], object] +_TOnSubscribe = TypeVar("_TOnSubscribe", _OnSubscribe, _OnSubscribeV5) +_OnMessage: TypeAlias = Callable[[Client, _UserData, MQTTMessage], object] +_OnPublish: TypeAlias = 
Callable[[Client, _UserData, int], object] +_OnUnsubscribe: TypeAlias = Callable[[Client, _UserData, int], object] +_OnUnsubscribeV5: TypeAlias = Callable[[Client, _UserData, int, Properties, list[ReasonCodes] | ReasonCodes], object] +_TOnUnsubscribe = TypeVar("_TOnUnsubscribe", _OnUnsubscribe, _OnUnsubscribeV5) +_OnDisconnect: TypeAlias = Callable[[Client, _UserData, int], object] +_OnDisconnectV5: TypeAlias = Callable[[Client, _UserData, ReasonCodes | None, Properties | None], object] +_TOnDisconnect = TypeVar("_TOnDisconnect", _OnDisconnect, _OnDisconnectV5) +_OnSocket: TypeAlias = Callable[[Client, _UserData, _Socket | WebsocketWrapper | None], object] + +class WebsocketConnectionError(ValueError): ... + +def error_string(mqtt_errno: int) -> str: ... +def connack_string(connack_code: int) -> str: ... +def base62(num: int, base: str = ..., padding: int = ...) -> str: ... +def topic_matches_sub(sub: str, topic: str) -> bool: ... + +class MQTTMessageInfo: + mid: int + rc: int + def __init__(self, mid: int) -> None: ... + def __iter__(self) -> MQTTMessageInfo: ... + def __next__(self) -> int: ... + def next(self) -> int: ... + def __getitem__(self, index: int) -> int: ... + def wait_for_publish(self, timeout: float | None = ...) -> None: ... + def is_published(self) -> bool: ... + +class MQTTMessage: + timestamp: int + state: int + dup: bool + mid: int + payload: bytes | bytearray + qos: int + retain: bool + info: MQTTMessageInfo + properties: Properties | None + def __init__(self, mid: int = ..., topic: bytes | bytearray = ...) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + @property + def topic(self) -> str: ... + @topic.setter + def topic(self, value: bytes | bytearray) -> None: ... + +class Client: + suppress_exceptions: bool + def __init__( + self, + client_id: str | None = ..., + clean_session: bool | None = ..., + userdata: _UserData | None = ..., + protocol: int = ..., + transport: str = ..., + reconnect_on_failure: bool = ..., + ) -> None: ... + def __del__(self) -> None: ... + def reinitialise(self, client_id: str = ..., clean_session: bool = ..., userdata: _UserData | None = ...) -> None: ... + def ws_set_options(self, path: str = ..., headers: _ExtraHeader | None = ...) -> None: ... + def tls_set_context(self, context: _ssl.SSLContext | None = ...) -> None: ... + def tls_set( + self, + ca_certs: str | None = ..., + certfile: str | None = ..., + keyfile: str | None = ..., + cert_reqs: _ssl.VerifyMode | None = ..., + tls_version: _ssl._SSLMethod | None = ..., + ciphers: str | None = ..., + keyfile_password: _ssl._PasswordType | None = ..., + ) -> None: ... + def tls_insecure_set(self, value: bool) -> None: ... + def proxy_set(self, **proxy_args: Any) -> None: ... + def enable_logger(self, logger: logging.Logger | None = ...) -> None: ... + def disable_logger(self) -> None: ... + def connect( + self, + host: str, + port: int = ..., + keepalive: int = ..., + bind_address: str = ..., + bind_port: int = ..., + clean_start: int = ..., + properties: Properties | None = ..., + ) -> int: ... + def connect_srv( + self, + domain: str | None = ..., + keepalive: int = ..., + bind_address: str = ..., + clean_start: int = ..., + properties: Properties | None = ..., + ) -> int: ... + def connect_async( + self, + host: str, + port: int = ..., + keepalive: int = ..., + bind_address: str = ..., + bind_port: int = ..., + clean_start: int = ..., + properties: Properties | None = ..., + ) -> None: ... 
+ def reconnect_delay_set(self, min_delay: int = ..., max_delay: int = ...) -> None: ... + def reconnect(self) -> int: ... + def loop(self, timeout: float = ..., max_packets: int = ...) -> int: ... + def publish( + self, topic: str, payload: _Payload | None = ..., qos: int = ..., retain: bool = ..., properties: Properties | None = ... + ) -> MQTTMessageInfo: ... + def username_pw_set(self, username: str, password: str | bytes | bytearray | None = ...) -> None: ... + def enable_bridge_mode(self) -> None: ... + def is_connected(self) -> bool: ... + def disconnect(self, reasoncode: ReasonCodes | None = ..., properties: Properties | None = ...) -> int: ... + def subscribe( + self, + topic: str | tuple[str, SubscribeOptions] | list[tuple[str, SubscribeOptions]] | list[tuple[str, int]], + qos: int = ..., + options: SubscribeOptions | None = ..., + properties: Properties | None = ..., + ) -> tuple[int, int]: ... + def unsubscribe(self, topic: str | list[str], properties: Properties | None = ...) -> tuple[int, int]: ... + def loop_read(self, max_packets: int = ...) -> int: ... + def loop_write(self, max_packets: int = ...) -> int: ... + def want_write(self) -> bool: ... + def loop_misc(self) -> int: ... + def max_inflight_messages_set(self, inflight: int) -> None: ... + def max_queued_messages_set(self, queue_size: int) -> Client: ... + def message_retry_set(self, retry: Unused) -> None: ... + def user_data_set(self, userdata: _UserData) -> None: ... + def will_set( + self, topic: str, payload: _Payload | None = ..., qos: int = ..., retain: bool = ..., properties: Properties | None = ... + ) -> None: ... + def will_clear(self) -> None: ... + def socket(self) -> _Socket | WebsocketWrapper: ... + def loop_forever(self, timeout: float = ..., max_packets: int = ..., retry_first_connection: bool = ...) -> int: ... + def loop_start(self) -> int | None: ... + def loop_stop(self, force: bool = ...) -> int | None: ... + @property + def on_log(self) -> _OnLog | None: ... + @on_log.setter + def on_log(self, func: _OnLog | None) -> None: ... + def log_callback(self) -> Callable[[_OnLog], _OnLog]: ... + @property + def on_connect(self) -> _OnConnect | _OnConnectV5 | None: ... + @on_connect.setter + def on_connect(self, func: _OnConnect | _OnConnectV5 | None) -> None: ... + def connect_callback(self) -> Callable[[_TOnConnect], _TOnConnect]: ... + @property + def on_connect_fail(self) -> _OnConnectFail | None: ... + @on_connect_fail.setter + def on_connect_fail(self, func: _OnConnectFail | None) -> None: ... + def connect_fail_callback(self) -> Callable[[_OnConnectFail], _OnConnectFail]: ... + @property + def on_subscribe(self) -> _OnSubscribe | _OnSubscribeV5 | None: ... + @on_subscribe.setter + def on_subscribe(self, func: _OnSubscribe | _OnSubscribeV5 | None) -> None: ... + def subscribe_callback(self) -> Callable[[_TOnSubscribe], _TOnSubscribe]: ... + @property + def on_message(self) -> _OnMessage | None: ... + @on_message.setter + def on_message(self, func: _OnMessage | None) -> None: ... + def message_callback(self) -> Callable[[_OnMessage], _OnMessage]: ... + @property + def on_publish(self) -> _OnPublish | None: ... + @on_publish.setter + def on_publish(self, func: _OnPublish | None) -> None: ... + def publish_callback(self) -> Callable[[_OnPublish], _OnPublish]: ... + @property + def on_unsubscribe(self) -> _OnUnsubscribe | _OnUnsubscribeV5 | None: ... + @on_unsubscribe.setter + def on_unsubscribe(self, func: _OnUnsubscribe | _OnUnsubscribeV5 | None) -> None: ... 
+ def unsubscribe_callback(self) -> Callable[[_TOnUnsubscribe], _TOnUnsubscribe]: ... + @property + def on_disconnect(self) -> _OnDisconnect | _OnDisconnectV5 | None: ... + @on_disconnect.setter + def on_disconnect(self, func: _OnDisconnect | _OnDisconnectV5 | None) -> None: ... + def disconnect_callback(self) -> Callable[[_TOnDisconnect], _TOnDisconnect]: ... + @property + def on_socket_open(self) -> _OnSocket | None: ... + @on_socket_open.setter + def on_socket_open(self, func: _OnSocket | None) -> None: ... + def socket_open_callback(self) -> Callable[[_OnSocket], _OnSocket]: ... + @property + def on_socket_close(self) -> _OnSocket | None: ... + @on_socket_close.setter + def on_socket_close(self, func: _OnSocket | None) -> None: ... + def socket_close_callback(self) -> Callable[[_OnSocket], _OnSocket]: ... + @property + def on_socket_register_write(self) -> _OnSocket | None: ... + @on_socket_register_write.setter + def on_socket_register_write(self, func: _OnSocket | None) -> None: ... + def socket_register_write_callback(self) -> Callable[[_OnSocket], _OnSocket]: ... + @property + def on_socket_unregister_write(self) -> _OnSocket | None: ... + @on_socket_unregister_write.setter + def on_socket_unregister_write(self, func: _OnSocket | None) -> None: ... + def socket_unregister_write_callback(self) -> Callable[[_OnSocket], _OnSocket]: ... + def message_callback_add(self, sub: str, callback: _OnMessage) -> None: ... + def topic_callback(self, sub: str) -> Callable[[_OnMessage], _OnMessage]: ... + def message_callback_remove(self, sub: str) -> None: ... + +class WebsocketWrapper: + OPCODE_CONTINUATION: int + OPCODE_TEXT: int + OPCODE_BINARY: int + OPCODE_CONNCLOSE: int + OPCODE_PING: int + OPCODE_PONG: int + connected: bool + def __init__( + self, socket: _Socket, host: str, port: int, is_ssl: bool, path: str, extra_headers: _ExtraHeader | None + ) -> None: ... + def __del__(self) -> None: ... + def recv(self, length: int) -> bytes | bytearray | None: ... + def read(self, length: int) -> bytes | bytearray | None: ... + def send(self, data: bytes | bytearray) -> int: ... + def write(self, data: bytes | bytearray) -> int: ... + def close(self) -> None: ... + def fileno(self) -> int: ... + def pending(self) -> int: ... + def setblocking(self, flag: bool) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/matcher.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/matcher.pyi new file mode 100644 index 00000000..549ac1b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/matcher.pyi @@ -0,0 +1,11 @@ +from collections.abc import Generator +from typing import Any + +class MQTTMatcher: + class Node: ... + + def __init__(self) -> None: ... + def __setitem__(self, key: str, value: Any) -> None: ... + def __getitem__(self, key: str) -> Any: ... + def __delitem__(self, key: str) -> None: ... + def iter_match(self, topic: str) -> Generator[Any, None, None]: ... 
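The client.pyi stub above covers the bulk of the paho-mqtt 1.x surface. A minimal subscriber sketch against that API (the broker host and topic are placeholders; the callback signatures follow the MQTT v3.1.1 forms typed above):

import paho.mqtt.client as mqtt

def on_connect(client, userdata, flags, rc):
    # Subscribe from on_connect so the subscription is re-established
    # after an automatic reconnect.
    client.subscribe("sensors/#", qos=1)

def on_message(client, userdata, msg):
    print(msg.topic, msg.payload.decode())

client = mqtt.Client(client_id="stub-demo", clean_session=True)
client.on_connect = on_connect
client.on_message = on_message
client.connect("broker.example.org", 1883, keepalive=60)
client.loop_forever()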
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/packettypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/packettypes.pyi new file mode 100644 index 00000000..2fe5644e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/packettypes.pyi @@ -0,0 +1,19 @@ +class PacketTypes: + indexes: range + CONNECT: int + CONNACK: int + PUBLISH: int + PUBACK: int + PUBREC: int + PUBREL: int + PUBCOMP: int + SUBSCRIBE: int + SUBACK: int + UNSUBSCRIBE: int + UNSUBACK: int + PINGREQ: int + PINGRESP: int + DISCONNECT: int + AUTH: int + WILLMESSAGE: int + Names: list[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/properties.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/properties.pyi new file mode 100644 index 00000000..174a6a0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/properties.pyi @@ -0,0 +1,38 @@ +from typing import Any + +from . import MQTTException as MQTTException + +class MalformedPacket(MQTTException): ... + +def writeInt16(length: int) -> bytearray: ... +def readInt16(buf: bytes) -> int: ... +def writeInt32(length: int) -> bytearray: ... +def readInt32(buf: bytes) -> int: ... +def writeUTF(data: str | bytes) -> bytearray: ... +def readUTF(buffer: bytes, maxlen: int) -> tuple[str, int]: ... +def writeBytes(buffer: bytes) -> bytearray: ... +def readBytes(buffer: bytes) -> tuple[bytes, int]: ... + +class VariableByteIntegers: + @staticmethod + def encode(x: int) -> bytes: ... + @staticmethod + def decode(buffer: bytes) -> tuple[int, int]: ... + +class Properties: + packetType: int + types: list[str] + names: dict[str, int] + properties: dict[int, tuple[int, list[int]]] + def __init__(self, packetType: int) -> None: ... + def allowsMultiple(self, compressedName: str) -> bool: ... + def getIdentFromName(self, compressedName: str) -> int: ... + def __setattr__(self, name: str, value: Any) -> None: ... + def json(self) -> dict[str, Any]: ... + def isEmpty(self) -> bool: ... + def clear(self) -> None: ... + def writeProperty(self, identifier: int, type: int, value: Any) -> bytes: ... + def pack(self) -> bytes: ... + def readProperty(self, buffer: bytes, type: int, propslen: int) -> Any: ... + def getNameFromIdent(self, identifier: int) -> str | None: ... + def unpack(self, buffer: bytes) -> tuple[Properties, int]: ... 
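packettypes.pyi and properties.pyi above type paho's MQTT v5 property machinery, where property names are set as attributes and validated by __setattr__. A hedged sketch (property names follow the MQTT v5 specification; the values are illustrative):

from paho.mqtt.packettypes import PacketTypes
from paho.mqtt.properties import Properties

# Properties are scoped to a packet type; names not valid for that
# packet are rejected by __setattr__.
props = Properties(PacketTypes.PUBLISH)
props.MessageExpiryInterval = 30            # seconds
props.UserProperty = ("source", "bench")    # arbitrary key/value metadata

wire = props.pack()     # length-prefixed wire-format encoding
print(props.json())     # dict view of the same properties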
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/publish.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/publish.pyi new file mode 100644 index 00000000..af140233 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/publish.pyi @@ -0,0 +1,62 @@ +import ssl +from collections.abc import Iterable +from typing_extensions import NotRequired, TypeAlias, TypedDict + +_Payload: TypeAlias = str | bytes | bytearray | float + +class _Msg(TypedDict): + topic: str + payload: NotRequired[_Payload | None] + qos: NotRequired[int] + retain: NotRequired[int] + +class _Auth(TypedDict): + username: str + password: NotRequired[str] + +class _TLS(TypedDict): + ca_certs: str + certfile: NotRequired[str] + keyfile: NotRequired[str] + tls_version: NotRequired[ssl._SSLMethod] + ciphers: NotRequired[str] + insecure: NotRequired[str] + cert_reqs: NotRequired[ssl.VerifyMode] + keyfile_password: NotRequired[ssl._PasswordType] + +class _Proxy(TypedDict): + proxy_type: int + proxy_addr: str + proxy_rdns: NotRequired[bool] + proxy_username: NotRequired[str] + proxy_passwor: NotRequired[str] + +def multiple( + msgs: Iterable[_Msg], + hostname: str = ..., + port: int = ..., + client_id: str = ..., + keepalive: int = ..., + will: _Msg | None = ..., + auth: _Auth | None = ..., + tls: _TLS | None = ..., + protocol: int = ..., + transport: str = ..., + proxy_args: _Proxy | None = ..., +) -> None: ... +def single( + topic: str, + payload: _Payload | None = ..., + qos: int | None = ..., + retain: bool | None = ..., + hostname: str = ..., + port: int = ..., + client_id: str = ..., + keepalive: int = ..., + will: _Msg | None = ..., + auth: _Auth | None = ..., + tls: _TLS | None = ..., + protocol: int = ..., + transport: str = ..., + proxy_args: _Proxy | None = ..., +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/reasoncodes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/reasoncodes.pyi new file mode 100644 index 00000000..3a3b20f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/reasoncodes.pyi @@ -0,0 +1,13 @@ +class ReasonCodes: + packetType: int + names: dict[int, dict[str, list[int]]] + value: int + def __init__(self, packetType: int, aName: str = ..., identifier: int = ...) -> None: ... + def __getName__(self, packetType: int, identifier: int) -> str: ... + def getId(self, name: str) -> int: ... + def set(self, name: str) -> None: ... + def unpack(self, buffer: bytearray) -> int: ... + def getName(self) -> str: ... + def __eq__(self, other: object) -> bool: ... + def json(self) -> str: ... + def pack(self) -> bytearray: ... 
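publish.pyi above types paho's fire-and-forget helpers; each call connects, publishes and disconnects on the caller's behalf. A short sketch (the hostname is a placeholder):

import paho.mqtt.publish as publish

publish.single(
    "bench/status",
    payload="ok",
    qos=1,
    hostname="broker.example.org",
)

# multiple() accepts a sequence of _Msg-shaped dicts (or tuples).
publish.multiple(
    [
        {"topic": "bench/temperature", "payload": "21.5"},
        {"topic": "bench/humidity", "payload": "40", "retain": True},
    ],
    hostname="broker.example.org",
)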
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/subscribe.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/subscribe.pyi new file mode 100644 index 00000000..a9a7e005 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/subscribe.pyi @@ -0,0 +1,39 @@ +from collections.abc import Callable + +from .client import Client, MQTTMessage, _UserData +from .publish import _TLS, _Auth, _Msg, _Proxy + +def callback( + callback: Callable[[Client, _UserData, MQTTMessage], None], + topics: list[str], + qos: int = ..., + userdata: _UserData | None = ..., + hostname: str = ..., + port: int = ..., + client_id: str = ..., + keepalive: int = ..., + will: _Msg | None = ..., + auth: _Auth | None = ..., + tls: _TLS | None = ..., + protocol: int = ..., + transport: str = ..., + clean_session: bool = ..., + proxy_args: _Proxy | None = ..., +) -> None: ... +def simple( + topics: str | list[str], + qos: int = ..., + msg_count: int = ..., + retained: bool = ..., + hostname: str = ..., + port: int = ..., + client_id: str = ..., + keepalive: int = ..., + will: _Msg | None = ..., + auth: _Auth | None = ..., + tls: _TLS | None = ..., + protocol: int = ..., + transport: str = ..., + clean_session: bool = ..., + proxy_args: _Proxy | None = ..., +) -> list[MQTTMessage] | MQTTMessage: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/subscribeoptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/subscribeoptions.pyi new file mode 100644 index 00000000..4d9c5850 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paho-mqtt/paho/mqtt/subscribeoptions.pyi @@ -0,0 +1,25 @@ +from typing import Any +from typing_extensions import TypedDict + +from . import MQTTException as MQTTException + +class _SubscribeOptionsJson(TypedDict): + QoS: int + noLocal: bool + retainAsPublished: bool + retainHandling: int + +class SubscribeOptions: + RETAIN_SEND_ON_SUBSCRIBE: int + RETAIN_SEND_IF_NEW_SUB: int + RETAIN_DO_NOT_SEND: int + names: list[str] + Qos: int + noLocal: bool + retainAsPublished: bool + retainHandling: int + def __init__(self, qos: int = ..., noLocal: bool = ..., retainAsPublished: bool = ..., retainHandling: int = ...) -> None: ... + def __setattr__(self, name: str, value: Any) -> None: ... + def pack(self) -> bytes: ... + def unpack(self, buffer: bytes | bytearray) -> int: ... + def json(self) -> _SubscribeOptionsJson: ... 
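subscribe.pyi mirrors the publish helpers for the receiving side. A minimal sketch (with the default msg_count of 1, simple() returns a single MQTTMessage rather than a list; the hostname is a placeholder):

import paho.mqtt.subscribe as subscribe

# Blocks until one message arrives on the topic, then disconnects.
msg = subscribe.simple("bench/status", hostname="broker.example.org", qos=1)
print(msg.topic, msg.payload)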
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..930b0e7f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist.txt @@ -0,0 +1,9 @@ +paramiko.SFTPServer.__init__ +paramiko.Transport.open_x11_channel +paramiko.Transport.send_ignore +paramiko.Transport.start_server +paramiko.sftp_server.SFTPServer.__init__ +paramiko.transport.Transport.open_x11_channel +paramiko.transport.Transport.send_ignore +paramiko.transport.Transport.start_server +paramiko.util.SupportsClose diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist_darwin.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist_darwin.txt new file mode 100644 index 00000000..62983197 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist_darwin.txt @@ -0,0 +1,2 @@ +paramiko._winapi +paramiko.win_pageant diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..62983197 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,2 @@ +paramiko._winapi +paramiko.win_pageant diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist_win32.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist_win32.txt new file mode 100644 index 00000000..51664f85 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/@tests/stubtest_allowlist_win32.txt @@ -0,0 +1,2 @@ +# Type-checkers don't support architecture checks. 
So we have to Union +paramiko.win_pageant.ULONG_PTR diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/METADATA.toml new file mode 100644 index 00000000..3fdfdc78 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/METADATA.toml @@ -0,0 +1,8 @@ +version = "3.0.*" +# Requires a version of cryptography where cryptography.hazmat.primitives.ciphers.Cipher is generic +requires = ["cryptography>=37.0.0"] + +[tool.stubtest] +ignore_missing_stub = true +# linux and darwin are equivalent +platforms = ["linux", "win32"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/__init__.pyi new file mode 100644 index 00000000..6d9c20fb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/__init__.pyi @@ -0,0 +1,46 @@ +from paramiko.agent import Agent as Agent, AgentKey as AgentKey +from paramiko.channel import Channel as Channel +from paramiko.client import ( + AutoAddPolicy as AutoAddPolicy, + MissingHostKeyPolicy as MissingHostKeyPolicy, + RejectPolicy as RejectPolicy, + SSHClient as SSHClient, + WarningPolicy as WarningPolicy, +) +from paramiko.common import io_sleep as io_sleep +from paramiko.config import SSHConfig as SSHConfig, SSHConfigDict as SSHConfigDict +from paramiko.dsskey import DSSKey as DSSKey +from paramiko.ecdsakey import ECDSAKey as ECDSAKey +from paramiko.ed25519key import Ed25519Key as Ed25519Key +from paramiko.file import BufferedFile as BufferedFile +from paramiko.hostkeys import HostKeys as HostKeys +from paramiko.message import Message as Message +from paramiko.pkey import PKey as PKey +from paramiko.proxy import ProxyCommand as ProxyCommand +from paramiko.rsakey import RSAKey as RSAKey +from paramiko.server import ServerInterface as ServerInterface, SubsystemHandler as SubsystemHandler +from paramiko.sftp import SFTPError as SFTPError +from paramiko.sftp_attr import SFTPAttributes as SFTPAttributes +from paramiko.sftp_client import SFTP as SFTP, SFTPClient as SFTPClient +from paramiko.sftp_file import SFTPFile as SFTPFile +from paramiko.sftp_handle import SFTPHandle as SFTPHandle +from paramiko.sftp_server import SFTPServer as SFTPServer +from paramiko.sftp_si import SFTPServerInterface as SFTPServerInterface +from paramiko.ssh_exception import ( + AuthenticationException as AuthenticationException, + BadAuthenticationType as BadAuthenticationType, + BadHostKeyException as BadHostKeyException, + ChannelException as ChannelException, + ConfigParseError as ConfigParseError, + CouldNotCanonicalize as CouldNotCanonicalize, + PasswordRequiredException as PasswordRequiredException, + ProxyCommandFailure as ProxyCommandFailure, + SSHException as SSHException, +) +from paramiko.transport import SecurityOptions as SecurityOptions, Transport as Transport + +__author__: str +__license__: str + +# Names in __all__ with no definition: +# util diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/_version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/_version.pyi new file mode 100644 index 00000000..8faa8a2d --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/_version.pyi @@ -0,0 +1,2 @@ +__version_info__: tuple[int, int, int] +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/_winapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/_winapi.pyi new file mode 100644 index 00000000..2943652e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/_winapi.pyi @@ -0,0 +1,94 @@ +import builtins +import ctypes +import sys +from _typeshed import Incomplete +from types import TracebackType +from typing import Any +from typing_extensions import Self + +if sys.platform == "win32": + def format_system_message(errno: int) -> str | None: ... + + class WindowsError(builtins.WindowsError): + def __init__(self, value: int | None = ...) -> None: ... + @property + def message(self) -> str: ... + @property + def code(self) -> int: ... + + def handle_nonzero_success(result: int) -> None: ... + GMEM_MOVEABLE: int + GlobalAlloc: Any + GlobalLock: Any + GlobalUnlock: Any + GlobalSize: Any + CreateFileMapping: Any + MapViewOfFile: Any + UnmapViewOfFile: Any + RtlMoveMemory: Any + + class MemoryMap: + name: str + length: int + security_attributes: Any = ... + pos: int + filemap: Any = ... + view: Any = ... + def __init__(self, name: str, length: int, security_attributes: Incomplete | None = ...) -> None: ... + def __enter__(self) -> Self: ... + def seek(self, pos: int) -> None: ... + def write(self, msg: bytes) -> None: ... + def read(self, n: int) -> bytes: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, tb: TracebackType | None + ) -> None: ... + READ_CONTROL: int + STANDARD_RIGHTS_REQUIRED: int + STANDARD_RIGHTS_READ: int + STANDARD_RIGHTS_WRITE: int + STANDARD_RIGHTS_EXECUTE: int + STANDARD_RIGHTS_ALL: int + POLICY_VIEW_LOCAL_INFORMATION: int + POLICY_VIEW_AUDIT_INFORMATION: int + POLICY_GET_PRIVATE_INFORMATION: int + POLICY_TRUST_ADMIN: int + POLICY_CREATE_ACCOUNT: int + POLICY_CREATE_SECRET: int + POLICY_CREATE_PRIVILEGE: int + POLICY_SET_DEFAULT_QUOTA_LIMITS: int + POLICY_SET_AUDIT_REQUIREMENTS: int + POLICY_AUDIT_LOG_ADMIN: int + POLICY_SERVER_ADMIN: int + POLICY_LOOKUP_NAMES: int + POLICY_NOTIFICATION: int + POLICY_ALL_ACCESS: int + POLICY_READ: int + POLICY_WRITE: int + POLICY_EXECUTE: int + + class TokenAccess: + TOKEN_QUERY: int + + class TokenInformationClass: + TokenUser: int + + class TOKEN_USER(ctypes.Structure): + num: int + + class SECURITY_DESCRIPTOR(ctypes.Structure): + SECURITY_DESCRIPTOR_CONTROL: Any + REVISION: int + + class SECURITY_ATTRIBUTES(ctypes.Structure): + nLength: int + lpSecurityDescriptor: Any + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + @property + def descriptor(self) -> Any: ... + @descriptor.setter + def descriptor(self, value: Any) -> None: ... + + def GetTokenInformation(token: Any, information_class: Any) -> Any: ... + def OpenProcessToken(proc_handle: Any, access: Any) -> Any: ... + def get_current_user() -> TOKEN_USER: ... + def get_security_attributes_for_user(user: TOKEN_USER | None = ...) -> SECURITY_ATTRIBUTES: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/agent.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/agent.pyi new file mode 100644 index 00000000..a8a8c532 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/agent.pyi @@ -0,0 +1,67 @@ +from socket import _RetAddress, socket +from threading import Thread +from typing import Protocol + +from paramiko.channel import Channel +from paramiko.message import Message +from paramiko.pkey import PKey +from paramiko.transport import Transport + +class _AgentProxy(Protocol): + def connect(self) -> None: ... + def close(self) -> None: ... + +cSSH2_AGENTC_REQUEST_IDENTITIES: bytes +SSH2_AGENT_IDENTITIES_ANSWER: int +cSSH2_AGENTC_SIGN_REQUEST: bytes +SSH2_AGENT_SIGN_RESPONSE: int + +class AgentSSH: + def __init__(self) -> None: ... + def get_keys(self) -> tuple[AgentKey, ...]: ... + +class AgentProxyThread(Thread): + def __init__(self, agent: _AgentProxy) -> None: ... + def run(self) -> None: ... + +class AgentLocalProxy(AgentProxyThread): + def __init__(self, agent: AgentServerProxy) -> None: ... + def get_connection(self) -> tuple[socket, _RetAddress]: ... + +class AgentRemoteProxy(AgentProxyThread): + def __init__(self, agent: AgentClientProxy, chan: Channel) -> None: ... + def get_connection(self) -> tuple[socket, _RetAddress]: ... + +class AgentClientProxy: + thread: Thread + def __init__(self, chanRemote: Channel) -> None: ... + def __del__(self) -> None: ... + def connect(self) -> None: ... + def close(self) -> None: ... + +class AgentServerProxy(AgentSSH): + thread: Thread + def __init__(self, t: Transport) -> None: ... + def __del__(self) -> None: ... + def connect(self) -> None: ... + def close(self) -> None: ... + def get_env(self) -> dict[str, str]: ... + +class AgentRequestHandler: + def __init__(self, chanClient: Channel) -> None: ... + def __del__(self) -> None: ... + def close(self) -> None: ... + +class Agent(AgentSSH): + def __init__(self) -> None: ... + def close(self) -> None: ... + +class AgentKey(PKey): + agent: AgentSSH + blob: bytes + public_blob: None + name: str + def __init__(self, agent: AgentSSH, blob: bytes) -> None: ... + def asbytes(self) -> bytes: ... + def get_name(self) -> str: ... + def sign_ssh_data(self, data: bytes, algorithm: str | None = ...) -> Message: ... 
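agent.pyi above types paramiko's SSH-agent integration. A hedged sketch that lists the identities held by a locally running agent (requires a reachable ssh-agent or Pageant):

from paramiko.agent import Agent

agent = Agent()
try:
    # get_keys() returns a tuple of AgentKey objects backed by the agent.
    for key in agent.get_keys():
        print(key.get_name(), len(key.asbytes()), "bytes")
finally:
    agent.close()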
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/auth_handler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/auth_handler.pyi new file mode 100644 index 00000000..d82d27e3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/auth_handler.pyi @@ -0,0 +1,48 @@ +from collections.abc import Callable +from threading import Event +from typing_extensions import TypeAlias + +from paramiko.pkey import PKey +from paramiko.ssh_gss import _SSH_GSSAuth +from paramiko.transport import Transport + +_InteractiveCallback: TypeAlias = Callable[[str, str, list[tuple[str, bool]]], list[str]] + +class AuthHandler: + transport: Transport + username: str | None + authenticated: bool + auth_event: Event | None + auth_method: str + banner: str | None + password: str | None + private_key: PKey | None + interactive_handler: _InteractiveCallback | None + submethods: str | None + auth_username: str | None + auth_fail_count: int + gss_host: str | None + gss_deleg_creds: bool + def __init__(self, transport: Transport) -> None: ... + def is_authenticated(self) -> bool: ... + def get_username(self) -> str | None: ... + def auth_none(self, username: str, event: Event) -> None: ... + def auth_publickey(self, username: str, key: PKey, event: Event) -> None: ... + def auth_password(self, username: str, password: str, event: Event) -> None: ... + def auth_interactive(self, username: str, handler: _InteractiveCallback, event: Event, submethods: str = ...) -> None: ... + def auth_gssapi_with_mic(self, username: str, gss_host: str, gss_deleg_creds: bool, event: Event) -> None: ... + def auth_gssapi_keyex(self, username: str, event: Event) -> None: ... + def abort(self) -> None: ... + def wait_for_response(self, event: Event) -> list[str]: ... + +class GssapiWithMicAuthHandler: + method: str + sshgss: _SSH_GSSAuth + def __init__(self, delegate: AuthHandler, sshgss: _SSH_GSSAuth) -> None: ... + def abort(self) -> None: ... + @property + def transport(self) -> Transport: ... + @property + def auth_username(self) -> str: ... + @property + def gss_host(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ber.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ber.pyi new file mode 100644 index 00000000..831ef1ef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ber.pyi @@ -0,0 +1,18 @@ +from collections.abc import Iterable +from typing import Any + +class BERException(Exception): ... + +class BER: + content: bytes + idx: int + def __init__(self, content: bytes = ...) -> None: ... + def asbytes(self) -> bytes: ... + def decode(self) -> None | int | list[int]: ... + def decode_next(self) -> None | int | list[int]: ... + @staticmethod + def decode_sequence(data: bytes) -> list[int | list[int]]: ... + def encode_tlv(self, ident: int, val: bytes) -> None: ... + def encode(self, x: Any) -> None: ... + @staticmethod + def encode_sequence(data: Iterable[str]) -> bytes: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/buffered_pipe.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/buffered_pipe.pyi new file mode 100644 index 00000000..c31584e5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/buffered_pipe.pyi @@ -0,0 +1,14 @@ +from threading import Event +from typing import AnyStr, Generic + +class PipeTimeout(OSError): ... + +class BufferedPipe(Generic[AnyStr]): + def __init__(self) -> None: ... + def set_event(self, event: Event) -> None: ... + def feed(self, data: AnyStr) -> None: ... + def read_ready(self) -> bool: ... + def read(self, nbytes: int, timeout: float | None = ...) -> AnyStr: ... + def empty(self) -> AnyStr: ... + def close(self) -> None: ... + def __len__(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/channel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/channel.pyi new file mode 100644 index 00000000..f88d5c75 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/channel.pyi @@ -0,0 +1,100 @@ +from collections.abc import Callable, Mapping +from logging import Logger +from threading import Condition, Event, Lock +from typing import Any, TypeVar +from typing_extensions import Literal + +from paramiko.buffered_pipe import BufferedPipe +from paramiko.file import BufferedFile +from paramiko.transport import Transport +from paramiko.util import ClosingContextManager + +_F = TypeVar("_F", bound=Callable[..., Any]) + +def open_only(func: _F) -> Callable[[_F], _F]: ... + +class Channel(ClosingContextManager): + chanid: int + remote_chanid: int + transport: Transport | None + active: bool + eof_received: int + eof_sent: int + in_buffer: BufferedPipe[Any] + in_stderr_buffer: BufferedPipe[Any] + timeout: float | None + closed: bool + ultra_debug: bool + lock: Lock + out_buffer_cv: Condition + in_window_size: int + out_window_size: int + in_max_packet_size: int + out_max_packet_size: int + in_window_threshold: int + in_window_sofar: int + status_event: Event + logger: Logger + event: Event + event_ready: bool + combine_stderr: bool + exit_status: int + origin_addr: None + def __init__(self, chanid: int) -> None: ... + def __del__(self) -> None: ... + def get_pty( + self, term: str | bytes = ..., width: int = ..., height: int = ..., width_pixels: int = ..., height_pixels: int = ... + ) -> None: ... + def invoke_shell(self) -> None: ... + def exec_command(self, command: str | bytes) -> None: ... + def invoke_subsystem(self, subsystem: str | bytes) -> None: ... + def resize_pty(self, width: int = ..., height: int = ..., width_pixels: int = ..., height_pixels: int = ...) -> None: ... + def update_environment(self, environment: Mapping[str | bytes, str | bytes]) -> None: ... + def set_environment_variable(self, name: str | bytes, value: str | bytes) -> None: ... + def exit_status_ready(self) -> bool: ... + def recv_exit_status(self) -> int: ... + def send_exit_status(self, status: int) -> None: ... + def request_x11( + self, + screen_number: int = ..., + auth_protocol: str | bytes | None = ..., + auth_cookie: str | bytes | None = ..., + single_connection: bool = ..., + handler: Callable[[Channel, tuple[str, int]], object] | None = ..., + ) -> bytes: ... 
+ def request_forward_agent(self, handler: Callable[[Channel], object]) -> bool: ... + def get_transport(self) -> Transport: ... + def set_name(self, name: str) -> None: ... + def get_name(self) -> str: ... + def get_id(self) -> int: ... + def set_combine_stderr(self, combine: bool) -> bool: ... + def settimeout(self, timeout: float | None) -> None: ... + def gettimeout(self) -> float | None: ... + def setblocking(self, blocking: bool | Literal[0, 1]) -> None: ... + def getpeername(self) -> str: ... + def close(self) -> None: ... + def recv_ready(self) -> bool: ... + def recv(self, nbytes: int) -> bytes: ... + def recv_stderr_ready(self) -> bool: ... + def recv_stderr(self, nbytes: int) -> bytes: ... + def send_ready(self) -> bool: ... + def send(self, s: bytes) -> int: ... + def send_stderr(self, s: bytes) -> int: ... + def sendall(self, s: bytes) -> None: ... + def sendall_stderr(self, s: bytes) -> None: ... + def makefile(self, *params: Any) -> ChannelFile: ... + def makefile_stderr(self, *params: Any) -> ChannelStderrFile: ... + def makefile_stdin(self, *params: Any) -> ChannelStdinFile: ... + def fileno(self) -> int: ... + def shutdown(self, how: int) -> None: ... + def shutdown_read(self) -> None: ... + def shutdown_write(self) -> None: ... + +class ChannelFile(BufferedFile[Any]): + channel: Channel + def __init__(self, channel: Channel, mode: str = ..., bufsize: int = ...) -> None: ... + +class ChannelStderrFile(ChannelFile): ... + +class ChannelStdinFile(ChannelFile): + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/client.pyi new file mode 100644 index 00000000..e8adb90f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/client.pyi @@ -0,0 +1,81 @@ +from collections.abc import Iterable, Mapping +from typing import NoReturn, Protocol + +from paramiko.channel import Channel, ChannelFile, ChannelStderrFile, ChannelStdinFile +from paramiko.hostkeys import HostKeys +from paramiko.pkey import PKey +from paramiko.sftp_client import SFTPClient +from paramiko.transport import Transport +from paramiko.util import ClosingContextManager + +from .transport import _SocketLike + +class _TransportFactory(Protocol): + def __call__( + self, __sock: _SocketLike, *, gss_kex: bool, gss_deleg_creds: bool, disabled_algorithms: dict[str, Iterable[str]] | None + ) -> Transport: ... + +class SSHClient(ClosingContextManager): + def __init__(self) -> None: ... + def load_system_host_keys(self, filename: str | None = ...) -> None: ... + def load_host_keys(self, filename: str) -> None: ... + def save_host_keys(self, filename: str) -> None: ... + def get_host_keys(self) -> HostKeys: ... + def set_log_channel(self, name: str) -> None: ... + def set_missing_host_key_policy(self, policy: type[MissingHostKeyPolicy] | MissingHostKeyPolicy) -> None: ... 
+ def connect( + self, + hostname: str, + port: int = ..., + username: str | None = ..., + password: str | None = ..., + pkey: PKey | None = ..., + key_filename: str | None = ..., + timeout: float | None = ..., + allow_agent: bool = ..., + look_for_keys: bool = ..., + compress: bool = ..., + sock: _SocketLike | None = ..., + gss_auth: bool = ..., + gss_kex: bool = ..., + gss_deleg_creds: bool = ..., + gss_host: str | None = ..., + banner_timeout: float | None = ..., + auth_timeout: float | None = ..., + gss_trust_dns: bool = ..., + passphrase: str | None = ..., + disabled_algorithms: dict[str, Iterable[str]] | None = ..., + transport_factory: _TransportFactory | None = ..., + ) -> None: ... + def close(self) -> None: ... + def exec_command( + self, + command: str, + bufsize: int = ..., + timeout: float | None = ..., + get_pty: bool = ..., + environment: dict[str, str] | None = ..., + ) -> tuple[ChannelStdinFile, ChannelFile, ChannelStderrFile]: ... + def invoke_shell( + self, + term: str = ..., + width: int = ..., + height: int = ..., + width_pixels: int = ..., + height_pixels: int = ..., + environment: Mapping[str, str] | None = ..., + ) -> Channel: ... + def open_sftp(self) -> SFTPClient: ... + def get_transport(self) -> Transport | None: ... + +class MissingHostKeyPolicy: + def missing_host_key(self, client: SSHClient, hostname: str, key: PKey) -> None: ... + +class AutoAddPolicy(MissingHostKeyPolicy): + def missing_host_key(self, client: SSHClient, hostname: str, key: PKey) -> None: ... + +class RejectPolicy(MissingHostKeyPolicy): + def missing_host_key(self, client: SSHClient, hostname: str, key: PKey) -> NoReturn: ... + +class WarningPolicy(MissingHostKeyPolicy): + def missing_host_key(self, client: SSHClient, hostname: str, key: PKey) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/common.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/common.pyi new file mode 100644 index 00000000..c895dfd3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/common.pyi @@ -0,0 +1,128 @@ +def byte_ord(c: int | str) -> int: ... +def byte_chr(c: int) -> bytes: ... +def byte_mask(c: int, mask: int) -> bytes: ... 
+ +MSG_DISCONNECT: int +MSG_IGNORE: int +MSG_UNIMPLEMENTED: int +MSG_DEBUG: int +MSG_SERVICE_REQUEST: int +MSG_SERVICE_ACCEPT: int +MSG_KEXINIT: int +MSG_NEWKEYS: int +MSG_USERAUTH_REQUEST: int +MSG_USERAUTH_FAILURE: int +MSG_USERAUTH_SUCCESS: int +MSG_USERAUTH_BANNER: int +MSG_USERAUTH_PK_OK: int +MSG_USERAUTH_INFO_REQUEST: int +MSG_USERAUTH_INFO_RESPONSE: int +MSG_USERAUTH_GSSAPI_RESPONSE: int +MSG_USERAUTH_GSSAPI_TOKEN: int +MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: int +MSG_USERAUTH_GSSAPI_ERROR: int +MSG_USERAUTH_GSSAPI_ERRTOK: int +MSG_USERAUTH_GSSAPI_MIC: int +HIGHEST_USERAUTH_MESSAGE_ID: int +MSG_GLOBAL_REQUEST: int +MSG_REQUEST_SUCCESS: int +MSG_REQUEST_FAILURE: int +MSG_CHANNEL_OPEN: int +MSG_CHANNEL_OPEN_SUCCESS: int +MSG_CHANNEL_OPEN_FAILURE: int +MSG_CHANNEL_WINDOW_ADJUST: int +MSG_CHANNEL_DATA: int +MSG_CHANNEL_EXTENDED_DATA: int +MSG_CHANNEL_EOF: int +MSG_CHANNEL_CLOSE: int +MSG_CHANNEL_REQUEST: int +MSG_CHANNEL_SUCCESS: int +MSG_CHANNEL_FAILURE: int + +cMSG_DISCONNECT: bytes +cMSG_IGNORE: bytes +cMSG_UNIMPLEMENTED: bytes +cMSG_DEBUG: bytes +cMSG_SERVICE_REQUEST: bytes +cMSG_SERVICE_ACCEPT: bytes +cMSG_KEXINIT: bytes +cMSG_NEWKEYS: bytes +cMSG_USERAUTH_REQUEST: bytes +cMSG_USERAUTH_FAILURE: bytes +cMSG_USERAUTH_SUCCESS: bytes +cMSG_USERAUTH_BANNER: bytes +cMSG_USERAUTH_PK_OK: bytes +cMSG_USERAUTH_INFO_REQUEST: bytes +cMSG_USERAUTH_INFO_RESPONSE: bytes +cMSG_USERAUTH_GSSAPI_RESPONSE: bytes +cMSG_USERAUTH_GSSAPI_TOKEN: bytes +cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: bytes +cMSG_USERAUTH_GSSAPI_ERROR: bytes +cMSG_USERAUTH_GSSAPI_ERRTOK: bytes +cMSG_USERAUTH_GSSAPI_MIC: bytes +cMSG_GLOBAL_REQUEST: bytes +cMSG_REQUEST_SUCCESS: bytes +cMSG_REQUEST_FAILURE: bytes +cMSG_CHANNEL_OPEN: bytes +cMSG_CHANNEL_OPEN_SUCCESS: bytes +cMSG_CHANNEL_OPEN_FAILURE: bytes +cMSG_CHANNEL_WINDOW_ADJUST: bytes +cMSG_CHANNEL_DATA: bytes +cMSG_CHANNEL_EXTENDED_DATA: bytes +cMSG_CHANNEL_EOF: bytes +cMSG_CHANNEL_CLOSE: bytes +cMSG_CHANNEL_REQUEST: bytes +cMSG_CHANNEL_SUCCESS: bytes +cMSG_CHANNEL_FAILURE: bytes + +MSG_NAMES: dict[int, str] + +AUTH_SUCCESSFUL: int +AUTH_PARTIALLY_SUCCESSFUL: int +AUTH_FAILED: int + +OPEN_SUCCEEDED: int +OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED: int +OPEN_FAILED_CONNECT_FAILED: int +OPEN_FAILED_UNKNOWN_CHANNEL_TYPE: int +OPEN_FAILED_RESOURCE_SHORTAGE: int + +CONNECTION_FAILED_CODE: dict[int, str] + +DISCONNECT_SERVICE_NOT_AVAILABLE: int +DISCONNECT_AUTH_CANCELLED_BY_USER: int +DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE: int + +zero_byte: bytes +one_byte: bytes +four_byte: bytes +max_byte: bytes +cr_byte: bytes +linefeed_byte: bytes +crlf: bytes +cr_byte_value: int +linefeed_byte_value: int +xffffffff: int +x80000000: int +o666: int +o660: int +o644: int +o600: int +o777: int +o700: int +o70: int + +DEBUG: int +INFO: int +WARNING: int +ERROR: int +CRITICAL: int + +io_sleep: float + +DEFAULT_WINDOW_SIZE: int +DEFAULT_MAX_PACKET_SIZE: int + +MIN_WINDOW_SIZE: int +MIN_PACKET_SIZE: int +MAX_WINDOW_SIZE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/compress.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/compress.pyi new file mode 100644 index 00000000..da039479 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/compress.pyi @@ -0,0 +1,11 @@ +from zlib import _Compress, _Decompress + +class ZlibCompressor: + z: _Compress + def __init__(self) -> None: ... + def __call__(self, data: bytes) -> bytes: ... 
+ +class ZlibDecompressor: + z: _Decompress + def __init__(self) -> None: ... + def __call__(self, data: bytes) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/config.pyi new file mode 100644 index 00000000..a7c30fea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/config.pyi @@ -0,0 +1,33 @@ +from collections.abc import Iterable +from re import Pattern +from typing import IO +from typing_extensions import Self + +from paramiko.ssh_exception import ConfigParseError as ConfigParseError, CouldNotCanonicalize as CouldNotCanonicalize + +SSH_PORT: int + +class SSHConfig: + SETTINGS_REGEX: Pattern[str] + TOKENS_BY_CONFIG_KEY: dict[str, list[str]] + def __init__(self) -> None: ... + @classmethod + def from_text(cls, text: str) -> Self: ... + @classmethod + def from_path(cls, path: str) -> Self: ... + @classmethod + def from_file(cls, flo: IO[str]) -> Self: ... + def parse(self, file_obj: IO[str]) -> None: ... + def lookup(self, hostname: str) -> SSHConfigDict: ... + def canonicalize(self, hostname: str, options: SSHConfigDict, domains: Iterable[str]) -> str: ... + def get_hostnames(self) -> set[str]: ... + +class LazyFqdn: + fqdn: str | None + config: SSHConfig + host: str | None + def __init__(self, config: SSHConfigDict, host: str | None = ...) -> None: ... + +class SSHConfigDict(dict[str, str]): + def as_bool(self, key: str) -> bool: ... + def as_int(self, key: str) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/dsskey.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/dsskey.pyi new file mode 100644 index 00000000..f98c69f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/dsskey.pyi @@ -0,0 +1,34 @@ +from collections.abc import Callable +from typing import IO + +from paramiko.message import Message +from paramiko.pkey import PKey + +class DSSKey(PKey): + p: int | None + q: int | None + g: int | None + y: int | None + x: int | None + public_blob: None + size: int + def __init__( + self, + msg: Message | None = ..., + data: bytes | None = ..., + filename: str | None = ..., + password: str | None = ..., + vals: tuple[int, int, int, int] | None = ..., + file_obj: IO[str] | None = ..., + ) -> None: ... + def asbytes(self) -> bytes: ... + def __hash__(self) -> int: ... + def get_name(self) -> str: ... + def get_bits(self) -> int: ... + def can_sign(self) -> bool: ... + def sign_ssh_data(self, data: bytes, algorithm: str | None = ...) -> Message: ... + def verify_ssh_sig(self, data: bytes, msg: Message) -> bool: ... + def write_private_key_file(self, filename: str, password: str | None = ...) -> None: ... + def write_private_key(self, file_obj: IO[str], password: str | None = ...) -> None: ... + @staticmethod + def generate(bits: int = ..., progress_func: Callable[..., object] | None = ...) -> DSSKey: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ecdsakey.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ecdsakey.pyi new file mode 100644 index 00000000..e5ff63c6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ecdsakey.pyi @@ -0,0 +1,54 @@ +from collections.abc import Callable, Sequence +from typing import IO, Any + +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurve, EllipticCurvePrivateKey, EllipticCurvePublicKey +from cryptography.hazmat.primitives.hashes import HashAlgorithm +from paramiko.message import Message +from paramiko.pkey import PKey + +class _ECDSACurve: + nist_name: str + key_length: int + key_format_identifier: str + hash_object: type[HashAlgorithm] + curve_class: type[EllipticCurve] + def __init__(self, curve_class: type[EllipticCurve], nist_name: str) -> None: ... + +class _ECDSACurveSet: + ecdsa_curves: Sequence[_ECDSACurve] + def __init__(self, ecdsa_curves: Sequence[_ECDSACurve]) -> None: ... + def get_key_format_identifier_list(self) -> list[str]: ... + def get_by_curve_class(self, curve_class: type[Any]) -> _ECDSACurve | None: ... + def get_by_key_format_identifier(self, key_format_identifier: str) -> _ECDSACurve | None: ... + def get_by_key_length(self, key_length: int) -> _ECDSACurve | None: ... + +class ECDSAKey(PKey): + verifying_key: EllipticCurvePublicKey + signing_key: EllipticCurvePrivateKey + public_blob: None + ecdsa_curve: _ECDSACurve | None + def __init__( + self, + msg: Message | None = ..., + data: bytes | None = ..., + filename: str | None = ..., + password: str | None = ..., + vals: tuple[EllipticCurvePrivateKey, EllipticCurvePublicKey] | None = ..., + file_obj: IO[str] | None = ..., + validate_point: bool = ..., + ) -> None: ... + @classmethod + def supported_key_format_identifiers(cls: Any) -> list[str]: ... + def asbytes(self) -> bytes: ... + def __hash__(self) -> int: ... + def get_name(self) -> str: ... + def get_bits(self) -> int: ... + def can_sign(self) -> bool: ... + def sign_ssh_data(self, data: bytes, algorithm: str | None = ...) -> Message: ... + def verify_ssh_sig(self, data: bytes, msg: Message) -> bool: ... + def write_private_key_file(self, filename: str, password: str | None = ...) -> None: ... + def write_private_key(self, file_obj: IO[str], password: str | None = ...) -> None: ... + @classmethod + def generate( + cls, curve: EllipticCurve = ..., progress_func: Callable[..., object] | None = ..., bits: int | None = ... + ) -> ECDSAKey: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ed25519key.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ed25519key.pyi new file mode 100644 index 00000000..43de9aa5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ed25519key.pyi @@ -0,0 +1,22 @@ +from typing import IO + +from paramiko.message import Message +from paramiko.pkey import PKey + +class Ed25519Key(PKey): + public_blob: None + def __init__( + self, + msg: Message | None = ..., + data: bytes | None = ..., + filename: str | None = ..., + password: str | None = ..., + file_obj: IO[str] | None = ..., + ) -> None: ... + def asbytes(self) -> bytes: ... + def __hash__(self) -> int: ... + def get_name(self) -> str: ... + def get_bits(self) -> int: ... + def can_sign(self) -> bool: ... 
+ def sign_ssh_data(self, data: bytes, algorithm: str | None = ...) -> Message: ... + def verify_ssh_sig(self, data: bytes, msg: Message) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/file.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/file.pyi new file mode 100644 index 00000000..e84863cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/file.pyi @@ -0,0 +1,39 @@ +from collections.abc import Iterable +from typing import Any, AnyStr, Generic + +from paramiko.util import ClosingContextManager + +class BufferedFile(ClosingContextManager, Generic[AnyStr]): + SEEK_SET: int + SEEK_CUR: int + SEEK_END: int + + FLAG_READ: int + FLAG_WRITE: int + FLAG_APPEND: int + FLAG_BINARY: int + FLAG_BUFFERED: int + FLAG_LINE_BUFFERED: int + FLAG_UNIVERSAL_NEWLINE: int + + newlines: None | AnyStr | tuple[AnyStr, ...] + def __init__(self) -> None: ... + def __del__(self) -> None: ... + def __iter__(self) -> BufferedFile[Any]: ... + def close(self) -> None: ... + def flush(self) -> None: ... + def __next__(self) -> AnyStr: ... + def readable(self) -> bool: ... + def writable(self) -> bool: ... + def seekable(self) -> bool: ... + def readinto(self, buff: bytearray) -> int: ... + def read(self, size: int | None = ...) -> bytes: ... + def readline(self, size: int | None = ...) -> AnyStr: ... + def readlines(self, sizehint: int | None = ...) -> list[AnyStr]: ... + def seek(self, offset: int, whence: int = ...) -> None: ... + def tell(self) -> int: ... + def write(self, data: AnyStr) -> None: ... + def writelines(self, sequence: Iterable[AnyStr]) -> None: ... + def xreadlines(self) -> BufferedFile[Any]: ... + @property + def closed(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/hostkeys.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/hostkeys.pyi new file mode 100644 index 00000000..c48011a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/hostkeys.pyi @@ -0,0 +1,46 @@ +from collections.abc import Iterator, Mapping, MutableMapping +from typing_extensions import Self + +from paramiko.pkey import PKey + +class _SubDict(MutableMapping[str, PKey]): + # Internal to HostKeys.lookup() + def __init__(self, hostname: str, entries: list[HostKeyEntry], hostkeys: HostKeys) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + def __delitem__(self, key: str) -> None: ... + def __getitem__(self, key: str) -> PKey: ... + def __setitem__(self, key: str, val: PKey) -> None: ... + def keys(self) -> list[str]: ... # type: ignore[override] + +class HostKeys(MutableMapping[str, _SubDict]): + def __init__(self, filename: str | None = ...) -> None: ... + def add(self, hostname: str, keytype: str, key: PKey) -> None: ... + def load(self, filename: str) -> None: ... + def save(self, filename: str) -> None: ... + def lookup(self, hostname: str) -> _SubDict | None: ... + def check(self, hostname: str, key: PKey) -> bool: ... + def clear(self) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + def __getitem__(self, key: str) -> _SubDict: ... + def __delitem__(self, key: str) -> None: ... + def __setitem__(self, hostname: str, entry: Mapping[str, PKey]) -> None: ... + def keys(self) -> list[str]: ... 
# type: ignore[override] + def values(self) -> list[_SubDict]: ... # type: ignore[override] + @staticmethod + def hash_host(hostname: str, salt: str | None = ...) -> str: ... + +class InvalidHostKey(Exception): + line: str + exc: Exception + def __init__(self, line: str, exc: Exception) -> None: ... + +class HostKeyEntry: + valid: bool + hostnames: list[str] + key: PKey + def __init__(self, hostnames: list[str] | None = ..., key: PKey | None = ...) -> None: ... + @classmethod + def from_line(cls, line: str, lineno: int | None = ...) -> Self | None: ... + def to_line(self) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_curve25519.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_curve25519.pyi new file mode 100644 index 00000000..be33b1a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_curve25519.pyi @@ -0,0 +1,20 @@ +from _typeshed import ReadableBuffer +from collections.abc import Callable +from hashlib import _Hash + +from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey +from paramiko.message import Message +from paramiko.transport import Transport + +c_MSG_KEXECDH_INIT: bytes +c_MSG_KEXECDH_REPLY: bytes + +class KexCurve25519: + hash_algo: Callable[[ReadableBuffer], _Hash] + transport: Transport + key: X25519PrivateKey | None + def __init__(self, transport: Transport) -> None: ... + @classmethod + def is_available(cls) -> bool: ... + def start_kex(self) -> None: ... + def parse_next(self, ptype: int, m: Message) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_ecdh_nist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_ecdh_nist.pyi new file mode 100644 index 00000000..3178b181 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_ecdh_nist.pyi @@ -0,0 +1,32 @@ +from _typeshed import ReadableBuffer +from collections.abc import Callable +from hashlib import _Hash + +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurve, EllipticCurvePrivateKey, EllipticCurvePublicKey +from paramiko.message import Message +from paramiko.transport import Transport + +c_MSG_KEXECDH_INIT: bytes +c_MSG_KEXECDH_REPLY: bytes + +class KexNistp256: + name: str + hash_algo: Callable[[ReadableBuffer], _Hash] + curve: EllipticCurve + transport: Transport + P: int | EllipticCurvePrivateKey + Q_C: EllipticCurvePublicKey | None + Q_S: EllipticCurvePublicKey | None + def __init__(self, transport: Transport) -> None: ... + def start_kex(self) -> None: ... + def parse_next(self, ptype: int, m: Message) -> None: ... 
+ +class KexNistp384(KexNistp256): + name: str + hash_algo: Callable[[ReadableBuffer], _Hash] + curve: EllipticCurve + +class KexNistp521(KexNistp256): + name: str + hash_algo: Callable[[ReadableBuffer], _Hash] + curve: EllipticCurve diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_gex.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_gex.pyi new file mode 100644 index 00000000..d65259a2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_gex.pyi @@ -0,0 +1,34 @@ +from _typeshed import ReadableBuffer +from collections.abc import Callable +from hashlib import _Hash + +from paramiko.message import Message +from paramiko.transport import Transport + +c_MSG_KEXDH_GEX_REQUEST_OLD: bytes +c_MSG_KEXDH_GEX_GROUP: bytes +c_MSG_KEXDH_GEX_INIT: bytes +c_MSG_KEXDH_GEX_REPLY: bytes +c_MSG_KEXDH_GEX_REQUEST: bytes + +class KexGex: + name: str + min_bits: int + max_bits: int + preferred_bits: int + hash_algo: Callable[[ReadableBuffer], _Hash] = ... + transport: Transport + p: int | None + q: int | None + g: int | None + x: int | None + e: int | None + f: int | None + old_style: bool + def __init__(self, transport: Transport) -> None: ... + def start_kex(self, _test_old_style: bool = ...) -> None: ... + def parse_next(self, ptype: int, m: Message) -> None: ... + +class KexGexSHA256(KexGex): + name: str + hash_algo: Callable[[ReadableBuffer], _Hash] = ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_group1.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_group1.pyi new file mode 100644 index 00000000..8ff0292b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_group1.pyi @@ -0,0 +1,24 @@ +from _typeshed import ReadableBuffer +from collections.abc import Callable +from hashlib import _Hash + +from paramiko.message import Message +from paramiko.transport import Transport + +c_MSG_KEXDH_INIT: bytes +c_MSG_KEXDH_REPLY: bytes +b7fffffffffffffff: bytes +b0000000000000000: bytes + +class KexGroup1: + P: int + G: int + name: str + hash_algo: Callable[[ReadableBuffer], _Hash] + transport: Transport + x: int + e: int + f: int + def __init__(self, transport: Transport) -> None: ... + def start_kex(self) -> None: ... + def parse_next(self, ptype: int, m: Message) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_group14.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_group14.pyi new file mode 100644 index 00000000..a4704af8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_group14.pyi @@ -0,0 +1,15 @@ +from _typeshed import ReadableBuffer +from collections.abc import Callable +from hashlib import _Hash + +from paramiko.kex_group1 import KexGroup1 as KexGroup1 + +class KexGroup14(KexGroup1): + P: int + G: int + name: str + hash_algo: Callable[[ReadableBuffer], _Hash] + +class KexGroup14SHA256(KexGroup14): + name: str + hash_algo: Callable[[ReadableBuffer], _Hash] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_group16.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_group16.pyi new file mode 100644 index 00000000..37f29bb6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_group16.pyi @@ -0,0 +1,11 @@ +from _typeshed import ReadableBuffer +from collections.abc import Callable +from hashlib import _Hash + +from paramiko.kex_group1 import KexGroup1 as KexGroup1 + +class KexGroup16SHA512(KexGroup1): + name: str + P: int + G: int + hash_algo: Callable[[ReadableBuffer], _Hash] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_gss.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_gss.pyi new file mode 100644 index 00000000..1fd04c6e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/kex_gss.pyi @@ -0,0 +1,64 @@ +from paramiko.message import Message +from paramiko.ssh_gss import _SSH_GSSAuth +from paramiko.transport import Transport + +MSG_KEXGSS_INIT: int +MSG_KEXGSS_CONTINUE: int +MSG_KEXGSS_COMPLETE: int +MSG_KEXGSS_HOSTKEY: int +MSG_KEXGSS_ERROR: int +MSG_KEXGSS_GROUPREQ: int +MSG_KEXGSS_GROUP: int + +c_MSG_KEXGSS_INIT: bytes +c_MSG_KEXGSS_CONTINUE: bytes +c_MSG_KEXGSS_COMPLETE: bytes +c_MSG_KEXGSS_HOSTKEY: bytes +c_MSG_KEXGSS_ERROR: bytes +c_MSG_KEXGSS_GROUPREQ: bytes +c_MSG_KEXGSS_GROUP: bytes + +class KexGSSGroup1: + P: int + G: int + b7fffffffffffffff: bytes + b0000000000000000: bytes + NAME: str + transport: Transport + kexgss: _SSH_GSSAuth + gss_host: str | None + x: int + e: int + f: int + def __init__(self, transport: Transport) -> None: ... + def start_kex(self) -> None: ... + def parse_next(self, ptype: int, m: Message) -> None: ... + +class KexGSSGroup14(KexGSSGroup1): + P: int + G: int + NAME: str + +class KexGSSGex: + NAME: str + min_bits: int + max_bits: int + preferred_bits: int + transport: Transport + kexgss: _SSH_GSSAuth + gss_host: str | None + p: int | None + q: int | None + g: int | None + x: int | None + e: int | None + f: int | None + old_style: bool + def __init__(self, transport: Transport) -> None: ... + def start_kex(self) -> None: ... + def parse_next(self, ptype: int, m: Message) -> None: ... + +class NullHostKey: + key: str + def __init__(self) -> None: ... + def get_name(self) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/message.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/message.pyi new file mode 100644 index 00000000..7314f1e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/message.pyi @@ -0,0 +1,41 @@ +from collections.abc import Iterable +from io import BytesIO +from typing import Any, Protocol +from typing_extensions import TypeAlias + +class _SupportsAsBytes(Protocol): + def asbytes(self) -> bytes: ... + +_LikeBytes: TypeAlias = bytes | str | _SupportsAsBytes + +class Message: + big_int: int + packet: BytesIO + seqno: int # only when packet.Packetizer.read_message() is used + def __init__(self, content: bytes | None = ...) -> None: ... + def __bytes__(self) -> bytes: ... + def asbytes(self) -> bytes: ... + def rewind(self) -> None: ... + def get_remainder(self) -> bytes: ... + def get_so_far(self) -> bytes: ... + def get_bytes(self, n: int) -> bytes: ... + def get_byte(self) -> bytes: ... + def get_boolean(self) -> bool: ... + def get_adaptive_int(self) -> int: ... + def get_int(self) -> int: ... + def get_int64(self) -> int: ... + def get_mpint(self) -> int: ... + def get_string(self) -> bytes: ... + def get_text(self) -> str: ... + def get_binary(self) -> bytes: ... + def get_list(self) -> list[str]: ... + def add_bytes(self, b: bytes) -> Message: ... + def add_byte(self, b: bytes) -> Message: ... + def add_boolean(self, b: bool) -> Message: ... + def add_int(self, n: int) -> Message: ... + def add_adaptive_int(self, n: int) -> Message: ... + def add_int64(self, n: int) -> Message: ... + def add_mpint(self, z: int) -> Message: ... + def add_string(self, s: _LikeBytes) -> Message: ... + def add_list(self, l: Iterable[str]) -> Message: ... + def add(self, *seq: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/packet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/packet.pyi new file mode 100644 index 00000000..815eaed8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/packet.pyi @@ -0,0 +1,57 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from hashlib import _Hash +from logging import Logger +from socket import socket +from typing import Any + +from cryptography.hazmat.primitives.ciphers import Cipher +from paramiko.compress import ZlibCompressor, ZlibDecompressor +from paramiko.message import Message + +def compute_hmac(key: bytes, message: bytes, digest_class: _Hash) -> bytes: ... + +class NeedRekeyException(Exception): ... + +def first_arg(e: Exception) -> Any: ... + +class Packetizer: + REKEY_PACKETS: int + REKEY_BYTES: int + REKEY_PACKETS_OVERFLOW_MAX: int + REKEY_BYTES_OVERFLOW_MAX: int + def __init__(self, socket: socket) -> None: ... + @property + def closed(self) -> bool: ... + def set_log(self, log: Logger) -> None: ... + def set_outbound_cipher( + self, + block_engine: Cipher[Incomplete], + block_size: int, + mac_engine: _Hash, + mac_size: int, + mac_key: bytes, + sdctr: bool = ..., + etm: bool = ..., + ) -> None: ... + def set_inbound_cipher( + self, block_engine: Cipher[Incomplete], block_size: int, mac_engine: _Hash, mac_size: int, mac_key: bytes, etm: bool = ... + ) -> None: ... + def set_outbound_compressor(self, compressor: ZlibCompressor) -> None: ... 
+ def set_inbound_compressor(self, compressor: ZlibDecompressor) -> None: ... + def close(self) -> None: ... + def set_hexdump(self, hexdump: bool) -> None: ... + def get_hexdump(self) -> bool: ... + def get_mac_size_in(self) -> int: ... + def get_mac_size_out(self) -> int: ... + def need_rekey(self) -> bool: ... + def set_keepalive(self, interval: int, callback: Callable[[], object]) -> None: ... + def read_timer(self) -> None: ... + def start_handshake(self, timeout: float) -> None: ... + def handshake_timed_out(self) -> bool: ... + def complete_handshake(self) -> None: ... + def read_all(self, n: int, check_rekey: bool = ...) -> bytes: ... + def write_all(self, out: bytes) -> None: ... + def readline(self, timeout: float) -> str: ... + def send_message(self, data: Message) -> None: ... + def read_message(self) -> tuple[int, Message]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/pipe.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/pipe.pyi new file mode 100644 index 00000000..e9b1635e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/pipe.pyi @@ -0,0 +1,35 @@ +from typing import Protocol + +class _BasePipe(Protocol): + def clear(self) -> None: ... + def set(self) -> None: ... + +class _Pipe(_BasePipe, Protocol): + def close(self) -> None: ... + def fileno(self) -> int: ... + def set_forever(self) -> None: ... + +def make_pipe() -> _Pipe: ... + +class PosixPipe: + def __init__(self) -> None: ... + def close(self) -> None: ... + def fileno(self) -> int: ... + def clear(self) -> None: ... + def set(self) -> None: ... + def set_forever(self) -> None: ... + +class WindowsPipe: + def __init__(self) -> None: ... + def close(self) -> None: ... + def fileno(self) -> int: ... + def clear(self) -> None: ... + def set(self) -> None: ... + def set_forever(self) -> None: ... + +class OrPipe: + def __init__(self, pipe: _Pipe) -> None: ... + def set(self) -> None: ... + def clear(self) -> None: ... + +def make_or_pipe(pipe: _Pipe) -> tuple[OrPipe, OrPipe]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/pkey.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/pkey.pyi new file mode 100644 index 00000000..9daefd87 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/pkey.pyi @@ -0,0 +1,46 @@ +from re import Pattern +from typing import IO +from typing_extensions import Self + +from paramiko.message import Message + +OPENSSH_AUTH_MAGIC: bytes + +def _unpad_openssh(data: bytes) -> bytes: ... + +class PKey: + public_blob: PublicBlob | None + BEGIN_TAG: Pattern[str] + END_TAG: Pattern[str] + def __init__(self, msg: Message | None = ..., data: str | None = ...) -> None: ... + def asbytes(self) -> bytes: ... + def __bytes__(self) -> bytes: ... + def __eq__(self, other: object) -> bool: ... + def get_name(self) -> str: ... + def get_bits(self) -> int: ... + def can_sign(self) -> bool: ... + def get_fingerprint(self) -> bytes: ... + def get_base64(self) -> str: ... + def sign_ssh_data(self, data: bytes, algorithm: str | None = ...) -> Message: ... + def verify_ssh_sig(self, data: bytes, msg: Message) -> bool: ... + @classmethod + def from_private_key_file(cls, filename: str, password: str | None = ...) -> Self: ... 
+ @classmethod + def from_private_key(cls, file_obj: IO[str], password: str | None = ...) -> Self: ... + def write_private_key_file(self, filename: str, password: str | None = ...) -> None: ... + def write_private_key(self, file_obj: IO[str], password: str | None = ...) -> None: ... + def load_certificate(self, value: Message | str) -> None: ... + +class PublicBlob: + key_type: str + key_blob: str + comment: str + def __init__(self, type_: str, blob: bytes, comment: str | None = ...) -> None: ... + @classmethod + def from_file(cls, filename: str) -> Self: ... + @classmethod + def from_string(cls, string: str) -> Self: ... + @classmethod + def from_message(cls, message: Message) -> Self: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/primes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/primes.pyi new file mode 100644 index 00000000..3d7cccd9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/primes.pyi @@ -0,0 +1,6 @@ +class ModulusPack: + pack: dict[int, list[tuple[int, int]]] + discarded: list[tuple[int, str]] + def __init__(self) -> None: ... + def read_file(self, filename: str) -> None: ... + def get_modulus(self, min: int, prefer: int, max: int) -> tuple[int, int]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/proxy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/proxy.pyi new file mode 100644 index 00000000..111e056b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/proxy.pyi @@ -0,0 +1,16 @@ +from subprocess import Popen +from typing import Any + +from paramiko.util import ClosingContextManager + +class ProxyCommand(ClosingContextManager): + cmd: list[str] + process: Popen[Any] + timeout: float | None + def __init__(self, command_line: str) -> None: ... + def send(self, content: bytes) -> int: ... + def recv(self, size: int) -> bytes: ... + def close(self) -> None: ... + @property + def closed(self) -> bool: ... + def settimeout(self, timeout: float) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/rsakey.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/rsakey.pyi new file mode 100644 index 00000000..477ac328 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/rsakey.pyi @@ -0,0 +1,34 @@ +from collections.abc import Callable +from typing import IO + +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey, RSAPublicNumbers +from paramiko.message import Message +from paramiko.pkey import PKey + +class RSAKey(PKey): + key: None | RSAPublicKey | RSAPrivateKey + public_blob: None + def __init__( + self, + msg: Message | None = ..., + data: bytes | None = ..., + filename: str | None = ..., + password: str | None = ..., + key: None | RSAPublicKey | RSAPrivateKey = ..., + file_obj: IO[str] | None = ..., + ) -> None: ... + @property + def size(self) -> int: ... + @property + def public_numbers(self) -> RSAPublicNumbers: ... + def asbytes(self) -> bytes: ... + def __hash__(self) -> int: ... + def get_name(self) -> str: ... 
+ def get_bits(self) -> int: ... + def can_sign(self) -> bool: ... + def sign_ssh_data(self, data: bytes, algorithm: str = ...) -> Message: ... # type: ignore[override] + def verify_ssh_sig(self, data: bytes, msg: Message) -> bool: ... + def write_private_key_file(self, filename: str, password: str | None = ...) -> None: ... + def write_private_key(self, file_obj: IO[str], password: str | None = ...) -> None: ... + @staticmethod + def generate(bits: int, progress_func: Callable[..., object] | None = ...) -> RSAKey: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/server.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/server.pyi new file mode 100644 index 00000000..5bc25c3d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/server.pyi @@ -0,0 +1,50 @@ +import threading + +from paramiko.channel import Channel +from paramiko.message import Message +from paramiko.pkey import PKey +from paramiko.transport import Transport + +class ServerInterface: + def check_channel_request(self, kind: str, chanid: int) -> int: ... + def get_allowed_auths(self, username: str) -> str: ... + def check_auth_none(self, username: str) -> int: ... + def check_auth_password(self, username: str, password: str) -> int: ... + def check_auth_publickey(self, username: str, key: PKey) -> int: ... + def check_auth_interactive(self, username: str, submethods: str) -> int | InteractiveQuery: ... + def check_auth_interactive_response(self, responses: list[str]) -> int | InteractiveQuery: ... + def check_auth_gssapi_with_mic(self, username: str, gss_authenticated: int = ..., cc_file: str | None = ...) -> int: ... + def check_auth_gssapi_keyex(self, username: str, gss_authenticated: int = ..., cc_file: str | None = ...) -> int: ... + def enable_auth_gssapi(self) -> bool: ... + def check_port_forward_request(self, address: str, port: int) -> int: ... + def cancel_port_forward_request(self, address: str, port: int) -> None: ... + def check_global_request(self, kind: str, msg: Message) -> bool | tuple[bool | int | str, ...]: ... + def check_channel_pty_request( + self, channel: Channel, term: bytes, width: int, height: int, pixelwidth: int, pixelheight: int, modes: bytes + ) -> bool: ... + def check_channel_shell_request(self, channel: Channel) -> bool: ... + def check_channel_exec_request(self, channel: Channel, command: bytes) -> bool: ... + def check_channel_subsystem_request(self, channel: Channel, name: str) -> bool: ... + def check_channel_window_change_request( + self, channel: Channel, width: int, height: int, pixelwidth: int, pixelheight: int + ) -> bool: ... + def check_channel_x11_request( + self, channel: Channel, single_connection: bool, auth_protocol: str, auth_cookie: bytes, screen_number: int + ) -> bool: ... + def check_channel_forward_agent_request(self, channel: Channel) -> bool: ... + def check_channel_direct_tcpip_request(self, chanid: int, origin: tuple[str, int], destination: tuple[str, int]) -> int: ... + def check_channel_env_request(self, channel: Channel, name: bytes, value: bytes) -> bool: ... + def get_banner(self) -> tuple[str | None, str | None]: ... + +class InteractiveQuery: + name: str + instructions: str + prompts: list[tuple[str, bool]] + def __init__(self, name: str = ..., instructions: str = ..., *prompts: str | tuple[str, bool]) -> None: ... + def add_prompt(self, prompt: str, echo: bool = ...) -> None: ... 
+ +class SubsystemHandler(threading.Thread): + def __init__(self, channel: Channel, name: str, server: ServerInterface) -> None: ... + def get_server(self) -> ServerInterface: ... + def start_subsystem(self, name: str, transport: Transport, channel: Channel) -> None: ... + def finish_subsystem(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp.pyi new file mode 100644 index 00000000..ec0b0506 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp.pyi @@ -0,0 +1,60 @@ +from logging import Logger + +from paramiko.channel import Channel + +CMD_INIT: int +CMD_VERSION: int +CMD_OPEN: int +CMD_CLOSE: int +CMD_READ: int +CMD_WRITE: int +CMD_LSTAT: int +CMD_FSTAT: int +CMD_SETSTAT: int +CMD_FSETSTAT: int +CMD_OPENDIR: int +CMD_READDIR: int +CMD_REMOVE: int +CMD_MKDIR: int +CMD_RMDIR: int +CMD_REALPATH: int +CMD_STAT: int +CMD_RENAME: int +CMD_READLINK: int +CMD_SYMLINK: int +CMD_STATUS: int +CMD_HANDLE: int +CMD_DATA: int +CMD_NAME: int +CMD_ATTRS: int +CMD_EXTENDED: int +CMD_EXTENDED_REPLY: int + +SFTP_OK: int +SFTP_EOF: int +SFTP_NO_SUCH_FILE: int +SFTP_PERMISSION_DENIED: int +SFTP_FAILURE: int +SFTP_BAD_MESSAGE: int +SFTP_NO_CONNECTION: int +SFTP_CONNECTION_LOST: int +SFTP_OP_UNSUPPORTED: int + +SFTP_DESC: list[str] + +SFTP_FLAG_READ: int +SFTP_FLAG_WRITE: int +SFTP_FLAG_APPEND: int +SFTP_FLAG_CREATE: int +SFTP_FLAG_TRUNC: int +SFTP_FLAG_EXCL: int + +CMD_NAMES: dict[int, str] + +class SFTPError(Exception): ... + +class BaseSFTP: + logger: Logger + sock: Channel | None + ultra_debug: bool + def __init__(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_attr.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_attr.pyi new file mode 100644 index 00000000..b97f99df --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_attr.pyi @@ -0,0 +1,22 @@ +from os import stat_result +from typing_extensions import Self + +class SFTPAttributes: + FLAG_SIZE: int + FLAG_UIDGID: int + FLAG_PERMISSIONS: int + FLAG_AMTIME: int + FLAG_EXTENDED: int + st_size: int | None + st_uid: int | None + st_gid: int | None + st_mode: int | None + st_atime: int | None + st_mtime: int | None + filename: str # only when from_stat() is used + longname: str # only when from_stat() is used + attr: dict[str, str] + def __init__(self) -> None: ... + @classmethod + def from_stat(cls, obj: stat_result, filename: str | None = ...) -> Self: ... + def asbytes(self) -> bytes: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_client.pyi new file mode 100644 index 00000000..54d543b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_client.pyi @@ -0,0 +1,60 @@ +from collections.abc import Callable, Iterator +from logging import Logger +from typing import IO +from typing_extensions import Self, TypeAlias + +from paramiko.channel import Channel +from paramiko.sftp import BaseSFTP +from paramiko.sftp_attr import SFTPAttributes +from paramiko.sftp_file import SFTPFile +from paramiko.transport import Transport +from paramiko.util import ClosingContextManager + +_Callback: TypeAlias = Callable[[int, int], object] + +b_slash: bytes + +class SFTPClient(BaseSFTP, ClosingContextManager): + sock: Channel + ultra_debug: bool + request_number: int + logger: Logger + def __init__(self, sock: Channel) -> None: ... + @classmethod + def from_transport(cls, t: Transport, window_size: int | None = ..., max_packet_size: int | None = ...) -> Self | None: ... + def close(self) -> None: ... + def get_channel(self) -> Channel | None: ... + def listdir(self, path: str = ...) -> list[str]: ... + def listdir_attr(self, path: str = ...) -> list[SFTPAttributes]: ... + def listdir_iter(self, path: bytes | str = ..., read_aheads: int = ...) -> Iterator[SFTPAttributes]: ... + def open(self, filename: bytes | str, mode: str = ..., bufsize: int = ...) -> SFTPFile: ... + file = open + def remove(self, path: bytes | str) -> None: ... + unlink = remove + def rename(self, oldpath: bytes | str, newpath: bytes | str) -> None: ... + def posix_rename(self, oldpath: bytes | str, newpath: bytes | str) -> None: ... + def mkdir(self, path: bytes | str, mode: int = ...) -> None: ... + def rmdir(self, path: bytes | str) -> None: ... + def stat(self, path: bytes | str) -> SFTPAttributes: ... + def lstat(self, path: bytes | str) -> SFTPAttributes: ... + def symlink(self, source: bytes | str, dest: bytes | str) -> None: ... + def chmod(self, path: bytes | str, mode: int) -> None: ... + def chown(self, path: bytes | str, uid: int, gid: int) -> None: ... + def utime(self, path: bytes | str, times: tuple[float, float] | None) -> None: ... + def truncate(self, path: bytes | str, size: int) -> None: ... + def readlink(self, path: bytes | str) -> str | None: ... + def normalize(self, path: bytes | str) -> str: ... + def chdir(self, path: None | bytes | str = ...) -> None: ... + def getcwd(self) -> str | None: ... + def putfo( + self, fl: IO[bytes], remotepath: bytes | str, file_size: int = ..., callback: _Callback | None = ..., confirm: bool = ... + ) -> SFTPAttributes: ... + def put( + self, localpath: bytes | str, remotepath: bytes | str, callback: _Callback | None = ..., confirm: bool = ... + ) -> SFTPAttributes: ... + def getfo(self, remotepath: bytes | str, fl: IO[bytes], callback: _Callback | None = ..., prefetch: bool = ...) -> int: ... + def get( + self, remotepath: bytes | str, localpath: bytes | str, callback: _Callback | None = ..., prefetch: bool = ... + ) -> None: ... + +class SFTP(SFTPClient): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_file.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_file.pyi new file mode 100644 index 00000000..50415d12 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_file.pyi @@ -0,0 +1,30 @@ +from collections.abc import Iterator, Sequence +from typing import Any + +from paramiko.file import BufferedFile +from paramiko.sftp_attr import SFTPAttributes +from paramiko.sftp_client import SFTPClient +from paramiko.sftp_handle import SFTPHandle + +class SFTPFile(BufferedFile[Any]): + MAX_REQUEST_SIZE: int + sftp: SFTPClient + handle: SFTPHandle + pipelined: bool + def __init__(self, sftp: SFTPClient, handle: bytes, mode: str = ..., bufsize: int = ...) -> None: ... + def __del__(self) -> None: ... + def close(self) -> None: ... + def settimeout(self, timeout: float) -> None: ... + def gettimeout(self) -> float: ... + def setblocking(self, blocking: bool) -> None: ... + def seekable(self) -> bool: ... + def seek(self, offset: int, whence: int = ...) -> None: ... + def stat(self) -> SFTPAttributes: ... + def chmod(self, mode: int) -> None: ... + def chown(self, uid: int, gid: int) -> None: ... + def utime(self, times: tuple[float, float] | None) -> None: ... + def truncate(self, size: int) -> None: ... + def check(self, hash_algorithm: str, offset: int = ..., length: int = ..., block_size: int = ...) -> bytes: ... + def set_pipelined(self, pipelined: bool = ...) -> None: ... + def prefetch(self, file_size: int | None = ...) -> None: ... + def readv(self, chunks: Sequence[tuple[int, int]]) -> Iterator[bytes]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_handle.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_handle.pyi new file mode 100644 index 00000000..888e3c44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_handle.pyi @@ -0,0 +1,10 @@ +from paramiko.sftp_attr import SFTPAttributes +from paramiko.util import ClosingContextManager + +class SFTPHandle(ClosingContextManager): + def __init__(self, flags: int = ...) -> None: ... + def close(self) -> None: ... + def read(self, offset: int, length: int) -> bytes | int: ... + def write(self, offset: int, data: bytes) -> int: ... + def stat(self) -> int | SFTPAttributes: ... + def chattr(self, attr: SFTPAttributes) -> int: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_server.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_server.pyi new file mode 100644 index 00000000..8e8dddef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_server.pyi @@ -0,0 +1,28 @@ +from logging import Logger +from typing import Any + +from paramiko.channel import Channel +from paramiko.server import ServerInterface, SubsystemHandler +from paramiko.sftp import BaseSFTP +from paramiko.sftp_attr import SFTPAttributes +from paramiko.sftp_handle import SFTPHandle +from paramiko.sftp_si import SFTPServerInterface +from paramiko.transport import Transport + +class SFTPServer(BaseSFTP, SubsystemHandler): + logger: Logger + ultra_debug: bool + next_handle: int + file_table: dict[bytes, SFTPHandle] + folder_table: dict[bytes, SFTPHandle] + server: SFTPServerInterface + sock: Channel | None + def __init__( + self, channel: Channel, name: str, server: ServerInterface, sftp_si: type[SFTPServerInterface], *largs: Any, **kwargs: Any + ) -> None: ... + def start_subsystem(self, name: str, transport: Transport, channel: Channel) -> None: ... + def finish_subsystem(self) -> None: ... + @staticmethod + def convert_errno(e: int) -> int: ... + @staticmethod + def set_file_attr(filename: str, attr: SFTPAttributes) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_si.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_si.pyi new file mode 100644 index 00000000..efca37e8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/sftp_si.pyi @@ -0,0 +1,23 @@ +from typing import Any + +from paramiko.server import ServerInterface +from paramiko.sftp_attr import SFTPAttributes +from paramiko.sftp_handle import SFTPHandle + +class SFTPServerInterface: + def __init__(self, server: ServerInterface, *largs: Any, **kwargs: Any) -> None: ... + def session_started(self) -> None: ... + def session_ended(self) -> None: ... + def open(self, path: str, flags: int, attr: SFTPAttributes) -> SFTPHandle | int: ... + def list_folder(self, path: str) -> list[SFTPAttributes] | int: ... + def stat(self, path: str) -> SFTPAttributes | int: ... + def lstat(self, path: str) -> SFTPAttributes | int: ... + def remove(self, path: str) -> int: ... + def rename(self, oldpath: str, newpath: str) -> int: ... + def posix_rename(self, oldpath: str, newpath: str) -> int: ... + def mkdir(self, path: str, attr: SFTPAttributes) -> int: ... + def rmdir(self, path: str) -> int: ... + def chattr(self, path: str, attr: SFTPAttributes) -> int: ... + def canonicalize(self, path: str) -> str: ... + def readlink(self, path: str) -> str | int: ... + def symlink(self, target_path: str, path: str) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ssh_exception.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ssh_exception.pyi new file mode 100644 index 00000000..9f617e0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ssh_exception.pyi @@ -0,0 +1,41 @@ +import socket +from collections.abc import Mapping + +from paramiko.pkey import PKey + +class SSHException(Exception): ... 
+class AuthenticationException(SSHException): ... +class PasswordRequiredException(AuthenticationException): ... + +class BadAuthenticationType(AuthenticationException): + allowed_types: list[str] + explanation: str + def __init__(self, explanation: str, types: list[str]) -> None: ... + +class PartialAuthentication(AuthenticationException): + allowed_types: list[str] + def __init__(self, types: list[str]) -> None: ... + +class ChannelException(SSHException): + code: int + text: str + def __init__(self, code: int, text: str) -> None: ... + +class BadHostKeyException(SSHException): + hostname: str + key: PKey + expected_key: PKey + def __init__(self, hostname: str, got_key: PKey, expected_key: PKey) -> None: ... + +class ProxyCommandFailure(SSHException): + command: str + error: str + def __init__(self, command: str, error: str) -> None: ... + +class NoValidConnectionsError(socket.error): + errors: Mapping[tuple[str, int] | tuple[str, int, int, int], Exception] + def __init__(self, errors: Mapping[tuple[str, int] | tuple[str, int, int, int], Exception]) -> None: ... + def __reduce__(self) -> tuple[type, tuple[Mapping[tuple[str, int] | tuple[str, int, int, int], Exception]]]: ... + +class CouldNotCanonicalize(SSHException): ... +class ConfigParseError(SSHException): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ssh_gss.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ssh_gss.pyi new file mode 100644 index 00000000..9c0d8bab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/ssh_gss.pyi @@ -0,0 +1,52 @@ +from typing import Any + +GSS_AUTH_AVAILABLE: bool +GSS_EXCEPTIONS: tuple[type[Exception], ...] + +def GSSAuth(auth_method: str, gss_deleg_creds: bool = ...) -> _SSH_GSSAuth: ... + +class _SSH_GSSAuth: + cc_file: None + def __init__(self, auth_method: str, gss_deleg_creds: bool) -> None: ... + def set_service(self, service: str) -> None: ... + def set_username(self, username: str) -> None: ... + def ssh_gss_oids(self, mode: str = ...) -> bytes: ... + def ssh_check_mech(self, desired_mech: str) -> bool: ... + +class _SSH_GSSAPI_OLD(_SSH_GSSAuth): + def __init__(self, auth_method: str, gss_deleg_creds: bool) -> None: ... + def ssh_init_sec_context( + self, target: str, desired_mech: str | None = ..., username: str | None = ..., recv_token: str | None = ... + ) -> str | None: ... + def ssh_get_mic(self, session_id: bytes, gss_kex: bool = ...) -> Any: ... + def ssh_accept_sec_context(self, hostname: str, recv_token: str, username: str | None = ...) -> str | None: ... + def ssh_check_mic(self, mic_token: str, session_id: bytes, username: str | None = ...) -> None: ... + @property + def credentials_delegated(self) -> bool: ... + def save_client_creds(self, client_token: str) -> None: ... + +_SSH_GSSAPI = _SSH_GSSAPI_OLD + +class _SSH_GSSAPI_NEW(_SSH_GSSAuth): + def __init__(self, auth_method: str, gss_deleg_creds: bool) -> None: ... + def ssh_init_sec_context( + self, target: str, desired_mech: str | None = ..., username: str | None = ..., recv_token: str | None = ... + ) -> str: ... + def ssh_get_mic(self, session_id: bytes, gss_kex: bool = ...) -> Any: ... + def ssh_accept_sec_context(self, hostname: str, recv_token: str, username: str | None = ...) -> str | None: ... + def ssh_check_mic(self, mic_token: str, session_id: bytes, username: str | None = ...) -> None: ... + @property + def credentials_delegated(self) -> bool: ... 
+ def save_client_creds(self, client_token: str) -> None: ... + +class _SSH_SSPI(_SSH_GSSAuth): + def __init__(self, auth_method: str, gss_deleg_creds: bool) -> None: ... + def ssh_init_sec_context( + self, target: str, desired_mech: str | None = ..., username: str | None = ..., recv_token: str | None = ... + ) -> str: ... + def ssh_get_mic(self, session_id: bytes, gss_kex: bool = ...) -> Any: ... + def ssh_accept_sec_context(self, hostname: str, username: str, recv_token: str) -> str | None: ... + def ssh_check_mic(self, mic_token: str, session_id: bytes, username: str | None = ...) -> None: ... + @property + def credentials_delegated(self) -> bool: ... + def save_client_creds(self, client_token: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/transport.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/transport.pyi new file mode 100644 index 00000000..efaacd9e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/transport.pyi @@ -0,0 +1,196 @@ +from collections.abc import Callable, Iterable, Sequence +from logging import Logger +from socket import socket +from threading import Condition, Event, Lock, Thread +from types import ModuleType +from typing import Any, Protocol +from typing_extensions import TypeAlias + +from paramiko.auth_handler import AuthHandler, _InteractiveCallback +from paramiko.channel import Channel +from paramiko.message import Message +from paramiko.packet import Packetizer +from paramiko.pkey import PKey +from paramiko.server import ServerInterface, SubsystemHandler +from paramiko.sftp_client import SFTPClient +from paramiko.ssh_gss import _SSH_GSSAuth +from paramiko.util import ClosingContextManager + +_Addr: TypeAlias = tuple[str, int] +_SocketLike: TypeAlias = str | _Addr | socket | Channel + +class _KexEngine(Protocol): + def start_kex(self) -> None: ... + def parse_next(self, ptype: int, m: Message) -> None: ... 
+ +class Transport(Thread, ClosingContextManager): + active: bool + hostname: str | None + sock: socket | Channel + packetizer: Packetizer + local_version: str + remote_version: str + local_cipher: str + local_kex_init: bytes | None + local_mac: str | None + local_compression: str | None + session_id: bytes | None + host_key_type: str | None + host_key: PKey | None + use_gss_kex: bool + gss_kex_used: bool + kexgss_ctxt: _SSH_GSSAuth | None + gss_host: str + kex_engine: _KexEngine | None + H: bytes | None + K: int | None + initial_kex_done: bool + in_kex: bool + authenticated: bool + lock: Lock + channel_events: dict[int, Event] + channels_seen: dict[int, bool] + default_max_packet_size: int + default_window_size: int + saved_exception: Exception | None + clear_to_send: Event + clear_to_send_lock: Lock + clear_to_send_timeout: float + log_name: str + logger: Logger + auth_handler: AuthHandler | None + global_response: Message | None + completion_event: Event | None + banner_timeout: float + handshake_timeout: float + auth_timeout: float + disabled_algorithms: dict[str, Iterable[str]] | None + server_mode: bool + server_object: ServerInterface | None + server_key_dict: dict[str, PKey] + server_accepts: list[Channel] + server_accept_cv: Condition + subsystem_table: dict[str, tuple[type[SubsystemHandler], tuple[Any, ...], dict[str, Any]]] + sys: ModuleType + def __init__( + self, + sock: _SocketLike, + default_window_size: int = ..., + default_max_packet_size: int = ..., + gss_kex: bool = ..., + gss_deleg_creds: bool = ..., + disabled_algorithms: dict[str, Iterable[str]] | None = ..., + server_sig_algs: bool = ..., + ) -> None: ... + @property + def preferred_ciphers(self) -> Sequence[str]: ... + @property + def preferred_macs(self) -> Sequence[str]: ... + @property + def preferred_keys(self) -> Sequence[str]: ... + @property + def preferred_kex(self) -> Sequence[str]: ... + @property + def preferred_compression(self) -> Sequence[str]: ... + def atfork(self) -> None: ... + def get_security_options(self) -> SecurityOptions: ... + def set_gss_host(self, gss_host: str | None, trust_dns: bool = ..., gssapi_requested: bool = ...) -> None: ... + def start_client(self, event: Event | None = ..., timeout: float | None = ...) -> None: ... + def start_server(self, event: Event = ..., server: ServerInterface | None = ...) -> None: ... + def add_server_key(self, key: PKey) -> None: ... + def get_server_key(self) -> PKey | None: ... + @staticmethod + def load_server_moduli(filename: str | None = ...) -> bool: ... + def close(self) -> None: ... + def get_remote_server_key(self) -> PKey: ... + def is_active(self) -> bool: ... + def open_session( + self, window_size: int | None = ..., max_packet_size: int | None = ..., timeout: float | None = ... + ) -> Channel: ... + def open_x11_channel(self, src_addr: _Addr = ...) -> Channel: ... + def open_forward_agent_channel(self) -> Channel: ... + def open_forwarded_tcpip_channel(self, src_addr: _Addr, dest_addr: _Addr) -> Channel: ... + def open_channel( + self, + kind: str, + dest_addr: _Addr | None = ..., + src_addr: _Addr | None = ..., + window_size: int | None = ..., + max_packet_size: int | None = ..., + timeout: float | None = ..., + ) -> Channel: ... + def request_port_forward( + self, address: str, port: int, handler: Callable[[Channel, _Addr, _Addr], object] | None = ... + ) -> int: ... + def cancel_port_forward(self, address: str, port: int) -> None: ... + def open_sftp_client(self) -> SFTPClient | None: ... + def send_ignore(self, byte_count: int = ...) 
-> None: ... + def renegotiate_keys(self) -> None: ... + def set_keepalive(self, interval: int) -> None: ... + def global_request(self, kind: str, data: Iterable[Any] | None = ..., wait: bool = ...) -> Message | None: ... + def accept(self, timeout: float | None = ...) -> Channel | None: ... + def connect( + self, + hostkey: PKey | None = ..., + username: str = ..., + password: str | None = ..., + pkey: PKey | None = ..., + gss_host: str | None = ..., + gss_auth: bool = ..., + gss_kex: bool = ..., + gss_deleg_creds: bool = ..., + gss_trust_dns: bool = ..., + ) -> None: ... + def get_exception(self) -> Exception | None: ... + def set_subsystem_handler(self, name: str, handler: type[SubsystemHandler], *larg: Any, **kwarg: Any) -> None: ... + def is_authenticated(self) -> bool: ... + def get_username(self) -> str | None: ... + def get_banner(self) -> bytes | None: ... + def auth_none(self, username: str) -> list[str]: ... + def auth_password(self, username: str, password: str, event: Event | None = ..., fallback: bool = ...) -> list[str]: ... + def auth_publickey(self, username: str, key: PKey, event: Event | None = ...) -> list[str]: ... + def auth_interactive(self, username: str, handler: _InteractiveCallback, submethods: str = ...) -> list[str]: ... + def auth_interactive_dumb( + self, username: str, handler: _InteractiveCallback | None = ..., submethods: str = ... + ) -> list[str]: ... + def auth_gssapi_with_mic(self, username: str, gss_host: str, gss_deleg_creds: bool) -> list[str]: ... + def auth_gssapi_keyex(self, username: str) -> list[str]: ... + def set_log_channel(self, name: str) -> None: ... + def get_log_channel(self) -> str: ... + def set_hexdump(self, hexdump: bool) -> None: ... + def get_hexdump(self) -> bool: ... + def use_compression(self, compress: bool = ...) -> None: ... + def getpeername(self) -> tuple[str, int]: ... + def stop_thread(self) -> None: ... + def run(self) -> None: ... + +class SecurityOptions: + def __init__(self, transport: Transport) -> None: ... + @property + def ciphers(self) -> Sequence[str]: ... + @ciphers.setter + def ciphers(self, x: Sequence[str]) -> None: ... + @property + def digests(self) -> Sequence[str]: ... + @digests.setter + def digests(self, x: Sequence[str]) -> None: ... + @property + def key_types(self) -> Sequence[str]: ... + @key_types.setter + def key_types(self, x: Sequence[str]) -> None: ... + @property + def kex(self) -> Sequence[str]: ... + @kex.setter + def kex(self, x: Sequence[str]) -> None: ... + @property + def compression(self) -> Sequence[str]: ... + @compression.setter + def compression(self, x: Sequence[str]) -> None: ... + +class ChannelMap: + def __init__(self) -> None: ... + def put(self, chanid: int, chan: Channel) -> None: ... + def get(self, chanid: int) -> Channel: ... + def delete(self, chanid: int) -> None: ... + def values(self) -> list[Channel]: ... + def __len__(self) -> int: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/util.pyi new file mode 100644 index 00000000..7f94a074 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/util.pyi @@ -0,0 +1,46 @@ +from hashlib import _Hash +from logging import Logger, LogRecord +from types import TracebackType +from typing import IO, AnyStr, Protocol +from typing_extensions import Self + +from paramiko.config import SSHConfig, SSHConfigDict +from paramiko.hostkeys import HostKeys + +class SupportsClose(Protocol): + def close(self) -> None: ... + +def inflate_long(s: bytes, always_positive: bool = ...) -> int: ... +def deflate_long(n: int, add_sign_padding: bool = ...) -> bytes: ... +def format_binary(data: bytes, prefix: str = ...) -> list[str]: ... +def format_binary_line(data: bytes) -> str: ... +def safe_string(s: bytes) -> bytes: ... +def bit_length(n: int) -> int: ... +def tb_strings() -> list[str]: ... +def generate_key_bytes(hash_alg: type[_Hash], salt: bytes, key: bytes | str, nbytes: int) -> bytes: ... +def load_host_keys(filename: str) -> HostKeys: ... +def parse_ssh_config(file_obj: IO[str]) -> SSHConfig: ... +def lookup_ssh_host_config(hostname: str, config: SSHConfig) -> SSHConfigDict: ... +def mod_inverse(x: int, m: int) -> int: ... +def get_thread_id() -> int: ... +def log_to_file(filename: str, level: int = ...) -> None: ... + +class PFilter: + def filter(self, record: LogRecord) -> bool: ... + +def get_logger(name: str) -> Logger: ... +def constant_time_bytes_eq(a: AnyStr, b: AnyStr) -> bool: ... + +class ClosingContextManager: + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + +def clamp_value(minimum: int, val: int, maximum: int) -> int: ... + +# This function attempts to convert objects to bytes, +# *but* just returns the object unchanged if that was unsuccessful! +def asbytes(s: object) -> object: ... +def b(s: str | bytes, encoding: str = "utf8") -> bytes: ... +def u(s: str | bytes, encoding: str = "utf8") -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/win_openssh.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/win_openssh.pyi new file mode 100644 index 00000000..bc74d4c2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/win_openssh.pyi @@ -0,0 +1,12 @@ +import sys + +if sys.platform == "win32": + PIPE_NAME: str + + def can_talk_to_agent() -> bool: ... + + class OpenSSHAgentConnection: + def __init__(self) -> None: ... + def send(self, data: bytes) -> int: ... + def recv(self, n: int) -> bytes: ... + def close(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/win_pageant.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/win_pageant.pyi new file mode 100644 index 00000000..9c8dba68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/paramiko/paramiko/win_pageant.pyi @@ -0,0 +1,17 @@ +import ctypes +import sys +from typing_extensions import Literal, TypeAlias + +if sys.platform == "win32": + win32con_WM_COPYDATA: int + def can_talk_to_agent() -> bool: ... + + ULONG_PTR: TypeAlias = ctypes.c_uint64 | ctypes.c_uint32 + + class COPYDATASTRUCT(ctypes.Structure): ... + + class PageantConnection: + def __init__(self) -> None: ... + def send(self, data: bytes) -> None: ... + def recv(self, n: int) -> Literal[""] | bytes: ... + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..f870d696 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/@tests/stubtest_allowlist.txt @@ -0,0 +1,8 @@ +parsimonious.nodes.RuleDecoratorMeta.__new__ + +# Magic: +parsimonious.adhoc_expression + +# Tests are shipped with the source, we ignore it: +parsimonious.tests +parsimonious\.tests\..* diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/METADATA.toml new file mode 100644 index 00000000..5c7ed21e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/METADATA.toml @@ -0,0 +1 @@ +version = "0.10.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/__init__.pyi new file mode 100644 index 00000000..31f99b47 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/__init__.pyi @@ -0,0 +1,8 @@ +from parsimonious.exceptions import ( + BadGrammar as BadGrammar, + IncompleteParseError as IncompleteParseError, + ParseError as ParseError, + UndefinedLabel as UndefinedLabel, +) +from parsimonious.grammar import Grammar as Grammar, TokenGrammar as TokenGrammar +from parsimonious.nodes import NodeVisitor as NodeVisitor, VisitationError as VisitationError, rule as rule diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/exceptions.pyi new file mode 100644 index 00000000..9401ce5f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/exceptions.pyi @@ -0,0 +1,25 @@ +from parsimonious.expressions import Expression +from parsimonious.grammar import LazyReference +from parsimonious.nodes import Node +from parsimonious.utils import StrAndRepr + +class ParseError(StrAndRepr, Exception): + text: str + pos: int + expr: Expression | None + def __init__(self, text: str, pos: int = ..., expr: Expression | None = ...) 
-> None: ... + def line(self) -> int: ... + def column(self) -> int: ... + +class LeftRecursionError(ParseError): ... +class IncompleteParseError(ParseError): ... + +class VisitationError(Exception): + original_class: type[BaseException] + def __init__(self, exc: BaseException, exc_class: type[BaseException], node: Node) -> None: ... + +class BadGrammar(StrAndRepr, Exception): ... + +class UndefinedLabel(BadGrammar): + label: LazyReference + def __init__(self, label: LazyReference) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/expressions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/expressions.pyi new file mode 100644 index 00000000..eef53891 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/expressions.pyi @@ -0,0 +1,76 @@ +import collections.abc +from collections.abc import Callable, Mapping +from re import Pattern +from typing import Any +from typing_extensions import Self, TypeAlias + +from parsimonious.exceptions import ParseError +from parsimonious.grammar import Grammar +from parsimonious.nodes import Node +from parsimonious.utils import StrAndRepr + +_CALLABLE_RETURN_TYPE: TypeAlias = int | tuple[int, list[Node]] | Node | None +_CALLABLE_TYPE: TypeAlias = ( + Callable[[str, int], _CALLABLE_RETURN_TYPE] + | Callable[[str, int, Mapping[tuple[int, int], Node], ParseError, Grammar], _CALLABLE_RETURN_TYPE] +) + +def is_callable(value: object) -> bool: ... +def expression(callable: _CALLABLE_TYPE, rule_name: str, grammar: Grammar) -> Expression: ... + +IN_PROGRESS: object + +class Expression(StrAndRepr): + name: str + identity_tuple: tuple[str] + def __init__(self, name: str = ...) -> None: ... + def resolve_refs(self, rule_map: Mapping[str, Expression]) -> Self: ... + def parse(self, text: str, pos: int = ...) -> Node: ... + def match(self, text: str, pos: int = ...) -> Node: ... + def match_core(self, text: str, pos: int, cache: Mapping[tuple[int, int], Node], error: ParseError) -> Node: ... + def as_rule(self) -> str: ... + +class Literal(Expression): + literal: str + identity_tuple: tuple[str, str] # type: ignore[assignment] + def __init__(self, literal: str, name: str = ...) -> None: ... + +class TokenMatcher(Literal): ... + +class Regex(Expression): + re: Pattern[str] + identity_tuple: tuple[str, Pattern[str]] # type: ignore[assignment] + def __init__( + self, + pattern: str, + name: str = ..., + ignore_case: bool = ..., + locale: bool = ..., + multiline: bool = ..., + dot_all: bool = ..., + unicode: bool = ..., + verbose: bool = ..., + ascii: bool = ..., + ) -> None: ... + +class Compound(Expression): + members: collections.abc.Sequence[Expression] + def __init__(self, *members: Expression, **kwargs: Any) -> None: ... + +class Sequence(Compound): ... +class OneOf(Compound): ... + +class Lookahead(Compound): + negativity: bool + def __init__(self, member: Expression, *, negative: bool = ..., **kwargs: Any) -> None: ... + +def Not(term: Expression) -> Lookahead: ... + +class Quantifier(Compound): + min: int + max: float + def __init__(self, member: Expression, *, min: int = ..., max: float = ..., name: str = ..., **kwargs: Any) -> None: ... + +def ZeroOrMore(member: Expression, name: str = ...) -> Quantifier: ... +def OneOrMore(member: Expression, name: str = ..., min: int = ...) -> Quantifier: ... +def Optional(member: Expression, name: str = ...) -> Quantifier: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/grammar.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/grammar.pyi new file mode 100644 index 00000000..ee6dde71 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/grammar.pyi @@ -0,0 +1,58 @@ +import collections.abc +from _typeshed import Incomplete +from collections import OrderedDict +from collections.abc import Callable, Mapping +from typing import Any, NoReturn + +from parsimonious.expressions import _CALLABLE_TYPE, Expression, Literal, Lookahead, OneOf, Regex, Sequence, TokenMatcher +from parsimonious.nodes import Node, NodeVisitor + +class Grammar(OrderedDict[str, Expression]): + default_rule: Expression | Incomplete + def __init__(self, rules: str = ..., **more_rules: Expression | _CALLABLE_TYPE) -> None: ... + def default(self, rule_name: str) -> Grammar: ... + def parse(self, text: str, pos: int = ...) -> Node: ... + def match(self, text: str, pos: int = ...) -> Node: ... + +class TokenGrammar(Grammar): ... +class BootstrappingGrammar(Grammar): ... + +rule_syntax: str + +class LazyReference(str): + name: str + def resolve_refs(self, rule_map: Mapping[str, Expression | LazyReference]) -> Expression: ... + +class RuleVisitor(NodeVisitor): + quantifier_classes: dict[str, type[Expression]] + visit_expression: Callable[[RuleVisitor, Node, collections.abc.Sequence[Any]], Any] + visit_term: Callable[[RuleVisitor, Node, collections.abc.Sequence[Any]], Any] + visit_atom: Callable[[RuleVisitor, Node, collections.abc.Sequence[Any]], Any] + custom_rules: dict[str, Expression] + def __init__(self, custom_rules: Mapping[str, Expression] | None = ...) -> None: ... + def visit_parenthesized(self, node: Node, parenthesized: collections.abc.Sequence[Any]) -> Expression: ... + def visit_quantifier(self, node: Node, quantifier: collections.abc.Sequence[Any]) -> Node: ... + def visit_quantified(self, node: Node, quantified: collections.abc.Sequence[Any]) -> Expression: ... + def visit_lookahead_term(self, node: Node, lookahead_term: collections.abc.Sequence[Any]) -> Lookahead: ... + def visit_not_term(self, node: Node, not_term: collections.abc.Sequence[Any]) -> Lookahead: ... + def visit_rule(self, node: Node, rule: collections.abc.Sequence[Any]) -> Expression: ... + def visit_sequence(self, node: Node, sequence: collections.abc.Sequence[Any]) -> Sequence: ... + def visit_ored(self, node: Node, ored: collections.abc.Sequence[Any]) -> OneOf: ... + def visit_or_term(self, node: Node, or_term: collections.abc.Sequence[Any]) -> Expression: ... + def visit_label(self, node: Node, label: collections.abc.Sequence[Any]) -> str: ... + def visit_reference(self, node: Node, reference: collections.abc.Sequence[Any]) -> LazyReference: ... + def visit_regex(self, node: Node, regex: collections.abc.Sequence[Any]) -> Regex: ... + def visit_spaceless_literal(self, spaceless_literal: Node, visited_children: collections.abc.Sequence[Any]) -> Literal: ... + def visit_literal(self, node: Node, literal: collections.abc.Sequence[Any]) -> Literal: ... + def generic_visit(self, node: Node, visited_children: collections.abc.Sequence[Any]) -> collections.abc.Sequence[Any] | Node: ... # type: ignore[override] + def visit_rules( + self, node: Node, rules_list: collections.abc.Sequence[Any] + ) -> tuple[OrderedDict[str, Expression], Expression | None]: ... 
+ +class TokenRuleVisitor(RuleVisitor): + def visit_spaceless_literal( + self, spaceless_literal: Node, visited_children: collections.abc.Sequence[Any] + ) -> TokenMatcher: ... + def visit_regex(self, node: Node, regex: collections.abc.Sequence[Any]) -> NoReturn: ... + +rule_grammar: Grammar diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/nodes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/nodes.pyi new file mode 100644 index 00000000..03aff4a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/nodes.pyi @@ -0,0 +1,41 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Iterator, Sequence +from re import Match +from typing import Any, NoReturn, TypeVar + +from parsimonious.exceptions import VisitationError as VisitationError +from parsimonious.expressions import Expression +from parsimonious.grammar import Grammar + +class Node: + expr: Expression + full_text: str + start: int + end: int + children: Sequence[Node] + def __init__(self, expr: Expression, full_text: str, start: int, end: int, children: Sequence[Node] | None = ...) -> None: ... + @property + def expr_name(self) -> str: ... + def __iter__(self) -> Iterator[Node]: ... + @property + def text(self) -> str: ... + def prettily(self, error: Node | None = ...) -> str: ... + def __repr__(self, top_level: bool = ...) -> str: ... + +class RegexNode(Node): + match: Match[str] + +class RuleDecoratorMeta(type): ... + +class NodeVisitor(metaclass=RuleDecoratorMeta): + grammar: Grammar | Incomplete + unwrapped_exceptions: tuple[type[BaseException], ...] + def visit(self, node: Node) -> Any: ... + def generic_visit(self, node: Node, visited_children: Sequence[Any]) -> NoReturn: ... + def parse(self, text: str, pos: int = ...) -> Node: ... + def match(self, text: str, pos: int = ...) -> Node: ... + def lift_child(self, node: Node, children: Sequence[Any]) -> Any: ... + +_CallableT = TypeVar("_CallableT", bound=Callable[..., Any]) + +def rule(rule_string: str) -> Callable[[_CallableT], _CallableT]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/utils.pyi new file mode 100644 index 00000000..eea5d118 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/parsimonious/parsimonious/utils.pyi @@ -0,0 +1,10 @@ +import ast +from typing import Any + +class StrAndRepr: ... + +def evaluate_string(string: str | ast.AST) -> Any: ... + +class Token(StrAndRepr): + type: str + def __init__(self, type: str) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..d2087ce3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/@tests/stubtest_allowlist.txt @@ -0,0 +1,50 @@ +# proxy module that uses some import magic incompatible with stubtest +passlib.hash + +# django unsupported in stubs +passlib.ext.django.models + +# uses @memoized_property at runtime, but @property in the stubs +passlib.crypto.digest.HashInfo.supported +passlib.crypto.digest.HashInfo.supported_by_fastpbkdf2 +passlib.crypto.digest.HashInfo.supported_by_hashlib_pbkdf2 +passlib.pwd.PhraseGenerator.symbol_count +passlib.pwd.SequenceGenerator.entropy +passlib.pwd.SequenceGenerator.entropy_per_symbol +passlib.pwd.SequenceGenerator.symbol_count +passlib.pwd.WordGenerator.symbol_count +passlib.totp.TotpMatch.cache_seconds +passlib.totp.TotpMatch.cache_time +passlib.totp.TotpMatch.expected_counter +passlib.totp.TotpMatch.expire_time +passlib.totp.TotpMatch.skipped +passlib.totp.TotpToken.expire_time +passlib.totp.TotpToken.start_time + +# "hybrid" method that can be called on an instance or class +passlib.totp.TOTP.normalize_token + +# import problem +passlib.utils.compat._ordered_dict + +# initialized to None, but set by concrete sub-classes +passlib.handlers.pbkdf2.Pbkdf2DigestHandler.default_rounds +passlib.utils.handlers.GenericHandler.setting_kwds + +# set to None on class level, but initialized in __init__ +passlib.utils.handlers.HasManyIdents.ident +passlib.utils.handlers.HasRounds.rounds + +# Errors in `__all__` at runtime: +# TODO: change after https://github.com/python/mypy/pull/14217 is released +passlib.handlers.oracle.__all__ +passlib.handlers.oracle.oracle11g +passlib.handlers.oracle.oracle10g +passlib.handlers.mysql.__all__ +passlib.handlers.mysql.mysq41 + +# Compat tools are ignored: +passlib.utils.compat.* + +# Tests are not included: +passlib.tests.* diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/@tests/stubtest_allowlist_win32.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/@tests/stubtest_allowlist_win32.txt new file mode 100644 index 00000000..217d59c7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/@tests/stubtest_allowlist_win32.txt @@ -0,0 +1,2 @@ +# Only exists if crypt is present +passlib.hosts.host_context diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/METADATA.toml new file mode 100644 index 00000000..ef796600 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/METADATA.toml @@ -0,0 +1 @@ +version = "1.7.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/apache.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/apache.pyi new file mode 100644 index 
00000000..fcb0a458 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/apache.pyi @@ -0,0 +1,59 @@ +from _typeshed import Incomplete +from typing import Any + +class _CommonFile: + encoding: Any + return_unicode: Any + autosave: bool + @classmethod + def from_string(cls, data, **kwds): ... + @classmethod + def from_path(cls, path, **kwds): ... + def __init__( + self, + path: Incomplete | None = ..., + new: bool = ..., + autoload: bool = ..., + autosave: bool = ..., + encoding: str = ..., + return_unicode=..., + ) -> None: ... + @property + def path(self): ... + @path.setter + def path(self, value) -> None: ... + @property + def mtime(self): ... + def load_if_changed(self): ... + def load(self, path: Incomplete | None = ..., force: bool = ...): ... + def load_string(self, data) -> None: ... + def save(self, path: Incomplete | None = ...) -> None: ... + def to_string(self): ... + +class HtpasswdFile(_CommonFile): + context: Any + def __init__(self, path: Incomplete | None = ..., default_scheme: Incomplete | None = ..., context=..., **kwds) -> None: ... + def users(self): ... + def set_password(self, user, password): ... + def update(self, user, password): ... + def get_hash(self, user): ... + def set_hash(self, user, hash): ... + def find(self, user): ... + def delete(self, user): ... + def check_password(self, user, password): ... + def verify(self, user, password): ... + +class HtdigestFile(_CommonFile): + default_realm: Any + def __init__(self, path: Incomplete | None = ..., default_realm: Incomplete | None = ..., **kwds) -> None: ... + def realms(self): ... + def users(self, realm: Incomplete | None = ...): ... + def set_password(self, user, realm: Incomplete | None = ..., password=...): ... + def update(self, user, realm, password): ... + def get_hash(self, user, realm: Incomplete | None = ...): ... + def set_hash(self, user, realm: Incomplete | None = ..., hash=...): ... + def find(self, user, realm): ... + def delete(self, user, realm: Incomplete | None = ...): ... + def delete_realm(self, realm): ... + def check_password(self, user, realm: Incomplete | None = ..., password=...): ... + def verify(self, user, realm, password): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/apps.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/apps.pyi new file mode 100644 index 00000000..2b87ba44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/apps.pyi @@ -0,0 +1,35 @@ +from .context import CryptContext + +__all__ = [ + "custom_app_context", + "django_context", + "ldap_context", + "ldap_nocrypt_context", + "mysql_context", + "mysql4_context", + "mysql3_context", + "phpass_context", + "phpbb3_context", + "postgres_context", +] + +master_context: CryptContext +custom_app_context: CryptContext +django10_context: CryptContext +django14_context: CryptContext +django16_context: CryptContext +django110_context: CryptContext +django21_context: CryptContext +django_context = django21_context # noqa: F821 +std_ldap_schemes: list[str] +ldap_nocrypt_context: CryptContext +ldap_context: CryptContext +mysql3_context: CryptContext +mysql4_context: CryptContext +mysql_context = mysql4_context # noqa: F821 +postgres_context: CryptContext +phpass_context: CryptContext +phpbb3_context: CryptContext +roundup10_context: CryptContext +roundup15_context: CryptContext +roundup_context = roundup15_context # noqa: F821 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/context.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/context.pyi new file mode 100644 index 00000000..f2c511a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/context.pyi @@ -0,0 +1,85 @@ +from _typeshed import Incomplete, StrOrBytesPath, SupportsItems +from typing import Any +from typing_extensions import Self + +class CryptPolicy: + @classmethod + def from_path(cls, path, section: str = ..., encoding: str = ...): ... + @classmethod + def from_string(cls, source, section: str = ..., encoding: str = ...): ... + @classmethod + def from_source(cls, source, _warn: bool = ...): ... + @classmethod + def from_sources(cls, sources, _warn: bool = ...): ... + def replace(self, *args, **kwds): ... + def __init__(self, *args, **kwds) -> None: ... + def has_schemes(self): ... + def iter_handlers(self): ... + def schemes(self, resolve: bool = ...): ... + def get_handler(self, name: Incomplete | None = ..., category: Incomplete | None = ..., required: bool = ...): ... + def get_min_verify_time(self, category: Incomplete | None = ...): ... + def get_options(self, name, category: Incomplete | None = ...): ... + def handler_is_deprecated(self, name, category: Incomplete | None = ...): ... + def iter_config(self, ini: bool = ..., resolve: bool = ...): ... + def to_dict(self, resolve: bool = ...): ... + def to_file(self, stream, section: str = ...) -> None: ... + def to_string(self, section: str = ..., encoding: Incomplete | None = ...): ... + +class CryptContext: + @classmethod + def from_string(cls, source: str | bytes, section: str = ..., encoding: str = ...) -> Self: ... + @classmethod + def from_path(cls, path: StrOrBytesPath, section: str = ..., encoding: str = ...) -> Self: ... + def copy(self, **kwds: Any) -> CryptContext: ... + def using(self, **kwds: Any) -> CryptContext: ... + def replace(self, **kwds): ... + def __init__(self, schemes: Incomplete | None = ..., policy=..., _autoload: bool = ..., **kwds) -> None: ... 
+ policy: CryptPolicy + def load_path(self, path: StrOrBytesPath, update: bool = ..., section: str = ..., encoding: str = ...) -> None: ... + def load( + self, + source: str | bytes | SupportsItems[str, Any] | CryptContext, + update: bool = ..., + section: str = ..., + encoding: str = ..., + ) -> None: ... + def update(self, *args: Any, **kwds: Any) -> None: ... + def schemes(self, resolve: bool = ..., category: Incomplete | None = ..., unconfigured: bool = ...): ... + def default_scheme(self, category: Incomplete | None = ..., resolve: bool = ..., unconfigured: bool = ...): ... + def handler(self, scheme: Incomplete | None = ..., category: Incomplete | None = ..., unconfigured: bool = ...): ... + @property + def context_kwds(self): ... + def to_dict(self, resolve: bool = ...) -> dict[str, Any]: ... + def to_string(self, section: str = ...) -> str: ... + mvt_estimate_max_samples: int + mvt_estimate_min_samples: int + mvt_estimate_max_time: int + mvt_estimate_resolution: float + harden_verify: Any + min_verify_time: int + def reset_min_verify_time(self) -> None: ... + def needs_update( + self, hash: str | bytes, scheme: str | None = ..., category: str | None = ..., secret: str | bytes | None = ... + ) -> bool: ... + def hash_needs_update(self, hash, scheme: Incomplete | None = ..., category: Incomplete | None = ...): ... + def genconfig(self, scheme: Incomplete | None = ..., category: Incomplete | None = ..., **settings): ... + def genhash(self, secret, config, scheme: Incomplete | None = ..., category: Incomplete | None = ..., **kwds): ... + def identify( + self, hash, category: Incomplete | None = ..., resolve: bool = ..., required: bool = ..., unconfigured: bool = ... + ): ... + def hash(self, secret: str | bytes, scheme: str | None = ..., category: str | None = ..., **kwds: Any) -> str: ... + def encrypt(self, *args, **kwds): ... + def verify( + self, secret: str | bytes, hash: str | bytes | None, scheme: str | None = ..., category: str | None = ..., **kwds: Any + ) -> bool: ... + def verify_and_update( + self, secret: str | bytes, hash: str | bytes | None, scheme: str | None = ..., category: str | None = ..., **kwds: Any + ) -> tuple[bool, str | None]: ... + def dummy_verify(self, elapsed: int = ...): ... + def is_enabled(self, hash: str | bytes) -> bool: ... + def disable(self, hash: str | bytes | None = ...) -> str: ... + def enable(self, hash: str | bytes) -> str: ... + +class LazyCryptContext(CryptContext): + def __init__(self, schemes: Incomplete | None = ..., **kwds) -> None: ... + def __getattribute__(self, attr: str) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/__init__.pyi new file mode 100644 index 00000000..093b06fe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/__init__.pyi @@ -0,0 +1,3 @@ +from passlib.crypto._blowfish.unrolled import BlowfishEngine as BlowfishEngine + +def raw_bcrypt(password, ident, salt, log_rounds): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/_gen_files.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/_gen_files.pyi new file mode 100644 index 00000000..fe0d7886 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/_gen_files.pyi @@ -0,0 +1,11 @@ +from typing import Any + +def varlist(name, count): ... +def indent_block(block, padding): ... + +BFSTR: Any + +def render_encipher(write, indent: int = ...) -> None: ... +def write_encipher_function(write, indent: int = ...) -> None: ... +def write_expand_function(write, indent: int = ...) -> None: ... +def main() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/base.pyi new file mode 100644 index 00000000..8ad01d75 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/base.pyi @@ -0,0 +1,13 @@ +from typing import Any + +class BlowfishEngine: + P: Any + S: Any + def __init__(self) -> None: ... + @staticmethod + def key_to_words(data, size: int = ...): ... + def encipher(self, l, r): ... + def expand(self, key_words) -> None: ... + def eks_salted_expand(self, key_words, salt_words) -> None: ... + def eks_repeated_expand(self, key_words, salt_words, rounds) -> None: ... + def repeat_encipher(self, l, r, count): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/unrolled.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/unrolled.pyi new file mode 100644 index 00000000..f0ad5475 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_blowfish/unrolled.pyi @@ -0,0 +1,5 @@ +from passlib.crypto._blowfish.base import BlowfishEngine as _BlowfishEngine + +class BlowfishEngine(_BlowfishEngine): + def encipher(self, l, r): ... + def expand(self, key_words) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_md4.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_md4.pyi new file mode 100644 index 00000000..0a21a688 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/_md4.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +class md4: + name: str + digest_size: int + digestsize: int + block_size: int + def __init__(self, content: Incomplete | None = ...) -> None: ... + def update(self, content) -> None: ... + def copy(self): ... + def digest(self): ... + def hexdigest(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/des.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/des.pyi new file mode 100644 index 00000000..4d342d8e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/des.pyi @@ -0,0 +1,5 @@ +__all__ = ["expand_des_key", "des_encrypt_block"] + +def expand_des_key(key): ... 
+def des_encrypt_block(key, input, salt: int = ..., rounds: int = ...): ... +def des_encrypt_int_block(key, input, salt: int = ..., rounds: int = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/digest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/digest.pyi new file mode 100644 index 00000000..fcc8070d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/digest.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete +from typing import Any + +from passlib.utils import SequenceMixin + +def lookup_hash(digest, return_unknown: bool = ..., required: bool = ...): ... +def norm_hash_name(name, format: str = ...): ... + +class HashInfo(SequenceMixin): + name: Any + iana_name: Any + aliases: Any + const: Any + digest_size: Any + block_size: Any + error_text: Any + unknown: bool + def __init__(self, const, names, required: bool = ...) -> None: ... + @property + def supported(self): ... + @property + def supported_by_fastpbkdf2(self): ... + @property + def supported_by_hashlib_pbkdf2(self): ... + +def compile_hmac(digest, key, multipart: bool = ...): ... +def pbkdf1(digest, secret, salt, rounds, keylen: Incomplete | None = ...): ... +def pbkdf2_hmac(digest, secret, salt, rounds, keylen: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/__init__.pyi new file mode 100644 index 00000000..88c74fe4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/__init__.pyi @@ -0,0 +1,2 @@ +def validate(n, r, p): ... +def scrypt(secret, salt, n, r, p: int = ..., keylen: int = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/_builtin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/_builtin.pyi new file mode 100644 index 00000000..d7fa6867 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/_builtin.pyi @@ -0,0 +1,19 @@ +from collections.abc import Generator +from typing import Any + +class ScryptEngine: + n: int + r: int + p: int + smix_bytes: int + iv_bytes: int + bmix_len: int + bmix_half_len: int + bmix_struct: Any + integerify: Any + @classmethod + def execute(cls, secret, salt, n, r, p, keylen): ... + def __init__(self, n, r, p): ... + def run(self, secret, salt, keylen): ... + def smix(self, input) -> Generator[None, None, Any]: ... + def bmix(self, source, target) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/_gen_files.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/_gen_files.pyi new file mode 100644 index 00000000..7e7363e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/_gen_files.pyi @@ -0,0 +1 @@ +def main() -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/_salsa.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/_salsa.pyi new file mode 100644 index 00000000..d5ead632 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/crypto/scrypt/_salsa.pyi @@ -0,0 +1 @@ +def salsa20(input): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/exc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/exc.pyi new file mode 100644 index 00000000..d7664ec1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/exc.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete +from typing import Any + +class UnknownBackendError(ValueError): + hasher: Any + backend: Any + def __init__(self, hasher, backend) -> None: ... + +class MissingBackendError(RuntimeError): ... +class InternalBackendError(RuntimeError): ... +class PasswordValueError(ValueError): ... + +class PasswordSizeError(PasswordValueError): + max_size: Any + def __init__(self, max_size, msg: Incomplete | None = ...) -> None: ... + +class PasswordTruncateError(PasswordSizeError): + def __init__(self, cls, msg: Incomplete | None = ...) -> None: ... + +class PasslibSecurityError(RuntimeError): ... + +class TokenError(ValueError): + def __init__(self, msg: Incomplete | None = ..., *args, **kwds) -> None: ... + +class MalformedTokenError(TokenError): ... +class InvalidTokenError(TokenError): ... + +class UsedTokenError(TokenError): + expire_time: Any + def __init__(self, *args, **kwds) -> None: ... + +class UnknownHashError(ValueError): + value: Any + message: Any + def __init__(self, message: Incomplete | None = ..., value: Incomplete | None = ...) -> None: ... + +class PasslibWarning(UserWarning): ... +class PasslibConfigWarning(PasslibWarning): ... +class PasslibHashWarning(PasslibWarning): ... +class PasslibRuntimeWarning(PasslibWarning): ... +class PasslibSecurityWarning(PasslibWarning): ... + +def type_name(value): ... +def ExpectedTypeError(value, expected, param): ... +def ExpectedStringError(value, param): ... +def MissingDigestError(handler: Incomplete | None = ...): ... +def NullPasswordError(handler: Incomplete | None = ...): ... +def InvalidHashError(handler: Incomplete | None = ...): ... +def MalformedHashError(handler: Incomplete | None = ..., reason: Incomplete | None = ...): ... +def ZeroPaddedRoundsError(handler: Incomplete | None = ...): ... +def ChecksumSizeError(handler, raw: bool = ...): ... + +ENABLE_DEBUG_ONLY_REPR: bool + +def debug_only_repr(value, param: str = ...): ... +def CryptBackendError(handler, config, hash, source: str = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/django/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/django/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/django/models.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/django/models.pyi new file mode 100644 index 00000000..2fd1d214 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/django/models.pyi @@ -0,0 +1,3 @@ +from typing import Any + +password_context: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/django/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/django/utils.pyi new file mode 100644 index 00000000..e032611a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ext/django/utils.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete +from typing import Any + +__all__ = ["DJANGO_VERSION", "MIN_DJANGO_VERSION", "get_preset_config", "quirks"] + +DJANGO_VERSION: tuple[Any, ...] +MIN_DJANGO_VERSION: tuple[int, int] + +class quirks: + none_causes_check_password_error: Any + empty_is_usable_password: Any + invalid_is_usable_password: Any + +def get_preset_config(name): ... + +class DjangoTranslator: + context: Any + def __init__(self, context: Incomplete | None = ..., **kwds) -> None: ... + def reset_hashers(self) -> None: ... + def passlib_to_django_name(self, passlib_name): ... + def passlib_to_django(self, passlib_hasher, cached: bool = ...): ... + def django_to_passlib_name(self, django_name): ... + def django_to_passlib(self, django_name, cached: bool = ...): ... + def resolve_django_hasher(self, django_name, cached: bool = ...): ... + +class DjangoContextAdapter(DjangoTranslator): + context: Any + is_password_usable: Any + enabled: bool + patched: bool + log: Any + def __init__(self, context: Incomplete | None = ..., get_user_category: Incomplete | None = ..., **kwds) -> None: ... + def reset_hashers(self) -> None: ... + def get_hashers(self): ... + def get_hasher(self, algorithm: str = ...): ... + def identify_hasher(self, encoded): ... + def make_password(self, password, salt: Incomplete | None = ..., hasher: str = ...): ... + def check_password(self, password, encoded, setter: Incomplete | None = ..., preferred: str = ...): ... + def user_check_password(self, user, password): ... + def user_set_password(self, user, password) -> None: ... + def get_user_category(self, user): ... + HASHERS_PATH: str + MODELS_PATH: str + USER_CLASS_PATH: Any + FORMS_PATH: str + patch_locations: Any + def install_patch(self): ... + def remove_patch(self): ... + def load_model(self) -> None: ... + +class ProxyProperty: + attr: Any + def __init__(self, attr) -> None: ... + def __get__(self, obj, cls): ... + def __set__(self, obj, value) -> None: ... + def __delete__(self, obj) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/argon2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/argon2.pyi new file mode 100644 index 00000000..5624b27d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/argon2.pyi @@ -0,0 +1,85 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar + +import passlib.utils.handlers as uh + +class _DummyCffiHasher: + time_cost: int + memory_cost: int + parallelism: int + salt_len: int + hash_len: int + +class _Argon2Common( # type: ignore[misc] + uh.SubclassBackendMixin, uh.ParallelismMixin, uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler +): + name: ClassVar[str] + checksum_size: ClassVar[int] + default_salt_size: ClassVar[int] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + max_parallelism: ClassVar[int] + max_version: ClassVar[int] + min_desired_version: ClassVar[int | None] + min_memory_cost: ClassVar[int] + max_threads: ClassVar[int] + pure_use_threads: ClassVar[bool] + def type_values(cls): ... + type: str + parallelism: int + version: int + memory_cost: int + @property + def type_d(self): ... + data: Any + @classmethod + def using( # type: ignore[override] + cls, + type: Incomplete | None = ..., + memory_cost: Incomplete | None = ..., + salt_len: Incomplete | None = ..., + time_cost: Incomplete | None = ..., + digest_size: Incomplete | None = ..., + checksum_size: Incomplete | None = ..., + hash_len: Incomplete | None = ..., + max_threads: Incomplete | None = ..., + **kwds, + ): ... + @classmethod + def identify(cls, hash): ... + @classmethod + def from_string(cls, hash): ... + def __init__( + self, + type: Incomplete | None = ..., + type_d: bool = ..., + version: Incomplete | None = ..., + memory_cost: Incomplete | None = ..., + data: Incomplete | None = ..., + **kwds, + ) -> None: ... + +class _NoBackend(_Argon2Common): + @classmethod + def hash(cls, secret): ... + @classmethod + def verify(cls, secret, hash): ... + @classmethod + def genhash(cls, secret, config): ... + +class _CffiBackend(_Argon2Common): + @classmethod + def hash(cls, secret): ... + @classmethod + def verify(cls, secret, hash): ... + @classmethod + def genhash(cls, secret, config): ... + +class _PureBackend(_Argon2Common): ... 
+ +class argon2(_NoBackend, _Argon2Common): # type: ignore[misc] + backends: ClassVar[tuple[str, ...]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/bcrypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/bcrypt.pyi new file mode 100644 index 00000000..426f1472 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/bcrypt.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar + +import passlib.utils.handlers as uh + +class _BcryptCommon(uh.SubclassBackendMixin, uh.TruncateMixin, uh.HasManyIdents, uh.HasRounds, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + checksum_size: ClassVar[int] + checksum_chars: ClassVar[str] + default_ident: ClassVar[str] + ident_values: ClassVar[tuple[str, ...]] + ident_aliases: ClassVar[dict[str, str]] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + final_salt_chars: ClassVar[str] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + truncate_size: ClassVar[int | None] + @classmethod + def from_string(cls, hash): ... + @classmethod + def needs_update(cls, hash, **kwds): ... + @classmethod + def normhash(cls, hash): ... + +class _NoBackend(_BcryptCommon): ... +class _BcryptBackend(_BcryptCommon): ... +class _BcryptorBackend(_BcryptCommon): ... +class _PyBcryptBackend(_BcryptCommon): ... +class _OsCryptBackend(_BcryptCommon): ... +class _BuiltinBackend(_BcryptCommon): ... + +class bcrypt(_NoBackend, _BcryptCommon): # type: ignore[misc] + backends: ClassVar[tuple[str, ...]] + +class _wrapped_bcrypt(bcrypt): + truncate_size: ClassVar[None] + +class bcrypt_sha256(_wrapped_bcrypt): + name: ClassVar[str] + ident_values: ClassVar[tuple[str, ...]] + ident_aliases: ClassVar[dict[str, str]] + default_ident: ClassVar[str] + version: ClassVar[int] + @classmethod + def using(cls, version: Incomplete | None = ..., **kwds): ... # type: ignore[override] + prefix: Any + @classmethod + def identify(cls, hash): ... + @classmethod + def from_string(cls, hash): ... + def __init__(self, version: Incomplete | None = ..., **kwds) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/cisco.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/cisco.pyi new file mode 100644 index 00000000..2820a67f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/cisco.pyi @@ -0,0 +1,27 @@ +from typing import ClassVar + +import passlib.utils.handlers as uh + +class cisco_pix(uh.HasUserContext, uh.StaticHandler): + name: ClassVar[str] + truncate_size: ClassVar[int] + truncate_error: ClassVar[bool] + truncate_verify_reject: ClassVar[bool] + checksum_size: ClassVar[int] + checksum_chars: ClassVar[str] + +class cisco_asa(cisco_pix): ... + +class cisco_type7(uh.GenericHandler): + name: ClassVar[str] + checksum_chars: ClassVar[str] + min_salt_value: ClassVar[int] + max_salt_value: ClassVar[int] + @classmethod + def using(cls, salt: int | None = ..., **kwds): ... # type: ignore[override] + @classmethod + def from_string(cls, hash): ... + salt: int + def __init__(self, salt: int | None = ..., **kwds) -> None: ... 
+ @classmethod + def decode(cls, hash, encoding: str = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/des_crypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/des_crypt.pyi new file mode 100644 index 00000000..a8a10abb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/des_crypt.pyi @@ -0,0 +1,52 @@ +from typing import ClassVar + +import passlib.utils.handlers as uh + +class des_crypt(uh.TruncateMixin, uh.HasManyBackends, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + truncate_size: ClassVar[int] + @classmethod + def from_string(cls, hash): ... + backends: ClassVar[tuple[str, ...]] + +class bsdi_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + checksum_size: ClassVar[int] + checksum_chars: ClassVar[str] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + @classmethod + def from_string(cls, hash): ... + @classmethod + def using(cls, **kwds): ... + backends: ClassVar[tuple[str, ...]] + +class bigcrypt(uh.HasSalt, uh.GenericHandler): + name: ClassVar[str] + checksum_chars: ClassVar[str] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + @classmethod + def from_string(cls, hash): ... + +class crypt16(uh.TruncateMixin, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + checksum_size: ClassVar[int] + checksum_chars: ClassVar[str] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + truncate_size: ClassVar[int] + @classmethod + def from_string(cls, hash): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/digests.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/digests.pyi new file mode 100644 index 00000000..c97bdb47 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/digests.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar + +import passlib.utils.handlers as uh + +class HexDigestHash(uh.StaticHandler): + checksum_chars: ClassVar[str] + supported: ClassVar[bool] + +def create_hex_hash(digest, module=..., django_name: Incomplete | None = ..., required: bool = ...): ... + +hex_md4: Any +hex_md5: Any +hex_sha1: Any +hex_sha256: Any +hex_sha512: Any + +class htdigest(uh.MinimalHandler): + name: ClassVar[str] + default_encoding: ClassVar[str] + setting_kwds: ClassVar[tuple[str, ...]] + context_kwds: ClassVar[tuple[str, ...]] + @classmethod + def hash(cls, secret, user, realm, encoding: Incomplete | None = ...): ... # type: ignore[override] + @classmethod + def verify(cls, secret, hash, user, realm, encoding: str = ...): ... # type: ignore[override] + @classmethod + def identify(cls, hash): ... + @classmethod + def genconfig(cls): ... + @classmethod + def genhash(cls, secret, config, user, realm, encoding: Incomplete | None = ...): ... 
# type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/django.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/django.pyi new file mode 100644 index 00000000..2996700e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/django.pyi @@ -0,0 +1,81 @@ +from typing import Any, ClassVar + +import passlib.utils.handlers as uh +from passlib.handlers.bcrypt import _wrapped_bcrypt +from passlib.ifc import DisabledHash + +class DjangoSaltedHash(uh.HasSalt, uh.GenericHandler): + default_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + checksum_chars: ClassVar[str] + @classmethod + def from_string(cls, hash): ... + +class DjangoVariableHash(uh.HasRounds, DjangoSaltedHash): # type: ignore[misc] + min_rounds: ClassVar[int] + @classmethod + def from_string(cls, hash): ... + +class django_salted_sha1(DjangoSaltedHash): + name: ClassVar[str] + django_name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + +class django_salted_md5(DjangoSaltedHash): + name: ClassVar[str] + django_name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + +django_bcrypt: Any + +class django_bcrypt_sha256(_wrapped_bcrypt): + name: ClassVar[str] + django_name: ClassVar[str] + django_prefix: ClassVar[str] + @classmethod + def identify(cls, hash): ... + @classmethod + def from_string(cls, hash): ... + +class django_pbkdf2_sha256(DjangoVariableHash): + name: ClassVar[str] + django_name: ClassVar[str] + ident: ClassVar[str] + min_salt_size: ClassVar[int] + max_rounds: ClassVar[int] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] + default_rounds: ClassVar[int] + +class django_pbkdf2_sha1(django_pbkdf2_sha256): + name: ClassVar[str] + django_name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + default_rounds: ClassVar[int] + +django_argon2: Any + +class django_des_crypt(uh.TruncateMixin, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + django_name: ClassVar[str] + ident: ClassVar[str] + checksum_chars: ClassVar[str] + salt_chars: ClassVar[str] + checksum_size: ClassVar[int] + min_salt_size: ClassVar[int] + default_salt_size: ClassVar[int] + truncate_size: ClassVar[int] + use_duplicate_salt: bool + @classmethod + def from_string(cls, hash): ... + +class django_disabled(DisabledHash, uh.StaticHandler): + name: ClassVar[str] + suffix_length: ClassVar[int] + @classmethod + def identify(cls, hash: str | bytes) -> bool: ... + @classmethod + def verify(cls, secret: str | bytes, hash: str | bytes) -> bool: ... 
# type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/fshp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/fshp.pyi new file mode 100644 index 00000000..373a44a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/fshp.pyi @@ -0,0 +1,26 @@ +from typing import Any, ClassVar + +import passlib.utils.handlers as uh + +class fshp(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + checksum_chars: ClassVar[str] + ident: ClassVar[str] + default_salt_size: ClassVar[int] + max_salt_size: ClassVar[None] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + default_variant: ClassVar[int] + @classmethod + def using(cls, variant: int | str | bytes | None = ..., **kwds): ... # type: ignore[override] + variant: int | None + use_defaults: Any + def __init__(self, variant: int | str | bytes | None = ..., **kwds) -> None: ... + @property + def checksum_alg(self): ... + @property + def checksum_size(self): ... + @classmethod + def from_string(cls, hash): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/ldap_digests.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/ldap_digests.pyi new file mode 100644 index 00000000..a37b5a83 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/ldap_digests.pyi @@ -0,0 +1,81 @@ +from typing import ClassVar + +import passlib.utils.handlers as uh +from passlib.handlers.misc import plaintext +from passlib.utils.handlers import PrefixWrapper + +__all__ = [ + "ldap_plaintext", + "ldap_md5", + "ldap_sha1", + "ldap_salted_md5", + "ldap_salted_sha1", + "ldap_salted_sha256", + "ldap_salted_sha512", + "ldap_des_crypt", + "ldap_bsdi_crypt", + "ldap_md5_crypt", + "ldap_sha1_crypt", + "ldap_bcrypt", + "ldap_sha256_crypt", + "ldap_sha512_crypt", +] + +class _Base64DigestHelper(uh.StaticHandler): + ident: ClassVar[str | None] + checksum_chars: ClassVar[str] + +class _SaltedBase64DigestHelper(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + checksum_chars: ClassVar[str] + ident: ClassVar[str | None] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + default_salt_size: ClassVar[int] + @classmethod + def from_string(cls, hash): ... + +class ldap_md5(_Base64DigestHelper): + name: ClassVar[str] + ident: ClassVar[str] + +class ldap_sha1(_Base64DigestHelper): + name: ClassVar[str] + ident: ClassVar[str] + +class ldap_salted_md5(_SaltedBase64DigestHelper): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + +class ldap_salted_sha1(_SaltedBase64DigestHelper): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + +class ldap_salted_sha256(_SaltedBase64DigestHelper): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + default_salt_size: ClassVar[int] + +class ldap_salted_sha512(_SaltedBase64DigestHelper): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + default_salt_size: ClassVar[int] + +class ldap_plaintext(plaintext): + name: ClassVar[str] + @classmethod + def genconfig(cls): ... + @classmethod + def identify(cls, hash): ... 
+ +# Dynamically created +ldap_sha512_crypt: PrefixWrapper +ldap_sha256_crypt: PrefixWrapper +ldap_sha1_crypt: PrefixWrapper +ldap_bcrypt: PrefixWrapper +ldap_md5_crypt: PrefixWrapper +ldap_bsdi_crypt: PrefixWrapper +ldap_des_crypt: PrefixWrapper diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/md5_crypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/md5_crypt.pyi new file mode 100644 index 00000000..53bf4afa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/md5_crypt.pyi @@ -0,0 +1,20 @@ +from typing import ClassVar + +import passlib.utils.handlers as uh + +class _MD5_Common(uh.HasSalt, uh.GenericHandler): + checksum_size: ClassVar[int] + checksum_chars: ClassVar[str] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + @classmethod + def from_string(cls, hash): ... + +class md5_crypt(uh.HasManyBackends, _MD5_Common): + name: ClassVar[str] + ident: ClassVar[str] + backends: ClassVar[tuple[str, ...]] + +class apr_md5_crypt(_MD5_Common): + name: ClassVar[str] + ident: ClassVar[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/misc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/misc.pyi new file mode 100644 index 00000000..3c68095b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/misc.pyi @@ -0,0 +1,50 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar + +import passlib.utils.handlers as uh +from passlib.ifc import DisabledHash + +class unix_fallback(DisabledHash, uh.StaticHandler): + name: ClassVar[str] + @classmethod + def identify(cls, hash: str | bytes) -> bool: ... + enable_wildcard: Any + def __init__(self, enable_wildcard: bool = ..., **kwds) -> None: ... + @classmethod + def verify(cls, secret: str | bytes, hash: str | bytes, enable_wildcard: bool = ...): ... # type: ignore[override] + +class unix_disabled(DisabledHash, uh.MinimalHandler): + name: ClassVar[str] + default_marker: ClassVar[str] + setting_kwds: ClassVar[tuple[str, ...]] + context_kwds: ClassVar[tuple[str, ...]] + @classmethod + def using(cls, marker: Incomplete | None = ..., **kwds): ... # type: ignore[override] + @classmethod + def identify(cls, hash: str | bytes) -> bool: ... + @classmethod + def verify(cls, secret: str | bytes, hash: str | bytes) -> bool: ... # type: ignore[override] + @classmethod + def hash(cls, secret: str | bytes, **kwds) -> str: ... + @classmethod + def genhash(cls, secret: str | bytes, config, marker: Incomplete | None = ...): ... # type: ignore[override] + @classmethod + def disable(cls, hash: str | bytes | None = ...) -> str: ... + @classmethod + def enable(cls, hash: str | bytes) -> str: ... + +class plaintext(uh.MinimalHandler): + name: ClassVar[str] + default_encoding: ClassVar[str] + setting_kwds: ClassVar[tuple[str, ...]] + context_kwds: ClassVar[tuple[str, ...]] + @classmethod + def identify(cls, hash: str | bytes): ... + @classmethod + def hash(cls, secret: str | bytes, encoding: Incomplete | None = ...): ... # type: ignore[override] + @classmethod + def verify(cls, secret: str | bytes, hash: str | bytes, encoding: str | None = ...): ... # type: ignore[override] + @classmethod + def genconfig(cls): ... 
+ @classmethod + def genhash(cls, secret, config, encoding: str | None = ...): ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/mssql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/mssql.pyi new file mode 100644 index 00000000..3bf6fc43 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/mssql.pyi @@ -0,0 +1,21 @@ +from typing import ClassVar + +import passlib.utils.handlers as uh + +class mssql2000(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + name: ClassVar[str] + checksum_size: ClassVar[int] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + @classmethod + def from_string(cls, hash): ... + @classmethod + def verify(cls, secret: str | bytes, hash: str | bytes) -> bool: ... # type: ignore[override] + +class mssql2005(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + name: ClassVar[str] + checksum_size: ClassVar[int] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + @classmethod + def from_string(cls, hash): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/mysql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/mysql.pyi new file mode 100644 index 00000000..ae08bdb2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/mysql.pyi @@ -0,0 +1,15 @@ +from typing import ClassVar + +import passlib.utils.handlers as uh + +__all__ = ["mysql323"] + +class mysql323(uh.StaticHandler): + name: ClassVar[str] + checksum_size: ClassVar[int] + checksum_chars: ClassVar[str] + +class mysql41(uh.StaticHandler): + name: ClassVar[str] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/oracle.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/oracle.pyi new file mode 100644 index 00000000..6b9931e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/oracle.pyi @@ -0,0 +1,20 @@ +from typing import ClassVar + +import passlib.utils.handlers as uh + +__all__: list[str] = [] + +class oracle10(uh.HasUserContext, uh.StaticHandler): + name: ClassVar[str] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] + +class oracle11(uh.HasSalt, uh.GenericHandler): + name: ClassVar[str] + checksum_size: ClassVar[int] + checksum_chars: ClassVar[str] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + @classmethod + def from_string(cls, hash): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/pbkdf2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/pbkdf2.pyi new file mode 100644 index 00000000..280849de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/pbkdf2.pyi @@ -0,0 +1,89 @@ +from typing import ClassVar +from typing_extensions import Self + +import passlib.utils.handlers as uh +from passlib.utils.handlers import PrefixWrapper + +class Pbkdf2DigestHandler(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): # type: ignore[misc] + checksum_chars: ClassVar[str] + default_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + @classmethod + def from_string(cls, hash: str | bytes) -> Self: ... # type: ignore[override] + +# dynamically created by create_pbkdf2_hash() +class pbkdf2_sha1(Pbkdf2DigestHandler): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + encoded_checksum_size: ClassVar[int] + +# dynamically created by create_pbkdf2_hash() +class pbkdf2_sha256(Pbkdf2DigestHandler): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + encoded_checksum_size: ClassVar[int] + +# dynamically created by create_pbkdf2_hash() +class pbkdf2_sha512(Pbkdf2DigestHandler): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + encoded_checksum_size: ClassVar[int] + +ldap_pbkdf2_sha1: PrefixWrapper +ldap_pbkdf2_sha256: PrefixWrapper +ldap_pbkdf2_sha512: PrefixWrapper + +class cta_pbkdf2_sha1(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + default_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + @classmethod + def from_string(cls, hash): ... + +class dlitz_pbkdf2_sha1(uh.HasRounds, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + ident: ClassVar[str] + default_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + @classmethod + def from_string(cls, hash): ... + +class atlassian_pbkdf2_sha1(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + @classmethod + def from_string(cls, hash): ... + +class grub_pbkdf2_sha512(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + default_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + @classmethod + def from_string(cls, hash): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/phpass.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/phpass.pyi new file mode 100644 index 00000000..02419bdc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/phpass.pyi @@ -0,0 +1,20 @@ +from typing import ClassVar +from typing_extensions import Self + +import passlib.utils.handlers as uh + +class phpass(uh.HasManyIdents, uh.HasRounds, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + checksum_chars: ClassVar[str] + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + default_ident: ClassVar[str] + ident_values: ClassVar[tuple[str, ...]] + ident_aliases: ClassVar[dict[str, str]] + @classmethod + def from_string(cls, hash: str | bytes) -> Self: ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/postgres.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/postgres.pyi new file mode 100644 index 00000000..99a515e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/postgres.pyi @@ -0,0 +1,8 @@ +from typing import ClassVar + +import passlib.utils.handlers as uh + +class postgres_md5(uh.HasUserContext, uh.StaticHandler): + name: ClassVar[str] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/roundup.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/roundup.pyi new file mode 100644 index 00000000..53ca720b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/roundup.pyi @@ -0,0 +1,5 @@ +from typing import Any + +roundup_plaintext: Any +ldap_hex_md5: Any +ldap_hex_sha1: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/scram.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/scram.pyi new file mode 100644 index 00000000..80e4586c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/scram.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from typing import ClassVar + +import passlib.utils.handlers as uh + +class scram(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + ident: ClassVar[str] + default_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + default_algs: ClassVar[list[str]] + algs: Incomplete | None + @classmethod + def extract_digest_info(cls, hash, alg): ... + @classmethod + def extract_digest_algs(cls, hash, format: str = ...): ... + @classmethod + def derive_digest(cls, password, salt, rounds, alg): ... + @classmethod + def from_string(cls, hash): ... 
+ @classmethod + def using(cls, default_algs: Incomplete | None = ..., algs: Incomplete | None = ..., **kwds): ... # type: ignore[override] + def __init__(self, algs: Incomplete | None = ..., **kwds) -> None: ... + @classmethod + def verify(cls, secret, hash, full: bool = ...): ... # type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/scrypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/scrypt.pyi new file mode 100644 index 00000000..1f8ffda1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/scrypt.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from typing import ClassVar + +import passlib.utils.handlers as uh + +class scrypt(uh.ParallelismMixin, uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.HasManyIdents, uh.GenericHandler): # type: ignore[misc] + backends: ClassVar[tuple[str, ...]] + name: ClassVar[str] + checksum_size: ClassVar[int] + default_ident: ClassVar[str] + ident_values: ClassVar[tuple[str, ...]] + default_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + parallelism: int + block_size: int + @classmethod + def using(cls, block_size: Incomplete | None = ..., **kwds): ... # type: ignore[override] + @classmethod + def from_string(cls, hash): ... + @classmethod + def parse(cls, hash): ... + def to_string(self): ... + def __init__(self, block_size: Incomplete | None = ..., **kwds) -> None: ... + @classmethod + def get_backend(cls): ... + @classmethod + def has_backend(cls, name: str = ...): ... + @classmethod + def set_backend(cls, name: str = ..., dryrun: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/sha1_crypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/sha1_crypt.pyi new file mode 100644 index 00000000..14ea298d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/sha1_crypt.pyi @@ -0,0 +1,23 @@ +from typing import Any, ClassVar +from typing_extensions import Self + +import passlib.utils.handlers as uh + +log: Any + +class sha1_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + checksum_chars: ClassVar[str] + default_salt_size: ClassVar[int] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + @classmethod + def from_string(cls, hash: str | bytes) -> Self: ... # type: ignore[override] + def to_string(self, config: bool = ...) -> str: ... 
+ backends: ClassVar[tuple[str, ...]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/sha2_crypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/sha2_crypt.pyi new file mode 100644 index 00000000..82ba37af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/sha2_crypt.pyi @@ -0,0 +1,29 @@ +from typing import ClassVar +from typing_extensions import Self + +import passlib.utils.handlers as uh + +class _SHA2_Common(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + checksum_chars: ClassVar[str] + max_salt_size: ClassVar[int] + salt_chars: ClassVar[str] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + implicit_rounds: bool + def __init__(self, implicit_rounds: bool | None = ..., **kwds) -> None: ... + @classmethod + def from_string(cls, hash: str | bytes) -> Self: ... # type: ignore[override] + backends: ClassVar[tuple[str, ...]] + +class sha256_crypt(_SHA2_Common): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + default_rounds: ClassVar[int] + +class sha512_crypt(_SHA2_Common): + name: ClassVar[str] + ident: ClassVar[str] + checksum_size: ClassVar[int] + default_rounds: ClassVar[int] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/sun_md5_crypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/sun_md5_crypt.pyi new file mode 100644 index 00000000..43cb4a06 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/sun_md5_crypt.pyi @@ -0,0 +1,24 @@ +from typing import ClassVar +from typing_extensions import Self + +import passlib.utils.handlers as uh + +class sun_md5_crypt(uh.HasRounds, uh.HasSalt, uh.GenericHandler): # type: ignore[misc] + name: ClassVar[str] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] + default_salt_size: ClassVar[int] + max_salt_size: ClassVar[int | None] + salt_chars: ClassVar[str] + default_rounds: ClassVar[int] + min_rounds: ClassVar[int] + max_rounds: ClassVar[int] + rounds_cost: ClassVar[str] + ident_values: ClassVar[tuple[str, ...]] + bare_salt: bool + def __init__(self, bare_salt: bool = ..., **kwds) -> None: ... + @classmethod + def identify(cls, hash): ... + @classmethod + def from_string(cls, hash: str | bytes) -> Self: ... # type: ignore[override] + def to_string(self, _withchk: bool = ...) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/windows.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/windows.pyi new file mode 100644 index 00000000..cc5d6a64 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/handlers/windows.pyi @@ -0,0 +1,37 @@ +from _typeshed import Incomplete +from typing import Any, ClassVar + +import passlib.utils.handlers as uh + +class lmhash(uh.TruncateMixin, uh.HasEncodingContext, uh.StaticHandler): + name: ClassVar[str] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] + truncate_size: ClassVar[int] + @classmethod + def raw(cls, secret, encoding: Incomplete | None = ...): ... 
+ +class nthash(uh.StaticHandler): + name: ClassVar[str] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] + @classmethod + def raw(cls, secret): ... + @classmethod + def raw_nthash(cls, secret, hex: bool = ...): ... + +bsd_nthash: Any + +class msdcc(uh.HasUserContext, uh.StaticHandler): + name: ClassVar[str] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] + @classmethod + def raw(cls, secret, user): ... + +class msdcc2(uh.HasUserContext, uh.StaticHandler): + name: ClassVar[str] + checksum_chars: ClassVar[str] + checksum_size: ClassVar[int] + @classmethod + def raw(cls, secret, user): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/hash.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/hash.pyi new file mode 100644 index 00000000..b71311a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/hash.pyi @@ -0,0 +1,75 @@ +from passlib.handlers.argon2 import argon2 as argon2 +from passlib.handlers.bcrypt import bcrypt as bcrypt, bcrypt_sha256 as bcrypt_sha256 +from passlib.handlers.cisco import cisco_asa as cisco_asa, cisco_pix as cisco_pix, cisco_type7 as cisco_type7 +from passlib.handlers.des_crypt import bigcrypt as bigcrypt, bsdi_crypt as bsdi_crypt, crypt16 as crypt16, des_crypt as des_crypt +from passlib.handlers.digests import ( + hex_md4 as hex_md4, + hex_md5 as hex_md5, + hex_sha1 as hex_sha1, + hex_sha256 as hex_sha256, + hex_sha512 as hex_sha512, + htdigest as htdigest, +) +from passlib.handlers.django import ( + django_bcrypt as django_bcrypt, + django_bcrypt_sha256 as django_bcrypt_sha256, + django_des_crypt as django_des_crypt, + django_disabled as django_disabled, + django_pbkdf2_sha1 as django_pbkdf2_sha1, + django_pbkdf2_sha256 as django_pbkdf2_sha256, + django_salted_md5 as django_salted_md5, + django_salted_sha1 as django_salted_sha1, +) +from passlib.handlers.fshp import fshp as fshp +from passlib.handlers.ldap_digests import ( + ldap_bcrypt as ldap_bcrypt, + ldap_bsdi_crypt as ldap_bsdi_crypt, + ldap_des_crypt as ldap_des_crypt, + ldap_md5 as ldap_md5, + ldap_md5_crypt as ldap_md5_crypt, + ldap_plaintext as ldap_plaintext, + ldap_salted_md5 as ldap_salted_md5, + ldap_salted_sha1 as ldap_salted_sha1, + ldap_salted_sha256 as ldap_salted_sha256, + ldap_salted_sha512 as ldap_salted_sha512, + ldap_sha1 as ldap_sha1, + ldap_sha1_crypt as ldap_sha1_crypt, + ldap_sha256_crypt as ldap_sha256_crypt, + ldap_sha512_crypt as ldap_sha512_crypt, +) +from passlib.handlers.md5_crypt import apr_md5_crypt as apr_md5_crypt, md5_crypt as md5_crypt +from passlib.handlers.misc import plaintext as plaintext, unix_disabled as unix_disabled, unix_fallback as unix_fallback +from passlib.handlers.mssql import mssql2000 as mssql2000, mssql2005 as mssql2005 +from passlib.handlers.mysql import mysql41 as mysql41, mysql323 as mysql323 +from passlib.handlers.oracle import oracle10 as oracle10, oracle11 as oracle11 +from passlib.handlers.pbkdf2 import ( + atlassian_pbkdf2_sha1 as atlassian_pbkdf2_sha1, + cta_pbkdf2_sha1 as cta_pbkdf2_sha1, + dlitz_pbkdf2_sha1 as dlitz_pbkdf2_sha1, + grub_pbkdf2_sha512 as grub_pbkdf2_sha512, + ldap_pbkdf2_sha1 as ldap_pbkdf2_sha1, + ldap_pbkdf2_sha256 as ldap_pbkdf2_sha256, + ldap_pbkdf2_sha512 as ldap_pbkdf2_sha512, + pbkdf2_sha1 as pbkdf2_sha1, + pbkdf2_sha256 as pbkdf2_sha256, + pbkdf2_sha512 as pbkdf2_sha512, +) +from passlib.handlers.phpass import phpass as phpass +from 
passlib.handlers.postgres import postgres_md5 as postgres_md5 +from passlib.handlers.roundup import ( + ldap_hex_md5 as ldap_hex_md5, + ldap_hex_sha1 as ldap_hex_sha1, + roundup_plaintext as roundup_plaintext, +) +from passlib.handlers.scram import scram as scram +from passlib.handlers.scrypt import scrypt as scrypt +from passlib.handlers.sha1_crypt import sha1_crypt as sha1_crypt +from passlib.handlers.sha2_crypt import sha256_crypt as sha256_crypt, sha512_crypt as sha512_crypt +from passlib.handlers.sun_md5_crypt import sun_md5_crypt as sun_md5_crypt +from passlib.handlers.windows import ( + bsd_nthash as bsd_nthash, + lmhash as lmhash, + msdcc as msdcc, + msdcc2 as msdcc2, + nthash as nthash, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/hosts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/hosts.pyi new file mode 100644 index 00000000..732e0210 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/hosts.pyi @@ -0,0 +1,11 @@ +from typing import Any + +from passlib.context import CryptContext + +linux_context: Any +linux2_context: Any +freebsd_context: Any +openbsd_context: Any +netbsd_context: Any +# Only exists if crypt is present +host_context: CryptContext diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ifc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ifc.pyi new file mode 100644 index 00000000..aec4570c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/ifc.pyi @@ -0,0 +1,37 @@ +from abc import ABCMeta, abstractmethod +from typing import Any, ClassVar +from typing_extensions import Literal, Self + +class PasswordHash(metaclass=ABCMeta): + is_disabled: ClassVar[bool] + truncate_size: ClassVar[int | None] + truncate_error: ClassVar[bool] + truncate_verify_reject: ClassVar[bool] + @classmethod + @abstractmethod + def hash(cls, secret: str | bytes, **setting_and_context_kwds) -> str: ... + @classmethod + def encrypt(cls, secret: str | bytes, **kwds) -> str: ... + @classmethod + @abstractmethod + def verify(cls, secret: str | bytes, hash: str | bytes, **context_kwds): ... + @classmethod + @abstractmethod + def using(cls, relaxed: bool = ..., **kwds: Any) -> type[Self]: ... + @classmethod + def needs_update(cls, hash: str, secret: str | bytes | None = ...) -> bool: ... + @classmethod + @abstractmethod + def identify(cls, hash: str | bytes) -> bool: ... + @classmethod + def genconfig(cls, **setting_kwds: Any) -> str: ... + @classmethod + def genhash(cls, secret: str | bytes, config: str, **context: Any) -> str: ... + deprecated: bool + +class DisabledHash(PasswordHash, metaclass=ABCMeta): + is_disabled: ClassVar[Literal[True]] + @classmethod + def disable(cls, hash: str | None = ...) -> str: ... + @classmethod + def enable(cls, hash: str) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/pwd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/pwd.pyi new file mode 100644 index 00000000..42e01ae6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/pwd.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete +from abc import abstractmethod +from collections.abc import MutableMapping +from typing import Any + +class SequenceGenerator: + length: Any + requested_entropy: str + rng: Any + @property + @abstractmethod + def symbol_count(self) -> int: ... + def __init__( + self, entropy: Incomplete | None = ..., length: Incomplete | None = ..., rng: Incomplete | None = ..., **kwds + ) -> None: ... + @property + def entropy_per_symbol(self) -> float: ... + @property + def entropy(self) -> float: ... + def __next__(self) -> None: ... + def __call__(self, returns: Incomplete | None = ...): ... + def __iter__(self): ... + +default_charsets: Any + +class WordGenerator(SequenceGenerator): + charset: str + chars: Any + def __init__(self, chars: Incomplete | None = ..., charset: Incomplete | None = ..., **kwds) -> None: ... + @property + def symbol_count(self): ... + def __next__(self): ... + +def genword(entropy: Incomplete | None = ..., length: Incomplete | None = ..., returns: Incomplete | None = ..., **kwds): ... + +class WordsetDict(MutableMapping[Any, Any]): + paths: Any + def __init__(self, *args, **kwds) -> None: ... + def __getitem__(self, key): ... + def set_path(self, key, path) -> None: ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __contains__(self, key): ... + +default_wordsets: Any + +class PhraseGenerator(SequenceGenerator): + wordset: str + words: Any + sep: str + def __init__( + self, wordset: Incomplete | None = ..., words: Incomplete | None = ..., sep: Incomplete | None = ..., **kwds + ) -> None: ... + @property + def symbol_count(self): ... + def __next__(self): ... + +def genphrase(entropy: Incomplete | None = ..., length: Incomplete | None = ..., returns: Incomplete | None = ..., **kwds): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/registry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/registry.pyi new file mode 100644 index 00000000..abfb25ef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/registry.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +class _PasslibRegistryProxy: + __name__: str + __package__: Any + def __getattr__(self, attr: str): ... + def __setattr__(self, attr: str, value) -> None: ... + def __dir__(self): ... + +def register_crypt_handler_path(name, path) -> None: ... +def register_crypt_handler(handler, force: bool = ..., _attr: Incomplete | None = ...) -> None: ... +def get_crypt_handler(name, default=...): ... +def list_crypt_handlers(loaded_only: bool = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/totp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/totp.pyi new file mode 100644 index 00000000..7e5d23b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/totp.pyi @@ -0,0 +1,130 @@ +from _typeshed import Incomplete +from typing import Any + +from passlib.exc import ( + InvalidTokenError as InvalidTokenError, + MalformedTokenError as MalformedTokenError, + TokenError as TokenError, + UsedTokenError as UsedTokenError, +) +from passlib.utils import SequenceMixin + +class AppWallet: + salt_size: int + encrypt_cost: int + default_tag: Any + def __init__( + self, + secrets: Incomplete | None = ..., + default_tag: Incomplete | None = ..., + encrypt_cost: Incomplete | None = ..., + secrets_path: Incomplete | None = ..., + ) -> None: ... + @property + def has_secrets(self): ... + def get_secret(self, tag): ... + def encrypt_key(self, key): ... + def decrypt_key(self, enckey): ... + +class TOTP: + min_json_version: int + json_version: int + wallet: Any + now: Any + digits: int + alg: str + label: Any + issuer: Any + period: int + changed: bool + @classmethod + def using( + cls, + digits: Incomplete | None = ..., + alg: Incomplete | None = ..., + period: Incomplete | None = ..., + issuer: Incomplete | None = ..., + wallet: Incomplete | None = ..., + now: Incomplete | None = ..., + **kwds, + ): ... + @classmethod + def new(cls, **kwds): ... + def __init__( + self, + key: Incomplete | None = ..., + format: str = ..., + new: bool = ..., + digits: Incomplete | None = ..., + alg: Incomplete | None = ..., + size: Incomplete | None = ..., + period: Incomplete | None = ..., + label: Incomplete | None = ..., + issuer: Incomplete | None = ..., + changed: bool = ..., + **kwds, + ) -> None: ... + @property + def key(self): ... + @key.setter + def key(self, value) -> None: ... + @property + def encrypted_key(self): ... + @encrypted_key.setter + def encrypted_key(self, value) -> None: ... + @property + def hex_key(self): ... + @property + def base32_key(self): ... + def pretty_key(self, format: str = ..., sep: str = ...): ... + @classmethod + def normalize_time(cls, time): ... + def normalize_token(self_or_cls, token): ... + def generate(self, time: Incomplete | None = ...): ... + @classmethod + def verify(cls, token, source, **kwds): ... + def match( + self, token, time: Incomplete | None = ..., window: int = ..., skew: int = ..., last_counter: Incomplete | None = ... + ): ... + @classmethod + def from_source(cls, source): ... + @classmethod + def from_uri(cls, uri): ... + def to_uri(self, label: Incomplete | None = ..., issuer: Incomplete | None = ...): ... + @classmethod + def from_json(cls, source): ... + def to_json(self, encrypt: Incomplete | None = ...): ... + @classmethod + def from_dict(cls, source): ... + def to_dict(self, encrypt: Incomplete | None = ...): ... + +class TotpToken(SequenceMixin): + totp: Any + token: Any + counter: Any + def __init__(self, totp, token, counter) -> None: ... + @property + def start_time(self): ... + @property + def expire_time(self): ... + @property + def remaining(self): ... + @property + def valid(self): ... + +class TotpMatch(SequenceMixin): + totp: Any + counter: int + time: int + window: int + def __init__(self, totp, counter, time, window: int = ...) -> None: ... + @property + def expected_counter(self): ... + @property + def skipped(self): ... 
+ @property + def expire_time(self): ... + @property + def cache_seconds(self): ... + @property + def cache_time(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/__init__.pyi new file mode 100644 index 00000000..5e8fd218 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/__init__.pyi @@ -0,0 +1,75 @@ +import timeit +from _typeshed import Incomplete +from collections.abc import Generator +from hmac import compare_digest +from typing import Any + +from passlib.utils.compat import JYTHON as JYTHON + +__all__ = [ + "JYTHON", + "sys_bits", + "unix_crypt_schemes", + "rounds_cost_values", + "consteq", + "saslprep", + "xor_bytes", + "render_bytes", + "is_same_codec", + "is_ascii_safe", + "to_bytes", + "to_unicode", + "to_native_str", + "has_crypt", + "test_crypt", + "safe_crypt", + "tick", + "rng", + "getrandbytes", + "getrandstr", + "generate_password", + "is_crypt_handler", + "is_crypt_context", + "has_rounds_info", + "has_salt_info", +] + +sys_bits: Any +unix_crypt_schemes: list[str] +rounds_cost_values: Any + +class SequenceMixin: + def __getitem__(self, idx): ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + +consteq = compare_digest + +def str_consteq(left, right): ... +def saslprep(source, param: str = ...): ... +def render_bytes(source, *args): ... +def xor_bytes(left, right): ... +def is_same_codec(left, right): ... +def is_ascii_safe(source): ... +def to_bytes(source, encoding: str = ..., param: str = ..., source_encoding: Incomplete | None = ...): ... +def to_unicode(source, encoding: str = ..., param: str = ...): ... +def to_native_str(source, encoding: str = ..., param: str = ...): ... + +has_crypt: bool + +def safe_crypt(secret, hash) -> None: ... +def test_crypt(secret, hash): ... + +timer = timeit.default_timer +tick = timer +rng: Any + +def getrandbytes(rng, count) -> Generator[None, None, Any]: ... +def getrandstr(rng, charset, count) -> Generator[None, None, Any]: ... +def generate_password(size: int = ..., charset=...): ... +def is_crypt_handler(obj): ... +def is_crypt_context(obj): ... +def has_rounds_info(handler): ... +def has_salt_info(handler): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/binary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/binary.pyi new file mode 100644 index 00000000..82928d5c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/binary.pyi @@ -0,0 +1,51 @@ +from _typeshed import Incomplete +from typing import Any + +BASE64_CHARS: Any +AB64_CHARS: Any +HASH64_CHARS: Any +BCRYPT_CHARS: Any +PADDED_BASE64_CHARS: Any +HEX_CHARS: Any +UPPER_HEX_CHARS: Any +LOWER_HEX_CHARS: Any +ALL_BYTE_VALUES: Any + +def compile_byte_translation(mapping, source: Incomplete | None = ...): ... +def b64s_encode(data): ... +def b64s_decode(data): ... +def ab64_encode(data): ... +def ab64_decode(data): ... +def b32encode(source): ... +def b32decode(source): ... + +class Base64Engine: + bytemap: Any + big: Any + def __init__(self, charmap, big: bool = ...) -> None: ... + @property + def charmap(self): ... + def encode_bytes(self, source): ... + def decode_bytes(self, source): ... 
+ def check_repair_unused(self, source): ... + def repair_unused(self, source): ... + def encode_transposed_bytes(self, source, offsets): ... + def decode_transposed_bytes(self, source, offsets): ... + def decode_int6(self, source): ... + def decode_int12(self, source): ... + def decode_int24(self, source): ... + def decode_int30(self, source): ... + def decode_int64(self, source): ... + def encode_int6(self, value): ... + def encode_int12(self, value): ... + def encode_int24(self, value): ... + def encode_int30(self, value): ... + def encode_int64(self, value): ... + +class LazyBase64Engine(Base64Engine): + def __init__(self, *args, **kwds) -> None: ... + def __getattribute__(self, attr: str): ... + +h64: Any +h64big: Any +bcrypt64: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/compat/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/compat/__init__.pyi new file mode 100644 index 00000000..808c7f8d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/compat/__init__.pyi @@ -0,0 +1,8 @@ +from typing_extensions import Literal + +PY2: Literal[False] +PY3: Literal[True] +PY26: Literal[False] +JYTHON: bool +PYPY: bool +PYSTON: bool diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/compat/_ordered_dict.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/compat/_ordered_dict.pyi new file mode 100644 index 00000000..7fafb44f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/compat/_ordered_dict.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete +from collections.abc import Generator +from typing import Any + +class OrderedDict(dict[Any, Any]): + def __init__(self, *args, **kwds) -> None: ... + def __setitem__(self, key, value, dict_setitem=...) -> None: ... + def __delitem__(self, key, dict_delitem=...) -> None: ... + def __iter__(self): ... + def __reversed__(self) -> Generator[Any, None, None]: ... + def clear(self) -> None: ... + def popitem(self, last: bool = ...): ... + def keys(self): ... + def values(self): ... + def items(self): ... + def iterkeys(self): ... + def itervalues(self) -> Generator[Any, None, None]: ... + def iteritems(self) -> Generator[Any, None, None]: ... + def update(*args, **kwds) -> None: ... # type: ignore[override] + def pop(self, key, default=...): ... + def setdefault(self, key, default: Incomplete | None = ...): ... + def __reduce__(self): ... + def copy(self): ... + @classmethod + def fromkeys(cls, iterable, value: Incomplete | None = ...): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/decor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/decor.pyi new file mode 100644 index 00000000..4c7eda38 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/decor.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete +from typing import Any + +class classproperty: + im_func: Any + def __init__(self, func) -> None: ... + def __get__(self, obj, cls): ... + @property + def __func__(self): ... + +class hybrid_method: + func: Any + def __init__(self, func) -> None: ... 
+ def __get__(self, obj, cls): ... + +def memoize_single_value(func): ... + +class memoized_property: + __func__: Any + __name__: Any + __doc__: Any + def __init__(self, func) -> None: ... + def __get__(self, obj, cls): ... + def clear_cache(self, obj) -> None: ... + def peek_cache(self, obj, default: Incomplete | None = ...): ... + +def deprecated_function( + msg: Incomplete | None = ..., + deprecated: Incomplete | None = ..., + removed: Incomplete | None = ..., + updoc: bool = ..., + replacement: Incomplete | None = ..., + _is_method: bool = ..., + func_module: Incomplete | None = ..., +): ... +def deprecated_method( + msg: Incomplete | None = ..., + deprecated: Incomplete | None = ..., + removed: Incomplete | None = ..., + updoc: bool = ..., + replacement: Incomplete | None = ..., +): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/des.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/des.pyi new file mode 100644 index 00000000..fe9982ff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/des.pyi @@ -0,0 +1,8 @@ +from passlib.crypto.des import ( + des_encrypt_block as des_encrypt_block, + des_encrypt_int_block as des_encrypt_int_block, + expand_des_key as expand_des_key, +) +from passlib.utils.decor import deprecated_function as deprecated_function + +def mdes_encrypt_int_block(key, input, salt: int = ..., rounds: int = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/handlers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/handlers.pyi new file mode 100644 index 00000000..89ede12f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/handlers.pyi @@ -0,0 +1,181 @@ +import abc +from _typeshed import Incomplete +from typing import Any, ClassVar +from typing_extensions import Self + +from passlib.ifc import PasswordHash +from passlib.utils.binary import BASE64_CHARS, HASH64_CHARS, LOWER_HEX_CHARS, PADDED_BASE64_CHARS, UPPER_HEX_CHARS + +H64_CHARS = HASH64_CHARS +B64_CHARS = BASE64_CHARS +PADDED_B64_CHARS = PADDED_BASE64_CHARS +UC_HEX_CHARS = UPPER_HEX_CHARS +LC_HEX_CHARS = LOWER_HEX_CHARS + +def parse_mc2(hash, prefix, sep=..., handler: Incomplete | None = ...): ... +def parse_mc3( + hash, prefix, sep=..., rounds_base: int = ..., default_rounds: Incomplete | None = ..., handler: Incomplete | None = ... +): ... +def render_mc2(ident, salt, checksum, sep=...): ... +def render_mc3(ident, rounds, salt, checksum, sep=..., rounds_base: int = ...): ... + +class MinimalHandler(PasswordHash, metaclass=abc.ABCMeta): + @classmethod + def using(cls, relaxed: bool = ...) -> type[Self]: ... # type: ignore[override] + +class TruncateMixin(MinimalHandler, metaclass=abc.ABCMeta): + truncate_error: ClassVar[bool] + truncate_verify_reject: ClassVar[bool] + @classmethod + def using(cls, truncate_error: object = ..., *, relaxed: bool = ...) -> type[Self]: ... # type: ignore[override] + +class GenericHandler(MinimalHandler): + setting_kwds: ClassVar[tuple[str, ...]] + context_kwds: ClassVar[tuple[str, ...]] + ident: ClassVar[str | None] + checksum_size: ClassVar[int | None] + checksum_chars: ClassVar[str | None] + checksum: str | None + use_defaults: bool + def __init__(self, checksum: str | None = ..., use_defaults: bool = ...) -> None: ... 
+ @classmethod + def identify(cls, hash: str | bytes) -> bool: ... + @classmethod + def from_string(cls, hash: str | bytes, **context: Any) -> Self: ... + def to_string(self) -> str: ... + @classmethod + def hash(cls, secret: str | bytes, **kwds: Any) -> str: ... + @classmethod + def verify(cls, secret: str | bytes, hash: str | bytes, **context: Any) -> bool: ... + @classmethod + def genconfig(cls, **kwds: Any) -> str: ... + @classmethod + def genhash(cls, secret: str | bytes, config: str, **context: Any) -> str: ... + @classmethod + def needs_update(cls, hash: str | bytes, secret: str | bytes | None = ..., **kwds: Any) -> bool: ... + @classmethod + def parsehash(cls, hash: str | bytes, checksum: bool = ..., sanitize: bool = ...) -> dict[str, Any]: ... + @classmethod + def bitsize(cls, **kwds: Any) -> dict[str, Any]: ... + +class StaticHandler(GenericHandler): + setting_kwds: ClassVar[tuple[str, ...]] + +class HasEncodingContext(GenericHandler): + default_encoding: ClassVar[str] + encoding: str + def __init__(self, encoding: str | None = ..., **kwds) -> None: ... + +class HasUserContext(GenericHandler): + user: Incomplete | None + def __init__(self, user: Incomplete | None = ..., **kwds) -> None: ... + @classmethod + def hash(cls, secret, user: Incomplete | None = ..., **context): ... + @classmethod + def verify(cls, secret, hash, user: Incomplete | None = ..., **context): ... + @classmethod + def genhash(cls, secret, config, user: Incomplete | None = ..., **context): ... + +class HasRawChecksum(GenericHandler): ... + +class HasManyIdents(GenericHandler): + default_ident: ClassVar[str | None] + ident_values: ClassVar[tuple[str, ...] | None] + ident_aliases: ClassVar[dict[str, str] | None] + ident: str # type: ignore[misc] + @classmethod + def using(cls, default_ident: Incomplete | None = ..., ident: Incomplete | None = ..., **kwds): ... # type: ignore[override] + def __init__(self, ident: Incomplete | None = ..., **kwds) -> None: ... + +class HasSalt(GenericHandler): + min_salt_size: ClassVar[int] + max_salt_size: ClassVar[int | None] + salt_chars: ClassVar[str | None] + default_salt_size: ClassVar[int | None] + default_salt_chars: ClassVar[str | None] + salt: str | bytes | None + @classmethod + def using(cls, default_salt_size: int | None = ..., salt_size: int | None = ..., salt: str | bytes | None = ..., **kwds): ... # type: ignore[override] + def __init__(self, salt: str | bytes | None = ..., **kwds) -> None: ... + @classmethod + def bitsize(cls, salt_size: int | None = ..., **kwds): ... + +class HasRawSalt(HasSalt): + salt_chars: ClassVar[bytes] # type: ignore[assignment] + +class HasRounds(GenericHandler): + min_rounds: ClassVar[int] + max_rounds: ClassVar[int | None] + rounds_cost: ClassVar[str] + using_rounds_kwds: ClassVar[tuple[str, ...]] + min_desired_rounds: ClassVar[int | None] + max_desired_rounds: ClassVar[int | None] + default_rounds: ClassVar[int | None] + vary_rounds: ClassVar[Incomplete | None] + rounds: int + @classmethod + def using( # type: ignore[override] + cls, + min_desired_rounds: Incomplete | None = ..., + max_desired_rounds: Incomplete | None = ..., + default_rounds: Incomplete | None = ..., + vary_rounds: Incomplete | None = ..., + min_rounds: Incomplete | None = ..., + max_rounds: Incomplete | None = ..., + rounds: Incomplete | None = ..., + **kwds, + ): ... + def __init__(self, rounds: Incomplete | None = ..., **kwds) -> None: ... + @classmethod + def bitsize(cls, rounds: Incomplete | None = ..., vary_rounds: float = ..., **kwds): ... 
+ +class ParallelismMixin(GenericHandler): + parallelism: int + @classmethod + def using(cls, parallelism: Incomplete | None = ..., **kwds): ... # type: ignore[override] + def __init__(self, parallelism: Incomplete | None = ..., **kwds) -> None: ... + +class BackendMixin(PasswordHash, metaclass=abc.ABCMeta): + backends: ClassVar[tuple[str, ...] | None] + @classmethod + def get_backend(cls): ... + @classmethod + def has_backend(cls, name: str = ...) -> bool: ... + @classmethod + def set_backend(cls, name: str = ..., dryrun: bool = ...): ... + +class SubclassBackendMixin(BackendMixin, metaclass=abc.ABCMeta): ... +class HasManyBackends(BackendMixin, GenericHandler): ... + +class PrefixWrapper: + name: Any + prefix: Any + orig_prefix: Any + __doc__: Any + def __init__( + self, + name, + wrapped, + prefix=..., + orig_prefix=..., + lazy: bool = ..., + doc: Incomplete | None = ..., + ident: Incomplete | None = ..., + ) -> None: ... + @property + def wrapped(self): ... + @property + def ident(self): ... + @property + def ident_values(self): ... + def __dir__(self): ... + def __getattr__(self, attr: str): ... + def __setattr__(self, attr: str, value) -> None: ... + def using(self, **kwds): ... + def needs_update(self, hash, **kwds): ... + def identify(self, hash): ... + def genconfig(self, **kwds): ... + def genhash(self, secret, config, **kwds): ... + def encrypt(self, secret, **kwds): ... + def hash(self, secret, **kwds): ... + def verify(self, secret, hash, **kwds): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/md4.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/md4.pyi new file mode 100644 index 00000000..a316e0c7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/md4.pyi @@ -0,0 +1,3 @@ +from typing import Any + +md4: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/pbkdf2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/pbkdf2.pyi new file mode 100644 index 00000000..2b192ec2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/utils/pbkdf2.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +from passlib.crypto.digest import norm_hash_name as norm_hash_name + +def get_prf(name): ... +def pbkdf1(secret, salt, rounds, keylen: Incomplete | None = ..., hash: str = ...): ... +def pbkdf2(secret, salt, rounds, keylen: Incomplete | None = ..., prf: str = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/win32.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/win32.pyi new file mode 100644 index 00000000..07d54a7b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passlib/passlib/win32.pyi @@ -0,0 +1,7 @@ +from typing import Any + +from passlib.hash import nthash as nthash + +raw_nthash: Any + +def raw_lmhash(secret, encoding: str = ..., hex: bool = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..f7e53b66 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/@tests/stubtest_allowlist.txt @@ -0,0 +1,6 @@ +passpy.__main__ + +# Uses `git` dependency: +passpy.git +# Uses `gpg` dependency: +passpy.gpg diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/METADATA.toml new file mode 100644 index 00000000..f3e83f9c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/METADATA.toml @@ -0,0 +1 @@ +version = "1.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/__init__.pyi new file mode 100644 index 00000000..98430cc7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/__init__.pyi @@ -0,0 +1,5 @@ +from .exceptions import RecursiveCopyMoveError as RecursiveCopyMoveError, StoreNotInitialisedError as StoreNotInitialisedError +from .store import Store as Store +from .util import gen_password as gen_password + +VERSION: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/exceptions.pyi new file mode 100644 index 00000000..f3a532d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/exceptions.pyi @@ -0,0 +1,2 @@ +class StoreNotInitialisedError(FileNotFoundError): ... +class RecursiveCopyMoveError(OSError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/store.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/store.pyi new file mode 100644 index 00000000..07a9fcc5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/store.pyi @@ -0,0 +1,31 @@ +from _typeshed import StrPath +from collections.abc import Iterator +from re import Match + +class Store: + def __init__( + self, + gpg_bin: str = ..., + git_bin: str = ..., + store_dir: str = ..., + use_agent: bool = ..., + interactive: bool = ..., + verbose: bool = ..., + ) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def is_init(self) -> bool: ... + def init_store(self, gpg_ids: None | str | list[str], path: StrPath | None = ...) -> None: ... + def init_git(self) -> None: ... + def git(self, method: str, *args: object, **kwargs: object) -> None: ... + def get_key(self, path: StrPath | None) -> str | None: ... + def set_key(self, path: StrPath | None, key_data: str, force: bool = ...) -> None: ... + def remove_path(self, path: StrPath, recursive: bool = ..., force: bool = ...) -> None: ... + def gen_key( + self, path: StrPath | None, length: int, symbols: bool = ..., force: bool = ..., inplace: bool = ... + ) -> str | None: ... + def copy_path(self, old_path: StrPath, new_path: StrPath, force: bool = ...) -> None: ... 
+ def move_path(self, old_path: StrPath, new_path: StrPath, force: bool = ...) -> None: ... + def list_dir(self, path: StrPath) -> tuple[list[str], list[str]]: ... + def iter_dir(self, path: StrPath) -> Iterator[str]: ... + def find(self, names: None | str | list[str]) -> list[str]: ... + def search(self, term: str) -> dict[str, list[tuple[str, Match[str]]]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/util.pyi new file mode 100644 index 00000000..4c10cdea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/passpy/passpy/util.pyi @@ -0,0 +1,13 @@ +from collections.abc import Callable +from typing import Any, TypeVar + +_C = TypeVar("_C", bound=Callable[..., Any]) + +# Technically, the first argument of `_C` must be `Store`, +# but for now we leave it simple: +def initialised(func: _C) -> _C: ... +def trap(path_index: str | int) -> Callable[[_C], _C]: ... +def gen_password(length: int, symbols: bool = ...) -> str: ... +def copy_move( + src: str, dst: str, force: bool = ..., move: bool = ..., interactive: bool = ..., verbose: bool = ... +) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/peewee/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/peewee/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..98477068 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/peewee/@tests/stubtest_allowlist.txt @@ -0,0 +1,25 @@ +peewee.DQ.__invert__ +peewee.DateField.day +peewee.DateField.month +peewee.DateField.year +peewee.DateTimeField.day +peewee.DateTimeField.hour +peewee.DateTimeField.minute +peewee.DateTimeField.month +peewee.DateTimeField.second +peewee.DateTimeField.year +peewee.Model.insert +peewee.Model.replace +peewee.Model.update +peewee.TimeField.hour +peewee.TimeField.minute +peewee.TimeField.second +peewee.TimestampField.day +peewee.TimestampField.hour +peewee.TimestampField.minute +peewee.TimestampField.month +peewee.TimestampField.second +peewee.TimestampField.year +peewee.Window.as_groups +peewee.Window.as_range +peewee.Window.as_rows diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/peewee/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/peewee/METADATA.toml new file mode 100644 index 00000000..6b010cfb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/peewee/METADATA.toml @@ -0,0 +1 @@ +version = "3.15.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/peewee/peewee.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/peewee/peewee.pyi new file mode 100644 index 00000000..a58a53a9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/peewee/peewee.pyi @@ -0,0 +1,1779 @@ +import itertools +import logging +import threading +from _typeshed import Incomplete, SupportsKeysAndGetItem +from collections.abc import Generator, Iterable +from typing import ClassVar, NamedTuple, TypeVar +from typing_extensions import Self + +class NullHandler(logging.Handler): + def emit(self, record) -> None: ... 
+ +text_type = str +bytes_type = bytes +buffer_type = memoryview +basestring = str +long = int +izip_longest = itertools.zip_longest + +_VT = TypeVar("_VT") + +class attrdict(dict[str, _VT]): + def __getattr__(self, attr: str) -> _VT: ... + def __setattr__(self, attr: str, value: _VT) -> None: ... + # calls dict.update() + def __iadd__(self, rhs: SupportsKeysAndGetItem[str, _VT] | Iterable[tuple[str, _VT]]) -> Self: ... + def __add__(self, rhs: SupportsKeysAndGetItem[str, _VT] | Iterable[tuple[str, _VT]]) -> attrdict[_VT]: ... + +OP: Incomplete +DJANGO_MAP: Incomplete +JOIN: Incomplete +PREFETCH_TYPE: attrdict[int] + +def chunked(it, n) -> Generator[Incomplete, None, None]: ... + +class _callable_context_manager: + def __call__(self, fn): ... + +class Proxy: + def __init__(self) -> None: ... + obj: Incomplete + def initialize(self, obj) -> None: ... + def attach_callback(self, callback): ... + def passthrough(method): ... + __enter__: Incomplete + __exit__: Incomplete + def __getattr__(self, attr: str): ... + def __setattr__(self, attr: str, value) -> None: ... + +class DatabaseProxy(Proxy): + def connection_context(self): ... + def atomic(self, *args, **kwargs): ... + def manual_commit(self): ... + def transaction(self, *args, **kwargs): ... + def savepoint(self): ... + @property + def Model(self) -> type[Model]: ... + +class ModelDescriptor: ... + +class AliasManager: + def __init__(self) -> None: ... + @property + def mapping(self): ... + def add(self, source): ... + def get(self, source, any_depth: bool = ...): ... + def __getitem__(self, source): ... + def __setitem__(self, source, alias) -> None: ... + def push(self) -> None: ... + def pop(self) -> None: ... + +class State: + def __new__(cls, scope=..., parentheses: bool = ..., **kwargs): ... + def __call__(self, scope: Incomplete | None = ..., parentheses: Incomplete | None = ..., **kwargs): ... + def __getattr__(self, attr_name: str): ... + +class Context: + stack: Incomplete + alias_manager: Incomplete + state: Incomplete + def __init__(self, **settings) -> None: ... + def as_new(self): ... + def column_sort_key(self, item): ... + @property + def scope(self): ... + @property + def parentheses(self): ... + @property + def subquery(self): ... + def __call__(self, **overrides): ... + scope_normal: Incomplete + scope_source: Incomplete + scope_values: Incomplete + scope_cte: Incomplete + scope_column: Incomplete + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + def push_alias(self) -> Generator[None, None, None]: ... + def sql(self, obj): ... + def literal(self, keyword): ... + def value(self, value, converter: Incomplete | None = ..., add_param: bool = ...): ... + def __sql__(self, ctx): ... + def parse(self, node): ... + def query(self): ... + +class Node: + def clone(self): ... + def __sql__(self, ctx) -> None: ... + @staticmethod + def copy(method): ... + def coerce(self, _coerce: bool = ...): ... + def is_alias(self): ... + def unwrap(self): ... + +class ColumnFactory: + node: Incomplete + def __init__(self, node) -> None: ... + def __getattr__(self, attr: str): ... + +class _DynamicColumn: + def __get__(self, instance, instance_type: Incomplete | None = ...): ... + +class _ExplicitColumn: + def __get__(self, instance, instance_type: Incomplete | None = ...): ... + +class Source(Node): + c: Incomplete + def __init__(self, alias: Incomplete | None = ...) -> None: ... + def alias(self, name) -> None: ... + def select(self, *columns): ... 
+ def join(self, dest, join_type=..., on: Incomplete | None = ...): ... + def left_outer_join(self, dest, on: Incomplete | None = ...): ... + def cte(self, name, recursive: bool = ..., columns: Incomplete | None = ..., materialized: Incomplete | None = ...): ... + def get_sort_key(self, ctx): ... + def apply_alias(self, ctx): ... + def apply_column(self, ctx): ... + +class _HashableSource: + def __init__(self, *args, **kwargs) -> None: ... + def alias(self, name) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + __lt__: Incomplete + __le__: Incomplete + __gt__: Incomplete + __ge__: Incomplete + +class BaseTable(Source): + __and__: Incomplete + __add__: Incomplete + __sub__: Incomplete + __or__: Incomplete + __mul__: Incomplete + __rand__: Incomplete + __radd__: Incomplete + __rsub__: Incomplete + __ror__: Incomplete + __rmul__: Incomplete + +class _BoundTableContext(_callable_context_manager): + table: Incomplete + database: Incomplete + def __init__(self, table, database) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + +class Table(_HashableSource, BaseTable): + __name__: Incomplete + c: Incomplete + primary_key: Incomplete + def __init__( + self, + name, + columns: Incomplete | None = ..., + primary_key: Incomplete | None = ..., + schema: Incomplete | None = ..., + alias: Incomplete | None = ..., + _model: Incomplete | None = ..., + _database: Incomplete | None = ..., + ) -> None: ... + def clone(self): ... + def bind(self, database: Incomplete | None = ...): ... + def bind_ctx(self, database: Incomplete | None = ...): ... + def select(self, *columns): ... + def insert(self, insert: Incomplete | None = ..., columns: Incomplete | None = ..., **kwargs): ... + def replace(self, insert: Incomplete | None = ..., columns: Incomplete | None = ..., **kwargs): ... + def update(self, update: Incomplete | None = ..., **kwargs): ... + def delete(self): ... + def __sql__(self, ctx): ... + +class Join(BaseTable): + lhs: Incomplete + rhs: Incomplete + join_type: Incomplete + def __init__(self, lhs, rhs, join_type=..., on: Incomplete | None = ..., alias: Incomplete | None = ...) -> None: ... + def on(self, predicate): ... + def __sql__(self, ctx): ... + +class ValuesList(_HashableSource, BaseTable): + def __init__(self, values, columns: Incomplete | None = ..., alias: Incomplete | None = ...) -> None: ... + def columns(self, *names) -> None: ... + def __sql__(self, ctx): ... + +class CTE(_HashableSource, Source): + def __init__( + self, name, query, recursive: bool = ..., columns: Incomplete | None = ..., materialized: Incomplete | None = ... + ) -> None: ... + def select_from(self, *columns): ... + def union_all(self, rhs): ... + __add__: Incomplete + def union(self, rhs): ... + __or__: Incomplete + def __sql__(self, ctx): ... + +class ColumnBase(Node): + def converter(self, converter: Incomplete | None = ...) -> None: ... + def alias(self, alias): ... + def unalias(self): ... + def bind_to(self, dest): ... + def cast(self, as_type): ... + def asc(self, collation: Incomplete | None = ..., nulls: Incomplete | None = ...): ... + __pos__: Incomplete + def desc(self, collation: Incomplete | None = ..., nulls: Incomplete | None = ...): ... + __neg__: Incomplete + def __invert__(self): ... 
+ __and__: Incomplete + __or__: Incomplete + __add__: Incomplete + __sub__: Incomplete + __mul__: Incomplete + __div__: Incomplete + __truediv__: Incomplete + __xor__: Incomplete + __radd__: Incomplete + __rsub__: Incomplete + __rmul__: Incomplete + __rdiv__: Incomplete + __rtruediv__: Incomplete + __rand__: Incomplete + __ror__: Incomplete + __rxor__: Incomplete + def __eq__(self, rhs): ... + def __ne__(self, rhs): ... + __lt__: Incomplete + __le__: Incomplete + __gt__: Incomplete + __ge__: Incomplete + __lshift__: Incomplete + __rshift__: Incomplete + __mod__: Incomplete + __pow__: Incomplete + like: Incomplete + ilike: Incomplete + bin_and: Incomplete + bin_or: Incomplete + in_: Incomplete + not_in: Incomplete + def is_null(self, is_null: bool = ...): ... + def contains(self, rhs): ... + def startswith(self, rhs): ... + def endswith(self, rhs): ... + def between(self, lo, hi): ... + def concat(self, rhs): ... + def regexp(self, rhs): ... + def iregexp(self, rhs): ... + def __getitem__(self, item): ... + __iter__: Incomplete + def distinct(self): ... + def collate(self, collation): ... + def get_sort_key(self, ctx): ... + +class Column(ColumnBase): + source: Incomplete + name: Incomplete + def __init__(self, source, name) -> None: ... + def get_sort_key(self, ctx): ... + def __hash__(self) -> int: ... + def __sql__(self, ctx): ... + +class WrappedNode(ColumnBase): + node: Incomplete + def __init__(self, node) -> None: ... + def is_alias(self): ... + def unwrap(self): ... + +class EntityFactory: + node: Incomplete + def __init__(self, node) -> None: ... + def __getattr__(self, attr: str): ... + +class _DynamicEntity: + def __get__(self, instance, instance_type: Incomplete | None = ...): ... + +class Alias(WrappedNode): + c: Incomplete + def __init__(self, node, alias) -> None: ... + def __hash__(self) -> int: ... + @property + def name(self): ... + @name.setter + def name(self, value) -> None: ... + def alias(self, alias: Incomplete | None = ...): ... + def unalias(self): ... + def is_alias(self): ... + def __sql__(self, ctx): ... + +class BindTo(WrappedNode): + dest: Incomplete + def __init__(self, node, dest) -> None: ... + def __sql__(self, ctx): ... + +class Negated(WrappedNode): + def __invert__(self): ... + def __sql__(self, ctx): ... + +class BitwiseMixin: + def __and__(self, other): ... + def __or__(self, other): ... + def __sub__(self, other): ... + def __invert__(self): ... + +class BitwiseNegated(BitwiseMixin, WrappedNode): + def __invert__(self): ... + def __sql__(self, ctx): ... + +class Value(ColumnBase): + value: Incomplete + converter: Incomplete + multi: Incomplete + values: Incomplete + def __init__(self, value, converter: Incomplete | None = ..., unpack: bool = ...) -> None: ... + def __sql__(self, ctx): ... + +class ValueLiterals(WrappedNode): + def __sql__(self, ctx): ... + +def AsIs(value): ... + +class Cast(WrappedNode): + def __init__(self, node, cast) -> None: ... + def __sql__(self, ctx): ... + +class Ordering(WrappedNode): + direction: Incomplete + collation: Incomplete + nulls: Incomplete + def __init__(self, node, direction, collation: Incomplete | None = ..., nulls: Incomplete | None = ...) -> None: ... + def collate(self, collation: Incomplete | None = ...): ... + def __sql__(self, ctx): ... + +class Expression(ColumnBase): + lhs: Incomplete + op: Incomplete + rhs: Incomplete + flat: Incomplete + def __init__(self, lhs, op, rhs, flat: bool = ...) -> None: ... + def __sql__(self, ctx): ... + +class StringExpression(Expression): + def __add__(self, rhs): ... 
+ def __radd__(self, lhs): ... + +class Entity(ColumnBase): + def __init__(self, *path) -> None: ... + def __getattr__(self, attr: str): ... + def get_sort_key(self, ctx): ... + def __hash__(self) -> int: ... + def __sql__(self, ctx): ... + +class SQL(ColumnBase): + sql: Incomplete + params: Incomplete + def __init__(self, sql, params: Incomplete | None = ...) -> None: ... + def __sql__(self, ctx): ... + +def Check(constraint, name: Incomplete | None = ...): ... + +class Function(ColumnBase): + no_coerce_functions: ClassVar[set[str]] + name: Incomplete + arguments: Incomplete + def __init__(self, name, arguments, coerce: bool = ..., python_value: Incomplete | None = ...) -> None: ... + def __getattr__(self, attr: str): ... + def filter(self, where: Incomplete | None = ...) -> None: ... + def order_by(self, *ordering) -> None: ... + def python_value(self, func: Incomplete | None = ...) -> None: ... + def over( + self, + partition_by: Incomplete | None = ..., + order_by: Incomplete | None = ..., + start: Incomplete | None = ..., + end: Incomplete | None = ..., + frame_type: Incomplete | None = ..., + window: Incomplete | None = ..., + exclude: Incomplete | None = ..., + ): ... + def __sql__(self, ctx): ... + +fn: Incomplete + +class Window(Node): + CURRENT_ROW: Incomplete + GROUP: Incomplete + TIES: Incomplete + NO_OTHERS: Incomplete + GROUPS: str + RANGE: str + ROWS: str + partition_by: Incomplete + order_by: Incomplete + start: Incomplete + end: Incomplete + frame_type: Incomplete + def __init__( + self, + partition_by: Incomplete | None = ..., + order_by: Incomplete | None = ..., + start: Incomplete | None = ..., + end: Incomplete | None = ..., + frame_type: Incomplete | None = ..., + extends: Incomplete | None = ..., + exclude: Incomplete | None = ..., + alias: Incomplete | None = ..., + _inline: bool = ..., + ) -> None: ... + def alias(self, alias: Incomplete | None = ...): ... + def as_range(self) -> None: ... + def as_rows(self) -> None: ... + def as_groups(self) -> None: ... + def extends(self, window: Incomplete | None = ...) -> None: ... + def exclude(self, frame_exclusion: Incomplete | None = ...) -> None: ... + @staticmethod + def following(value: Incomplete | None = ...): ... + @staticmethod + def preceding(value: Incomplete | None = ...): ... + def __sql__(self, ctx): ... + +class WindowAlias(Node): + window: Incomplete + def __init__(self, window) -> None: ... + def alias(self, window_alias): ... + def __sql__(self, ctx): ... + +class ForUpdate(Node): + def __init__(self, expr, of: Incomplete | None = ..., nowait: Incomplete | None = ...) -> None: ... + def __sql__(self, ctx): ... + +def Case(predicate, expression_tuples, default: Incomplete | None = ...): ... + +class NodeList(ColumnBase): + nodes: Incomplete + glue: Incomplete + parens: Incomplete + def __init__(self, nodes, glue: str = ..., parens: bool = ...) -> None: ... + def __sql__(self, ctx): ... + +class _Namespace(Node): + def __init__(self, name) -> None: ... + def __getattr__(self, attr: str): ... + __getitem__: Incomplete + +class NamespaceAttribute(ColumnBase): + def __init__(self, namespace, attribute) -> None: ... + def __sql__(self, ctx): ... + +EXCLUDED: Incomplete + +class DQ(ColumnBase): + query: Incomplete + def __init__(self, **query) -> None: ... + def __invert__(self) -> None: ... + def clone(self): ... + +Tuple: Incomplete + +class QualifiedNames(WrappedNode): + def __sql__(self, ctx): ... 
+ +class OnConflict(Node): + def __init__( + self, + action: Incomplete | None = ..., + update: Incomplete | None = ..., + preserve: Incomplete | None = ..., + where: Incomplete | None = ..., + conflict_target: Incomplete | None = ..., + conflict_where: Incomplete | None = ..., + conflict_constraint: Incomplete | None = ..., + ) -> None: ... + def get_conflict_statement(self, ctx, query): ... + def get_conflict_update(self, ctx, query): ... + def preserve(self, *columns) -> None: ... + def update(self, _data: Incomplete | None = ..., **kwargs) -> None: ... + def where(self, *expressions) -> None: ... + def conflict_target(self, *constraints) -> None: ... + def conflict_where(self, *expressions) -> None: ... + def conflict_constraint(self, constraint) -> None: ... + +class BaseQuery(Node): + default_row_type: Incomplete + def __init__(self, _database: Incomplete | None = ..., **kwargs) -> None: ... + def bind(self, database: Incomplete | None = ...): ... + def clone(self): ... + def dicts(self, as_dict: bool = ...): ... + def tuples(self, as_tuple: bool = ...): ... + def namedtuples(self, as_namedtuple: bool = ...): ... + def objects(self, constructor: Incomplete | None = ...): ... + def __sql__(self, ctx) -> None: ... + def sql(self): ... + def execute(self, database): ... + def iterator(self, database: Incomplete | None = ...): ... + def __iter__(self): ... + def __getitem__(self, value): ... + def __len__(self) -> int: ... + +class RawQuery(BaseQuery): + def __init__(self, sql: Incomplete | None = ..., params: Incomplete | None = ..., **kwargs) -> None: ... + def __sql__(self, ctx): ... + +class Query(BaseQuery): + def __init__( + self, + where: Incomplete | None = ..., + order_by: Incomplete | None = ..., + limit: Incomplete | None = ..., + offset: Incomplete | None = ..., + **kwargs, + ) -> None: ... + def with_cte(self, *cte_list) -> None: ... + def where(self, *expressions) -> None: ... + def orwhere(self, *expressions) -> None: ... + def order_by(self, *values) -> None: ... + def order_by_extend(self, *values) -> None: ... + def limit(self, value: Incomplete | None = ...) -> None: ... + def offset(self, value: Incomplete | None = ...) -> None: ... + def paginate(self, page, paginate_by: int = ...) -> None: ... + def __sql__(self, ctx): ... + +class SelectQuery(Query): + union_all: Incomplete + __add__: Incomplete + union: Incomplete + __or__: Incomplete + intersect: Incomplete + __and__: Incomplete + except_: Incomplete + __sub__: Incomplete + __radd__: Incomplete + __ror__: Incomplete + __rand__: Incomplete + __rsub__: Incomplete + def select_from(self, *columns): ... + +class SelectBase(_HashableSource, Source, SelectQuery): + def peek(self, database, n: int = ...): ... + def first(self, database, n: int = ...): ... + def scalar(self, database, as_tuple: bool = ..., as_dict: bool = ...): ... + def scalars(self, database) -> Generator[Incomplete, None, None]: ... + def count(self, database, clear_limit: bool = ...): ... + def exists(self, database): ... + def get(self, database): ... + +class CompoundSelectQuery(SelectBase): + lhs: Incomplete + op: Incomplete + rhs: Incomplete + def __init__(self, lhs, op, rhs) -> None: ... + def exists(self, database): ... + def __sql__(self, ctx): ... 
+ +class Select(SelectBase): + def __init__( + self, + from_list: Incomplete | None = ..., + columns: Incomplete | None = ..., + group_by: Incomplete | None = ..., + having: Incomplete | None = ..., + distinct: Incomplete | None = ..., + windows: Incomplete | None = ..., + for_update: Incomplete | None = ..., + for_update_of: Incomplete | None = ..., + nowait: Incomplete | None = ..., + lateral: Incomplete | None = ..., + **kwargs, + ) -> None: ... + def clone(self): ... + def columns(self, *columns, **kwargs) -> None: ... + select: Incomplete + def select_extend(self, *columns) -> None: ... + @property + def selected_columns(self): ... + @selected_columns.setter + def selected_columns(self, value) -> None: ... + def from_(self, *sources) -> None: ... + def join(self, dest, join_type=..., on: Incomplete | None = ...) -> None: ... + def left_outer_join(self, dest, on: Incomplete | None = ...): ... + def group_by(self, *columns) -> None: ... + def group_by_extend(self, *values): ... + def having(self, *expressions) -> None: ... + def distinct(self, *columns) -> None: ... + def window(self, *windows) -> None: ... + def for_update(self, for_update: bool = ..., of: Incomplete | None = ..., nowait: Incomplete | None = ...) -> None: ... + def lateral(self, lateral: bool = ...) -> None: ... + def __sql_selection__(self, ctx, is_subquery: bool = ...): ... + def __sql__(self, ctx): ... + +class _WriteQuery(Query): + table: Incomplete + def __init__(self, table, returning: Incomplete | None = ..., **kwargs) -> None: ... + def cte(self, name, recursive: bool = ..., columns: Incomplete | None = ..., materialized: Incomplete | None = ...): ... + def returning(self, *returning) -> None: ... + def apply_returning(self, ctx): ... + def execute_returning(self, database): ... + def handle_result(self, database, cursor): ... + def __sql__(self, ctx): ... + +class Update(_WriteQuery): + def __init__(self, table, update: Incomplete | None = ..., **kwargs) -> None: ... + def from_(self, *sources) -> None: ... + def __sql__(self, ctx): ... + +class Insert(_WriteQuery): + SIMPLE: int + QUERY: int + MULTI: int + + class DefaultValuesException(Exception): ... + + def __init__( + self, + table, + insert: Incomplete | None = ..., + columns: Incomplete | None = ..., + on_conflict: Incomplete | None = ..., + **kwargs, + ) -> None: ... + def where(self, *expressions) -> None: ... + def as_rowcount(self, _as_rowcount: bool = ...) -> None: ... + def on_conflict_ignore(self, ignore: bool = ...) -> None: ... + def on_conflict_replace(self, replace: bool = ...) -> None: ... + def on_conflict(self, *args, **kwargs) -> None: ... + def get_default_data(self): ... + def get_default_columns(self): ... + def __sql__(self, ctx): ... + def handle_result(self, database, cursor): ... + +class Delete(_WriteQuery): + def __sql__(self, ctx): ... + +class Index(Node): + def __init__( + self, + name, + table, + expressions, + unique: bool = ..., + safe: bool = ..., + where: Incomplete | None = ..., + using: Incomplete | None = ..., + ) -> None: ... + def safe(self, _safe: bool = ...) -> None: ... + def where(self, *expressions) -> None: ... + def using(self, _using: Incomplete | None = ...) -> None: ... + def __sql__(self, ctx): ... + +class ModelIndex(Index): + def __init__( + self, + model, + fields, + unique: bool = ..., + safe: bool = ..., + where: Incomplete | None = ..., + using: Incomplete | None = ..., + name: Incomplete | None = ..., + ) -> None: ... + +class PeeweeException(Exception): + def __init__(self, *args) -> None: ... 
+ +class ImproperlyConfigured(PeeweeException): ... +class DatabaseError(PeeweeException): ... +class DataError(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InterfaceError(PeeweeException): ... +class InternalError(DatabaseError): ... +class NotSupportedError(DatabaseError): ... +class OperationalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... + +class ExceptionWrapper: + exceptions: Incomplete + def __init__(self, exceptions) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, exc_type, exc_value, traceback) -> None: ... + +class IndexMetadata(NamedTuple): + name: Incomplete + sql: Incomplete + columns: Incomplete + unique: Incomplete + table: Incomplete + +class ColumnMetadata(NamedTuple): + name: Incomplete + data_type: Incomplete + null: Incomplete + primary_key: Incomplete + table: Incomplete + default: Incomplete + +class ForeignKeyMetadata(NamedTuple): + column: Incomplete + dest_table: Incomplete + dest_column: Incomplete + table: Incomplete + +class ViewMetadata(NamedTuple): + name: Incomplete + sql: Incomplete + +class _ConnectionState: + def __init__(self, **kwargs) -> None: ... + closed: bool + conn: Incomplete + ctx: Incomplete + transactions: Incomplete + def reset(self) -> None: ... + def set_connection(self, conn) -> None: ... + +class _ConnectionLocal(_ConnectionState, threading.local): ... + +class _NoopLock: + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + +class ConnectionContext(_callable_context_manager): + db: Incomplete + def __init__(self, db) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + +class Database(_callable_context_manager): + context_class: Incomplete + field_types: Incomplete + operations: Incomplete + param: str + quote: str + server_version: Incomplete + commit_select: bool + compound_select_parentheses: Incomplete + for_update: bool + index_schema_prefix: bool + index_using_precedes_table: bool + limit_max: Incomplete + nulls_ordering: bool + returning_clause: bool + safe_create_index: bool + safe_drop_index: bool + sequences: bool + truncate_table: bool + autoconnect: Incomplete + autorollback: Incomplete + thread_safe: Incomplete + connect_params: Incomplete + def __init__( + self, + database, + thread_safe: bool = ..., + autorollback: bool = ..., + field_types: Incomplete | None = ..., + operations: Incomplete | None = ..., + autocommit: Incomplete | None = ..., + autoconnect: bool = ..., + **kwargs, + ) -> None: ... + database: Incomplete + deferred: Incomplete + def init(self, database, **kwargs) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + def connection_context(self): ... + def connect(self, reuse_if_open: bool = ...): ... + def close(self): ... + def is_closed(self): ... + def is_connection_usable(self): ... + def connection(self): ... + def cursor(self, commit: Incomplete | None = ...): ... + def execute_sql(self, sql, params: Incomplete | None = ..., commit=...): ... + def execute(self, query, commit=..., **context_options): ... + def get_context_options(self): ... + def get_sql_context(self, **context_options): ... + def conflict_statement(self, on_conflict, query) -> None: ... + def conflict_update(self, on_conflict, query) -> None: ... + def last_insert_id(self, cursor, query_type: Incomplete | None = ...): ... + def rows_affected(self, cursor): ... + def default_values_insert(self, ctx): ... 
+ def session_start(self): ... + def session_commit(self): ... + def session_rollback(self): ... + def in_transaction(self): ... + def push_transaction(self, transaction) -> None: ... + def pop_transaction(self): ... + def transaction_depth(self): ... + def top_transaction(self): ... + def atomic(self, *args, **kwargs): ... + def manual_commit(self): ... + def transaction(self, *args, **kwargs): ... + def savepoint(self): ... + def begin(self) -> None: ... + def commit(self): ... + def rollback(self): ... + def batch_commit(self, it, n) -> Generator[Incomplete, None, None]: ... + def table_exists(self, table_name, schema: Incomplete | None = ...): ... + def get_tables(self, schema: Incomplete | None = ...) -> None: ... + def get_indexes(self, table, schema: Incomplete | None = ...) -> None: ... + def get_columns(self, table, schema: Incomplete | None = ...) -> None: ... + def get_primary_keys(self, table, schema: Incomplete | None = ...) -> None: ... + def get_foreign_keys(self, table, schema: Incomplete | None = ...) -> None: ... + def sequence_exists(self, seq) -> None: ... + def create_tables(self, models, **options) -> None: ... + def drop_tables(self, models, **kwargs) -> None: ... + def extract_date(self, date_part, date_field) -> None: ... + def truncate_date(self, date_part, date_field) -> None: ... + def to_timestamp(self, date_field) -> None: ... + def from_timestamp(self, date_field) -> None: ... + def random(self): ... + def bind(self, models, bind_refs: bool = ..., bind_backrefs: bool = ...) -> None: ... + def bind_ctx(self, models, bind_refs: bool = ..., bind_backrefs: bool = ...): ... + def get_noop_select(self, ctx): ... + @property + def Model(self) -> type[Model]: ... + +class SqliteDatabase(Database): + field_types: Incomplete + operations: Incomplete + index_schema_prefix: bool + limit_max: int + server_version: Incomplete + truncate_table: bool + nulls_ordering: Incomplete + def __init__(self, database, *args, **kwargs) -> None: ... + returning_clause: Incomplete + def init( + self, database, pragmas: Incomplete | None = ..., timeout: int = ..., returning_clause: Incomplete | None = ..., **kwargs + ) -> None: ... + def pragma(self, key, value=..., permanent: bool = ..., schema: Incomplete | None = ...): ... + cache_size: Incomplete + foreign_keys: Incomplete + journal_mode: Incomplete + journal_size_limit: Incomplete + mmap_size: Incomplete + page_size: Incomplete + read_uncommitted: Incomplete + synchronous: Incomplete + wal_autocheckpoint: Incomplete + application_id: Incomplete + user_version: Incomplete + data_version: Incomplete + @property + def timeout(self): ... + @timeout.setter + def timeout(self, seconds) -> None: ... + def register_aggregate(self, klass, name: Incomplete | None = ..., num_params: int = ...) -> None: ... + def aggregate(self, name: Incomplete | None = ..., num_params: int = ...): ... + def register_collation(self, fn, name: Incomplete | None = ...): ... + def collation(self, name: Incomplete | None = ...): ... + def register_function(self, fn, name: Incomplete | None = ..., num_params: int = ...) -> None: ... + def func(self, name: Incomplete | None = ..., num_params: int = ...): ... + def register_window_function(self, klass, name: Incomplete | None = ..., num_params: int = ...) -> None: ... + def window_function(self, name: Incomplete | None = ..., num_params: int = ...): ... + def register_table_function(self, klass, name: Incomplete | None = ...) -> None: ... + def table_function(self, name: Incomplete | None = ...): ... 
+ def unregister_aggregate(self, name) -> None: ... + def unregister_collation(self, name) -> None: ... + def unregister_function(self, name) -> None: ... + def unregister_window_function(self, name) -> None: ... + def unregister_table_function(self, name): ... + def load_extension(self, extension) -> None: ... + def unload_extension(self, extension) -> None: ... + def attach(self, filename, name): ... + def detach(self, name): ... + def last_insert_id(self, cursor, query_type: Incomplete | None = ...): ... + def rows_affected(self, cursor): ... + def begin(self, lock_type: Incomplete | None = ...) -> None: ... + def get_tables(self, schema: Incomplete | None = ...): ... + def get_views(self, schema: Incomplete | None = ...): ... + def get_indexes(self, table, schema: Incomplete | None = ...): ... + def get_columns(self, table, schema: Incomplete | None = ...): ... + def get_primary_keys(self, table, schema: Incomplete | None = ...): ... + def get_foreign_keys(self, table, schema: Incomplete | None = ...): ... + def get_binary_type(self): ... + def conflict_statement(self, on_conflict, query): ... + def conflict_update(self, oc, query): ... + def extract_date(self, date_part, date_field): ... + def truncate_date(self, date_part, date_field): ... + def to_timestamp(self, date_field): ... + def from_timestamp(self, date_field): ... + +class PostgresqlDatabase(Database): + field_types: Incomplete + operations: Incomplete + param: str + commit_select: bool + compound_select_parentheses: Incomplete + for_update: bool + nulls_ordering: bool + returning_clause: bool + safe_create_index: bool + sequences: bool + def init( + self, + database, + register_unicode: bool = ..., + encoding: Incomplete | None = ..., + isolation_level: Incomplete | None = ..., + **kwargs, + ) -> None: ... + def is_connection_usable(self): ... + def last_insert_id(self, cursor, query_type: Incomplete | None = ...): ... + def rows_affected(self, cursor): ... + def get_tables(self, schema: Incomplete | None = ...): ... + def get_views(self, schema: Incomplete | None = ...): ... + def get_indexes(self, table, schema: Incomplete | None = ...): ... + def get_columns(self, table, schema: Incomplete | None = ...): ... + def get_primary_keys(self, table, schema: Incomplete | None = ...): ... + def get_foreign_keys(self, table, schema: Incomplete | None = ...): ... + def sequence_exists(self, sequence): ... + def get_binary_type(self): ... + def conflict_statement(self, on_conflict, query) -> None: ... + def conflict_update(self, oc, query): ... + def extract_date(self, date_part, date_field): ... + def truncate_date(self, date_part, date_field): ... + def to_timestamp(self, date_field): ... + def from_timestamp(self, date_field): ... + def get_noop_select(self, ctx): ... + def set_time_zone(self, timezone) -> None: ... + +class MySQLDatabase(Database): + field_types: Incomplete + operations: Incomplete + param: str + quote: str + commit_select: bool + compound_select_parentheses: Incomplete + for_update: bool + index_using_precedes_table: bool + limit_max: Incomplete + safe_create_index: bool + safe_drop_index: bool + sql_mode: str + def init(self, database, **kwargs) -> None: ... + def is_connection_usable(self): ... + def default_values_insert(self, ctx): ... + def get_tables(self, schema: Incomplete | None = ...): ... + def get_views(self, schema: Incomplete | None = ...): ... + def get_indexes(self, table, schema: Incomplete | None = ...): ... + def get_columns(self, table, schema: Incomplete | None = ...): ... 
+ def get_primary_keys(self, table, schema: Incomplete | None = ...): ... + def get_foreign_keys(self, table, schema: Incomplete | None = ...): ... + def get_binary_type(self): ... + def conflict_statement(self, on_conflict, query): ... + def conflict_update(self, on_conflict, query): ... + def extract_date(self, date_part, date_field): ... + def truncate_date(self, date_part, date_field): ... + def to_timestamp(self, date_field): ... + def from_timestamp(self, date_field): ... + def random(self): ... + def get_noop_select(self, ctx): ... + +class _manual(_callable_context_manager): + db: Incomplete + def __init__(self, db) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + +class _atomic(_callable_context_manager): + db: Incomplete + def __init__(self, db, *args, **kwargs) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb): ... + +class _transaction(_callable_context_manager): + db: Incomplete + def __init__(self, db, *args, **kwargs) -> None: ... + def commit(self, begin: bool = ...) -> None: ... + def rollback(self, begin: bool = ...) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + +class _savepoint(_callable_context_manager): + db: Incomplete + sid: Incomplete + quoted_sid: Incomplete + def __init__(self, db, sid: Incomplete | None = ...) -> None: ... + def commit(self, begin: bool = ...) -> None: ... + def rollback(self) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + +class CursorWrapper: + cursor: Incomplete + count: int + index: int + initialized: bool + populated: bool + row_cache: Incomplete + def __init__(self, cursor) -> None: ... + def __iter__(self): ... + def __getitem__(self, item): ... + def __len__(self) -> int: ... + def initialize(self) -> None: ... + def iterate(self, cache: bool = ...): ... + def process_row(self, row): ... + def iterator(self) -> Generator[Incomplete, None, None]: ... + def fill_cache(self, n: int = ...) -> None: ... + +class DictCursorWrapper(CursorWrapper): + initialize: Incomplete + process_row: Incomplete + +class NamedTupleCursorWrapper(CursorWrapper): + tuple_class: Incomplete + def initialize(self) -> None: ... + def process_row(self, row): ... + +class ObjectCursorWrapper(DictCursorWrapper): + constructor: Incomplete + def __init__(self, cursor, constructor) -> None: ... + def process_row(self, row): ... + +class ResultIterator: + cursor_wrapper: Incomplete + index: int + def __init__(self, cursor_wrapper) -> None: ... + def __iter__(self): ... + def next(self): ... + __next__: Incomplete + +class FieldAccessor: + model: Incomplete + field: Incomplete + name: Incomplete + def __init__(self, model, field, name) -> None: ... + def __get__(self, instance, instance_type: Incomplete | None = ...): ... + def __set__(self, instance, value) -> None: ... + +class ForeignKeyAccessor(FieldAccessor): + rel_model: Incomplete + def __init__(self, model, field, name) -> None: ... + def get_rel_instance(self, instance): ... + def __get__(self, instance, instance_type: Incomplete | None = ...): ... + def __set__(self, instance, obj) -> None: ... + +class BackrefAccessor: + field: Incomplete + model: Incomplete + rel_model: Incomplete + def __init__(self, field) -> None: ... + def __get__(self, instance, instance_type: Incomplete | None = ...): ... + +class ObjectIdAccessor: + field: Incomplete + def __init__(self, field) -> None: ... 
+ def __get__(self, instance, instance_type: Incomplete | None = ...): ... + def __set__(self, instance, value) -> None: ... + +class Field(ColumnBase): + accessor_class: Incomplete + auto_increment: bool + default_index_type: Incomplete + field_type: str + unpack: bool + null: Incomplete + index: Incomplete + unique: Incomplete + column_name: Incomplete + default: Incomplete + primary_key: Incomplete + constraints: Incomplete + sequence: Incomplete + collation: Incomplete + unindexed: Incomplete + choices: Incomplete + help_text: Incomplete + verbose_name: Incomplete + index_type: Incomplete + def __init__( + self, + null: bool = ..., + index: bool = ..., + unique: bool = ..., + column_name: Incomplete | None = ..., + default: Incomplete | None = ..., + primary_key: bool = ..., + constraints: Incomplete | None = ..., + sequence: Incomplete | None = ..., + collation: Incomplete | None = ..., + unindexed: bool = ..., + choices: Incomplete | None = ..., + help_text: Incomplete | None = ..., + verbose_name: Incomplete | None = ..., + index_type: Incomplete | None = ..., + db_column: Incomplete | None = ..., + _hidden: bool = ..., + ) -> None: ... + def __hash__(self) -> int: ... + model: Incomplete + name: Incomplete + def bind(self, model, name, set_attribute: bool = ...) -> None: ... + @property + def column(self): ... + def adapt(self, value): ... + def db_value(self, value): ... + def python_value(self, value): ... + def to_value(self, value): ... + def get_sort_key(self, ctx): ... + def __sql__(self, ctx): ... + def get_modifiers(self) -> None: ... + def ddl_datatype(self, ctx): ... + def ddl(self, ctx): ... + +class AnyField(Field): + field_type: str + +class IntegerField(Field): + field_type: str + def adapt(self, value): ... + +class BigIntegerField(IntegerField): + field_type: str + +class SmallIntegerField(IntegerField): + field_type: str + +class AutoField(IntegerField): + auto_increment: bool + field_type: str + def __init__(self, *args, **kwargs) -> None: ... + +class BigAutoField(AutoField): + field_type: str + +class IdentityField(AutoField): + field_type: str + def __init__(self, generate_always: bool = ..., **kwargs) -> None: ... + +class PrimaryKeyField(AutoField): + def __init__(self, *args, **kwargs) -> None: ... + +class FloatField(Field): + field_type: str + def adapt(self, value): ... + +class DoubleField(FloatField): + field_type: str + +class DecimalField(Field): + field_type: str + max_digits: Incomplete + decimal_places: Incomplete + auto_round: Incomplete + rounding: Incomplete + def __init__( + self, + max_digits: int = ..., + decimal_places: int = ..., + auto_round: bool = ..., + rounding: Incomplete | None = ..., + *args, + **kwargs, + ) -> None: ... + def get_modifiers(self): ... + def db_value(self, value): ... + def python_value(self, value): ... + +class _StringField(Field): + def adapt(self, value): ... + def __add__(self, other): ... + def __radd__(self, other): ... + +class CharField(_StringField): + field_type: str + max_length: Incomplete + def __init__(self, max_length: int = ..., *args, **kwargs) -> None: ... + def get_modifiers(self): ... + +class FixedCharField(CharField): + field_type: str + def python_value(self, value): ... + +class TextField(_StringField): + field_type: str + +class BlobField(Field): + field_type: str + def bind(self, model, name, set_attribute: bool = ...): ... + def db_value(self, value): ... + +class BitField(BitwiseMixin, BigIntegerField): + def __init__(self, *args, **kwargs) -> None: ... 
+ def flag(self, value: Incomplete | None = ...): ... + +class BigBitFieldData: + instance: Incomplete + name: Incomplete + def __init__(self, instance, name) -> None: ... + def set_bit(self, idx) -> None: ... + def clear_bit(self, idx) -> None: ... + def toggle_bit(self, idx): ... + def is_set(self, idx): ... + +class BigBitFieldAccessor(FieldAccessor): + def __get__(self, instance, instance_type: Incomplete | None = ...): ... + def __set__(self, instance, value) -> None: ... + +class BigBitField(BlobField): + accessor_class: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def db_value(self, value): ... + +class UUIDField(Field): + field_type: str + def db_value(self, value): ... + def python_value(self, value): ... + +class BinaryUUIDField(BlobField): + field_type: str + def db_value(self, value): ... + def python_value(self, value): ... + +class _BaseFormattedField(Field): + formats: Incomplete + def __init__(self, formats: Incomplete | None = ..., *args, **kwargs) -> None: ... + +class DateTimeField(_BaseFormattedField): + field_type: str + formats: Incomplete + def adapt(self, value): ... + def to_timestamp(self): ... + def truncate(self, part): ... + year: Incomplete + month: Incomplete + day: Incomplete + hour: Incomplete + minute: Incomplete + second: Incomplete + +class DateField(_BaseFormattedField): + field_type: str + formats: Incomplete + def adapt(self, value): ... + def to_timestamp(self): ... + def truncate(self, part): ... + year: Incomplete + month: Incomplete + day: Incomplete + +class TimeField(_BaseFormattedField): + field_type: str + formats: Incomplete + def adapt(self, value): ... + hour: Incomplete + minute: Incomplete + second: Incomplete + +class TimestampField(BigIntegerField): + valid_resolutions: Incomplete + resolution: Incomplete + ticks_to_microsecond: Incomplete + utc: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def local_to_utc(self, dt): ... + def utc_to_local(self, dt): ... + def get_timestamp(self, value): ... + def db_value(self, value): ... + def python_value(self, value): ... + def from_timestamp(self): ... + year: Incomplete + month: Incomplete + day: Incomplete + hour: Incomplete + minute: Incomplete + second: Incomplete + +class IPField(BigIntegerField): + def db_value(self, val): ... + def python_value(self, val): ... + +class BooleanField(Field): + field_type: str + adapt: Incomplete + +class BareField(Field): + adapt: Incomplete + def __init__(self, adapt: Incomplete | None = ..., *args, **kwargs) -> None: ... + def ddl_datatype(self, ctx) -> None: ... + +class ForeignKeyField(Field): + accessor_class: Incomplete + backref_accessor_class: Incomplete + rel_model: Incomplete + rel_field: Incomplete + declared_backref: Incomplete + backref: Incomplete + on_delete: Incomplete + on_update: Incomplete + deferrable: Incomplete + deferred: Incomplete + object_id_name: Incomplete + lazy_load: Incomplete + constraint_name: Incomplete + def __init__( + self, + model, + field: Incomplete | None = ..., + backref: Incomplete | None = ..., + on_delete: Incomplete | None = ..., + on_update: Incomplete | None = ..., + deferrable: Incomplete | None = ..., + _deferred: Incomplete | None = ..., + rel_model: Incomplete | None = ..., + to_field: Incomplete | None = ..., + object_id_name: Incomplete | None = ..., + lazy_load: bool = ..., + constraint_name: Incomplete | None = ..., + related_name: Incomplete | None = ..., + *args, + **kwargs, + ) -> None: ... + @property + def field_type(self): ... + def get_modifiers(self): ... 
+ def adapt(self, value): ... + def db_value(self, value): ... + def python_value(self, value): ... + column_name: Incomplete + safe_name: Incomplete + def bind(self, model, name, set_attribute: bool = ...) -> None: ... + def foreign_key_constraint(self): ... + def __getattr__(self, attr: str): ... + +class DeferredForeignKey(Field): + field_kwargs: Incomplete + rel_model_name: Incomplete + def __init__(self, rel_model_name, **kwargs) -> None: ... + __hash__: Incomplete + def __deepcopy__(self, memo: Incomplete | None = ...): ... + def set_model(self, rel_model) -> None: ... + @staticmethod + def resolve(model_cls) -> None: ... + +class DeferredThroughModel: + def __init__(self) -> None: ... + def set_field(self, model, field, name) -> None: ... + def set_model(self, through_model) -> None: ... + +class MetaField(Field): + column_name: Incomplete + default: Incomplete + model: Incomplete + name: Incomplete + primary_key: bool + +class ManyToManyFieldAccessor(FieldAccessor): + model: Incomplete + rel_model: Incomplete + through_model: Incomplete + src_fk: Incomplete + dest_fk: Incomplete + def __init__(self, model, field, name) -> None: ... + def __get__(self, instance, instance_type: Incomplete | None = ..., force_query: bool = ...): ... + def __set__(self, instance, value) -> None: ... + +class ManyToManyField(MetaField): + accessor_class: Incomplete + rel_model: Incomplete + backref: Incomplete + def __init__( + self, + model, + backref: Incomplete | None = ..., + through_model: Incomplete | None = ..., + on_delete: Incomplete | None = ..., + on_update: Incomplete | None = ..., + _is_backref: bool = ..., + ) -> None: ... + def bind(self, model, name, set_attribute: bool = ...) -> None: ... + def get_models(self): ... + @property + def through_model(self): ... + @through_model.setter + def through_model(self, value) -> None: ... + def get_through_model(self): ... + +class VirtualField(MetaField): + field_class: Incomplete + field_instance: Incomplete + def __init__(self, field_class: Incomplete | None = ..., *args, **kwargs) -> None: ... + def db_value(self, value): ... + def python_value(self, value): ... + model: Incomplete + column_name: Incomplete + def bind(self, model, name, set_attribute: bool = ...) -> None: ... + +class CompositeKey(MetaField): + sequence: Incomplete + field_names: Incomplete + def __init__(self, *field_names) -> None: ... + @property + def safe_field_names(self): ... + def __get__(self, instance, instance_type: Incomplete | None = ...): ... + def __set__(self, instance, value) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... + def __sql__(self, ctx): ... + model: Incomplete + column_name: Incomplete + def bind(self, model, name, set_attribute: bool = ...) -> None: ... + +class _SortedFieldList: + def __init__(self) -> None: ... + def __getitem__(self, i): ... + def __iter__(self): ... + def __contains__(self, item): ... + def index(self, field): ... + def insert(self, item) -> None: ... + def remove(self, item) -> None: ... + +class SchemaManager: + model: Incomplete + context_options: Incomplete + def __init__(self, model, database: Incomplete | None = ..., **context_options) -> None: ... + @property + def database(self): ... + @database.setter + def database(self, value) -> None: ... + def create_table(self, safe: bool = ..., **options) -> None: ... + def create_table_as(self, table_name, query, safe: bool = ..., **meta) -> None: ... + def drop_table(self, safe: bool = ..., **options) -> None: ... 
+ def truncate_table(self, restart_identity: bool = ..., cascade: bool = ...) -> None: ... + def create_indexes(self, safe: bool = ...) -> None: ... + def drop_indexes(self, safe: bool = ...) -> None: ... + def create_sequence(self, field) -> None: ... + def drop_sequence(self, field) -> None: ... + def create_foreign_key(self, field) -> None: ... + def create_sequences(self) -> None: ... + def create_all(self, safe: bool = ..., **table_options) -> None: ... + def drop_sequences(self) -> None: ... + def drop_all(self, safe: bool = ..., drop_sequences: bool = ..., **options) -> None: ... + +class Metadata: + model: Incomplete + database: Incomplete + fields: Incomplete + columns: Incomplete + combined: Incomplete + sorted_fields: Incomplete + sorted_field_names: Incomplete + defaults: Incomplete + name: Incomplete + table_function: Incomplete + legacy_table_names: Incomplete + table_name: Incomplete + indexes: Incomplete + constraints: Incomplete + primary_key: Incomplete + composite_key: Incomplete + only_save_dirty: Incomplete + depends_on: Incomplete + table_settings: Incomplete + without_rowid: Incomplete + strict_tables: Incomplete + temporary: Incomplete + refs: Incomplete + backrefs: Incomplete + model_refs: Incomplete + model_backrefs: Incomplete + manytomany: Incomplete + options: Incomplete + def __init__( + self, + model, + database: Incomplete | None = ..., + table_name: Incomplete | None = ..., + indexes: Incomplete | None = ..., + primary_key: Incomplete | None = ..., + constraints: Incomplete | None = ..., + schema: Incomplete | None = ..., + only_save_dirty: bool = ..., + depends_on: Incomplete | None = ..., + options: Incomplete | None = ..., + db_table: Incomplete | None = ..., + table_function: Incomplete | None = ..., + table_settings: Incomplete | None = ..., + without_rowid: bool = ..., + temporary: bool = ..., + strict_tables: Incomplete | None = ..., + legacy_table_names: bool = ..., + **kwargs, + ) -> None: ... + def make_table_name(self): ... + def model_graph(self, refs: bool = ..., backrefs: bool = ..., depth_first: bool = ...): ... + def add_ref(self, field) -> None: ... + def remove_ref(self, field) -> None: ... + def add_manytomany(self, field) -> None: ... + def remove_manytomany(self, field) -> None: ... + @property + def table(self): ... + @table.deleter + def table(self) -> None: ... + @property + def schema(self): ... + @schema.setter + def schema(self, value) -> None: ... + @property + def entity(self): ... + def get_rel_for_model(self, model): ... + def add_field(self, field_name, field, set_attribute: bool = ...) -> None: ... + def remove_field(self, field_name) -> None: ... + auto_increment: Incomplete + def set_primary_key(self, name, field) -> None: ... + def get_primary_keys(self): ... + def get_default_dict(self): ... + def fields_to_index(self): ... + def set_database(self, database) -> None: ... + def set_table_name(self, table_name) -> None: ... + +class SubclassAwareMetadata(Metadata): + models: Incomplete + def __init__(self, model, *args, **kwargs) -> None: ... + def map_models(self, fn) -> None: ... + +class DoesNotExist(Exception): ... + +class ModelBase(type): + inheritable: Incomplete + def __new__(cls, name, bases, attrs): ... + def __iter__(self): ... + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def __contains__(self, key): ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... 
+ def __sql__(self, ctx): ... + +class _BoundModelsContext(_callable_context_manager): + models: Incomplete + database: Incomplete + bind_refs: Incomplete + bind_backrefs: Incomplete + def __init__(self, models, database, bind_refs, bind_backrefs) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb) -> None: ... + +class Model(metaclass=ModelBase): + __data__: Incomplete + __rel__: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + @classmethod + def validate_model(cls) -> None: ... + @classmethod + def alias(cls, alias: Incomplete | None = ...): ... + @classmethod + def select(cls, *fields): ... + @classmethod + def update(cls, __data: Incomplete | None = ..., **update): ... + @classmethod + def insert(cls, __data: Incomplete | None = ..., **insert): ... + @classmethod + def insert_many(cls, rows, fields: Incomplete | None = ...): ... + @classmethod + def insert_from(cls, query, fields): ... + @classmethod + def replace(cls, __data: Incomplete | None = ..., **insert): ... + @classmethod + def replace_many(cls, rows, fields: Incomplete | None = ...): ... + @classmethod + def raw(cls, sql, *params): ... + @classmethod + def delete(cls): ... + @classmethod + def create(cls, **query): ... + @classmethod + def bulk_create(cls, model_list, batch_size: Incomplete | None = ...) -> None: ... + @classmethod + def bulk_update(cls, model_list, fields, batch_size: Incomplete | None = ...): ... + @classmethod + def noop(cls): ... + @classmethod + def get(cls, *query, **filters): ... + @classmethod + def get_or_none(cls, *query, **filters): ... + @classmethod + def get_by_id(cls, pk): ... + @classmethod + def set_by_id(cls, key, value): ... + @classmethod + def delete_by_id(cls, pk): ... + @classmethod + def get_or_create(cls, **kwargs): ... + @classmethod + def filter(cls, *dq_nodes, **filters): ... + def get_id(self): ... + def save(self, force_insert: bool = ..., only: Incomplete | None = ...): ... + def is_dirty(self): ... + @property + def dirty_fields(self): ... + def dependencies(self, search_nullable: bool = ...) -> Generator[Incomplete, None, None]: ... + def delete_instance(self, recursive: bool = ..., delete_nullable: bool = ...): ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __sql__(self, ctx): ... + @classmethod + def bind(cls, database, bind_refs: bool = ..., bind_backrefs: bool = ..., _exclude: Incomplete | None = ...): ... + @classmethod + def bind_ctx(cls, database, bind_refs: bool = ..., bind_backrefs: bool = ...): ... + @classmethod + def table_exists(cls): ... + @classmethod + def create_table(cls, safe: bool = ..., **options) -> None: ... + @classmethod + def drop_table(cls, safe: bool = ..., drop_sequences: bool = ..., **options) -> None: ... + @classmethod + def truncate_table(cls, **options) -> None: ... + @classmethod + def index(cls, *fields, **kwargs): ... + @classmethod + def add_index(cls, *fields, **kwargs) -> None: ... + +class ModelAlias(Node): + def __init__(self, model, alias: Incomplete | None = ...) -> None: ... + def __getattr__(self, attr: str): ... + def __setattr__(self, attr: str, value) -> None: ... + def get_field_aliases(self): ... + def select(self, *selection): ... + def __call__(self, **kwargs): ... + def __sql__(self, ctx): ... + +class FieldAlias(Field): + source: Incomplete + model: Incomplete + field: Incomplete + def __init__(self, source, field) -> None: ... + @classmethod + def create(cls, source, field): ... + def clone(self): ... 
+ def adapt(self, value): ... + def python_value(self, value): ... + def db_value(self, value): ... + def __getattr__(self, attr: str): ... + def __sql__(self, ctx): ... + +class _ModelQueryHelper: + default_row_type: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def objects(self, constructor: Incomplete | None = ...) -> None: ... + +class ModelRaw(_ModelQueryHelper, RawQuery): + model: Incomplete + def __init__(self, model, sql, params, **kwargs) -> None: ... + def get(self): ... + +class BaseModelSelect(_ModelQueryHelper): + def union_all(self, rhs): ... + __add__: Incomplete + def union(self, rhs): ... + __or__: Incomplete + def intersect(self, rhs): ... + __and__: Incomplete + def except_(self, rhs): ... + __sub__: Incomplete + def __iter__(self): ... + def prefetch(self, *subqueries): ... + def get(self, database: Incomplete | None = ...): ... + def get_or_none(self, database: Incomplete | None = ...): ... + def group_by(self, *columns) -> None: ... + +class ModelCompoundSelectQuery(BaseModelSelect, CompoundSelectQuery): + model: Incomplete + def __init__(self, model, *args, **kwargs) -> None: ... + +class ModelSelect(BaseModelSelect, Select): + model: Incomplete + def __init__(self, model, fields_or_models, is_default: bool = ...) -> None: ... + def clone(self): ... + def select(self, *fields_or_models): ... + def select_extend(self, *columns): ... + def switch(self, ctx: Incomplete | None = ...): ... + def join( + self, dest, join_type=..., on: Incomplete | None = ..., src: Incomplete | None = ..., attr: Incomplete | None = ... + ) -> None: ... + def left_outer_join(self, dest, on: Incomplete | None = ..., src: Incomplete | None = ..., attr: Incomplete | None = ...): ... + def join_from(self, src, dest, join_type=..., on: Incomplete | None = ..., attr: Incomplete | None = ...): ... + def ensure_join(self, lm, rm, on: Incomplete | None = ..., **join_kwargs): ... + def convert_dict_to_node(self, qdict): ... + def filter(self, *args, **kwargs): ... + def create_table(self, name, safe: bool = ..., **meta): ... + def __sql_selection__(self, ctx, is_subquery: bool = ...): ... + +class NoopModelSelect(ModelSelect): + def __sql__(self, ctx): ... + +class _ModelWriteQueryHelper(_ModelQueryHelper): + model: Incomplete + def __init__(self, model, *args, **kwargs) -> None: ... + def returning(self, *returning): ... + +class ModelUpdate(_ModelWriteQueryHelper, Update): ... + +class ModelInsert(_ModelWriteQueryHelper, Insert): + default_row_type: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def returning(self, *returning): ... + def get_default_data(self): ... + def get_default_columns(self): ... + +class ModelDelete(_ModelWriteQueryHelper, Delete): ... + +class ManyToManyQuery(ModelSelect): + def __init__(self, instance, accessor, rel, *args, **kwargs) -> None: ... + def add(self, value, clear_existing: bool = ...) -> None: ... + def remove(self, value): ... + def clear(self): ... + +class BaseModelCursorWrapper(DictCursorWrapper): + model: Incomplete + select: Incomplete + def __init__(self, cursor, model, columns) -> None: ... + initialize: Incomplete + def process_row(self, row) -> None: ... + +class ModelDictCursorWrapper(BaseModelCursorWrapper): + def process_row(self, row): ... + +class ModelTupleCursorWrapper(ModelDictCursorWrapper): + constructor: Incomplete + def process_row(self, row): ... + +class ModelNamedTupleCursorWrapper(ModelTupleCursorWrapper): + tuple_class: Incomplete + constructor: Incomplete + def initialize(self): ... 
+ +class ModelObjectCursorWrapper(ModelDictCursorWrapper): + constructor: Incomplete + is_model: Incomplete + def __init__(self, cursor, model, select, constructor) -> None: ... + def process_row(self, row): ... + +class ModelCursorWrapper(BaseModelCursorWrapper): + from_list: Incomplete + joins: Incomplete + def __init__(self, cursor, model, select, from_list, joins) -> None: ... + key_to_constructor: Incomplete + src_is_dest: Incomplete + src_to_dest: Incomplete + column_keys: Incomplete + def initialize(self) -> None: ... + def process_row(self, row): ... + +class PrefetchQuery: + def __new__( + cls, + query, + fields: Incomplete | None = ..., + is_backref: Incomplete | None = ..., + rel_models: Incomplete | None = ..., + field_to_name: Incomplete | None = ..., + model: Incomplete | None = ..., + ): ... + def populate_instance(self, instance, id_map) -> None: ... + def store_instance(self, instance, id_map) -> None: ... + +def prefetch(sq, *subqueries): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pep8-naming/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pep8-naming/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..0cceda88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pep8-naming/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +pep8ext_naming.NamingChecker.__getattr__ +pep8ext_naming.NamingChecker.parse_options diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pep8-naming/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pep8-naming/METADATA.toml new file mode 100644 index 00000000..04ddbe7b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pep8-naming/METADATA.toml @@ -0,0 +1,4 @@ +version = "0.13.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pep8-naming/pep8ext_naming.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pep8-naming/pep8ext_naming.pyi new file mode 100644 index 00000000..d1590713 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pep8-naming/pep8ext_naming.pyi @@ -0,0 +1,31 @@ +import ast +from _typeshed import Incomplete +from argparse import Namespace +from collections.abc import Generator, Iterable +from typing import Any + +__version__: str + +PYTHON_VERSION: tuple[int, int, int] +CLASS_METHODS: frozenset[str] +METACLASS_BASES: frozenset[str] +METHOD_CONTAINER_NODES: set[ast.AST] + +class NamingChecker: + name: str + version: str + visitors: Any + decorator_to_type: Any + ignore_names: frozenset[str] + parents: Any + def __init__(self, tree: ast.AST, filename: str) -> None: ... + @classmethod + def add_options(cls, parser: Any) -> None: ... + @classmethod + def parse_options(cls, option: Namespace) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + def tag_class_functions(self, cls_node: ast.ClassDef) -> None: ... + def set_function_nodes_types(self, nodes: Iterable[ast.AST], ismetaclass: bool, late_decoration: dict[str, str]) -> None: ... + def __getattr__(self, name: str) -> Incomplete: ... # incomplete (other attributes are normally not accessed) + +def __getattr__(name: str) -> Incomplete: ... 
# incomplete (other attributes are normally not accessed) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..11f4140c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/@tests/stubtest_allowlist.txt @@ -0,0 +1,30 @@ +# Inherited from stdlib's io module. +pika.compat.StringIO.seek +pika.compat.StringIO.truncate + +# Requires external libraries to be installed. +pika\.adapters\.gevent_connection.* +pika\.adapters\.tornado_connection.* +pika\.adapters\.twisted_connection.* + +# Stubtest doesn't understand that a property alias is also read-only. +pika.BlockingConnection.basic_nack +pika.BlockingConnection.consumer_cancel_notify +pika.BlockingConnection.exchange_exchange_bindings +pika.BlockingConnection.publisher_confirms +pika.adapters.BlockingConnection.basic_nack +pika.adapters.BlockingConnection.consumer_cancel_notify +pika.adapters.BlockingConnection.exchange_exchange_bindings +pika.adapters.BlockingConnection.publisher_confirms +pika.adapters.blocking_connection.BlockingConnection.basic_nack +pika.adapters.blocking_connection.BlockingConnection.consumer_cancel_notify +pika.adapters.blocking_connection.BlockingConnection.exchange_exchange_bindings +pika.adapters.blocking_connection.BlockingConnection.publisher_confirms + +# The implementation has defaults for the arguments that would make the +# created instances unusable, so we require the arguments in the stub. +pika.spec.Queue.DeclareOk.__init__ + +# Arguments have a sentinel default, which is not reflected in the stubs. +pika.ConnectionParameters.__init__ +pika.connection.ConnectionParameters.__init__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/METADATA.toml new file mode 100644 index 00000000..82393b7a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/METADATA.toml @@ -0,0 +1,6 @@ +version = "1.3.*" +stub_distribution = "types-pika-ts" # https://github.com/python/typeshed/issues/9246 +extra_description = """\ +The `types-pika` package contains alternate, more complete type stubs, that \ +are maintained outside of typeshed.\ +""" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/__init__.pyi new file mode 100644 index 00000000..de731d9a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/__init__.pyi @@ -0,0 +1,11 @@ +from pika import adapters as adapters +from pika.adapters import ( + BaseConnection as BaseConnection, + BlockingConnection as BlockingConnection, + SelectConnection as SelectConnection, +) +from pika.adapters.utils.connection_workflow import AMQPConnectionWorkflow as AMQPConnectionWorkflow +from pika.connection import ConnectionParameters as ConnectionParameters, SSLOptions as SSLOptions, URLParameters as URLParameters +from pika.credentials import PlainCredentials as PlainCredentials +from pika.delivery_mode import DeliveryMode as DeliveryMode +from pika.spec import BasicProperties as BasicProperties diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/__init__.pyi new file mode 100644 index 00000000..218d5aae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/__init__.pyi @@ -0,0 +1,3 @@ +from pika.adapters.base_connection import BaseConnection as BaseConnection +from pika.adapters.blocking_connection import BlockingConnection as BlockingConnection +from pika.adapters.select_connection import IOLoop as IOLoop, SelectConnection as SelectConnection diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/asyncio_connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/asyncio_connection.pyi new file mode 100644 index 00000000..621eb6fa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/asyncio_connection.pyi @@ -0,0 +1,53 @@ +from _typeshed import Incomplete +from asyncio import AbstractEventLoop +from collections.abc import Callable +from logging import Logger +from typing_extensions import Self + +from ..connection import Parameters +from .base_connection import BaseConnection +from .utils import io_services_utils, nbio_interface + +LOGGER: Logger + +class AsyncioConnection(BaseConnection): + def __init__( + self, + parameters: Parameters | None = ..., + on_open_callback: Callable[[Self], object] | None = ..., + on_open_error_callback: Callable[[Self, BaseException], object] | None = ..., + on_close_callback: Callable[[Self, BaseException], object] | None = ..., + custom_ioloop: AbstractEventLoop | None = ..., + internal_connection_workflow: bool = ..., + ) -> None: ... + @classmethod + def create_connection( + cls, connection_configs, on_done, custom_ioloop: AbstractEventLoop | None = ..., workflow: Incomplete | None = ... + ): ... + +class _AsyncioIOServicesAdapter( + io_services_utils.SocketConnectionMixin, + io_services_utils.StreamingConnectionMixin, + nbio_interface.AbstractIOServices, + nbio_interface.AbstractFileDescriptorServices, +): + def __init__(self, loop: Incomplete | None = ...) -> None: ... + def get_native_ioloop(self): ... + def close(self) -> None: ... + def run(self) -> None: ... + def stop(self) -> None: ... + def add_callback_threadsafe(self, callback) -> None: ... + def call_later(self, delay, callback): ... + def getaddrinfo(self, host, port, on_done, family: int = ..., socktype: int = ..., proto: int = ..., flags: int = ...): ... + def set_reader(self, fd, on_readable) -> None: ... + def remove_reader(self, fd): ... + def set_writer(self, fd, on_writable) -> None: ... + def remove_writer(self, fd): ... + +class _TimerHandle(nbio_interface.AbstractTimerReference): + def __init__(self, handle) -> None: ... + def cancel(self) -> None: ... + +class _AsyncioIOReference(nbio_interface.AbstractIOReference): + def __init__(self, future, on_done) -> None: ... + def cancel(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/base_connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/base_connection.pyi new file mode 100644 index 00000000..db129fa7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/base_connection.pyi @@ -0,0 +1,36 @@ +import abc +from _typeshed import Incomplete +from collections.abc import Callable +from typing_extensions import Self + +from ..adapters.utils import nbio_interface +from ..connection import Connection + +LOGGER: Incomplete + +class BaseConnection(Connection, metaclass=abc.ABCMeta): + def __init__( + self, + parameters, + on_open_callback: Callable[[Self], object] | None, + on_open_error_callback: Callable[[Self, BaseException], object] | None, + on_close_callback: Callable[[Self, BaseException], object] | None, + nbio, + internal_connection_workflow: bool, + ) -> None: ... + @classmethod + @abc.abstractmethod + def create_connection( + cls, connection_configs, on_done, custom_ioloop: Incomplete | None = ..., workflow: Incomplete | None = ... + ): ... + @property + def ioloop(self): ... + +class _StreamingProtocolShim(nbio_interface.AbstractStreamProtocol): + connection_made: Incomplete + connection_lost: Incomplete + eof_received: Incomplete + data_received: Incomplete + conn: Incomplete + def __init__(self, conn) -> None: ... + def __getattr__(self, attr: str): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/blocking_connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/blocking_connection.pyi new file mode 100644 index 00000000..5f330829 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/blocking_connection.pyi @@ -0,0 +1,246 @@ +from _typeshed import Incomplete +from collections.abc import Generator, Sequence +from typing import NamedTuple +from typing_extensions import Self + +from ..connection import Parameters +from ..data import _ArgumentMapping +from ..exchange_type import ExchangeType +from ..spec import BasicProperties + +LOGGER: Incomplete + +class _CallbackResult: + def __init__(self, value_class: Incomplete | None = ...) -> None: ... + def reset(self) -> None: ... + def __bool__(self) -> bool: ... + __nonzero__: Incomplete + def __enter__(self): ... + def __exit__(self, *args, **kwargs) -> None: ... + def is_ready(self): ... + @property + def ready(self): ... + def signal_once(self, *_args, **_kwargs) -> None: ... + def set_value_once(self, *args, **kwargs) -> None: ... + def append_element(self, *args, **kwargs) -> None: ... + @property + def value(self): ... + @property + def elements(self): ... + +class _IoloopTimerContext: + def __init__(self, duration, connection) -> None: ... + def __enter__(self): ... + def __exit__(self, *_args, **_kwargs) -> None: ... + def is_ready(self): ... + +class _TimerEvt: + timer_id: Incomplete + def __init__(self, callback) -> None: ... + def dispatch(self) -> None: ... + +class _ConnectionBlockedUnblockedEvtBase: + def __init__(self, callback, method_frame) -> None: ... + def dispatch(self) -> None: ... + +class _ConnectionBlockedEvt(_ConnectionBlockedUnblockedEvtBase): ... +class _ConnectionUnblockedEvt(_ConnectionBlockedUnblockedEvtBase): ... 
+ +class BlockingConnection: + class _OnClosedArgs(NamedTuple): + connection: Incomplete + error: Incomplete + + class _OnChannelOpenedArgs(NamedTuple): + channel: Incomplete + def __init__( + self, parameters: Parameters | Sequence[Parameters] | None = ..., _impl_class: Incomplete | None = ... + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: object, value: object, traceback: object) -> None: ... + def add_on_connection_blocked_callback(self, callback) -> None: ... + def add_on_connection_unblocked_callback(self, callback) -> None: ... + def call_later(self, delay, callback): ... + def add_callback_threadsafe(self, callback) -> None: ... + def remove_timeout(self, timeout_id) -> None: ... + def update_secret(self, new_secret, reason) -> None: ... + def close(self, reply_code: int = ..., reply_text: str = ...) -> None: ... + def process_data_events(self, time_limit: int = ...): ... + def sleep(self, duration: float) -> None: ... + def channel(self, channel_number: int | None = ...) -> BlockingChannel: ... + @property + def is_closed(self) -> bool: ... + @property + def is_open(self) -> bool: ... + @property + def basic_nack_supported(self) -> bool: ... + @property + def consumer_cancel_notify_supported(self) -> bool: ... + @property + def exchange_exchange_bindings_supported(self) -> bool: ... + @property + def publisher_confirms_supported(self) -> bool: ... + basic_nack = basic_nack_supported + consumer_cancel_notify = consumer_cancel_notify_supported + exchange_exchange_bindings = exchange_exchange_bindings_supported + publisher_confirms = publisher_confirms_supported + +class _ChannelPendingEvt: ... + +class _ConsumerDeliveryEvt(_ChannelPendingEvt): + method: Incomplete + properties: Incomplete + body: Incomplete + def __init__(self, method, properties, body) -> None: ... + +class _ConsumerCancellationEvt(_ChannelPendingEvt): + method_frame: Incomplete + def __init__(self, method_frame) -> None: ... + @property + def method(self): ... + +class _ReturnedMessageEvt(_ChannelPendingEvt): + callback: Incomplete + channel: Incomplete + method: Incomplete + properties: Incomplete + body: Incomplete + def __init__(self, callback, channel, method, properties, body) -> None: ... + def dispatch(self) -> None: ... + +class ReturnedMessage: + method: Incomplete + properties: Incomplete + body: Incomplete + def __init__(self, method, properties, body) -> None: ... + +class _ConsumerInfo: + SETTING_UP: int + ACTIVE: int + TEARING_DOWN: int + CANCELLED_BY_BROKER: int + consumer_tag: Incomplete + auto_ack: Incomplete + on_message_callback: Incomplete + alternate_event_sink: Incomplete + state: Incomplete + def __init__( + self, consumer_tag, auto_ack, on_message_callback: Incomplete | None = ..., alternate_event_sink: Incomplete | None = ... + ) -> None: ... + @property + def setting_up(self): ... + @property + def active(self): ... + @property + def tearing_down(self): ... + @property + def cancelled_by_broker(self): ... + +class _QueueConsumerGeneratorInfo: + params: Incomplete + consumer_tag: Incomplete + pending_events: Incomplete + def __init__(self, params, consumer_tag) -> None: ... 
+ +class BlockingChannel: + class _RxMessageArgs(NamedTuple): + channel: Incomplete + method: Incomplete + properties: Incomplete + body: Incomplete + + class _MethodFrameCallbackResultArgs(NamedTuple): + method_frame: Incomplete + + class _OnMessageConfirmationReportArgs(NamedTuple): + method_frame: Incomplete + + class _FlowOkCallbackResultArgs(NamedTuple): + active: Incomplete + def __init__(self, channel_impl, connection) -> None: ... + def __int__(self) -> int: ... + def __enter__(self): ... + def __exit__(self, exc_type, value, traceback) -> None: ... + @property + def channel_number(self): ... + @property + def connection(self): ... + @property + def is_closed(self): ... + @property + def is_open(self): ... + @property + def consumer_tags(self): ... + def close(self, reply_code: int = ..., reply_text: str = ...): ... + def flow(self, active): ... + def add_on_cancel_callback(self, callback) -> None: ... + def add_on_return_callback(self, callback): ... + def basic_consume( + self, + queue, + on_message_callback, + auto_ack: bool = ..., + exclusive: bool = ..., + consumer_tag: Incomplete | None = ..., + arguments: Incomplete | None = ..., + ): ... + def basic_cancel(self, consumer_tag): ... + def start_consuming(self) -> None: ... + def stop_consuming(self, consumer_tag: Incomplete | None = ...) -> None: ... + def consume( + self, + queue, + auto_ack: bool = ..., + exclusive: bool = ..., + arguments: Incomplete | None = ..., + inactivity_timeout: Incomplete | None = ..., + ) -> Generator[Incomplete, None, None]: ... + def get_waiting_message_count(self): ... + def cancel(self): ... + def basic_ack(self, delivery_tag: int = ..., multiple: bool = ...) -> None: ... + def basic_nack(self, delivery_tag: int = ..., multiple: bool = ..., requeue: bool = ...) -> None: ... + def basic_get(self, queue, auto_ack: bool = ...): ... + def basic_publish( + self, exchange: str, routing_key: str, body: str | bytes, properties: BasicProperties | None = ..., mandatory: bool = ... + ) -> None: ... + def basic_qos(self, prefetch_size: int = ..., prefetch_count: int = ..., global_qos: bool = ...) -> None: ... + def basic_recover(self, requeue: bool = ...) -> None: ... + def basic_reject(self, delivery_tag: int = ..., requeue: bool = ...) -> None: ... + def confirm_delivery(self) -> None: ... + def exchange_declare( + self, + exchange: str, + exchange_type: ExchangeType | str = ..., + passive: bool = ..., + durable: bool = ..., + auto_delete: bool = ..., + internal: bool = ..., + arguments: _ArgumentMapping | None = ..., + ): ... + def exchange_delete(self, exchange: str | None = ..., if_unused: bool = ...): ... + def exchange_bind(self, destination, source, routing_key: str = ..., arguments: Incomplete | None = ...): ... + def exchange_unbind( + self, + destination: Incomplete | None = ..., + source: Incomplete | None = ..., + routing_key: str = ..., + arguments: Incomplete | None = ..., + ): ... + def queue_declare( + self, + queue, + passive: bool = ..., + durable: bool = ..., + exclusive: bool = ..., + auto_delete: bool = ..., + arguments: Incomplete | None = ..., + ): ... + def queue_delete(self, queue, if_unused: bool = ..., if_empty: bool = ...): ... + def queue_purge(self, queue): ... + def queue_bind(self, queue, exchange, routing_key: Incomplete | None = ..., arguments: Incomplete | None = ...): ... + def queue_unbind( + self, queue, exchange: Incomplete | None = ..., routing_key: Incomplete | None = ..., arguments: Incomplete | None = ... + ): ... + def tx_select(self): ... 
+ def tx_commit(self): ... + def tx_rollback(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/gevent_connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/gevent_connection.pyi new file mode 100644 index 00000000..932bc328 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/gevent_connection.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete + +from pika.adapters.base_connection import BaseConnection +from pika.adapters.utils.nbio_interface import AbstractIOReference +from pika.adapters.utils.selector_ioloop_adapter import AbstractSelectorIOLoop, SelectorIOServicesAdapter + +LOGGER: Incomplete + +class GeventConnection(BaseConnection): + def __init__( + self, + parameters: Incomplete | None = ..., + on_open_callback: Incomplete | None = ..., + on_open_error_callback: Incomplete | None = ..., + on_close_callback: Incomplete | None = ..., + custom_ioloop: Incomplete | None = ..., + internal_connection_workflow: bool = ..., + ) -> None: ... + @classmethod + def create_connection( + cls, connection_configs, on_done, custom_ioloop: Incomplete | None = ..., workflow: Incomplete | None = ... + ): ... + +class _TSafeCallbackQueue: + def __init__(self) -> None: ... + @property + def fd(self): ... + def add_callback_threadsafe(self, callback) -> None: ... + def run_next_callback(self) -> None: ... + +class _GeventSelectorIOLoop(AbstractSelectorIOLoop): + READ: int + WRITE: int + ERROR: int + def __init__(self, gevent_hub: Incomplete | None = ...) -> None: ... + def close(self) -> None: ... + def start(self) -> None: ... + def stop(self) -> None: ... + def add_callback(self, callback) -> None: ... + def call_later(self, delay, callback): ... + def remove_timeout(self, timeout_handle) -> None: ... + def add_handler(self, fd, handler, events) -> None: ... + def update_handler(self, fd, events) -> None: ... + def remove_handler(self, fd) -> None: ... + +class _GeventSelectorIOServicesAdapter(SelectorIOServicesAdapter): + def getaddrinfo(self, host, port, on_done, family: int = ..., socktype: int = ..., proto: int = ..., flags: int = ...): ... + +class _GeventIOLoopIOHandle(AbstractIOReference): + def __init__(self, subject) -> None: ... + def cancel(self): ... + +class _GeventAddressResolver: + def __init__(self, native_loop, host, port, family, socktype, proto, flags, on_done) -> None: ... + def start(self) -> None: ... + def cancel(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/select_connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/select_connection.pyi new file mode 100644 index 00000000..1a094f50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/select_connection.pyi @@ -0,0 +1,99 @@ +import abc +from _typeshed import Incomplete + +import pika.compat +from pika.adapters.base_connection import BaseConnection +from pika.adapters.utils.selector_ioloop_adapter import AbstractSelectorIOLoop + +LOGGER: Incomplete +SELECT_TYPE: Incomplete + +class SelectConnection(BaseConnection): + def __init__( + self, + parameters: Incomplete | None = ..., + on_open_callback: Incomplete | None = ..., + on_open_error_callback: Incomplete | None = ..., + on_close_callback: Incomplete | None = ..., + custom_ioloop: Incomplete | None = ..., + internal_connection_workflow: bool = ..., + ) -> None: ... + @classmethod + def create_connection( + cls, connection_configs, on_done, custom_ioloop: Incomplete | None = ..., workflow: Incomplete | None = ... + ): ... + +class _Timeout: + deadline: Incomplete + callback: Incomplete + def __init__(self, deadline, callback) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __gt__(self, other): ... + def __le__(self, other): ... + def __ge__(self, other): ... + +class _Timer: + def __init__(self) -> None: ... + def close(self) -> None: ... + def call_later(self, delay, callback): ... + def remove_timeout(self, timeout) -> None: ... + def get_remaining_interval(self): ... + def process_timeouts(self) -> None: ... + +class PollEvents: + READ: Incomplete + WRITE: Incomplete + ERROR: Incomplete + +class IOLoop(AbstractSelectorIOLoop): + READ: Incomplete + WRITE: Incomplete + ERROR: Incomplete + def __init__(self) -> None: ... + def close(self) -> None: ... + def call_later(self, delay, callback): ... + def remove_timeout(self, timeout_handle) -> None: ... + def add_callback_threadsafe(self, callback) -> None: ... + add_callback: Incomplete + def process_timeouts(self) -> None: ... + def add_handler(self, fd, handler, events) -> None: ... + def update_handler(self, fd, events) -> None: ... + def remove_handler(self, fd) -> None: ... + def start(self) -> None: ... + def stop(self) -> None: ... + def activate_poller(self) -> None: ... + def deactivate_poller(self) -> None: ... + def poll(self) -> None: ... + +class _PollerBase(pika.compat.AbstractBase, metaclass=abc.ABCMeta): + POLL_TIMEOUT_MULT: int + def __init__(self, get_wait_seconds, process_timeouts) -> None: ... + def close(self) -> None: ... + def wake_threadsafe(self) -> None: ... + def add_handler(self, fileno, handler, events) -> None: ... + def update_handler(self, fileno, events) -> None: ... + def remove_handler(self, fileno) -> None: ... + def activate_poller(self) -> None: ... + def deactivate_poller(self) -> None: ... + def start(self) -> None: ... + def stop(self) -> None: ... + @abc.abstractmethod + def poll(self): ... + +class SelectPoller(_PollerBase): + POLL_TIMEOUT_MULT: int + def poll(self) -> None: ... + +class KQueuePoller(_PollerBase): + def __init__(self, get_wait_seconds, process_timeouts) -> None: ... + def poll(self) -> None: ... + +class PollPoller(_PollerBase): + POLL_TIMEOUT_MULT: int + def __init__(self, get_wait_seconds, process_timeouts) -> None: ... + def poll(self) -> None: ... 
+ +class EPollPoller(PollPoller): + POLL_TIMEOUT_MULT: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/tornado_connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/tornado_connection.pyi new file mode 100644 index 00000000..0e46bdf3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/tornado_connection.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete + +from pika.adapters import base_connection + +LOGGER: Incomplete + +class TornadoConnection(base_connection.BaseConnection): + def __init__( + self, + parameters: Incomplete | None = ..., + on_open_callback: Incomplete | None = ..., + on_open_error_callback: Incomplete | None = ..., + on_close_callback: Incomplete | None = ..., + custom_ioloop: Incomplete | None = ..., + internal_connection_workflow: bool = ..., + ) -> None: ... + @classmethod + def create_connection( + cls, connection_configs, on_done, custom_ioloop: Incomplete | None = ..., workflow: Incomplete | None = ... + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/twisted_connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/twisted_connection.pyi new file mode 100644 index 00000000..1e6a6ab2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/twisted_connection.pyi @@ -0,0 +1,127 @@ +from _typeshed import Incomplete +from typing import Any, NamedTuple +from typing_extensions import TypeAlias + +import pika.connection +from pika.adapters.utils import nbio_interface + +DeferredQueue: TypeAlias = Any # TODO: twisted.internet.defer.DeferredQueue +Protocol: TypeAlias = Any # TODO: twisted.internet.protocol.Protocol + +LOGGER: Incomplete + +class ClosableDeferredQueue(DeferredQueue): + closed: Incomplete + def __init__(self, size: Incomplete | None = ..., backlog: Incomplete | None = ...) -> None: ... + def put(self, obj): ... + def get(self): ... + pending: Incomplete + def close(self, reason) -> None: ... + +class ReceivedMessage(NamedTuple): + channel: Incomplete + method: Incomplete + properties: Incomplete + body: Incomplete + +class TwistedChannel: + on_closed: Incomplete + def __init__(self, channel) -> None: ... + @property + def channel_number(self): ... + @property + def connection(self): ... + @property + def is_closed(self): ... + @property + def is_closing(self): ... + @property + def is_open(self): ... + @property + def flow_active(self): ... + @property + def consumer_tags(self): ... + def callback_deferred(self, deferred, replies) -> None: ... + def add_on_return_callback(self, callback): ... + def basic_ack(self, delivery_tag: int = ..., multiple: bool = ...): ... + def basic_cancel(self, consumer_tag: str = ...): ... + def basic_consume( + self, + queue, + auto_ack: bool = ..., + exclusive: bool = ..., + consumer_tag: Incomplete | None = ..., + arguments: Incomplete | None = ..., + ): ... + def basic_get(self, queue, auto_ack: bool = ...): ... + def basic_nack(self, delivery_tag: Incomplete | None = ..., multiple: bool = ..., requeue: bool = ...): ... + def basic_publish(self, exchange, routing_key, body, properties: Incomplete | None = ..., mandatory: bool = ...): ... + def basic_qos(self, prefetch_size: int = ..., prefetch_count: int = ..., global_qos: bool = ...): ... 
+ def basic_reject(self, delivery_tag, requeue: bool = ...): ... + def basic_recover(self, requeue: bool = ...): ... + def close(self, reply_code: int = ..., reply_text: str = ...): ... + def confirm_delivery(self): ... + def exchange_bind(self, destination, source, routing_key: str = ..., arguments: Incomplete | None = ...): ... + def exchange_declare( + self, + exchange, + exchange_type=..., + passive: bool = ..., + durable: bool = ..., + auto_delete: bool = ..., + internal: bool = ..., + arguments: Incomplete | None = ..., + ): ... + def exchange_delete(self, exchange: Incomplete | None = ..., if_unused: bool = ...): ... + def exchange_unbind( + self, + destination: Incomplete | None = ..., + source: Incomplete | None = ..., + routing_key: str = ..., + arguments: Incomplete | None = ..., + ): ... + def flow(self, active): ... + def open(self): ... + def queue_bind(self, queue, exchange, routing_key: Incomplete | None = ..., arguments: Incomplete | None = ...): ... + def queue_declare( + self, + queue, + passive: bool = ..., + durable: bool = ..., + exclusive: bool = ..., + auto_delete: bool = ..., + arguments: Incomplete | None = ..., + ): ... + def queue_delete(self, queue, if_unused: bool = ..., if_empty: bool = ...): ... + def queue_purge(self, queue): ... + def queue_unbind( + self, queue, exchange: Incomplete | None = ..., routing_key: Incomplete | None = ..., arguments: Incomplete | None = ... + ): ... + def tx_commit(self): ... + def tx_rollback(self): ... + def tx_select(self): ... + +class _TwistedConnectionAdapter(pika.connection.Connection): + def __init__(self, parameters, on_open_callback, on_open_error_callback, on_close_callback, custom_reactor) -> None: ... + def connection_made(self, transport) -> None: ... + def connection_lost(self, error) -> None: ... + def data_received(self, data) -> None: ... + +class TwistedProtocolConnection(Protocol): + ready: Incomplete + closed: Incomplete + def __init__(self, parameters: Incomplete | None = ..., custom_reactor: Incomplete | None = ...) -> None: ... + def channel(self, channel_number: Incomplete | None = ...): ... + @property + def is_open(self): ... + @property + def is_closed(self): ... + def close(self, reply_code: int = ..., reply_text: str = ...): ... + def dataReceived(self, data) -> None: ... + def connectionLost(self, reason=...) -> None: ... + def makeConnection(self, transport) -> None: ... + def connectionReady(self): ... + +class _TimerHandle(nbio_interface.AbstractTimerReference): + def __init__(self, handle) -> None: ... + def cancel(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/connection_workflow.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/connection_workflow.pyi new file mode 100644 index 00000000..e68104c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/connection_workflow.pyi @@ -0,0 +1,37 @@ +from _typeshed import Incomplete + +import pika.compat + +class AMQPConnectorException(Exception): ... +class AMQPConnectorStackTimeout(AMQPConnectorException): ... +class AMQPConnectorAborted(AMQPConnectorException): ... 
+class AMQPConnectorWrongState(AMQPConnectorException): ... + +class AMQPConnectorPhaseErrorBase(AMQPConnectorException): + exception: Incomplete + def __init__(self, exception, *args) -> None: ... + +class AMQPConnectorSocketConnectError(AMQPConnectorPhaseErrorBase): ... +class AMQPConnectorTransportSetupError(AMQPConnectorPhaseErrorBase): ... +class AMQPConnectorAMQPHandshakeError(AMQPConnectorPhaseErrorBase): ... +class AMQPConnectionWorkflowAborted(AMQPConnectorException): ... +class AMQPConnectionWorkflowWrongState(AMQPConnectorException): ... + +class AMQPConnectionWorkflowFailed(AMQPConnectorException): + exceptions: Incomplete + def __init__(self, exceptions, *args) -> None: ... + +class AMQPConnector: + def __init__(self, conn_factory, nbio) -> None: ... + def start(self, addr_record, conn_params, on_done) -> None: ... + def abort(self) -> None: ... + +class AbstractAMQPConnectionWorkflow(pika.compat.AbstractBase): + def start(self, connection_configs, connector_factory, native_loop, on_done) -> None: ... + def abort(self) -> None: ... + +class AMQPConnectionWorkflow(AbstractAMQPConnectionWorkflow): + def __init__(self, _until_first_amqp_attempt: bool = ...) -> None: ... + def set_io_services(self, nbio) -> None: ... + def start(self, connection_configs, connector_factory, native_loop, on_done) -> None: ... + def abort(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/io_services_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/io_services_utils.pyi new file mode 100644 index 00000000..81f8ddc6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/io_services_utils.pyi @@ -0,0 +1,46 @@ +import abc +from _typeshed import Incomplete + +from pika.adapters.utils.nbio_interface import AbstractIOReference, AbstractStreamTransport + +def check_callback_arg(callback, name) -> None: ... +def check_fd_arg(fd) -> None: ... + +class SocketConnectionMixin: + def connect_socket(self, sock, resolved_addr, on_done): ... + +class StreamingConnectionMixin: + def create_streaming_connection( + self, protocol_factory, sock, on_done, ssl_context: Incomplete | None = ..., server_hostname: Incomplete | None = ... + ): ... + +class _AsyncServiceAsyncHandle(AbstractIOReference): + def __init__(self, subject) -> None: ... + def cancel(self): ... + +class _AsyncSocketConnector: + def __init__(self, nbio, sock, resolved_addr, on_done) -> None: ... + def start(self): ... + def cancel(self): ... + +class _AsyncStreamConnector: + def __init__(self, nbio, protocol_factory, sock, ssl_context, server_hostname, on_done) -> None: ... + def start(self): ... + def cancel(self): ... + +class _AsyncTransportBase(AbstractStreamTransport, metaclass=abc.ABCMeta): + class RxEndOfFile(OSError): + def __init__(self) -> None: ... + + def __init__(self, sock, protocol, nbio) -> None: ... + def abort(self) -> None: ... + def get_protocol(self): ... + def get_write_buffer_size(self): ... + +class _AsyncPlaintextTransport(_AsyncTransportBase): + def __init__(self, sock, protocol, nbio) -> None: ... + def write(self, data) -> None: ... + +class _AsyncSSLTransport(_AsyncTransportBase): + def __init__(self, sock, protocol, nbio) -> None: ... + def write(self, data) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/nbio_interface.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/nbio_interface.pyi new file mode 100644 index 00000000..450da2c5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/nbio_interface.pyi @@ -0,0 +1,64 @@ +import abc +from _typeshed import Incomplete + +import pika.compat + +class AbstractIOServices(pika.compat.AbstractBase, metaclass=abc.ABCMeta): + @abc.abstractmethod + def get_native_ioloop(self): ... + @abc.abstractmethod + def close(self): ... + @abc.abstractmethod + def run(self): ... + @abc.abstractmethod + def stop(self): ... + @abc.abstractmethod + def add_callback_threadsafe(self, callback): ... + @abc.abstractmethod + def call_later(self, delay, callback): ... + @abc.abstractmethod + def getaddrinfo(self, host, port, on_done, family: int = ..., socktype: int = ..., proto: int = ..., flags: int = ...): ... + @abc.abstractmethod + def connect_socket(self, sock, resolved_addr, on_done): ... + @abc.abstractmethod + def create_streaming_connection( + self, protocol_factory, sock, on_done, ssl_context: Incomplete | None = ..., server_hostname: Incomplete | None = ... + ): ... + +class AbstractFileDescriptorServices(pika.compat.AbstractBase, metaclass=abc.ABCMeta): + @abc.abstractmethod + def set_reader(self, fd, on_readable): ... + @abc.abstractmethod + def remove_reader(self, fd): ... + @abc.abstractmethod + def set_writer(self, fd, on_writable): ... + @abc.abstractmethod + def remove_writer(self, fd): ... + +class AbstractTimerReference(pika.compat.AbstractBase, metaclass=abc.ABCMeta): + @abc.abstractmethod + def cancel(self): ... + +class AbstractIOReference(pika.compat.AbstractBase, metaclass=abc.ABCMeta): + @abc.abstractmethod + def cancel(self): ... + +class AbstractStreamProtocol(pika.compat.AbstractBase, metaclass=abc.ABCMeta): + @abc.abstractmethod + def connection_made(self, transport): ... + @abc.abstractmethod + def connection_lost(self, error): ... + @abc.abstractmethod + def eof_received(self): ... + @abc.abstractmethod + def data_received(self, data): ... + +class AbstractStreamTransport(pika.compat.AbstractBase, metaclass=abc.ABCMeta): + @abc.abstractmethod + def abort(self): ... + @abc.abstractmethod + def get_protocol(self): ... + @abc.abstractmethod + def write(self, data): ... + @abc.abstractmethod + def get_write_buffer_size(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi new file mode 100644 index 00000000..3358c4df --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/adapters/utils/selector_ioloop_adapter.pyi @@ -0,0 +1,76 @@ +import abc +from _typeshed import Incomplete + +from pika.adapters.utils import io_services_utils, nbio_interface + +LOGGER: Incomplete + +class AbstractSelectorIOLoop(metaclass=abc.ABCMeta): + @property + @abc.abstractmethod + def READ(self): ... + @property + @abc.abstractmethod + def WRITE(self): ... + @property + @abc.abstractmethod + def ERROR(self): ... + @abc.abstractmethod + def close(self): ... + @abc.abstractmethod + def start(self): ... + @abc.abstractmethod + def stop(self): ... 
+ @abc.abstractmethod + def call_later(self, delay, callback): ... + @abc.abstractmethod + def remove_timeout(self, timeout_handle): ... + @abc.abstractmethod + def add_callback(self, callback): ... + @abc.abstractmethod + def add_handler(self, fd, handler, events): ... + @abc.abstractmethod + def update_handler(self, fd, events): ... + @abc.abstractmethod + def remove_handler(self, fd): ... + +class SelectorIOServicesAdapter( + io_services_utils.SocketConnectionMixin, + io_services_utils.StreamingConnectionMixin, + nbio_interface.AbstractIOServices, + nbio_interface.AbstractFileDescriptorServices, +): + def __init__(self, native_loop) -> None: ... + def get_native_ioloop(self): ... + def close(self) -> None: ... + def run(self) -> None: ... + def stop(self) -> None: ... + def add_callback_threadsafe(self, callback) -> None: ... + def call_later(self, delay, callback): ... + def getaddrinfo(self, host, port, on_done, family: int = ..., socktype: int = ..., proto: int = ..., flags: int = ...): ... + def set_reader(self, fd, on_readable) -> None: ... + def remove_reader(self, fd): ... + def set_writer(self, fd, on_writable) -> None: ... + def remove_writer(self, fd): ... + +class _FileDescriptorCallbacks: + reader: Incomplete + writer: Incomplete + def __init__(self, reader: Incomplete | None = ..., writer: Incomplete | None = ...) -> None: ... + +class _TimerHandle(nbio_interface.AbstractTimerReference): + def __init__(self, handle, loop) -> None: ... + def cancel(self) -> None: ... + +class _SelectorIOLoopIOHandle(nbio_interface.AbstractIOReference): + def __init__(self, subject) -> None: ... + def cancel(self): ... + +class _AddressResolver: + NOT_STARTED: int + ACTIVE: int + CANCELED: int + COMPLETED: int + def __init__(self, native_loop, host, port, family, socktype, proto, flags, on_done) -> None: ... + def start(self): ... + def cancel(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/amqp_object.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/amqp_object.pyi new file mode 100644 index 00000000..7236b7a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/amqp_object.pyi @@ -0,0 +1,18 @@ +from typing import ClassVar + +class AMQPObject: + NAME: ClassVar[str] + INDEX: ClassVar[int | None] + def __eq__(self, other: AMQPObject | None) -> bool: ... # type: ignore[override] + +class Class(AMQPObject): ... + +class Method(AMQPObject): + # This is a class attribute in the implementation, but subclasses use @property, + # so it's more convenient to use that here as well. + @property + def synchronous(self) -> bool: ... + def get_properties(self) -> Properties: ... + def get_body(self) -> str: ... + +class Properties(AMQPObject): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/callback.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/callback.pyi new file mode 100644 index 00000000..593ad4b2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/callback.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete + +LOGGER: Incomplete + +def name_or_value(value): ... +def sanitize_prefix(function): ... +def check_for_prefix_and_key(function): ... + +class CallbackManager: + CALLS: str + ARGUMENTS: str + DUPLICATE_WARNING: str + CALLBACK: str + ONE_SHOT: str + ONLY_CALLER: str + def __init__(self) -> None: ... 
+ def add( + self, + prefix, + key, + callback, + one_shot: bool = ..., + only_caller: Incomplete | None = ..., + arguments: Incomplete | None = ..., + ): ... + def clear(self) -> None: ... + def cleanup(self, prefix): ... + def pending(self, prefix, key): ... + def process(self, prefix, key, caller, *args, **keywords): ... + def remove(self, prefix, key, callback_value: Incomplete | None = ..., arguments: Incomplete | None = ...): ... + def remove_all(self, prefix, key) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/channel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/channel.pyi new file mode 100644 index 00000000..3f28e847 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/channel.pyi @@ -0,0 +1,156 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from logging import Logger +from typing import Any +from typing_extensions import Final, Self + +from .callback import CallbackManager +from .connection import Connection +from .data import _ArgumentMapping +from .exchange_type import ExchangeType +from .frame import Body, Header, Method +from .spec import Basic, BasicProperties, Confirm, Exchange, Queue, Tx + +LOGGER: Logger +MAX_CHANNELS: Final[int] + +class Channel: + CLOSED: Final = 0 + OPENING: Final = 1 + OPEN: Final = 2 + CLOSING: Final = 3 + + channel_number: int + callbacks: CallbackManager + connection: Connection + flow_active: bool + + def __init__(self, connection: Connection, channel_number: int, on_open_callback: Callable[[Self], object]) -> None: ... + def __int__(self) -> int: ... + def add_callback(self, callback, replies, one_shot: bool = ...) -> None: ... + def add_on_cancel_callback(self, callback) -> None: ... + def add_on_close_callback(self, callback) -> None: ... + def add_on_flow_callback(self, callback) -> None: ... + def add_on_return_callback(self, callback) -> None: ... + def basic_ack(self, delivery_tag: int = ..., multiple: bool = ...) -> None: ... + def basic_cancel( + self, consumer_tag: str = ..., callback: Callable[[Method[Basic.CancelOk]], object] | None = ... + ) -> None: ... + def basic_consume( + self, + queue: str, + on_message_callback: Callable[[Channel, Basic.Deliver, BasicProperties, bytes], object], + auto_ack: bool = ..., + exclusive: bool = ..., + consumer_tag: str | None = ..., + arguments: _ArgumentMapping | None = ..., + callback: Callable[[Method[Basic.ConsumeOk]], object] | None = ..., + ) -> str: ... + def basic_get( + self, queue: str, callback: Callable[[Channel, Basic.GetOk, BasicProperties, bytes], object], auto_ack: bool = ... + ) -> None: ... + def basic_nack(self, delivery_tag: int = ..., multiple: bool = ..., requeue: bool = ...) -> None: ... + def basic_publish( + self, exchange: str, routing_key: str, body: str | bytes, properties: BasicProperties | None = ..., mandatory: bool = ... + ) -> None: ... + def basic_qos( + self, + prefetch_size: int = ..., + prefetch_count: int = ..., + global_qos: bool = ..., + callback: Callable[[Method[Basic.QosOk]], object] | None = ..., + ) -> None: ... + def basic_reject(self, delivery_tag: int = ..., requeue: bool = ...) -> None: ... + def basic_recover(self, requeue: bool = ..., callback: Callable[[Method[Basic.RecoverOk]], object] | None = ...) -> None: ... + def close(self, reply_code: int = ..., reply_text: str = ...) -> None: ... 
+ def confirm_delivery( + self, + ack_nack_callback: Callable[[Method[Basic.Ack | Basic.Nack]], object], + callback: Callable[[Method[Confirm.SelectOk]], object] | None = ..., + ) -> None: ... + @property + def consumer_tags(self) -> list[str]: ... + def exchange_bind( + self, + destination: str, + source: str, + routing_key: str = ..., + arguments: _ArgumentMapping | None = ..., + callback: Callable[[Method[Exchange.BindOk]], object] | None = ..., + ) -> None: ... + def exchange_declare( + self, + exchange: str, + exchange_type: ExchangeType | str = ..., + passive: bool = ..., + durable: bool = ..., + auto_delete: bool = ..., + internal: bool = ..., + arguments: _ArgumentMapping | None = ..., + callback: Callable[[Method[Exchange.DeclareOk]], object] | None = ..., + ) -> None: ... + def exchange_delete( + self, + exchange: str | None = ..., + if_unused: bool = ..., + callback: Callable[[Method[Exchange.DeleteOk]], object] | None = ..., + ) -> None: ... + def exchange_unbind( + self, + destination: str | None = ..., + source: str | None = ..., + routing_key: str = ..., + arguments: _ArgumentMapping | None = ..., + callback: Callable[[Method[Exchange.UnbindOk]], object] | None = ..., + ) -> None: ... + def flow(self, active: bool, callback: Callable[[bool], object] | None = ...) -> None: ... + @property + def is_closed(self) -> bool: ... + @property + def is_closing(self) -> bool: ... + @property + def is_open(self) -> bool: ... + @property + def is_opening(self) -> bool: ... + def open(self) -> None: ... + def queue_bind( + self, + queue: str, + exchange: str, + routing_key: str | None = ..., + arguments: _ArgumentMapping | None = ..., + callback: Callable[[Method[Queue.BindOk]], object] | None = ..., + ) -> None: ... + def queue_declare( + self, + queue: str, + passive: bool = ..., + durable: bool = ..., + exclusive: bool = ..., + auto_delete: bool = ..., + arguments: _ArgumentMapping | None = ..., + callback: Callable[[Method[Queue.DeclareOk]], object] | None = ..., + ) -> None: ... + def queue_delete( + self, + queue: str, + if_unused: bool = ..., + if_empty: bool = ..., + callback: Callable[[Method[Queue.DeleteOk]], object] | None = ..., + ) -> None: ... + def queue_purge(self, queue: str, callback: Callable[[Method[Queue.PurgeOk]], object] | None = ...) -> None: ... + def queue_unbind( + self, + queue: str, + exchange: str | None = ..., + routing_key: str | None = ..., + arguments: _ArgumentMapping | None = ..., + callback: Callable[[Method[Queue.UnbindOk]], object] | None = ..., + ): ... + def tx_commit(self, callback: Callable[[Method[Tx.CommitOk]], object] | None = ...) -> None: ... + def tx_rollback(self, callback: Callable[[Method[Tx.RollbackOk]], object] | None = ...) -> None: ... + def tx_select(self, callback: Callable[[Method[Tx.SelectOk]], object] | None = ...) -> None: ... + +class ContentFrameAssembler: + def __init__(self) -> None: ... + def process(self, frame_value: Method[Any] | Header | Body) -> tuple[Incomplete, Incomplete, bytes] | None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/compat.pyi new file mode 100644 index 00000000..de6dd8e9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/compat.pyi @@ -0,0 +1,49 @@ +from abc import ABCMeta +from collections.abc import ItemsView, Mapping, ValuesView +from io import StringIO as StringIO +from re import Pattern +from typing import Any, TypeVar +from typing_extensions import Final, Literal, SupportsIndex, TypeGuard +from urllib.parse import parse_qs, quote, unquote, urlencode as urlencode, urlparse as urlparse + +_KT = TypeVar("_KT") +_VT_co = TypeVar("_VT_co", covariant=True) + +url_quote = quote +url_unquote = unquote +url_parse_qs = parse_qs + +PY2: Final[Literal[False]] +PY3: Final[Literal[True]] +RE_NUM: Final[Pattern[str]] +ON_LINUX: Final[bool] +ON_OSX: Final[bool] +ON_WINDOWS: Final[bool] + +class AbstractBase(metaclass=ABCMeta): ... + +SOCKET_ERROR = OSError +SOL_TCP: Final[int] +basestring: Final[tuple[type[str]]] +str_or_bytes: Final[tuple[type[str], type[bytes]]] +xrange = range +unicode_type = str + +def time_now() -> float: ... +def dictkeys(dct: Mapping[_KT, Any]) -> list[_KT]: ... +def dictvalues(dct: Mapping[Any, _VT_co]) -> list[_VT_co]: ... +def dict_iteritems(dct: Mapping[_KT, _VT_co]) -> ItemsView[_KT, _VT_co]: ... +def dict_itervalues(dct: Mapping[Any, _VT_co]) -> ValuesView[_VT_co]: ... +def byte(*args: SupportsIndex) -> bytes: ... + +class long(int): ... + +def canonical_str(value: object) -> str: ... +def is_integer(value: object) -> TypeGuard[int]: ... +def as_bytes(value: str | bytes) -> bytes: ... +def to_digit(value: str) -> int: ... +def get_linux_version(release_str: str) -> tuple[int, int, int]: ... + +HAVE_SIGNAL: Final[bool] +EINTR_IS_EXPOSED: Final[Literal[False]] +LINUX_VERSION: tuple[int, int, int] | None diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/connection.pyi new file mode 100644 index 00000000..21024b42 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/connection.pyi @@ -0,0 +1,187 @@ +import abc +from _typeshed import Incomplete +from collections.abc import Callable +from logging import Logger +from typing_extensions import Final, Self + +from .callback import CallbackManager +from .channel import Channel +from .compat import AbstractBase +from .credentials import _Credentials +from .frame import Method +from .spec import Connection as SpecConnection + +PRODUCT: str +LOGGER: Logger + +class Parameters: + DEFAULT_USERNAME: str + DEFAULT_PASSWORD: str + DEFAULT_BLOCKED_CONNECTION_TIMEOUT: Incomplete + DEFAULT_CHANNEL_MAX: Incomplete + DEFAULT_CLIENT_PROPERTIES: Incomplete + DEFAULT_CREDENTIALS: Incomplete + DEFAULT_CONNECTION_ATTEMPTS: int + DEFAULT_FRAME_MAX: Incomplete + DEFAULT_HEARTBEAT_TIMEOUT: Incomplete + DEFAULT_HOST: str + DEFAULT_LOCALE: str + DEFAULT_PORT: int + DEFAULT_RETRY_DELAY: float + DEFAULT_SOCKET_TIMEOUT: float + DEFAULT_STACK_TIMEOUT: float + DEFAULT_SSL: bool + DEFAULT_SSL_OPTIONS: Incomplete + DEFAULT_SSL_PORT: int + DEFAULT_VIRTUAL_HOST: str + DEFAULT_TCP_OPTIONS: Incomplete + def __init__(self) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... 
+ @property + def blocked_connection_timeout(self) -> float | None: ... + @blocked_connection_timeout.setter + def blocked_connection_timeout(self, value: float | None) -> None: ... + @property + def channel_max(self) -> int: ... + @channel_max.setter + def channel_max(self, value: int) -> None: ... + @property + def client_properties(self) -> dict[Incomplete, Incomplete] | None: ... + @client_properties.setter + def client_properties(self, value: dict[Incomplete, Incomplete] | None) -> None: ... + @property + def connection_attempts(self) -> int: ... + @connection_attempts.setter + def connection_attempts(self, value: int) -> None: ... + @property + def credentials(self) -> _Credentials: ... + @credentials.setter + def credentials(self, value: _Credentials) -> None: ... + @property + def frame_max(self) -> int: ... + @frame_max.setter + def frame_max(self, value: int) -> None: ... + @property + def heartbeat(self) -> int | Callable[[Connection, int], int] | None: ... + @heartbeat.setter + def heartbeat(self, value: int | Callable[[Connection, int], int] | None) -> None: ... + @property + def host(self) -> str: ... + @host.setter + def host(self, value: str) -> None: ... + @property + def locale(self) -> str: ... + @locale.setter + def locale(self, value: str) -> None: ... + @property + def port(self) -> int: ... + @port.setter + def port(self, value: int | str) -> None: ... + @property + def retry_delay(self) -> int | float: ... + @retry_delay.setter + def retry_delay(self, value: float) -> None: ... + @property + def socket_timeout(self) -> float | None: ... + @socket_timeout.setter + def socket_timeout(self, value: float | None) -> None: ... + @property + def stack_timeout(self) -> float | None: ... + @stack_timeout.setter + def stack_timeout(self, value: float | None) -> None: ... + @property + def ssl_options(self) -> SSLOptions | None: ... + @ssl_options.setter + def ssl_options(self, value: SSLOptions | None) -> None: ... + @property + def virtual_host(self) -> str: ... + @virtual_host.setter + def virtual_host(self, value: str) -> None: ... + @property + def tcp_options(self) -> dict[Incomplete, Incomplete] | None: ... + @tcp_options.setter + def tcp_options(self, value: dict[Incomplete, Incomplete] | None) -> None: ... + +class ConnectionParameters(Parameters): + def __init__( + self, + host: str = ..., + port: int | str = ..., + virtual_host: str = ..., + credentials: _Credentials = ..., + channel_max: int = ..., + frame_max: int = ..., + heartbeat: int | Callable[[Connection, int], int] | None = ..., + ssl_options: SSLOptions | None = ..., + connection_attempts: int = ..., + retry_delay: float = ..., + socket_timeout: float | None = ..., + stack_timeout: float | None = ..., + locale: str = ..., + blocked_connection_timeout: float | None = ..., + client_properties: dict[Incomplete, Incomplete] | None = ..., + tcp_options: dict[Incomplete, Incomplete] | None = ..., + ) -> None: ... + +class URLParameters(Parameters): + def __init__(self, url: str) -> None: ... + +class SSLOptions: + context: Incomplete + server_hostname: Incomplete + def __init__(self, context, server_hostname: Incomplete | None = ...) -> None: ... 
+ +class Connection(AbstractBase, metaclass=abc.ABCMeta): + ON_CONNECTION_CLOSED: Final[str] + ON_CONNECTION_ERROR: Final[str] + ON_CONNECTION_OPEN_OK: Final[str] + CONNECTION_CLOSED: Final[int] + CONNECTION_INIT: Final[int] + CONNECTION_PROTOCOL: Final[int] + CONNECTION_START: Final[int] + CONNECTION_TUNE: Final[int] + CONNECTION_OPEN: Final[int] + CONNECTION_CLOSING: Final[int] + connection_state: int # one of the constants above + params: Parameters + callbacks: CallbackManager + server_capabilities: Incomplete + server_properties: Incomplete + known_hosts: Incomplete + def __init__( + self, + parameters: Parameters | None = ..., + on_open_callback: Callable[[Self], object] | None = ..., + on_open_error_callback: Callable[[Self, BaseException], object] | None = ..., + on_close_callback: Callable[[Self, BaseException], object] | None = ..., + internal_connection_workflow: bool = ..., + ) -> None: ... + def add_on_close_callback(self, callback: Callable[[Self, BaseException], object]) -> None: ... + def add_on_connection_blocked_callback(self, callback: Callable[[Self, Method[SpecConnection.Blocked]], object]) -> None: ... + def add_on_connection_unblocked_callback( + self, callback: Callable[[Self, Method[SpecConnection.Unblocked]], object] + ) -> None: ... + def add_on_open_callback(self, callback: Callable[[Self], object]) -> None: ... + def add_on_open_error_callback( + self, callback: Callable[[Self, BaseException], object], remove_default: bool = ... + ) -> None: ... + def channel( + self, channel_number: int | None = ..., on_open_callback: Callable[[Channel], object] | None = ... + ) -> Channel: ... + def update_secret(self, new_secret, reason, callback: Incomplete | None = ...) -> None: ... + def close(self, reply_code: int = ..., reply_text: str = ...) -> None: ... + @property + def is_closed(self) -> bool: ... + @property + def is_closing(self) -> bool: ... + @property + def is_open(self) -> bool: ... + @property + def basic_nack(self) -> bool: ... + @property + def consumer_cancel_notify(self) -> bool: ... + @property + def exchange_exchange_bindings(self) -> bool: ... + @property + def publisher_confirms(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/credentials.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/credentials.pyi new file mode 100644 index 00000000..b813f7c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/credentials.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from logging import Logger +from typing import ClassVar +from typing_extensions import TypeAlias + +from .spec import Connection + +# TODO: This could be turned into a protocol. +_Credentials: TypeAlias = Incomplete # noqa: Y047 + +LOGGER: Logger + +class PlainCredentials: + TYPE: ClassVar[str] + username: str + password: str + erase_on_connect: bool + def __init__(self, username: str, password: str, erase_on_connect: bool = ...) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def response_for(self, start: Connection.Start) -> tuple[str | None, bytes | None]: ... + def erase_credentials(self) -> None: ... + +class ExternalCredentials: + TYPE: ClassVar[str] + erase_on_connect: bool + def __init__(self) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... 
+ def response_for(self, start: Connection.Start) -> tuple[str | None, bytes | None]: ... + def erase_credentials(self) -> None: ... + +VALID_TYPES: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/data.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/data.pyi new file mode 100644 index 00000000..c32cde38 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/data.pyi @@ -0,0 +1,14 @@ +from collections.abc import Mapping +from datetime import datetime +from decimal import Decimal +from typing_extensions import TypeAlias + +_Value: TypeAlias = str | bytes | bool | int | Decimal | datetime | _ArgumentMapping | list[_Value] | None +_ArgumentMapping: TypeAlias = Mapping[str, _Value] + +def encode_short_string(pieces: list[bytes], value: str | bytes) -> int: ... +def decode_short_string(encoded: bytes, offset: int) -> tuple[str, int]: ... +def encode_table(pieces: list[bytes], table: _ArgumentMapping) -> int: ... +def encode_value(pieces: list[bytes], value: _Value) -> int: ... +def decode_table(encoded: bytes, offset: int) -> tuple[dict[str, _Value], int]: ... +def decode_value(encoded: bytes, offset: int) -> tuple[_Value, int]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/delivery_mode.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/delivery_mode.pyi new file mode 100644 index 00000000..8395e412 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/delivery_mode.pyi @@ -0,0 +1,5 @@ +from enum import Enum + +class DeliveryMode(Enum): + Transient: int + Persistent: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/diagnostic_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/diagnostic_utils.pyi new file mode 100644 index 00000000..7c8d2279 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/diagnostic_utils.pyi @@ -0,0 +1 @@ +def create_log_exception_decorator(logger): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/exceptions.pyi new file mode 100644 index 00000000..23bf3a0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/exceptions.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete + +class AMQPError(Exception): ... +class AMQPConnectionError(AMQPError): ... +class ConnectionOpenAborted(AMQPConnectionError): ... +class StreamLostError(AMQPConnectionError): ... +class IncompatibleProtocolError(AMQPConnectionError): ... +class AuthenticationError(AMQPConnectionError): ... +class ProbableAuthenticationError(AMQPConnectionError): ... +class ProbableAccessDeniedError(AMQPConnectionError): ... +class NoFreeChannels(AMQPConnectionError): ... +class ConnectionWrongStateError(AMQPConnectionError): ... + +class ConnectionClosed(AMQPConnectionError): + def __init__(self, reply_code, reply_text) -> None: ... + @property + def reply_code(self): ... + @property + def reply_text(self): ... + +class ConnectionClosedByBroker(ConnectionClosed): ... +class ConnectionClosedByClient(ConnectionClosed): ... +class ConnectionBlockedTimeout(AMQPConnectionError): ... 
+class AMQPHeartbeatTimeout(AMQPConnectionError): ... +class AMQPChannelError(AMQPError): ... +class ChannelWrongStateError(AMQPChannelError): ... + +class ChannelClosed(AMQPChannelError): + def __init__(self, reply_code, reply_text) -> None: ... + @property + def reply_code(self): ... + @property + def reply_text(self): ... + +class ChannelClosedByBroker(ChannelClosed): ... +class ChannelClosedByClient(ChannelClosed): ... +class DuplicateConsumerTag(AMQPChannelError): ... +class ConsumerCancelled(AMQPChannelError): ... + +class UnroutableError(AMQPChannelError): + messages: Incomplete + def __init__(self, messages) -> None: ... + +class NackError(AMQPChannelError): + messages: Incomplete + def __init__(self, messages) -> None: ... + +class InvalidChannelNumber(AMQPError): ... +class ProtocolSyntaxError(AMQPError): ... +class UnexpectedFrameError(ProtocolSyntaxError): ... +class ProtocolVersionMismatch(ProtocolSyntaxError): ... +class BodyTooLongError(ProtocolSyntaxError): ... +class InvalidFrameError(ProtocolSyntaxError): ... +class InvalidFieldTypeException(ProtocolSyntaxError): ... +class UnsupportedAMQPFieldException(ProtocolSyntaxError): ... +class MethodNotImplemented(AMQPError): ... +class ChannelError(Exception): ... +class ReentrancyError(Exception): ... +class ShortStringTooLong(AMQPError): ... +class DuplicateGetOkCallback(ChannelError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/exchange_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/exchange_type.pyi new file mode 100644 index 00000000..73cbf368 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/exchange_type.pyi @@ -0,0 +1,7 @@ +from enum import Enum + +class ExchangeType(Enum): + direct: str + fanout: str + headers: str + topic: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/frame.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/frame.pyi new file mode 100644 index 00000000..e448dc8e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/frame.pyi @@ -0,0 +1,47 @@ +from abc import abstractmethod +from logging import Logger +from typing import Generic, TypeVar + +from .amqp_object import AMQPObject, Method as AMQPMethod +from .spec import BasicProperties + +_M = TypeVar("_M", bound=AMQPMethod) + +LOGGER: Logger + +class Frame(AMQPObject): + frame_type: int + channel_number: int + def __init__(self, frame_type: int, channel_number: int) -> None: ... + @abstractmethod + def marshal(self) -> bytes: ... + +class Method(Frame, Generic[_M]): + method: _M + def __init__(self, channel_number: int, method: _M) -> None: ... + def marshal(self) -> bytes: ... + +class Header(Frame): + body_size: int + properties: BasicProperties + def __init__(self, channel_number: int, body_size: int, props: BasicProperties) -> None: ... + def marshal(self) -> bytes: ... + +class Body(Frame): + fragment: bytes + def __init__(self, channel_number: int, fragment: bytes) -> None: ... + def marshal(self) -> bytes: ... + +class Heartbeat(Frame): + def __init__(self) -> None: ... + def marshal(self) -> bytes: ... + +class ProtocolHeader(AMQPObject): + frame_type: int + major: int + minor: int + revision: int + def __init__(self, major: int | None = ..., minor: int | None = ..., revision: int | None = ...) -> None: ... + def marshal(self) -> bytes: ... 
+ +def decode_frame(data_in: bytes) -> tuple[int, Frame | None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/heartbeat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/heartbeat.pyi new file mode 100644 index 00000000..7d1d7309 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/heartbeat.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +LOGGER: Incomplete + +class HeartbeatChecker: + def __init__(self, connection, timeout) -> None: ... + @property + def bytes_received_on_connection(self): ... + @property + def connection_is_idle(self): ... + def received(self) -> None: ... + def stop(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/spec.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/spec.pyi new file mode 100644 index 00000000..c213867c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/spec.pyi @@ -0,0 +1,920 @@ +from _typeshed import Incomplete +from typing import ClassVar +from typing_extensions import Literal, Self, TypeAlias + +from .amqp_object import Class, Method, Properties + +# Ouch. Since str = bytes at runtime, we need a type alias for "str". +_str: TypeAlias = str # noqa: Y042 +str = bytes + +PROTOCOL_VERSION: Incomplete +PORT: int +ACCESS_REFUSED: int +CHANNEL_ERROR: int +COMMAND_INVALID: int +CONNECTION_FORCED: int +CONTENT_TOO_LARGE: int +FRAME_BODY: int +FRAME_END: int +FRAME_END_SIZE: int +FRAME_ERROR: int +FRAME_HEADER: int +FRAME_HEADER_SIZE: int +FRAME_HEARTBEAT: int +FRAME_MAX_SIZE: int +FRAME_METHOD: int +FRAME_MIN_SIZE: int +INTERNAL_ERROR: int +INVALID_PATH: int +NOT_ALLOWED: int +NOT_FOUND: int +NOT_IMPLEMENTED: int +NO_CONSUMERS: int +NO_ROUTE: int +PERSISTENT_DELIVERY_MODE: int +PRECONDITION_FAILED: int +REPLY_SUCCESS: int +RESOURCE_ERROR: int +RESOURCE_LOCKED: int +SYNTAX_ERROR: int +TRANSIENT_DELIVERY_MODE: int +UNEXPECTED_FRAME: int + +class Connection(Class): + INDEX: ClassVar[int] + + class Start(Method): + INDEX: ClassVar[int] + version_major: Incomplete + version_minor: Incomplete + server_properties: Incomplete + mechanisms: Incomplete + locales: Incomplete + def __init__( + self, + version_major: int = ..., + version_minor: int = ..., + server_properties: Incomplete | None = ..., + mechanisms: _str = ..., + locales: _str = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class StartOk(Method): + INDEX: ClassVar[int] + client_properties: Incomplete + mechanism: Incomplete + response: Incomplete + locale: Incomplete + def __init__( + self, + client_properties: Incomplete | None = ..., + mechanism: _str = ..., + response: Incomplete | None = ..., + locale: _str = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Secure(Method): + INDEX: ClassVar[int] + challenge: Incomplete + def __init__(self, challenge: Incomplete | None = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class SecureOk(Method): + INDEX: ClassVar[int] + response: Incomplete + def __init__(self, response: Incomplete | None = ...) -> None: ... + @property + def synchronous(self): ... 
+ def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Tune(Method): + INDEX: ClassVar[int] + channel_max: Incomplete + frame_max: Incomplete + heartbeat: Incomplete + def __init__(self, channel_max: int = ..., frame_max: int = ..., heartbeat: int = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class TuneOk(Method): + INDEX: ClassVar[int] + channel_max: Incomplete + frame_max: Incomplete + heartbeat: Incomplete + def __init__(self, channel_max: int = ..., frame_max: int = ..., heartbeat: int = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Open(Method): + INDEX: ClassVar[int] + virtual_host: Incomplete + capabilities: Incomplete + insist: Incomplete + def __init__(self, virtual_host: _str = ..., capabilities: _str = ..., insist: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class OpenOk(Method): + INDEX: ClassVar[int] + known_hosts: Incomplete + def __init__(self, known_hosts: _str = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Close(Method): + INDEX: ClassVar[int] + reply_code: Incomplete + reply_text: Incomplete + class_id: Incomplete + method_id: Incomplete + def __init__( + self, + reply_code: Incomplete | None = ..., + reply_text: _str = ..., + class_id: Incomplete | None = ..., + method_id: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class CloseOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Blocked(Method): + INDEX: ClassVar[int] + reason: Incomplete + def __init__(self, reason: _str = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Unblocked(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class UpdateSecret(Method): + INDEX: ClassVar[int] + new_secret: Incomplete + reason: Incomplete + def __init__(self, new_secret, reason) -> None: ... + @property + def synchronous(self): ... + mechanisms: Incomplete + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class UpdateSecretOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + +class Channel(Class): + INDEX: ClassVar[int] + + class Open(Method): + INDEX: ClassVar[int] + out_of_band: Incomplete + def __init__(self, out_of_band: _str = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class OpenOk(Method): + INDEX: ClassVar[int] + channel_id: Incomplete + def __init__(self, channel_id: _str = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... 
+ + class Flow(Method): + INDEX: ClassVar[int] + active: Incomplete + def __init__(self, active: Incomplete | None = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class FlowOk(Method): + INDEX: ClassVar[int] + active: Incomplete + def __init__(self, active: Incomplete | None = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Close(Method): + INDEX: ClassVar[int] + reply_code: Incomplete + reply_text: Incomplete + class_id: Incomplete + method_id: Incomplete + def __init__( + self, + reply_code: Incomplete | None = ..., + reply_text: _str = ..., + class_id: Incomplete | None = ..., + method_id: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class CloseOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + +class Access(Class): + INDEX: ClassVar[int] + + class Request(Method): + INDEX: ClassVar[int] + realm: Incomplete + exclusive: Incomplete + passive: Incomplete + active: Incomplete + write: Incomplete + read: Incomplete + def __init__( + self, + realm: _str = ..., + exclusive: bool = ..., + passive: bool = ..., + active: bool = ..., + write: bool = ..., + read: bool = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class RequestOk(Method): + INDEX: ClassVar[int] + ticket: Incomplete + def __init__(self, ticket: int = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + +class Exchange(Class): + INDEX: ClassVar[int] + + class Declare(Method): + INDEX: ClassVar[int] + ticket: Incomplete + exchange: Incomplete + type: Incomplete + passive: Incomplete + durable: Incomplete + auto_delete: Incomplete + internal: Incomplete + nowait: Incomplete + arguments: Incomplete + def __init__( + self, + ticket: int = ..., + exchange: Incomplete | None = ..., + type=..., + passive: bool = ..., + durable: bool = ..., + auto_delete: bool = ..., + internal: bool = ..., + nowait: bool = ..., + arguments: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class DeclareOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = ...) -> Self: ... + def encode(self) -> list[bytes]: ... + + class Delete(Method): + INDEX: ClassVar[int] + ticket: Incomplete + exchange: Incomplete + if_unused: Incomplete + nowait: Incomplete + def __init__( + self, ticket: int = ..., exchange: Incomplete | None = ..., if_unused: bool = ..., nowait: bool = ... + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class DeleteOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... 
+ + class Bind(Method): + INDEX: ClassVar[int] + ticket: int + destination: Incomplete | None + source: Incomplete | None + routing_key: _str + nowait: bool + arguments: Incomplete | None + def __init__( + self, + ticket: int = ..., + destination: Incomplete | None = ..., + source: Incomplete | None = ..., + routing_key: _str = ..., + nowait: bool = ..., + arguments: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class BindOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Unbind(Method): + INDEX: ClassVar[int] + ticket: Incomplete + destination: Incomplete + source: Incomplete + routing_key: Incomplete + nowait: Incomplete + arguments: Incomplete + def __init__( + self, + ticket: int = ..., + destination: Incomplete | None = ..., + source: Incomplete | None = ..., + routing_key: _str = ..., + nowait: bool = ..., + arguments: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class UnbindOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + +class Queue(Class): + INDEX: ClassVar[int] + + class Declare(Method): + INDEX: ClassVar[int] + ticket: Incomplete + queue: Incomplete + passive: Incomplete + durable: Incomplete + exclusive: Incomplete + auto_delete: Incomplete + nowait: Incomplete + arguments: Incomplete + def __init__( + self, + ticket: int = ..., + queue: _str = ..., + passive: bool = ..., + durable: bool = ..., + exclusive: bool = ..., + auto_delete: bool = ..., + nowait: bool = ..., + arguments: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self) -> Literal[True]: ... + def decode(self, encoded: bytes, offset: int = ...) -> Self: ... + def encode(self) -> list[bytes]: ... + + class DeclareOk(Method): + INDEX: ClassVar[int] + queue: _str + message_count: int + consumer_count: int + def __init__(self, queue: _str, message_count: int, consumer_count: int) -> None: ... + @property + def synchronous(self) -> Literal[False]: ... + def decode(self, encoded: bytes, offset: int = ...) -> Self: ... + def encode(self) -> list[bytes]: ... + + class Bind(Method): + INDEX: ClassVar[int] + ticket: Incomplete + queue: Incomplete + exchange: Incomplete + routing_key: Incomplete + nowait: Incomplete + arguments: Incomplete + def __init__( + self, + ticket: int = ..., + queue: _str = ..., + exchange: Incomplete | None = ..., + routing_key: _str = ..., + nowait: bool = ..., + arguments: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class BindOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Purge(Method): + INDEX: ClassVar[int] + ticket: Incomplete + queue: Incomplete + nowait: Incomplete + def __init__(self, ticket: int = ..., queue: _str = ..., nowait: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... 
+ + class PurgeOk(Method): + INDEX: ClassVar[int] + message_count: Incomplete + def __init__(self, message_count: Incomplete | None = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Delete(Method): + INDEX: ClassVar[int] + ticket: Incomplete + queue: Incomplete + if_unused: Incomplete + if_empty: Incomplete + nowait: Incomplete + def __init__( + self, ticket: int = ..., queue: _str = ..., if_unused: bool = ..., if_empty: bool = ..., nowait: bool = ... + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class DeleteOk(Method): + INDEX: ClassVar[int] + message_count: Incomplete + def __init__(self, message_count: Incomplete | None = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Unbind(Method): + INDEX: ClassVar[int] + ticket: Incomplete + queue: Incomplete + exchange: Incomplete + routing_key: Incomplete + arguments: Incomplete + def __init__( + self, + ticket: int = ..., + queue: _str = ..., + exchange: Incomplete | None = ..., + routing_key: _str = ..., + arguments: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class UnbindOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + +class Basic(Class): + INDEX: ClassVar[int] + + class Qos(Method): + INDEX: ClassVar[int] + prefetch_size: Incomplete + prefetch_count: Incomplete + global_qos: Incomplete + def __init__(self, prefetch_size: int = ..., prefetch_count: int = ..., global_qos: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class QosOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Consume(Method): + INDEX: ClassVar[int] + ticket: Incomplete + queue: Incomplete + consumer_tag: Incomplete + no_local: Incomplete + no_ack: Incomplete + exclusive: Incomplete + nowait: Incomplete + arguments: Incomplete + def __init__( + self, + ticket: int = ..., + queue: _str = ..., + consumer_tag: _str = ..., + no_local: bool = ..., + no_ack: bool = ..., + exclusive: bool = ..., + nowait: bool = ..., + arguments: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class ConsumeOk(Method): + INDEX: ClassVar[int] + consumer_tag: Incomplete + def __init__(self, consumer_tag: Incomplete | None = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Cancel(Method): + INDEX: ClassVar[int] + consumer_tag: Incomplete + nowait: Incomplete + def __init__(self, consumer_tag: Incomplete | None = ..., nowait: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class CancelOk(Method): + INDEX: ClassVar[int] + consumer_tag: Incomplete + def __init__(self, consumer_tag: Incomplete | None = ...) -> None: ... 
+ @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Publish(Method): + INDEX: ClassVar[int] + ticket: Incomplete + exchange: Incomplete + routing_key: Incomplete + mandatory: Incomplete + immediate: Incomplete + def __init__( + self, ticket: int = ..., exchange: _str = ..., routing_key: _str = ..., mandatory: bool = ..., immediate: bool = ... + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Return(Method): + INDEX: ClassVar[int] + reply_code: Incomplete + reply_text: Incomplete + exchange: Incomplete + routing_key: Incomplete + def __init__( + self, + reply_code: Incomplete | None = ..., + reply_text: _str = ..., + exchange: Incomplete | None = ..., + routing_key: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Deliver(Method): + INDEX: ClassVar[int] + consumer_tag: Incomplete + delivery_tag: Incomplete + redelivered: Incomplete + exchange: Incomplete + routing_key: Incomplete + def __init__( + self, + consumer_tag: Incomplete | None = ..., + delivery_tag: Incomplete | None = ..., + redelivered: bool = ..., + exchange: Incomplete | None = ..., + routing_key: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Get(Method): + INDEX: ClassVar[int] + ticket: Incomplete + queue: Incomplete + no_ack: Incomplete + def __init__(self, ticket: int = ..., queue: _str = ..., no_ack: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class GetOk(Method): + INDEX: ClassVar[int] + delivery_tag: Incomplete + redelivered: Incomplete + exchange: Incomplete + routing_key: Incomplete + message_count: Incomplete + def __init__( + self, + delivery_tag: Incomplete | None = ..., + redelivered: bool = ..., + exchange: Incomplete | None = ..., + routing_key: Incomplete | None = ..., + message_count: Incomplete | None = ..., + ) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class GetEmpty(Method): + INDEX: ClassVar[int] + cluster_id: Incomplete + def __init__(self, cluster_id: _str = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Ack(Method): + INDEX: ClassVar[int] + delivery_tag: Incomplete + multiple: Incomplete + def __init__(self, delivery_tag: int = ..., multiple: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Reject(Method): + INDEX: ClassVar[int] + delivery_tag: Incomplete + requeue: Incomplete + def __init__(self, delivery_tag: Incomplete | None = ..., requeue: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class RecoverAsync(Method): + INDEX: ClassVar[int] + requeue: Incomplete + def __init__(self, requeue: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... 
+ + class Recover(Method): + INDEX: ClassVar[int] + requeue: Incomplete + def __init__(self, requeue: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class RecoverOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Nack(Method): + INDEX: ClassVar[int] + delivery_tag: Incomplete + multiple: Incomplete + requeue: Incomplete + def __init__(self, delivery_tag: int = ..., multiple: bool = ..., requeue: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + +class Tx(Class): + INDEX: ClassVar[int] + + class Select(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class SelectOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Commit(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class CommitOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class Rollback(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class RollbackOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + +class Confirm(Class): + INDEX: ClassVar[int] + + class Select(Method): + INDEX: ClassVar[int] + nowait: Incomplete + def __init__(self, nowait: bool = ...) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + + class SelectOk(Method): + INDEX: ClassVar[int] + def __init__(self) -> None: ... + @property + def synchronous(self): ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... 
+ +class BasicProperties(Properties): + CLASS: Incomplete + INDEX: ClassVar[int] + FLAG_CONTENT_TYPE: Incomplete + FLAG_CONTENT_ENCODING: Incomplete + FLAG_HEADERS: Incomplete + FLAG_DELIVERY_MODE: Incomplete + FLAG_PRIORITY: Incomplete + FLAG_CORRELATION_ID: Incomplete + FLAG_REPLY_TO: Incomplete + FLAG_EXPIRATION: Incomplete + FLAG_MESSAGE_ID: Incomplete + FLAG_TIMESTAMP: Incomplete + FLAG_TYPE: Incomplete + FLAG_USER_ID: Incomplete + FLAG_APP_ID: Incomplete + FLAG_CLUSTER_ID: Incomplete + content_type: Incomplete + content_encoding: Incomplete + headers: Incomplete + delivery_mode: Incomplete + priority: Incomplete + correlation_id: Incomplete + reply_to: Incomplete + expiration: Incomplete + message_id: Incomplete + timestamp: Incomplete + type: Incomplete + user_id: Incomplete + app_id: Incomplete + cluster_id: Incomplete + def __init__( + self, + content_type: Incomplete | None = ..., + content_encoding: Incomplete | None = ..., + headers: Incomplete | None = ..., + delivery_mode: Incomplete | None = ..., + priority: Incomplete | None = ..., + correlation_id: Incomplete | None = ..., + reply_to: Incomplete | None = ..., + expiration: Incomplete | None = ..., + message_id: Incomplete | None = ..., + timestamp: Incomplete | None = ..., + type: Incomplete | None = ..., + user_id: Incomplete | None = ..., + app_id: Incomplete | None = ..., + cluster_id: Incomplete | None = ..., + ) -> None: ... + def decode(self, encoded, offset: int = ...): ... + def encode(self): ... + +methods: Incomplete +props: Incomplete + +def has_content(methodNumber): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/tcp_socket_opts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/tcp_socket_opts.pyi new file mode 100644 index 00000000..81837440 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/tcp_socket_opts.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +LOGGER: Incomplete + +def socket_requires_keepalive(tcp_options): ... +def set_sock_opts(tcp_options, sock) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/validators.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/validators.pyi new file mode 100644 index 00000000..52ea0362 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pika/pika/validators.pyi @@ -0,0 +1,4 @@ +def require_string(value, value_name) -> None: ... +def require_callback(callback, callback_name: str = ...) -> None: ... +def rpc_completion_callback(callback): ... +def zero_or_greater(name, value) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/playsound/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/playsound/METADATA.toml new file mode 100644 index 00000000..3ea18392 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/playsound/METADATA.toml @@ -0,0 +1 @@ +version = "1.3.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/playsound/playsound.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/playsound/playsound.pyi new file mode 100644 index 00000000..b70f7902 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/playsound/playsound.pyi @@ -0,0 +1,8 @@ +import logging +import pathlib + +logger: logging.Logger + +class PlaysoundException(Exception): ... + +def playsound(sound: str | pathlib.Path, block: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/polib/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/polib/METADATA.toml new file mode 100644 index 00000000..c9f594bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/polib/METADATA.toml @@ -0,0 +1 @@ +version = "1.1.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/polib/polib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/polib/polib.pyi new file mode 100644 index 00000000..03a0338b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/polib/polib.pyi @@ -0,0 +1,155 @@ +from collections.abc import Callable +from typing import IO, Any, Generic, TypeVar, overload +from typing_extensions import SupportsIndex + +_TB = TypeVar("_TB", bound=_BaseEntry) +_TP = TypeVar("_TP", bound=POFile) +_TM = TypeVar("_TM", bound=MOFile) + +default_encoding: str + +# wrapwidth: int +# encoding: str +# check_for_duplicates: bool +@overload +def pofile(pofile: str, *, klass: type[_TP], **kwargs: Any) -> _TP: ... +@overload +def pofile(pofile: str, **kwargs: Any) -> POFile: ... +@overload +def mofile(mofile: str, *, klass: type[_TM], **kwargs: Any) -> _TM: ... +@overload +def mofile(mofile: str, **kwargs: Any) -> MOFile: ... +def detect_encoding(file: bytes | str, binary_mode: bool = ...) -> str: ... +def escape(st: str) -> str: ... +def unescape(st: str) -> str: ... + +class _BaseFile(list[_TB]): + fpath: str + wrapwidth: int + encoding: str + check_for_duplicates: bool + header: str + metadata: dict[str, str] + metadata_is_fuzzy: bool + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def __unicode__(self) -> str: ... + def __contains__(self, entry: _TB) -> bool: ... # type: ignore[override] # AttributeError otherwise + def __eq__(self, other: object) -> bool: ... + def append(self, entry: _TB) -> None: ... + def insert(self, index: SupportsIndex, entry: _TB) -> None: ... + def metadata_as_entry(self) -> POEntry: ... + def save(self, fpath: str | None = ..., repr_method: str = ..., newline: str | None = ...) -> None: ... + def find(self, st: str, by: str = ..., include_obsolete_entries: bool = ..., msgctxt: bool = ...) -> _TB | None: ... + def ordered_metadata(self) -> list[tuple[str, str]]: ... + def to_binary(self) -> bytes: ... + +class POFile(_BaseFile[POEntry]): + def __unicode__(self) -> str: ... + def save_as_mofile(self, fpath: str) -> None: ... 
+ def percent_translated(self) -> int: ... + def translated_entries(self) -> list[POEntry]: ... + def untranslated_entries(self) -> list[POEntry]: ... + def fuzzy_entries(self) -> list[POEntry]: ... + def obsolete_entries(self) -> list[POEntry]: ... + def merge(self, refpot: POFile) -> None: ... + +class MOFile(_BaseFile[MOEntry]): + MAGIC: int + MAGIC_SWAPPED: int + magic_number: int | None + version: int + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def save_as_pofile(self, fpath: str) -> None: ... + def save(self, fpath: str | None = ...) -> None: ... # type: ignore[override] # binary file does not allow argument repr_method + def percent_translated(self) -> int: ... + def translated_entries(self) -> list[MOEntry]: ... + def untranslated_entries(self) -> list[MOEntry]: ... + def fuzzy_entries(self) -> list[MOEntry]: ... + def obsolete_entries(self) -> list[MOEntry]: ... + +class _BaseEntry: + msgid: str + msgstr: str + msgid_plural: str + msgstr_plural: list[str] + msgctxt: str + obsolete: bool + encoding: str + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def __unicode__(self, wrapwidth: int = ...) -> str: ... + def __eq__(self, other: object) -> bool: ... + @property + def msgid_with_context(self) -> str: ... + +class POEntry(_BaseEntry): + comment: str + tcomment: str + occurrences: list[tuple[str, int]] + flags: list[str] + previous_msgctxt: str | None + previous_msgid: str | None + previous_msgid_plural: str | None + linenum: int | None + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def __unicode__(self, wrapwidth: int = ...) -> str: ... + def __cmp__(self, other: POEntry) -> int: ... + def __gt__(self, other: POEntry) -> bool: ... + def __lt__(self, other: POEntry) -> bool: ... + def __ge__(self, other: POEntry) -> bool: ... + def __le__(self, other: POEntry) -> bool: ... + def __eq__(self, other: POEntry) -> bool: ... # type: ignore[override] + def __ne__(self, other: POEntry) -> bool: ... # type: ignore[override] + def translated(self) -> bool: ... + def merge(self, other: POEntry) -> None: ... + @property + def fuzzy(self) -> bool: ... + @property + def msgid_with_context(self) -> str: ... + def __hash__(self) -> int: ... + +class MOEntry(_BaseEntry): + comment: str + tcomment: str + occurrences: list[tuple[str, int]] + flags: list[str] + previous_msgctxt: str | None + previous_msgid: str | None + previous_msgid_plural: str | None + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def __hash__(self) -> int: ... + +class _POFileParser(Generic[_TP]): + fhandle: IO[str] + instance: _TP + transitions: dict[tuple[str, str], tuple[Callable[[], bool], str]] + current_line: int + current_entry: POEntry + current_state: str + current_token: str | None + msgstr_index: int + entry_obsolete: int + def __init__(self, pofile: str, *args: Any, **kwargs: Any) -> None: ... + def parse(self) -> _TP: ... + def add(self, symbol: str, states: list[str], next_state: str) -> None: ... + def process(self, symbol: str) -> None: ... + def handle_he(self) -> bool: ... + def handle_tc(self) -> bool: ... + def handle_gc(self) -> bool: ... + def handle_oc(self) -> bool: ... + def handle_fl(self) -> bool: ... + def handle_pp(self) -> bool: ... + def handle_pm(self) -> bool: ... + def handle_pc(self) -> bool: ... + def handle_ct(self) -> bool: ... + def handle_mi(self) -> bool: ... + def handle_mp(self) -> bool: ... + def handle_ms(self) -> bool: ... + def handle_mx(self) -> bool: ... + def handle_mc(self) -> bool: ... 
+ +class _MOFileParser(Generic[_TM]): + fhandle: IO[bytes] + instance: _TM + def __init__(self, mofile: str, *args: Any, **kwargs: Any) -> None: ... + def __del__(self) -> None: ... + def parse(self) -> _TM: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/METADATA.toml new file mode 100644 index 00000000..a2c3acdb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/METADATA.toml @@ -0,0 +1,2 @@ +version = "3.4.*" +obsolete_since = "3.5.0" # Released on 2022-10-28 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/prettytable/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/prettytable/__init__.pyi new file mode 100644 index 00000000..f66d036b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/prettytable/__init__.pyi @@ -0,0 +1,21 @@ +from .prettytable import ( + ALL as ALL, + DEFAULT as DEFAULT, + DOUBLE_BORDER as DOUBLE_BORDER, + FRAME as FRAME, + HEADER as HEADER, + MARKDOWN as MARKDOWN, + MSWORD_FRIENDLY as MSWORD_FRIENDLY, + NONE as NONE, + ORGMODE as ORGMODE, + PLAIN_COLUMNS as PLAIN_COLUMNS, + RANDOM as RANDOM, + SINGLE_BORDER as SINGLE_BORDER, + PrettyTable as PrettyTable, + TableHandler as TableHandler, + from_csv as from_csv, + from_db_cursor as from_db_cursor, + from_html as from_html, + from_html_one as from_html_one, + from_json as from_json, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/prettytable/colortable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/prettytable/colortable.pyi new file mode 100644 index 00000000..39a60f59 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/prettytable/colortable.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from typing import ClassVar + +from .prettytable import PrettyTable + +RESET_CODE: str + +init: Callable[[], object] + +class Theme: + default_color: str + vertical_char: str + vertical_color: str + horizontal_char: str + horizontal_color: str + junction_char: str + junction_color: str + def __init__( + self, + default_color: str = ..., + vertical_char: str = ..., + vertical_color: str = ..., + horizontal_char: str = ..., + horizontal_color: str = ..., + junction_char: str = ..., + junction_color: str = ..., + ) -> None: ... + # The following method is broken in upstream code. + def format_code(s: str) -> str: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + +class Themes: + DEFAULT: ClassVar[Theme] + OCEAN: ClassVar[Theme] + +class ColorTable(PrettyTable): + def __init__(self, field_names: Incomplete | None = ..., **kwargs) -> None: ... + @property + def theme(self) -> Theme: ... + @theme.setter + def theme(self, value: Theme): ... + def update_theme(self) -> None: ... + def get_string(self, **kwargs) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/prettytable/prettytable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/prettytable/prettytable.pyi new file mode 100644 index 00000000..1c148250 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/prettytable/prettytable/prettytable.pyi @@ -0,0 +1,241 @@ +from _typeshed import Incomplete +from html.parser import HTMLParser +from typing import Any + +FRAME: int +ALL: int +NONE: int +HEADER: int +DEFAULT: int +MSWORD_FRIENDLY: int +PLAIN_COLUMNS: int +MARKDOWN: int +ORGMODE: int +RANDOM: int +SINGLE_BORDER: int +DOUBLE_BORDER: int +BASE_ALIGN_VALUE: str + +class PrettyTable: + encoding: Any + def __init__(self, field_names: Incomplete | None = ..., **kwargs): ... + def __getattr__(self, name: str): ... + def __getitem__(self, index): ... + @property + def field_names(self): ... + @field_names.setter + def field_names(self, val) -> None: ... + @property + def align(self): ... + @align.setter + def align(self, val) -> None: ... + @property + def valign(self): ... + @valign.setter + def valign(self, val) -> None: ... + @property + def max_width(self): ... + @max_width.setter + def max_width(self, val) -> None: ... + @property + def min_width(self): ... + @min_width.setter + def min_width(self, val) -> None: ... + @property + def min_table_width(self): ... + @min_table_width.setter + def min_table_width(self, val) -> None: ... + @property + def max_table_width(self): ... + @max_table_width.setter + def max_table_width(self, val) -> None: ... + @property + def fields(self): ... + @fields.setter + def fields(self, val) -> None: ... + @property + def title(self): ... + @title.setter + def title(self, val) -> None: ... + @property + def start(self): ... + @start.setter + def start(self, val) -> None: ... + @property + def end(self): ... + @end.setter + def end(self, val) -> None: ... + @property + def sortby(self): ... + @sortby.setter + def sortby(self, val) -> None: ... + @property + def reversesort(self): ... + @reversesort.setter + def reversesort(self, val) -> None: ... + @property + def sort_key(self): ... + @sort_key.setter + def sort_key(self, val) -> None: ... + @property + def header(self): ... + @header.setter + def header(self, val) -> None: ... + @property + def header_style(self): ... + @header_style.setter + def header_style(self, val) -> None: ... + @property + def border(self): ... + @border.setter + def border(self, val) -> None: ... + @property + def hrules(self): ... + @hrules.setter + def hrules(self, val) -> None: ... + @property + def vrules(self): ... + @vrules.setter + def vrules(self, val) -> None: ... + @property + def int_format(self): ... + @int_format.setter + def int_format(self, val) -> None: ... + @property + def float_format(self): ... + @float_format.setter + def float_format(self, val) -> None: ... + @property + def padding_width(self): ... + @padding_width.setter + def padding_width(self, val) -> None: ... + @property + def left_padding_width(self): ... + @left_padding_width.setter + def left_padding_width(self, val) -> None: ... + @property + def right_padding_width(self): ... + @right_padding_width.setter + def right_padding_width(self, val) -> None: ... + @property + def vertical_char(self): ... + @vertical_char.setter + def vertical_char(self, val) -> None: ... + @property + def horizontal_char(self): ... + @horizontal_char.setter + def horizontal_char(self, val) -> None: ... 
+ @property + def junction_char(self): ... + @junction_char.setter + def junction_char(self, val) -> None: ... + @property + def format(self): ... + @format.setter + def format(self, val) -> None: ... + @property + def print_empty(self): ... + @print_empty.setter + def print_empty(self, val) -> None: ... + @property + def attributes(self): ... + @attributes.setter + def attributes(self, val) -> None: ... + @property + def oldsortslice(self): ... + @oldsortslice.setter + def oldsortslice(self, val) -> None: ... + @property + def bottom_junction_char(self): ... + @bottom_junction_char.setter + def bottom_junction_char(self, val) -> None: ... + @property + def bottom_left_junction_char(self): ... + @bottom_left_junction_char.setter + def bottom_left_junction_char(self, val) -> None: ... + @property + def bottom_right_junction_char(self): ... + @bottom_right_junction_char.setter + def bottom_right_junction_char(self, val) -> None: ... + @property + def custom_format(self): ... + @custom_format.setter + def custom_format(self, val) -> None: ... + @property + def horizontal_align_char(self): ... + @horizontal_align_char.setter + def horizontal_align_char(self, val) -> None: ... + @property + def left_junction_char(self): ... + @left_junction_char.setter + def left_junction_char(self, val) -> None: ... + @property + def none_format(self): ... + @none_format.setter + def none_format(self, val) -> None: ... + @property + def preserve_internal_border(self): ... + @preserve_internal_border.setter + def preserve_internal_border(self, val) -> None: ... + @property + def right_junction_char(self): ... + @right_junction_char.setter + def right_junction_char(self, val) -> None: ... + @property + def top_junction_char(self): ... + @top_junction_char.setter + def top_junction_char(self, val) -> None: ... + @property + def top_left_junction_char(self): ... + @top_left_junction_char.setter + def top_left_junction_char(self, val) -> None: ... + @property + def top_right_junction_char(self): ... + @top_right_junction_char.setter + def top_right_junction_char(self, val) -> None: ... + @property + def xhtml(self) -> bool: ... + @xhtml.setter + def xhtml(self, val: bool) -> None: ... + @property + def rows(self) -> list[Incomplete]: ... + def add_autoindex(self, fieldname: str = ...): ... + def get_latex_string(self, **kwargs) -> str: ... + def set_style(self, style) -> None: ... + def add_rows(self, rows) -> None: ... + def add_row(self, row) -> None: ... + def del_row(self, row_index) -> None: ... + def add_column(self, fieldname, column, align: str = ..., valign: str = ...) -> None: ... + def del_column(self, fieldname) -> None: ... + def clear_rows(self) -> None: ... + def clear(self) -> None: ... + def copy(self): ... + def get_string(self, **kwargs) -> str: ... + def paginate(self, page_length: int = ..., line_break: str = ..., **kwargs): ... + def get_csv_string(self, **kwargs) -> str: ... + def get_json_string(self, **kwargs) -> str: ... + def get_html_string(self, **kwargs) -> str: ... + +def from_csv(fp, field_names: Incomplete | None = ..., **kwargs): ... +def from_db_cursor(cursor, **kwargs): ... +def from_json(json_string, **kwargs): ... + +class TableHandler(HTMLParser): + kwargs: Any + tables: Any + last_row: Any + rows: Any + max_row_width: int + active: Any + last_content: str + is_last_row_header: bool + colspan: int + def __init__(self, **kwargs) -> None: ... + def handle_starttag(self, tag, attrs) -> None: ... + def handle_endtag(self, tag) -> None: ... 
+ def handle_data(self, data) -> None: ... + def generate_table(self, rows): ... + def make_fields_unique(self, fields) -> None: ... + +def from_html(html_code, **kwargs): ... +def from_html_one(html_code, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..9b135c70 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/@tests/stubtest_allowlist.txt @@ -0,0 +1,41 @@ +# Generated pb2 methods diverge for a variety of reasons. They are tested +# carefully in mypy-protobuf which internally runs stubtest. Skip those here. +google.protobuf\..*_pb2\..* + +# While Message and Descriptor are both defined with a null DESCRIPTOR, +# subclasses of Message and instances of EnumTypeWrapper require this value to +# be set, and since these type stubs are intended for use with protoc-generated +# python it's more accurate to make them non-nullable. +google.protobuf.internal.enum_type_wrapper.EnumTypeWrapper.DESCRIPTOR +google.protobuf.message.Message.DESCRIPTOR + +# Exists at runtime, but via a __getitem__/__setitem__ hack +# See https://github.com/protocolbuffers/protobuf/blob/3ea30d80847cd9561db570ae7f673afc15523545/python/google/protobuf/message.py#L67 +google.protobuf.message.Message.Extensions + +# These are typed as (self, **kwargs) at runtime as thin wrapper functions +# around the underlying true typing. We prefer the true typing +google.protobuf.internal.containers.BaseContainer.sort +google.protobuf.message.Message.SerializePartialToString +google.protobuf.message.Message.SerializeToString +google.protobuf.text_format.MessageToBytes + +# Stubbed as static method, but actually exists as a property that's +# a function. Typeshed's typing is more useful +google.protobuf.service.Service.GetDescriptor + +# These are deliberately omitted in the stub. +# The classes can't be constructed directly anyway, +# so the signatures of their constructors are somewhat irrelevant. +google.protobuf.descriptor.Descriptor.__new__ +google.protobuf.descriptor.ServiceDescriptor.__new__ + +# Set to None at runtime - which doesn't match the Sequence base class. +# It's a hack - just allow it. 
+google.protobuf.internal.containers.BaseContainer.__hash__ + +# Metaclass differs: +google.protobuf.descriptor.OneofDescriptor + +# Runtime does not have __iter__ (yet...): hack in spirit of https://github.com/python/typeshed/issues/7813 +google.protobuf.internal.well_known_types.ListValue.__iter__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/@tests/test_cases/check_struct.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/@tests/test_cases/check_struct.py new file mode 100644 index 00000000..d3679af4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/@tests/test_cases/check_struct.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +from google.protobuf.struct_pb2 import ListValue, Struct + +list_value = ListValue() + +lst = list(list_value) # Ensure type checkers recognise that the class is iterable (doesn't have an `__iter__` method at runtime) + +list_value[0] = 42.42 +list_value[0] = "42" +list_value[0] = None +list_value[0] = True +list_value[0] = [42.42, "42", None, True, [42.42, "42", None, True], {"42": 42}] +list_value[0] = ListValue() +list_value[0] = Struct() + +list_element = list_value[0] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/METADATA.toml new file mode 100644 index 00000000..6d1b58c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/METADATA.toml @@ -0,0 +1,5 @@ +version = "4.21.*" +extra_description = "Generated with aid from mypy-protobuf v3.4.0" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/__init__.pyi new file mode 100644 index 00000000..bda5b5a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/__init__.pyi @@ -0,0 +1 @@ +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/any_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/any_pb2.pyi new file mode 100644 index 00000000..398a9aba --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/any_pb2.pyi @@ -0,0 +1,176 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol Buffers - Google's data interchange format +Copyright 2008 Google Inc. All rights reserved. +https://developers.google.com/protocol-buffers/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. 
nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.well_known_types +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class Any(google.protobuf.message.Message, google.protobuf.internal.well_known_types.Any): + """`Any` contains an arbitrary serialized protocol buffer message along with a + URL that describes the type of the serialized message. + + Protobuf library provides support to pack/unpack Any values in the form + of utility functions or additional generated methods of the Any type. + + Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + + Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + + The pack methods provided by protobuf library will by default use + 'type.googleapis.com/full.type.name' as the type URL and the unpack + methods only use the fully qualified type name after the last '/' + in the type URL, for example "foo.bar.com/x/y.z" will yield type + name "y.z". + + + JSON + + The JSON representation of an `Any` value uses the regular + representation of the deserialized, embedded message, with an + additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + + If the embedded message type is well-known and has a custom JSON + representation, that representation will be embedded adding a field + `value` which holds the custom JSON in addition to the `@type` + field. 
Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TYPE_URL_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + type_url: builtins.str + """A URL/resource name that uniquely identifies the type of the serialized + protocol buffer message. This string must contain at least + one "/" character. The last segment of the URL's path must represent + the fully qualified name of the type (as in + `path/google.protobuf.Duration`). The name should be in a canonical form + (e.g., leading "." is not accepted). + + In practice, teams usually precompile into the binary all types that they + expect it to use in the context of Any. However, for URLs which use the + scheme `http`, `https`, or no scheme, one can optionally set up a type + server that maps type URLs to message definitions as follows: + + * If no scheme is provided, `https` is assumed. + * An HTTP GET on the URL must yield a [google.protobuf.Type][] + value in binary format, or produce an error. + * Applications are allowed to cache lookup results based on the + URL, or have them precompiled into a binary to avoid any + lookup. Therefore, binary compatibility needs to be preserved + on changes to types. (Use versioned type names to manage + breaking changes.) + + Note: this functionality is not currently available in the official + protobuf release, and it is not used for type URLs beginning with + type.googleapis.com. + + Schemes other than `http`, `https` (or the empty scheme) might be + used with implementation specific semantics. + """ + value: builtins.bytes + """Must be a valid serialized protocol buffer of the above specified type.""" + def __init__( + self, + *, + type_url: builtins.str | None = ..., + value: builtins.bytes | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["type_url", b"type_url", "value", b"value"]) -> None: ... + +global___Any = Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/api_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/api_pb2.pyi new file mode 100644 index 00000000..4cedb0da --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/api_pb2.pyi @@ -0,0 +1,272 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol Buffers - Google's data interchange format +Copyright 2008 Google Inc. All rights reserved. +https://developers.google.com/protocol-buffers/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.source_context_pb2 +import google.protobuf.type_pb2 +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class Api(google.protobuf.message.Message): + """Api is a light-weight descriptor for an API Interface. + + Interfaces are also described as "protocol buffer services" in some contexts, + such as by the "service" keyword in a .proto file, but they are different + from API Services, which represent a concrete implementation of an interface + as opposed to simply a description of methods and bindings. They are also + sometimes simply referred to as "APIs" in other contexts, such as the name of + this message itself. See https://cloud.google.com/apis/design/glossary for + detailed terminology. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + METHODS_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + VERSION_FIELD_NUMBER: builtins.int + SOURCE_CONTEXT_FIELD_NUMBER: builtins.int + MIXINS_FIELD_NUMBER: builtins.int + SYNTAX_FIELD_NUMBER: builtins.int + name: builtins.str + """The fully qualified name of this interface, including package name + followed by the interface's simple name. + """ + @property + def methods(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Method]: + """The methods of this interface, in unspecified order.""" + @property + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.type_pb2.Option]: + """Any metadata attached to the interface.""" + version: builtins.str + """A version string for this interface. If specified, must have the form + `major-version.minor-version`, as in `1.10`. If the minor version is + omitted, it defaults to zero. If the entire version field is empty, the + major version is derived from the package name, as outlined below. If the + field is not empty, the version in the package name will be verified to be + consistent with what is provided here. + + The versioning schema uses [semantic + versioning](http://semver.org) where the major version number + indicates a breaking change and the minor version an additive, + non-breaking change. Both version numbers are signals to users + what to expect from different versions, and should be carefully + chosen based on the product plan. + + The major version is also reflected in the package name of the + interface, which must end in `v`, as in + `google.feature.v1`. 
For major versions 0 and 1, the suffix can + be omitted. Zero major versions must only be used for + experimental, non-GA interfaces. + """ + @property + def source_context(self) -> google.protobuf.source_context_pb2.SourceContext: + """Source context for the protocol buffer service represented by this + message. + """ + @property + def mixins(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Mixin]: + """Included interfaces. See [Mixin][].""" + syntax: google.protobuf.type_pb2.Syntax.ValueType + """The source syntax of the service.""" + def __init__( + self, + *, + name: builtins.str | None = ..., + methods: collections.abc.Iterable[global___Method] | None = ..., + options: collections.abc.Iterable[google.protobuf.type_pb2.Option] | None = ..., + version: builtins.str | None = ..., + source_context: google.protobuf.source_context_pb2.SourceContext | None = ..., + mixins: collections.abc.Iterable[global___Mixin] | None = ..., + syntax: google.protobuf.type_pb2.Syntax.ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["source_context", b"source_context"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["methods", b"methods", "mixins", b"mixins", "name", b"name", "options", b"options", "source_context", b"source_context", "syntax", b"syntax", "version", b"version"]) -> None: ... + +global___Api = Api + +@typing_extensions.final +class Method(google.protobuf.message.Message): + """Method represents a method of an API interface.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + REQUEST_TYPE_URL_FIELD_NUMBER: builtins.int + REQUEST_STREAMING_FIELD_NUMBER: builtins.int + RESPONSE_TYPE_URL_FIELD_NUMBER: builtins.int + RESPONSE_STREAMING_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + SYNTAX_FIELD_NUMBER: builtins.int + name: builtins.str + """The simple name of this method.""" + request_type_url: builtins.str + """A URL of the input message type.""" + request_streaming: builtins.bool + """If true, the request is streamed.""" + response_type_url: builtins.str + """The URL of the output message type.""" + response_streaming: builtins.bool + """If true, the response is streamed.""" + @property + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.type_pb2.Option]: + """Any metadata attached to the method.""" + syntax: google.protobuf.type_pb2.Syntax.ValueType + """The source syntax of this method.""" + def __init__( + self, + *, + name: builtins.str | None = ..., + request_type_url: builtins.str | None = ..., + request_streaming: builtins.bool | None = ..., + response_type_url: builtins.str | None = ..., + response_streaming: builtins.bool | None = ..., + options: collections.abc.Iterable[google.protobuf.type_pb2.Option] | None = ..., + syntax: google.protobuf.type_pb2.Syntax.ValueType | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "options", b"options", "request_streaming", b"request_streaming", "request_type_url", b"request_type_url", "response_streaming", b"response_streaming", "response_type_url", b"response_type_url", "syntax", b"syntax"]) -> None: ... + +global___Method = Method + +@typing_extensions.final +class Mixin(google.protobuf.message.Message): + """Declares an API Interface to be included in this interface. 
The including + interface must redeclare all the methods from the included interface, but + documentation and options are inherited as follows: + + - If after comment and whitespace stripping, the documentation + string of the redeclared method is empty, it will be inherited + from the original method. + + - Each annotation belonging to the service config (http, + visibility) which is not set in the redeclared method will be + inherited. + + - If an http annotation is inherited, the path pattern will be + modified as follows. Any version prefix will be replaced by the + version of the including interface plus the [root][] path if + specified. + + Example of a simple mixin: + + package google.acl.v1; + service AccessControl { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v1/{resource=**}:getAcl"; + } + } + + package google.storage.v2; + service Storage { + rpc GetAcl(GetAclRequest) returns (Acl); + + // Get a data record. + rpc GetData(GetDataRequest) returns (Data) { + option (google.api.http).get = "/v2/{resource=**}"; + } + } + + Example of a mixin configuration: + + apis: + - name: google.storage.v2.Storage + mixins: + - name: google.acl.v1.AccessControl + + The mixin construct implies that all methods in `AccessControl` are + also declared with same name and request/response types in + `Storage`. A documentation generator or annotation processor will + see the effective `Storage.GetAcl` method after inheriting + documentation and annotations as follows: + + service Storage { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v2/{resource=**}:getAcl"; + } + ... + } + + Note how the version in the path pattern changed from `v1` to `v2`. + + If the `root` field in the mixin is specified, it should be a + relative path under which inherited HTTP paths are placed. Example: + + apis: + - name: google.storage.v2.Storage + mixins: + - name: google.acl.v1.AccessControl + root: acls + + This implies the following inherited HTTP annotation: + + service Storage { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; + } + ... + } + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + ROOT_FIELD_NUMBER: builtins.int + name: builtins.str + """The fully qualified name of the interface which is included.""" + root: builtins.str + """If non-empty specifies a path under which inherited HTTP paths + are rooted. + """ + def __init__( + self, + *, + name: builtins.str | None = ..., + root: builtins.str | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "root", b"root"]) -> None: ... 
+ +global___Mixin = Mixin diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/compiler/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/compiler/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi new file mode 100644 index 00000000..2c54ee22 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi @@ -0,0 +1,249 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Author: kenton@google.com (Kenton Varda) + +WARNING: The plugin interface is currently EXPERIMENTAL and is subject to + change. + +protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is +just a program that reads a CodeGeneratorRequest from stdin and writes a +CodeGeneratorResponse to stdout. + +Plugins written using C++ can use google/protobuf/compiler/plugin.h instead +of dealing with the raw protocol defined here. + +A plugin executable needs only to be placed somewhere in the path. The +plugin should be named "protoc-gen-$NAME", and will then be used when the +flag "--${NAME}_out" is passed to protoc. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.descriptor_pb2 +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class Version(google.protobuf.message.Message): + """The version number of protocol compiler.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MAJOR_FIELD_NUMBER: builtins.int + MINOR_FIELD_NUMBER: builtins.int + PATCH_FIELD_NUMBER: builtins.int + SUFFIX_FIELD_NUMBER: builtins.int + major: builtins.int + minor: builtins.int + patch: builtins.int + suffix: builtins.str + """A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + be empty for mainline stable releases. + """ + def __init__( + self, + *, + major: builtins.int | None = ..., + minor: builtins.int | None = ..., + patch: builtins.int | None = ..., + suffix: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["major", b"major", "minor", b"minor", "patch", b"patch", "suffix", b"suffix"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["major", b"major", "minor", b"minor", "patch", b"patch", "suffix", b"suffix"]) -> None: ... 
+ +global___Version = Version + +@typing_extensions.final +class CodeGeneratorRequest(google.protobuf.message.Message): + """An encoded CodeGeneratorRequest is written to the plugin's stdin.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_TO_GENERATE_FIELD_NUMBER: builtins.int + PARAMETER_FIELD_NUMBER: builtins.int + PROTO_FILE_FIELD_NUMBER: builtins.int + COMPILER_VERSION_FIELD_NUMBER: builtins.int + @property + def file_to_generate(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """The .proto files that were explicitly listed on the command-line. The + code generator should generate code only for these files. Each file's + descriptor will be included in proto_file, below. + """ + parameter: builtins.str + """The generator parameter passed on the command-line.""" + @property + def proto_file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.descriptor_pb2.FileDescriptorProto]: + """FileDescriptorProtos for all files in files_to_generate and everything + they import. The files will appear in topological order, so each file + appears before any file that imports it. + + protoc guarantees that all proto_files will be written after + the fields above, even though this is not technically guaranteed by the + protobuf wire format. This theoretically could allow a plugin to stream + in the FileDescriptorProtos and handle them one by one rather than read + the entire set into memory at once. However, as of this writing, this + is not similarly optimized on protoc's end -- it will store all fields in + memory at once before sending them to the plugin. + + Type names of fields and extensions in the FileDescriptorProto are always + fully qualified. + """ + @property + def compiler_version(self) -> global___Version: + """The version number of protocol compiler.""" + def __init__( + self, + *, + file_to_generate: collections.abc.Iterable[builtins.str] | None = ..., + parameter: builtins.str | None = ..., + proto_file: collections.abc.Iterable[google.protobuf.descriptor_pb2.FileDescriptorProto] | None = ..., + compiler_version: global___Version | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["compiler_version", b"compiler_version", "parameter", b"parameter"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["compiler_version", b"compiler_version", "file_to_generate", b"file_to_generate", "parameter", b"parameter", "proto_file", b"proto_file"]) -> None: ... 
+ +global___CodeGeneratorRequest = CodeGeneratorRequest + +@typing_extensions.final +class CodeGeneratorResponse(google.protobuf.message.Message): + """The plugin writes an encoded CodeGeneratorResponse to stdout.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Feature: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _FeatureEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CodeGeneratorResponse._Feature.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + FEATURE_NONE: CodeGeneratorResponse._Feature.ValueType # 0 + FEATURE_PROTO3_OPTIONAL: CodeGeneratorResponse._Feature.ValueType # 1 + + class Feature(_Feature, metaclass=_FeatureEnumTypeWrapper): + """Sync with code_generator.h.""" + + FEATURE_NONE: CodeGeneratorResponse.Feature.ValueType # 0 + FEATURE_PROTO3_OPTIONAL: CodeGeneratorResponse.Feature.ValueType # 1 + + @typing_extensions.final + class File(google.protobuf.message.Message): + """Represents a single generated file.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + INSERTION_POINT_FIELD_NUMBER: builtins.int + CONTENT_FIELD_NUMBER: builtins.int + GENERATED_CODE_INFO_FIELD_NUMBER: builtins.int + name: builtins.str + """The file name, relative to the output directory. The name must not + contain "." or ".." components and must be relative, not be absolute (so, + the file cannot lie outside the output directory). "/" must be used as + the path separator, not "\\". + + If the name is omitted, the content will be appended to the previous + file. This allows the generator to break large files into small chunks, + and allows the generated text to be streamed back to protoc so that large + files need not reside completely in memory at one time. Note that as of + this writing protoc does not optimize for this -- it will read the entire + CodeGeneratorResponse before writing files to disk. + """ + insertion_point: builtins.str + """If non-empty, indicates that the named file should already exist, and the + content here is to be inserted into that file at a defined insertion + point. This feature allows a code generator to extend the output + produced by another code generator. The original generator may provide + insertion points by placing special annotations in the file that look + like: + @@protoc_insertion_point(NAME) + The annotation can have arbitrary text before and after it on the line, + which allows it to be placed in a comment. NAME should be replaced with + an identifier naming the point -- this is what other generators will use + as the insertion_point. Code inserted at this point will be placed + immediately above the line containing the insertion point (thus multiple + insertions to the same point will come out in the order they were added). + The double-@ is intended to make it unlikely that the generated code + could contain things that look like insertion points by accident. + + For example, the C++ code generator places the following line in the + .pb.h files that it generates: + // @@protoc_insertion_point(namespace_scope) + This line appears within the scope of the file's package namespace, but + outside of any particular class. Another plugin can then specify the + insertion_point "namespace_scope" to generate additional classes or + other declarations that should be placed in this scope. 
+ + Note that if the line containing the insertion point begins with + whitespace, the same whitespace will be added to every line of the + inserted text. This is useful for languages like Python, where + indentation matters. In these languages, the insertion point comment + should be indented the same amount as any inserted code will need to be + in order to work correctly in that context. + + The code generator that generates the initial file and the one which + inserts into it must both run as part of a single invocation of protoc. + Code generators are executed in the order in which they appear on the + command line. + + If |insertion_point| is present, |name| must also be present. + """ + content: builtins.str + """The file contents.""" + @property + def generated_code_info(self) -> google.protobuf.descriptor_pb2.GeneratedCodeInfo: + """Information describing the file content being inserted. If an insertion + point is used, this information will be appropriately offset and inserted + into the code generation metadata for the generated files. + """ + def __init__( + self, + *, + name: builtins.str | None = ..., + insertion_point: builtins.str | None = ..., + content: builtins.str | None = ..., + generated_code_info: google.protobuf.descriptor_pb2.GeneratedCodeInfo | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["content", b"content", "generated_code_info", b"generated_code_info", "insertion_point", b"insertion_point", "name", b"name"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["content", b"content", "generated_code_info", b"generated_code_info", "insertion_point", b"insertion_point", "name", b"name"]) -> None: ... + + ERROR_FIELD_NUMBER: builtins.int + SUPPORTED_FEATURES_FIELD_NUMBER: builtins.int + FILE_FIELD_NUMBER: builtins.int + error: builtins.str + """Error message. If non-empty, code generation failed. The plugin process + should exit with status code zero even if it reports an error in this way. + + This should be used to indicate errors in .proto files which prevent the + code generator from generating correct code. Errors which indicate a + problem in protoc itself -- such as the input CodeGeneratorRequest being + unparseable -- should be reported by writing a message to stderr and + exiting with a non-zero status code. + """ + supported_features: builtins.int + """A bitmask of supported features that the code generator supports. + This is a bitwise "or" of values from the Feature enum. + """ + @property + def file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CodeGeneratorResponse.File]: ... + def __init__( + self, + *, + error: builtins.str | None = ..., + supported_features: builtins.int | None = ..., + file: collections.abc.Iterable[global___CodeGeneratorResponse.File] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["error", b"error", "supported_features", b"supported_features"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["error", b"error", "file", b"file", "supported_features", b"supported_features"]) -> None: ... 
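The module docstring above spells out the protoc plugin contract: read a serialized CodeGeneratorRequest from stdin, write a serialized CodeGeneratorResponse to stdout. A minimal sketch of such a plugin using these generated types; the executable name protoc-gen-listing and the emitted file name are hypothetical:

#!/usr/bin/env python3
# Hypothetical plugin: install on PATH as "protoc-gen-listing" and run
# "protoc --listing_out=. your.proto" to invoke it.
import sys

from google.protobuf.compiler import plugin_pb2


def main() -> None:
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())

    response = plugin_pb2.CodeGeneratorResponse()
    # Advertise proto3 optional support via the Feature enum typed above.
    response.supported_features = plugin_pb2.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL

    listing = response.file.add()
    listing.name = "listing.txt"
    listing.content = "\n".join(request.file_to_generate)

    sys.stdout.buffer.write(response.SerializeToString())


if __name__ == "__main__":
    main()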
+ +global___CodeGeneratorResponse = CodeGeneratorResponse diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/descriptor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/descriptor.pyi new file mode 100644 index 00000000..2b4f36e3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/descriptor.pyi @@ -0,0 +1,351 @@ +from _typeshed import Incomplete +from typing import Any + +from .descriptor_pb2 import ( + EnumOptions, + EnumValueOptions, + FieldOptions, + FileOptions, + MessageOptions, + MethodOptions, + OneofOptions, + ServiceOptions, +) +from .message import Message + +class Error(Exception): ... +class TypeTransformationError(Error): ... + +class DescriptorMetaclass(type): + def __instancecheck__(self, obj: Any) -> bool: ... + +_internal_create_key: object + +class DescriptorBase(metaclass=DescriptorMetaclass): + has_options: Any + def __init__(self, options, serialized_options, options_class_name) -> None: ... + def GetOptions(self): ... + +class _NestedDescriptorBase(DescriptorBase): + name: Any + full_name: Any + file: Any + containing_type: Any + def __init__( + self, + options, + options_class_name, + name, + full_name, + file, + containing_type, + serialized_start=..., + serialized_end=..., + serialized_options=..., + ) -> None: ... + def CopyToProto(self, proto): ... + +class Descriptor(_NestedDescriptorBase): + fields: Any + fields_by_number: Any + fields_by_name: Any + nested_types: Any + nested_types_by_name: Any + enum_types: Any + enum_types_by_name: Any + enum_values_by_name: Any + extensions: Any + extensions_by_name: Any + is_extendable: Any + extension_ranges: Any + oneofs: Any + oneofs_by_name: Any + syntax: Any + def __init__( + self, + name: str, + full_name: str, + filename: Any, + containing_type: Descriptor | None, + fields: list[FieldDescriptor], + nested_types: list[FieldDescriptor], + enum_types: list[EnumDescriptor], + extensions: list[FieldDescriptor], + options: Incomplete | None = ..., + serialized_options: Incomplete | None = ..., + is_extendable: bool | None = ..., + extension_ranges: Incomplete | None = ..., + oneofs: list[OneofDescriptor] | None = ..., + file: FileDescriptor | None = ..., + serialized_start: Incomplete | None = ..., + serialized_end: Incomplete | None = ..., + syntax: str | None = ..., + create_key: Incomplete | None = ..., + ): ... + def EnumValueName(self, enum, value): ... + def CopyToProto(self, proto): ... + def GetOptions(self) -> MessageOptions: ... 
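Descriptor is the runtime mirror of a message definition; the fields/fields_by_name/oneofs attributes typed above are what reflective code walks. A short introspection sketch using struct_pb2, which the check_struct.py test case earlier in this patch already imports:

from google.protobuf import struct_pb2

descriptor = struct_pb2.Value.DESCRIPTOR
print(descriptor.full_name)  # google.protobuf.Value

for field in descriptor.fields:
    print(field.number, field.name)

# The "kind" oneof groups Value's alternative payload fields.
print([f.name for f in descriptor.oneofs_by_name["kind"].fields])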
+ +class FieldDescriptor(DescriptorBase): + TYPE_DOUBLE: Any + TYPE_FLOAT: Any + TYPE_INT64: Any + TYPE_UINT64: Any + TYPE_INT32: Any + TYPE_FIXED64: Any + TYPE_FIXED32: Any + TYPE_BOOL: Any + TYPE_STRING: Any + TYPE_GROUP: Any + TYPE_MESSAGE: Any + TYPE_BYTES: Any + TYPE_UINT32: Any + TYPE_ENUM: Any + TYPE_SFIXED32: Any + TYPE_SFIXED64: Any + TYPE_SINT32: Any + TYPE_SINT64: Any + MAX_TYPE: Any + CPPTYPE_INT32: Any + CPPTYPE_INT64: Any + CPPTYPE_UINT32: Any + CPPTYPE_UINT64: Any + CPPTYPE_DOUBLE: Any + CPPTYPE_FLOAT: Any + CPPTYPE_BOOL: Any + CPPTYPE_ENUM: Any + CPPTYPE_STRING: Any + CPPTYPE_MESSAGE: Any + MAX_CPPTYPE: Any + LABEL_OPTIONAL: Any + LABEL_REQUIRED: Any + LABEL_REPEATED: Any + MAX_LABEL: Any + MAX_FIELD_NUMBER: Any + FIRST_RESERVED_FIELD_NUMBER: Any + LAST_RESERVED_FIELD_NUMBER: Any + def __new__( + cls, + name, + full_name, + index, + number, + type, + cpp_type, + label, + default_value, + message_type, + enum_type, + containing_type, + is_extension, + extension_scope, + options=..., + serialized_options=..., + has_default_value=..., + containing_oneof=..., + json_name=..., + file=..., + create_key=..., + ): ... + name: Any + full_name: Any + index: Any + number: Any + type: Any + cpp_type: Any + label: Any + has_default_value: Any + default_value: Any + containing_type: Any + message_type: Any + enum_type: Any + is_extension: Any + extension_scope: Any + containing_oneof: Any + def __init__( + self, + name, + full_name, + index, + number, + type, + cpp_type, + label, + default_value, + message_type, + enum_type, + containing_type, + is_extension, + extension_scope, + options=..., + serialized_options=..., + has_default_value=..., + containing_oneof=..., + json_name=..., + file=..., + create_key=..., + ) -> None: ... + @staticmethod + def ProtoTypeToCppProtoType(proto_type): ... + def GetOptions(self) -> FieldOptions: ... + +class EnumDescriptor(_NestedDescriptorBase): + def __new__( + cls, + name, + full_name, + filename, + values, + containing_type=..., + options=..., + serialized_options=..., + file=..., + serialized_start=..., + serialized_end=..., + create_key=..., + ): ... + values: Any + values_by_name: Any + values_by_number: Any + def __init__( + self, + name, + full_name, + filename, + values, + containing_type=..., + options=..., + serialized_options=..., + file=..., + serialized_start=..., + serialized_end=..., + create_key=..., + ) -> None: ... + def CopyToProto(self, proto): ... + def GetOptions(self) -> EnumOptions: ... + +class EnumValueDescriptor(DescriptorBase): + def __new__(cls, name, index, number, type=..., options=..., serialized_options=..., create_key=...): ... + name: Any + index: Any + number: Any + type: Any + def __init__(self, name, index, number, type=..., options=..., serialized_options=..., create_key=...) -> None: ... + def GetOptions(self) -> EnumValueOptions: ... + +class OneofDescriptor: + def __new__(cls, name, full_name, index, containing_type, fields, options=..., serialized_options=..., create_key=...): ... + name: Any + full_name: Any + index: Any + containing_type: Any + fields: Any + def __init__( + self, name, full_name, index, containing_type, fields, options=..., serialized_options=..., create_key=... + ) -> None: ... + def GetOptions(self) -> OneofOptions: ... 
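The class-level TYPE_*/CPPTYPE_*/LABEL_* constants on FieldDescriptor are what generic serializers switch on. A brief sketch, continuing the struct_pb2 example, that classifies each field by label and wire kind (Struct's single map field shows up as a repeated message field):

from google.protobuf import struct_pb2
from google.protobuf.descriptor import FieldDescriptor

for field in struct_pb2.Struct.DESCRIPTOR.fields:
    repeated = field.label == FieldDescriptor.LABEL_REPEATED
    nested = field.type == FieldDescriptor.TYPE_MESSAGE
    print(field.name,
          "repeated" if repeated else "singular",
          "message" if nested else "scalar")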
+ +class ServiceDescriptor(_NestedDescriptorBase): + index: Any + methods: Any + methods_by_name: Any + def __init__( + self, + name: str, + full_name: str, + index: int, + methods: list[MethodDescriptor], + options: ServiceOptions | None = ..., + serialized_options: Incomplete | None = ..., + file: FileDescriptor | None = ..., + serialized_start: Incomplete | None = ..., + serialized_end: Incomplete | None = ..., + create_key: Incomplete | None = ..., + ): ... + def FindMethodByName(self, name): ... + def CopyToProto(self, proto): ... + def GetOptions(self) -> ServiceOptions: ... + +class MethodDescriptor(DescriptorBase): + def __new__( + cls, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=..., + server_streaming=..., + options=..., + serialized_options=..., + create_key=..., + ): ... + name: Any + full_name: Any + index: Any + containing_service: Any + input_type: Any + output_type: Any + client_streaming: bool + server_streaming: bool + def __init__( + self, + name, + full_name, + index, + containing_service, + input_type, + output_type, + client_streaming=..., + server_streaming=..., + options=..., + serialized_options=..., + create_key=..., + ) -> None: ... + def GetOptions(self) -> MethodOptions: ... + +class FileDescriptor(DescriptorBase): + def __new__( + cls, + name, + package, + options=..., + serialized_options=..., + serialized_pb=..., + dependencies=..., + public_dependencies=..., + syntax=..., + pool=..., + create_key=..., + ): ... + _options: Any + pool: Any + message_types_by_name: Any + name: Any + package: Any + syntax: Any + serialized_pb: Any + enum_types_by_name: Any + extensions_by_name: Any + services_by_name: Any + dependencies: Any + public_dependencies: Any + def __init__( + self, + name, + package, + options=..., + serialized_options=..., + serialized_pb=..., + dependencies=..., + public_dependencies=..., + syntax=..., + pool=..., + create_key=..., + ) -> None: ... + def CopyToProto(self, proto): ... + def GetOptions(self) -> FileOptions: ... + +def MakeDescriptor(desc_proto, package=..., build_file_if_cpp=..., syntax=...): ... +def _ParseOptions(message: Message, string: bytes) -> Message: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/descriptor_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/descriptor_pb2.pyi new file mode 100644 index 00000000..e37479b5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/descriptor_pb2.pyi @@ -0,0 +1,1521 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Author: kenton@google.com (Kenton Varda) + Based on original Protocol Buffers design by + Sanjay Ghemawat, Jeff Dean, and others. + +The messages in this file describe the definitions found in .proto files. +A valid .proto file can be translated directly to a FileDescriptorProto +without any other information (e.g. without reading its imports). 
+""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class FileDescriptorSet(google.protobuf.message.Message): + """The protocol compiler can output a FileDescriptorSet containing the .proto + files it parses. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_FIELD_NUMBER: builtins.int + @property + def file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FileDescriptorProto]: ... + def __init__( + self, + *, + file: collections.abc.Iterable[global___FileDescriptorProto] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["file", b"file"]) -> None: ... + +global___FileDescriptorSet = FileDescriptorSet + +@typing_extensions.final +class FileDescriptorProto(google.protobuf.message.Message): + """Describes a complete .proto file.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PACKAGE_FIELD_NUMBER: builtins.int + DEPENDENCY_FIELD_NUMBER: builtins.int + PUBLIC_DEPENDENCY_FIELD_NUMBER: builtins.int + WEAK_DEPENDENCY_FIELD_NUMBER: builtins.int + MESSAGE_TYPE_FIELD_NUMBER: builtins.int + ENUM_TYPE_FIELD_NUMBER: builtins.int + SERVICE_FIELD_NUMBER: builtins.int + EXTENSION_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + SOURCE_CODE_INFO_FIELD_NUMBER: builtins.int + SYNTAX_FIELD_NUMBER: builtins.int + name: builtins.str + """file name, relative to root of source tree""" + package: builtins.str + """e.g. "foo", "foo.bar", etc.""" + @property + def dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Names of files imported by this file.""" + @property + def public_dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Indexes of the public imported files in the dependency list above.""" + @property + def weak_dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Indexes of the weak imported files in the dependency list. + For Google-internal migration only. Do not use. + """ + @property + def message_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto]: + """All top-level definitions in this file.""" + @property + def enum_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumDescriptorProto]: ... + @property + def service(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ServiceDescriptorProto]: ... + @property + def extension(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FieldDescriptorProto]: ... + @property + def options(self) -> global___FileOptions: ... + @property + def source_code_info(self) -> global___SourceCodeInfo: + """This field contains optional information about the original source code. + You may safely remove this entire field without harming runtime + functionality of the descriptors -- the information is needed only by + development tools. + """ + syntax: builtins.str + """The syntax of the proto file. 
+ The supported values are "proto2" and "proto3". + """ + def __init__( + self, + *, + name: builtins.str | None = ..., + package: builtins.str | None = ..., + dependency: collections.abc.Iterable[builtins.str] | None = ..., + public_dependency: collections.abc.Iterable[builtins.int] | None = ..., + weak_dependency: collections.abc.Iterable[builtins.int] | None = ..., + message_type: collections.abc.Iterable[global___DescriptorProto] | None = ..., + enum_type: collections.abc.Iterable[global___EnumDescriptorProto] | None = ..., + service: collections.abc.Iterable[global___ServiceDescriptorProto] | None = ..., + extension: collections.abc.Iterable[global___FieldDescriptorProto] | None = ..., + options: global___FileOptions | None = ..., + source_code_info: global___SourceCodeInfo | None = ..., + syntax: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name", b"name", "options", b"options", "package", b"package", "source_code_info", b"source_code_info", "syntax", b"syntax"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["dependency", b"dependency", "enum_type", b"enum_type", "extension", b"extension", "message_type", b"message_type", "name", b"name", "options", b"options", "package", b"package", "public_dependency", b"public_dependency", "service", b"service", "source_code_info", b"source_code_info", "syntax", b"syntax", "weak_dependency", b"weak_dependency"]) -> None: ... + +global___FileDescriptorProto = FileDescriptorProto + +@typing_extensions.final +class DescriptorProto(google.protobuf.message.Message): + """Describes a message type.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class ExtensionRange(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + START_FIELD_NUMBER: builtins.int + END_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + start: builtins.int + """Inclusive.""" + end: builtins.int + """Exclusive.""" + @property + def options(self) -> global___ExtensionRangeOptions: ... + def __init__( + self, + *, + start: builtins.int | None = ..., + end: builtins.int | None = ..., + options: global___ExtensionRangeOptions | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end", b"end", "options", b"options", "start", b"start"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "options", b"options", "start", b"start"]) -> None: ... + + @typing_extensions.final + class ReservedRange(google.protobuf.message.Message): + """Range of reserved tag numbers. Reserved tag numbers may not be used by + fields or extension ranges in the same message. Reserved ranges may + not overlap. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + START_FIELD_NUMBER: builtins.int + END_FIELD_NUMBER: builtins.int + start: builtins.int + """Inclusive.""" + end: builtins.int + """Exclusive.""" + def __init__( + self, + *, + start: builtins.int | None = ..., + end: builtins.int | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end", b"end", "start", b"start"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "start", b"start"]) -> None: ... 
+ + NAME_FIELD_NUMBER: builtins.int + FIELD_FIELD_NUMBER: builtins.int + EXTENSION_FIELD_NUMBER: builtins.int + NESTED_TYPE_FIELD_NUMBER: builtins.int + ENUM_TYPE_FIELD_NUMBER: builtins.int + EXTENSION_RANGE_FIELD_NUMBER: builtins.int + ONEOF_DECL_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + RESERVED_RANGE_FIELD_NUMBER: builtins.int + RESERVED_NAME_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def field(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FieldDescriptorProto]: ... + @property + def extension(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FieldDescriptorProto]: ... + @property + def nested_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto]: ... + @property + def enum_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumDescriptorProto]: ... + @property + def extension_range(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto.ExtensionRange]: ... + @property + def oneof_decl(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OneofDescriptorProto]: ... + @property + def options(self) -> global___MessageOptions: ... + @property + def reserved_range(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto.ReservedRange]: ... + @property + def reserved_name(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Reserved field names, which may not be used by fields in the same message. + A given name may only be reserved once. + """ + def __init__( + self, + *, + name: builtins.str | None = ..., + field: collections.abc.Iterable[global___FieldDescriptorProto] | None = ..., + extension: collections.abc.Iterable[global___FieldDescriptorProto] | None = ..., + nested_type: collections.abc.Iterable[global___DescriptorProto] | None = ..., + enum_type: collections.abc.Iterable[global___EnumDescriptorProto] | None = ..., + extension_range: collections.abc.Iterable[global___DescriptorProto.ExtensionRange] | None = ..., + oneof_decl: collections.abc.Iterable[global___OneofDescriptorProto] | None = ..., + options: global___MessageOptions | None = ..., + reserved_range: collections.abc.Iterable[global___DescriptorProto.ReservedRange] | None = ..., + reserved_name: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name", b"name", "options", b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["enum_type", b"enum_type", "extension", b"extension", "extension_range", b"extension_range", "field", b"field", "name", b"name", "nested_type", b"nested_type", "oneof_decl", b"oneof_decl", "options", b"options", "reserved_name", b"reserved_name", "reserved_range", b"reserved_range"]) -> None: ... + +global___DescriptorProto = DescriptorProto + +@typing_extensions.final +class ExtensionRangeOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int + @property + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. 
See above.""" + def __init__( + self, + *, + uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["uninterpreted_option", b"uninterpreted_option"]) -> None: ... + +global___ExtensionRangeOptions = ExtensionRangeOptions + +@typing_extensions.final +class FieldDescriptorProto(google.protobuf.message.Message): + """Describes a field within a message.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Type: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldDescriptorProto._Type.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + TYPE_DOUBLE: FieldDescriptorProto._Type.ValueType # 1 + """0 is reserved for errors. + Order is weird for historical reasons. + """ + TYPE_FLOAT: FieldDescriptorProto._Type.ValueType # 2 + TYPE_INT64: FieldDescriptorProto._Type.ValueType # 3 + """Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + negative values are likely. + """ + TYPE_UINT64: FieldDescriptorProto._Type.ValueType # 4 + TYPE_INT32: FieldDescriptorProto._Type.ValueType # 5 + """Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + negative values are likely. + """ + TYPE_FIXED64: FieldDescriptorProto._Type.ValueType # 6 + TYPE_FIXED32: FieldDescriptorProto._Type.ValueType # 7 + TYPE_BOOL: FieldDescriptorProto._Type.ValueType # 8 + TYPE_STRING: FieldDescriptorProto._Type.ValueType # 9 + TYPE_GROUP: FieldDescriptorProto._Type.ValueType # 10 + """Tag-delimited aggregate. + Group type is deprecated and not supported in proto3. However, Proto3 + implementations should still be able to parse the group wire format and + treat group fields as unknown fields. + """ + TYPE_MESSAGE: FieldDescriptorProto._Type.ValueType # 11 + """Length-delimited aggregate.""" + TYPE_BYTES: FieldDescriptorProto._Type.ValueType # 12 + """New in version 2.""" + TYPE_UINT32: FieldDescriptorProto._Type.ValueType # 13 + TYPE_ENUM: FieldDescriptorProto._Type.ValueType # 14 + TYPE_SFIXED32: FieldDescriptorProto._Type.ValueType # 15 + TYPE_SFIXED64: FieldDescriptorProto._Type.ValueType # 16 + TYPE_SINT32: FieldDescriptorProto._Type.ValueType # 17 + """Uses ZigZag encoding.""" + TYPE_SINT64: FieldDescriptorProto._Type.ValueType # 18 + """Uses ZigZag encoding.""" + + class Type(_Type, metaclass=_TypeEnumTypeWrapper): ... + TYPE_DOUBLE: FieldDescriptorProto.Type.ValueType # 1 + """0 is reserved for errors. + Order is weird for historical reasons. + """ + TYPE_FLOAT: FieldDescriptorProto.Type.ValueType # 2 + TYPE_INT64: FieldDescriptorProto.Type.ValueType # 3 + """Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + negative values are likely. + """ + TYPE_UINT64: FieldDescriptorProto.Type.ValueType # 4 + TYPE_INT32: FieldDescriptorProto.Type.ValueType # 5 + """Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + negative values are likely. + """ + TYPE_FIXED64: FieldDescriptorProto.Type.ValueType # 6 + TYPE_FIXED32: FieldDescriptorProto.Type.ValueType # 7 + TYPE_BOOL: FieldDescriptorProto.Type.ValueType # 8 + TYPE_STRING: FieldDescriptorProto.Type.ValueType # 9 + TYPE_GROUP: FieldDescriptorProto.Type.ValueType # 10 + """Tag-delimited aggregate. + Group type is deprecated and not supported in proto3. 
However, Proto3 + implementations should still be able to parse the group wire format and + treat group fields as unknown fields. + """ + TYPE_MESSAGE: FieldDescriptorProto.Type.ValueType # 11 + """Length-delimited aggregate.""" + TYPE_BYTES: FieldDescriptorProto.Type.ValueType # 12 + """New in version 2.""" + TYPE_UINT32: FieldDescriptorProto.Type.ValueType # 13 + TYPE_ENUM: FieldDescriptorProto.Type.ValueType # 14 + TYPE_SFIXED32: FieldDescriptorProto.Type.ValueType # 15 + TYPE_SFIXED64: FieldDescriptorProto.Type.ValueType # 16 + TYPE_SINT32: FieldDescriptorProto.Type.ValueType # 17 + """Uses ZigZag encoding.""" + TYPE_SINT64: FieldDescriptorProto.Type.ValueType # 18 + """Uses ZigZag encoding.""" + + class _Label: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _LabelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldDescriptorProto._Label.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + LABEL_OPTIONAL: FieldDescriptorProto._Label.ValueType # 1 + """0 is reserved for errors""" + LABEL_REQUIRED: FieldDescriptorProto._Label.ValueType # 2 + LABEL_REPEATED: FieldDescriptorProto._Label.ValueType # 3 + + class Label(_Label, metaclass=_LabelEnumTypeWrapper): ... + LABEL_OPTIONAL: FieldDescriptorProto.Label.ValueType # 1 + """0 is reserved for errors""" + LABEL_REQUIRED: FieldDescriptorProto.Label.ValueType # 2 + LABEL_REPEATED: FieldDescriptorProto.Label.ValueType # 3 + + NAME_FIELD_NUMBER: builtins.int + NUMBER_FIELD_NUMBER: builtins.int + LABEL_FIELD_NUMBER: builtins.int + TYPE_FIELD_NUMBER: builtins.int + TYPE_NAME_FIELD_NUMBER: builtins.int + EXTENDEE_FIELD_NUMBER: builtins.int + DEFAULT_VALUE_FIELD_NUMBER: builtins.int + ONEOF_INDEX_FIELD_NUMBER: builtins.int + JSON_NAME_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + PROTO3_OPTIONAL_FIELD_NUMBER: builtins.int + name: builtins.str + number: builtins.int + label: global___FieldDescriptorProto.Label.ValueType + type: global___FieldDescriptorProto.Type.ValueType + """If type_name is set, this need not be set. If both this and type_name + are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + """ + type_name: builtins.str + """For message and enum types, this is the name of the type. If the name + starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + rules are used to find the type (i.e. first the nested types within this + message are searched, then within the parent, on up to the root + namespace). + """ + extendee: builtins.str + """For extensions, this is the name of the type being extended. It is + resolved in the same manner as type_name. + """ + default_value: builtins.str + """For numeric types, contains the original text representation of the value. + For booleans, "true" or "false". + For strings, contains the default text contents (not escaped in any way). + For bytes, contains the C escaped value. All bytes >= 128 are escaped. + """ + oneof_index: builtins.int + """If set, gives the index of a oneof in the containing type's oneof_decl + list. This field is a member of that oneof. + """ + json_name: builtins.str + """JSON name of this field. The value is set by protocol compiler. If the + user has set a "json_name" option on this field, that option's value + will be used. Otherwise, it's deduced from the field's name by converting + it to camelCase. + """ + @property + def options(self) -> global___FieldOptions: ... 
+ proto3_optional: builtins.bool + """If true, this is a proto3 "optional". When a proto3 field is optional, it + tracks presence regardless of field type. + + When proto3_optional is true, this field must be belong to a oneof to + signal to old proto3 clients that presence is tracked for this field. This + oneof is known as a "synthetic" oneof, and this field must be its sole + member (each proto3 optional field gets its own synthetic oneof). Synthetic + oneofs exist in the descriptor only, and do not generate any API. Synthetic + oneofs must be ordered after all "real" oneofs. + + For message fields, proto3_optional doesn't create any semantic change, + since non-repeated message fields always track presence. However it still + indicates the semantic detail of whether the user wrote "optional" or not. + This can be useful for round-tripping the .proto file. For consistency we + give message fields a synthetic oneof also, even though it is not required + to track presence. This is especially important because the parser can't + tell if a field is a message or an enum, so it must always create a + synthetic oneof. + + Proto2 optional fields do not set this flag, because they already indicate + optional with `LABEL_OPTIONAL`. + """ + def __init__( + self, + *, + name: builtins.str | None = ..., + number: builtins.int | None = ..., + label: global___FieldDescriptorProto.Label.ValueType | None = ..., + type: global___FieldDescriptorProto.Type.ValueType | None = ..., + type_name: builtins.str | None = ..., + extendee: builtins.str | None = ..., + default_value: builtins.str | None = ..., + oneof_index: builtins.int | None = ..., + json_name: builtins.str | None = ..., + options: global___FieldOptions | None = ..., + proto3_optional: builtins.bool | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["default_value", b"default_value", "extendee", b"extendee", "json_name", b"json_name", "label", b"label", "name", b"name", "number", b"number", "oneof_index", b"oneof_index", "options", b"options", "proto3_optional", b"proto3_optional", "type", b"type", "type_name", b"type_name"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["default_value", b"default_value", "extendee", b"extendee", "json_name", b"json_name", "label", b"label", "name", b"name", "number", b"number", "oneof_index", b"oneof_index", "options", b"options", "proto3_optional", b"proto3_optional", "type", b"type", "type_name", b"type_name"]) -> None: ... + +global___FieldDescriptorProto = FieldDescriptorProto + +@typing_extensions.final +class OneofDescriptorProto(google.protobuf.message.Message): + """Describes a oneof.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def options(self) -> global___OneofOptions: ... + def __init__( + self, + *, + name: builtins.str | None = ..., + options: global___OneofOptions | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name", b"name", "options", b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "options", b"options"]) -> None: ... 
+ +global___OneofDescriptorProto = OneofDescriptorProto + +@typing_extensions.final +class EnumDescriptorProto(google.protobuf.message.Message): + """Describes an enum type.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class EnumReservedRange(google.protobuf.message.Message): + """Range of reserved numeric values. Reserved values may not be used by + entries in the same enum. Reserved ranges may not overlap. + + Note that this is distinct from DescriptorProto.ReservedRange in that it + is inclusive such that it can appropriately represent the entire int32 + domain. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + START_FIELD_NUMBER: builtins.int + END_FIELD_NUMBER: builtins.int + start: builtins.int + """Inclusive.""" + end: builtins.int + """Inclusive.""" + def __init__( + self, + *, + start: builtins.int | None = ..., + end: builtins.int | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end", b"end", "start", b"start"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "start", b"start"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + RESERVED_RANGE_FIELD_NUMBER: builtins.int + RESERVED_NAME_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def value(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumValueDescriptorProto]: ... + @property + def options(self) -> global___EnumOptions: ... + @property + def reserved_range(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumDescriptorProto.EnumReservedRange]: + """Range of reserved numeric values. Reserved numeric values may not be used + by enum values in the same enum declaration. Reserved ranges may not + overlap. + """ + @property + def reserved_name(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """Reserved enum value names, which may not be reused. A given name may only + be reserved once. + """ + def __init__( + self, + *, + name: builtins.str | None = ..., + value: collections.abc.Iterable[global___EnumValueDescriptorProto] | None = ..., + options: global___EnumOptions | None = ..., + reserved_range: collections.abc.Iterable[global___EnumDescriptorProto.EnumReservedRange] | None = ..., + reserved_name: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name", b"name", "options", b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "options", b"options", "reserved_name", b"reserved_name", "reserved_range", b"reserved_range", "value", b"value"]) -> None: ... + +global___EnumDescriptorProto = EnumDescriptorProto + +@typing_extensions.final +class EnumValueDescriptorProto(google.protobuf.message.Message): + """Describes a value within an enum.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + NUMBER_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + name: builtins.str + number: builtins.int + @property + def options(self) -> global___EnumValueOptions: ... + def __init__( + self, + *, + name: builtins.str | None = ..., + number: builtins.int | None = ..., + options: global___EnumValueOptions | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["name", b"name", "number", b"number", "options", b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "number", b"number", "options", b"options"]) -> None: ... + +global___EnumValueDescriptorProto = EnumValueDescriptorProto + +@typing_extensions.final +class ServiceDescriptorProto(google.protobuf.message.Message): + """Describes a service.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + METHOD_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + name: builtins.str + @property + def method(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MethodDescriptorProto]: ... + @property + def options(self) -> global___ServiceOptions: ... + def __init__( + self, + *, + name: builtins.str | None = ..., + method: collections.abc.Iterable[global___MethodDescriptorProto] | None = ..., + options: global___ServiceOptions | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name", b"name", "options", b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["method", b"method", "name", b"name", "options", b"options"]) -> None: ... + +global___ServiceDescriptorProto = ServiceDescriptorProto + +@typing_extensions.final +class MethodDescriptorProto(google.protobuf.message.Message): + """Describes a method of a service.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + INPUT_TYPE_FIELD_NUMBER: builtins.int + OUTPUT_TYPE_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + CLIENT_STREAMING_FIELD_NUMBER: builtins.int + SERVER_STREAMING_FIELD_NUMBER: builtins.int + name: builtins.str + input_type: builtins.str + """Input and output type names. These are resolved in the same way as + FieldDescriptorProto.type_name, but must refer to a message type. + """ + output_type: builtins.str + @property + def options(self) -> global___MethodOptions: ... + client_streaming: builtins.bool + """Identifies if client streams multiple client messages""" + server_streaming: builtins.bool + """Identifies if server streams multiple server messages""" + def __init__( + self, + *, + name: builtins.str | None = ..., + input_type: builtins.str | None = ..., + output_type: builtins.str | None = ..., + options: global___MethodOptions | None = ..., + client_streaming: builtins.bool | None = ..., + server_streaming: builtins.bool | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["client_streaming", b"client_streaming", "input_type", b"input_type", "name", b"name", "options", b"options", "output_type", b"output_type", "server_streaming", b"server_streaming"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["client_streaming", b"client_streaming", "input_type", b"input_type", "name", b"name", "options", b"options", "output_type", b"output_type", "server_streaming", b"server_streaming"]) -> None: ... + +global___MethodDescriptorProto = MethodDescriptorProto + +@typing_extensions.final +class FileOptions(google.protobuf.message.Message): + """Each of the definitions above may have "options" attached. These are + just annotations which may cause code to be generated slightly differently + or may contain hints for code that manipulates protocol messages. + + Clients may define custom options as extensions of the *Options messages. 
+ These extensions may not yet be known at parsing time, so the parser cannot + store the values in them. Instead it stores them in a field in the *Options + message called uninterpreted_option. This field must have the same name + across all *Options messages. We then use this field to populate the + extensions when we build a descriptor, at which point all protos have been + parsed and so all extensions are known. + + Extension numbers for custom options may be chosen as follows: + * For options which will only be used within a single application or + organization, or for experimental options, use field numbers 50000 + through 99999. It is up to you to ensure that you do not use the + same number for multiple options. + * For options which will be published and used publicly by multiple + independent entities, e-mail protobuf-global-extension-registry@google.com + to reserve extension numbers. Simply provide your project name (e.g. + Objective-C plugin) and your project website (if available) -- there's no + need to explain how you intend to use them. Usually you only need one + extension number. You can declare multiple options with only one extension + number by putting them in a sub-message. See the Custom Options section of + the docs for examples: + https://developers.google.com/protocol-buffers/docs/proto#options + If this turns out to be popular, a web service will be set up + to automatically assign option numbers. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _OptimizeMode: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _OptimizeModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FileOptions._OptimizeMode.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SPEED: FileOptions._OptimizeMode.ValueType # 1 + """Generate complete code for parsing, serialization,""" + CODE_SIZE: FileOptions._OptimizeMode.ValueType # 2 + """etc. + Use ReflectionOps to implement these methods. + """ + LITE_RUNTIME: FileOptions._OptimizeMode.ValueType # 3 + """Generate code using MessageLite and the lite runtime.""" + + class OptimizeMode(_OptimizeMode, metaclass=_OptimizeModeEnumTypeWrapper): + """Generated classes can be optimized for speed or code size.""" + + SPEED: FileOptions.OptimizeMode.ValueType # 1 + """Generate complete code for parsing, serialization,""" + CODE_SIZE: FileOptions.OptimizeMode.ValueType # 2 + """etc. + Use ReflectionOps to implement these methods. 
+ """ + LITE_RUNTIME: FileOptions.OptimizeMode.ValueType # 3 + """Generate code using MessageLite and the lite runtime.""" + + JAVA_PACKAGE_FIELD_NUMBER: builtins.int + JAVA_OUTER_CLASSNAME_FIELD_NUMBER: builtins.int + JAVA_MULTIPLE_FILES_FIELD_NUMBER: builtins.int + JAVA_GENERATE_EQUALS_AND_HASH_FIELD_NUMBER: builtins.int + JAVA_STRING_CHECK_UTF8_FIELD_NUMBER: builtins.int + OPTIMIZE_FOR_FIELD_NUMBER: builtins.int + GO_PACKAGE_FIELD_NUMBER: builtins.int + CC_GENERIC_SERVICES_FIELD_NUMBER: builtins.int + JAVA_GENERIC_SERVICES_FIELD_NUMBER: builtins.int + PY_GENERIC_SERVICES_FIELD_NUMBER: builtins.int + PHP_GENERIC_SERVICES_FIELD_NUMBER: builtins.int + DEPRECATED_FIELD_NUMBER: builtins.int + CC_ENABLE_ARENAS_FIELD_NUMBER: builtins.int + OBJC_CLASS_PREFIX_FIELD_NUMBER: builtins.int + CSHARP_NAMESPACE_FIELD_NUMBER: builtins.int + SWIFT_PREFIX_FIELD_NUMBER: builtins.int + PHP_CLASS_PREFIX_FIELD_NUMBER: builtins.int + PHP_NAMESPACE_FIELD_NUMBER: builtins.int + PHP_METADATA_NAMESPACE_FIELD_NUMBER: builtins.int + RUBY_PACKAGE_FIELD_NUMBER: builtins.int + UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int + java_package: builtins.str + """Sets the Java package where classes generated from this .proto will be + placed. By default, the proto package is used, but this is often + inappropriate because proto packages do not normally start with backwards + domain names. + """ + java_outer_classname: builtins.str + """Controls the name of the wrapper Java class generated for the .proto file. + That class will always contain the .proto file's getDescriptor() method as + well as any top-level extensions defined in the .proto file. + If java_multiple_files is disabled, then all the other classes from the + .proto file will be nested inside the single wrapper outer class. + """ + java_multiple_files: builtins.bool + """If enabled, then the Java code generator will generate a separate .java + file for each top-level message, enum, and service defined in the .proto + file. Thus, these types will *not* be nested inside the wrapper class + named by java_outer_classname. However, the wrapper class will still be + generated to contain the file's getDescriptor() method as well as any + top-level extensions defined in the file. + """ + java_generate_equals_and_hash: builtins.bool + """This option does nothing.""" + java_string_check_utf8: builtins.bool + """If set true, then the Java2 code generator will generate code that + throws an exception whenever an attempt is made to assign a non-UTF-8 + byte sequence to a string field. + Message reflection will do the same. + However, an extension field still accepts non-UTF-8 byte sequences. + This option has no effect on when used with the lite runtime. + """ + optimize_for: global___FileOptions.OptimizeMode.ValueType + go_package: builtins.str + """Sets the Go package where structs generated from this .proto will be + placed. If omitted, the Go package will be derived from the following: + - The basename of the package import path, if provided. + - Otherwise, the package statement in the .proto file, if present. + - Otherwise, the basename of the .proto file, without extension. + """ + cc_generic_services: builtins.bool + """Should generic services be generated in each language? "Generic" services + are not specific to any particular RPC system. They are generated by the + main code generators in each language (without additional plugins). + Generic services were the only kind of service generation supported by + early versions of google.protobuf. 
+ + Generic services are now considered deprecated in favor of using plugins + that generate code specific to your particular RPC system. Therefore, + these default to false. Old code which depends on generic services should + explicitly set them to true. + """ + java_generic_services: builtins.bool + py_generic_services: builtins.bool + php_generic_services: builtins.bool + deprecated: builtins.bool + """Is this file deprecated? + Depending on the target platform, this can emit Deprecated annotations + for everything in the file, or it will be completely ignored; in the very + least, this is a formalization for deprecating files. + """ + cc_enable_arenas: builtins.bool + """Enables the use of arenas for the proto messages in this file. This applies + only to generated classes for C++. + """ + objc_class_prefix: builtins.str + """Sets the objective c class prefix which is prepended to all objective c + generated classes from this .proto. There is no default. + """ + csharp_namespace: builtins.str + """Namespace for generated classes; defaults to the package.""" + swift_prefix: builtins.str + """By default Swift generators will take the proto package and CamelCase it + replacing '.' with underscore and use that to prefix the types/symbols + defined. When this options is provided, they will use this value instead + to prefix the types/symbols defined. + """ + php_class_prefix: builtins.str + """Sets the php class prefix which is prepended to all php generated classes + from this .proto. Default is empty. + """ + php_namespace: builtins.str + """Use this option to change the namespace of php generated classes. Default + is empty. When this option is empty, the package name will be used for + determining the namespace. + """ + php_metadata_namespace: builtins.str + """Use this option to change the namespace of php generated metadata classes. + Default is empty. When this option is empty, the proto file name will be + used for determining the namespace. + """ + ruby_package: builtins.str + """Use this option to change the package of ruby generated classes. Default + is empty. When this option is not set, the package name will be used for + determining the ruby package. + """ + @property + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. + See the documentation for the "Options" section above. 
+ """ + def __init__( + self, + *, + java_package: builtins.str | None = ..., + java_outer_classname: builtins.str | None = ..., + java_multiple_files: builtins.bool | None = ..., + java_generate_equals_and_hash: builtins.bool | None = ..., + java_string_check_utf8: builtins.bool | None = ..., + optimize_for: global___FileOptions.OptimizeMode.ValueType | None = ..., + go_package: builtins.str | None = ..., + cc_generic_services: builtins.bool | None = ..., + java_generic_services: builtins.bool | None = ..., + py_generic_services: builtins.bool | None = ..., + php_generic_services: builtins.bool | None = ..., + deprecated: builtins.bool | None = ..., + cc_enable_arenas: builtins.bool | None = ..., + objc_class_prefix: builtins.str | None = ..., + csharp_namespace: builtins.str | None = ..., + swift_prefix: builtins.str | None = ..., + php_class_prefix: builtins.str | None = ..., + php_namespace: builtins.str | None = ..., + php_metadata_namespace: builtins.str | None = ..., + ruby_package: builtins.str | None = ..., + uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["cc_enable_arenas", b"cc_enable_arenas", "cc_generic_services", b"cc_generic_services", "csharp_namespace", b"csharp_namespace", "deprecated", b"deprecated", "go_package", b"go_package", "java_generate_equals_and_hash", b"java_generate_equals_and_hash", "java_generic_services", b"java_generic_services", "java_multiple_files", b"java_multiple_files", "java_outer_classname", b"java_outer_classname", "java_package", b"java_package", "java_string_check_utf8", b"java_string_check_utf8", "objc_class_prefix", b"objc_class_prefix", "optimize_for", b"optimize_for", "php_class_prefix", b"php_class_prefix", "php_generic_services", b"php_generic_services", "php_metadata_namespace", b"php_metadata_namespace", "php_namespace", b"php_namespace", "py_generic_services", b"py_generic_services", "ruby_package", b"ruby_package", "swift_prefix", b"swift_prefix"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["cc_enable_arenas", b"cc_enable_arenas", "cc_generic_services", b"cc_generic_services", "csharp_namespace", b"csharp_namespace", "deprecated", b"deprecated", "go_package", b"go_package", "java_generate_equals_and_hash", b"java_generate_equals_and_hash", "java_generic_services", b"java_generic_services", "java_multiple_files", b"java_multiple_files", "java_outer_classname", b"java_outer_classname", "java_package", b"java_package", "java_string_check_utf8", b"java_string_check_utf8", "objc_class_prefix", b"objc_class_prefix", "optimize_for", b"optimize_for", "php_class_prefix", b"php_class_prefix", "php_generic_services", b"php_generic_services", "php_metadata_namespace", b"php_metadata_namespace", "php_namespace", b"php_namespace", "py_generic_services", b"py_generic_services", "ruby_package", b"ruby_package", "swift_prefix", b"swift_prefix", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... 
+ +global___FileOptions = FileOptions + +@typing_extensions.final +class MessageOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MESSAGE_SET_WIRE_FORMAT_FIELD_NUMBER: builtins.int + NO_STANDARD_DESCRIPTOR_ACCESSOR_FIELD_NUMBER: builtins.int + DEPRECATED_FIELD_NUMBER: builtins.int + MAP_ENTRY_FIELD_NUMBER: builtins.int + UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int + message_set_wire_format: builtins.bool + """Set true to use the old proto1 MessageSet wire format for extensions. + This is provided for backwards-compatibility with the MessageSet wire + format. You should not use this for any other reason: It's less + efficient, has fewer features, and is more complicated. + + The message must be defined exactly as follows: + message Foo { + option message_set_wire_format = true; + extensions 4 to max; + } + Note that the message cannot have any defined fields; MessageSets only + have extensions. + + All extensions of your type must be singular messages; e.g. they cannot + be int32s, enums, or repeated messages. + + Because this is an option, the above two restrictions are not enforced by + the protocol compiler. + """ + no_standard_descriptor_accessor: builtins.bool + """Disables the generation of the standard "descriptor()" accessor, which can + conflict with a field of the same name. This is meant to make migration + from proto1 easier; new code should avoid fields named "descriptor". + """ + deprecated: builtins.bool + """Is this message deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the message, or it will be completely ignored; in the very least, + this is a formalization for deprecating messages. + """ + map_entry: builtins.bool + """Whether the message is an automatically generated map entry type for the + maps field. + + For maps fields: + map map_field = 1; + The parsed descriptor looks like: + message MapFieldEntry { + option map_entry = true; + optional KeyType key = 1; + optional ValueType value = 2; + } + repeated MapFieldEntry map_field = 1; + + Implementations may choose not to generate the map_entry=true message, but + use a native map in the target language to hold the keys and values. + The reflection APIs in such implementations still need to work as + if the field is a repeated message field. + + NOTE: Do not set the option in .proto files. Always use the maps syntax + instead. The option should only be implicitly set by the proto compiler + parser. + """ + @property + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + def __init__( + self, + *, + message_set_wire_format: builtins.bool | None = ..., + no_standard_descriptor_accessor: builtins.bool | None = ..., + deprecated: builtins.bool | None = ..., + map_entry: builtins.bool | None = ..., + uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["deprecated", b"deprecated", "map_entry", b"map_entry", "message_set_wire_format", b"message_set_wire_format", "no_standard_descriptor_accessor", b"no_standard_descriptor_accessor"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["deprecated", b"deprecated", "map_entry", b"map_entry", "message_set_wire_format", b"message_set_wire_format", "no_standard_descriptor_accessor", b"no_standard_descriptor_accessor", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + +global___MessageOptions = MessageOptions + +@typing_extensions.final +class FieldOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _CType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _CTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._CType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + STRING: FieldOptions._CType.ValueType # 0 + """Default mode.""" + CORD: FieldOptions._CType.ValueType # 1 + STRING_PIECE: FieldOptions._CType.ValueType # 2 + + class CType(_CType, metaclass=_CTypeEnumTypeWrapper): ... + STRING: FieldOptions.CType.ValueType # 0 + """Default mode.""" + CORD: FieldOptions.CType.ValueType # 1 + STRING_PIECE: FieldOptions.CType.ValueType # 2 + + class _JSType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _JSTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._JSType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + JS_NORMAL: FieldOptions._JSType.ValueType # 0 + """Use the default type.""" + JS_STRING: FieldOptions._JSType.ValueType # 1 + """Use JavaScript strings.""" + JS_NUMBER: FieldOptions._JSType.ValueType # 2 + """Use JavaScript numbers.""" + + class JSType(_JSType, metaclass=_JSTypeEnumTypeWrapper): ... + JS_NORMAL: FieldOptions.JSType.ValueType # 0 + """Use the default type.""" + JS_STRING: FieldOptions.JSType.ValueType # 1 + """Use JavaScript strings.""" + JS_NUMBER: FieldOptions.JSType.ValueType # 2 + """Use JavaScript numbers.""" + + CTYPE_FIELD_NUMBER: builtins.int + PACKED_FIELD_NUMBER: builtins.int + JSTYPE_FIELD_NUMBER: builtins.int + LAZY_FIELD_NUMBER: builtins.int + UNVERIFIED_LAZY_FIELD_NUMBER: builtins.int + DEPRECATED_FIELD_NUMBER: builtins.int + WEAK_FIELD_NUMBER: builtins.int + UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int + ctype: global___FieldOptions.CType.ValueType + """The ctype option instructs the C++ code generator to use a different + representation of the field than it normally would. See the specific + options below. This option is not yet implemented in the open source + release -- sorry, we'll try to include it in a future version! + """ + packed: builtins.bool + """The packed option can be enabled for repeated primitive fields to enable + a more efficient representation on the wire. Rather than repeatedly + writing the tag and type for each element, the entire array is encoded as + a single length-delimited blob. In proto3, only explicit setting it to + false will avoid using packed encoding. + """ + jstype: global___FieldOptions.JSType.ValueType + """The jstype option determines the JavaScript type used for values of the + field. The option is permitted only for 64 bit integral and fixed types + (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + is represented as JavaScript string, which avoids loss of precision that + can happen when a large value is converted to a floating point JavaScript. 
+ Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + use the JavaScript "number" type. The behavior of the default option + JS_NORMAL is implementation dependent. + + This option is an enum to permit additional types to be added, e.g. + goog.math.Integer. + """ + lazy: builtins.bool + """Should this field be parsed lazily? Lazy applies only to message-type + fields. It means that when the outer message is initially parsed, the + inner message's contents will not be parsed but instead stored in encoded + form. The inner message will actually be parsed when it is first accessed. + + This is only a hint. Implementations are free to choose whether to use + eager or lazy parsing regardless of the value of this option. However, + setting this option true suggests that the protocol author believes that + using lazy parsing on this field is worth the additional bookkeeping + overhead typically needed to implement it. + + This option does not affect the public interface of any generated code; + all method signatures remain the same. Furthermore, thread-safety of the + interface is not affected by this option; const methods remain safe to + call from multiple threads concurrently, while non-const methods continue + to require exclusive access. + + + Note that implementations may choose not to check required fields within + a lazy sub-message. That is, calling IsInitialized() on the outer message + may return true even if the inner message has missing required fields. + This is necessary because otherwise the inner message would have to be + parsed in order to perform the check, defeating the purpose of lazy + parsing. An implementation which chooses not to check required fields + must be consistent about it. That is, for any particular sub-message, the + implementation must either *always* check its required fields, or *never* + check its required fields, regardless of whether or not the message has + been parsed. + + As of 2021, lazy does no correctness checks on the byte stream during + parsing. This may lead to crashes if and when an invalid byte stream is + finally parsed upon access. + + TODO(b/211906113): Enable validation on lazy fields. + """ + unverified_lazy: builtins.bool + """unverified_lazy does no correctness checks on the byte stream. This should + only be used where lazy with verification is prohibitive for performance + reasons. + """ + deprecated: builtins.bool + """Is this field deprecated? + Depending on the target platform, this can emit Deprecated annotations + for accessors, or it will be completely ignored; in the very least, this + is a formalization for deprecating fields. + """ + weak: builtins.bool + """For Google-internal migration only. Do not use.""" + @property + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + def __init__( + self, + *, + ctype: global___FieldOptions.CType.ValueType | None = ..., + packed: builtins.bool | None = ..., + jstype: global___FieldOptions.JSType.ValueType | None = ..., + lazy: builtins.bool | None = ..., + unverified_lazy: builtins.bool | None = ..., + deprecated: builtins.bool | None = ..., + weak: builtins.bool | None = ..., + uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["ctype", b"ctype", "deprecated", b"deprecated", "jstype", b"jstype", "lazy", b"lazy", "packed", b"packed", "unverified_lazy", b"unverified_lazy", "weak", b"weak"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["ctype", b"ctype", "deprecated", b"deprecated", "jstype", b"jstype", "lazy", b"lazy", "packed", b"packed", "uninterpreted_option", b"uninterpreted_option", "unverified_lazy", b"unverified_lazy", "weak", b"weak"]) -> None: ... + +global___FieldOptions = FieldOptions + +@typing_extensions.final +class OneofOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int + @property + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + def __init__( + self, + *, + uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["uninterpreted_option", b"uninterpreted_option"]) -> None: ... + +global___OneofOptions = OneofOptions + +@typing_extensions.final +class EnumOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ALLOW_ALIAS_FIELD_NUMBER: builtins.int + DEPRECATED_FIELD_NUMBER: builtins.int + UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int + allow_alias: builtins.bool + """Set this option to true to allow mapping different tag names to the same + value. + """ + deprecated: builtins.bool + """Is this enum deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the enum, or it will be completely ignored; in the very least, this + is a formalization for deprecating enums. + """ + @property + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + def __init__( + self, + *, + allow_alias: builtins.bool | None = ..., + deprecated: builtins.bool | None = ..., + uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["allow_alias", b"allow_alias", "deprecated", b"deprecated"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_alias", b"allow_alias", "deprecated", b"deprecated", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + +global___EnumOptions = EnumOptions + +@typing_extensions.final +class EnumValueOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DEPRECATED_FIELD_NUMBER: builtins.int + UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int + deprecated: builtins.bool + """Is this enum value deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the enum value, or it will be completely ignored; in the very least, + this is a formalization for deprecating enum values. + """ + @property + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. 
See above.""" + def __init__( + self, + *, + deprecated: builtins.bool | None = ..., + uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["deprecated", b"deprecated"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deprecated", b"deprecated", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + +global___EnumValueOptions = EnumValueOptions + +@typing_extensions.final +class ServiceOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DEPRECATED_FIELD_NUMBER: builtins.int + UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int + deprecated: builtins.bool + """Note: Field numbers 1 through 32 are reserved for Google's internal RPC + framework. We apologize for hoarding these numbers to ourselves, but + we were already using them long before we decided to release Protocol + Buffers. + + Is this service deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the service, or it will be completely ignored; in the very least, + this is a formalization for deprecating services. + """ + @property + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + def __init__( + self, + *, + deprecated: builtins.bool | None = ..., + uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["deprecated", b"deprecated"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deprecated", b"deprecated", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + +global___ServiceOptions = ServiceOptions + +@typing_extensions.final +class MethodOptions(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _IdempotencyLevel: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _IdempotencyLevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[MethodOptions._IdempotencyLevel.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + IDEMPOTENCY_UNKNOWN: MethodOptions._IdempotencyLevel.ValueType # 0 + NO_SIDE_EFFECTS: MethodOptions._IdempotencyLevel.ValueType # 1 + """implies idempotent""" + IDEMPOTENT: MethodOptions._IdempotencyLevel.ValueType # 2 + """idempotent, but may have side effects""" + + class IdempotencyLevel(_IdempotencyLevel, metaclass=_IdempotencyLevelEnumTypeWrapper): + """Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + or neither? HTTP based RPC implementation may choose GET verb for safe + methods, and PUT verb for idempotent methods instead of the default POST. + """ + + IDEMPOTENCY_UNKNOWN: MethodOptions.IdempotencyLevel.ValueType # 0 + NO_SIDE_EFFECTS: MethodOptions.IdempotencyLevel.ValueType # 1 + """implies idempotent""" + IDEMPOTENT: MethodOptions.IdempotencyLevel.ValueType # 2 + """idempotent, but may have side effects""" + + DEPRECATED_FIELD_NUMBER: builtins.int + IDEMPOTENCY_LEVEL_FIELD_NUMBER: builtins.int + UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int + deprecated: builtins.bool + """Note: Field numbers 1 through 32 are reserved for Google's internal RPC + framework. 
We apologize for hoarding these numbers to ourselves, but + we were already using them long before we decided to release Protocol + Buffers. + + Is this method deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the method, or it will be completely ignored; in the very least, + this is a formalization for deprecating methods. + """ + idempotency_level: global___MethodOptions.IdempotencyLevel.ValueType + @property + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + def __init__( + self, + *, + deprecated: builtins.bool | None = ..., + idempotency_level: global___MethodOptions.IdempotencyLevel.ValueType | None = ..., + uninterpreted_option: collections.abc.Iterable[global___UninterpretedOption] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["deprecated", b"deprecated", "idempotency_level", b"idempotency_level"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deprecated", b"deprecated", "idempotency_level", b"idempotency_level", "uninterpreted_option", b"uninterpreted_option"]) -> None: ... + +global___MethodOptions = MethodOptions + +@typing_extensions.final +class UninterpretedOption(google.protobuf.message.Message): + """A message representing a option the parser does not recognize. This only + appears in options protos created by the compiler::Parser class. + DescriptorPool resolves these when building Descriptor objects. Therefore, + options protos in descriptor objects (e.g. returned by Descriptor::options(), + or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + in them. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class NamePart(google.protobuf.message.Message): + """The name of the uninterpreted option. Each string represents a segment in + a dot-separated name. is_extension is true iff a segment represents an + extension (denoted with parentheses in options specs in .proto files). + E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + "foo.(bar.baz).moo". + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_PART_FIELD_NUMBER: builtins.int + IS_EXTENSION_FIELD_NUMBER: builtins.int + name_part: builtins.str + is_extension: builtins.bool + def __init__( + self, + *, + name_part: builtins.str | None = ..., + is_extension: builtins.bool | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["is_extension", b"is_extension", "name_part", b"name_part"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["is_extension", b"is_extension", "name_part", b"name_part"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + IDENTIFIER_VALUE_FIELD_NUMBER: builtins.int + POSITIVE_INT_VALUE_FIELD_NUMBER: builtins.int + NEGATIVE_INT_VALUE_FIELD_NUMBER: builtins.int + DOUBLE_VALUE_FIELD_NUMBER: builtins.int + STRING_VALUE_FIELD_NUMBER: builtins.int + AGGREGATE_VALUE_FIELD_NUMBER: builtins.int + @property + def name(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption.NamePart]: ... + identifier_value: builtins.str + """The value of the uninterpreted option, in whatever type the tokenizer + identified it as during parsing. Exactly one of these should be set. 
+ """ + positive_int_value: builtins.int + negative_int_value: builtins.int + double_value: builtins.float + string_value: builtins.bytes + aggregate_value: builtins.str + def __init__( + self, + *, + name: collections.abc.Iterable[global___UninterpretedOption.NamePart] | None = ..., + identifier_value: builtins.str | None = ..., + positive_int_value: builtins.int | None = ..., + negative_int_value: builtins.int | None = ..., + double_value: builtins.float | None = ..., + string_value: builtins.bytes | None = ..., + aggregate_value: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["aggregate_value", b"aggregate_value", "double_value", b"double_value", "identifier_value", b"identifier_value", "negative_int_value", b"negative_int_value", "positive_int_value", b"positive_int_value", "string_value", b"string_value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["aggregate_value", b"aggregate_value", "double_value", b"double_value", "identifier_value", b"identifier_value", "name", b"name", "negative_int_value", b"negative_int_value", "positive_int_value", b"positive_int_value", "string_value", b"string_value"]) -> None: ... + +global___UninterpretedOption = UninterpretedOption + +@typing_extensions.final +class SourceCodeInfo(google.protobuf.message.Message): + """=================================================================== + Optional source code info + + Encapsulates information about the original source file from which a + FileDescriptorProto was generated. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class Location(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PATH_FIELD_NUMBER: builtins.int + SPAN_FIELD_NUMBER: builtins.int + LEADING_COMMENTS_FIELD_NUMBER: builtins.int + TRAILING_COMMENTS_FIELD_NUMBER: builtins.int + LEADING_DETACHED_COMMENTS_FIELD_NUMBER: builtins.int + @property + def path(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Identifies which part of the FileDescriptorProto was defined at this + location. + + Each element is a field number or an index. They form a path from + the root FileDescriptorProto to the place where the definition occurs. + For example, this path: + [ 4, 3, 2, 7, 1 ] + refers to: + file.message_type(3) // 4, 3 + .field(7) // 2, 7 + .name() // 1 + This is because FileDescriptorProto.message_type has field number 4: + repeated DescriptorProto message_type = 4; + and DescriptorProto.field has field number 2: + repeated FieldDescriptorProto field = 2; + and FieldDescriptorProto.name has field number 1: + optional string name = 1; + + Thus, the above path gives the location of a field name. If we removed + the last element: + [ 4, 3, 2, 7 ] + this path refers to the whole field declaration (from the beginning + of the label to the terminating semicolon). + """ + @property + def span(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Always has exactly three or four elements: start line, start column, + end line (optional, otherwise assumed same as start line), end column. + These are packed into a single field for efficiency. Note that line + and column numbers are zero-based -- typically you will want to add + 1 to each before displaying to a user. 
+ """ + leading_comments: builtins.str + """If this SourceCodeInfo represents a complete declaration, these are any + comments appearing before and after the declaration which appear to be + attached to the declaration. + + A series of line comments appearing on consecutive lines, with no other + tokens appearing on those lines, will be treated as a single comment. + + leading_detached_comments will keep paragraphs of comments that appear + before (but not connected to) the current element. Each paragraph, + separated by empty lines, will be one comment element in the repeated + field. + + Only the comment content is provided; comment markers (e.g. //) are + stripped out. For block comments, leading whitespace and an asterisk + will be stripped from the beginning of each line other than the first. + Newlines are included in the output. + + Examples: + + optional int32 foo = 1; // Comment attached to foo. + // Comment attached to bar. + optional int32 bar = 2; + + optional string baz = 3; + // Comment attached to baz. + // Another line attached to baz. + + // Comment attached to moo. + // + // Another line attached to moo. + optional double moo = 4; + + // Detached comment for corge. This is not leading or trailing comments + // to moo or corge because there are blank lines separating it from + // both. + + // Detached comment for corge paragraph 2. + + optional string corge = 5; + /* Block comment attached + * to corge. Leading asterisks + * will be removed. */ + /* Block comment attached to + * grault. */ + optional int32 grault = 6; + + // ignored detached comments. + """ + trailing_comments: builtins.str + @property + def leading_detached_comments(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + path: collections.abc.Iterable[builtins.int] | None = ..., + span: collections.abc.Iterable[builtins.int] | None = ..., + leading_comments: builtins.str | None = ..., + trailing_comments: builtins.str | None = ..., + leading_detached_comments: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["leading_comments", b"leading_comments", "trailing_comments", b"trailing_comments"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["leading_comments", b"leading_comments", "leading_detached_comments", b"leading_detached_comments", "path", b"path", "span", b"span", "trailing_comments", b"trailing_comments"]) -> None: ... + + LOCATION_FIELD_NUMBER: builtins.int + @property + def location(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SourceCodeInfo.Location]: + """A Location identifies a piece of source code in a .proto file which + corresponds to a particular definition. This information is intended + to be useful to IDEs, code indexers, documentation generators, and similar + tools. + + For example, say we have a file like: + message Foo { + optional string foo = 1; + } + Let's look at just the field definition: + optional string foo = 1; + ^ ^^ ^^ ^ ^^^ + a bc de f ghi + We have the following locations: + span path represents + [a,i) [ 4, 0, 2, 0 ] The whole field definition. + [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + + Notes: + - A location may refer to a repeated field itself (i.e. not to any + particular index within it). 
This is used whenever a set of elements are + logically enclosed in a single code segment. For example, an entire + extend block (possibly containing multiple extension definitions) will + have an outer location whose path refers to the "extensions" repeated + field without an index. + - Multiple locations may have the same path. This happens when a single + logical declaration is spread out across multiple places. The most + obvious example is the "extend" block again -- there may be multiple + extend blocks in the same scope, each of which will have the same path. + - A location's span is not always a subset of its parent's span. For + example, the "extendee" of an extension declaration appears at the + beginning of the "extend" block and is shared by all extensions within + the block. + - Just because a location's span is a subset of some other location's span + does not mean that it is a descendant. For example, a "group" defines + both a type and a field in a single declaration. Thus, the locations + corresponding to the type and field and their components will overlap. + - Code which tries to interpret locations should probably be designed to + ignore those that it doesn't understand, as more types of locations could + be recorded in the future. + """ + def __init__( + self, + *, + location: collections.abc.Iterable[global___SourceCodeInfo.Location] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["location", b"location"]) -> None: ... + +global___SourceCodeInfo = SourceCodeInfo + +@typing_extensions.final +class GeneratedCodeInfo(google.protobuf.message.Message): + """Describes the relationship between generated code and its original source + file. A GeneratedCodeInfo message is associated with only one generated + source file, but may contain references to different source .proto files. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class Annotation(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PATH_FIELD_NUMBER: builtins.int + SOURCE_FILE_FIELD_NUMBER: builtins.int + BEGIN_FIELD_NUMBER: builtins.int + END_FIELD_NUMBER: builtins.int + @property + def path(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Identifies the element in the original source .proto file. This field + is formatted the same as SourceCodeInfo.Location.path. + """ + source_file: builtins.str + """Identifies the filesystem path to the original source .proto.""" + begin: builtins.int + """Identifies the starting offset in bytes in the generated code + that relates to the identified object. + """ + end: builtins.int + """Identifies the ending offset in bytes in the generated code that + relates to the identified offset. The end offset should be one past + the last relevant byte (so the length of the text = end - begin). + """ + def __init__( + self, + *, + path: collections.abc.Iterable[builtins.int] | None = ..., + source_file: builtins.str | None = ..., + begin: builtins.int | None = ..., + end: builtins.int | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["begin", b"begin", "end", b"end", "source_file", b"source_file"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["begin", b"begin", "end", b"end", "path", b"path", "source_file", b"source_file"]) -> None: ... 
+ + ANNOTATION_FIELD_NUMBER: builtins.int + @property + def annotation(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GeneratedCodeInfo.Annotation]: + """An Annotation connects some span of text in generated code to an element + of its generating .proto file. + """ + def __init__( + self, + *, + annotation: collections.abc.Iterable[global___GeneratedCodeInfo.Annotation] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["annotation", b"annotation"]) -> None: ... + +global___GeneratedCodeInfo = GeneratedCodeInfo diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/descriptor_pool.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/descriptor_pool.pyi new file mode 100644 index 00000000..dd613854 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/descriptor_pool.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +class DescriptorPool: + def __new__(cls, descriptor_db: Incomplete | None = ...): ... + def __init__(self, descriptor_db: Incomplete | None = ...) -> None: ... + def Add(self, file_desc_proto): ... + def AddSerializedFile(self, serialized_file_desc_proto): ... + def AddDescriptor(self, desc): ... + def AddEnumDescriptor(self, enum_desc): ... + def AddServiceDescriptor(self, service_desc): ... + def AddExtensionDescriptor(self, extension): ... + def AddFileDescriptor(self, file_desc): ... + def FindFileByName(self, file_name): ... + def FindFileContainingSymbol(self, symbol): ... + def FindMessageTypeByName(self, full_name): ... + def FindEnumTypeByName(self, full_name): ... + def FindFieldByName(self, full_name): ... + def FindOneofByName(self, full_name): ... + def FindExtensionByName(self, full_name): ... + def FindExtensionByNumber(self, message_descriptor, number): ... + def FindAllExtensions(self, message_descriptor): ... + def FindServiceByName(self, full_name): ... + def FindMethodByName(self, full_name): ... + +def Default(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/duration_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/duration_pb2.pyi new file mode 100644 index 00000000..d8f7931a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/duration_pb2.pyi @@ -0,0 +1,134 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol Buffers - Google's data interchange format +Copyright 2008 Google Inc. All rights reserved. +https://developers.google.com/protocol-buffers/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.well_known_types +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class Duration(google.protobuf.message.Message, google.protobuf.internal.well_known_types.Duration): + """A Duration represents a signed, fixed-length span of time represented + as a count of seconds and fractions of seconds at nanosecond + resolution. It is independent of any calendar and concepts like "day" + or "month". It is related to Timestamp in that the difference between + two Timestamp values is a Duration and it can be added or subtracted + from a Timestamp. Range is approximately +-10,000 years. + + # Examples + + Example 1: Compute Duration from two Timestamps in pseudo code. + + Timestamp start = ...; + Timestamp end = ...; + Duration duration = ...; + + duration.seconds = end.seconds - start.seconds; + duration.nanos = end.nanos - start.nanos; + + if (duration.seconds < 0 && duration.nanos > 0) { + duration.seconds += 1; + duration.nanos -= 1000000000; + } else if (duration.seconds > 0 && duration.nanos < 0) { + duration.seconds -= 1; + duration.nanos += 1000000000; + } + + Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + + Timestamp start = ...; + Duration duration = ...; + Timestamp end = ...; + + end.seconds = start.seconds + duration.seconds; + end.nanos = start.nanos + duration.nanos; + + if (end.nanos < 0) { + end.seconds -= 1; + end.nanos += 1000000000; + } else if (end.nanos >= 1000000000) { + end.seconds += 1; + end.nanos -= 1000000000; + } + + Example 3: Compute Duration from datetime.timedelta in Python. + + td = datetime.timedelta(days=3, minutes=10) + duration = Duration() + duration.FromTimedelta(td) + + # JSON Mapping + + In JSON format, the Duration type is encoded as a string rather than an + object, where the string ends in the suffix "s" (indicating seconds) and + is preceded by the number of seconds, with nanoseconds expressed as + fractional seconds. For example, 3 seconds with 0 nanoseconds should be + encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + microsecond should be expressed in JSON format as "3.000001s". + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SECONDS_FIELD_NUMBER: builtins.int + NANOS_FIELD_NUMBER: builtins.int + seconds: builtins.int + """Signed seconds of the span of time. Must be from -315,576,000,000 + to +315,576,000,000 inclusive. 
Note: these bounds are computed from: + 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + """ + nanos: builtins.int + """Signed fractions of a second at nanosecond resolution of the span + of time. Durations less than one second are represented with a 0 + `seconds` field and a positive or negative `nanos` field. For durations + of one second or more, a non-zero value for the `nanos` field must be + of the same sign as the `seconds` field. Must be from -999,999,999 + to +999,999,999 inclusive. + """ + def __init__( + self, + *, + seconds: builtins.int | None = ..., + nanos: builtins.int | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["nanos", b"nanos", "seconds", b"seconds"]) -> None: ... + +global___Duration = Duration diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/empty_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/empty_pb2.pyi new file mode 100644 index 00000000..22ef26b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/empty_pb2.pyi @@ -0,0 +1,62 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol Buffers - Google's data interchange format +Copyright 2008 Google Inc. All rights reserved. +https://developers.google.com/protocol-buffers/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class Empty(google.protobuf.message.Message): + """A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to use it as the request + or the response type of an API method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + } + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +global___Empty = Empty diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/field_mask_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/field_mask_pb2.pyi new file mode 100644 index 00000000..1721c1bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/field_mask_pb2.pyi @@ -0,0 +1,265 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol Buffers - Google's data interchange format +Copyright 2008 Google Inc. All rights reserved. +https://developers.google.com/protocol-buffers/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.well_known_types +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class FieldMask(google.protobuf.message.Message, google.protobuf.internal.well_known_types.FieldMask): + """`FieldMask` represents a set of symbolic field paths, for example: + + paths: "f.a" + paths: "f.b.d" + + Here `f` represents a field in some root message, `a` and `b` + fields in the message found in `f`, and `d` a field found in the + message in `f.b`. + + Field masks are used to specify a subset of fields that should be + returned by a get operation or modified by an update operation. + Field masks also have a custom JSON encoding (see below). + + # Field Masks in Projections + + When used in the context of a projection, a response message or + sub-message is filtered by the API to only contain those fields as + specified in the mask. 
For example, if the mask in the previous + example is applied to a response message as follows: + + f { + a : 22 + b { + d : 1 + x : 2 + } + y : 13 + } + z: 8 + + The result will not contain specific values for fields x,y and z + (their value will be set to the default, and omitted in proto text + output): + + + f { + a : 22 + b { + d : 1 + } + } + + A repeated field is not allowed except at the last position of a + paths string. + + If a FieldMask object is not present in a get operation, the + operation applies to all fields (as if a FieldMask of all fields + had been specified). + + Note that a field mask does not necessarily apply to the + top-level response message. In case of a REST get operation, the + field mask applies directly to the response, but in case of a REST + list operation, the mask instead applies to each individual message + in the returned resource list. In case of a REST custom method, + other definitions may be used. Where the mask applies will be + clearly documented together with its declaration in the API. In + any case, the effect on the returned resource/resources is required + behavior for APIs. + + # Field Masks in Update Operations + + A field mask in update operations specifies which fields of the + targeted resource are going to be updated. The API is required + to only change the values of the fields as specified in the mask + and leave the others untouched. If a resource is passed in to + describe the updated values, the API ignores the values of all + fields not covered by the mask. + + If a repeated field is specified for an update operation, new values will + be appended to the existing repeated field in the target resource. Note that + a repeated field is only allowed in the last position of a `paths` string. + + If a sub-message is specified in the last position of the field mask for an + update operation, then new value will be merged into the existing sub-message + in the target resource. + + For example, given the target message: + + f { + b { + d: 1 + x: 2 + } + c: [1] + } + + And an update message: + + f { + b { + d: 10 + } + c: [2] + } + + then if the field mask is: + + paths: ["f.b", "f.c"] + + then the result will be: + + f { + b { + d: 10 + x: 2 + } + c: [1, 2] + } + + An implementation may provide options to override this default behavior for + repeated and message fields. + + In order to reset a field's value to the default, the field must + be in the mask and set to the default value in the provided resource. + Hence, in order to reset all fields of a resource, provide a default + instance of the resource and set all fields in the mask, or do + not provide a mask as described below. + + If a field mask is not present on update, the operation applies to + all fields (as if a field mask of all fields has been specified). + Note that in the presence of schema evolution, this may mean that + fields the client does not know and has therefore not filled into + the request will be reset to their default. If this is unwanted + behavior, a specific service may require a client to always specify + a field mask, producing an error if not. + + As with get operations, the location of the resource which + describes the updated values in the request message depends on the + operation kind. In any case, the effect of the field mask is + required to be honored by the API. 
+ + ## Considerations for HTTP REST + + The HTTP kind of an update operation which uses a field mask must + be set to PATCH instead of PUT in order to satisfy HTTP semantics + (PUT must only be used for full updates). + + # JSON Encoding of Field Masks + + In JSON, a field mask is encoded as a single string where paths are + separated by a comma. Fields name in each path are converted + to/from lower-camel naming conventions. + + As an example, consider the following message declarations: + + message Profile { + User user = 1; + Photo photo = 2; + } + message User { + string display_name = 1; + string address = 2; + } + + In proto a field mask for `Profile` may look as such: + + mask { + paths: "user.display_name" + paths: "photo" + } + + In JSON, the same mask is represented as below: + + { + mask: "user.displayName,photo" + } + + # Field Masks and Oneof Fields + + Field masks treat fields in oneofs just as regular fields. Consider the + following message: + + message SampleMessage { + oneof test_oneof { + string name = 4; + SubMessage sub_message = 9; + } + } + + The field mask can be: + + mask { + paths: "name" + } + + Or: + + mask { + paths: "sub_message" + } + + Note that oneof type names ("test_oneof" in this case) cannot be used in + paths. + + ## Field Mask Verification + + The implementation of any API method which has a FieldMask type field in the + request should verify the included field paths, and return an + `INVALID_ARGUMENT` error if any path is unmappable. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PATHS_FIELD_NUMBER: builtins.int + @property + def paths(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """The set of field mask paths.""" + def __init__( + self, + *, + paths: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["paths", b"paths"]) -> None: ... + +global___FieldMask = FieldMask diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/api_implementation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/api_implementation.pyi new file mode 100644 index 00000000..4940124f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/api_implementation.pyi @@ -0,0 +1,3 @@ +def Type() -> str: ... +def Version() -> int: ... +def IsPythonDefaultSerializationDeterministic() -> bool: ... 
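Reviewer note: the JSON mapping described in the field_mask_pb2 docstring above can be exercised against the runtime protobuf package; the vendored stubs only type-check such code. A minimal sketch, assuming google.protobuf is installed:

    from google.protobuf import field_mask_pb2

    # Paths use snake_case in proto form...
    mask = field_mask_pb2.FieldMask(paths=["user.display_name", "photo"])
    # ...and are joined into one lowerCamelCase, comma-separated string in JSON form.
    print(mask.ToJsonString())   # user.displayName,photo

    # FromJsonString reverses the conversion back to snake_case paths.
    parsed = field_mask_pb2.FieldMask()
    parsed.FromJsonString("user.displayName,photo")
    print(list(parsed.paths))    # ['user.display_name', 'photo']

ToJsonString/FromJsonString come from the well_known_types.FieldMask mixin that the FieldMask stub above inherits from.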
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/containers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/containers.pyi new file mode 100644 index 00000000..80da52a6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/containers.pyi @@ -0,0 +1,98 @@ +from collections.abc import Callable, Iterable, Iterator, MutableMapping, Sequence +from typing import Any, TypeVar, overload +from typing_extensions import SupportsIndex + +from google.protobuf.descriptor import Descriptor +from google.protobuf.internal.message_listener import MessageListener +from google.protobuf.internal.python_message import GeneratedProtocolMessageType +from google.protobuf.internal.type_checkers import _ValueChecker +from google.protobuf.message import Message + +_T = TypeVar("_T") +_K = TypeVar("_K", bound=bool | int | str) +_ScalarV = TypeVar("_ScalarV", bound=bool | int | float | str | bytes) +_MessageV = TypeVar("_MessageV", bound=Message) +_M = TypeVar("_M") + +class BaseContainer(Sequence[_T]): + def __init__(self, message_listener: MessageListener) -> None: ... + def __len__(self) -> int: ... + def __ne__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def sort(self, *, key: Callable[[_T], Any] | None = ..., reverse: bool = ...) -> None: ... + @overload + def __getitem__(self, key: SupportsIndex) -> _T: ... + @overload + def __getitem__(self, key: slice) -> list[_T]: ... + +class RepeatedScalarFieldContainer(BaseContainer[_ScalarV]): + def __init__(self, message_listener: MessageListener, type_checker: _ValueChecker[_ScalarV]) -> None: ... + def append(self, value: _ScalarV) -> None: ... + def insert(self, key: int, value: _ScalarV) -> None: ... + def extend(self, elem_seq: Iterable[_ScalarV] | None) -> None: ... + def MergeFrom(self: _M, other: _M) -> None: ... + def remove(self, elem: _ScalarV) -> None: ... + def pop(self, key: int = ...) -> _ScalarV: ... + @overload + def __setitem__(self, key: int, value: _ScalarV) -> None: ... + @overload + def __setitem__(self, key: slice, value: Iterable[_ScalarV]) -> None: ... + def __delitem__(self, key: int | slice) -> None: ... + def __eq__(self, other: object) -> bool: ... + +class RepeatedCompositeFieldContainer(BaseContainer[_MessageV]): + def __init__(self, message_listener: MessageListener, message_descriptor: Descriptor) -> None: ... + def add(self, **kwargs: Any) -> _MessageV: ... + def append(self, value: _MessageV) -> None: ... + def insert(self, key: int, value: _MessageV) -> None: ... + def extend(self, elem_seq: Iterable[_MessageV]) -> None: ... + def MergeFrom(self: _M, other: _M) -> None: ... + def remove(self, elem: _MessageV) -> None: ... + def pop(self, key: int = ...) -> _MessageV: ... + def __delitem__(self, key: int | slice) -> None: ... + def __eq__(self, other: object) -> bool: ... + +class ScalarMap(MutableMapping[_K, _ScalarV]): + def __init__( + self, + message_listener: MessageListener, + key_checker: _ValueChecker[_K], + value_checker: _ValueChecker[_ScalarV], + entry_descriptor: Descriptor, + ) -> None: ... + def __setitem__(self, k: _K, v: _ScalarV) -> None: ... + def __delitem__(self, v: _K) -> None: ... + def __getitem__(self, k: _K) -> _ScalarV: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_K]: ... + def __eq__(self, other: object) -> bool: ... 
+ @overload + def get(self, key: _K, default: None = ...) -> _ScalarV: ... + @overload + def get(self, key: _K, default: _ScalarV | _T) -> _ScalarV | _T: ... + def MergeFrom(self: _M, other: _M): ... + def InvalidateIterators(self) -> None: ... + def GetEntryClass(self) -> GeneratedProtocolMessageType: ... + +class MessageMap(MutableMapping[_K, _MessageV]): + def __init__( + self, + message_listener: MessageListener, + message_descriptor: Descriptor, + key_checker: _ValueChecker[_K], + entry_descriptor: Descriptor, + ) -> None: ... + def __setitem__(self, k: _K, v: _MessageV) -> None: ... + def __delitem__(self, v: _K) -> None: ... + def __getitem__(self, k: _K) -> _MessageV: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_K]: ... + def __eq__(self, other: object) -> bool: ... + @overload + def get(self, key: _K, default: None = ...) -> _MessageV: ... + @overload + def get(self, key: _K, default: _MessageV | _T) -> _MessageV | _T: ... + def get_or_create(self, key: _K) -> _MessageV: ... + def MergeFrom(self: _M, other: _M): ... + def InvalidateIterators(self) -> None: ... + def GetEntryClass(self) -> GeneratedProtocolMessageType: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/decoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/decoder.pyi new file mode 100644 index 00000000..41a50035 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/decoder.pyi @@ -0,0 +1,63 @@ +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +from google.protobuf.descriptor import Descriptor, FieldDescriptor +from google.protobuf.message import Message + +_Decoder: TypeAlias = Callable[[str, int, int, Message, dict[FieldDescriptor, Any]], int] +_NewDefault: TypeAlias = Callable[[Message], Message] + +def ReadTag(buffer, pos): ... + +Int32Decoder: _Decoder +Int64Decoder: _Decoder +UInt32Decoder: _Decoder +UInt64Decoder: _Decoder +SInt32Decoder: _Decoder +SInt64Decoder: _Decoder +Fixed32Decoder: _Decoder +Fixed64Decoder: _Decoder +SFixed32Decoder: _Decoder +SFixed64Decoder: _Decoder +FloatDecoder: _Decoder +DoubleDecoder: _Decoder +BoolDecoder: _Decoder + +def EnumDecoder( + field_number: int, + is_repeated: bool, + is_packed: bool, + key: FieldDescriptor, + new_default: _NewDefault, + clear_if_default: bool = ..., +) -> _Decoder: ... +def StringDecoder( + field_number: int, + is_repeated: bool, + is_packed: bool, + key: FieldDescriptor, + new_default: _NewDefault, + clear_if_default: bool = ..., +) -> _Decoder: ... +def BytesDecoder( + field_number: int, + is_repeated: bool, + is_packed: bool, + key: FieldDescriptor, + new_default: _NewDefault, + clear_if_default: bool = ..., +) -> _Decoder: ... +def GroupDecoder( + field_number: int, is_repeated: bool, is_packed: bool, key: FieldDescriptor, new_default: _NewDefault +) -> _Decoder: ... +def MessageDecoder( + field_number: int, is_repeated: bool, is_packed: bool, key: FieldDescriptor, new_default: _NewDefault +) -> _Decoder: ... + +MESSAGE_SET_ITEM_TAG: bytes + +def MessageSetItemDecoder(descriptor: Descriptor) -> _Decoder: ... +def MapDecoder(field_descriptor, new_default, is_message_map) -> _Decoder: ... 
+ +SkipField: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/encoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/encoder.pyi new file mode 100644 index 00000000..278478ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/encoder.pyi @@ -0,0 +1,41 @@ +from collections.abc import Callable +from typing_extensions import TypeAlias + +from google.protobuf.descriptor import FieldDescriptor + +_Sizer: TypeAlias = Callable[[int, bool, bool], int] + +Int32Sizer: _Sizer +UInt32Sizer: _Sizer +SInt32Sizer: _Sizer +Fixed32Sizer: _Sizer +Fixed64Sizer: _Sizer +BoolSizer: _Sizer + +def StringSizer(field_number: int, is_repeated: bool, is_packed: bool) -> _Sizer: ... +def BytesSizer(field_number: int, is_repeated: bool, is_packed: bool) -> _Sizer: ... +def GroupSizer(field_number: int, is_repeated: bool, is_packed: bool) -> _Sizer: ... +def MessageSizer(field_number: int, is_repeated: bool, is_packed: bool) -> _Sizer: ... +def MessageSetItemSizer(field_number: int) -> _Sizer: ... +def MapSizer(field_descriptor: FieldDescriptor, is_message_map: bool) -> _Sizer: ... +def TagBytes(field_number: int, wire_type: int) -> bytes: ... + +_Encoder: TypeAlias = Callable[[Callable[[bytes], int], bytes, bool], int] + +Int32Encoder: _Encoder +UInt32Encoder: _Encoder +SInt32Encoder: _Encoder +Fixed32Encoder: _Encoder +Fixed64Encoder: _Encoder +SFixed32Encoder: _Encoder +SFixed64Encoder: _Encoder +FloatEncoder: _Encoder +DoubleEncoder: _Encoder + +def BoolEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def StringEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def BytesEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def GroupEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def MessageEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def MessageSetItemEncoder(field_number: int) -> _Encoder: ... +def MapEncoder(field_descriptor: FieldDescriptor) -> _Encoder: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/enum_type_wrapper.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/enum_type_wrapper.pyi new file mode 100644 index 00000000..18da7c23 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/enum_type_wrapper.pyi @@ -0,0 +1,18 @@ +from typing import Generic, TypeVar + +from google.protobuf.descriptor import EnumDescriptor + +_V = TypeVar("_V", bound=int) + +# Expose a generic version so that those using mypy-protobuf +# can get autogenerated NewType wrapper around the int values +class _EnumTypeWrapper(Generic[_V]): + DESCRIPTOR: EnumDescriptor + def __init__(self, enum_type: EnumDescriptor) -> None: ... + def Name(self, number: _V) -> str: ... + def Value(self, name: str | bytes) -> _V: ... + def keys(self) -> list[str]: ... + def values(self) -> list[_V]: ... + def items(self) -> list[tuple[str, _V]]: ... + +class EnumTypeWrapper(_EnumTypeWrapper[int]): ... 
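Reviewer note: the _EnumTypeWrapper stub above mirrors how generated enums behave at runtime. A brief sketch using the NullValue enum from struct_pb2 (stubbed later in this diff), again assuming the real protobuf runtime rather than these stubs:

    from google.protobuf import struct_pb2

    # EnumTypeWrapper maps between enum value names and numbers.
    print(struct_pb2.NullValue.Name(0))              # NULL_VALUE
    print(struct_pb2.NullValue.Value("NULL_VALUE"))  # 0
    print(struct_pb2.NullValue.keys())               # ['NULL_VALUE']
    print(struct_pb2.NullValue.items())              # [('NULL_VALUE', 0)]

With mypy-protobuf, Value() is typed to return the NullValue.ValueType NewType rather than a plain int, which is what the generic _EnumTypeWrapper[_V] indirection enables.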
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/extension_dict.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/extension_dict.pyi new file mode 100644 index 00000000..ecf56a36 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/extension_dict.pyi @@ -0,0 +1,27 @@ +from collections.abc import Iterator +from typing import Any, Generic, TypeVar + +from google.protobuf.descriptor import FieldDescriptor +from google.protobuf.internal.containers import RepeatedCompositeFieldContainer, RepeatedScalarFieldContainer +from google.protobuf.message import Message + +_ContainerMessageT = TypeVar("_ContainerMessageT", bound=Message) +_ExtenderMessageT = TypeVar( + "_ExtenderMessageT", + bound=Message | RepeatedScalarFieldContainer[Any] | RepeatedCompositeFieldContainer[Any] | bool | float | str | bytes, +) + +class _ExtensionFieldDescriptor(FieldDescriptor, Generic[_ContainerMessageT, _ExtenderMessageT]): ... + +class _ExtensionDict(Generic[_ContainerMessageT]): + def __init__(self, extended_message: _ContainerMessageT) -> None: ... + def __getitem__( + self, extension_handle: _ExtensionFieldDescriptor[_ContainerMessageT, _ExtenderMessageT] + ) -> _ExtenderMessageT: ... + def __setitem__( + self, extension_handle: _ExtensionFieldDescriptor[_ContainerMessageT, _ExtenderMessageT], value: _ExtenderMessageT + ) -> None: ... + def __delitem__(self, extension_handle: _ExtensionFieldDescriptor[_ContainerMessageT, _ExtenderMessageT]) -> None: ... + def __contains__(self, extension_handle: _ExtensionFieldDescriptor[_ContainerMessageT, _ExtenderMessageT]) -> bool: ... + def __iter__(self) -> Iterator[_ExtensionFieldDescriptor[_ContainerMessageT, Any]]: ... + def __len__(self) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/message_listener.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/message_listener.pyi new file mode 100644 index 00000000..01c3be01 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/message_listener.pyi @@ -0,0 +1,5 @@ +class MessageListener: + def Modified(self) -> None: ... + +class NullMessageListener(MessageListener): + def Modified(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/python_message.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/python_message.pyi new file mode 100644 index 00000000..0395ff64 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/python_message.pyi @@ -0,0 +1,3 @@ +class GeneratedProtocolMessageType(type): + def __new__(cls, name, bases, dictionary): ... + def __init__(cls, name, bases, dictionary): ... 
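Reviewer note: the _ExtensionDict stub above describes the mapping exposed via Message.Extensions. A hedged sketch of how it is used, with a purely hypothetical generated module (no such file exists in this diff) standing in for real proto2 code:

    # Hypothetical proto2 definitions compiled to "greeting_pb2":
    #   message Greeting { extensions 100 to 199; }
    #   extend Greeting { optional string signature = 100; }
    import greeting_pb2  # hypothetical module, for illustration only

    msg = greeting_pb2.Greeting()
    msg.Extensions[greeting_pb2.signature] = "typeshed"  # _ExtensionDict.__setitem__
    print(greeting_pb2.signature in msg.Extensions)      # True, via __contains__
    print(msg.HasExtension(greeting_pb2.signature))      # True (see message.pyi later in this diff)

The TypeVar plumbing in the stub exists so that indexing Extensions with a scalar-typed extension descriptor yields the scalar type, while a message-typed extension yields the message type.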
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/type_checkers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/type_checkers.pyi new file mode 100644 index 00000000..fbcc35eb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/type_checkers.pyi @@ -0,0 +1,15 @@ +from typing import Generic, Protocol, TypeVar + +_T = TypeVar("_T") + +class _ValueChecker(Protocol[_T]): + def CheckValue(self, proposed_value: _T) -> _T: ... + def DefaultValue(self) -> _T: ... + +class TypeChecker(Generic[_T]): + def __init__(self, *acceptable_types: _T): ... + def CheckValue(self, proposed_value: _T) -> _T: ... + +class TypeCheckerWithDefault(TypeChecker[_T]): + def __init__(self, default_value: _T, *acceptable_types: _T): ... + def DefaultValue(self) -> _T: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/well_known_types.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/well_known_types.pyi new file mode 100644 index 00000000..f0e17309 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/well_known_types.pyi @@ -0,0 +1,101 @@ +from _typeshed import Incomplete, SupportsItems +from collections.abc import Iterable, Iterator, KeysView, Mapping, Sequence +from datetime import datetime, timedelta, tzinfo +from typing import Any as tAny +from typing_extensions import TypeAlias + +from google.protobuf import struct_pb2 + +class Any: + type_url: tAny = ... + value: tAny = ... + def Pack(self, msg: tAny, type_url_prefix: str = ..., deterministic: Incomplete | None = ...) -> None: ... + def Unpack(self, msg: tAny) -> bool: ... + def TypeName(self) -> str: ... + def Is(self, descriptor: tAny) -> bool: ... + +class Timestamp: + def ToJsonString(self) -> str: ... + seconds: int = ... + nanos: int = ... + def FromJsonString(self, value: str) -> None: ... + def GetCurrentTime(self) -> None: ... + def ToNanoseconds(self) -> int: ... + def ToMicroseconds(self) -> int: ... + def ToMilliseconds(self) -> int: ... + def ToSeconds(self) -> int: ... + def FromNanoseconds(self, nanos: int) -> None: ... + def FromMicroseconds(self, micros: int) -> None: ... + def FromMilliseconds(self, millis: int) -> None: ... + def FromSeconds(self, seconds: int) -> None: ... + def ToDatetime(self, tzinfo: tzinfo | None = ...) -> datetime: ... + def FromDatetime(self, dt: datetime) -> None: ... + +class Duration: + def ToJsonString(self) -> str: ... + seconds: int = ... + nanos: int = ... + def FromJsonString(self, value: tAny) -> None: ... + def ToNanoseconds(self) -> int: ... + def ToMicroseconds(self) -> int: ... + def ToMilliseconds(self) -> int: ... + def ToSeconds(self) -> int: ... + def FromNanoseconds(self, nanos: int) -> None: ... + def FromMicroseconds(self, micros: int) -> None: ... + def FromMilliseconds(self, millis: int) -> None: ... + def FromSeconds(self, seconds: int) -> None: ... + def ToTimedelta(self) -> timedelta: ... + def FromTimedelta(self, td: timedelta) -> None: ... + +class FieldMask: + def ToJsonString(self) -> str: ... + def FromJsonString(self, value: tAny) -> None: ... + def IsValidForDescriptor(self, message_descriptor: tAny): ... 
+ def AllFieldsFromDescriptor(self, message_descriptor: tAny) -> None: ... + def CanonicalFormFromMask(self, mask: tAny) -> None: ... + def Union(self, mask1: tAny, mask2: tAny) -> None: ... + def Intersect(self, mask1: tAny, mask2: tAny) -> None: ... + def MergeMessage( + self, source: tAny, destination: tAny, replace_message_field: bool = ..., replace_repeated_field: bool = ... + ) -> None: ... + +class _FieldMaskTree: + def __init__(self, field_mask: Incomplete | None = ...) -> None: ... + def MergeFromFieldMask(self, field_mask: tAny) -> None: ... + def AddPath(self, path: tAny): ... + def ToFieldMask(self, field_mask: tAny) -> None: ... + def IntersectPath(self, path: tAny, intersection: tAny): ... + def AddLeafNodes(self, prefix: tAny, node: tAny) -> None: ... + def MergeMessage(self, source: tAny, destination: tAny, replace_message: tAny, replace_repeated: tAny) -> None: ... + +_StructValue: TypeAlias = struct_pb2.Struct | struct_pb2.ListValue | str | float | bool | None +_StructValueArg: TypeAlias = _StructValue | Mapping[str, _StructValueArg] | Sequence[_StructValueArg] + +class Struct: + def __getitem__(self, key: str) -> _StructValue: ... + def __contains__(self, item: object) -> bool: ... + def __setitem__(self, key: str, value: _StructValueArg) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[str]: ... + def keys(self) -> KeysView[str]: ... + def values(self) -> list[_StructValue]: ... + def items(self) -> list[tuple[str, _StructValue]]: ... + def get_or_create_list(self, key: str) -> struct_pb2.ListValue: ... + def get_or_create_struct(self, key: str) -> struct_pb2.Struct: ... + def update(self, dictionary: SupportsItems[str, _StructValueArg]) -> None: ... + +class ListValue: + def __len__(self) -> int: ... + def append(self, value: _StructValue) -> None: ... + def extend(self, elem_seq: Iterable[_StructValue]) -> None: ... + def __getitem__(self, index: int) -> _StructValue: ... + def __setitem__(self, index: int, value: _StructValueArg) -> None: ... + def __delitem__(self, key: int) -> None: ... + # Doesn't actually exist at runtime; needed so type checkers understand the class is iterable + def __iter__(self) -> Iterator[_StructValue]: ... + def items(self) -> Iterator[_StructValue]: ... + def add_struct(self) -> struct_pb2.Struct: ... + def add_list(self) -> struct_pb2.ListValue: ... + +WKTBASES: dict[str, type[tAny]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/wire_format.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/wire_format.pyi new file mode 100644 index 00000000..3dcbd043 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/internal/wire_format.pyi @@ -0,0 +1,50 @@ +from typing import Any + +TAG_TYPE_BITS: Any +TAG_TYPE_MASK: Any +WIRETYPE_VARINT: Any +WIRETYPE_FIXED64: Any +WIRETYPE_LENGTH_DELIMITED: Any +WIRETYPE_START_GROUP: Any +WIRETYPE_END_GROUP: Any +WIRETYPE_FIXED32: Any +INT32_MAX: Any +INT32_MIN: Any +UINT32_MAX: Any +INT64_MAX: Any +INT64_MIN: Any +UINT64_MAX: Any +FORMAT_UINT32_LITTLE_ENDIAN: Any +FORMAT_UINT64_LITTLE_ENDIAN: Any +FORMAT_FLOAT_LITTLE_ENDIAN: Any +FORMAT_DOUBLE_LITTLE_ENDIAN: Any + +def PackTag(field_number, wire_type): ... +def UnpackTag(tag): ... +def ZigZagEncode(value): ... +def ZigZagDecode(value): ... +def Int32ByteSize(field_number, int32): ... 
+def Int32ByteSizeNoTag(int32): ... +def Int64ByteSize(field_number, int64): ... +def UInt32ByteSize(field_number, uint32): ... +def UInt64ByteSize(field_number, uint64): ... +def SInt32ByteSize(field_number, int32): ... +def SInt64ByteSize(field_number, int64): ... +def Fixed32ByteSize(field_number, fixed32): ... +def Fixed64ByteSize(field_number, fixed64): ... +def SFixed32ByteSize(field_number, sfixed32): ... +def SFixed64ByteSize(field_number, sfixed64): ... +def FloatByteSize(field_number, flt): ... +def DoubleByteSize(field_number, double): ... +def BoolByteSize(field_number, b): ... +def EnumByteSize(field_number, enum): ... +def StringByteSize(field_number, string): ... +def BytesByteSize(field_number, b): ... +def GroupByteSize(field_number, message): ... +def MessageByteSize(field_number, message): ... +def MessageSetItemByteSize(field_number, msg): ... +def TagByteSize(field_number): ... + +NON_PACKABLE_TYPES: Any + +def IsTypePackable(field_type): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/json_format.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/json_format.pyi new file mode 100644 index 00000000..62ad1bad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/json_format.pyi @@ -0,0 +1,44 @@ +from typing import Any, TypeVar + +from google.protobuf.descriptor_pool import DescriptorPool +from google.protobuf.message import Message + +_MessageT = TypeVar("_MessageT", bound=Message) + +class Error(Exception): ... +class ParseError(Error): ... +class SerializeToJsonError(Error): ... + +def MessageToJson( + message: Message, + including_default_value_fields: bool = ..., + preserving_proto_field_name: bool = ..., + indent: int | None = ..., + sort_keys: bool = ..., + use_integers_for_enums: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + float_precision: int | None = ..., + ensure_ascii: bool = ..., +) -> str: ... +def MessageToDict( + message: Message, + including_default_value_fields: bool = ..., + preserving_proto_field_name: bool = ..., + use_integers_for_enums: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + float_precision: int | None = ..., +) -> dict[str, Any]: ... +def Parse( + text: bytes | str, + message: _MessageT, + ignore_unknown_fields: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + max_recursion_depth: int = ..., +) -> _MessageT: ... +def ParseDict( + js_dict: Any, + message: _MessageT, + ignore_unknown_fields: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + max_recursion_depth: int = ..., +) -> _MessageT: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/message.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/message.pyi new file mode 100644 index 00000000..798d0026 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/message.pyi @@ -0,0 +1,46 @@ +from collections.abc import Sequence +from typing import Any, TypeVar +from typing_extensions import Self + +from .descriptor import Descriptor, FieldDescriptor +from .internal.extension_dict import _ExtensionDict, _ExtensionFieldDescriptor + +class Error(Exception): ... +class DecodeError(Error): ... +class EncodeError(Error): ... 
+ +_M = TypeVar("_M", bound=Message) # message type (of self) + +class Message: + DESCRIPTOR: Descriptor + def __deepcopy__(self, memo: Any = ...) -> Self: ... + def __eq__(self, other_msg): ... + def __ne__(self, other_msg): ... + def MergeFrom(self, other_msg: Self) -> None: ... + def CopyFrom(self, other_msg: Self) -> None: ... + def Clear(self) -> None: ... + def SetInParent(self) -> None: ... + def IsInitialized(self) -> bool: ... + def MergeFromString(self, serialized: bytes) -> int: ... + def ParseFromString(self, serialized: bytes) -> int: ... + def SerializeToString(self, deterministic: bool = ...) -> bytes: ... + def SerializePartialToString(self, deterministic: bool = ...) -> bytes: ... + def ListFields(self) -> Sequence[tuple[FieldDescriptor, Any]]: ... + # The TypeVar must be bound to `Message` or we get mypy errors, so we cannot use `Self` for `HasExtension` & `ClearExtension` + def HasExtension(self: _M, extension_handle: _ExtensionFieldDescriptor[_M, Any]) -> bool: ... + def ClearExtension(self: _M, extension_handle: _ExtensionFieldDescriptor[_M, Any]) -> None: ... + # The TypeVar must be bound to `Message` or we get mypy errors, so we cannot use `Self` for `Extensions` + @property + def Extensions(self: _M) -> _ExtensionDict[_M]: ... + def ByteSize(self) -> int: ... + @classmethod + def FromString(cls, s: bytes) -> Self: ... + # Intentionally left out typing on these three methods, because they are + # stringly typed and it is not useful to call them on a Message directly. + # We prefer more specific typing on individual subclasses of Message + # See https://github.com/dropbox/mypy-protobuf/issues/62 for details + def HasField(self, field_name: Any) -> bool: ... + def ClearField(self, field_name: Any) -> None: ... + def WhichOneof(self, oneof_group: Any) -> Any: ... + # TODO: check kwargs + def __init__(self, *args, **kwargs) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/message_factory.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/message_factory.pyi new file mode 100644 index 00000000..5493ea88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/message_factory.pyi @@ -0,0 +1,15 @@ +from collections.abc import Iterable +from typing import Any + +from google.protobuf.descriptor import Descriptor +from google.protobuf.descriptor_pb2 import FileDescriptorProto +from google.protobuf.descriptor_pool import DescriptorPool +from google.protobuf.message import Message + +class MessageFactory: + pool: Any + def __init__(self, pool: DescriptorPool | None = ...) -> None: ... + def GetPrototype(self, descriptor: Descriptor) -> type[Message]: ... + def GetMessages(self, files: Iterable[str]) -> dict[str, type[Message]]: ... + +def GetMessages(file_protos: Iterable[FileDescriptorProto]) -> dict[str, type[Message]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/reflection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/reflection.pyi new file mode 100644 index 00000000..4bfbd2f4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/reflection.pyi @@ -0,0 +1,6 @@ +class GeneratedProtocolMessageType(type): + def __new__(cls, name, bases, dictionary): ... 
+ def __init__(__self, name, bases, dictionary) -> None: ... + +def ParseMessage(descriptor, byte_str): ... +def MakeClass(descriptor): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/service.pyi new file mode 100644 index 00000000..1123b613 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/service.pyi @@ -0,0 +1,39 @@ +from collections.abc import Callable +from concurrent.futures import Future + +from google.protobuf.descriptor import MethodDescriptor, ServiceDescriptor +from google.protobuf.message import Message + +class RpcException(Exception): ... + +class Service: + @staticmethod + def GetDescriptor() -> ServiceDescriptor: ... + def CallMethod( + self, + method_descriptor: MethodDescriptor, + rpc_controller: RpcController, + request: Message, + done: Callable[[Message], None] | None, + ) -> Future[Message] | None: ... + def GetRequestClass(self, method_descriptor: MethodDescriptor) -> type[Message]: ... + def GetResponseClass(self, method_descriptor: MethodDescriptor) -> type[Message]: ... + +class RpcController: + def Reset(self) -> None: ... + def Failed(self) -> bool: ... + def ErrorText(self) -> str | None: ... + def StartCancel(self) -> None: ... + def SetFailed(self, reason: str) -> None: ... + def IsCanceled(self) -> bool: ... + def NotifyOnCancel(self, callback: Callable[[], None]) -> None: ... + +class RpcChannel: + def CallMethod( + self, + method_descriptor: MethodDescriptor, + rpc_controller: RpcController, + request: Message, + response_class: type[Message], + done: Callable[[Message], None] | None, + ) -> Future[Message] | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/source_context_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/source_context_pb2.pyi new file mode 100644 index 00000000..cb4fb572 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/source_context_pb2.pyi @@ -0,0 +1,66 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol Buffers - Google's data interchange format +Copyright 2008 Google Inc. All rights reserved. +https://developers.google.com/protocol-buffers/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class SourceContext(google.protobuf.message.Message): + """`SourceContext` represents information about the source of a + protobuf element, like the file in which it is defined. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_NAME_FIELD_NUMBER: builtins.int + file_name: builtins.str + """The path-qualified name of the .proto file that contained the associated + protobuf element. For example: `"google/protobuf/source_context.proto"`. + """ + def __init__( + self, + *, + file_name: builtins.str | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["file_name", b"file_name"]) -> None: ... + +global___SourceContext = SourceContext diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/struct_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/struct_pb2.pyi new file mode 100644 index 00000000..bf2771ec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/struct_pb2.pyi @@ -0,0 +1,184 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol Buffers - Google's data interchange format +Copyright 2008 Google Inc. All rights reserved. +https://developers.google.com/protocol-buffers/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.internal.well_known_types +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _NullValue: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _NullValueEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_NullValue.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + NULL_VALUE: _NullValue.ValueType # 0 + """Null value.""" + +class NullValue(_NullValue, metaclass=_NullValueEnumTypeWrapper): + """`NullValue` is a singleton enumeration to represent the null value for the + `Value` type union. + + The JSON representation for `NullValue` is JSON `null`. + """ + +NULL_VALUE: NullValue.ValueType # 0 +"""Null value.""" +global___NullValue = NullValue + +@typing_extensions.final +class Struct(google.protobuf.message.Message, google.protobuf.internal.well_known_types.Struct): + """`Struct` represents a structured data value, consisting of fields + which map to dynamically typed values. In some languages, `Struct` + might be supported by a native representation. For example, in + scripting languages like JS a struct is represented as an + object. The details of that representation are described together + with the proto support for the language. + + The JSON representation for `Struct` is JSON object. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class FieldsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___Value: ... + def __init__( + self, + *, + key: builtins.str | None = ..., + value: global___Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + FIELDS_FIELD_NUMBER: builtins.int + @property + def fields(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Value]: + """Unordered map of dynamically typed values.""" + def __init__( + self, + *, + fields: collections.abc.Mapping[builtins.str, global___Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields"]) -> None: ... 
+ +global___Struct = Struct + +@typing_extensions.final +class Value(google.protobuf.message.Message): + """`Value` represents a dynamically typed value which can be either + null, a number, a string, a boolean, a recursive struct value, or a + list of values. A producer of value is expected to set one of these + variants. Absence of any variant indicates an error. + + The JSON representation for `Value` is JSON value. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NULL_VALUE_FIELD_NUMBER: builtins.int + NUMBER_VALUE_FIELD_NUMBER: builtins.int + STRING_VALUE_FIELD_NUMBER: builtins.int + BOOL_VALUE_FIELD_NUMBER: builtins.int + STRUCT_VALUE_FIELD_NUMBER: builtins.int + LIST_VALUE_FIELD_NUMBER: builtins.int + null_value: global___NullValue.ValueType + """Represents a null value.""" + number_value: builtins.float + """Represents a double value.""" + string_value: builtins.str + """Represents a string value.""" + bool_value: builtins.bool + """Represents a boolean value.""" + @property + def struct_value(self) -> global___Struct: + """Represents a structured value.""" + @property + def list_value(self) -> global___ListValue: + """Represents a repeated `Value`.""" + def __init__( + self, + *, + null_value: global___NullValue.ValueType | None = ..., + number_value: builtins.float | None = ..., + string_value: builtins.str | None = ..., + bool_value: builtins.bool | None = ..., + struct_value: global___Struct | None = ..., + list_value: global___ListValue | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["bool_value", b"bool_value", "kind", b"kind", "list_value", b"list_value", "null_value", b"null_value", "number_value", b"number_value", "string_value", b"string_value", "struct_value", b"struct_value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["bool_value", b"bool_value", "kind", b"kind", "list_value", b"list_value", "null_value", b"null_value", "number_value", b"number_value", "string_value", b"string_value", "struct_value", b"struct_value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["null_value", "number_value", "string_value", "bool_value", "struct_value", "list_value"] | None: ... + +global___Value = Value + +@typing_extensions.final +class ListValue(google.protobuf.message.Message, google.protobuf.internal.well_known_types.ListValue): + """`ListValue` is a wrapper around a repeated field of values. + + The JSON representation for `ListValue` is JSON array. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Value]: + """Repeated field of dynamically typed values.""" + def __init__( + self, + *, + values: collections.abc.Iterable[global___Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ... 
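# Illustrative usage sketch (editor's addition, not part of the vendored stub): the
# Struct/Value/ListValue messages stubbed above carry JSON-like data inside protobuf.
# At runtime the mixin from google.protobuf.internal.well_known_types gives Struct a
# dict-like interface, so a minimal round trip looks roughly like this:
from google.protobuf import struct_pb2

s = struct_pb2.Struct()
s.update({"name": "typeshed_stats", "count": 3, "nested": {"ok": True}})
assert s["count"] == 3                        # numbers come back as float number_value
v = struct_pb2.Value(string_value="hello")
assert v.WhichOneof("kind") == "string_value"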
+ +global___ListValue = ListValue diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/symbol_database.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/symbol_database.pyi new file mode 100644 index 00000000..c595d9c5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/symbol_database.pyi @@ -0,0 +1,16 @@ +from collections.abc import Iterable + +from google.protobuf.descriptor import Descriptor, EnumDescriptor, FileDescriptor, ServiceDescriptor +from google.protobuf.message import Message +from google.protobuf.message_factory import MessageFactory + +class SymbolDatabase(MessageFactory): + def RegisterMessage(self, message: type[Message] | Message) -> type[Message] | Message: ... + def RegisterMessageDescriptor(self, message_descriptor: Descriptor) -> None: ... + def RegisterEnumDescriptor(self, enum_descriptor: EnumDescriptor) -> EnumDescriptor: ... + def RegisterServiceDescriptor(self, service_descriptor: ServiceDescriptor) -> None: ... + def RegisterFileDescriptor(self, file_descriptor: FileDescriptor) -> None: ... + def GetSymbol(self, symbol: str) -> type[Message]: ... + def GetMessages(self, files: Iterable[str]) -> dict[str, type[Message]]: ... + +def Default() -> SymbolDatabase: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/text_format.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/text_format.pyi new file mode 100644 index 00000000..3cd24591 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/text_format.pyi @@ -0,0 +1,216 @@ +from _typeshed import SupportsWrite +from collections.abc import Callable, Iterable +from typing import Any, TypeVar +from typing_extensions import TypeAlias + +from .descriptor import FieldDescriptor +from .descriptor_pool import DescriptorPool +from .message import Message + +_M = TypeVar("_M", bound=Message) # message type (of self) + +class Error(Exception): ... + +class ParseError(Error): + def __init__(self, message: str | None = ..., line: int | None = ..., column: int | None = ...) -> None: ... + def GetLine(self) -> int | None: ... + def GetColumn(self) -> int | None: ... + +class TextWriter: + def __init__(self, as_utf8: bool) -> None: ... + def write(self, val: str) -> int: ... + def getvalue(self) -> str: ... + def close(self) -> None: ... + +_MessageFormatter: TypeAlias = Callable[[Message, int, bool], str | None] + +def MessageToString( + message: Message, + as_utf8: bool = ..., + as_one_line: bool = ..., + use_short_repeated_primitives: bool = ..., + pointy_brackets: bool = ..., + use_index_order: bool = ..., + float_format: str | None = ..., + double_format: str | None = ..., + use_field_number: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + indent: int = ..., + message_formatter: _MessageFormatter | None = ..., + print_unknown_fields: bool = ..., + force_colon: bool = ..., +) -> str: ... 
+def MessageToBytes( + message: Message, + as_utf8: bool = ..., + as_one_line: bool = ..., + use_short_repeated_primitives: bool = ..., + pointy_brackets: bool = ..., + use_index_order: bool = ..., + float_format: str | None = ..., + double_format: str | None = ..., + use_field_number: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + indent: int = ..., + message_formatter: _MessageFormatter = ..., + print_unknown_fields: bool = ..., + force_colon: bool = ..., +) -> bytes: ... +def PrintMessage( + message: Message, + out: SupportsWrite[str], + indent: int = ..., + as_utf8: bool = ..., + as_one_line: bool = ..., + use_short_repeated_primitives: bool = ..., + pointy_brackets: bool = ..., + use_index_order: bool = ..., + float_format: str | None = ..., + double_format: str | None = ..., + use_field_number: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + message_formatter: _MessageFormatter | None = ..., + print_unknown_fields: bool = ..., + force_colon: bool = ..., +) -> None: ... +def PrintField( + field: FieldDescriptor, + value: Any, + out: SupportsWrite[str], + indent: int = ..., + as_utf8: bool = ..., + as_one_line: bool = ..., + use_short_repeated_primitives: bool = ..., + pointy_brackets: bool = ..., + use_index_order: bool = ..., + float_format: str | None = ..., + double_format: str | None = ..., + message_formatter: _MessageFormatter | None = ..., + print_unknown_fields: bool = ..., + force_colon: bool = ..., +) -> None: ... +def PrintFieldValue( + field: FieldDescriptor, + value: Any, + out: SupportsWrite[str], + indent: int = ..., + as_utf8: bool = ..., + as_one_line: bool = ..., + use_short_repeated_primitives: bool = ..., + pointy_brackets: bool = ..., + use_index_order: bool = ..., + float_format: str | None = ..., + double_format: str | None = ..., + message_formatter: _MessageFormatter | None = ..., + print_unknown_fields: bool = ..., + force_colon: bool = ..., +) -> None: ... + +class _Printer: + out: SupportsWrite[str] = ... + indent: int = ... + as_utf8: bool = ... + as_one_line: bool = ... + use_short_repeated_primitives: bool = ... + pointy_brackets: bool = ... + use_index_order: bool = ... + float_format: str | None = ... + double_format: str | None = ... + use_field_number: bool = ... + descriptor_pool: DescriptorPool | None = ... + message_formatter: _MessageFormatter | None = ... + print_unknown_fields: bool = ... + force_colon: bool = ... + def __init__( + self, + out: SupportsWrite[str], + indent: int = ..., + as_utf8: bool = ..., + as_one_line: bool = ..., + use_short_repeated_primitives: bool = ..., + pointy_brackets: bool = ..., + use_index_order: bool = ..., + float_format: str | None = ..., + double_format: str | None = ..., + use_field_number: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + message_formatter: _MessageFormatter | None = ..., + print_unknown_fields: bool = ..., + force_colon: bool = ..., + ) -> None: ... + def PrintMessage(self, message: Message) -> None: ... + def PrintField(self, field: FieldDescriptor, value: Any) -> None: ... + def PrintFieldValue(self, field: FieldDescriptor, value: Any) -> None: ... + +def Parse( + text: str | bytes, + message: _M, + allow_unknown_extension: bool = ..., + allow_field_number: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + allow_unknown_field: bool = ..., +) -> _M: ... 
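# Illustrative sketch (editor's addition, not part of the vendored stub): MessageToString
# and Parse round-trip a message through the human-readable text format. Any concrete
# Message subclass works; Value from struct_pb2 is used here only as a convenient example.
from google.protobuf import struct_pb2, text_format

msg = struct_pb2.Value(string_value="hi")
text = text_format.MessageToString(msg)       # 'string_value: "hi"\n'
copy = text_format.Parse(text, struct_pb2.Value())
assert copy.string_value == "hi"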
+def Merge( + text: str | bytes, + message: _M, + allow_unknown_extension: bool = ..., + allow_field_number: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + allow_unknown_field: bool = ..., +) -> _M: ... +def MergeLines( + lines: Iterable[str | bytes], + message: _M, + allow_unknown_extension: bool = ..., + allow_field_number: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + allow_unknown_field: bool = ..., +) -> _M: ... + +class _Parser: + allow_unknown_extension: bool = ... + allow_field_number: bool = ... + descriptor_pool: DescriptorPool | None = ... + allow_unknown_field: bool = ... + def __init__( + self, + allow_unknown_extension: bool = ..., + allow_field_number: bool = ..., + descriptor_pool: DescriptorPool | None = ..., + allow_unknown_field: bool = ..., + ) -> None: ... + def ParseLines(self, lines: Iterable[str | bytes], message: _M) -> _M: ... + def MergeLines(self, lines: Iterable[str | bytes], message: _M) -> _M: ... + +_ParseError: TypeAlias = ParseError + +class Tokenizer: + token: str = ... + def __init__(self, lines: Iterable[str], skip_comments: bool = ...) -> None: ... + def LookingAt(self, token: str) -> bool: ... + def AtEnd(self) -> bool: ... + def TryConsume(self, token: str) -> bool: ... + def Consume(self, token: str) -> None: ... + def ConsumeComment(self) -> str: ... + def ConsumeCommentOrTrailingComment(self) -> tuple[bool, str]: ... + def TryConsumeIdentifier(self) -> bool: ... + def ConsumeIdentifier(self) -> str: ... + def TryConsumeIdentifierOrNumber(self) -> bool: ... + def ConsumeIdentifierOrNumber(self) -> str: ... + def TryConsumeInteger(self) -> bool: ... + def ConsumeInteger(self) -> int: ... + def TryConsumeFloat(self) -> bool: ... + def ConsumeFloat(self) -> float: ... + def ConsumeBool(self) -> bool: ... + def TryConsumeByteString(self) -> bool: ... + def ConsumeString(self) -> str: ... + def ConsumeByteString(self) -> bytes: ... + def ConsumeEnum(self, field: FieldDescriptor) -> int: ... + def ParseErrorPreviousToken(self, message: Message) -> _ParseError: ... + def ParseError(self, message: Message) -> _ParseError: ... + def NextToken(self) -> None: ... + +def ParseInteger(text: str, is_signed: bool = ..., is_long: bool = ...) -> int: ... +def ParseFloat(text: str) -> float: ... +def ParseBool(text: str) -> bool: ... +def ParseEnum(field: FieldDescriptor, value: str) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/timestamp_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/timestamp_pb2.pyi new file mode 100644 index 00000000..b8f5b636 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/timestamp_pb2.pyi @@ -0,0 +1,165 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol Buffers - Google's data interchange format +Copyright 2008 Google Inc. All rights reserved. +https://developers.google.com/protocol-buffers/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.well_known_types +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class Timestamp(google.protobuf.message.Message, google.protobuf.internal.well_known_types.Timestamp): + """A Timestamp represents a point in time independent of any time zone or local + calendar, encoded as a count of seconds and fractions of seconds at + nanosecond resolution. The count is relative to an epoch at UTC midnight on + January 1, 1970, in the proleptic Gregorian calendar which extends the + Gregorian calendar backwards to year one. + + All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + second table is needed for interpretation, using a [24-hour linear + smear](https://developers.google.com/time/smear). + + The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + restricting to that range, we ensure that we can convert to and from [RFC + 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + + # Examples + + Example 1: Compute Timestamp from POSIX `time()`. + + Timestamp timestamp; + timestamp.set_seconds(time(NULL)); + timestamp.set_nanos(0); + + Example 2: Compute Timestamp from POSIX `gettimeofday()`. + + struct timeval tv; + gettimeofday(&tv, NULL); + + Timestamp timestamp; + timestamp.set_seconds(tv.tv_sec); + timestamp.set_nanos(tv.tv_usec * 1000); + + Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + + FILETIME ft; + GetSystemTimeAsFileTime(&ft); + UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + + // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + Timestamp timestamp; + timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + + Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + + long millis = System.currentTimeMillis(); + + Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + .setNanos((int) ((millis % 1000) * 1000000)).build(); + + + Example 5: Compute Timestamp from Java `Instant.now()`. 
+ + Instant now = Instant.now(); + + Timestamp timestamp = + Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + .setNanos(now.getNano()).build(); + + + Example 6: Compute Timestamp from current time in Python. + + timestamp = Timestamp() + timestamp.GetCurrentTime() + + # JSON Mapping + + In JSON format, the Timestamp type is encoded as a string in the + [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + where {year} is always expressed using four digits while {month}, {day}, + {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + is required. A proto3 JSON serializer should always use UTC (as indicated by + "Z") when printing the Timestamp type and a proto3 JSON parser should be + able to accept both UTC and other timezones (as indicated by an offset). + + For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + 01:30 UTC on January 15, 2017. + + In JavaScript, one can convert a Date object to this format using the + standard + [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + method. In Python, a standard `datetime.datetime` object can be converted + to this format using + [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + the Joda Time's [`ISODateTimeFormat.dateTime()`]( + http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + ) to obtain a formatter capable of generating timestamps in this format. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SECONDS_FIELD_NUMBER: builtins.int + NANOS_FIELD_NUMBER: builtins.int + seconds: builtins.int + """Represents seconds of UTC time since Unix epoch + 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + 9999-12-31T23:59:59Z inclusive. + """ + nanos: builtins.int + """Non-negative fractions of a second at nanosecond resolution. Negative + second values with fractions must still have non-negative nanos values + that count forward in time. Must be from 0 to 999,999,999 + inclusive. + """ + def __init__( + self, + *, + seconds: builtins.int | None = ..., + nanos: builtins.int | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["nanos", b"nanos", "seconds", b"seconds"]) -> None: ... + +global___Timestamp = Timestamp diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/type_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/type_pb2.pyi new file mode 100644 index 00000000..5071cdf6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/type_pb2.pyi @@ -0,0 +1,385 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Protocol Buffers - Google's data interchange format +Copyright 2008 Google Inc. All rights reserved. 
+https://developers.google.com/protocol-buffers/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import builtins +import collections.abc +import google.protobuf.any_pb2 +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.source_context_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _Syntax: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _SyntaxEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Syntax.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SYNTAX_PROTO2: _Syntax.ValueType # 0 + """Syntax `proto2`.""" + SYNTAX_PROTO3: _Syntax.ValueType # 1 + """Syntax `proto3`.""" + +class Syntax(_Syntax, metaclass=_SyntaxEnumTypeWrapper): + """The syntax in which a protocol buffer element is defined.""" + +SYNTAX_PROTO2: Syntax.ValueType # 0 +"""Syntax `proto2`.""" +SYNTAX_PROTO3: Syntax.ValueType # 1 +"""Syntax `proto3`.""" +global___Syntax = Syntax + +@typing_extensions.final +class Type(google.protobuf.message.Message): + """A protocol buffer message type.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + FIELDS_FIELD_NUMBER: builtins.int + ONEOFS_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + SOURCE_CONTEXT_FIELD_NUMBER: builtins.int + SYNTAX_FIELD_NUMBER: builtins.int + name: builtins.str + """The fully qualified message name.""" + @property + def fields(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Field]: + """The list of fields.""" + @property + def oneofs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """The list of types appearing in `oneof` definitions in this type.""" + @property + def options(self) -> 
google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: + """The protocol buffer options.""" + @property + def source_context(self) -> google.protobuf.source_context_pb2.SourceContext: + """The source context.""" + syntax: global___Syntax.ValueType + """The source syntax.""" + def __init__( + self, + *, + name: builtins.str | None = ..., + fields: collections.abc.Iterable[global___Field] | None = ..., + oneofs: collections.abc.Iterable[builtins.str] | None = ..., + options: collections.abc.Iterable[global___Option] | None = ..., + source_context: google.protobuf.source_context_pb2.SourceContext | None = ..., + syntax: global___Syntax.ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["source_context", b"source_context"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields", "name", b"name", "oneofs", b"oneofs", "options", b"options", "source_context", b"source_context", "syntax", b"syntax"]) -> None: ... + +global___Type = Type + +@typing_extensions.final +class Field(google.protobuf.message.Message): + """A single field of a message type.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _Kind: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _KindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Field._Kind.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + TYPE_UNKNOWN: Field._Kind.ValueType # 0 + """Field type unknown.""" + TYPE_DOUBLE: Field._Kind.ValueType # 1 + """Field type double.""" + TYPE_FLOAT: Field._Kind.ValueType # 2 + """Field type float.""" + TYPE_INT64: Field._Kind.ValueType # 3 + """Field type int64.""" + TYPE_UINT64: Field._Kind.ValueType # 4 + """Field type uint64.""" + TYPE_INT32: Field._Kind.ValueType # 5 + """Field type int32.""" + TYPE_FIXED64: Field._Kind.ValueType # 6 + """Field type fixed64.""" + TYPE_FIXED32: Field._Kind.ValueType # 7 + """Field type fixed32.""" + TYPE_BOOL: Field._Kind.ValueType # 8 + """Field type bool.""" + TYPE_STRING: Field._Kind.ValueType # 9 + """Field type string.""" + TYPE_GROUP: Field._Kind.ValueType # 10 + """Field type group. 
Proto2 syntax only, and deprecated.""" + TYPE_MESSAGE: Field._Kind.ValueType # 11 + """Field type message.""" + TYPE_BYTES: Field._Kind.ValueType # 12 + """Field type bytes.""" + TYPE_UINT32: Field._Kind.ValueType # 13 + """Field type uint32.""" + TYPE_ENUM: Field._Kind.ValueType # 14 + """Field type enum.""" + TYPE_SFIXED32: Field._Kind.ValueType # 15 + """Field type sfixed32.""" + TYPE_SFIXED64: Field._Kind.ValueType # 16 + """Field type sfixed64.""" + TYPE_SINT32: Field._Kind.ValueType # 17 + """Field type sint32.""" + TYPE_SINT64: Field._Kind.ValueType # 18 + """Field type sint64.""" + + class Kind(_Kind, metaclass=_KindEnumTypeWrapper): + """Basic field types.""" + + TYPE_UNKNOWN: Field.Kind.ValueType # 0 + """Field type unknown.""" + TYPE_DOUBLE: Field.Kind.ValueType # 1 + """Field type double.""" + TYPE_FLOAT: Field.Kind.ValueType # 2 + """Field type float.""" + TYPE_INT64: Field.Kind.ValueType # 3 + """Field type int64.""" + TYPE_UINT64: Field.Kind.ValueType # 4 + """Field type uint64.""" + TYPE_INT32: Field.Kind.ValueType # 5 + """Field type int32.""" + TYPE_FIXED64: Field.Kind.ValueType # 6 + """Field type fixed64.""" + TYPE_FIXED32: Field.Kind.ValueType # 7 + """Field type fixed32.""" + TYPE_BOOL: Field.Kind.ValueType # 8 + """Field type bool.""" + TYPE_STRING: Field.Kind.ValueType # 9 + """Field type string.""" + TYPE_GROUP: Field.Kind.ValueType # 10 + """Field type group. Proto2 syntax only, and deprecated.""" + TYPE_MESSAGE: Field.Kind.ValueType # 11 + """Field type message.""" + TYPE_BYTES: Field.Kind.ValueType # 12 + """Field type bytes.""" + TYPE_UINT32: Field.Kind.ValueType # 13 + """Field type uint32.""" + TYPE_ENUM: Field.Kind.ValueType # 14 + """Field type enum.""" + TYPE_SFIXED32: Field.Kind.ValueType # 15 + """Field type sfixed32.""" + TYPE_SFIXED64: Field.Kind.ValueType # 16 + """Field type sfixed64.""" + TYPE_SINT32: Field.Kind.ValueType # 17 + """Field type sint32.""" + TYPE_SINT64: Field.Kind.ValueType # 18 + """Field type sint64.""" + + class _Cardinality: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _CardinalityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Field._Cardinality.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + CARDINALITY_UNKNOWN: Field._Cardinality.ValueType # 0 + """For fields with unknown cardinality.""" + CARDINALITY_OPTIONAL: Field._Cardinality.ValueType # 1 + """For optional fields.""" + CARDINALITY_REQUIRED: Field._Cardinality.ValueType # 2 + """For required fields. Proto2 syntax only.""" + CARDINALITY_REPEATED: Field._Cardinality.ValueType # 3 + """For repeated fields.""" + + class Cardinality(_Cardinality, metaclass=_CardinalityEnumTypeWrapper): + """Whether a field is optional, required, or repeated.""" + + CARDINALITY_UNKNOWN: Field.Cardinality.ValueType # 0 + """For fields with unknown cardinality.""" + CARDINALITY_OPTIONAL: Field.Cardinality.ValueType # 1 + """For optional fields.""" + CARDINALITY_REQUIRED: Field.Cardinality.ValueType # 2 + """For required fields. 
Proto2 syntax only.""" + CARDINALITY_REPEATED: Field.Cardinality.ValueType # 3 + """For repeated fields.""" + + KIND_FIELD_NUMBER: builtins.int + CARDINALITY_FIELD_NUMBER: builtins.int + NUMBER_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + TYPE_URL_FIELD_NUMBER: builtins.int + ONEOF_INDEX_FIELD_NUMBER: builtins.int + PACKED_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + JSON_NAME_FIELD_NUMBER: builtins.int + DEFAULT_VALUE_FIELD_NUMBER: builtins.int + kind: global___Field.Kind.ValueType + """The field type.""" + cardinality: global___Field.Cardinality.ValueType + """The field cardinality.""" + number: builtins.int + """The field number.""" + name: builtins.str + """The field name.""" + type_url: builtins.str + """The field type URL, without the scheme, for message or enumeration + types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + """ + oneof_index: builtins.int + """The index of the field type in `Type.oneofs`, for message or enumeration + types. The first type has index 1; zero means the type is not in the list. + """ + packed: builtins.bool + """Whether to use alternative packed wire representation.""" + @property + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: + """The protocol buffer options.""" + json_name: builtins.str + """The field JSON name.""" + default_value: builtins.str + """The string value of the default value of this field. Proto2 syntax only.""" + def __init__( + self, + *, + kind: global___Field.Kind.ValueType | None = ..., + cardinality: global___Field.Cardinality.ValueType | None = ..., + number: builtins.int | None = ..., + name: builtins.str | None = ..., + type_url: builtins.str | None = ..., + oneof_index: builtins.int | None = ..., + packed: builtins.bool | None = ..., + options: collections.abc.Iterable[global___Option] | None = ..., + json_name: builtins.str | None = ..., + default_value: builtins.str | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["cardinality", b"cardinality", "default_value", b"default_value", "json_name", b"json_name", "kind", b"kind", "name", b"name", "number", b"number", "oneof_index", b"oneof_index", "options", b"options", "packed", b"packed", "type_url", b"type_url"]) -> None: ... 
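# Illustrative sketch (editor's addition, not part of the vendored stub): Field and Type
# are ordinary messages, so a runtime description of a message type can be built directly.
# The "example.Demo" and "file_name" names below are made up for the example.
from google.protobuf import type_pb2

field = type_pb2.Field(
    kind=type_pb2.Field.TYPE_STRING,
    cardinality=type_pb2.Field.CARDINALITY_OPTIONAL,
    number=1,
    name="file_name",
)
demo = type_pb2.Type(name="example.Demo", fields=[field], syntax=type_pb2.SYNTAX_PROTO3)
assert demo.fields[0].kind == type_pb2.Field.TYPE_STRING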
+ +global___Field = Field + +@typing_extensions.final +class Enum(google.protobuf.message.Message): + """Enum type definition.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + ENUMVALUE_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + SOURCE_CONTEXT_FIELD_NUMBER: builtins.int + SYNTAX_FIELD_NUMBER: builtins.int + name: builtins.str + """Enum type name.""" + @property + def enumvalue(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumValue]: + """Enum value definitions.""" + @property + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: + """Protocol buffer options.""" + @property + def source_context(self) -> google.protobuf.source_context_pb2.SourceContext: + """The source context.""" + syntax: global___Syntax.ValueType + """The source syntax.""" + def __init__( + self, + *, + name: builtins.str | None = ..., + enumvalue: collections.abc.Iterable[global___EnumValue] | None = ..., + options: collections.abc.Iterable[global___Option] | None = ..., + source_context: google.protobuf.source_context_pb2.SourceContext | None = ..., + syntax: global___Syntax.ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["source_context", b"source_context"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["enumvalue", b"enumvalue", "name", b"name", "options", b"options", "source_context", b"source_context", "syntax", b"syntax"]) -> None: ... + +global___Enum = Enum + +@typing_extensions.final +class EnumValue(google.protobuf.message.Message): + """Enum value definition.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + NUMBER_FIELD_NUMBER: builtins.int + OPTIONS_FIELD_NUMBER: builtins.int + name: builtins.str + """Enum value name.""" + number: builtins.int + """Enum value number.""" + @property + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: + """Protocol buffer options.""" + def __init__( + self, + *, + name: builtins.str | None = ..., + number: builtins.int | None = ..., + options: collections.abc.Iterable[global___Option] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "number", b"number", "options", b"options"]) -> None: ... + +global___EnumValue = EnumValue + +@typing_extensions.final +class Option(google.protobuf.message.Message): + """A protocol buffer option, which can be attached to a message, field, + enumeration, etc. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + name: builtins.str + """The option's name. For protobuf built-in options (options defined in + descriptor.proto), this is the short name. For example, `"map_entry"`. + For custom options, it should be the fully-qualified name. For example, + `"google.api.http"`. + """ + @property + def value(self) -> google.protobuf.any_pb2.Any: + """The option's value packed in an Any message. If the value is a primitive, + the corresponding wrapper type defined in google/protobuf/wrappers.proto + should be used. If the value is an enum, it should be stored as an int32 + value using the google.protobuf.Int32Value type. + """ + def __init__( + self, + *, + name: builtins.str | None = ..., + value: google.protobuf.any_pb2.Any | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "value", b"value"]) -> None: ... + +global___Option = Option diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/util/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/util/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/wrappers_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/wrappers_pb2.pyi new file mode 100644 index 00000000..201b9258 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/protobuf/google/protobuf/wrappers_pb2.pyi @@ -0,0 +1,213 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Wrappers for primitive (non-message) types. These types are useful +for embedding primitives in the `google.protobuf.Any` type and for places +where we need to distinguish between the absence of a primitive +typed field and its default value. + +These wrappers have no meaningful use within repeated fields as they lack +the ability to detect presence on individual elements. +These wrappers have no meaningful use within a map or a oneof since +individual entries of a map or fields of a oneof can already detect presence. +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.message +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing_extensions.final +class DoubleValue(google.protobuf.message.Message): + """Wrapper message for `double`. + + The JSON representation for `DoubleValue` is JSON number. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + value: builtins.float + """The double value.""" + def __init__( + self, + *, + value: builtins.float | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... + +global___DoubleValue = DoubleValue + +@typing_extensions.final +class FloatValue(google.protobuf.message.Message): + """Wrapper message for `float`. + + The JSON representation for `FloatValue` is JSON number. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + value: builtins.float + """The float value.""" + def __init__( + self, + *, + value: builtins.float | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... + +global___FloatValue = FloatValue + +@typing_extensions.final +class Int64Value(google.protobuf.message.Message): + """Wrapper message for `int64`. + + The JSON representation for `Int64Value` is JSON string. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + value: builtins.int + """The int64 value.""" + def __init__( + self, + *, + value: builtins.int | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... 
+ +global___Int64Value = Int64Value + +@typing_extensions.final +class UInt64Value(google.protobuf.message.Message): + """Wrapper message for `uint64`. + + The JSON representation for `UInt64Value` is JSON string. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + value: builtins.int + """The uint64 value.""" + def __init__( + self, + *, + value: builtins.int | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... + +global___UInt64Value = UInt64Value + +@typing_extensions.final +class Int32Value(google.protobuf.message.Message): + """Wrapper message for `int32`. + + The JSON representation for `Int32Value` is JSON number. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + value: builtins.int + """The int32 value.""" + def __init__( + self, + *, + value: builtins.int | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... + +global___Int32Value = Int32Value + +@typing_extensions.final +class UInt32Value(google.protobuf.message.Message): + """Wrapper message for `uint32`. + + The JSON representation for `UInt32Value` is JSON number. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + value: builtins.int + """The uint32 value.""" + def __init__( + self, + *, + value: builtins.int | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... + +global___UInt32Value = UInt32Value + +@typing_extensions.final +class BoolValue(google.protobuf.message.Message): + """Wrapper message for `bool`. + + The JSON representation for `BoolValue` is JSON `true` and `false`. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + value: builtins.bool + """The bool value.""" + def __init__( + self, + *, + value: builtins.bool | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... + +global___BoolValue = BoolValue + +@typing_extensions.final +class StringValue(google.protobuf.message.Message): + """Wrapper message for `string`. + + The JSON representation for `StringValue` is JSON string. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + value: builtins.str + """The string value.""" + def __init__( + self, + *, + value: builtins.str | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... + +global___StringValue = StringValue + +@typing_extensions.final +class BytesValue(google.protobuf.message.Message): + """Wrapper message for `bytes`. + + The JSON representation for `BytesValue` is JSON string. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + value: builtins.bytes + """The bytes value.""" + def __init__( + self, + *, + value: builtins.bytes | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... 
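# Illustrative sketch (editor's addition, not part of the vendored stub): the wrapper
# messages exist so a primitive can be embedded in Any or carry presence information;
# a serialize/parse round trip is the usual smoke test.
from google.protobuf import wrappers_pb2

boxed = wrappers_pb2.Int64Value(value=42)
data = boxed.SerializeToString()
unboxed = wrappers_pb2.Int64Value()
unboxed.ParseFromString(data)
assert unboxed.value == 42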
+ +global___BytesValue = BytesValue diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..0b6e1ce0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist.txt @@ -0,0 +1,13 @@ +# These super() dunders don't seem to be particularly useful, +# and having them pop up on autocomplete suggestions would be annoying +psutil._compat.super.__self__ +psutil._compat.super.__self_class__ +psutil._compat.super.__thisclass__ + +# Stubtest does not support these platforms +psutil._psaix +psutil._psbsd +psutil._pssunos + +# Test utilities +psutil.tests.* diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist_darwin.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist_darwin.txt new file mode 100644 index 00000000..4cf5ba5d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist_darwin.txt @@ -0,0 +1,4 @@ +psutil._pslinux +psutil._psutil_linux +psutil._psutil_windows +psutil._pswindows diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..eb279310 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,4 @@ +psutil._psosx +psutil._psutil_windows +psutil._psutil_osx +psutil._pswindows diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist_win32.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist_win32.txt new file mode 100644 index 00000000..4fa870ef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/@tests/stubtest_allowlist_win32.txt @@ -0,0 +1,6 @@ +psutil._pslinux +psutil._psosx + +psutil._psutil_linux +psutil._psutil_osx +psutil._psutil_posix diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/METADATA.toml new file mode 100644 index 00000000..8d29ce52 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/METADATA.toml @@ -0,0 +1,4 @@ +version = "5.9.*" + +[tool.stubtest] +platforms = ["darwin", "linux", "win32"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/__init__.pyi new file mode 100644 index 00000000..f713d1bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/__init__.pyi @@ -0,0 +1,255 @@ +import sys +from _typeshed import Incomplete +from collections.abc import Callable, Iterable, Iterator +from contextlib import AbstractContextManager +from typing import Any, overload +from typing_extensions import Literal, Self, TypeAlias + +from 
psutil._common import ( + AIX as AIX, + BSD as BSD, + CONN_CLOSE as CONN_CLOSE, + CONN_CLOSE_WAIT as CONN_CLOSE_WAIT, + CONN_CLOSING as CONN_CLOSING, + CONN_ESTABLISHED as CONN_ESTABLISHED, + CONN_FIN_WAIT1 as CONN_FIN_WAIT1, + CONN_FIN_WAIT2 as CONN_FIN_WAIT2, + CONN_LAST_ACK as CONN_LAST_ACK, + CONN_LISTEN as CONN_LISTEN, + CONN_NONE as CONN_NONE, + CONN_SYN_RECV as CONN_SYN_RECV, + CONN_SYN_SENT as CONN_SYN_SENT, + CONN_TIME_WAIT as CONN_TIME_WAIT, + FREEBSD as FREEBSD, + LINUX as LINUX, + MACOS as MACOS, + NETBSD as NETBSD, + NIC_DUPLEX_FULL as NIC_DUPLEX_FULL, + NIC_DUPLEX_HALF as NIC_DUPLEX_HALF, + NIC_DUPLEX_UNKNOWN as NIC_DUPLEX_UNKNOWN, + OPENBSD as OPENBSD, + OSX as OSX, + POSIX as POSIX, + POWER_TIME_UNKNOWN as POWER_TIME_UNKNOWN, + POWER_TIME_UNLIMITED as POWER_TIME_UNLIMITED, + STATUS_DEAD as STATUS_DEAD, + STATUS_DISK_SLEEP as STATUS_DISK_SLEEP, + STATUS_IDLE as STATUS_IDLE, + STATUS_LOCKED as STATUS_LOCKED, + STATUS_PARKED as STATUS_PARKED, + STATUS_RUNNING as STATUS_RUNNING, + STATUS_SLEEPING as STATUS_SLEEPING, + STATUS_STOPPED as STATUS_STOPPED, + STATUS_TRACING_STOP as STATUS_TRACING_STOP, + STATUS_WAITING as STATUS_WAITING, + STATUS_WAKING as STATUS_WAKING, + STATUS_ZOMBIE as STATUS_ZOMBIE, + SUNOS as SUNOS, + WINDOWS as WINDOWS, + AccessDenied as AccessDenied, + Error as Error, + NoSuchProcess as NoSuchProcess, + TimeoutExpired as TimeoutExpired, + ZombieProcess as ZombieProcess, + pconn, + pcputimes, + pctxsw, + pgids, + pionice, + popenfile, + pthread, + puids, + sconn, + scpufreq, + scpustats, + sdiskio, + sdiskpart, + sdiskusage, + sfan, + shwtemp, + snetio, + snicaddr, + snicstats, + sswap, + suser, +) + +if sys.platform == "linux": + from ._pslinux import ( + IOPRIO_CLASS_BE as IOPRIO_CLASS_BE, + IOPRIO_CLASS_IDLE as IOPRIO_CLASS_IDLE, + IOPRIO_CLASS_NONE as IOPRIO_CLASS_NONE, + IOPRIO_CLASS_RT as IOPRIO_CLASS_RT, + ) + def sensors_temperatures(fahrenheit: bool = ...) -> dict[str, list[shwtemp]]: ... + def sensors_fans() -> dict[str, list[sfan]]: ... + PROCFS_PATH: str + RLIMIT_AS: int + RLIMIT_CORE: int + RLIMIT_CPU: int + RLIMIT_DATA: int + RLIMIT_FSIZE: int + RLIMIT_LOCKS: int + RLIMIT_MEMLOCK: int + RLIMIT_MSGQUEUE: int + RLIMIT_NICE: int + RLIMIT_NOFILE: int + RLIMIT_NPROC: int + RLIMIT_RSS: int + RLIMIT_RTPRIO: int + RLIMIT_RTTIME: int + RLIMIT_SIGPENDING: int + RLIMIT_STACK: int + RLIM_INFINITY: int +if sys.platform == "win32": + from ._psutil_windows import ( + ABOVE_NORMAL_PRIORITY_CLASS as ABOVE_NORMAL_PRIORITY_CLASS, + BELOW_NORMAL_PRIORITY_CLASS as BELOW_NORMAL_PRIORITY_CLASS, + HIGH_PRIORITY_CLASS as HIGH_PRIORITY_CLASS, + IDLE_PRIORITY_CLASS as IDLE_PRIORITY_CLASS, + NORMAL_PRIORITY_CLASS as NORMAL_PRIORITY_CLASS, + REALTIME_PRIORITY_CLASS as REALTIME_PRIORITY_CLASS, + ) + from ._pswindows import ( + CONN_DELETE_TCB as CONN_DELETE_TCB, + IOPRIO_HIGH as IOPRIO_HIGH, + IOPRIO_LOW as IOPRIO_LOW, + IOPRIO_NORMAL as IOPRIO_NORMAL, + IOPRIO_VERYLOW as IOPRIO_VERYLOW, + win_service_get as win_service_get, + win_service_iter as win_service_iter, + ) + +if sys.platform == "linux": + from ._pslinux import pfullmem, pmem, sensors_battery as sensors_battery, svmem +elif sys.platform == "darwin": + from ._psosx import pfullmem, pmem, sensors_battery as sensors_battery, svmem +elif sys.platform == "win32": + from ._pswindows import pfullmem, pmem, sensors_battery as sensors_battery, svmem +else: + class pmem(Any): ... + class pfullmem(Any): ... + class svmem(Any): ... + + def sensors_battery(): ... 
+ +AF_LINK: int +version_info: tuple[int, int, int] +__version__: str +__author__: str + +_Status: TypeAlias = Literal[ + "running", + "sleeping", + "disk-sleep", + "stopped", + "tracing-stop", + "zombie", + "dead", + "wake-kill", + "waking", + "idle", + "locked", + "waiting", + "suspended", + "parked", +] + +class Process: + def __init__(self, pid: int | None = ...) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + @property + def pid(self) -> int: ... + def oneshot(self) -> AbstractContextManager[None]: ... + def as_dict( + self, attrs: list[str] | tuple[str, ...] | set[str] | frozenset[str] | None = ..., ad_value: Incomplete | None = ... + ) -> dict[str, Any]: ... + def parent(self) -> Process: ... + def parents(self) -> list[Process]: ... + def is_running(self) -> bool: ... + def ppid(self) -> int: ... + def name(self) -> str: ... + def exe(self) -> str: ... + def cmdline(self) -> list[str]: ... + def status(self) -> _Status: ... + def username(self) -> str: ... + def create_time(self) -> float: ... + def cwd(self) -> str: ... + def nice(self, value: int | None = ...) -> int: ... + if sys.platform != "win32": + def uids(self) -> puids: ... + def gids(self) -> pgids: ... + def terminal(self) -> str: ... + def num_fds(self) -> int: ... + if sys.platform != "darwin": + def io_counters(self): ... + def ionice(self, ioclass: int | None = ..., value: int | None = ...) -> pionice: ... + def cpu_affinity(self, cpus: list[int] | None = ...) -> list[int] | None: ... + def memory_maps(self, grouped: bool = ...): ... + if sys.platform == "linux": + def rlimit(self, resource: int, limits: tuple[int, int] | None = ...) -> tuple[int, int]: ... + def cpu_num(self) -> int: ... + + def environ(self) -> dict[str, str]: ... + if sys.platform == "win32": + def num_handles(self) -> int: ... + + def num_ctx_switches(self) -> pctxsw: ... + def num_threads(self) -> int: ... + def threads(self) -> list[pthread]: ... + def children(self, recursive: bool = ...) -> list[Process]: ... + def cpu_percent(self, interval: float | None = ...) -> float: ... + def cpu_times(self) -> pcputimes: ... + def memory_info(self) -> pmem: ... + def memory_info_ex(self) -> pmem: ... + def memory_full_info(self) -> pfullmem: ... + def memory_percent(self, memtype: str = ...) -> float: ... + def open_files(self) -> list[popenfile]: ... + def connections(self, kind: str = ...) -> list[pconn]: ... + def send_signal(self, sig: int) -> None: ... + def suspend(self) -> None: ... + def resume(self) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... + def wait(self, timeout: int | None = ...) -> int: ... + +class Popen(Process): + def __init__(self, *args, **kwargs) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args, **kwargs) -> None: ... + def __getattribute__(self, name: str) -> Any: ... + +def pids() -> list[int]: ... +def pid_exists(pid: int) -> bool: ... +def process_iter( + attrs: list[str] | tuple[str, ...] | set[str] | frozenset[str] | None = ..., ad_value: Incomplete | None = ... +) -> Iterator[Process]: ... +def wait_procs( + procs: Iterable[Process], timeout: float | None = ..., callback: Callable[[Process], object] | None = ... +) -> tuple[list[Process], list[Process]]: ... +def cpu_count(logical: bool = ...) -> int: ... +def cpu_times(percpu: bool = ...): ... +def cpu_percent(interval: float | None = ..., percpu: bool = ...) -> float: ... 
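# Illustrative sketch (editor's addition, not part of the vendored stub): a few of the
# calls typed above, applied to the current process; oneshot() batches the underlying
# system reads so the grouped queries are cheaper.
import psutil

proc = psutil.Process()                        # defaults to the calling process
with proc.oneshot():
    summary = (proc.pid, proc.name(), proc.status(), proc.memory_info().rss)
print(summary)
print(psutil.cpu_count(logical=True), psutil.virtual_memory().percent)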
+def cpu_times_percent(interval: float | None = ..., percpu: bool = ...): ... +def cpu_stats() -> scpustats: ... +def cpu_freq(percpu: bool = ...) -> scpufreq: ... +def getloadavg() -> tuple[float, float, float]: ... +def virtual_memory() -> svmem: ... +def swap_memory() -> sswap: ... +def disk_usage(path: str) -> sdiskusage: ... +def disk_partitions(all: bool = ...) -> list[sdiskpart]: ... +@overload +def disk_io_counters(perdisk: Literal[False] = ..., nowrap: bool = ...) -> sdiskio | None: ... +@overload +def disk_io_counters(perdisk: Literal[True], nowrap: bool = ...) -> dict[str, sdiskio]: ... +@overload +def net_io_counters(pernic: Literal[False] = ..., nowrap: bool = ...) -> snetio: ... +@overload +def net_io_counters(pernic: Literal[True], nowrap: bool = ...) -> dict[str, snetio]: ... +def net_connections(kind: str = ...) -> list[sconn]: ... +def net_if_addrs() -> dict[str, list[snicaddr]]: ... +def net_if_stats() -> dict[str, snicstats]: ... +def boot_time() -> float: ... +def users() -> list[suser]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_common.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_common.pyi new file mode 100644 index 00000000..d0caabd5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_common.pyi @@ -0,0 +1,288 @@ +import enum +from _typeshed import Incomplete, StrOrBytesPath, SupportsWrite +from collections.abc import Callable +from socket import AF_INET6 as AF_INET6, AddressFamily, SocketKind +from typing import Any, NamedTuple, TypeVar, overload +from typing_extensions import Literal + +POSIX: bool +WINDOWS: bool +LINUX: bool +MACOS: bool +OSX: bool +FREEBSD: bool +OPENBSD: bool +NETBSD: bool +BSD: bool +SUNOS: bool +AIX: bool + +STATUS_RUNNING: Literal["running"] +STATUS_SLEEPING: Literal["sleeping"] +STATUS_DISK_SLEEP: Literal["disk-sleep"] +STATUS_STOPPED: Literal["stopped"] +STATUS_TRACING_STOP: Literal["tracing-stop"] +STATUS_ZOMBIE: Literal["zombie"] +STATUS_DEAD: Literal["dead"] +STATUS_WAKE_KILL: Literal["wake-kill"] +STATUS_WAKING: Literal["waking"] +STATUS_IDLE: Literal["idle"] +STATUS_LOCKED: Literal["locked"] +STATUS_WAITING: Literal["waiting"] +STATUS_SUSPENDED: Literal["suspended"] +STATUS_PARKED: Literal["parked"] + +CONN_ESTABLISHED: str +CONN_SYN_SENT: str +CONN_SYN_RECV: str +CONN_FIN_WAIT1: str +CONN_FIN_WAIT2: str +CONN_TIME_WAIT: str +CONN_CLOSE: str +CONN_CLOSE_WAIT: str +CONN_LAST_ACK: str +CONN_LISTEN: str +CONN_CLOSING: str +CONN_NONE: str +NIC_DUPLEX_FULL: int +NIC_DUPLEX_HALF: int +NIC_DUPLEX_UNKNOWN: int + +class NicDuplex(enum.IntEnum): + NIC_DUPLEX_FULL: int + NIC_DUPLEX_HALF: int + NIC_DUPLEX_UNKNOWN: int + +POWER_TIME_UNKNOWN: int +POWER_TIME_UNLIMITED: int + +class BatteryTime(enum.IntEnum): + POWER_TIME_UNKNOWN: int + POWER_TIME_UNLIMITED: int + +ENCODING: str +ENCODING_ERRS: str + +class sswap(NamedTuple): + total: int + used: int + free: int + percent: float + sin: int + sout: int + +class sdiskusage(NamedTuple): + total: int + used: int + free: int + percent: float + +class sdiskio(NamedTuple): + read_count: int + write_count: int + read_bytes: int + write_bytes: int + read_time: int + write_time: int + +class sdiskpart(NamedTuple): + device: str + mountpoint: str + fstype: str + opts: str + maxfile: int + maxpath: int + +class snetio(NamedTuple): + bytes_sent: int + bytes_recv: int + packets_sent: int + packets_recv: int + errin: int + errout: int + dropin: 
int + dropout: int + +class suser(NamedTuple): + name: str + terminal: str | None + host: str | None + started: float + pid: str + +class sconn(NamedTuple): + fd: int + family: AddressFamily + type: SocketKind + laddr: addr | tuple[()] + raddr: addr | tuple[()] + status: str + pid: int + +class snicaddr(NamedTuple): + family: AddressFamily + address: str + netmask: str | None + broadcast: str | None + ptp: str | None + +class snicstats(NamedTuple): + isup: bool + duplex: int + speed: int + mtu: int + flags: str + +class scpustats(NamedTuple): + ctx_switches: int + interrupts: int + soft_interrupts: int + syscalls: int + +class scpufreq(NamedTuple): + current: float + min: float + max: float + +class shwtemp(NamedTuple): + label: str + current: float + high: float | None + critical: float | None + +class sbattery(NamedTuple): + percent: int + secsleft: int + power_plugged: bool + +class sfan(NamedTuple): + label: str + current: int + +class pcputimes(NamedTuple): + user: float + system: float + children_user: float + children_system: float + +class popenfile(NamedTuple): + path: str + fd: int + +class pthread(NamedTuple): + id: int + user_time: float + system_time: float + +class puids(NamedTuple): + real: int + effective: int + saved: int + +class pgids(NamedTuple): + real: int + effective: int + saved: int + +class pio(NamedTuple): + read_count: int + write_count: int + read_bytes: int + write_bytes: int + +class pionice(NamedTuple): + ioclass: int + value: int + +class pctxsw(NamedTuple): + voluntary: int + involuntary: int + +class pconn(NamedTuple): + fd: int + family: AddressFamily + type: SocketKind + laddr: addr + raddr: addr + status: str + +class addr(NamedTuple): + ip: str + port: int + +conn_tmap: dict[str, tuple[list[AddressFamily], list[SocketKind]]] + +class Error(Exception): + __module__: str + msg: Any + def __init__(self, msg: str = ...) -> None: ... + +class NoSuchProcess(Error): + __module__: str + pid: Any + name: Any + msg: Any + def __init__(self, pid, name: Incomplete | None = ..., msg: Incomplete | None = ...) -> None: ... + +class ZombieProcess(NoSuchProcess): + __module__: str + pid: Any + ppid: Any + name: Any + msg: Any + def __init__( + self, pid, name: Incomplete | None = ..., ppid: Incomplete | None = ..., msg: Incomplete | None = ... + ) -> None: ... + +class AccessDenied(Error): + __module__: str + pid: Any + name: Any + msg: Any + def __init__(self, pid: Incomplete | None = ..., name: Incomplete | None = ..., msg: Incomplete | None = ...) -> None: ... + +class TimeoutExpired(Error): + __module__: str + seconds: Any + pid: Any + name: Any + def __init__(self, seconds, pid: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + +_Func = TypeVar("_Func", bound=Callable[..., Any]) + +def usage_percent(used, total, round_: int | None = ...) -> float: ... +def memoize(fun: _Func) -> _Func: ... +def memoize_when_activated(fun: _Func) -> _Func: ... +def isfile_strict(path: StrOrBytesPath) -> bool: ... +def path_exists_strict(path: StrOrBytesPath) -> bool: ... +def supports_ipv6() -> bool: ... +def parse_environ_block(data): ... +def sockfam_to_enum(num: int) -> AddressFamily: ... +def socktype_to_enum(num: int) -> SocketKind: ... +@overload +def conn_to_ntuple(fd: int, fam: int, type_: int, laddr, raddr, status: str, status_map, pid: int) -> sconn: ... +@overload +def conn_to_ntuple(fd: int, fam: int, type_: int, laddr, raddr, status: str, status_map, pid: None = ...) -> pconn: ... 
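Note (illustrative, not part of the vendored typeshed data): the STATUS_* constants above are typed as Literal strings, matching the _Status alias used for Process.status() in the top-level psutil stub, so a type checker can catch comparisons against misspelled status names. A minimal sketch of what that looks like from client code:

import psutil

p = psutil.Process()  # current process
if p.status() == psutil.STATUS_ZOMBIE:  # Literal["zombie"] overlaps _Status: accepted
    print(p.pid, "is a zombie")
# By contrast, `p.status() == "zombi"` compares non-overlapping Literal types,
# which mypy reports when --strict-equality is enabled.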
+def deprecated_method(replacement: str) -> Callable[[_Func], _Func]: ... + +class _WrapNumbers: + lock: Any + cache: Any + reminders: Any + reminder_keys: Any + def __init__(self) -> None: ... + def run(self, input_dict, name): ... + def cache_clear(self, name: Incomplete | None = ...) -> None: ... + def cache_info(self): ... + +def wrap_numbers(input_dict, name: str): ... +def open_binary(fname): ... +def open_text(fname): ... +def cat(fname, fallback=..., _open=...): ... +def bcat(fname, fallback=...): ... +def bytes2human(n: int, format: str = ...) -> str: ... +def get_procfs_path() -> str: ... +def term_supports_colors(file: SupportsWrite[str] = ...) -> bool: ... +def hilite(s: str, color: str | None = ..., bold: bool = ...) -> str: ... +def print_color(s: str, color: str | None = ..., bold: bool = ..., file: SupportsWrite[str] = ...) -> None: ... +def debug(msg) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_compat.pyi new file mode 100644 index 00000000..410a291f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_compat.pyi @@ -0,0 +1,26 @@ +from builtins import ( + ChildProcessError as ChildProcessError, + FileExistsError as FileExistsError, + FileNotFoundError as FileNotFoundError, + InterruptedError as InterruptedError, + PermissionError as PermissionError, + ProcessLookupError as ProcessLookupError, + range as range, + super as super, +) +from contextlib import redirect_stderr as redirect_stderr +from functools import lru_cache as lru_cache +from shutil import get_terminal_size as get_terminal_size, which as which +from subprocess import TimeoutExpired +from typing_extensions import Literal + +PY3: Literal[True] +long = int +xrange = range +unicode = str +basestring = str + +def u(s): ... +def b(s): ... 
+ +SubprocessTimeoutExpired = TimeoutExpired diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psaix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psaix.pyi new file mode 100644 index 00000000..4b0dabdb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psaix.pyi @@ -0,0 +1,104 @@ +from _typeshed import Incomplete +from typing import NamedTuple + +from psutil._common import ( + NIC_DUPLEX_FULL as NIC_DUPLEX_FULL, + NIC_DUPLEX_HALF as NIC_DUPLEX_HALF, + NIC_DUPLEX_UNKNOWN as NIC_DUPLEX_UNKNOWN, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + conn_to_ntuple as conn_to_ntuple, + get_procfs_path as get_procfs_path, + memoize_when_activated as memoize_when_activated, + usage_percent as usage_percent, +) +from psutil._compat import ( + PY3 as PY3, + FileNotFoundError as FileNotFoundError, + PermissionError as PermissionError, + ProcessLookupError as ProcessLookupError, +) + +__extra__all__: Incomplete +HAS_THREADS: Incomplete +HAS_NET_IO_COUNTERS: Incomplete +HAS_PROC_IO_COUNTERS: Incomplete +PAGE_SIZE: Incomplete +AF_LINK: Incomplete +PROC_STATUSES: Incomplete +TCP_STATUSES: Incomplete +proc_info_map: Incomplete + +class pmem(NamedTuple): + rss: Incomplete + vms: Incomplete + +pfullmem = pmem + +class scputimes(NamedTuple): + user: Incomplete + system: Incomplete + idle: Incomplete + iowait: Incomplete + +class svmem(NamedTuple): + total: Incomplete + available: Incomplete + percent: Incomplete + used: Incomplete + free: Incomplete + +def virtual_memory(): ... +def swap_memory(): ... +def cpu_times(): ... +def per_cpu_times(): ... +def cpu_count_logical(): ... +def cpu_count_cores(): ... +def cpu_stats(): ... + +disk_io_counters: Incomplete +disk_usage: Incomplete + +def disk_partitions(all: bool = ...): ... + +net_if_addrs: Incomplete +net_io_counters: Incomplete + +def net_connections(kind, _pid: int = ...): ... +def net_if_stats(): ... +def boot_time(): ... +def users(): ... +def pids(): ... +def pid_exists(pid): ... +def wrap_exceptions(fun): ... + +class Process: + pid: Incomplete + def __init__(self, pid) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self): ... + def exe(self): ... + def cmdline(self): ... + def environ(self): ... + def create_time(self): ... + def num_threads(self): ... + def threads(self): ... + def connections(self, kind: str = ...): ... + def nice_get(self): ... + def nice_set(self, value): ... + def ppid(self): ... + def uids(self): ... + def gids(self): ... + def cpu_times(self): ... + def terminal(self): ... + def cwd(self): ... + def memory_info(self): ... + memory_full_info: Incomplete + def status(self): ... + def open_files(self): ... + def num_fds(self): ... + def num_ctx_switches(self): ... + def wait(self, timeout: Incomplete | None = ...): ... + def io_counters(self): ... 
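Note (illustrative sketch, not part of the vendored data): the top-level stub earlier in this diff types process_iter(), wait_procs(), Process.as_dict() and Process.children(); the snippet below shows the calls those signatures describe. It acts only on children of the current process, of which a bare interpreter normally has none.

import psutil

# Snapshot a couple of attributes for every process on the machine.
snapshots = [p.as_dict(attrs=["pid", "name"]) for p in psutil.process_iter()]

# Ask any child processes to exit, then wait for them as wait_procs() is typed.
children = psutil.Process().children(recursive=True)
for child in children:
    child.terminate()
gone, alive = psutil.wait_procs(children, timeout=3,
                                callback=lambda p: print(p.pid, "exited"))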
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psbsd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psbsd.pyi new file mode 100644 index 00000000..ef3a0385 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psbsd.pyi @@ -0,0 +1,166 @@ +from _typeshed import Incomplete +from contextlib import AbstractContextManager +from typing import Any, NamedTuple + +from psutil._common import ( + FREEBSD as FREEBSD, + NETBSD as NETBSD, + OPENBSD as OPENBSD, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + conn_tmap as conn_tmap, + conn_to_ntuple as conn_to_ntuple, + memoize as memoize, + usage_percent as usage_percent, +) + +__extra__all__: Any +PROC_STATUSES: Any +TCP_STATUSES: Any +PAGESIZE: Any +AF_LINK: Any +HAS_PER_CPU_TIMES: Any +HAS_PROC_NUM_THREADS: Any +HAS_PROC_OPEN_FILES: Any +HAS_PROC_NUM_FDS: Any +kinfo_proc_map: Any + +class svmem(NamedTuple): + total: int + available: int + percent: float + used: int + free: int + active: int + inactive: int + buffers: int + cached: int + shared: int + wired: int + +class scputimes(NamedTuple): + user: Any + nice: Any + system: Any + idle: Any + irq: Any + +class pmem(NamedTuple): + rss: Any + vms: Any + text: Any + data: Any + stack: Any + +pfullmem = pmem + +class pcputimes(NamedTuple): + user: Any + system: Any + children_user: Any + children_system: Any + +class pmmap_grouped(NamedTuple): + path: Any + rss: Any + private: Any + ref_count: Any + shadow_count: Any + +class pmmap_ext(NamedTuple): + addr: Any + perms: Any + path: Any + rss: Any + private: Any + ref_count: Any + shadow_count: Any + +class sdiskio(NamedTuple): + read_count: Any + write_count: Any + read_bytes: Any + write_bytes: Any + read_time: Any + write_time: Any + busy_time: Any + +def virtual_memory() -> svmem: ... +def swap_memory(): ... +def cpu_times(): ... +def per_cpu_times(): ... +def cpu_count_logical(): ... +def cpu_count_cores() -> int | None: ... +def cpu_stats(): ... +def disk_partitions(all: bool = ...): ... + +disk_usage: Any +disk_io_counters: Any +net_io_counters: Any +net_if_addrs: Any + +def net_if_stats(): ... +def net_connections(kind): ... +def sensors_battery(): ... +def sensors_temperatures(): ... +def cpu_freq(): ... +def boot_time(): ... +def users(): ... +def pids(): ... +def pid_exists(pid): ... +def is_zombie(pid): ... +def wrap_exceptions(fun): ... +def wrap_exceptions_procfs(inst) -> AbstractContextManager[None]: ... + +class Process: + pid: Any + def __init__(self, pid) -> None: ... + def oneshot(self): ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self): ... + def exe(self): ... + def cmdline(self): ... + def environ(self): ... + def terminal(self): ... + def ppid(self): ... + def uids(self): ... + def gids(self): ... + def cpu_times(self): ... + def cpu_num(self): ... + def memory_info(self): ... + memory_full_info: Any + def create_time(self): ... + def num_threads(self): ... + def num_ctx_switches(self): ... + def threads(self): ... + def connections(self, kind: str = ...): ... + def wait(self, timeout: Incomplete | None = ...): ... + def nice_get(self): ... + def nice_set(self, value): ... + def status(self): ... + def io_counters(self): ... + def cwd(self): ... 
+ + class nt_mmap_grouped(NamedTuple): + path: Any + rss: Any + private: Any + ref_count: Any + shadow_count: Any + + class nt_mmap_ext(NamedTuple): + addr: Any + perms: Any + path: Any + rss: Any + private: Any + ref_count: Any + shadow_count: Any + def open_files(self): ... + def num_fds(self): ... + def cpu_affinity_get(self): ... + def cpu_affinity_set(self, cpus) -> None: ... + def memory_maps(self): ... + def rlimit(self, resource, limits: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_pslinux.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_pslinux.pyi new file mode 100644 index 00000000..b6502d08 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_pslinux.pyi @@ -0,0 +1,227 @@ +import enum +from _typeshed import Incomplete +from typing import Any, NamedTuple + +from psutil._common import ( + NIC_DUPLEX_FULL as NIC_DUPLEX_FULL, + NIC_DUPLEX_HALF as NIC_DUPLEX_HALF, + NIC_DUPLEX_UNKNOWN as NIC_DUPLEX_UNKNOWN, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + isfile_strict as isfile_strict, + parse_environ_block as parse_environ_block, + path_exists_strict as path_exists_strict, + supports_ipv6 as supports_ipv6, + usage_percent as usage_percent, +) +from psutil._compat import PY3 as PY3 + +__extra__all__: Any +POWER_SUPPLY_PATH: str +HAS_PROC_SMAPS: bool +HAS_PROC_SMAPS_ROLLUP: bool +HAS_PROC_IO_PRIORITY: Any +HAS_CPU_AFFINITY: Any +CLOCK_TICKS: Any +PAGESIZE: Any +BOOT_TIME: Any +LITTLE_ENDIAN: Any +DISK_SECTOR_SIZE: int +AF_LINK: Any +AddressFamily: Any +IOPRIO_CLASS_NONE: int +IOPRIO_CLASS_RT: int +IOPRIO_CLASS_BE: int +IOPRIO_CLASS_IDLE: int + +class IOPriority(enum.IntEnum): + IOPRIO_CLASS_NONE: int + IOPRIO_CLASS_RT: int + IOPRIO_CLASS_BE: int + IOPRIO_CLASS_IDLE: int + +PROC_STATUSES: Any +TCP_STATUSES: Any + +class svmem(NamedTuple): + total: int + available: int + percent: float + used: int + free: int + active: int + inactive: int + buffers: int + cached: int + shared: int + slab: int + +class sdiskio(NamedTuple): + read_count: Any + write_count: Any + read_bytes: Any + write_bytes: Any + read_time: Any + write_time: Any + read_merged_count: Any + write_merged_count: Any + busy_time: Any + +class popenfile(NamedTuple): + path: Any + fd: Any + position: Any + mode: Any + flags: Any + +class pmem(NamedTuple): + rss: Any + vms: Any + shared: Any + text: Any + lib: Any + data: Any + dirty: Any + +class pfullmem(NamedTuple): + rss: Incomplete + vms: Incomplete + shared: Incomplete + text: Incomplete + lib: Incomplete + data: Incomplete + dirty: Incomplete + uss: Incomplete + pss: Incomplete + swap: Incomplete + +class pmmap_grouped(NamedTuple): + path: Any + rss: Any + size: Any + pss: Any + shared_clean: Any + shared_dirty: Any + private_clean: Any + private_dirty: Any + referenced: Any + anonymous: Any + swap: Any + +pmmap_ext: Any + +class pio(NamedTuple): + read_count: Any + write_count: Any + read_bytes: Any + write_bytes: Any + read_chars: Any + write_chars: Any + +class pcputimes(NamedTuple): + user: Any + system: Any + children_user: Any + children_system: Any + iowait: Any + +def readlink(path): ... +def file_flags_to_mode(flags): ... +def is_storage_device(name): ... +def set_scputimes_ntuple(procfs_path) -> None: ... + +scputimes: Any +prlimit: Any + +def calculate_avail_vmem(mems): ... +def virtual_memory() -> svmem: ... 
+def swap_memory(): ... +def cpu_times(): ... +def per_cpu_times(): ... +def cpu_count_logical(): ... +def cpu_count_cores() -> int | None: ... +def cpu_stats(): ... +def cpu_freq(): ... + +net_if_addrs: Any + +class _Ipv6UnsupportedError(Exception): ... + +class Connections: + tmap: Any + def __init__(self) -> None: ... + def get_proc_inodes(self, pid): ... + def get_all_inodes(self): ... + @staticmethod + def decode_address(addr, family): ... + @staticmethod + def process_inet(file, family, type_, inodes, filter_pid: Incomplete | None = ...) -> None: ... + @staticmethod + def process_unix(file, family, inodes, filter_pid: Incomplete | None = ...) -> None: ... + def retrieve(self, kind, pid: Incomplete | None = ...): ... + +def net_connections(kind: str = ...): ... +def net_io_counters(): ... +def net_if_stats(): ... + +disk_usage: Any + +def disk_io_counters(perdisk: bool = ...): ... + +class RootFsDeviceFinder: + major: Incomplete + minor: Incomplete + def __init__(self) -> None: ... + def ask_proc_partitions(self): ... + def ask_sys_dev_block(self): ... + def ask_sys_class_block(self): ... + def find(self): ... + +def disk_partitions(all: bool = ...): ... +def sensors_temperatures(): ... +def sensors_fans(): ... +def sensors_battery(): ... +def users(): ... +def boot_time(): ... +def pids(): ... +def pid_exists(pid): ... +def ppid_map(): ... +def wrap_exceptions(fun): ... + +class Process: + pid: Any + def __init__(self, pid) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self): ... + def exe(self): ... + def cmdline(self): ... + def environ(self): ... + def terminal(self): ... + def io_counters(self): ... + def cpu_times(self): ... + def cpu_num(self): ... + def wait(self, timeout: Incomplete | None = ...): ... + def create_time(self): ... + def memory_info(self): ... + def memory_full_info(self): ... + def memory_maps(self): ... + def cwd(self): ... + def num_ctx_switches(self, _ctxsw_re=...): ... + def num_threads(self, _num_threads_re=...): ... + def threads(self): ... + def nice_get(self): ... + def nice_set(self, value): ... + def cpu_affinity_get(self): ... + def cpu_affinity_set(self, cpus) -> None: ... + def ionice_get(self): ... + def ionice_set(self, ioclass, value): ... + def rlimit(self, resource_, limits: Incomplete | None = ...): ... + def status(self): ... + def open_files(self): ... + def connections(self, kind: str = ...): ... + def num_fds(self): ... + def ppid(self): ... + def uids(self, _uids_re=...): ... + def gids(self, _gids_re=...): ... 
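Note (hedged sketch, not part of the vendored data): in the top-level stub, Linux-only methods such as Process.rlimit(), Process.cpu_num() and Process.ionice() sit behind sys.platform == "linux" checks, so portable callers need the same guard for a type checker to accept them. For example, querying the open-file limit of the current process (read-only; no limits are changed):

import sys

import psutil

if sys.platform == "linux":
    import resource  # stdlib constant; rlimit() is typed to take a plain int

    p = psutil.Process()
    soft, hard = p.rlimit(resource.RLIMIT_NOFILE)
    print(f"fd limit {soft}/{hard}, running on CPU {p.cpu_num()}")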
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psosx.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psosx.pyi new file mode 100644 index 00000000..895d66f7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psosx.pyi @@ -0,0 +1,108 @@ +from _typeshed import Incomplete +from typing import Any, NamedTuple + +from psutil._common import ( + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + conn_tmap as conn_tmap, + conn_to_ntuple as conn_to_ntuple, + isfile_strict as isfile_strict, + parse_environ_block as parse_environ_block, + usage_percent as usage_percent, +) + +__extra__all__: Any +PAGESIZE: Any +AF_LINK: Any +TCP_STATUSES: Any +PROC_STATUSES: Any +kinfo_proc_map: Any +pidtaskinfo_map: Any + +class scputimes(NamedTuple): + user: Any + nice: Any + system: Any + idle: Any + +class svmem(NamedTuple): + total: int + available: int + percent: float + used: int + free: int + active: int + inactive: int + wired: int + +class pmem(NamedTuple): + rss: Any + vms: Any + pfaults: Any + pageins: Any + +class pfullmem(NamedTuple): + rss: Incomplete + vms: Incomplete + pfaults: Incomplete + pageins: Incomplete + uss: Incomplete + +def virtual_memory() -> svmem: ... +def swap_memory(): ... +def cpu_times(): ... +def per_cpu_times(): ... +def cpu_count_logical(): ... +def cpu_count_cores() -> int | None: ... +def cpu_stats(): ... +def cpu_freq(): ... + +disk_usage: Any +disk_io_counters: Any + +def disk_partitions(all: bool = ...): ... +def sensors_battery(): ... + +net_io_counters: Any +net_if_addrs: Any + +def net_connections(kind: str = ...): ... +def net_if_stats(): ... +def boot_time(): ... +def users(): ... +def pids(): ... + +pid_exists: Any + +def is_zombie(pid): ... +def wrap_exceptions(fun): ... + +class Process: + pid: Any + def __init__(self, pid) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self): ... + def exe(self): ... + def cmdline(self): ... + def environ(self): ... + def ppid(self): ... + def cwd(self): ... + def uids(self): ... + def gids(self): ... + def terminal(self): ... + def memory_info(self): ... + def memory_full_info(self): ... + def cpu_times(self): ... + def create_time(self): ... + def num_ctx_switches(self): ... + def num_threads(self): ... + def open_files(self): ... + def connections(self, kind: str = ...): ... + def num_fds(self): ... + def wait(self, timeout: Incomplete | None = ...): ... + def nice_get(self): ... + def nice_set(self, value): ... + def status(self): ... + def threads(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psposix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psposix.pyi new file mode 100644 index 00000000..972008dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psposix.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +def pid_exists(pid): ... +def wait_pid( + pid, + timeout: Incomplete | None = ..., + proc_name: Incomplete | None = ..., + _waitpid=..., + _timer=..., + _min=..., + _sleep=..., + _pid_exists=..., +): ... +def disk_usage(path): ... +def get_terminal_map(): ... 
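Note (illustrative, not part of the vendored data): _psposix.disk_usage() above backs the public psutil.disk_usage(), which the top-level stub types as returning the sdiskusage NamedTuple from _common, so field access is checked:

import psutil

usage = psutil.disk_usage("/")  # sdiskusage(total=..., used=..., free=..., percent=...)
print(f"{usage.percent:.1f}% used, {usage.free // 2**30} GiB free")
# A typo such as `usage.frree` is flagged as a missing attribute by the checker.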
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_pssunos.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_pssunos.pyi new file mode 100644 index 00000000..22767c5f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_pssunos.pyi @@ -0,0 +1,135 @@ +from _typeshed import Incomplete +from typing import NamedTuple + +from psutil._common import ( + AF_INET6 as AF_INET6, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + ZombieProcess as ZombieProcess, + debug as debug, + get_procfs_path as get_procfs_path, + isfile_strict as isfile_strict, + memoize_when_activated as memoize_when_activated, + sockfam_to_enum as sockfam_to_enum, + socktype_to_enum as socktype_to_enum, + usage_percent as usage_percent, +) +from psutil._compat import ( + PY3 as PY3, + FileNotFoundError as FileNotFoundError, + PermissionError as PermissionError, + ProcessLookupError as ProcessLookupError, + b as b, +) + +__extra__all__: Incomplete +PAGE_SIZE: Incomplete +AF_LINK: Incomplete +IS_64_BIT: Incomplete +CONN_IDLE: str +CONN_BOUND: str +PROC_STATUSES: Incomplete +TCP_STATUSES: Incomplete +proc_info_map: Incomplete + +class scputimes(NamedTuple): + user: Incomplete + system: Incomplete + idle: Incomplete + iowait: Incomplete + +class pcputimes(NamedTuple): + user: Incomplete + system: Incomplete + children_user: Incomplete + children_system: Incomplete + +class svmem(NamedTuple): + total: Incomplete + available: Incomplete + percent: Incomplete + used: Incomplete + free: Incomplete + +class pmem(NamedTuple): + rss: Incomplete + vms: Incomplete + +pfullmem = pmem + +class pmmap_grouped(NamedTuple): + path: Incomplete + rss: Incomplete + anonymous: Incomplete + locked: Incomplete + +pmmap_ext: Incomplete + +def virtual_memory(): ... +def swap_memory(): ... +def cpu_times(): ... +def per_cpu_times(): ... +def cpu_count_logical(): ... +def cpu_count_cores(): ... +def cpu_stats(): ... + +disk_io_counters: Incomplete +disk_usage: Incomplete + +def disk_partitions(all: bool = ...): ... + +net_io_counters: Incomplete +net_if_addrs: Incomplete + +def net_connections(kind, _pid: int = ...): ... +def net_if_stats(): ... +def boot_time(): ... +def users(): ... +def pids(): ... +def pid_exists(pid): ... +def wrap_exceptions(fun): ... + +class Process: + pid: Incomplete + def __init__(self, pid) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self): ... + def exe(self): ... + def cmdline(self): ... + def environ(self): ... + def create_time(self): ... + def num_threads(self): ... + def nice_get(self): ... + def nice_set(self, value): ... + def ppid(self): ... + def uids(self): ... + def gids(self): ... + def cpu_times(self): ... + def cpu_num(self): ... + def terminal(self): ... + def cwd(self): ... + def memory_info(self): ... + memory_full_info: Incomplete + def status(self): ... + def threads(self): ... + def open_files(self): ... + def connections(self, kind: str = ...): ... + + class nt_mmap_grouped(NamedTuple): + path: Incomplete + rss: Incomplete + anon: Incomplete + locked: Incomplete + + class nt_mmap_ext(NamedTuple): + addr: Incomplete + perms: Incomplete + path: Incomplete + rss: Incomplete + anon: Incomplete + locked: Incomplete + def memory_maps(self): ... + def num_fds(self): ... + def num_ctx_switches(self): ... + def wait(self, timeout: Incomplete | None = ...): ... 
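Note (hedged sketch, not part of the vendored data): each platform module defines its own svmem shape, but the fields used below (total, available, percent, used, free, plus sswap's sin/sout) are common to all of them and to the public virtual_memory()/swap_memory() signatures in the top-level stub:

import psutil

vm = psutil.virtual_memory()
sw = psutil.swap_memory()
print(f"RAM: {vm.percent}% of {vm.total // 2**20} MiB used; "
      f"swap: {sw.percent}% used ({sw.sin} bytes swapped in, {sw.sout} out)")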
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_linux.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_linux.pyi new file mode 100644 index 00000000..d204ef7a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_linux.pyi @@ -0,0 +1,16 @@ +from typing import Any + +DUPLEX_FULL: int +DUPLEX_HALF: int +DUPLEX_UNKNOWN: int +version: int + +def disk_partitions(*args, **kwargs) -> Any: ... +def linux_sysinfo(*args, **kwargs) -> Any: ... +def net_if_duplex_speed(*args, **kwargs) -> Any: ... +def proc_cpu_affinity_get(*args, **kwargs) -> Any: ... +def proc_cpu_affinity_set(*args, **kwargs) -> Any: ... +def proc_ioprio_get(*args, **kwargs) -> Any: ... +def proc_ioprio_set(*args, **kwargs) -> Any: ... +def set_debug(*args, **kwargs) -> Any: ... +def users(*args, **kwargs) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_osx.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_osx.pyi new file mode 100644 index 00000000..2578efca --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_osx.pyi @@ -0,0 +1,52 @@ +from typing import Any + +PSUTIL_CONN_NONE: int +SIDL: int +SRUN: int +SSLEEP: int +SSTOP: int +SZOMB: int +TCPS_CLOSED: int +TCPS_CLOSE_WAIT: int +TCPS_CLOSING: int +TCPS_ESTABLISHED: int +TCPS_FIN_WAIT_1: int +TCPS_FIN_WAIT_2: int +TCPS_LAST_ACK: int +TCPS_LISTEN: int +TCPS_SYN_RECEIVED: int +TCPS_SYN_SENT: int +TCPS_TIME_WAIT: int +version: int + +class ZombieProcessError(Exception): ... + +def boot_time(*args, **kwargs) -> Any: ... +def cpu_count_cores(*args, **kwargs) -> Any: ... +def cpu_count_logical(*args, **kwargs) -> Any: ... +def cpu_freq(*args, **kwargs) -> Any: ... +def cpu_stats(*args, **kwargs) -> Any: ... +def cpu_times(*args, **kwargs) -> Any: ... +def disk_io_counters(*args, **kwargs) -> Any: ... +def disk_partitions(*args, **kwargs) -> Any: ... +def disk_usage_used(*args, **kwargs) -> Any: ... +def net_io_counters(*args, **kwargs) -> Any: ... +def per_cpu_times(*args, **kwargs) -> Any: ... +def pids(*args, **kwargs) -> Any: ... +def proc_cmdline(*args, **kwargs) -> Any: ... +def proc_connections(*args, **kwargs) -> Any: ... +def proc_cwd(*args, **kwargs) -> Any: ... +def proc_environ(*args, **kwargs) -> Any: ... +def proc_exe(*args, **kwargs) -> Any: ... +def proc_kinfo_oneshot(*args, **kwargs) -> Any: ... +def proc_memory_uss(*args, **kwargs) -> Any: ... +def proc_name(*args, **kwargs) -> Any: ... +def proc_num_fds(*args, **kwargs) -> Any: ... +def proc_open_files(*args, **kwargs) -> Any: ... +def proc_pidtaskinfo_oneshot(*args, **kwargs) -> Any: ... +def proc_threads(*args, **kwargs) -> Any: ... +def sensors_battery(*args, **kwargs) -> Any: ... +def set_debug(*args, **kwargs) -> Any: ... +def swap_mem(*args, **kwargs) -> Any: ... +def users(*args, **kwargs) -> Any: ... +def virtual_mem(*args, **kwargs) -> Any: ... 
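Note (illustrative, not part of the vendored data): the _psutil_linux/_psutil_osx C-extension stubs above are deliberately loose ((*args, **kwargs) -> Any); the useful types live on the wrappers, e.g. net_if_addrs() -> dict[str, list[snicaddr]] in the top-level stub. A short sketch of what that typing gives callers:

import psutil

for nic, addrs in psutil.net_if_addrs().items():
    for a in addrs:  # each `a` is an snicaddr NamedTuple
        print(nic, a.family.name, a.address, a.netmask)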
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_posix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_posix.pyi new file mode 100644 index 00000000..2a58ee9a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_posix.pyi @@ -0,0 +1,34 @@ +import sys +from typing import Any + +if sys.platform == "linux": + RLIMIT_AS: int + RLIMIT_CORE: int + RLIMIT_CPU: int + RLIMIT_DATA: int + RLIMIT_FSIZE: int + RLIMIT_LOCKS: int + RLIMIT_MEMLOCK: int + RLIMIT_MSGQUEUE: int + RLIMIT_NICE: int + RLIMIT_NOFILE: int + RLIMIT_NPROC: int + RLIMIT_RSS: int + RLIMIT_RTPRIO: int + RLIMIT_RTTIME: int + RLIMIT_SIGPENDING: int + RLIMIT_STACK: int + RLIM_INFINITY: int + +def getpagesize(*args, **kwargs) -> Any: ... +def getpriority(*args, **kwargs) -> Any: ... +def net_if_addrs(*args, **kwargs) -> Any: ... +def net_if_flags(*args, **kwargs) -> Any: ... +def net_if_is_running(*args, **kwargs) -> Any: ... +def net_if_mtu(*args, **kwargs) -> Any: ... + +if sys.platform == "darwin": + AF_LINK: int + def net_if_duplex_speed(*args, **kwargs): ... + +def setpriority(*args, **kwargs) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_windows.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_windows.pyi new file mode 100644 index 00000000..def19867 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_psutil_windows.pyi @@ -0,0 +1,90 @@ +ABOVE_NORMAL_PRIORITY_CLASS: int +BELOW_NORMAL_PRIORITY_CLASS: int +ERROR_ACCESS_DENIED: int +ERROR_INVALID_NAME: int +ERROR_PRIVILEGE_NOT_HELD: int +ERROR_SERVICE_DOES_NOT_EXIST: int +HIGH_PRIORITY_CLASS: int +IDLE_PRIORITY_CLASS: int +INFINITE: int +MIB_TCP_STATE_CLOSED: int +MIB_TCP_STATE_CLOSE_WAIT: int +MIB_TCP_STATE_CLOSING: int +MIB_TCP_STATE_DELETE_TCB: int +MIB_TCP_STATE_ESTAB: int +MIB_TCP_STATE_FIN_WAIT1: int +MIB_TCP_STATE_FIN_WAIT2: int +MIB_TCP_STATE_LAST_ACK: int +MIB_TCP_STATE_LISTEN: int +MIB_TCP_STATE_SYN_RCVD: int +MIB_TCP_STATE_SYN_SENT: int +MIB_TCP_STATE_TIME_WAIT: int +NORMAL_PRIORITY_CLASS: int +PSUTIL_CONN_NONE: int +REALTIME_PRIORITY_CLASS: int +WINDOWS_10: int +WINDOWS_7: int +WINDOWS_8: int +WINDOWS_8_1: int +WINDOWS_VISTA: int +WINVER: int +version: int + +class TimeoutAbandoned(Exception): ... +class TimeoutExpired(Exception): ... + +def QueryDosDevice(*args, **kwargs): ... # incomplete +def boot_time(*args, **kwargs): ... # incomplete +def cpu_count_cores(*args, **kwargs): ... # incomplete +def cpu_count_logical(*args, **kwargs): ... # incomplete +def cpu_freq(*args, **kwargs): ... # incomplete +def cpu_stats(*args, **kwargs): ... # incomplete +def cpu_times(*args, **kwargs): ... # incomplete +def disk_io_counters(*args, **kwargs): ... # incomplete +def disk_partitions(*args, **kwargs): ... # incomplete +def disk_usage(*args, **kwargs): ... # incomplete +def getloadavg(*args, **kwargs): ... # incomplete +def getpagesize(*args, **kwargs): ... # incomplete +def init_loadavg_counter(*args, **kwargs): ... # incomplete +def net_connections(*args, **kwargs): ... # incomplete +def net_if_addrs(*args, **kwargs): ... # incomplete +def net_if_stats(*args, **kwargs): ... # incomplete +def net_io_counters(*args, **kwargs): ... # incomplete +def per_cpu_times(*args, **kwargs): ... 
# incomplete +def pid_exists(*args, **kwargs): ... # incomplete +def pids(*args, **kwargs): ... # incomplete +def ppid_map(*args, **kwargs): ... # incomplete +def proc_cmdline(*args, **kwargs): ... # incomplete +def proc_cpu_affinity_get(*args, **kwargs): ... # incomplete +def proc_cpu_affinity_set(*args, **kwargs): ... # incomplete +def proc_cwd(*args, **kwargs): ... # incomplete +def proc_environ(*args, **kwargs): ... # incomplete +def proc_exe(*args, **kwargs): ... # incomplete +def proc_info(*args, **kwargs): ... # incomplete +def proc_io_counters(*args, **kwargs): ... # incomplete +def proc_io_priority_get(*args, **kwargs): ... # incomplete +def proc_io_priority_set(*args, **kwargs): ... # incomplete +def proc_is_suspended(*args, **kwargs): ... # incomplete +def proc_kill(*args, **kwargs): ... # incomplete +def proc_memory_info(*args, **kwargs): ... # incomplete +def proc_memory_maps(*args, **kwargs): ... # incomplete +def proc_memory_uss(*args, **kwargs): ... # incomplete +def proc_num_handles(*args, **kwargs): ... # incomplete +def proc_open_files(*args, **kwargs): ... # incomplete +def proc_priority_get(*args, **kwargs): ... # incomplete +def proc_priority_set(*args, **kwargs): ... # incomplete +def proc_suspend_or_resume(*args, **kwargs): ... # incomplete +def proc_threads(*args, **kwargs): ... # incomplete +def proc_times(*args, **kwargs): ... # incomplete +def proc_username(*args, **kwargs): ... # incomplete +def proc_wait(*args, **kwargs): ... # incomplete +def sensors_battery(*args, **kwargs): ... # incomplete +def set_debug(*args, **kwargs): ... # incomplete +def users(*args, **kwargs): ... # incomplete +def virtual_mem(*args, **kwargs): ... # incomplete +def winservice_enumerate(*args, **kwargs): ... # incomplete +def winservice_query_config(*args, **kwargs): ... # incomplete +def winservice_query_descr(*args, **kwargs): ... # incomplete +def winservice_query_status(*args, **kwargs): ... # incomplete +def winservice_start(*args, **kwargs): ... # incomplete +def winservice_stop(*args, **kwargs): ... 
# incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_pswindows.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_pswindows.pyi new file mode 100644 index 00000000..8dd501d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psutil/psutil/_pswindows.pyi @@ -0,0 +1,201 @@ +import enum +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import Any, NamedTuple + +from psutil._common import ( + ENCODING as ENCODING, + ENCODING_ERRS as ENCODING_ERRS, + AccessDenied as AccessDenied, + NoSuchProcess as NoSuchProcess, + TimeoutExpired as TimeoutExpired, + conn_tmap as conn_tmap, + conn_to_ntuple as conn_to_ntuple, + debug as debug, + isfile_strict as isfile_strict, + memoize as memoize, + parse_environ_block as parse_environ_block, + usage_percent as usage_percent, +) +from psutil._compat import PY3 as PY3 +from psutil._psutil_windows import ( + ABOVE_NORMAL_PRIORITY_CLASS as ABOVE_NORMAL_PRIORITY_CLASS, + BELOW_NORMAL_PRIORITY_CLASS as BELOW_NORMAL_PRIORITY_CLASS, + HIGH_PRIORITY_CLASS as HIGH_PRIORITY_CLASS, + IDLE_PRIORITY_CLASS as IDLE_PRIORITY_CLASS, + NORMAL_PRIORITY_CLASS as NORMAL_PRIORITY_CLASS, + REALTIME_PRIORITY_CLASS as REALTIME_PRIORITY_CLASS, +) + +__extra__all__: Any +CONN_DELETE_TCB: str +ERROR_PARTIAL_COPY: int +PYPY: Any +AF_LINK: int +AddressFamily: Any +TCP_STATUSES: Any + +class Priority(enum.IntEnum): + ABOVE_NORMAL_PRIORITY_CLASS: Any + BELOW_NORMAL_PRIORITY_CLASS: Any + HIGH_PRIORITY_CLASS: Any + IDLE_PRIORITY_CLASS: Any + NORMAL_PRIORITY_CLASS: Any + REALTIME_PRIORITY_CLASS: Any + +IOPRIO_VERYLOW: int +IOPRIO_LOW: int +IOPRIO_NORMAL: int +IOPRIO_HIGH: int + +class IOPriority(enum.IntEnum): + IOPRIO_VERYLOW: int + IOPRIO_LOW: int + IOPRIO_NORMAL: int + IOPRIO_HIGH: int + +pinfo_map: Any + +class scputimes(NamedTuple): + user: Any + system: Any + idle: Any + interrupt: Any + dpc: Any + +class svmem(NamedTuple): + total: int + available: int + percent: float + used: int + free: int + +class pmem(NamedTuple): + rss: Any + vms: Any + num_page_faults: Any + peak_wset: Any + wset: Any + peak_paged_pool: Any + paged_pool: Any + peak_nonpaged_pool: Any + nonpaged_pool: Any + pagefile: Any + peak_pagefile: Any + private: Any + +class pfullmem(NamedTuple): + rss: Incomplete + vms: Incomplete + num_page_faults: Incomplete + peak_wset: Incomplete + wset: Incomplete + peak_paged_pool: Incomplete + paged_pool: Incomplete + peak_nonpaged_pool: Incomplete + nonpaged_pool: Incomplete + pagefile: Incomplete + peak_pagefile: Incomplete + private: Incomplete + uss: Incomplete + +class pmmap_grouped(NamedTuple): + path: Any + rss: Any + +pmmap_ext: Any + +class pio(NamedTuple): + read_count: Any + write_count: Any + read_bytes: Any + write_bytes: Any + other_count: Any + other_bytes: Any + +def convert_dos_path(s): ... +def py2_strencode(s): ... +def getpagesize(): ... +def virtual_memory() -> svmem: ... +def swap_memory(): ... + +disk_io_counters: Any + +def disk_usage(path): ... +def disk_partitions(all): ... +def cpu_times(): ... +def per_cpu_times(): ... +def cpu_count_logical(): ... +def cpu_count_cores() -> int | None: ... +def cpu_stats(): ... +def cpu_freq(): ... +def getloadavg(): ... +def net_connections(kind, _pid: int = ...): ... +def net_if_stats(): ... +def net_io_counters(): ... +def net_if_addrs(): ... +def sensors_battery(): ... +def boot_time(): ... +def users(): ... 
+def win_service_iter() -> Iterable[WindowsService]: ... +def win_service_get(name): ... + +class WindowsService: + def __init__(self, name, display_name) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def name(self): ... + def display_name(self): ... + def binpath(self): ... + def username(self): ... + def start_type(self): ... + def pid(self): ... + def status(self): ... + def description(self): ... + def as_dict(self): ... + +pids: Any +pid_exists: Any +ppid_map: Any + +def is_permission_err(exc): ... +def convert_oserror(exc, pid: Incomplete | None = ..., name: Incomplete | None = ...): ... +def wrap_exceptions(fun): ... +def retry_error_partial_copy(fun): ... + +class Process: + pid: Any + def __init__(self, pid) -> None: ... + def oneshot_enter(self) -> None: ... + def oneshot_exit(self) -> None: ... + def name(self): ... + def exe(self): ... + def cmdline(self): ... + def environ(self): ... + def ppid(self): ... + def memory_info(self): ... + def memory_full_info(self): ... + def memory_maps(self) -> None: ... + def kill(self): ... + def send_signal(self, sig) -> None: ... + def wait(self, timeout: Incomplete | None = ...): ... + def username(self): ... + def create_time(self): ... + def num_threads(self): ... + def threads(self): ... + def cpu_times(self): ... + def suspend(self) -> None: ... + def resume(self) -> None: ... + def cwd(self): ... + def open_files(self): ... + def connections(self, kind: str = ...): ... + def nice_get(self): ... + def nice_set(self, value): ... + def ionice_get(self): ... + def ionice_set(self, ioclass, value) -> None: ... + def io_counters(self): ... + def status(self): ... + def cpu_affinity_get(self): ... + def cpu_affinity_set(self, value): ... + def num_handles(self): ... + def num_ctx_switches(self): ... 
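Note (hedged sketch, not part of the vendored data): the Windows-only surface stubbed here (Process.num_handles(), win_service_iter()/WindowsService, the priority and I/O-priority classes) is gated on sys.platform == "win32" in the top-level stub, so client code guards it the same way; win_service_iter() is re-exported from psutil at runtime on Windows:

import sys

import psutil

if sys.platform == "win32":
    print("open handles:", psutil.Process().num_handles())
    for svc in psutil.win_service_iter():  # yields WindowsService objects
        print(svc.name(), svc.status())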
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..0a93eaed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/@tests/stubtest_allowlist.txt @@ -0,0 +1,5 @@ +psycopg2.connection +psycopg2.cursor +psycopg2.pool.AbstractConnectionPool.closeall +psycopg2.pool.AbstractConnectionPool.getconn +psycopg2.pool.AbstractConnectionPool.putconn diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/METADATA.toml new file mode 100644 index 00000000..2a3162d1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/METADATA.toml @@ -0,0 +1,4 @@ +version = "2.9.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/__init__.pyi new file mode 100644 index 00000000..5c027775 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/__init__.pyi @@ -0,0 +1,50 @@ +from collections.abc import Callable +from typing import Any, TypeVar, overload + +# connection and cursor not available at runtime +from psycopg2._psycopg import ( + BINARY as BINARY, + DATETIME as DATETIME, + NUMBER as NUMBER, + ROWID as ROWID, + STRING as STRING, + Binary as Binary, + DatabaseError as DatabaseError, + DataError as DataError, + Date as Date, + DateFromTicks as DateFromTicks, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + ProgrammingError as ProgrammingError, + Time as Time, + TimeFromTicks as TimeFromTicks, + Timestamp as Timestamp, + TimestampFromTicks as TimestampFromTicks, + Warning as Warning, + __libpq_version__ as __libpq_version__, + apilevel as apilevel, + connection as connection, + cursor as cursor, + paramstyle as paramstyle, + threadsafety as threadsafety, +) + +_T_conn = TypeVar("_T_conn", bound=connection) + +@overload +def connect(dsn: str, connection_factory: Callable[..., _T_conn], cursor_factory: None = ..., **kwargs: Any) -> _T_conn: ... +@overload +def connect( + dsn: str | None = ..., *, connection_factory: Callable[..., _T_conn], cursor_factory: None = ..., **kwargs: Any +) -> _T_conn: ... +@overload +def connect( + dsn: str | None = ..., + connection_factory: Callable[..., connection] | None = ..., + cursor_factory: Callable[..., cursor] | None = ..., + **kwargs: Any, +) -> connection: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_ipaddress.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_ipaddress.pyi new file mode 100644 index 00000000..0b0cf5a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_ipaddress.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete +from typing import Any + +ipaddress: Any + +def register_ipaddress(conn_or_curs: Incomplete | None = ...) 
-> None: ... +def cast_interface(s, cur: Incomplete | None = ...): ... +def cast_network(s, cur: Incomplete | None = ...): ... +def adapt_ipaddress(obj): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_json.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_json.pyi new file mode 100644 index 00000000..785bbc43 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_json.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from typing import Any + +JSON_OID: int +JSONARRAY_OID: int +JSONB_OID: int +JSONBARRAY_OID: int + +class Json: + adapted: Any + def __init__(self, adapted, dumps: Incomplete | None = ...) -> None: ... + def __conform__(self, proto): ... + def dumps(self, obj): ... + def prepare(self, conn) -> None: ... + def getquoted(self): ... + +def register_json( + conn_or_curs: Incomplete | None = ..., + globally: bool = ..., + loads: Incomplete | None = ..., + oid: Incomplete | None = ..., + array_oid: Incomplete | None = ..., + name: str = ..., +): ... +def register_default_json(conn_or_curs: Incomplete | None = ..., globally: bool = ..., loads: Incomplete | None = ...): ... +def register_default_jsonb(conn_or_curs: Incomplete | None = ..., globally: bool = ..., loads: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_psycopg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_psycopg.pyi new file mode 100644 index 00000000..8f856568 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_psycopg.pyi @@ -0,0 +1,501 @@ +from collections.abc import Callable, Iterable, Mapping, Sequence +from types import TracebackType +from typing import Any, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +import psycopg2 +import psycopg2.extensions +from psycopg2.sql import Composable + +_Vars: TypeAlias = Sequence[Any] | Mapping[str, Any] | None + +BINARY: Any +BINARYARRAY: Any +BOOLEAN: Any +BOOLEANARRAY: Any +BYTES: Any +BYTESARRAY: Any +CIDRARRAY: Any +DATE: Any +DATEARRAY: Any +DATETIME: Any +DATETIMEARRAY: Any +DATETIMETZ: Any +DATETIMETZARRAY: Any +DECIMAL: Any +DECIMALARRAY: Any +FLOAT: Any +FLOATARRAY: Any +INETARRAY: Any +INTEGER: Any +INTEGERARRAY: Any +INTERVAL: Any +INTERVALARRAY: Any +LONGINTEGER: Any +LONGINTEGERARRAY: Any +MACADDRARRAY: Any +NUMBER: Any +PYDATE: Any +PYDATEARRAY: Any +PYDATETIME: Any +PYDATETIMEARRAY: Any +PYDATETIMETZ: Any +PYDATETIMETZARRAY: Any +PYINTERVAL: Any +PYINTERVALARRAY: Any +PYTIME: Any +PYTIMEARRAY: Any +REPLICATION_LOGICAL: int +REPLICATION_PHYSICAL: int +ROWID: Any +ROWIDARRAY: Any +STRING: Any +STRINGARRAY: Any +TIME: Any +TIMEARRAY: Any +UNICODE: Any +UNICODEARRAY: Any +UNKNOWN: Any +adapters: dict[Any, Any] +apilevel: str +binary_types: dict[Any, Any] +encodings: dict[Any, Any] +paramstyle: str +sqlstate_errors: dict[Any, Any] +string_types: dict[Any, Any] +threadsafety: int + +__libpq_version__: int + +class cursor: + arraysize: int + binary_types: Any + closed: Any + connection: Any + description: Any + itersize: Any + lastrowid: Any + name: Any + pgresult_ptr: Any + query: Any + row_factory: Any + rowcount: int + rownumber: int + scrollable: bool | None + statusmessage: Any + string_types: Any + typecaster: Any + tzinfo_factory: Any + withhold: bool + def 
__init__(self, conn: connection, name: str | bytes | None = ...) -> None: ... + def callproc(self, procname, parameters=...): ... + def cast(self, oid, s): ... + def close(self): ... + def copy_expert(self, sql: str | bytes | Composable, file, size=...): ... + def copy_from(self, file, table, sep=..., null=..., size=..., columns=...): ... + def copy_to(self, file, table, sep=..., null=..., columns=...): ... + def execute(self, query: str | bytes | Composable, vars: _Vars = ...) -> None: ... + def executemany(self, query: str | bytes | Composable, vars_list: Iterable[_Vars]) -> None: ... + def fetchall(self) -> list[tuple[Any, ...]]: ... + def fetchmany(self, size: int | None = ...) -> list[tuple[Any, ...]]: ... + def fetchone(self) -> tuple[Any, ...] | None: ... + def mogrify(self, *args, **kwargs): ... + def nextset(self): ... + def scroll(self, value, mode=...): ... + def setinputsizes(self, sizes): ... + def setoutputsize(self, size, column=...): ... + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[Any, ...]: ... + +_Cursor: TypeAlias = cursor + +class AsIs: + adapted: Any + def __init__(self, *args, **kwargs) -> None: ... + def getquoted(self, *args, **kwargs): ... + def __conform__(self, *args, **kwargs): ... + +class Binary: + adapted: Any + buffer: Any + def __init__(self, *args, **kwargs) -> None: ... + def getquoted(self, *args, **kwargs): ... + def prepare(self, conn): ... + def __conform__(self, *args, **kwargs): ... + +class Boolean: + adapted: Any + def __init__(self, *args, **kwargs) -> None: ... + def getquoted(self, *args, **kwargs): ... + def __conform__(self, *args, **kwargs): ... + +class Column: + display_size: Any + internal_size: Any + name: Any + null_ok: Any + precision: Any + scale: Any + table_column: Any + table_oid: Any + type_code: Any + def __init__(self, *args, **kwargs) -> None: ... + def __eq__(self, __other): ... + def __ge__(self, __other): ... + def __getitem__(self, __index): ... + def __getstate__(self): ... + def __gt__(self, __other): ... + def __le__(self, __other): ... + def __len__(self) -> int: ... + def __lt__(self, __other): ... + def __ne__(self, __other): ... + def __setstate__(self, state): ... + +class ConnectionInfo: + # Note: the following properties can be None if their corresponding libpq function + # returns NULL. They're not annotated as such, because this is very unlikely in + # practice---the psycopg2 docs [1] don't even mention this as a possibility! + # + # - db_name + # - user + # - password + # - host + # - port + # - options + # + # (To prove this, one needs to inspect the psycopg2 source code [2], plus the + # documentation [3] and source code [4] of the corresponding libpq calls.) + # + # [1]: https://www.psycopg.org/docs/extensions.html#psycopg2.extensions.ConnectionInfo + # [2]: https://github.com/psycopg/psycopg2/blob/1d3a89a0bba621dc1cc9b32db6d241bd2da85ad1/psycopg/conninfo_type.c#L52 and below + # [3]: https://www.postgresql.org/docs/current/libpq-status.html + # [4]: https://github.com/postgres/postgres/blob/b39838889e76274b107935fa8e8951baf0e8b31b/src/interfaces/libpq/fe-connect.c#L6754 and below + @property + def backend_pid(self) -> int: ... + @property + def dbname(self) -> str: ... + @property + def dsn_parameters(self) -> dict[str, str]: ... + @property + def error_message(self) -> str | None: ... 
+ @property + def host(self) -> str: ... + @property + def needs_password(self) -> bool: ... + @property + def options(self) -> str: ... + @property + def password(self) -> str: ... + @property + def port(self) -> int: ... + @property + def protocol_version(self) -> int: ... + @property + def server_version(self) -> int: ... + @property + def socket(self) -> int: ... + @property + def ssl_attribute_names(self) -> list[str]: ... + @property + def ssl_in_use(self) -> bool: ... + @property + def status(self) -> int: ... + @property + def transaction_status(self) -> int: ... + @property + def used_password(self) -> bool: ... + @property + def user(self) -> str: ... + def __init__(self, *args, **kwargs) -> None: ... + def parameter_status(self, name: str) -> str | None: ... + def ssl_attribute(self, name: str) -> str | None: ... + +class DataError(psycopg2.DatabaseError): ... +class DatabaseError(psycopg2.Error): ... + +class Decimal: + adapted: Any + def __init__(self, *args, **kwargs) -> None: ... + def getquoted(self, *args, **kwargs): ... + def __conform__(self, *args, **kwargs): ... + +class Diagnostics: + column_name: str | None + constraint_name: str | None + context: str | None + datatype_name: str | None + internal_position: str | None + internal_query: str | None + message_detail: str | None + message_hint: str | None + message_primary: str | None + schema_name: str | None + severity: str | None + severity_nonlocalized: str | None + source_file: str | None + source_function: str | None + source_line: str | None + sqlstate: str | None + statement_position: str | None + table_name: str | None + def __init__(self, __err: Error) -> None: ... + +class Error(Exception): + cursor: _Cursor | None + diag: Diagnostics + pgcode: str | None + pgerror: str | None + def __init__(self, *args, **kwargs) -> None: ... + def __reduce__(self): ... + def __setstate__(self, state): ... + +class Float: + adapted: Any + def __init__(self, *args, **kwargs) -> None: ... + def getquoted(self, *args, **kwargs): ... + def __conform__(self, *args, **kwargs): ... + +class ISQLQuote: + _wrapped: Any + def __init__(self, *args, **kwargs) -> None: ... + def getbinary(self, *args, **kwargs): ... + def getbuffer(self, *args, **kwargs): ... + def getquoted(self, *args, **kwargs): ... + +class Int: + adapted: Any + def __init__(self, *args, **kwargs) -> None: ... + def getquoted(self, *args, **kwargs): ... + def __conform__(self, *args, **kwargs): ... + +class IntegrityError(psycopg2.DatabaseError): ... +class InterfaceError(psycopg2.Error): ... +class InternalError(psycopg2.DatabaseError): ... + +class List: + adapted: Any + def __init__(self, *args, **kwargs) -> None: ... + def getquoted(self, *args, **kwargs): ... + def prepare(self, *args, **kwargs): ... + def __conform__(self, *args, **kwargs): ... + +class NotSupportedError(psycopg2.DatabaseError): ... + +class Notify: + channel: Any + payload: Any + pid: Any + def __init__(self, *args, **kwargs) -> None: ... + def __eq__(self, __other): ... + def __ge__(self, __other): ... + def __getitem__(self, __index): ... + def __gt__(self, __other): ... + def __hash__(self) -> int: ... + def __le__(self, __other): ... + def __len__(self) -> int: ... + def __lt__(self, __other): ... + def __ne__(self, __other): ... + +class OperationalError(psycopg2.DatabaseError): ... +class ProgrammingError(psycopg2.DatabaseError): ... +class QueryCanceledError(psycopg2.OperationalError): ... 
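Note (minimal, hedged sketch, not part of the vendored data; it assumes a reachable "test" database): the connection and cursor classes typed in this module, together with the connect() overloads in psycopg2/__init__.pyi above, describe the usual DB-API flow:

import psycopg2

conn = psycopg2.connect("dbname=test user=postgres")  # -> connection
with conn:  # the connection context manager ends the transaction; it does not close
    with conn.cursor() as cur:  # -> cursor
        cur.execute("SELECT %s::int + %s::int", (20, 22))
        row = cur.fetchone()  # tuple[Any, ...] | None
        assert row is not None and row[0] == 42
conn.close()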
+ +class QuotedString: + adapted: Any + buffer: Any + encoding: Any + def __init__(self, *args, **kwargs) -> None: ... + def getquoted(self, *args, **kwargs): ... + def prepare(self, *args, **kwargs): ... + def __conform__(self, *args, **kwargs): ... + +class ReplicationConnection(psycopg2.extensions.connection): + autocommit: Any + isolation_level: Any + replication_type: Any + reset: Any + set_isolation_level: Any + set_session: Any + def __init__(self, *args, **kwargs) -> None: ... + +class ReplicationCursor(cursor): + feedback_timestamp: Any + io_timestamp: Any + wal_end: Any + def __init__(self, *args, **kwargs) -> None: ... + def consume_stream(self, consumer, keepalive_interval=...): ... + def read_message(self, *args, **kwargs): ... + def send_feedback(self, write_lsn=..., flush_lsn=..., apply_lsn=..., reply=..., force=...): ... + def start_replication_expert(self, command, decode=..., status_interval=...): ... + +class ReplicationMessage: + cursor: Any + data_size: Any + data_start: Any + payload: Any + send_time: Any + wal_end: Any + def __init__(self, *args, **kwargs) -> None: ... + +class TransactionRollbackError(psycopg2.OperationalError): ... +class Warning(Exception): ... + +class Xid: + bqual: Any + database: Any + format_id: Any + gtrid: Any + owner: Any + prepared: Any + def __init__(self, *args, **kwargs) -> None: ... + def from_string(self, *args, **kwargs): ... + def __getitem__(self, __index): ... + def __len__(self) -> int: ... + +_T_cur = TypeVar("_T_cur", bound=cursor) + +class connection: + DataError: Any + DatabaseError: Any + Error: Any + IntegrityError: Any + InterfaceError: Any + InternalError: Any + NotSupportedError: Any + OperationalError: Any + ProgrammingError: Any + Warning: Any + @property + def async_(self) -> int: ... + autocommit: bool + @property + def binary_types(self) -> Any: ... + @property + def closed(self) -> int: ... + cursor_factory: Callable[..., _Cursor] + @property + def dsn(self) -> str: ... + @property + def encoding(self) -> str: ... + @property + def info(self) -> ConnectionInfo: ... + @property + def isolation_level(self) -> int | None: ... + @isolation_level.setter + def isolation_level(self, __value: str | bytes | int | None) -> None: ... + notices: list[Any] + notifies: list[Any] + @property + def pgconn_ptr(self) -> int | None: ... + @property + def protocol_version(self) -> int: ... + @property + def deferrable(self) -> bool | None: ... + @deferrable.setter + def deferrable(self, __value: Literal["default"] | bool | None) -> None: ... + @property + def readonly(self) -> bool | None: ... + @readonly.setter + def readonly(self, __value: Literal["default"] | bool | None) -> None: ... + @property + def server_version(self) -> int: ... + @property + def status(self) -> int: ... + @property + def string_types(self) -> Any: ... + # Really it's dsn: str, async: int = ..., async_: int = ..., but + # that would be a syntax error. + def __init__(self, dsn: str, *, async_: int = ...) -> None: ... + def cancel(self) -> None: ... + def close(self) -> None: ... + def commit(self) -> None: ... + @overload + def cursor(self, name: str | bytes | None = ..., *, withhold: bool = ..., scrollable: bool | None = ...) -> _Cursor: ... + @overload + def cursor( + self, + name: str | bytes | None = ..., + *, + cursor_factory: Callable[..., _T_cur], + withhold: bool = ..., + scrollable: bool | None = ..., + ) -> _T_cur: ... 
+ @overload + def cursor( + self, name: str | bytes | None, cursor_factory: Callable[..., _T_cur], withhold: bool = ..., scrollable: bool | None = ... + ) -> _T_cur: ... + def fileno(self) -> int: ... + def get_backend_pid(self) -> int: ... + def get_dsn_parameters(self) -> dict[str, str]: ... + def get_native_connection(self): ... + def get_parameter_status(self, parameter: str) -> str | None: ... + def get_transaction_status(self) -> int: ... + def isexecuting(self) -> bool: ... + def lobject( + self, + oid: int = ..., + mode: str | None = ..., + new_oid: int = ..., + new_file: str | None = ..., + lobject_factory: type[lobject] = ..., + ) -> lobject: ... + def poll(self) -> int: ... + def reset(self) -> None: ... + def rollback(self) -> None: ... + def set_client_encoding(self, encoding: str) -> None: ... + def set_isolation_level(self, level: int | None) -> None: ... + def set_session( + self, + isolation_level: str | bytes | int | None = ..., + readonly: bool | Literal["default", b"default"] | None = ..., + deferrable: bool | Literal["default", b"default"] | None = ..., + autocommit: bool = ..., + ) -> None: ... + def tpc_begin(self, xid: str | bytes | Xid) -> None: ... + def tpc_commit(self, __xid: str | bytes | Xid = ...) -> None: ... + def tpc_prepare(self) -> None: ... + def tpc_recover(self) -> list[Xid]: ... + def tpc_rollback(self, __xid: str | bytes | Xid = ...) -> None: ... + def xid(self, format_id, gtrid, bqual) -> Xid: ... + def __enter__(self) -> Self: ... + def __exit__(self, __type: object, __name: object, __tb: object) -> None: ... + +class lobject: + closed: Any + mode: Any + oid: Any + def __init__(self, *args, **kwargs) -> None: ... + def close(self): ... + def export(self, filename): ... + def read(self, size=...): ... + def seek(self, offset, whence=...): ... + def tell(self): ... + def truncate(self, len=...): ... + def unlink(self): ... + def write(self, str): ... + +def Date(year, month, day): ... +def DateFromPy(*args, **kwargs): ... +def DateFromTicks(ticks): ... +def IntervalFromPy(*args, **kwargs): ... +def Time(hour, minutes, seconds, tzinfo=...): ... +def TimeFromPy(*args, **kwargs): ... +def TimeFromTicks(ticks): ... +def Timestamp(year, month, day, hour, minutes, seconds, tzinfo=...): ... +def TimestampFromPy(*args, **kwargs): ... +def TimestampFromTicks(ticks): ... +def _connect(*args, **kwargs): ... +def adapt(*args, **kwargs): ... +def encrypt_password(*args, **kwargs): ... +def get_wait_callback(*args, **kwargs): ... +def libpq_version(*args, **kwargs): ... +def new_array_type(oids, name, baseobj): ... +def new_type(oids, name, castobj): ... +def parse_dsn(dsn: str | bytes) -> dict[str, Any]: ... +def quote_ident(*args, **kwargs): ... +def register_type(*args, **kwargs): ... +def set_wait_callback(_none): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_range.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_range.pyi new file mode 100644 index 00000000..eed81ccb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/_range.pyi @@ -0,0 +1,62 @@ +from _typeshed import Incomplete +from typing import Any + +class Range: + def __init__( + self, lower: Incomplete | None = ..., upper: Incomplete | None = ..., bounds: str = ..., empty: bool = ... + ) -> None: ... + @property + def lower(self): ... + @property + def upper(self): ... + @property + def isempty(self): ... 
+ @property + def lower_inf(self): ... + @property + def upper_inf(self): ... + @property + def lower_inc(self): ... + @property + def upper_inc(self): ... + def __contains__(self, x): ... + def __bool__(self) -> bool: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + +def register_range(pgrange, pyrange, conn_or_curs, globally: bool = ...): ... + +class RangeAdapter: + name: Any + adapted: Any + def __init__(self, adapted) -> None: ... + def __conform__(self, proto): ... + def prepare(self, conn) -> None: ... + def getquoted(self): ... + +class RangeCaster: + subtype_oid: Any + typecaster: Any + array_typecaster: Any + def __init__(self, pgrange, pyrange, oid, subtype_oid, array_oid: Incomplete | None = ...) -> None: ... + def parse(self, s, cur: Incomplete | None = ...): ... + +class NumericRange(Range): ... +class DateRange(Range): ... +class DateTimeRange(Range): ... +class DateTimeTZRange(Range): ... + +class NumberRangeAdapter(RangeAdapter): + def getquoted(self): ... + +int4range_caster: Any +int8range_caster: Any +numrange_caster: Any +daterange_caster: Any +tsrange_caster: Any +tstzrange_caster: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/errorcodes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/errorcodes.pyi new file mode 100644 index 00000000..f4726bd1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/errorcodes.pyi @@ -0,0 +1,304 @@ +def lookup(code, _cache=...): ... + +CLASS_SUCCESSFUL_COMPLETION: str +CLASS_WARNING: str +CLASS_NO_DATA: str +CLASS_SQL_STATEMENT_NOT_YET_COMPLETE: str +CLASS_CONNECTION_EXCEPTION: str +CLASS_TRIGGERED_ACTION_EXCEPTION: str +CLASS_FEATURE_NOT_SUPPORTED: str +CLASS_INVALID_TRANSACTION_INITIATION: str +CLASS_LOCATOR_EXCEPTION: str +CLASS_INVALID_GRANTOR: str +CLASS_INVALID_ROLE_SPECIFICATION: str +CLASS_DIAGNOSTICS_EXCEPTION: str +CLASS_CASE_NOT_FOUND: str +CLASS_CARDINALITY_VIOLATION: str +CLASS_DATA_EXCEPTION: str +CLASS_INTEGRITY_CONSTRAINT_VIOLATION: str +CLASS_INVALID_CURSOR_STATE: str +CLASS_INVALID_TRANSACTION_STATE: str +CLASS_INVALID_SQL_STATEMENT_NAME: str +CLASS_TRIGGERED_DATA_CHANGE_VIOLATION: str +CLASS_INVALID_AUTHORIZATION_SPECIFICATION: str +CLASS_DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST: str +CLASS_INVALID_TRANSACTION_TERMINATION: str +CLASS_SQL_ROUTINE_EXCEPTION: str +CLASS_INVALID_CURSOR_NAME: str +CLASS_EXTERNAL_ROUTINE_EXCEPTION: str +CLASS_EXTERNAL_ROUTINE_INVOCATION_EXCEPTION: str +CLASS_SAVEPOINT_EXCEPTION: str +CLASS_INVALID_CATALOG_NAME: str +CLASS_INVALID_SCHEMA_NAME: str +CLASS_TRANSACTION_ROLLBACK: str +CLASS_SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION: str +CLASS_WITH_CHECK_OPTION_VIOLATION: str +CLASS_INSUFFICIENT_RESOURCES: str +CLASS_PROGRAM_LIMIT_EXCEEDED: str +CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE: str +CLASS_OPERATOR_INTERVENTION: str +CLASS_SYSTEM_ERROR: str +CLASS_SNAPSHOT_FAILURE: str +CLASS_CONFIGURATION_FILE_ERROR: str +CLASS_FOREIGN_DATA_WRAPPER_ERROR: str +CLASS_PL_PGSQL_ERROR: str +CLASS_INTERNAL_ERROR: str +SUCCESSFUL_COMPLETION: str +WARNING: str +NULL_VALUE_ELIMINATED_IN_SET_FUNCTION: str +STRING_DATA_RIGHT_TRUNCATION_: str +PRIVILEGE_NOT_REVOKED: str +PRIVILEGE_NOT_GRANTED: str +IMPLICIT_ZERO_BIT_PADDING: str +DYNAMIC_RESULT_SETS_RETURNED: str 
+DEPRECATED_FEATURE: str +NO_DATA: str +NO_ADDITIONAL_DYNAMIC_RESULT_SETS_RETURNED: str +SQL_STATEMENT_NOT_YET_COMPLETE: str +CONNECTION_EXCEPTION: str +SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION: str +CONNECTION_DOES_NOT_EXIST: str +SQLSERVER_REJECTED_ESTABLISHMENT_OF_SQLCONNECTION: str +CONNECTION_FAILURE: str +TRANSACTION_RESOLUTION_UNKNOWN: str +PROTOCOL_VIOLATION: str +TRIGGERED_ACTION_EXCEPTION: str +FEATURE_NOT_SUPPORTED: str +INVALID_TRANSACTION_INITIATION: str +LOCATOR_EXCEPTION: str +INVALID_LOCATOR_SPECIFICATION: str +INVALID_GRANTOR: str +INVALID_GRANT_OPERATION: str +INVALID_ROLE_SPECIFICATION: str +DIAGNOSTICS_EXCEPTION: str +STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER: str +CASE_NOT_FOUND: str +CARDINALITY_VIOLATION: str +DATA_EXCEPTION: str +STRING_DATA_RIGHT_TRUNCATION: str +NULL_VALUE_NO_INDICATOR_PARAMETER: str +NUMERIC_VALUE_OUT_OF_RANGE: str +NULL_VALUE_NOT_ALLOWED_: str +ERROR_IN_ASSIGNMENT: str +INVALID_DATETIME_FORMAT: str +DATETIME_FIELD_OVERFLOW: str +INVALID_TIME_ZONE_DISPLACEMENT_VALUE: str +ESCAPE_CHARACTER_CONFLICT: str +INVALID_USE_OF_ESCAPE_CHARACTER: str +INVALID_ESCAPE_OCTET: str +ZERO_LENGTH_CHARACTER_STRING: str +MOST_SPECIFIC_TYPE_MISMATCH: str +SEQUENCE_GENERATOR_LIMIT_EXCEEDED: str +NOT_AN_XML_DOCUMENT: str +INVALID_XML_DOCUMENT: str +INVALID_XML_CONTENT: str +INVALID_XML_COMMENT: str +INVALID_XML_PROCESSING_INSTRUCTION: str +INVALID_INDICATOR_PARAMETER_VALUE: str +SUBSTRING_ERROR: str +DIVISION_BY_ZERO: str +INVALID_PRECEDING_OR_FOLLOWING_SIZE: str +INVALID_ARGUMENT_FOR_NTILE_FUNCTION: str +INTERVAL_FIELD_OVERFLOW: str +INVALID_ARGUMENT_FOR_NTH_VALUE_FUNCTION: str +INVALID_CHARACTER_VALUE_FOR_CAST: str +INVALID_ESCAPE_CHARACTER: str +INVALID_REGULAR_EXPRESSION: str +INVALID_ARGUMENT_FOR_LOGARITHM: str +INVALID_ARGUMENT_FOR_POWER_FUNCTION: str +INVALID_ARGUMENT_FOR_WIDTH_BUCKET_FUNCTION: str +INVALID_ROW_COUNT_IN_LIMIT_CLAUSE: str +INVALID_ROW_COUNT_IN_RESULT_OFFSET_CLAUSE: str +INVALID_LIMIT_VALUE: str +CHARACTER_NOT_IN_REPERTOIRE: str +INDICATOR_OVERFLOW: str +INVALID_PARAMETER_VALUE: str +UNTERMINATED_C_STRING: str +INVALID_ESCAPE_SEQUENCE: str +STRING_DATA_LENGTH_MISMATCH: str +TRIM_ERROR: str +ARRAY_SUBSCRIPT_ERROR: str +INVALID_TABLESAMPLE_REPEAT: str +INVALID_TABLESAMPLE_ARGUMENT: str +DUPLICATE_JSON_OBJECT_KEY_VALUE: str +INVALID_ARGUMENT_FOR_SQL_JSON_DATETIME_FUNCTION: str +INVALID_JSON_TEXT: str +INVALID_SQL_JSON_SUBSCRIPT: str +MORE_THAN_ONE_SQL_JSON_ITEM: str +NO_SQL_JSON_ITEM: str +NON_NUMERIC_SQL_JSON_ITEM: str +NON_UNIQUE_KEYS_IN_A_JSON_OBJECT: str +SINGLETON_SQL_JSON_ITEM_REQUIRED: str +SQL_JSON_ARRAY_NOT_FOUND: str +SQL_JSON_MEMBER_NOT_FOUND: str +SQL_JSON_NUMBER_NOT_FOUND: str +SQL_JSON_OBJECT_NOT_FOUND: str +TOO_MANY_JSON_ARRAY_ELEMENTS: str +TOO_MANY_JSON_OBJECT_MEMBERS: str +SQL_JSON_SCALAR_REQUIRED: str +FLOATING_POINT_EXCEPTION: str +INVALID_TEXT_REPRESENTATION: str +INVALID_BINARY_REPRESENTATION: str +BAD_COPY_FILE_FORMAT: str +UNTRANSLATABLE_CHARACTER: str +NONSTANDARD_USE_OF_ESCAPE_CHARACTER: str +INTEGRITY_CONSTRAINT_VIOLATION: str +RESTRICT_VIOLATION: str +NOT_NULL_VIOLATION: str +FOREIGN_KEY_VIOLATION: str +UNIQUE_VIOLATION: str +CHECK_VIOLATION: str +EXCLUSION_VIOLATION: str +INVALID_CURSOR_STATE: str +INVALID_TRANSACTION_STATE: str +ACTIVE_SQL_TRANSACTION: str +BRANCH_TRANSACTION_ALREADY_ACTIVE: str +INAPPROPRIATE_ACCESS_MODE_FOR_BRANCH_TRANSACTION: str +INAPPROPRIATE_ISOLATION_LEVEL_FOR_BRANCH_TRANSACTION: str +NO_ACTIVE_SQL_TRANSACTION_FOR_BRANCH_TRANSACTION: str +READ_ONLY_SQL_TRANSACTION: str 
+SCHEMA_AND_DATA_STATEMENT_MIXING_NOT_SUPPORTED: str +HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL: str +NO_ACTIVE_SQL_TRANSACTION: str +IN_FAILED_SQL_TRANSACTION: str +IDLE_IN_TRANSACTION_SESSION_TIMEOUT: str +INVALID_SQL_STATEMENT_NAME: str +TRIGGERED_DATA_CHANGE_VIOLATION: str +INVALID_AUTHORIZATION_SPECIFICATION: str +INVALID_PASSWORD: str +DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST: str +DEPENDENT_OBJECTS_STILL_EXIST: str +INVALID_TRANSACTION_TERMINATION: str +SQL_ROUTINE_EXCEPTION: str +MODIFYING_SQL_DATA_NOT_PERMITTED_: str +PROHIBITED_SQL_STATEMENT_ATTEMPTED_: str +READING_SQL_DATA_NOT_PERMITTED_: str +FUNCTION_EXECUTED_NO_RETURN_STATEMENT: str +INVALID_CURSOR_NAME: str +EXTERNAL_ROUTINE_EXCEPTION: str +CONTAINING_SQL_NOT_PERMITTED: str +MODIFYING_SQL_DATA_NOT_PERMITTED: str +PROHIBITED_SQL_STATEMENT_ATTEMPTED: str +READING_SQL_DATA_NOT_PERMITTED: str +EXTERNAL_ROUTINE_INVOCATION_EXCEPTION: str +INVALID_SQLSTATE_RETURNED: str +NULL_VALUE_NOT_ALLOWED: str +TRIGGER_PROTOCOL_VIOLATED: str +SRF_PROTOCOL_VIOLATED: str +EVENT_TRIGGER_PROTOCOL_VIOLATED: str +SAVEPOINT_EXCEPTION: str +INVALID_SAVEPOINT_SPECIFICATION: str +INVALID_CATALOG_NAME: str +INVALID_SCHEMA_NAME: str +TRANSACTION_ROLLBACK: str +SERIALIZATION_FAILURE: str +TRANSACTION_INTEGRITY_CONSTRAINT_VIOLATION: str +STATEMENT_COMPLETION_UNKNOWN: str +DEADLOCK_DETECTED: str +SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION: str +INSUFFICIENT_PRIVILEGE: str +SYNTAX_ERROR: str +INVALID_NAME: str +INVALID_COLUMN_DEFINITION: str +NAME_TOO_LONG: str +DUPLICATE_COLUMN: str +AMBIGUOUS_COLUMN: str +UNDEFINED_COLUMN: str +UNDEFINED_OBJECT: str +DUPLICATE_OBJECT: str +DUPLICATE_ALIAS: str +DUPLICATE_FUNCTION: str +AMBIGUOUS_FUNCTION: str +GROUPING_ERROR: str +DATATYPE_MISMATCH: str +WRONG_OBJECT_TYPE: str +INVALID_FOREIGN_KEY: str +CANNOT_COERCE: str +UNDEFINED_FUNCTION: str +GENERATED_ALWAYS: str +RESERVED_NAME: str +UNDEFINED_TABLE: str +UNDEFINED_PARAMETER: str +DUPLICATE_CURSOR: str +DUPLICATE_DATABASE: str +DUPLICATE_PREPARED_STATEMENT: str +DUPLICATE_SCHEMA: str +DUPLICATE_TABLE: str +AMBIGUOUS_PARAMETER: str +AMBIGUOUS_ALIAS: str +INVALID_COLUMN_REFERENCE: str +INVALID_CURSOR_DEFINITION: str +INVALID_DATABASE_DEFINITION: str +INVALID_FUNCTION_DEFINITION: str +INVALID_PREPARED_STATEMENT_DEFINITION: str +INVALID_SCHEMA_DEFINITION: str +INVALID_TABLE_DEFINITION: str +INVALID_OBJECT_DEFINITION: str +INDETERMINATE_DATATYPE: str +INVALID_RECURSION: str +WINDOWING_ERROR: str +COLLATION_MISMATCH: str +INDETERMINATE_COLLATION: str +WITH_CHECK_OPTION_VIOLATION: str +INSUFFICIENT_RESOURCES: str +DISK_FULL: str +OUT_OF_MEMORY: str +TOO_MANY_CONNECTIONS: str +CONFIGURATION_LIMIT_EXCEEDED: str +PROGRAM_LIMIT_EXCEEDED: str +STATEMENT_TOO_COMPLEX: str +TOO_MANY_COLUMNS: str +TOO_MANY_ARGUMENTS: str +OBJECT_NOT_IN_PREREQUISITE_STATE: str +OBJECT_IN_USE: str +CANT_CHANGE_RUNTIME_PARAM: str +LOCK_NOT_AVAILABLE: str +UNSAFE_NEW_ENUM_VALUE_USAGE: str +OPERATOR_INTERVENTION: str +QUERY_CANCELED: str +ADMIN_SHUTDOWN: str +CRASH_SHUTDOWN: str +CANNOT_CONNECT_NOW: str +DATABASE_DROPPED: str +SYSTEM_ERROR: str +IO_ERROR: str +UNDEFINED_FILE: str +DUPLICATE_FILE: str +SNAPSHOT_TOO_OLD: str +CONFIG_FILE_ERROR: str +LOCK_FILE_EXISTS: str +FDW_ERROR: str +FDW_OUT_OF_MEMORY: str +FDW_DYNAMIC_PARAMETER_VALUE_NEEDED: str +FDW_INVALID_DATA_TYPE: str +FDW_COLUMN_NAME_NOT_FOUND: str +FDW_INVALID_DATA_TYPE_DESCRIPTORS: str +FDW_INVALID_COLUMN_NAME: str +FDW_INVALID_COLUMN_NUMBER: str +FDW_INVALID_USE_OF_NULL_POINTER: str +FDW_INVALID_STRING_FORMAT: str +FDW_INVALID_HANDLE: str 
+FDW_INVALID_OPTION_INDEX: str +FDW_INVALID_OPTION_NAME: str +FDW_OPTION_NAME_NOT_FOUND: str +FDW_REPLY_HANDLE: str +FDW_UNABLE_TO_CREATE_EXECUTION: str +FDW_UNABLE_TO_CREATE_REPLY: str +FDW_UNABLE_TO_ESTABLISH_CONNECTION: str +FDW_NO_SCHEMAS: str +FDW_SCHEMA_NOT_FOUND: str +FDW_TABLE_NOT_FOUND: str +FDW_FUNCTION_SEQUENCE_ERROR: str +FDW_TOO_MANY_HANDLES: str +FDW_INCONSISTENT_DESCRIPTOR_INFORMATION: str +FDW_INVALID_ATTRIBUTE_VALUE: str +FDW_INVALID_STRING_LENGTH_OR_BUFFER_LENGTH: str +FDW_INVALID_DESCRIPTOR_FIELD_IDENTIFIER: str +PLPGSQL_ERROR: str +RAISE_EXCEPTION: str +NO_DATA_FOUND: str +TOO_MANY_ROWS: str +ASSERT_FAILURE: str +INTERNAL_ERROR: str +DATA_CORRUPTED: str +INDEX_CORRUPTED: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/errors.pyi new file mode 100644 index 00000000..bf7d3579 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/errors.pyi @@ -0,0 +1,263 @@ +from psycopg2._psycopg import Error as Error, Warning as Warning + +class DatabaseError(Error): ... +class InterfaceError(Error): ... +class DataError(DatabaseError): ... +class DiagnosticsException(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InternalError(DatabaseError): ... +class InvalidGrantOperation(DatabaseError): ... +class InvalidGrantor(DatabaseError): ... +class InvalidLocatorSpecification(DatabaseError): ... +class InvalidRoleSpecification(DatabaseError): ... +class InvalidTransactionInitiation(DatabaseError): ... +class LocatorException(DatabaseError): ... +class NoAdditionalDynamicResultSetsReturned(DatabaseError): ... +class NoData(DatabaseError): ... +class NotSupportedError(DatabaseError): ... +class OperationalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... +class SnapshotTooOld(DatabaseError): ... +class SqlStatementNotYetComplete(DatabaseError): ... +class StackedDiagnosticsAccessedWithoutActiveHandler(DatabaseError): ... +class TriggeredActionException(DatabaseError): ... +class ActiveSqlTransaction(InternalError): ... +class AdminShutdown(OperationalError): ... +class AmbiguousAlias(ProgrammingError): ... +class AmbiguousColumn(ProgrammingError): ... +class AmbiguousFunction(ProgrammingError): ... +class AmbiguousParameter(ProgrammingError): ... +class ArraySubscriptError(DataError): ... +class AssertFailure(InternalError): ... +class BadCopyFileFormat(DataError): ... +class BranchTransactionAlreadyActive(InternalError): ... +class CannotCoerce(ProgrammingError): ... +class CannotConnectNow(OperationalError): ... +class CantChangeRuntimeParam(OperationalError): ... +class CardinalityViolation(ProgrammingError): ... +class CaseNotFound(ProgrammingError): ... +class CharacterNotInRepertoire(DataError): ... +class CheckViolation(IntegrityError): ... +class CollationMismatch(ProgrammingError): ... +class ConfigFileError(InternalError): ... +class ConfigurationLimitExceeded(OperationalError): ... +class ConnectionDoesNotExist(OperationalError): ... +class ConnectionException(OperationalError): ... +class ConnectionFailure(OperationalError): ... +class ContainingSqlNotPermitted(InternalError): ... +class CrashShutdown(OperationalError): ... +class DataCorrupted(InternalError): ... +class DataException(DataError): ... +class DatabaseDropped(OperationalError): ... +class DatatypeMismatch(ProgrammingError): ... 
+class DatetimeFieldOverflow(DataError): ... +class DependentObjectsStillExist(InternalError): ... +class DependentPrivilegeDescriptorsStillExist(InternalError): ... +class DiskFull(OperationalError): ... +class DivisionByZero(DataError): ... +class DuplicateAlias(ProgrammingError): ... +class DuplicateColumn(ProgrammingError): ... +class DuplicateCursor(ProgrammingError): ... +class DuplicateDatabase(ProgrammingError): ... +class DuplicateFile(OperationalError): ... +class DuplicateFunction(ProgrammingError): ... +class DuplicateJsonObjectKeyValue(DataError): ... +class DuplicateObject(ProgrammingError): ... +class DuplicatePreparedStatement(ProgrammingError): ... +class DuplicateSchema(ProgrammingError): ... +class DuplicateTable(ProgrammingError): ... +class ErrorInAssignment(DataError): ... +class EscapeCharacterConflict(DataError): ... +class EventTriggerProtocolViolated(InternalError): ... +class ExclusionViolation(IntegrityError): ... +class ExternalRoutineException(InternalError): ... +class ExternalRoutineInvocationException(InternalError): ... +class FdwColumnNameNotFound(OperationalError): ... +class FdwDynamicParameterValueNeeded(OperationalError): ... +class FdwError(OperationalError): ... +class FdwFunctionSequenceError(OperationalError): ... +class FdwInconsistentDescriptorInformation(OperationalError): ... +class FdwInvalidAttributeValue(OperationalError): ... +class FdwInvalidColumnName(OperationalError): ... +class FdwInvalidColumnNumber(OperationalError): ... +class FdwInvalidDataType(OperationalError): ... +class FdwInvalidDataTypeDescriptors(OperationalError): ... +class FdwInvalidDescriptorFieldIdentifier(OperationalError): ... +class FdwInvalidHandle(OperationalError): ... +class FdwInvalidOptionIndex(OperationalError): ... +class FdwInvalidOptionName(OperationalError): ... +class FdwInvalidStringFormat(OperationalError): ... +class FdwInvalidStringLengthOrBufferLength(OperationalError): ... +class FdwInvalidUseOfNullPointer(OperationalError): ... +class FdwNoSchemas(OperationalError): ... +class FdwOptionNameNotFound(OperationalError): ... +class FdwOutOfMemory(OperationalError): ... +class FdwReplyHandle(OperationalError): ... +class FdwSchemaNotFound(OperationalError): ... +class FdwTableNotFound(OperationalError): ... +class FdwTooManyHandles(OperationalError): ... +class FdwUnableToCreateExecution(OperationalError): ... +class FdwUnableToCreateReply(OperationalError): ... +class FdwUnableToEstablishConnection(OperationalError): ... +class FeatureNotSupported(NotSupportedError): ... +class FloatingPointException(DataError): ... +class ForeignKeyViolation(IntegrityError): ... +class FunctionExecutedNoReturnStatement(InternalError): ... +class GeneratedAlways(ProgrammingError): ... +class GroupingError(ProgrammingError): ... +class HeldCursorRequiresSameIsolationLevel(InternalError): ... +class IdleInTransactionSessionTimeout(InternalError): ... +class InFailedSqlTransaction(InternalError): ... +class InappropriateAccessModeForBranchTransaction(InternalError): ... +class InappropriateIsolationLevelForBranchTransaction(InternalError): ... +class IndeterminateCollation(ProgrammingError): ... +class IndeterminateDatatype(ProgrammingError): ... +class IndexCorrupted(InternalError): ... +class IndicatorOverflow(DataError): ... +class InsufficientPrivilege(ProgrammingError): ... +class InsufficientResources(OperationalError): ... +class IntegrityConstraintViolation(IntegrityError): ... +class InternalError_(InternalError): ... +class IntervalFieldOverflow(DataError): ... 
+class InvalidArgumentForLogarithm(DataError): ... +class InvalidArgumentForNthValueFunction(DataError): ... +class InvalidArgumentForNtileFunction(DataError): ... +class InvalidArgumentForPowerFunction(DataError): ... +class InvalidArgumentForSqlJsonDatetimeFunction(DataError): ... +class InvalidArgumentForWidthBucketFunction(DataError): ... +class InvalidAuthorizationSpecification(OperationalError): ... +class InvalidBinaryRepresentation(DataError): ... +class InvalidCatalogName(ProgrammingError): ... +class InvalidCharacterValueForCast(DataError): ... +class InvalidColumnDefinition(ProgrammingError): ... +class InvalidColumnReference(ProgrammingError): ... +class InvalidCursorDefinition(ProgrammingError): ... +class InvalidCursorName(OperationalError): ... +class InvalidCursorState(InternalError): ... +class InvalidDatabaseDefinition(ProgrammingError): ... +class InvalidDatetimeFormat(DataError): ... +class InvalidEscapeCharacter(DataError): ... +class InvalidEscapeOctet(DataError): ... +class InvalidEscapeSequence(DataError): ... +class InvalidForeignKey(ProgrammingError): ... +class InvalidFunctionDefinition(ProgrammingError): ... +class InvalidIndicatorParameterValue(DataError): ... +class InvalidJsonText(DataError): ... +class InvalidName(ProgrammingError): ... +class InvalidObjectDefinition(ProgrammingError): ... +class InvalidParameterValue(DataError): ... +class InvalidPassword(OperationalError): ... +class InvalidPrecedingOrFollowingSize(DataError): ... +class InvalidPreparedStatementDefinition(ProgrammingError): ... +class InvalidRecursion(ProgrammingError): ... +class InvalidRegularExpression(DataError): ... +class InvalidRowCountInLimitClause(DataError): ... +class InvalidRowCountInResultOffsetClause(DataError): ... +class InvalidSavepointSpecification(InternalError): ... +class InvalidSchemaDefinition(ProgrammingError): ... +class InvalidSchemaName(ProgrammingError): ... +class InvalidSqlJsonSubscript(DataError): ... +class InvalidSqlStatementName(OperationalError): ... +class InvalidSqlstateReturned(InternalError): ... +class InvalidTableDefinition(ProgrammingError): ... +class InvalidTablesampleArgument(DataError): ... +class InvalidTablesampleRepeat(DataError): ... +class InvalidTextRepresentation(DataError): ... +class InvalidTimeZoneDisplacementValue(DataError): ... +class InvalidTransactionState(InternalError): ... +class InvalidTransactionTermination(InternalError): ... +class InvalidUseOfEscapeCharacter(DataError): ... +class InvalidXmlComment(DataError): ... +class InvalidXmlContent(DataError): ... +class InvalidXmlDocument(DataError): ... +class InvalidXmlProcessingInstruction(DataError): ... +class IoError(OperationalError): ... +class LockFileExists(InternalError): ... +class LockNotAvailable(OperationalError): ... +class ModifyingSqlDataNotPermitted(InternalError): ... +class ModifyingSqlDataNotPermittedExt(InternalError): ... +class MoreThanOneSqlJsonItem(DataError): ... +class MostSpecificTypeMismatch(DataError): ... +class NameTooLong(ProgrammingError): ... +class NoActiveSqlTransaction(InternalError): ... +class NoActiveSqlTransactionForBranchTransaction(InternalError): ... +class NoDataFound(InternalError): ... +class NoSqlJsonItem(DataError): ... +class NonNumericSqlJsonItem(DataError): ... +class NonUniqueKeysInAJsonObject(DataError): ... +class NonstandardUseOfEscapeCharacter(DataError): ... +class NotAnXmlDocument(DataError): ... +class NotNullViolation(IntegrityError): ... +class NullValueNoIndicatorParameter(DataError): ... 
+class NullValueNotAllowed(DataError): ... +class NullValueNotAllowedExt(InternalError): ... +class NumericValueOutOfRange(DataError): ... +class ObjectInUse(OperationalError): ... +class ObjectNotInPrerequisiteState(OperationalError): ... +class OperatorIntervention(OperationalError): ... +class OutOfMemory(OperationalError): ... +class PlpgsqlError(InternalError): ... +class ProgramLimitExceeded(OperationalError): ... +class ProhibitedSqlStatementAttempted(InternalError): ... +class ProhibitedSqlStatementAttemptedExt(InternalError): ... +class ProtocolViolation(OperationalError): ... +class QueryCanceledError(OperationalError): ... +class RaiseException(InternalError): ... +class ReadOnlySqlTransaction(InternalError): ... +class ReadingSqlDataNotPermitted(InternalError): ... +class ReadingSqlDataNotPermittedExt(InternalError): ... +class ReservedName(ProgrammingError): ... +class RestrictViolation(IntegrityError): ... +class SavepointException(InternalError): ... +class SchemaAndDataStatementMixingNotSupported(InternalError): ... +class SequenceGeneratorLimitExceeded(DataError): ... +class SingletonSqlJsonItemRequired(DataError): ... +class SqlJsonArrayNotFound(DataError): ... +class SqlJsonMemberNotFound(DataError): ... +class SqlJsonNumberNotFound(DataError): ... +class SqlJsonObjectNotFound(DataError): ... +class SqlJsonScalarRequired(DataError): ... +class SqlRoutineException(InternalError): ... +class SqlclientUnableToEstablishSqlconnection(OperationalError): ... +class SqlserverRejectedEstablishmentOfSqlconnection(OperationalError): ... +class SrfProtocolViolated(InternalError): ... +class StatementTooComplex(OperationalError): ... +class StringDataLengthMismatch(DataError): ... +class StringDataRightTruncation(DataError): ... +class SubstringError(DataError): ... +class SyntaxError(ProgrammingError): ... +class SyntaxErrorOrAccessRuleViolation(ProgrammingError): ... +class SystemError(OperationalError): ... +class TooManyArguments(OperationalError): ... +class TooManyColumns(OperationalError): ... +class TooManyConnections(OperationalError): ... +class TooManyJsonArrayElements(DataError): ... +class TooManyJsonObjectMembers(DataError): ... +class TooManyRows(InternalError): ... +class TransactionResolutionUnknown(OperationalError): ... +class TransactionRollbackError(OperationalError): ... +class TriggerProtocolViolated(InternalError): ... +class TriggeredDataChangeViolation(OperationalError): ... +class TrimError(DataError): ... +class UndefinedColumn(ProgrammingError): ... +class UndefinedFile(OperationalError): ... +class UndefinedFunction(ProgrammingError): ... +class UndefinedObject(ProgrammingError): ... +class UndefinedParameter(ProgrammingError): ... +class UndefinedTable(ProgrammingError): ... +class UniqueViolation(IntegrityError): ... +class UnsafeNewEnumValueUsage(OperationalError): ... +class UnterminatedCString(DataError): ... +class UntranslatableCharacter(DataError): ... +class WindowingError(ProgrammingError): ... +class WithCheckOptionViolation(ProgrammingError): ... +class WrongObjectType(ProgrammingError): ... +class ZeroLengthCharacterString(DataError): ... +class DeadlockDetected(TransactionRollbackError): ... +class QueryCanceled(QueryCanceledError): ... +class SerializationFailure(TransactionRollbackError): ... +class StatementCompletionUnknown(TransactionRollbackError): ... +class TransactionIntegrityConstraintViolation(TransactionRollbackError): ... +class TransactionRollback(TransactionRollbackError): ... + +def lookup(code): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/extensions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/extensions.pyi new file mode 100644 index 00000000..2dcac822 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/extensions.pyi @@ -0,0 +1,115 @@ +from _typeshed import Incomplete +from typing import Any + +from psycopg2._psycopg import ( + BINARYARRAY as BINARYARRAY, + BOOLEAN as BOOLEAN, + BOOLEANARRAY as BOOLEANARRAY, + BYTES as BYTES, + BYTESARRAY as BYTESARRAY, + DATE as DATE, + DATEARRAY as DATEARRAY, + DATETIMEARRAY as DATETIMEARRAY, + DECIMAL as DECIMAL, + DECIMALARRAY as DECIMALARRAY, + FLOAT as FLOAT, + FLOATARRAY as FLOATARRAY, + INTEGER as INTEGER, + INTEGERARRAY as INTEGERARRAY, + INTERVAL as INTERVAL, + INTERVALARRAY as INTERVALARRAY, + LONGINTEGER as LONGINTEGER, + LONGINTEGERARRAY as LONGINTEGERARRAY, + PYDATE as PYDATE, + PYDATEARRAY as PYDATEARRAY, + PYDATETIME as PYDATETIME, + PYDATETIMEARRAY as PYDATETIMEARRAY, + PYDATETIMETZ as PYDATETIMETZ, + PYDATETIMETZARRAY as PYDATETIMETZARRAY, + PYINTERVAL as PYINTERVAL, + PYINTERVALARRAY as PYINTERVALARRAY, + PYTIME as PYTIME, + PYTIMEARRAY as PYTIMEARRAY, + ROWIDARRAY as ROWIDARRAY, + STRINGARRAY as STRINGARRAY, + TIME as TIME, + TIMEARRAY as TIMEARRAY, + UNICODE as UNICODE, + UNICODEARRAY as UNICODEARRAY, + AsIs as AsIs, + Binary as Binary, + Boolean as Boolean, + Column as Column, + ConnectionInfo as ConnectionInfo, + DateFromPy as DateFromPy, + Diagnostics as Diagnostics, + Float as Float, + Int as Int, + IntervalFromPy as IntervalFromPy, + ISQLQuote as ISQLQuote, + Notify as Notify, + QueryCanceledError as QueryCanceledError, + QuotedString as QuotedString, + TimeFromPy as TimeFromPy, + TimestampFromPy as TimestampFromPy, + TransactionRollbackError as TransactionRollbackError, + Xid as Xid, + adapt as adapt, + adapters as adapters, + binary_types as binary_types, + connection as connection, + cursor as cursor, + encodings as encodings, + encrypt_password as encrypt_password, + get_wait_callback as get_wait_callback, + libpq_version as libpq_version, + lobject as lobject, + new_array_type as new_array_type, + new_type as new_type, + parse_dsn as parse_dsn, + quote_ident as quote_ident, + register_type as register_type, + set_wait_callback as set_wait_callback, + string_types as string_types, +) + +ISOLATION_LEVEL_AUTOCOMMIT: int +ISOLATION_LEVEL_READ_UNCOMMITTED: int +ISOLATION_LEVEL_READ_COMMITTED: int +ISOLATION_LEVEL_REPEATABLE_READ: int +ISOLATION_LEVEL_SERIALIZABLE: int +ISOLATION_LEVEL_DEFAULT: Any +STATUS_SETUP: int +STATUS_READY: int +STATUS_BEGIN: int +STATUS_SYNC: int +STATUS_ASYNC: int +STATUS_PREPARED: int +STATUS_IN_TRANSACTION: int +POLL_OK: int +POLL_READ: int +POLL_WRITE: int +POLL_ERROR: int +TRANSACTION_STATUS_IDLE: int +TRANSACTION_STATUS_ACTIVE: int +TRANSACTION_STATUS_INTRANS: int +TRANSACTION_STATUS_INERROR: int +TRANSACTION_STATUS_UNKNOWN: int + +def register_adapter(typ, callable) -> None: ... + +class SQL_IN: + def __init__(self, seq) -> None: ... + def prepare(self, conn) -> None: ... + def getquoted(self): ... + +class NoneAdapter: + def __init__(self, obj) -> None: ... + def getquoted(self, _null: bytes = ...): ... + +def make_dsn(dsn: Incomplete | None = ..., **kwargs): ... 
+ +JSON: Any +JSONARRAY: Any +JSONB: Any +JSONBARRAY: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/extras.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/extras.pyi new file mode 100644 index 00000000..6de89e60 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/extras.pyi @@ -0,0 +1,236 @@ +from _typeshed import Incomplete +from collections import OrderedDict +from collections.abc import Callable +from typing import Any, NamedTuple, TypeVar, overload + +from psycopg2._ipaddress import register_ipaddress as register_ipaddress +from psycopg2._json import ( + Json as Json, + register_default_json as register_default_json, + register_default_jsonb as register_default_jsonb, + register_json as register_json, +) +from psycopg2._psycopg import ( + REPLICATION_LOGICAL as REPLICATION_LOGICAL, + REPLICATION_PHYSICAL as REPLICATION_PHYSICAL, + ReplicationConnection as _replicationConnection, + ReplicationCursor as _replicationCursor, + ReplicationMessage as ReplicationMessage, +) +from psycopg2._range import ( + DateRange as DateRange, + DateTimeRange as DateTimeRange, + DateTimeTZRange as DateTimeTZRange, + NumericRange as NumericRange, + Range as Range, + RangeAdapter as RangeAdapter, + RangeCaster as RangeCaster, + register_range as register_range, +) + +from .extensions import connection as _connection, cursor as _cursor, quote_ident as quote_ident + +_T_cur = TypeVar("_T_cur", bound=_cursor) + +class DictCursorBase(_cursor): + def __init__(self, *args, **kwargs) -> None: ... + +class DictConnection(_connection): + @overload + def cursor(self, name: str | bytes | None = ..., *, withhold: bool = ..., scrollable: bool | None = ...) -> DictCursor: ... + @overload + def cursor( + self, + name: str | bytes | None = ..., + *, + cursor_factory: Callable[..., _T_cur], + withhold: bool = ..., + scrollable: bool | None = ..., + ) -> _T_cur: ... + @overload + def cursor( + self, name: str | bytes | None, cursor_factory: Callable[..., _T_cur], withhold: bool = ..., scrollable: bool | None = ... + ) -> _T_cur: ... + +class DictCursor(DictCursorBase): + def __init__(self, *args, **kwargs) -> None: ... + index: Any + def execute(self, query, vars: Incomplete | None = ...): ... + def callproc(self, procname, vars: Incomplete | None = ...): ... + def fetchone(self) -> DictRow | None: ... # type: ignore[override] + def fetchmany(self, size: int | None = ...) -> list[DictRow]: ... # type: ignore[override] + def fetchall(self) -> list[DictRow]: ... # type: ignore[override] + def __next__(self) -> DictRow: ... # type: ignore[override] + +class DictRow(list[Any]): + def __init__(self, cursor) -> None: ... + def __getitem__(self, x): ... + def __setitem__(self, x, v) -> None: ... + def items(self): ... + def keys(self): ... + def values(self): ... + def get(self, x, default: Incomplete | None = ...): ... + def copy(self): ... + def __contains__(self, x): ... + def __reduce__(self): ... + +class RealDictConnection(_connection): + @overload + def cursor( + self, name: str | bytes | None = ..., *, withhold: bool = ..., scrollable: bool | None = ... + ) -> RealDictCursor: ... + @overload + def cursor( + self, + name: str | bytes | None = ..., + *, + cursor_factory: Callable[..., _T_cur], + withhold: bool = ..., + scrollable: bool | None = ..., + ) -> _T_cur: ... 
+ @overload + def cursor( + self, name: str | bytes | None, cursor_factory: Callable[..., _T_cur], withhold: bool = ..., scrollable: bool | None = ... + ) -> _T_cur: ... + +class RealDictCursor(DictCursorBase): + def __init__(self, *args, **kwargs) -> None: ... + column_mapping: Any + def execute(self, query, vars: Incomplete | None = ...): ... + def callproc(self, procname, vars: Incomplete | None = ...): ... + def fetchone(self) -> RealDictRow | None: ... # type: ignore[override] + def fetchmany(self, size: int | None = ...) -> list[RealDictRow]: ... # type: ignore[override] + def fetchall(self) -> list[RealDictRow]: ... # type: ignore[override] + def __next__(self) -> RealDictRow: ... # type: ignore[override] + +class RealDictRow(OrderedDict[Any, Any]): + def __init__(self, *args, **kwargs) -> None: ... + def __setitem__(self, key, value) -> None: ... + +class NamedTupleConnection(_connection): + @overload + def cursor( + self, name: str | bytes | None = ..., *, withhold: bool = ..., scrollable: bool | None = ... + ) -> NamedTupleCursor: ... + @overload + def cursor( + self, + name: str | bytes | None = ..., + *, + cursor_factory: Callable[..., _T_cur], + withhold: bool = ..., + scrollable: bool | None = ..., + ) -> _T_cur: ... + @overload + def cursor( + self, name: str | bytes | None, cursor_factory: Callable[..., _T_cur], withhold: bool = ..., scrollable: bool | None = ... + ) -> _T_cur: ... + +class NamedTupleCursor(_cursor): + Record: Any + MAX_CACHE: int + def execute(self, query, vars: Incomplete | None = ...): ... + def executemany(self, query, vars): ... + def callproc(self, procname, vars: Incomplete | None = ...): ... + def fetchone(self) -> NamedTuple | None: ... # type: ignore[override] + def fetchmany(self, size: int | None = ...) -> list[NamedTuple]: ... # type: ignore[override] + def fetchall(self) -> list[NamedTuple]: ... # type: ignore[override] + def __next__(self) -> NamedTuple: ... # type: ignore[override] + +class LoggingConnection(_connection): + log: Any + def initialize(self, logobj) -> None: ... + def filter(self, msg, curs): ... + def cursor(self, *args, **kwargs): ... + +class LoggingCursor(_cursor): + def execute(self, query, vars: Incomplete | None = ...): ... + def callproc(self, procname, vars: Incomplete | None = ...): ... + +class MinTimeLoggingConnection(LoggingConnection): + def initialize(self, logobj, mintime: int = ...) -> None: ... + def filter(self, msg, curs): ... + def cursor(self, *args, **kwargs): ... + +class MinTimeLoggingCursor(LoggingCursor): + timestamp: Any + def execute(self, query, vars: Incomplete | None = ...): ... + def callproc(self, procname, vars: Incomplete | None = ...): ... + +class LogicalReplicationConnection(_replicationConnection): + def __init__(self, *args, **kwargs) -> None: ... + +class PhysicalReplicationConnection(_replicationConnection): + def __init__(self, *args, **kwargs) -> None: ... + +class StopReplication(Exception): ... + +class ReplicationCursor(_replicationCursor): + def create_replication_slot( + self, slot_name, slot_type: Incomplete | None = ..., output_plugin: Incomplete | None = ... + ) -> None: ... + def drop_replication_slot(self, slot_name) -> None: ... + def start_replication( + self, + slot_name: Incomplete | None = ..., + slot_type: Incomplete | None = ..., + start_lsn: int = ..., + timeline: int = ..., + options: Incomplete | None = ..., + decode: bool = ..., + status_interval: int = ..., + ) -> None: ... + def fileno(self): ... + +class UUID_adapter: + def __init__(self, uuid) -> None: ... 
+ def __conform__(self, proto): ... + def getquoted(self): ... + +def register_uuid(oids: Incomplete | None = ..., conn_or_curs: Incomplete | None = ...): ... + +class Inet: + addr: Any + def __init__(self, addr) -> None: ... + def prepare(self, conn) -> None: ... + def getquoted(self): ... + def __conform__(self, proto): ... + +def register_inet(oid: Incomplete | None = ..., conn_or_curs: Incomplete | None = ...): ... +def wait_select(conn) -> None: ... + +class HstoreAdapter: + wrapped: Any + def __init__(self, wrapped) -> None: ... + conn: Any + getquoted: Any + def prepare(self, conn) -> None: ... + @classmethod + def parse(cls, s, cur, _bsdec=...): ... + @classmethod + def parse_unicode(cls, s, cur): ... + @classmethod + def get_oids(cls, conn_or_curs): ... + +def register_hstore( + conn_or_curs, globally: bool = ..., unicode: bool = ..., oid: Incomplete | None = ..., array_oid: Incomplete | None = ... +) -> None: ... + +class CompositeCaster: + name: Any + schema: Any + oid: Any + array_oid: Any + attnames: Any + atttypes: Any + typecaster: Any + array_typecaster: Any + def __init__(self, name, oid, attrs, array_oid: Incomplete | None = ..., schema: Incomplete | None = ...) -> None: ... + def parse(self, s, curs): ... + def make(self, values): ... + @classmethod + def tokenize(cls, s): ... + +def register_composite(name, conn_or_curs, globally: bool = ..., factory: Incomplete | None = ...): ... +def execute_batch(cur, sql, argslist, page_size: int = ...) -> None: ... +def execute_values(cur, sql, argslist, template: Incomplete | None = ..., page_size: int = ..., fetch: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/pool.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/pool.pyi new file mode 100644 index 00000000..94c7b1c5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/pool.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from typing import Any + +import psycopg2 + +class PoolError(psycopg2.Error): ... + +class AbstractConnectionPool: + minconn: Any + maxconn: Any + closed: bool + def __init__(self, minconn, maxconn, *args, **kwargs) -> None: ... + # getconn, putconn and closeall are officially documented as methods of the + # abstract base class, but in reality, they only exist on the children classes + def getconn(self, key: Incomplete | None = ...): ... + def putconn(self, conn: Any, key: Incomplete | None = ..., close: bool = ...) -> None: ... + def closeall(self) -> None: ... + +class SimpleConnectionPool(AbstractConnectionPool): ... + +class ThreadedConnectionPool(AbstractConnectionPool): + # This subclass has a default value for conn which doesn't exist + # in the SimpleConnectionPool class, nor in the documentation + def putconn(self, conn: Incomplete | None = ..., key: Incomplete | None = ..., close: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/sql.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/sql.pyi new file mode 100644 index 00000000..31f1ec25 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/sql.pyi @@ -0,0 +1,50 @@ +from _typeshed import Incomplete +from collections.abc import Iterator +from typing import Any + +class Composable: + def __init__(self, wrapped) -> None: ... 
+ def as_string(self, context) -> str: ... + def __add__(self, other) -> Composed: ... + def __mul__(self, n) -> Composed: ... + def __eq__(self, other) -> bool: ... + def __ne__(self, other) -> bool: ... + +class Composed(Composable): + def __init__(self, seq) -> None: ... + @property + def seq(self) -> list[Composable]: ... + def as_string(self, context) -> str: ... + def __iter__(self) -> Iterator[Composable]: ... + def __add__(self, other) -> Composed: ... + def join(self, joiner) -> Composed: ... + +class SQL(Composable): + def __init__(self, string) -> None: ... + @property + def string(self) -> str: ... + def as_string(self, context) -> str: ... + def format(self, *args, **kwargs) -> Composed: ... + def join(self, seq) -> Composed: ... + +class Identifier(Composable): + def __init__(self, *strings) -> None: ... + @property + def strings(self) -> tuple[str, ...]: ... + @property + def string(self) -> str: ... + def as_string(self, context) -> str: ... + +class Literal(Composable): + @property + def wrapped(self): ... + def as_string(self, context) -> str: ... + +class Placeholder(Composable): + def __init__(self, name: Incomplete | None = ...) -> None: ... + @property + def name(self) -> str | None: ... + def as_string(self, context) -> str: ... + +NULL: Any +DEFAULT: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/tz.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/tz.pyi new file mode 100644 index 00000000..f7e658df --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/psycopg2/psycopg2/tz.pyi @@ -0,0 +1,26 @@ +import datetime +from _typeshed import Incomplete +from typing import Any + +ZERO: Any + +class FixedOffsetTimezone(datetime.tzinfo): + def __init__(self, offset: Incomplete | None = ..., name: Incomplete | None = ...) -> None: ... + def __new__(cls, offset: Incomplete | None = ..., name: Incomplete | None = ...): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __getinitargs__(self): ... + def utcoffset(self, dt): ... + def tzname(self, dt): ... + def dst(self, dt): ... + +STDOFFSET: Any +DSTOFFSET: Any +DSTDIFF: Any + +class LocalTimezone(datetime.tzinfo): + def utcoffset(self, dt): ... + def dst(self, dt): ... + def tzname(self, dt): ... 
+ +LOCAL: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..ade54ad2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +OpenSSL.SSL.Context.__getattr__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/METADATA.toml new file mode 100644 index 00000000..f1c9279f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/METADATA.toml @@ -0,0 +1,6 @@ +version = "23.0.*" +# Requires a version of cryptography with a `py.typed` file +requires = ["cryptography>=35.0.0"] + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/SSL.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/SSL.pyi new file mode 100644 index 00000000..7754145a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/SSL.pyi @@ -0,0 +1,200 @@ +import socket +from _socket import _Address, _RetAddress +from _typeshed import Incomplete, ReadableBuffer +from collections.abc import Callable, MutableSequence, Sequence +from typing import Any, TypeVar + +from OpenSSL.crypto import X509, PKey, X509Name + +OPENSSL_VERSION_NUMBER: int +SSLEAY_VERSION: int +SSLEAY_CFLAGS: int +SSLEAY_PLATFORM: int +SSLEAY_DIR: int +SSLEAY_BUILT_ON: int + +SENT_SHUTDOWN: int +RECEIVED_SHUTDOWN: int + +SSLv23_METHOD: int +TLSv1_METHOD: int +TLSv1_1_METHOD: int +TLSv1_2_METHOD: int + +TLS_METHOD: int +TLS_SERVER_METHOD: int +TLS_CLIENT_METHOD: int + +SSL3_VERSION: int +TLS1_VERSION: int +TLS1_1_VERSION: int +TLS1_2_VERSION: int +TLS1_3_VERSION: int + +OP_NO_SSLv2: int +OP_NO_SSLv3: int +OP_NO_TLSv1: int +OP_NO_TLSv1_1: int +OP_NO_TLSv1_2: int +OP_NO_TLSv1_3: int + +MODE_RELEASE_BUFFERS: int + +OP_SINGLE_DH_USE: int +OP_SINGLE_ECDH_USE: int +OP_EPHEMERAL_RSA: int +OP_MICROSOFT_SESS_ID_BUG: int +OP_NETSCAPE_CHALLENGE_BUG: int +OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: int + +OP_SSLREF2_REUSE_CERT_TYPE_BUG: int +OP_MICROSOFT_BIG_SSLV3_BUFFER: int +OP_MSIE_SSLV2_RSA_PADDING: int +OP_SSLEAY_080_CLIENT_DH_BUG: int +OP_TLS_D5_BUG: int +OP_TLS_BLOCK_PADDING_BUG: int +OP_DONT_INSERT_EMPTY_FRAGMENTS: int +OP_CIPHER_SERVER_PREFERENCE: int +OP_TLS_ROLLBACK_BUG: int +OP_PKCS1_CHECK_1: int +OP_PKCS1_CHECK_2: int +OP_NETSCAPE_CA_DN_BUG: int +OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: int + +OP_NO_COMPRESSION: int + +OP_NO_QUERY_MTU: int +OP_COOKIE_EXCHANGE: int +OP_NO_TICKET: int + +OP_ALL: int + +VERIFY_PEER: int +VERIFY_FAIL_IF_NO_PEER_CERT: int +VERIFY_CLIENT_ONCE: int +VERIFY_NONE: int + +SESS_CACHE_OFF: int +SESS_CACHE_CLIENT: int +SESS_CACHE_SERVER: int +SESS_CACHE_BOTH: int +SESS_CACHE_NO_AUTO_CLEAR: int +SESS_CACHE_NO_INTERNAL_LOOKUP: int +SESS_CACHE_NO_INTERNAL_STORE: int +SESS_CACHE_NO_INTERNAL: int + +SSL_ST_CONNECT: int +SSL_ST_ACCEPT: int +SSL_ST_MASK: int + +SSL_CB_LOOP: int +SSL_CB_EXIT: int +SSL_CB_READ: int +SSL_CB_WRITE: int +SSL_CB_ALERT: int +SSL_CB_READ_ALERT: int +SSL_CB_WRITE_ALERT: int +SSL_CB_ACCEPT_LOOP: int +SSL_CB_ACCEPT_EXIT: int 
+SSL_CB_CONNECT_LOOP: int +SSL_CB_CONNECT_EXIT: int +SSL_CB_HANDSHAKE_START: int +SSL_CB_HANDSHAKE_DONE: int + +NO_OVERLAPPING_PROTOCOLS: object + +class Error(Exception): ... +class WantReadError(Error): ... +class WantWriteError(Error): ... +class WantX509LookupError(Error): ... +class ZeroReturnError(Error): ... +class SysCallError(Error): ... + +def SSLeay_version(type: int) -> str: ... + +class Session: ... + +class Connection: + def __getattr__(self, name: str) -> Any: ... # takes attributes from `self._socket` + def __init__(self, context: Context, socket: socket.socket | None = ...) -> None: ... + def get_context(self) -> Context: ... + def set_context(self, context: Context) -> None: ... + def get_servername(self) -> bytes | None: ... + def set_tlsext_host_name(self, name: bytes) -> None: ... + def pending(self) -> int: ... + def send(self, buf: ReadableBuffer | str, flags: int = ...) -> int: ... + write = send + def sendall(self, buf: ReadableBuffer | str, flags: int = ...) -> int: ... + def recv(self, bufsiz: int, flags: int | None = ...) -> bytes: ... + read = recv + def recv_into(self, buffer: MutableSequence[int], nbytes: int | None = ..., flags: int | None = ...) -> int: ... + def connect(self, addr: str | bytes | Sequence[str | int]) -> None: ... + def connect_ex(self, addr: _Address | bytes) -> int: ... + def accept(self) -> tuple[Connection, _RetAddress]: ... + def shutdown(self) -> bool: ... + def do_handshake(self) -> None: ... + def get_certificate(self) -> X509 | None: ... + def get_peer_certificate(self) -> X509 | None: ... + def get_peer_cert_chain(self) -> list[X509] | None: ... + def get_verified_chain(self) -> list[X509] | None: ... + def bio_read(self, bufsiz: int) -> bytes: ... + def bio_write(self, buf: bytes) -> int: ... + def bio_shutdown(self) -> None: ... + def renegotiate(self) -> bool: ... + def renegotiate_pending(self) -> bool: ... + def total_renegotiations(self) -> int: ... + def set_accept_state(self) -> None: ... + def set_connect_state(self) -> None: ... + def get_client_ca_list(self) -> list[X509Name]: ... + def get_cipher_list(self) -> list[str]: ... + def get_cipher_name(self) -> str | None: ... + def get_cipher_bits(self) -> int | None: ... + def get_cipher_version(self) -> str | None: ... + def get_protocol_version_name(self) -> str: ... + def get_protocol_version(self) -> int: ... + def get_shutdown(self) -> int: ... + def set_shutdown(self, state: int) -> None: ... + def get_state_string(self) -> bytes: ... + def server_random(self) -> bytes | None: ... + def client_random(self) -> bytes | None: ... + def master_key(self) -> bytes | None: ... + def export_keying_material( + self, label: Incomplete, olen: Incomplete, context: Incomplete = ... + ) -> Incomplete: ... # TODO: type, see RFC-5705 + def get_app_data(self) -> Any: ... + def set_app_data(self, data: Any) -> None: ... + def sock_shutdown(self, __how: int) -> None: ... # alias to `_socket.socket.shutdown` + def want_read(self) -> bool: ... + def want_write(self) -> bool: ... + def get_session(self) -> Session | None: ... + def set_session(self, session: Session) -> None: ... + def get_finished(self) -> bytes | None: ... + def get_peer_finished(self) -> bytes | None: ... + def set_alpn_protos(self, protos: Sequence[bytes]) -> None: ... + def get_alpn_proto_negotiated(self) -> bytes: ... + def request_ocsp(self) -> None: ... + +_T = TypeVar("_T") + +class Context: + def __getattr__(self, name: str) -> Incomplete: ... + def __init__(self, method: int) -> None: ... 
+ def load_verify_locations(self, cafile: str | None, capath: str | None = ...) -> None: ... + def set_options(self, options: int) -> None: ... + def set_verify(self, mode: int, callback: Callable[[Connection, X509, int, int, int], bool] | None = ...) -> None: ... + def set_min_proto_version(self, version: int) -> None: ... + def set_max_proto_version(self, version: int) -> None: ... + def use_certificate_chain_file(self, certfile: str | bytes) -> None: ... + def use_certificate_file(self, certfile: str | bytes, filetype: int = ...) -> None: ... + def use_certificate(self, cert: X509) -> None: ... + def use_privatekey_file(self, keyfile: str | bytes, filetype: int | None = ...) -> None: ... + def use_privatekey(self, pkey: PKey) -> None: ... + def add_extra_chain_cert(self, certobj: X509) -> None: ... + def set_cipher_list(self, cipher_list: bytes) -> None: ... + def set_keylog_callback(self, callback: Callable[[Connection, bytes], object]) -> None: ... + def set_alpn_protos(self, protos: Sequence[bytes]) -> None: ... + def set_alpn_select_callback(self, callback: Callable[[Connection, list[bytes]], bytes]) -> None: ... + def set_ocsp_server_callback(self, callback: Callable[[Connection, _T | None], bytes], data: _T | None = ...) -> None: ... + def set_ocsp_client_callback( + self, callback: Callable[[Connection, bytes, _T | None], bool], data: _T | None = ... + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/crypto.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/crypto.pyi new file mode 100644 index 00000000..8d02de69 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/crypto.pyi @@ -0,0 +1,208 @@ +from _typeshed import Incomplete, StrOrBytesPath +from collections.abc import Callable, Iterable, Sequence +from datetime import datetime +from typing import Any +from typing_extensions import TypeAlias + +from cryptography.hazmat.primitives.asymmetric.dsa import DSAPrivateKey, DSAPublicKey +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey +from cryptography.x509 import Certificate, CertificateRevocationList, CertificateSigningRequest + +_Key: TypeAlias = DSAPrivateKey | DSAPublicKey | RSAPrivateKey | RSAPublicKey + +FILETYPE_PEM: int +FILETYPE_ASN1: int +FILETYPE_TEXT: int + +TYPE_RSA: int +TYPE_DSA: int + +class _EllipticCurve: + def __init__(self, lib: Incomplete | None, nid: int, name: str) -> None: ... + +class Error(Exception): ... + +class PKey: + def __init__(self) -> None: ... + def bits(self) -> int: ... + def check(self) -> bool: ... + @classmethod + def from_cryptography_key(cls, crypto_key: _Key) -> PKey: ... + def generate_key(self, type: int, bits: int) -> None: ... + def to_cryptography_key(self) -> _Key: ... + def type(self) -> int: ... + +class X509Name: + countryName: str + C: str + stateOrProvinceName: str + ST: str + localityName: str + L: str + organizationName: str + O: str + organizationalUnitName: str + OU: str + commonName: str + CN: str + emailAddress: str + def __init__(self, name: X509Name) -> None: ... + def der(self) -> bytes: ... 
+ def get_components(self) -> list[tuple[bytes, bytes]]: ... + def hash(self) -> int: ... + +class X509: + def __init__(self) -> None: ... + def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ... + def digest(self, digest_name: str) -> bytes: ... + @classmethod + def from_cryptography(cls, crypto_cert: Certificate) -> X509: ... + def get_extension(self, index: int) -> X509Extension: ... + def get_extension_count(self) -> int: ... + def get_issuer(self) -> X509Name: ... + def get_notAfter(self) -> bytes | None: ... + def get_notBefore(self) -> bytes | None: ... + def get_pubkey(self) -> PKey: ... + def get_serial_number(self) -> int: ... + def get_signature_algorithm(self) -> bytes: ... + def get_subject(self) -> X509Name: ... + def get_version(self) -> int: ... + def gmtime_adj_notAfter(self, amount: int) -> None: ... + def gmtime_adj_notBefore(self, amount: int) -> None: ... + def has_expired(self) -> bool: ... + def set_issuer(self, issuer: X509Name) -> None: ... + def set_notAfter(self, when: bytes) -> None: ... + def set_notBefore(self, when: bytes) -> None: ... + def set_pubkey(self, pkey: PKey) -> None: ... + def set_serial_number(self, serial: int) -> None: ... + def set_subject(self, subject: X509Name) -> None: ... + def set_version(self, version: int) -> None: ... + def sign(self, pkey: PKey, digest: str) -> None: ... + def subject_name_hash(self) -> bytes: ... + def to_cryptography(self) -> Certificate: ... + +class X509Req: + def __init__(self) -> None: ... + def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ... + @classmethod + def from_cryptography(cls, crypto_req: CertificateSigningRequest) -> X509Req: ... + def get_extensions(self) -> list[X509Extension]: ... + def get_pubkey(self) -> PKey: ... + def get_subject(self) -> X509Name: ... + def get_version(self) -> int: ... + def set_pubkey(self, pkey: PKey) -> None: ... + def set_version(self, version: int) -> None: ... + def sign(self, pkey: PKey, digest: str) -> None: ... + def to_cryptography(self) -> CertificateSigningRequest: ... + def verify(self, pkey: PKey) -> bool: ... + +class X509Extension: + def __init__( + self, type_name: bytes, critical: bool, value: bytes, subject: X509 | None = ..., issuer: X509 | None = ... + ) -> None: ... + def get_critical(self) -> bool: ... + def get_data(self) -> bytes: ... + def get_short_name(self) -> bytes: ... + +class Revoked: + def __init__(self) -> None: ... + def all_reasons(self) -> list[bytes]: ... + def get_reason(self) -> bytes | None: ... + def get_rev_date(self) -> bytes: ... + def get_serial(self) -> bytes: ... + def set_reason(self, reason: bytes | None) -> None: ... + def set_rev_date(self, when: bytes) -> None: ... + def set_serial(self, hex_str: bytes) -> None: ... + +class CRL: + def __init__(self) -> None: ... + def add_revoked(self, revoked: Revoked) -> None: ... + def export(self, cert: X509, key: PKey, type: int = ..., days: int = ..., digest: bytes = ...) -> bytes: ... + @classmethod + def from_cryptography(cls, crypto_crl: CertificateRevocationList) -> CRL: ... + def get_issuer(self) -> X509Name: ... + def get_revoked(self) -> tuple[Revoked, ...]: ... + def set_lastUpdate(self, when: bytes) -> None: ... + def set_nextUpdate(self, when: bytes) -> None: ... + def set_version(self, version: int) -> None: ... + def sign(self, issuer_cert: X509, issuer_key: PKey, digest: bytes) -> None: ... + def to_cryptography(self) -> CertificateRevocationList: ... + +class X509Store: + def __init__(self) -> None: ... 
+ def add_cert(self, cert: X509) -> None: ... + def add_crl(self, crl: CRL) -> None: ... + def load_locations(self, cafile: StrOrBytesPath, capath: StrOrBytesPath | None = ...) -> None: ... + def set_flags(self, flags: int) -> None: ... + def set_time(self, vfy_time: datetime) -> None: ... + +class X509StoreContext: + def __init__(self, store: X509Store, certificate: X509, chain: Sequence[X509] | None = ...) -> None: ... + def get_verified_chain(self) -> list[X509]: ... + def set_store(self, store: X509Store) -> None: ... + def verify_certificate(self) -> None: ... + +class X509StoreContextError(Exception): + errors: list[Any] + certificate: X509 + def __init__(self, message: str, errors: list[Any], certificate: X509) -> None: ... + +class X509StoreFlags: + CRL_CHECK: int + CRL_CHECK_ALL: int + IGNORE_CRITICAL: int + X509_STRICT: int + ALLOW_PROXY_CERTS: int + POLICY_CHECK: int + EXPLICIT_POLICY: int + INHIBIT_MAP: int + NOTIFY_POLICY: int + CHECK_SS_SIGNATURE: int + CB_ISSUER_CHECK: int + PARTIAL_CHAIN: int + +class PKCS7: + def get_type_name(self) -> str: ... + def type_is_data(self) -> bool: ... + def type_is_enveloped(self) -> bool: ... + def type_is_signed(self) -> bool: ... + def type_is_signedAndEnveloped(self) -> bool: ... + +class PKCS12: + def __init__(self) -> None: ... + def export(self, passphrase: bytes | None = ..., iter: int = ..., maciter: int = ...) -> bytes: ... + def get_ca_certificates(self) -> tuple[X509, ...]: ... + def get_certificate(self) -> X509: ... + def get_friendlyname(self) -> bytes | None: ... + def get_privatekey(self) -> PKey: ... + def set_ca_certificates(self, cacerts: Iterable[X509] | None) -> None: ... + def set_certificate(self, cert: X509) -> None: ... + def set_friendlyname(self, name: bytes | None) -> None: ... + def set_privatekey(self, pkey: PKey) -> None: ... + +class NetscapeSPKI: + def __init__(self) -> None: ... + def b64_encode(self) -> bytes: ... + def get_pubkey(self) -> PKey: ... + def set_pubkey(self, pkey: PKey) -> None: ... + def sign(self, pkey: PKey, digest: str) -> None: ... + def verify(self, key: PKey) -> bool: ... + +def get_elliptic_curves() -> set[_EllipticCurve]: ... +def get_elliptic_curve(name: str) -> _EllipticCurve: ... +def dump_certificate(type: int, cert: X509) -> bytes: ... +def load_certificate(type: int, buffer: bytes) -> X509: ... +def dump_certificate_request(type: int, req: X509Req) -> bytes: ... +def load_certificate_request(type: int, buffer: bytes) -> X509Req: ... +def dump_privatekey( + type: int, pkey: PKey, cipher: str | None = ..., passphrase: bytes | Callable[[], bytes] | None = ... +) -> bytes: ... +def load_privatekey(type: int, buffer: str | bytes, passphrase: bytes | Callable[[], bytes] | None = ...) -> PKey: ... +def dump_publickey(type: int, pkey: PKey) -> bytes: ... +def load_publickey(type: int, buffer: str | bytes) -> PKey: ... +def dump_crl(type: int, crl: CRL) -> bytes: ... +def load_crl(type: int, buffer: str | bytes) -> CRL: ... +def load_pkcs7_data(type: int, buffer: str | bytes) -> PKCS7: ... +def load_pkcs12(buffer: str | bytes, passphrase: bytes | None = ...) -> PKCS12: ... +def sign(pkey: PKey, data: str | bytes, digest: str) -> bytes: ... +def verify(cert: X509, signature: bytes, data: str | bytes, digest: str) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/rand.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/rand.pyi new file mode 100644 index 00000000..30a76ee7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/rand.pyi @@ -0,0 +1,4 @@ +from typing_extensions import Literal + +def add(buffer: bytes, entropy: int) -> None: ... +def status() -> Literal[0, 1]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/version.pyi new file mode 100644 index 00000000..c57554ea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyOpenSSL/OpenSSL/version.pyi @@ -0,0 +1,9 @@ +__version__: str + +__title__: str +__uri__: str +__summary__: str +__author__: str +__email__: str +__license__: str +__copyright__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/METADATA.toml new file mode 100644 index 00000000..bad265e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/METADATA.toml @@ -0,0 +1 @@ +version = "1.1" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/__init__.pyi new file mode 100644 index 00000000..d8c1ca5e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/__init__.pyi @@ -0,0 +1,2 @@ +from .generator import generate as generate +from .parser import parse as parse diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/generator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/generator.pyi new file mode 100644 index 00000000..9375b5b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/generator.pyi @@ -0,0 +1,3 @@ +from datetime import datetime + +def generate(dt: datetime, utc: bool = ..., accept_naive: bool = ..., microseconds: bool = ...) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/parser.pyi new file mode 100644 index 00000000..445b542c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/parser.pyi @@ -0,0 +1,3 @@ +from datetime import datetime + +def parse(timestamp: str, utc: bool = ..., produce_naive: bool = ...) -> datetime: ... 
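As a quick illustration of the small pyrfc3339 surface typed above (again an example sketch, not part of the patch, and assuming the real pyrfc3339 package is installed), generate() and parse() round-trip RFC 3339 timestamps:

from datetime import datetime, timezone

from pyrfc3339 import generate, parse

# generate(dt, utc=..., accept_naive=..., microseconds=...) -> str, per the stub;
# an aware UTC datetime should serialize to a form like "2023-01-01T12:00:00Z".
stamp = generate(datetime(2023, 1, 1, 12, 0, 0, tzinfo=timezone.utc))

# parse(timestamp, utc=..., produce_naive=...) -> datetime; aware datetimes
# compare by their UTC instant, so the round trip preserves the value.
assert parse(stamp) == datetime(2023, 1, 1, 12, 0, 0, tzinfo=timezone.utc)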
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/utils.pyi new file mode 100644 index 00000000..492d3c22 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyRFC3339/pyrfc3339/utils.pyi @@ -0,0 +1,13 @@ +from datetime import datetime, timedelta, tzinfo +from typing import Any +from typing_extensions import Self + +class FixedOffset(tzinfo): + def __init__(self, hours: float, minutes: float) -> None: ... + def dst(self, dt: datetime | None) -> timedelta: ... + def utcoffset(self, dt: datetime | None) -> timedelta: ... + def tzname(self, dt: datetime | None) -> str: ... + def __deepcopy__(self, memo: dict[int, Any]) -> Self: ... + +def timedelta_seconds(td: timedelta) -> int: ... +def timezone(utcoffset: float) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..2d7a6bbc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/@tests/stubtest_allowlist.txt @@ -0,0 +1,10 @@ +# type_check_only +pyasn1.type.base.NoValue.plug + +# typeshed typing differences with runtime collections.OrderedDict and builtins.dict +pyasn1.codec.native.encoder.SequenceEncoder.protoDict +pyasn1.codec.native.encoder.SetEncoder.protoDict + +# Attempted "__ne__" operation on ASN.1 schema object +pyasn1.type.base +pyasn1.type.univ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/METADATA.toml new file mode 100644 index 00000000..582104d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/METADATA.toml @@ -0,0 +1 @@ +version = "0.4.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/decoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/decoder.pyi new file mode 100644 index 00000000..a2544e0c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/decoder.pyi @@ -0,0 +1,334 @@ +from _typeshed import Incomplete, Unused +from abc import ABCMeta, abstractmethod +from collections.abc import Callable + +from pyasn1.type import base, char, univ, useful +from pyasn1.type.base 
import Asn1Type +from pyasn1.type.tag import TagSet + +class AbstractDecoder: + protoComponent: Asn1Type | None + @abstractmethod + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Incomplete | None = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ) -> None: ... + # Abstract, but implementation is optional + def indefLenValueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Incomplete | None = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ) -> None: ... + +class AbstractSimpleDecoder(AbstractDecoder, metaclass=ABCMeta): + @staticmethod + def substrateCollector(asn1Object, substrate, length): ... + +class ExplicitTagDecoder(AbstractSimpleDecoder): + protoComponent: univ.Any + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + def indefLenValueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + +class IntegerDecoder(AbstractSimpleDecoder): + protoComponent: univ.Integer + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Unused = ..., + substrateFun: Unused = ..., + **options, + ): ... + +class BooleanDecoder(IntegerDecoder): + protoComponent: univ.Boolean + +class BitStringDecoder(AbstractSimpleDecoder): + protoComponent: univ.BitString + supportConstructedForm: bool + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + def indefLenValueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + +class OctetStringDecoder(AbstractSimpleDecoder): + protoComponent: univ.OctetString + supportConstructedForm: bool + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + def indefLenValueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + +class NullDecoder(AbstractSimpleDecoder): + protoComponent: univ.Null + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Unused = ..., + substrateFun: Unused = ..., + **options, + ): ... 
+ +class ObjectIdentifierDecoder(AbstractSimpleDecoder): + protoComponent: univ.ObjectIdentifier + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Unused = ..., + substrateFun: Unused = ..., + **options, + ): ... + +class RealDecoder(AbstractSimpleDecoder): + protoComponent: univ.Real + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Unused = ..., + substrateFun: Unused = ..., + **options, + ): ... + +class AbstractConstructedDecoder(AbstractDecoder, metaclass=ABCMeta): + protoComponent: base.ConstructedAsn1Type | None + +class UniversalConstructedTypeDecoder(AbstractConstructedDecoder): + protoRecordComponent: univ.SequenceAndSetBase | None + protoSequenceComponent: univ.SequenceOfAndSetOfBase | None + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + def indefLenValueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + +class SequenceOrSequenceOfDecoder(UniversalConstructedTypeDecoder): + protoRecordComponent: univ.Sequence + protoSequenceComponent: univ.SequenceOf + +class SequenceDecoder(SequenceOrSequenceOfDecoder): + protoComponent: univ.Sequence + +class SequenceOfDecoder(SequenceOrSequenceOfDecoder): + protoComponent: univ.SequenceOf + +class SetOrSetOfDecoder(UniversalConstructedTypeDecoder): + protoRecordComponent: univ.Set + protoSequenceComponent: univ.SetOf + +class SetDecoder(SetOrSetOfDecoder): + protoComponent: univ.Set + +class SetOfDecoder(SetOrSetOfDecoder): + protoComponent: univ.SetOf + +class ChoiceDecoder(AbstractConstructedDecoder): + protoComponent: univ.Choice + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Incomplete | None = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + def indefLenValueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Incomplete | None = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + +class AnyDecoder(AbstractSimpleDecoder): + protoComponent: univ.Any + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Unused = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + def indefLenValueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Callable[..., Incomplete] | None = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... 
+ +class UTF8StringDecoder(OctetStringDecoder): + protoComponent: char.UTF8String + +class NumericStringDecoder(OctetStringDecoder): + protoComponent: char.NumericString + +class PrintableStringDecoder(OctetStringDecoder): + protoComponent: char.PrintableString + +class TeletexStringDecoder(OctetStringDecoder): + protoComponent: char.TeletexString + +class VideotexStringDecoder(OctetStringDecoder): + protoComponent: char.VideotexString + +class IA5StringDecoder(OctetStringDecoder): + protoComponent: char.IA5String + +class GraphicStringDecoder(OctetStringDecoder): + protoComponent: char.GraphicString + +class VisibleStringDecoder(OctetStringDecoder): + protoComponent: char.VisibleString + +class GeneralStringDecoder(OctetStringDecoder): + protoComponent: char.GeneralString + +class UniversalStringDecoder(OctetStringDecoder): + protoComponent: char.UniversalString + +class BMPStringDecoder(OctetStringDecoder): + protoComponent: char.BMPString + +class ObjectDescriptorDecoder(OctetStringDecoder): + protoComponent: useful.ObjectDescriptor + +class GeneralizedTimeDecoder(OctetStringDecoder): + protoComponent: useful.GeneralizedTime + +class UTCTimeDecoder(OctetStringDecoder): + protoComponent: useful.UTCTime + +class Decoder: + defaultErrorState: int + defaultRawDecoder: AnyDecoder + supportIndefLength: bool + def __init__(self, tagMap, typeMap=...) -> None: ... + def __call__( + self, + substrate, + asn1Spec: Asn1Type | None = ..., + tagSet: TagSet | None = ..., + length: int | None = ..., + state=..., + decodeFun: Unused = ..., + substrateFun: Callable[..., Incomplete] | None = ..., + **options, + ): ... + +decode: Decoder diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/encoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/encoder.pyi new file mode 100644 index 00000000..28156078 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/encoder.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete +from abc import abstractmethod + +from pyasn1.type.base import Asn1Type + +class AbstractItemEncoder: + supportIndefLenMode: bool + eooIntegerSubstrate: tuple[int, int] + eooOctetsSubstrate: bytes + def encodeTag(self, singleTag, isConstructed): ... + def encodeLength(self, length, defMode): ... + @abstractmethod + def encodeValue(self, value, asn1Spec, encodeFun, **options) -> None: ... + def encode(self, value, asn1Spec: Asn1Type | None = ..., encodeFun: Incomplete | None = ..., **options): ... + +class EndOfOctetsEncoder(AbstractItemEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class BooleanEncoder(AbstractItemEncoder): + supportIndefLenMode: bool + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class IntegerEncoder(AbstractItemEncoder): + supportIndefLenMode: bool + supportCompactZero: bool + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class BitStringEncoder(AbstractItemEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class OctetStringEncoder(AbstractItemEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class NullEncoder(AbstractItemEncoder): + supportIndefLenMode: bool + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... 
+ +class ObjectIdentifierEncoder(AbstractItemEncoder): + supportIndefLenMode: bool + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class RealEncoder(AbstractItemEncoder): + # Mistake in the module, should be False, but is 0 at runtime + supportIndefLenMode: int # type: ignore[assignment] + binEncBase: int + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class SequenceEncoder(AbstractItemEncoder): + omitEmptyOptionals: bool + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class SequenceOfEncoder(AbstractItemEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class ChoiceEncoder(AbstractItemEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class AnyEncoder(OctetStringEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class Encoder: + fixedDefLengthMode: bool | None + fixedChunkSize: int | None + def __init__(self, tagMap, typeMap=...) -> None: ... + def __call__(self, value, asn1Spec: Asn1Type | None = ..., **options): ... + +encode: Encoder diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/eoo.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/eoo.pyi new file mode 100644 index 00000000..b34fb621 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/ber/eoo.pyi @@ -0,0 +1,9 @@ +from pyasn1.type import base +from pyasn1.type.tag import TagSet + +class EndOfOctets(base.SimpleAsn1Type): + defaultValue: int + tagSet: TagSet + def __new__(cls, *args, **kwargs): ... + +endOfOctets: EndOfOctets diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/cer/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/cer/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/cer/decoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/cer/decoder.pyi new file mode 100644 index 00000000..22f51cdc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/cer/decoder.pyi @@ -0,0 +1,27 @@ +from _typeshed import Unused + +from pyasn1.codec.ber import decoder +from pyasn1.type import univ +from pyasn1.type.tag import TagSet + +class BooleanDecoder(decoder.AbstractSimpleDecoder): + protoComponent: univ.Boolean + def valueDecoder( + self, + substrate, + asn1Spec, + tagSet: TagSet | None = ..., + length: int | None = ..., + state: Unused = ..., + decodeFun: Unused = ..., + substrateFun: Unused = ..., + **options, + ): ... + +BitStringDecoder = decoder.BitStringDecoder +OctetStringDecoder = decoder.OctetStringDecoder +RealDecoder = decoder.RealDecoder + +class Decoder(decoder.Decoder): ... 
+ +decode: Decoder diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/cer/encoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/cer/encoder.pyi new file mode 100644 index 00000000..3b01db7d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/cer/encoder.pyi @@ -0,0 +1,40 @@ +from typing import ClassVar + +from pyasn1.codec.ber import encoder + +class BooleanEncoder(encoder.IntegerEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class RealEncoder(encoder.RealEncoder): ... + +class TimeEncoderMixIn: + Z_CHAR: ClassVar[int] + PLUS_CHAR: ClassVar[int] + MINUS_CHAR: ClassVar[int] + COMMA_CHAR: ClassVar[int] + DOT_CHAR: ClassVar[int] + ZERO_CHAR: ClassVar[int] + MIN_LENGTH: ClassVar[int] + MAX_LENGTH: ClassVar[int] + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class GeneralizedTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder): ... +class UTCTimeEncoder(TimeEncoderMixIn, encoder.OctetStringEncoder): ... + +class SetOfEncoder(encoder.SequenceOfEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class SequenceOfEncoder(encoder.SequenceOfEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class SetEncoder(encoder.SequenceEncoder): + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +class SequenceEncoder(encoder.SequenceEncoder): + omitEmptyOptionals: bool + +class Encoder(encoder.Encoder): + fixedDefLengthMode: bool + fixedChunkSize: int + +encode: Encoder diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/der/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/der/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/der/decoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/der/decoder.pyi new file mode 100644 index 00000000..68aeb599 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/der/decoder.pyi @@ -0,0 +1,12 @@ +from pyasn1.codec.cer import decoder + +class BitStringDecoder(decoder.BitStringDecoder): + supportConstructedForm: bool + +class OctetStringDecoder(decoder.OctetStringDecoder): + supportConstructedForm: bool + +class Decoder(decoder.Decoder): + supportIndefLength: bool + +decode: Decoder diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/der/encoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/der/encoder.pyi new file mode 100644 index 00000000..55c024e6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/der/encoder.pyi @@ -0,0 +1,9 @@ +from pyasn1.codec.cer import encoder + +class SetEncoder(encoder.SetEncoder): ... 
+ +class Encoder(encoder.Encoder): + fixedDefLengthMode: bool + fixedChunkSize: int + +encode: Encoder diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/native/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/native/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/native/decoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/native/decoder.pyi new file mode 100644 index 00000000..09d7b4eb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/native/decoder.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable + +class AbstractScalarDecoder: + def __call__(self, pyObject, asn1Spec, decodeFun: Unused = ..., **options): ... + +class BitStringDecoder(AbstractScalarDecoder): + def __call__(self, pyObject, asn1Spec, decodeFun: Unused = ..., **options): ... + +class SequenceOrSetDecoder: + def __call__(self, pyObject, asn1Spec, decodeFun: Callable[..., Incomplete] | None = ..., **options): ... + +class SequenceOfOrSetOfDecoder: + def __call__(self, pyObject, asn1Spec, decodeFun: Callable[..., Incomplete] | None = ..., **options): ... + +class ChoiceDecoder: + def __call__(self, pyObject, asn1Spec, decodeFun: Callable[..., Incomplete] | None = ..., **options): ... + +class Decoder: + def __init__(self, tagMap, typeMap) -> None: ... + def __call__(self, pyObject, asn1Spec, **options): ... + +decode: Decoder diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/native/encoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/native/encoder.pyi new file mode 100644 index 00000000..ec0cb54c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/codec/native/encoder.pyi @@ -0,0 +1,51 @@ +from abc import abstractmethod +from collections import OrderedDict + +class AbstractItemEncoder: + @abstractmethod + def encode(self, value, encodeFun, **options) -> None: ... + +class BooleanEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options): ... + +class IntegerEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options): ... + +class BitStringEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options): ... + +class OctetStringEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options): ... + +class TextStringEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options): ... + +class NullEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options) -> None: ... + +class ObjectIdentifierEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options): ... + +class RealEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options): ... + +class SetEncoder(AbstractItemEncoder): + protoDict = dict + def encode(self, value, encodeFun, **options): ... + +class SequenceEncoder(SetEncoder): + protoDict = OrderedDict + +class SequenceOfEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options): ... + +class ChoiceEncoder(SequenceEncoder): ... 
+ +class AnyEncoder(AbstractItemEncoder): + def encode(self, value, encodeFun, **options): ... + +class Encoder: + def __init__(self, tagMap, typeMap=...) -> None: ... + def __call__(self, value, **options): ... + +encode: Encoder diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/binary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/binary.pyi new file mode 100644 index 00000000..d034c504 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/binary.pyi @@ -0,0 +1 @@ +from builtins import bin as bin diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/calling.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/calling.pyi new file mode 100644 index 00000000..9b1d682f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/calling.pyi @@ -0,0 +1 @@ +from builtins import callable as callable diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/dateandtime.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/dateandtime.pyi new file mode 100644 index 00000000..739dbe57 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/dateandtime.pyi @@ -0,0 +1,3 @@ +from datetime import datetime + +strptime = datetime.strptime diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/integer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/integer.pyi new file mode 100644 index 00000000..b7cafe63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/integer.pyi @@ -0,0 +1,8 @@ +from typing_extensions import Literal + +implementation: str +null: Literal[b""] + +def from_bytes(octets, signed: bool = ...): ... +def to_bytes(value, signed: bool = ..., length: int = ...): ... +def bitLength(number): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/octets.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/octets.pyi new file mode 100644 index 00000000..a5ad961d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/octets.pyi @@ -0,0 +1,19 @@ +from typing import TypeVar +from typing_extensions import Literal + +_T = TypeVar("_T") + +ints2octs = bytes + +def int2oct(x) -> bytes: ... + +null: Literal[b""] + +def oct2int(x: _T) -> _T: ... +def octs2ints(x: _T) -> _T: ... +def str2octs(x: str) -> bytes: ... +def octs2str(x: bytes) -> str: ... +def isOctetsType(s: object) -> bool: ... +def isStringType(s: object) -> bool: ... 
+ +ensureString = bytes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/string.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/string.pyi new file mode 100644 index 00000000..c88881c1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/compat/string.pyi @@ -0,0 +1,2 @@ +# Same as string.partition(sep) +def partition(string: str, sep: str) -> tuple[str, str, str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/debug.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/debug.pyi new file mode 100644 index 00000000..55480cdd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/debug.pyi @@ -0,0 +1,28 @@ +import logging +from typing import TextIO + +class Printer: + def __init__( + self, + logger: logging.Logger | None = ..., + handler: logging.StreamHandler[TextIO] | None = ..., + formatter: logging.Formatter | None = ..., + ) -> None: ... + def __call__(self, msg) -> None: ... + +NullHandler = logging.NullHandler + +class Debug: + defaultPrinter: Printer + def __init__(self, *flags, **options) -> None: ... + def __call__(self, msg) -> None: ... + def __and__(self, flag): ... + def __rand__(self, flag): ... + +def setLogger(userLogger) -> None: ... +def hexdump(octets): ... + +class Scope: + def __init__(self) -> None: ... + def push(self, token) -> None: ... + def pop(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/error.pyi new file mode 100644 index 00000000..97eb7898 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/error.pyi @@ -0,0 +1,9 @@ +class PyAsn1Error(Exception): ... +class ValueConstraintError(PyAsn1Error): ... +class SubstrateUnderrunError(PyAsn1Error): ... + +class PyAsn1UnicodeError(PyAsn1Error, UnicodeError): + def __init__(self, message, unicode_error: UnicodeError | None = ...) -> None: ... + +class PyAsn1UnicodeDecodeError(PyAsn1UnicodeError, UnicodeDecodeError): ... +class PyAsn1UnicodeEncodeError(PyAsn1UnicodeError, UnicodeEncodeError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/base.pyi new file mode 100644 index 00000000..b1b5e17f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/base.pyi @@ -0,0 +1,149 @@ +from _typeshed import Incomplete, Unused +from typing import NoReturn, type_check_only +from typing_extensions import final + +from pyasn1.type import constraint, namedtype +from pyasn1.type.tag import TagSet + +class Asn1Item: + @classmethod + def getTypeId(cls, increment: int = ...): ... 
+ +class Asn1Type(Asn1Item): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + typeId: int | None + def __init__(self, **kwargs) -> None: ... + def __setattr__(self, name, value) -> None: ... + @property + def readOnly(self): ... + @property + def effectiveTagSet(self): ... + @property + def tagMap(self): ... + def isSameTypeWith(self, other, matchTags: bool = ..., matchConstraints: bool = ...): ... + def isSuperTypeOf(self, other, matchTags: bool = ..., matchConstraints: bool = ...): ... + @staticmethod + def isNoValue(*values): ... + def prettyPrint(self, scope: int = ...) -> None: ... + def getTagSet(self): ... + def getEffectiveTagSet(self): ... + def getTagMap(self): ... + def getSubtypeSpec(self): ... + def hasValue(self): ... + +Asn1ItemBase = Asn1Type + +@final +class NoValue: + skipMethods: set[str] + def __new__(cls): ... + def __getattr__(self, attr) -> None: ... + # def __new__..getPlug..plug + @type_check_only + def plug(self, *args: Unused, **kw: Unused) -> NoReturn: ... + # Magic methods assigned dynamically, priority from right to left: plug < str < int < list < dict + __abs__ = int.__abs__ + __add__ = list.__add__ + __and__ = int.__and__ + __bool__ = int.__bool__ + __ceil__ = int.__ceil__ + __class_getitem__ = plug + __contains__ = dict.__contains__ + __delitem__ = dict.__delitem__ + __dir__ = plug + __divmod__ = int.__divmod__ + __float__ = int.__float__ + __floor__ = int.__floor__ + __floordiv__ = int.__floordiv__ + __ge__ = list.__ge__ + __getitem__ = dict.__getitem__ + __gt__ = list.__gt__ + __iadd__ = list.__iadd__ + __imul__ = list.__imul__ + __index__ = int.__index__ + # self instead of cls + __init_subclass__ = plug # pyright: ignore[reportGeneralTypeIssues] + __int__ = int.__int__ + __invert__ = int.__invert__ + __ior__ = plug + __iter__ = dict.__iter__ + __le__ = list.__le__ + __len__ = dict.__len__ + __lshift__ = int.__lshift__ + __lt__ = list.__lt__ + __mod__ = int.__mod__ + __mul__ = list.__mul__ + __neg__ = int.__neg__ + __or__ = int.__or__ + __pos__ = int.__pos__ + __pow__ = int.__pow__ + __radd__ = int.__radd__ + __rand__ = int.__rand__ + __rdivmod__ = int.__rdivmod__ + __reversed__ = list.__reversed__ + __rfloordiv__ = int.__rfloordiv__ + __rlshift__ = int.__rlshift__ + __rmod__ = int.__rmod__ + __rmul__ = list.__rmul__ + __ror__ = int.__ror__ + __round__ = int.__round__ + __rpow__ = int.__rpow__ + __rrshift__ = int.__rrshift__ + __rshift__ = int.__rshift__ + __rsub__ = int.__rsub__ + __rtruediv__ = int.__rtruediv__ + __rxor__ = int.__rxor__ + __setitem__ = list.__setitem__ + __str__ = plug + __sub__ = int.__sub__ + __truediv__ = int.__truediv__ + __trunc__ = int.__trunc__ + __xor__ = int.__xor__ + +class SimpleAsn1Type(Asn1Type): + defaultValue: Incomplete | NoValue + def __init__(self, value=..., **kwargs) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __bool__(self) -> bool: ... + def __hash__(self): ... + @property + def isValue(self): ... + def clone(self, value=..., **kwargs): ... + def subtype(self, value=..., **kwargs): ... + def prettyIn(self, value): ... + def prettyOut(self, value): ... + def prettyPrint(self, scope: int = ...): ... + def prettyPrintType(self, scope: int = ...): ... 
+ +AbstractSimpleAsn1Item = SimpleAsn1Type + +class ConstructedAsn1Type(Asn1Type): + strictConstraints: bool + componentType: namedtype.NamedTypes | None + sizeSpec: constraint.ConstraintsIntersection + def __init__(self, **kwargs) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __bool__(self) -> bool: ... + @property + def components(self) -> None: ... + def clone(self, **kwargs): ... + def subtype(self, **kwargs): ... + def getComponentByPosition(self, idx) -> None: ... + def setComponentByPosition(self, idx, value, verifyConstraints: bool = ...) -> None: ... + def setComponents(self, *args, **kwargs): ... + def setDefaultComponents(self) -> None: ... + def getComponentType(self): ... + def verifySizeSpec(self) -> None: ... + +AbstractConstructedAsn1Item = ConstructedAsn1Type diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/char.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/char.pyi new file mode 100644 index 00000000..b61ce2aa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/char.pyi @@ -0,0 +1,72 @@ +from pyasn1.type import univ +from pyasn1.type.tag import TagSet + +class AbstractCharacterString(univ.OctetString): + def __bytes__(self) -> bytes: ... + def prettyIn(self, value): ... + def asOctets(self, padding: bool = ...): ... + def asNumbers(self, padding: bool = ...): ... + def prettyOut(self, value): ... + def prettyPrint(self, scope: int = ...): ... + def __reversed__(self): ... + +class NumericString(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class PrintableString(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class TeletexString(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class T61String(TeletexString): + typeId: int + +class VideotexString(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class IA5String(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class GraphicString(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class VisibleString(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class ISO646String(VisibleString): + typeId: int + +class GeneralString(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class UniversalString(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class BMPString(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int + +class UTF8String(AbstractCharacterString): + tagSet: TagSet + encoding: str + typeId: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/constraint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/constraint.pyi new file mode 100644 index 00000000..313ab6f9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/constraint.pyi @@ -0,0 +1,40 @@ +class AbstractConstraint: + def __init__(self, *values) -> None: ... + def __call__(self, value, idx: int | None = ...) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... 
+ def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __bool__(self) -> bool: ... + def __hash__(self): ... + def getValueMap(self): ... + def isSuperTypeOf(self, otherConstraint): ... + def isSubTypeOf(self, otherConstraint): ... + +class SingleValueConstraint(AbstractConstraint): + def __contains__(self, item) -> bool: ... + def __iter__(self): ... + def __add__(self, constraint): ... + def __sub__(self, constraint): ... + +class ContainedSubtypeConstraint(AbstractConstraint): ... +class ValueRangeConstraint(AbstractConstraint): ... +class ValueSizeConstraint(ValueRangeConstraint): ... +class PermittedAlphabetConstraint(SingleValueConstraint): ... +class ComponentPresentConstraint(AbstractConstraint): ... +class ComponentAbsentConstraint(AbstractConstraint): ... +class WithComponentsConstraint(AbstractConstraint): ... +class InnerTypeConstraint(AbstractConstraint): ... +class ConstraintsExclusion(AbstractConstraint): ... + +class AbstractConstraintSet(AbstractConstraint): + def __getitem__(self, idx): ... + def __iter__(self): ... + def __add__(self, value): ... + def __radd__(self, value): ... + def __len__(self) -> int: ... + +class ConstraintsIntersection(AbstractConstraintSet): ... +class ConstraintsUnion(AbstractConstraintSet): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/error.pyi new file mode 100644 index 00000000..b2562205 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/error.pyi @@ -0,0 +1,3 @@ +from pyasn1.error import PyAsn1Error + +class ValueConstraintError(PyAsn1Error): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/namedtype.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/namedtype.pyi new file mode 100644 index 00000000..a7070416 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/namedtype.pyi @@ -0,0 +1,71 @@ +class NamedType: + isOptional: bool + isDefaulted: bool + def __init__(self, name, asn1Object, openType: type | None = ...) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __hash__(self): ... + def __getitem__(self, idx): ... + def __iter__(self): ... + @property + def name(self): ... + @property + def asn1Object(self): ... + @property + def openType(self): ... + def getName(self): ... + def getType(self): ... + +class OptionalNamedType(NamedType): + isOptional: bool + +class DefaultedNamedType(NamedType): + isDefaulted: bool + +class NamedTypes: + def __init__(self, *namedTypes, **kwargs) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __hash__(self): ... + def __getitem__(self, idx): ... + def __contains__(self, key) -> bool: ... + def __iter__(self): ... + def __bool__(self) -> bool: ... + def __len__(self) -> int: ... + def values(self): ... + def keys(self): ... + def items(self): ... + def clone(self): ... + + class PostponedError: + def __init__(self, errorMsg) -> None: ... 
+ def __getitem__(self, item) -> None: ... + + def getTypeByPosition(self, idx): ... + def getPositionByType(self, tagSet): ... + def getNameByPosition(self, idx): ... + def getPositionByName(self, name): ... + def getTagMapNearPosition(self, idx): ... + def getPositionNearType(self, tagSet, idx): ... + @property + def minTagSet(self): ... + @property + def tagMap(self): ... + @property + def tagMapUnique(self): ... + @property + def hasOptionalOrDefault(self): ... + @property + def hasOpenTypes(self): ... + @property + def namedTypes(self): ... + @property + def requiredComponents(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/namedval.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/namedval.pyi new file mode 100644 index 00000000..348fb917 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/namedval.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from collections.abc import Generator + +class NamedValues: + def __init__(self, *args, **kwargs) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __hash__(self): ... + def __getitem__(self, key): ... + def __len__(self) -> int: ... + def __contains__(self, key) -> bool: ... + def __iter__(self): ... + def values(self): ... + def keys(self): ... + def items(self) -> Generator[Incomplete, None, None]: ... + def __add__(self, namedValues): ... + def clone(self, *args, **kwargs): ... + def getName(self, value): ... + def getValue(self, name): ... + def getValues(self, *names): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/opentype.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/opentype.pyi new file mode 100644 index 00000000..e11aeac4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/opentype.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from collections.abc import Mapping + +from pyasn1.type.base import Asn1Type + +class OpenType: + def __init__(self, name, typeMap: Mapping[Incomplete, Asn1Type] | None = ...) -> None: ... + @property + def name(self): ... + def values(self): ... + def keys(self): ... + def items(self): ... + def __contains__(self, key) -> bool: ... + def __getitem__(self, key): ... + def __iter__(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/tag.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/tag.pyi new file mode 100644 index 00000000..8c484dd6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/tag.pyi @@ -0,0 +1,51 @@ +tagClassUniversal: int +tagClassApplication: int +tagClassContext: int +tagClassPrivate: int +tagFormatSimple: int +tagFormatConstructed: int +tagCategoryImplicit: int +tagCategoryExplicit: int +tagCategoryUntagged: int + +class Tag: + def __init__(self, tagClass, tagFormat, tagId) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __hash__(self): ... 
+ def __getitem__(self, idx): ... + def __iter__(self): ... + def __and__(self, otherTag): ... + def __or__(self, otherTag): ... + @property + def tagClass(self): ... + @property + def tagFormat(self): ... + @property + def tagId(self): ... + +class TagSet: + def __init__(self, baseTag=..., *superTags) -> None: ... + def __add__(self, superTag): ... + def __radd__(self, superTag): ... + def __getitem__(self, i): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __hash__(self): ... + def __len__(self) -> int: ... + @property + def baseTag(self): ... + @property + def superTags(self): ... + def tagExplicitly(self, superTag): ... + def tagImplicitly(self, superTag): ... + def isSuperTagSetOf(self, tagSet): ... + def getBaseTag(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/tagmap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/tagmap.pyi new file mode 100644 index 00000000..196234b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/tagmap.pyi @@ -0,0 +1,23 @@ +from collections.abc import Container, Mapping + +from pyasn1.type.base import Asn1Type + +class TagMap: + def __init__( + self, + presentTypes: Mapping[TagMap, Asn1Type] | None = ..., + skipTypes: Container[TagMap] | None = ..., + defaultType: Asn1Type | None = ..., + ) -> None: ... + def __contains__(self, tagSet) -> bool: ... + def __getitem__(self, tagSet): ... + def __iter__(self): ... + @property + def presentTypes(self): ... + @property + def skipTypes(self): ... + @property + def defaultType(self): ... + def getPosMap(self): ... + def getNegMap(self): ... + def getDef(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/univ.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/univ.pyi new file mode 100644 index 00000000..7ac15848 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/univ.pyi @@ -0,0 +1,382 @@ +from _typeshed import Incomplete, ReadableBuffer, SupportsRichComparison, SupportsTrunc +from collections.abc import Callable, Generator +from typing import SupportsInt +from typing_extensions import Self, SupportsIndex, TypeAlias + +from pyasn1.type import base, constraint, namedtype, namedval +from pyasn1.type.tag import TagSet + +_SizedIntegerable: TypeAlias = ReadableBuffer | str | SupportsInt | SupportsIndex | SupportsTrunc + +NoValue = base.NoValue +noValue: NoValue + +class Integer(base.SimpleAsn1Type): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + namedValues: namedval.NamedValues + typeId: int + def __init__(self, value=..., **kwargs) -> None: ... + def __and__(self, value): ... + def __rand__(self, value): ... + def __or__(self, value): ... + def __ror__(self, value): ... + def __xor__(self, value): ... + def __rxor__(self, value): ... + def __lshift__(self, value): ... + def __rshift__(self, value): ... + def __add__(self, value): ... + def __radd__(self, value): ... + def __sub__(self, value): ... + def __rsub__(self, value): ... + def __mul__(self, value): ... + def __rmul__(self, value): ... + def __mod__(self, value): ... + def __rmod__(self, value): ... 
+ # Accepts everything builtins.pow does + def __pow__(self, value: complex, modulo: int | None = ...) -> Self: ... + def __rpow__(self, value): ... + def __floordiv__(self, value): ... + def __rfloordiv__(self, value): ... + def __truediv__(self, value): ... + def __rtruediv__(self, value): ... + def __divmod__(self, value): ... + def __rdivmod__(self, value): ... + __hash__ = base.SimpleAsn1Type.__hash__ + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __abs__(self): ... + def __index__(self) -> int: ... + def __pos__(self): ... + def __neg__(self): ... + def __invert__(self): ... + def __round__(self, n: int = ...): ... + def __floor__(self): ... + def __ceil__(self): ... + def __trunc__(self): ... + def __lt__(self, value): ... + def __le__(self, value): ... + def __eq__(self, value): ... + def __ne__(self, value): ... + def __gt__(self, value): ... + def __ge__(self, value): ... + def prettyIn(self, value): ... + def prettyOut(self, value): ... + def getNamedValues(self): ... + +class Boolean(Integer): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + namedValues: namedval.NamedValues + typeId: int + +SizedIntegerBase = int + +class SizedInteger(SizedIntegerBase): + bitLength: int | None + leadingZeroBits: int | None + def setBitLength(self, bitLength): ... + def __len__(self) -> int: ... + +class BitString(base.SimpleAsn1Type): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + namedValues: namedval.NamedValues + typeId: int + defaultBinValue: str | base.NoValue + defaultHexValue: str | base.NoValue + def __init__(self, value=..., **kwargs) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __len__(self) -> int: ... + def __getitem__(self, i): ... + def __iter__(self): ... + def __reversed__(self): ... + def __add__(self, value): ... + def __radd__(self, value): ... + def __mul__(self, value): ... + def __rmul__(self, value): ... + def __lshift__(self, count): ... + def __rshift__(self, count): ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def asNumbers(self): ... + def asOctets(self): ... + def asInteger(self): ... + def asBinary(self): ... + @classmethod + def fromHexString(cls, value, internalFormat: bool = ..., prepend: _SizedIntegerable | None = ...): ... + @classmethod + def fromBinaryString(cls, value, internalFormat: bool = ..., prepend: _SizedIntegerable | None = ...): ... + @classmethod + def fromOctetString(cls, value, internalFormat: bool = ..., prepend: _SizedIntegerable | None = ..., padding: int = ...): ... + def prettyIn(self, value): ... + +class OctetString(base.SimpleAsn1Type): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + typeId: int + defaultBinValue: str | base.NoValue + defaultHexValue: str | base.NoValue + encoding: str + def __init__(self, value=..., **kwargs) -> None: ... + def prettyIn(self, value): ... + def __bytes__(self) -> bytes: ... + def asOctets(self): ... + def asNumbers(self): ... + def prettyOut(self, value): ... + def prettyPrint(self, scope: int = ...): ... + @staticmethod + def fromBinaryString(value): ... + @staticmethod + def fromHexString(value): ... + def __len__(self) -> int: ... + def __getitem__(self, i): ... + def __iter__(self): ... + def __contains__(self, value) -> bool: ... + def __add__(self, value): ... + def __radd__(self, value): ... + def __mul__(self, value): ... 
+ def __rmul__(self, value): ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __reversed__(self): ... + +class Null(OctetString): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + typeId: int + def prettyIn(self, value): ... + +class ObjectIdentifier(base.SimpleAsn1Type): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + typeId: int + def __add__(self, other): ... + def __radd__(self, other): ... + def asTuple(self): ... + def __len__(self) -> int: ... + def __getitem__(self, i): ... + def __iter__(self): ... + def __contains__(self, value) -> bool: ... + def index(self, suboid): ... + def isPrefixOf(self, other): ... + def prettyIn(self, value): ... + def prettyOut(self, value): ... + +class Real(base.SimpleAsn1Type): + binEncBase: int | None + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + typeId: int + def prettyIn(self, value): ... + def prettyPrint(self, scope: int = ...): ... + @property + def isPlusInf(self): ... + @property + def isMinusInf(self): ... + @property + def isInf(self): ... + def __add__(self, value): ... + def __radd__(self, value): ... + def __mul__(self, value): ... + def __rmul__(self, value): ... + def __sub__(self, value): ... + def __rsub__(self, value): ... + def __mod__(self, value): ... + def __rmod__(self, value): ... + # Accepts everything builtins.pow with a float base does + def __pow__(self, value: complex, modulo: int | None = ...) -> Self: ... + def __rpow__(self, value): ... + def __truediv__(self, value): ... + def __rtruediv__(self, value): ... + def __divmod__(self, value): ... + def __rdivmod__(self, value): ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __abs__(self): ... + def __pos__(self): ... + def __neg__(self): ... + def __round__(self, n: int = ...): ... + def __floor__(self): ... + def __ceil__(self): ... + def __trunc__(self): ... + def __lt__(self, value): ... + def __le__(self, value): ... + def __eq__(self, value): ... + def __ne__(self, value): ... + def __gt__(self, value): ... + def __ge__(self, value): ... + def __bool__(self) -> bool: ... + __hash__ = base.SimpleAsn1Type.__hash__ + def __getitem__(self, idx): ... + def isPlusInfinity(self): ... + def isMinusInfinity(self): ... + def isInfinity(self): ... + +class Enumerated(Integer): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + typeId: int + namedValues: namedval.NamedValues + +class SequenceOfAndSetOfBase(base.ConstructedAsn1Type): + componentType: namedtype.NamedTypes | None + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + def __init__( + self, + *args, + componentType: namedtype.NamedTypes | None = ..., + tagSet: TagSet = ..., + subtypeSpec: constraint.ConstraintsIntersection = ..., + ) -> None: ... + def __getitem__(self, idx): ... + def __setitem__(self, idx, value) -> None: ... + def append(self, value) -> None: ... + def count(self, value): ... + def extend(self, values) -> None: ... + def index(self, value, start: int = ..., stop: int | None = ...): ... + def reverse(self) -> None: ... + def sort(self, key: Callable[[Incomplete], SupportsRichComparison] | None = ..., reverse: bool = ...) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def getComponentByPosition(self, idx, default=..., instantiate: bool = ...): ... + def setComponentByPosition( + self, idx, value=..., verifyConstraints: bool = ..., matchTags: bool = ..., matchConstraints: bool = ... + ): ... 
+ @property + def componentTagMap(self): ... + @property + def components(self): ... + def clear(self): ... + def reset(self): ... + def prettyPrint(self, scope: int = ...): ... + def prettyPrintType(self, scope: int = ...): ... + @property + def isValue(self): ... + @property + def isInconsistent(self): ... + +class SequenceOf(SequenceOfAndSetOfBase): + typeId: int + +class SetOf(SequenceOfAndSetOfBase): + typeId: int + +class SequenceAndSetBase(base.ConstructedAsn1Type): + componentType: namedtype.NamedTypes + + class DynamicNames: + def __init__(self) -> None: ... + def __len__(self) -> int: ... + def __contains__(self, item) -> bool: ... + def __iter__(self): ... + def __getitem__(self, item): ... + def getNameByPosition(self, idx): ... + def getPositionByName(self, name): ... + def addField(self, idx) -> None: ... + + def __init__(self, **kwargs) -> None: ... + def __getitem__(self, idx): ... + def __setitem__(self, idx, value) -> None: ... + def __contains__(self, key) -> bool: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def values(self) -> Generator[Incomplete, None, None]: ... + def keys(self): ... + def items(self) -> Generator[Incomplete, None, None]: ... + def update(self, *iterValue, **mappingValue) -> None: ... + def clear(self): ... + def reset(self): ... + @property + def components(self): ... + def getComponentByName(self, name, default=..., instantiate: bool = ...): ... + def setComponentByName( + self, name, value=..., verifyConstraints: bool = ..., matchTags: bool = ..., matchConstraints: bool = ... + ): ... + def getComponentByPosition(self, idx, default=..., instantiate: bool = ...): ... + def setComponentByPosition( + self, idx, value=..., verifyConstraints: bool = ..., matchTags: bool = ..., matchConstraints: bool = ... + ): ... + @property + def isValue(self): ... + @property + def isInconsistent(self): ... + def prettyPrint(self, scope: int = ...): ... + def prettyPrintType(self, scope: int = ...): ... + def setDefaultComponents(self): ... + def getComponentType(self): ... + def getNameByPosition(self, idx): ... + +class Sequence(SequenceAndSetBase): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + componentType: namedtype.NamedTypes + typeId: int + def getComponentTagMapNearPosition(self, idx): ... + def getComponentPositionNearType(self, tagSet, idx): ... + +class Set(SequenceAndSetBase): + tagSet: TagSet + componentType: namedtype.NamedTypes + subtypeSpec: constraint.ConstraintsIntersection + typeId: int + def getComponent(self, innerFlag: bool = ...): ... + def getComponentByType(self, tagSet, default=..., instantiate: bool = ..., innerFlag: bool = ...): ... + def setComponentByType( + self, + tagSet, + value=..., + verifyConstraints: bool = ..., + matchTags: bool = ..., + matchConstraints: bool = ..., + innerFlag: bool = ..., + ): ... + @property + def componentTagMap(self): ... + +class Choice(Set): + tagSet: TagSet + componentType: namedtype.NamedTypes + subtypeSpec: constraint.ConstraintsIntersection + typeId: int + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __bool__(self) -> bool: ... + def __len__(self) -> int: ... + def __contains__(self, key) -> bool: ... + def __iter__(self): ... + def values(self) -> Generator[Incomplete, None, None]: ... + def keys(self) -> Generator[Incomplete, None, None]: ... + def items(self) -> Generator[Incomplete, None, None]: ... 
+ def checkConsistency(self) -> None: ... + def getComponentByPosition(self, idx, default=..., instantiate: bool = ...): ... + def setComponentByPosition( + self, idx, value=..., verifyConstraints: bool = ..., matchTags: bool = ..., matchConstraints: bool = ... + ): ... + @property + def effectiveTagSet(self): ... + @property + def tagMap(self): ... + def getComponent(self, innerFlag: bool = ...): ... + def getName(self, innerFlag: bool = ...): ... + @property + def isValue(self): ... + def clear(self): ... + def getMinTagSet(self): ... + +class Any(OctetString): + tagSet: TagSet + subtypeSpec: constraint.ConstraintsIntersection + typeId: int + @property + def tagMap(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/useful.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/useful.pyi new file mode 100644 index 00000000..33f13a33 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyasn1/pyasn1/type/useful.pyi @@ -0,0 +1,28 @@ +import datetime + +from pyasn1.type import char +from pyasn1.type.tag import TagSet + +class ObjectDescriptor(char.GraphicString): + tagSet: TagSet + typeId: int + +class TimeMixIn: + class FixedOffset(datetime.tzinfo): + def __init__(self, offset: int = ..., name: str = ...) -> None: ... + def utcoffset(self, dt): ... + def tzname(self, dt): ... + def dst(self, dt): ... + UTC: FixedOffset + @property + def asDateTime(self): ... + @classmethod + def fromDateTime(cls, dt): ... + +class GeneralizedTime(char.VisibleString, TimeMixIn): + tagSet: TagSet + typeId: int + +class UTCTime(char.VisibleString, TimeMixIn): + tagSet: TagSet + typeId: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyaudio/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyaudio/METADATA.toml new file mode 100644 index 00000000..477c3982 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyaudio/METADATA.toml @@ -0,0 +1,7 @@ +version = "0.2.*" + +[tool.stubtest] +# linux and win32 are equivalent +platforms = ["darwin", "linux"] +apt_dependencies = ["portaudio19-dev"] +brew_dependencies = ["portaudio"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyaudio/pyaudio.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyaudio/pyaudio.pyi new file mode 100644 index 00000000..b7a57cee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyaudio/pyaudio.pyi @@ -0,0 +1,179 @@ +import sys +from collections.abc import Callable, Mapping, Sequence +from typing import ClassVar +from typing_extensions import Final, TypeAlias + +__docformat__: str + +paFloat32: Final[int] +paInt32: Final[int] +paInt24: Final[int] +paInt16: Final[int] +paInt8: Final[int] +paUInt8: Final[int] +paCustomFormat: Final[int] + +paInDevelopment: Final[int] +paDirectSound: Final[int] +paMME: Final[int] +paASIO: Final[int] +paSoundManager: Final[int] +paCoreAudio: Final[int] +paOSS: Final[int] +paALSA: Final[int] +paAL: Final[int] +paBeOS: Final[int] +paWDMKS: Final[int] +paJACK: Final[int] +paWASAPI: Final[int] +paNoDevice: Final[int] + +paNoError: Final[int] +paNotInitialized: Final[int] +paUnanticipatedHostError: Final[int] +paInvalidChannelCount: Final[int] +paInvalidSampleRate: Final[int] +paInvalidDevice: Final[int] +paInvalidFlag: Final[int] 
+paSampleFormatNotSupported: Final[int] +paBadIODeviceCombination: Final[int] +paInsufficientMemory: Final[int] +paBufferTooBig: Final[int] +paBufferTooSmall: Final[int] +paNullCallback: Final[int] +paBadStreamPtr: Final[int] +paTimedOut: Final[int] +paInternalError: Final[int] +paDeviceUnavailable: Final[int] +paIncompatibleHostApiSpecificStreamInfo: Final[int] +paStreamIsStopped: Final[int] +paStreamIsNotStopped: Final[int] +paInputOverflowed: Final[int] +paOutputUnderflowed: Final[int] +paHostApiNotFound: Final[int] +paInvalidHostApi: Final[int] +paCanNotReadFromACallbackStream: Final[int] +paCanNotWriteToACallbackStream: Final[int] +paCanNotReadFromAnOutputOnlyStream: Final[int] +paCanNotWriteToAnInputOnlyStream: Final[int] +paIncompatibleStreamHostApi: Final[int] + +paContinue: Final[int] +paComplete: Final[int] +paAbort: Final[int] + +paInputUnderflow: Final[int] +paInputOverflow: Final[int] +paOutputUnderflow: Final[int] +paOutputOverflow: Final[int] +paPrimingOutput: Final[int] + +paFramesPerBufferUnspecified: Final[int] + +if sys.platform == "darwin": + class PaMacCoreStreamInfo: + paMacCoreChangeDeviceParameters: Final[int] + paMacCoreFailIfConversionRequired: Final[int] + paMacCoreConversionQualityMin: Final[int] + paMacCoreConversionQualityMedium: Final[int] + paMacCoreConversionQualityLow: Final[int] + paMacCoreConversionQualityHigh: Final[int] + paMacCoreConversionQualityMax: Final[int] + paMacCorePlayNice: Final[int] + paMacCorePro: Final[int] + paMacCoreMinimizeCPUButPlayNice: Final[int] + paMacCoreMinimizeCPU: Final[int] + def __init__(self, flags: int | None = ..., channel_map: _ChannelMap | None = ...) -> None: ... + def get_flags(self) -> int: ... + def get_channel_map(self) -> _ChannelMap | None: ... + + _PaMacCoreStreamInfo: TypeAlias = PaMacCoreStreamInfo +else: + _PaMacCoreStreamInfo: TypeAlias = None + +# Auxiliary types +_ChannelMap: TypeAlias = Sequence[int] +_PaHostApiInfo: TypeAlias = Mapping[str, str | int] +_PaDeviceInfo: TypeAlias = Mapping[str, str | int | float] +_StreamCallback: TypeAlias = Callable[[bytes | None, int, Mapping[str, float], int], tuple[bytes | None, int]] + +def get_format_from_width(width: int, unsigned: bool = ...) -> int: ... +def get_portaudio_version() -> int: ... +def get_portaudio_version_text() -> str: ... +def get_sample_size(format: int) -> int: ... + +class Stream: + def __init__( + self, + PA_manager: PyAudio, + rate: int, + channels: int, + format: int, + input: bool = ..., + output: bool = ..., + input_device_index: int | None = ..., + output_device_index: int | None = ..., + frames_per_buffer: int = ..., + start: bool = ..., + input_host_api_specific_stream_info: _PaMacCoreStreamInfo | None = ..., + output_host_api_specific_stream_info: _PaMacCoreStreamInfo | None = ..., + stream_callback: _StreamCallback | None = ..., + ) -> None: ... + def close(self) -> None: ... + def get_cpu_load(self) -> float: ... + def get_input_latency(self) -> float: ... + def get_output_latency(self) -> float: ... + def get_read_available(self) -> int: ... + def get_time(self) -> float: ... + def get_write_available(self) -> int: ... + def is_active(self) -> bool: ... + def is_stopped(self) -> bool: ... + def read(self, num_frames: int, exception_on_overflow: bool = ...) -> bytes: ... + def start_stream(self) -> None: ... + def stop_stream(self) -> None: ... + def write(self, frames: bytes, num_frames: int | None = ..., exception_on_underflow: bool = ...) -> None: ... 
+ +# Use an alias to workaround pyright complaints about recursive definitions in the PyAudio class +_Stream = Stream + +class PyAudio: + Stream: ClassVar[type[_Stream]] + def __init__(self) -> None: ... + def close(self, stream: _Stream) -> None: ... + def get_default_host_api_info(self) -> _PaHostApiInfo: ... + def get_default_input_device_info(self) -> _PaDeviceInfo: ... + def get_default_output_device_info(self) -> _PaDeviceInfo: ... + def get_device_count(self) -> int: ... + def get_device_info_by_host_api_device_index(self, host_api_index: int, host_api_device_index: int) -> _PaDeviceInfo: ... + def get_device_info_by_index(self, device_index: int) -> _PaDeviceInfo: ... + def get_format_from_width(self, width: int, unsigned: bool = ...) -> int: ... + def get_host_api_count(self) -> int: ... + def get_host_api_info_by_index(self, host_api_index: int) -> _PaHostApiInfo: ... + def get_host_api_info_by_type(self, host_api_type: int) -> _PaHostApiInfo: ... + def get_sample_size(self, format: int) -> int: ... + def is_format_supported( + self, + rate: int, + input_device: int | None = ..., + input_channels: int | None = ..., + input_format: int | None = ..., + output_device: int | None = ..., + output_channels: int | None = ..., + output_format: int | None = ..., + ) -> bool: ... + def open( + self, + rate: int, + channels: int, + format: int, + input: bool = ..., + output: bool = ..., + input_device_index: int | None = ..., + output_device_index: int | None = ..., + frames_per_buffer: int = ..., + start: bool = ..., + input_host_api_specific_stream_info: _PaMacCoreStreamInfo | None = ..., + output_host_api_specific_stream_info: _PaMacCoreStreamInfo | None = ..., + stream_callback: _StreamCallback | None = ..., + ) -> _Stream: ... + def terminate(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/METADATA.toml new file mode 100644 index 00000000..58bc3834 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/METADATA.toml @@ -0,0 +1 @@ +version = "2.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/__init__.pyi new file mode 100644 index 00000000..f8b7a404 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/__init__.pyi @@ -0,0 +1,6 @@ +from typing_extensions import TypedDict + +# Unused in this module, but imported in multiple submodules. +class _EncodedRLE(TypedDict): # noqa: Y049 + size: list[int] + counts: str | bytes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/coco.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/coco.pyi new file mode 100644 index 00000000..1cb93c63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/coco.pyi @@ -0,0 +1,94 @@ +from _typeshed import Incomplete +from collections.abc import Collection, Sequence +from pathlib import Path +from typing import Generic, TypeVar, overload +from typing_extensions import Literal, TypeAlias, TypedDict + +from . import _EncodedRLE + +# TODO: Use numpy types when #5768 is resolved. 
+# import numpy as np +# import numpy.typing as npt + +PYTHON_VERSION: Incomplete +_NDArray: TypeAlias = Incomplete + +class _Image(TypedDict): + id: int + width: int + height: int + file_name: str + +_TPolygonSegmentation: TypeAlias = list[list[float]] + +class _RLE(TypedDict): + size: list[int] + counts: list[int] + +class _Annotation(TypedDict): + id: int + image_id: int + category_id: int + segmentation: _TPolygonSegmentation | _RLE | _EncodedRLE + area: float + bbox: list[float] + iscrowd: int + +_TSeg = TypeVar("_TSeg", _TPolygonSegmentation, _RLE, _EncodedRLE) + +class _AnnotationG(TypedDict, Generic[_TSeg]): + id: int + image_id: int + category_id: int + segmentation: _TSeg + area: float + bbox: list[float] + iscrowd: int + +class _Category(TypedDict): + id: int + name: str + supercategory: str + +class _Dataset(TypedDict): + images: list[_Image] + annotations: list[_Annotation] + categories: list[_Category] + +class COCO: + anns: dict[int, _Annotation] + dataset: _Dataset + cats: dict[int, _Category] + imgs: dict[int, _Image] + imgToAnns: dict[int, list[_Annotation]] + catToImgs: dict[int, list[int]] + def __init__(self, annotation_file: str | Path | None = ...) -> None: ... + def createIndex(self) -> None: ... + def info(self) -> None: ... + def getAnnIds( + self, + imgIds: Collection[int] | int = ..., + catIds: Collection[int] | int = ..., + areaRng: Sequence[float] = ..., + iscrowd: bool | None = ..., + ) -> list[int]: ... + def getCatIds( + self, catNms: Collection[str] | str = ..., supNms: Collection[str] | str = ..., catIds: Collection[int] | int = ... + ) -> list[int]: ... + def getImgIds(self, imgIds: Collection[int] | int = ..., catIds: list[int] | int = ...) -> list[int]: ... + def loadAnns(self, ids: Collection[int] | int = ...) -> list[_Annotation]: ... + def loadCats(self, ids: Collection[int] | int = ...) -> list[_Category]: ... + def loadImgs(self, ids: Collection[int] | int = ...) -> list[_Image]: ... + def showAnns(self, anns: Sequence[_Annotation], draw_bbox: bool = ...) -> None: ... + def loadRes(self, resFile: str) -> COCO: ... + def download(self, tarDir: str | None = ..., imgIds: Collection[int] = ...) -> Literal[-1] | None: ... + def loadNumpyAnnotations(self, data: _NDArray) -> list[_Annotation]: ... + # def loadNumpyAnnotations(self, data: npt.NDArray[np.float64]) -> list[_Annotation]: ... + @overload + def annToRLE(self, ann: _AnnotationG[_RLE]) -> _RLE: ... + @overload + def annToRLE(self, ann: _AnnotationG[_EncodedRLE]) -> _EncodedRLE: ... + @overload + def annToRLE(self, ann: _AnnotationG[_TPolygonSegmentation]) -> _EncodedRLE: ... + def annToMask(self, ann: _Annotation) -> _NDArray: ... + # def annToMask(self, ann: _Annotation) -> npt.NDArray[np.uint8]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/cocoeval.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/cocoeval.pyi new file mode 100644 index 00000000..d9845c6c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/cocoeval.pyi @@ -0,0 +1,65 @@ +from _typeshed import Incomplete +from typing_extensions import Literal, TypeAlias, TypedDict + +from .coco import COCO + +# TODO: Use numpy types when #5768 is resolved. 
+# import numpy as np +# import numpy.typing as npt + +_NDArray: TypeAlias = Incomplete +_TIOU: TypeAlias = Literal["segm", "bbox", "keypoints"] + +class _EvaluationResult(TypedDict): + image_id: int + category_id: int + aRng: list[int] + maxDet: int + dtIds: list[int] + gtIds: list[int] + dtMatches: _NDArray + # dtMatches: npt.NDArray[np.float64] + gtMatches: _NDArray + # gtMatches: npt.NDArray[np.float64] + dtScores: list[float] + gtIgnore: _NDArray + # gtIgnore: npt.NDArray[np.float64] + dtIgnore: _NDArray + # dtIgnore: npt.NDArray[np.float64] + +class COCOeval: + cocoGt: COCO + cocoDt: COCO + evalImgs: list[_EvaluationResult] + eval: _EvaluationResult + params: Params + stats: _NDArray + # stats: npt.NDArray[np.float64] + ious: dict[tuple[int, int], list[float]] + def __init__(self, cocoGt: COCO | None = ..., cocoDt: COCO | None = ..., iouType: _TIOU = ...) -> None: ... + def evaluate(self) -> None: ... + def computeIoU(self, imgId: int, catId: int) -> list[float]: ... + def computeOks(self, imgId: int, catId: int) -> _NDArray: ... + # def computeOks(self, imgId: int, catId: int) -> npt.NDArray[np.float64]: ... + def evaluateImg(self, imgId: int, catId: int, aRng: list[int], maxDet: int) -> _EvaluationResult: ... + def accumulate(self, p: Params | None = ...) -> None: ... + def summarize(self) -> None: ... + +class Params: + imgIds: list[int] + catIds: list[int] + iouThrs: _NDArray + # iouThrs: npt.NDArray[np.float64] + recThrs: _NDArray + # recThrs: npt.NDArray[np.float64] + maxDets: list[int] + areaRng: list[int] + areaRngLbl: list[str] + useCats: int + kpt_oks_sigmas: _NDArray + # kpt_oks_sigmas: npt.NDArray[np.float64] + iouType: _TIOU + useSegm: int | None + def __init__(self, iouType: _TIOU = ...) -> None: ... + def setDetParams(self) -> None: ... + def setKpParams(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/mask.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/mask.pyi new file mode 100644 index 00000000..07d30d66 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycocotools/pycocotools/mask.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete +from typing import Any, overload +from typing_extensions import TypeAlias + +from . import _EncodedRLE + +# TODO: Use numpy types when #5768 is resolved. +# import numpy as np +# import numpy.typing as npt + +_NPUInt32: TypeAlias = Incomplete # np.uint32 +_NDArrayUInt8: TypeAlias = Incomplete # npt.NDArray[np.uint8] +_NDArrayUInt32: TypeAlias = Incomplete # npt.NDArray[np.uint32] +_NDArrayFloat64: TypeAlias = Incomplete # npt.NDArray[np.float64] + +def iou( + dt: _NDArrayUInt32 | list[float] | list[_EncodedRLE], + gt: _NDArrayUInt32 | list[float] | list[_EncodedRLE], + pyiscrowd: list[int] | _NDArrayUInt8, +) -> list[Any] | _NDArrayFloat64: ... +def merge(rleObjs: list[_EncodedRLE], intersect: int = ...) -> _EncodedRLE: ... + +# ignore an "overlapping overloads" error due to _NDArrayInt32 being an alias for `Incomplete` for now +@overload +def frPyObjects(pyobj: _NDArrayUInt32 | list[list[int]] | list[_EncodedRLE], h: int, w: int) -> list[_EncodedRLE]: ... # type: ignore[misc] +@overload +def frPyObjects(pyobj: list[int] | _EncodedRLE, h: int, w: int) -> _EncodedRLE: ... +def encode(bimask: _NDArrayUInt8) -> _EncodedRLE: ... +def decode(rleObjs: _EncodedRLE) -> _NDArrayUInt8: ... +def area(rleObjs: _EncodedRLE) -> _NPUInt32: ... 
+def toBbox(rleObjs: _EncodedRLE) -> _NDArrayFloat64: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycurl/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycurl/METADATA.toml new file mode 100644 index 00000000..454b5b89 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycurl/METADATA.toml @@ -0,0 +1,15 @@ +version = "7.45.2" + +[tool.stubtest] +# Install on Windows requires building PycURL from source +# +# Install on MacOS is too complicated for the CI and does not work with stubtest: +# % brew install openssl +# % export LDFLAGS="-L/usr/local/opt/openssl@3/lib" +# % export CPPFLAGS="-I/usr/local/opt/openssl@3/include" +# % pip install --compile --install-option="--with-openssl" pycurl +# TODO: Test on Windows and/or MacOS once wheels are available. +platforms = ["linux"] +apt_dependencies = ["libcurl4-openssl-dev"] +# No need to install on the CI. Leaving here as information for MacOS contributors. +# brew_dependencies = ["openssl"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycurl/pycurl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycurl/pycurl.pyi new file mode 100644 index 00000000..2a531b2b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pycurl/pycurl.pyi @@ -0,0 +1,683 @@ +import sys +from _typeshed import Incomplete +from typing_extensions import Self, final + +version: str + +def global_init(option: int) -> None: ... +def global_cleanup() -> None: ... +def version_info() -> ( + tuple[int, str, int, str, int, str, int, str, tuple[str, ...], Incomplete | None, int, Incomplete | None] +): ... + +class error(Exception): ... + +@final +class Curl: + USERPWD: int + def close(self) -> None: ... + def setopt(self, option: int, value: Incomplete) -> None: ... + def setopt_string(self, option: int, value: str) -> None: ... + def perform(self) -> None: ... + def perform_rb(self) -> bytes: ... + def perform_rs(self) -> str: ... + def getinfo(self, info: Incomplete) -> Incomplete: ... + def getinfo_raw(self, info: Incomplete) -> Incomplete: ... + def reset(self) -> None: ... + def unsetopt(self, option: int) -> Incomplete: ... + def pause(self, bitmask: Incomplete) -> Incomplete: ... + def errstr(self) -> str: ... + def duphandle(self) -> Self: ... + def errstr_raw(self) -> bytes: ... + def set_ca_certs(self, __value: bytes | str) -> None: ... + +@final +class CurlMulti: + def close(self) -> None: ... + def add_handle(self, obj: Curl) -> None: ... + def remove_handle(self, obj: Curl) -> None: ... + def setopt(self, option: int, value: Incomplete) -> None: ... + def perform(self) -> tuple[Incomplete, int]: ... + def fdset(self) -> tuple[list[Incomplete], list[Incomplete], list[Incomplete]]: ... + def select(self, timeout: float = ...) -> int: ... + def info_read(self, max_objects: int = ...) -> tuple[int, list[Incomplete], list[Incomplete]]: ... + def socket_action(self, sockfd: int, ev_bitmask: int) -> tuple[int, int]: ... + def assign(self, __sockfd: int, __socket: Incomplete) -> Incomplete: ... + def socket_all(self) -> tuple[int, int]: ... + def timeout(self) -> int: ... + +@final +class CurlShare: + def close(self) -> None: ... + def setopt(self, option: int, value: Incomplete) -> Incomplete: ... 
+ +if sys.platform != "darwin": + CURL_VERSION_HTTP3: int + MAXAGE_CONN: int + M_MAX_CONCURRENT_STREAMS: int + +ACCEPTTIMEOUT_MS: int +ACCEPT_ENCODING: int +ADDRESS_SCOPE: int +APPCONNECT_TIME: int +APPEND: int +AUTOREFERER: int +BUFFERSIZE: int +CAINFO: int +CAPATH: int +CLOSESOCKETFUNCTION: int +COMPILE_LIBCURL_VERSION_NUM: int +COMPILE_PY_VERSION_HEX: int +CONDITION_UNMET: int +CONNECTTIMEOUT: int +CONNECTTIMEOUT_MS: int +CONNECT_ONLY: int +CONNECT_TIME: int +CONNECT_TO: int +CONTENT_LENGTH_DOWNLOAD: int +CONTENT_LENGTH_UPLOAD: int +CONTENT_TYPE: int +COOKIE: int +COOKIEFILE: int +COOKIEJAR: int +COOKIELIST: int +COOKIESESSION: int +COPYPOSTFIELDS: int +CRLF: int +CRLFILE: int +CSELECT_ERR: int +CSELECT_IN: int +CSELECT_OUT: int +CURL_HTTP_VERSION_1_0: int +CURL_HTTP_VERSION_1_1: int +CURL_HTTP_VERSION_2: int +CURL_HTTP_VERSION_2TLS: int +CURL_HTTP_VERSION_2_0: int +CURL_HTTP_VERSION_2_PRIOR_KNOWLEDGE: int +CURL_HTTP_VERSION_3: int +CURL_HTTP_VERSION_LAST: int +CURL_HTTP_VERSION_NONE: int +CURL_VERSION_ALTSVC: int +CURL_VERSION_BROTLI: int +CURL_VERSION_GSASL: int +CURL_VERSION_HSTS: int +CURL_VERSION_HTTPS_PROXY: int +CURL_VERSION_MULTI_SSL: int +CURL_VERSION_UNICODE: int +CURL_VERSION_ZSTD: int +CUSTOMREQUEST: int +DEBUGFUNCTION: int +DEFAULT_PROTOCOL: int +DIRLISTONLY: int +DNS_CACHE_TIMEOUT: int +DNS_SERVERS: int +DNS_USE_GLOBAL_CACHE: int +DOH_URL: int +EFFECTIVE_URL: int +EGDSOCKET: int +ENCODING: int +EXPECT_100_TIMEOUT_MS: int +FAILONERROR: int +FILE: int +FOLLOWLOCATION: int +FORBID_REUSE: int +FORM_BUFFER: int +FORM_BUFFERPTR: int +FORM_CONTENTS: int +FORM_CONTENTTYPE: int +FORM_FILE: int +FORM_FILENAME: int +FRESH_CONNECT: int +FTPAPPEND: int +FTPAUTH_DEFAULT: int +FTPAUTH_SSL: int +FTPAUTH_TLS: int +FTPLISTONLY: int +FTPMETHOD_DEFAULT: int +FTPMETHOD_MULTICWD: int +FTPMETHOD_NOCWD: int +FTPMETHOD_SINGLECWD: int +FTPPORT: int +FTPSSLAUTH: int +FTPSSL_ALL: int +FTPSSL_CONTROL: int +FTPSSL_NONE: int +FTPSSL_TRY: int +FTP_ACCOUNT: int +FTP_ALTERNATIVE_TO_USER: int +FTP_CREATE_MISSING_DIRS: int +FTP_ENTRY_PATH: int +FTP_FILEMETHOD: int +FTP_RESPONSE_TIMEOUT: int +FTP_SKIP_PASV_IP: int +FTP_SSL: int +FTP_SSL_CCC: int +FTP_USE_EPRT: int +FTP_USE_EPSV: int +FTP_USE_PRET: int +GLOBAL_ACK_EINTR: int +GLOBAL_ALL: int +GLOBAL_DEFAULT: int +GLOBAL_NOTHING: int +GLOBAL_SSL: int +GLOBAL_WIN32: int +GSSAPI_DELEGATION: int +GSSAPI_DELEGATION_FLAG: int +GSSAPI_DELEGATION_NONE: int +GSSAPI_DELEGATION_POLICY_FLAG: int +HAPROXYPROTOCOL: int +HEADER: int +HEADERFUNCTION: int +HEADEROPT: int +HEADER_SEPARATE: int +HEADER_SIZE: int +HEADER_UNIFIED: int +HTTP09_ALLOWED: int +HTTP200ALIASES: int +HTTPAUTH: int +HTTPAUTH_ANY: int +HTTPAUTH_ANYSAFE: int +HTTPAUTH_AVAIL: int +HTTPAUTH_BASIC: int +HTTPAUTH_DIGEST: int +HTTPAUTH_DIGEST_IE: int +HTTPAUTH_GSSNEGOTIATE: int +HTTPAUTH_NEGOTIATE: int +HTTPAUTH_NONE: int +HTTPAUTH_NTLM: int +HTTPAUTH_NTLM_WB: int +HTTPAUTH_ONLY: int +HTTPGET: int +HTTPHEADER: int +HTTPPOST: int +HTTPPROXYTUNNEL: int +HTTP_CODE: int +HTTP_CONNECTCODE: int +HTTP_CONTENT_DECODING: int +HTTP_TRANSFER_DECODING: int +HTTP_VERSION: int +IGNORE_CONTENT_LENGTH: int +INFILE: int +INFILESIZE: int +INFILESIZE_LARGE: int +INFOTYPE_DATA_IN: int +INFOTYPE_DATA_OUT: int +INFOTYPE_HEADER_IN: int +INFOTYPE_HEADER_OUT: int +INFOTYPE_SSL_DATA_IN: int +INFOTYPE_SSL_DATA_OUT: int +INFOTYPE_TEXT: int +INFO_CERTINFO: int +INFO_COOKIELIST: int +INFO_FILETIME: int +INFO_HTTP_VERSION: int +INFO_RTSP_CLIENT_CSEQ: int +INFO_RTSP_CSEQ_RECV: int +INFO_RTSP_SERVER_CSEQ: int +INFO_RTSP_SESSION_ID: int 
+INTERFACE: int +IOCMD_NOP: int +IOCMD_RESTARTREAD: int +IOCTLFUNCTION: int +IOE_FAILRESTART: int +IOE_OK: int +IOE_UNKNOWNCMD: int +IPRESOLVE: int +IPRESOLVE_V4: int +IPRESOLVE_V6: int +IPRESOLVE_WHATEVER: int +ISSUERCERT: int +KEYPASSWD: int +KHMATCH_MISMATCH: int +KHMATCH_MISSING: int +KHMATCH_OK: int +KHSTAT_DEFER: int +KHSTAT_FINE: int +KHSTAT_FINE_ADD_TO_FILE: int +KHSTAT_REJECT: int +KHTYPE_DSS: int +KHTYPE_RSA: int +KHTYPE_RSA1: int +KHTYPE_UNKNOWN: int +KRB4LEVEL: int +KRBLEVEL: int +LASTSOCKET: int +LOCALPORT: int +LOCALPORTRANGE: int +LOCAL_IP: int +LOCAL_PORT: int +LOCK_DATA_CONNECT: int +LOCK_DATA_COOKIE: int +LOCK_DATA_DNS: int +LOCK_DATA_PSL: int +LOCK_DATA_SSL_SESSION: int +LOGIN_OPTIONS: int +LOW_SPEED_LIMIT: int +LOW_SPEED_TIME: int +MAIL_AUTH: int +MAIL_FROM: int +MAIL_RCPT: int +MAXCONNECTS: int +MAXFILESIZE: int +MAXFILESIZE_LARGE: int +MAXLIFETIME_CONN: int +MAXREDIRS: int +MAX_RECV_SPEED_LARGE: int +MAX_SEND_SPEED_LARGE: int +M_CHUNK_LENGTH_PENALTY_SIZE: int +M_CONTENT_LENGTH_PENALTY_SIZE: int +M_MAXCONNECTS: int +M_MAX_HOST_CONNECTIONS: int +M_MAX_PIPELINE_LENGTH: int +M_MAX_TOTAL_CONNECTIONS: int +M_PIPELINING: int +M_PIPELINING_SERVER_BL: int +M_PIPELINING_SITE_BL: int +M_SOCKETFUNCTION: int +M_TIMERFUNCTION: int +NAMELOOKUP_TIME: int +NETRC: int +NETRC_FILE: int +NETRC_IGNORED: int +NETRC_OPTIONAL: int +NETRC_REQUIRED: int +NEW_DIRECTORY_PERMS: int +NEW_FILE_PERMS: int +NOBODY: int +NOPROGRESS: int +NOPROXY: int +NOSIGNAL: int +NUM_CONNECTS: int +OPENSOCKETFUNCTION: int +OPT_CERTINFO: int +OPT_COOKIELIST: int +OPT_FILETIME: int +OPT_RTSP_CLIENT_CSEQ: int +OPT_RTSP_REQUEST: int +OPT_RTSP_SERVER_CSEQ: int +OPT_RTSP_SESSION_ID: int +OPT_RTSP_STREAM_URI: int +OPT_RTSP_TRANSPORT: int +OS_ERRNO: int +PASSWORD: int +PATH_AS_IS: int +PAUSE_ALL: int +PAUSE_CONT: int +PAUSE_RECV: int +PAUSE_SEND: int +PINNEDPUBLICKEY: int +PIPEWAIT: int +PIPE_HTTP1: int +PIPE_MULTIPLEX: int +PIPE_NOTHING: int +POLL_IN: int +POLL_INOUT: int +POLL_NONE: int +POLL_OUT: int +POLL_REMOVE: int +PORT: int +POST: int +POST301: int +POSTFIELDS: int +POSTFIELDSIZE: int +POSTFIELDSIZE_LARGE: int +POSTQUOTE: int +POSTREDIR: int +PREQUOTE: int +PRETRANSFER_TIME: int +PRE_PROXY: int +PRIMARY_IP: int +PRIMARY_PORT: int +PROGRESSFUNCTION: int +PROTOCOLS: int +PROTO_ALL: int +PROTO_DICT: int +PROTO_FILE: int +PROTO_FTP: int +PROTO_FTPS: int +PROTO_GOPHER: int +PROTO_HTTP: int +PROTO_HTTPS: int +PROTO_IMAP: int +PROTO_IMAPS: int +PROTO_LDAP: int +PROTO_LDAPS: int +PROTO_POP3: int +PROTO_POP3S: int +PROTO_RTMP: int +PROTO_RTMPE: int +PROTO_RTMPS: int +PROTO_RTMPT: int +PROTO_RTMPTE: int +PROTO_RTMPTS: int +PROTO_RTSP: int +PROTO_SCP: int +PROTO_SFTP: int +PROTO_SMB: int +PROTO_SMBS: int +PROTO_SMTP: int +PROTO_SMTPS: int +PROTO_TELNET: int +PROTO_TFTP: int +PROXY: int +PROXYAUTH: int +PROXYAUTH_AVAIL: int +PROXYHEADER: int +PROXYPASSWORD: int +PROXYPORT: int +PROXYTYPE: int +PROXYTYPE_HTTP: int +PROXYTYPE_HTTP_1_0: int +PROXYTYPE_SOCKS4: int +PROXYTYPE_SOCKS4A: int +PROXYTYPE_SOCKS5: int +PROXYTYPE_SOCKS5_HOSTNAME: int +PROXYUSERNAME: int +PROXYUSERPWD: int +PROXY_CAINFO: int +PROXY_CAPATH: int +PROXY_SERVICE_NAME: int +PROXY_SSLCERT: int +PROXY_SSLCERTTYPE: int +PROXY_SSLKEY: int +PROXY_SSLKEYTYPE: int +PROXY_SSL_VERIFYHOST: int +PROXY_SSL_VERIFYPEER: int +PROXY_TLS13_CIPHERS: int +PROXY_TRANSFER_MODE: int +PUT: int +QUOTE: int +RANDOM_FILE: int +RANGE: int +READDATA: int +READFUNCTION: int +READFUNC_ABORT: int +READFUNC_PAUSE: int +REDIRECT_COUNT: int +REDIRECT_TIME: int +REDIRECT_URL: int +REDIR_POST_301: 
int +REDIR_POST_302: int +REDIR_POST_303: int +REDIR_POST_ALL: int +REDIR_PROTOCOLS: int +REFERER: int +REQUEST_SIZE: int +RESOLVE: int +RESPONSE_CODE: int +RESUME_FROM: int +RESUME_FROM_LARGE: int +RTSPREQ_ANNOUNCE: int +RTSPREQ_DESCRIBE: int +RTSPREQ_GET_PARAMETER: int +RTSPREQ_LAST: int +RTSPREQ_NONE: int +RTSPREQ_OPTIONS: int +RTSPREQ_PAUSE: int +RTSPREQ_PLAY: int +RTSPREQ_RECEIVE: int +RTSPREQ_RECORD: int +RTSPREQ_SETUP: int +RTSPREQ_SET_PARAMETER: int +RTSPREQ_TEARDOWN: int +SASL_IR: int +SEEKFUNCTION: int +SEEKFUNC_CANTSEEK: int +SEEKFUNC_FAIL: int +SEEKFUNC_OK: int +SERVICE_NAME: int +SHARE: int +SH_SHARE: int +SH_UNSHARE: int +SIZE_DOWNLOAD: int +SIZE_UPLOAD: int +SOCKET_BAD: int +SOCKET_TIMEOUT: int +SOCKOPTFUNCTION: int +SOCKOPT_ALREADY_CONNECTED: int +SOCKOPT_ERROR: int +SOCKOPT_OK: int +SOCKS5_GSSAPI_NEC: int +SOCKS5_GSSAPI_SERVICE: int +SOCKTYPE_ACCEPT: int +SOCKTYPE_IPCXN: int +SPEED_DOWNLOAD: int +SPEED_UPLOAD: int +SSH_AUTH_AGENT: int +SSH_AUTH_ANY: int +SSH_AUTH_DEFAULT: int +SSH_AUTH_HOST: int +SSH_AUTH_KEYBOARD: int +SSH_AUTH_NONE: int +SSH_AUTH_PASSWORD: int +SSH_AUTH_PUBLICKEY: int +SSH_AUTH_TYPES: int +SSH_HOST_PUBLIC_KEY_MD5: int +SSH_KEYFUNCTION: int +SSH_KNOWNHOSTS: int +SSH_PRIVATE_KEYFILE: int +SSH_PUBLIC_KEYFILE: int +SSLCERT: int +SSLCERTPASSWD: int +SSLCERTTYPE: int +SSLENGINE: int +SSLENGINE_DEFAULT: int +SSLKEY: int +SSLKEYPASSWD: int +SSLKEYTYPE: int +SSLOPT_ALLOW_BEAST: int +SSLOPT_NO_REVOKE: int +SSLVERSION: int +SSLVERSION_DEFAULT: int +SSLVERSION_MAX_DEFAULT: int +SSLVERSION_MAX_TLSv1_0: int +SSLVERSION_MAX_TLSv1_1: int +SSLVERSION_MAX_TLSv1_2: int +SSLVERSION_MAX_TLSv1_3: int +SSLVERSION_SSLv2: int +SSLVERSION_SSLv3: int +SSLVERSION_TLSv1: int +SSLVERSION_TLSv1_0: int +SSLVERSION_TLSv1_1: int +SSLVERSION_TLSv1_2: int +SSLVERSION_TLSv1_3: int +SSL_CIPHER_LIST: int +SSL_ENABLE_ALPN: int +SSL_ENABLE_NPN: int +SSL_ENGINES: int +SSL_FALSESTART: int +SSL_OPTIONS: int +SSL_SESSIONID_CACHE: int +SSL_VERIFYHOST: int +SSL_VERIFYPEER: int +SSL_VERIFYRESULT: int +SSL_VERIFYSTATUS: int +STARTTRANSFER_TIME: int +STDERR: int +TCP_FASTOPEN: int +TCP_KEEPALIVE: int +TCP_KEEPIDLE: int +TCP_KEEPINTVL: int +TCP_NODELAY: int +TELNETOPTIONS: int +TFTP_BLKSIZE: int +TIMECONDITION: int +TIMECONDITION_IFMODSINCE: int +TIMECONDITION_IFUNMODSINCE: int +TIMECONDITION_LASTMOD: int +TIMECONDITION_NONE: int +TIMEOUT: int +TIMEOUT_MS: int +TIMEVALUE: int +TLS13_CIPHERS: int +TLSAUTH_PASSWORD: int +TLSAUTH_TYPE: int +TLSAUTH_USERNAME: int +TOTAL_TIME: int +TRANSFERTEXT: int +TRANSFER_ENCODING: int +UNIX_SOCKET_PATH: int +UNRESTRICTED_AUTH: int +UPLOAD: int +UPLOAD_BUFFERSIZE: int +URL: int +USERAGENT: int +USERNAME: int +USERPWD: int +USESSL_ALL: int +USESSL_CONTROL: int +USESSL_NONE: int +USESSL_TRY: int +USE_SSL: int +VERBOSE: int +VERSION_ASYNCHDNS: int +VERSION_CONV: int +VERSION_CURLDEBUG: int +VERSION_DEBUG: int +VERSION_GSSAPI: int +VERSION_GSSNEGOTIATE: int +VERSION_HTTP2: int +VERSION_IDN: int +VERSION_IPV6: int +VERSION_KERBEROS4: int +VERSION_KERBEROS5: int +VERSION_LARGEFILE: int +VERSION_LIBZ: int +VERSION_NTLM: int +VERSION_NTLM_WB: int +VERSION_PSL: int +VERSION_SPNEGO: int +VERSION_SSL: int +VERSION_SSPI: int +VERSION_TLSAUTH_SRP: int +VERSION_UNIX_SOCKETS: int +WILDCARDMATCH: int +WRITEDATA: int +WRITEFUNCTION: int +WRITEFUNC_PAUSE: int +WRITEHEADER: int +XFERINFOFUNCTION: int +XOAUTH2_BEARER: int + +E_ABORTED_BY_CALLBACK: int +E_AGAIN: int +E_ALREADY_COMPLETE: int +E_BAD_CALLING_ORDER: int +E_BAD_CONTENT_ENCODING: int +E_BAD_DOWNLOAD_RESUME: int 
+E_BAD_FUNCTION_ARGUMENT: int +E_BAD_PASSWORD_ENTERED: int +E_CALL_MULTI_PERFORM: int +E_CHUNK_FAILED: int +E_CONV_FAILED: int +E_CONV_REQD: int +E_COULDNT_CONNECT: int +E_COULDNT_RESOLVE_HOST: int +E_COULDNT_RESOLVE_PROXY: int +E_FAILED_INIT: int +E_FILESIZE_EXCEEDED: int +E_FILE_COULDNT_READ_FILE: int +E_FTP_ACCEPT_FAILED: int +E_FTP_ACCEPT_TIMEOUT: int +E_FTP_ACCESS_DENIED: int +E_FTP_BAD_DOWNLOAD_RESUME: int +E_FTP_BAD_FILE_LIST: int +E_FTP_CANT_GET_HOST: int +E_FTP_CANT_RECONNECT: int +E_FTP_COULDNT_GET_SIZE: int +E_FTP_COULDNT_RETR_FILE: int +E_FTP_COULDNT_SET_ASCII: int +E_FTP_COULDNT_SET_BINARY: int +E_FTP_COULDNT_SET_TYPE: int +E_FTP_COULDNT_STOR_FILE: int +E_FTP_COULDNT_USE_REST: int +E_FTP_PARTIAL_FILE: int +E_FTP_PORT_FAILED: int +E_FTP_PRET_FAILED: int +E_FTP_QUOTE_ERROR: int +E_FTP_SSL_FAILED: int +E_FTP_USER_PASSWORD_INCORRECT: int +E_FTP_WEIRD_227_FORMAT: int +E_FTP_WEIRD_PASS_REPLY: int +E_FTP_WEIRD_PASV_REPLY: int +E_FTP_WEIRD_SERVER_REPLY: int +E_FTP_WEIRD_USER_REPLY: int +E_FTP_WRITE_ERROR: int +E_FUNCTION_NOT_FOUND: int +E_GOT_NOTHING: int +E_HTTP2: int +E_HTTP_NOT_FOUND: int +E_HTTP_PORT_FAILED: int +E_HTTP_POST_ERROR: int +E_HTTP_RANGE_ERROR: int +E_HTTP_RETURNED_ERROR: int +E_INTERFACE_FAILED: int +E_LDAP_CANNOT_BIND: int +E_LDAP_INVALID_URL: int +E_LDAP_SEARCH_FAILED: int +E_LIBRARY_NOT_FOUND: int +E_LOGIN_DENIED: int +E_MALFORMAT_USER: int +E_MULTI_ADDED_ALREADY: int +E_MULTI_BAD_EASY_HANDLE: int +E_MULTI_BAD_HANDLE: int +E_MULTI_BAD_SOCKET: int +E_MULTI_CALL_MULTI_PERFORM: int +E_MULTI_CALL_MULTI_SOCKET: int +E_MULTI_INTERNAL_ERROR: int +E_MULTI_OK: int +E_MULTI_OUT_OF_MEMORY: int +E_MULTI_UNKNOWN_OPTION: int +E_NOT_BUILT_IN: int +E_NO_CONNECTION_AVAILABLE: int +E_OK: int +E_OPERATION_TIMEDOUT: int +E_OPERATION_TIMEOUTED: int +E_OUT_OF_MEMORY: int +E_PARTIAL_FILE: int +E_PEER_FAILED_VERIFICATION: int +E_QUOTE_ERROR: int +E_RANGE_ERROR: int +E_READ_ERROR: int +E_RECV_ERROR: int +E_REMOTE_ACCESS_DENIED: int +E_REMOTE_DISK_FULL: int +E_REMOTE_FILE_EXISTS: int +E_REMOTE_FILE_NOT_FOUND: int +E_RTSP_CSEQ_ERROR: int +E_RTSP_SESSION_ERROR: int +E_SEND_ERROR: int +E_SEND_FAIL_REWIND: int +E_SHARE_IN_USE: int +E_SSH: int +E_SSL_CACERT: int +E_SSL_CACERT_BADFILE: int +E_SSL_CERTPROBLEM: int +E_SSL_CIPHER: int +E_SSL_CONNECT_ERROR: int +E_SSL_CRL_BADFILE: int +E_SSL_ENGINE_INITFAILED: int +E_SSL_ENGINE_NOTFOUND: int +E_SSL_ENGINE_SETFAILED: int +E_SSL_INVALIDCERTSTATUS: int +E_SSL_ISSUER_ERROR: int +E_SSL_PEER_CERTIFICATE: int +E_SSL_PINNEDPUBKEYNOTMATCH: int +E_SSL_SHUTDOWN_FAILED: int +E_TELNET_OPTION_SYNTAX: int +E_TFTP_DISKFULL: int +E_TFTP_EXISTS: int +E_TFTP_ILLEGAL: int +E_TFTP_NOSUCHUSER: int +E_TFTP_NOTFOUND: int +E_TFTP_PERM: int +E_TFTP_UNKNOWNID: int +E_TOO_MANY_REDIRECTS: int +E_UNKNOWN_OPTION: int +E_UNKNOWN_TELNET_OPTION: int +E_UNSUPPORTED_PROTOCOL: int +E_UPLOAD_FAILED: int +E_URL_MALFORMAT: int +E_URL_MALFORMAT_USER: int +E_USE_SSL_FAILED: int +E_WRITE_ERROR: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyfarmhash/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyfarmhash/METADATA.toml new file mode 100644 index 00000000..d25c8f7d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyfarmhash/METADATA.toml @@ -0,0 +1 @@ +version = "0.3.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyfarmhash/farmhash.pyi 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyfarmhash/farmhash.pyi new file mode 100644 index 00000000..8b8fd0e9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyfarmhash/farmhash.pyi @@ -0,0 +1,9 @@ +def fingerprint128(__a: str) -> tuple[int, int]: ... +def fingerprint32(__a: str) -> int: ... +def fingerprint64(__a: str) -> int: ... +def hash128(__a: str) -> tuple[int, int]: ... +def hash128withseed(__a: str, __seed_low: int, __seed_high: int) -> tuple[int, int]: ... +def hash32(__a: str) -> int: ... +def hash32withseed(__a: str, __seed: int) -> int: ... +def hash64(__a: str) -> int: ... +def hash64withseed(__a: str, __seed: int) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..3ea35477 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/@tests/stubtest_allowlist.txt @@ -0,0 +1,25 @@ +# These all have class-level defaults that differ from the instance attributes +pyflakes.messages.DuplicateArgument.message_args +pyflakes.messages.ForwardAnnotationSyntaxError.message_args +pyflakes.messages.FutureFeatureNotDefined.message_args +pyflakes.messages.ImportShadowedByLoopVar.message_args +pyflakes.messages.ImportStarUsage.message_args +pyflakes.messages.ImportStarUsed.message_args +pyflakes.messages.MultiValueRepeatedKeyLiteral.message_args +pyflakes.messages.MultiValueRepeatedKeyVariable.message_args +pyflakes.messages.PercentFormatExtraNamedArguments.message_args +pyflakes.messages.PercentFormatInvalidFormat.message_args +pyflakes.messages.PercentFormatMissingArgument.message_args +pyflakes.messages.PercentFormatPositionalCountMismatch.message_args +pyflakes.messages.PercentFormatUnsupportedFormatCharacter.message_args +pyflakes.messages.RedefinedWhileUnused.message_args +pyflakes.messages.StringDotFormatExtraNamedArguments.message_args +pyflakes.messages.StringDotFormatExtraPositionalArguments.message_args +pyflakes.messages.StringDotFormatInvalidFormat.message_args +pyflakes.messages.StringDotFormatMissingArgument.message_args +pyflakes.messages.UndefinedExport.message_args +pyflakes.messages.UndefinedLocal.message_args +pyflakes.messages.UndefinedName.message_args +pyflakes.messages.UnusedAnnotation.message_args +pyflakes.messages.UnusedImport.message_args +pyflakes.messages.UnusedVariable.message_args diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/METADATA.toml new file mode 100644 index 00000000..7f11ff38 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/METADATA.toml @@ -0,0 +1,4 @@ +version = "3.0.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/__init__.pyi new file mode 100644 index 00000000..bda5b5a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/__init__.pyi @@ -0,0 +1 @@ +__version__: str diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/api.pyi new file mode 100644 index 00000000..945a7a39 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/api.pyi @@ -0,0 +1,16 @@ +from collections.abc import Iterable, Iterator, Sequence +from re import Pattern +from typing import Any + +from pyflakes.reporter import Reporter + +__all__ = ["check", "checkPath", "checkRecursive", "iterSourceCode", "main"] + +PYTHON_SHEBANG_REGEX: Pattern[bytes] + +def check(codeString: str, filename: str, reporter: Reporter | None = ...) -> int: ... +def checkPath(filename, reporter: Reporter | None = ...) -> int: ... +def isPythonFile(filename) -> bool: ... +def iterSourceCode(paths: Iterable[Any]) -> Iterator[Any]: ... +def checkRecursive(paths: Iterable[Any], reporter: Reporter) -> int: ... +def main(prog: str | None = ..., args: Sequence[Any] | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/checker.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/checker.pyi new file mode 100644 index 00000000..63a65638 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/checker.pyi @@ -0,0 +1,338 @@ +import ast +import sys +from collections.abc import Callable, Iterable, Iterator +from re import Pattern +from typing import Any, ClassVar, TypeVar, overload +from typing_extensions import Literal, ParamSpec, TypeAlias + +from pyflakes.messages import Message + +_AnyFunction: TypeAlias = Callable[..., Any] +_F = TypeVar("_F", bound=_AnyFunction) +_P = ParamSpec("_P") +_T = TypeVar("_T") + +PY38_PLUS: bool +PYPY: bool + +def getAlternatives(n: ast.If | ast.Try) -> list[ast.AST]: ... + +FOR_TYPES: tuple[type[ast.For], type[ast.AsyncFor]] +MAPPING_KEY_RE: Pattern[str] +CONVERSION_FLAG_RE: Pattern[str] +WIDTH_RE: Pattern[str] +PRECISION_RE: Pattern[str] +LENGTH_RE: Pattern[str] +VALID_CONVERSIONS: frozenset[str] + +_FormatType: TypeAlias = tuple[str | None, str | None, str | None, str | None, str] +_PercentFormat: TypeAlias = tuple[str, _FormatType | None] + +def parse_percent_format(s: str) -> tuple[_PercentFormat, ...]: ... + +class _FieldsOrder(dict[type[ast.AST], tuple[str, ...]]): + def __missing__(self, node_class: type[ast.AST]) -> tuple[str, ...]: ... + +def counter(items: Iterable[_T]) -> dict[_T, int]: ... + +_OmitType: TypeAlias = str | tuple[str, ...] | None + +def iter_child_nodes(node: ast.AST, omit: _OmitType = ..., _fields_order: _FieldsOrder = ...) -> Iterator[ast.AST]: ... +@overload +def convert_to_value(item: ast.Str) -> str: ... # type: ignore[misc] +@overload +def convert_to_value(item: ast.Bytes) -> bytes: ... # type: ignore[misc] +@overload +def convert_to_value(item: ast.Tuple) -> tuple[Any, ...]: ... # type: ignore[misc] +@overload +def convert_to_value(item: ast.Name | ast.NameConstant) -> Any: ... +@overload +def convert_to_value(item: ast.AST) -> UnhandledKeyType: ... +def is_notimplemented_name_node(node: object) -> bool: ... + +class Binding: + name: str + source: ast.AST | None + used: Literal[False] | tuple[Any, ast.AST] + def __init__(self, name: str, source: ast.AST | None) -> None: ... + def redefines(self, other: Binding) -> bool: ... + +class Definition(Binding): ... 
+ +class Builtin(Definition): + def __init__(self, name: str) -> None: ... + +class UnhandledKeyType: ... + +class VariableKey: + name: str + def __init__(self, item: ast.Name) -> None: ... + def __eq__(self, compare: object) -> bool: ... + def __hash__(self) -> int: ... + +class Importation(Definition): + fullName: str + redefined: list[Any] + def __init__(self, name: str, source: ast.AST | None, full_name: str | None = ...) -> None: ... + @property + def source_statement(self) -> str: ... + +class SubmoduleImportation(Importation): + def __init__(self, name: str, source: ast.Import | None) -> None: ... + +class ImportationFrom(Importation): + module: str + real_name: str + def __init__(self, name: str, source: ast.AST, module: str, real_name: str | None = ...) -> None: ... + +class StarImportation(Importation): + def __init__(self, name: str, source: ast.AST) -> None: ... + +class FutureImportation(ImportationFrom): + used: tuple[Any, ast.AST] + def __init__(self, name: str, source: ast.AST, scope) -> None: ... + +class Argument(Binding): ... +class Assignment(Binding): ... + +class Annotation(Binding): + def redefines(self, other: Binding) -> Literal[False]: ... + +class FunctionDefinition(Definition): ... +class ClassDefinition(Definition): ... + +class ExportBinding(Binding): + names: list[str] + def __init__(self, name: str, source: ast.AST, scope: Scope) -> None: ... + +class Scope(dict[str, Binding]): + importStarred: bool + +class ClassScope(Scope): ... + +class FunctionScope(Scope): + usesLocals: bool + alwaysUsed: ClassVar[set[str]] + globals: set[str] + returnValue: Any + isGenerator: bool + def __init__(self) -> None: ... + def unused_assignments(self) -> Iterator[tuple[str, Binding]]: ... + def unused_annotations(self) -> Iterator[tuple[str, Annotation]]: ... + +class GeneratorScope(Scope): ... +class ModuleScope(Scope): ... +class DoctestScope(ModuleScope): ... + +class DetectClassScopedMagic: + names: list[str] + +def getNodeName(node: ast.AST) -> str: ... + +TYPING_MODULES: frozenset[Literal["typing", "typing_extensions"]] + +def is_typing_overload(value: Binding, scope_stack) -> bool: ... + +class AnnotationState: + NONE: ClassVar[Literal[0]] + STRING: ClassVar[Literal[1]] + BARE: ClassVar[Literal[2]] + +def in_annotation(func: _F) -> _F: ... +def in_string_annotation(func: _F) -> _F: ... 
+ +if sys.version_info >= (3, 8): + _NamedExpr: TypeAlias = ast.NamedExpr +else: + _NamedExpr: TypeAlias = Any + +if sys.version_info >= (3, 10): + _Match: TypeAlias = ast.Match + _MatchCase: TypeAlias = ast.match_case + _MatchValue: TypeAlias = ast.MatchValue + _MatchSingleton: TypeAlias = ast.MatchSingleton + _MatchSequence: TypeAlias = ast.MatchSequence + _MatchStar: TypeAlias = ast.MatchStar + _MatchMapping: TypeAlias = ast.MatchMapping + _MatchClass: TypeAlias = ast.MatchClass + _MatchAs: TypeAlias = ast.MatchAs + _MatchOr: TypeAlias = ast.MatchOr +else: + _Match: TypeAlias = Any + _MatchCase: TypeAlias = Any + _MatchValue: TypeAlias = Any + _MatchSingleton: TypeAlias = Any + _MatchSequence: TypeAlias = Any + _MatchStar: TypeAlias = Any + _MatchMapping: TypeAlias = Any + _MatchClass: TypeAlias = Any + _MatchAs: TypeAlias = Any + _MatchOr: TypeAlias = Any + +class Checker: + nodeDepth: int + offset: tuple[int, int] | None + builtIns: set[str] + deadScopes: list[Any] + messages: list[Any] + filename: str + withDoctest: bool + scopeStack: list[Scope] + exceptHandlers: list[Any] + root: ast.AST + def __init__( + self, + tree: ast.AST, + filename: str = ..., + builtins: Iterable[str] | None = ..., + withDoctest: bool = ..., + file_tokens: tuple[Any, ...] = ..., + ) -> None: ... + def deferFunction(self, callable: _AnyFunction) -> None: ... + def deferAssignment(self, callable: _AnyFunction) -> None: ... + def runDeferred(self, deferred: _AnyFunction) -> None: ... + @property + def futuresAllowed(self) -> bool: ... + @futuresAllowed.setter + def futuresAllowed(self, value: Literal[False]) -> None: ... + @property + def annotationsFutureEnabled(self) -> bool: ... + @annotationsFutureEnabled.setter + def annotationsFutureEnabled(self, value: Literal[True]) -> None: ... + @property + def scope(self) -> Scope: ... + def popScope(self) -> None: ... + def checkDeadScopes(self) -> None: ... + def pushScope(self, scopeClass: type[Scope] = ...) -> None: ... + def report(self, messageClass: Callable[_P, Message], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def getParent(self, node: ast.AST) -> ast.AST: ... + def getCommonAncestor(self, lnode: ast.AST, rnode: ast.AST, stop: ast.AST) -> ast.AST: ... + def descendantOf(self, node: ast.AST, ancestors: ast.AST, stop: ast.AST) -> bool: ... + def getScopeNode(self, node: ast.AST) -> ast.AST | None: ... + def differentForks(self, lnode: ast.AST, rnode: ast.AST) -> bool: ... + def addBinding(self, node: ast.AST, value: Binding) -> None: ... + def getNodeHandler(self, node_class: type[ast.AST]): ... + def handleNodeLoad(self, node: ast.AST, parent: ast.AST) -> None: ... + def handleNodeStore(self, node: ast.AST) -> None: ... + def handleNodeDelete(self, node: ast.AST) -> None: ... + def handleChildren(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def isLiteralTupleUnpacking(self, node: ast.AST) -> bool | None: ... + def isDocstring(self, node: ast.AST) -> bool: ... + def getDocstring(self, node: ast.AST) -> tuple[str, int] | tuple[None, None]: ... + def handleNode(self, node: ast.AST | None, parent) -> None: ... + def handleDoctests(self, node: ast.AST) -> None: ... + def handleStringAnnotation(self, s: str, node: ast.AST, ref_lineno: int, ref_col_offset: int, err: type[Message]) -> None: ... + def handleAnnotation(self, annotation: ast.AST, node: ast.AST) -> None: ... + def ignore(self, node: ast.AST) -> None: ... + def DELETE(self, tree: ast.Delete, omit: _OmitType = ...) -> None: ... + def FOR(self, tree: ast.For, omit: _OmitType = ...) 
-> None: ... + def ASYNCFOR(self, tree: ast.AsyncFor, omit: _OmitType = ...) -> None: ... + def WHILE(self, tree: ast.While, omit: _OmitType = ...) -> None: ... + def WITH(self, tree: ast.With, omit: _OmitType = ...) -> None: ... + def WITHITEM(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def ASYNCWITH(self, tree: ast.AsyncWith, omit: _OmitType = ...) -> None: ... + def EXPR(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def ASSIGN(self, tree: ast.Assign, omit: _OmitType = ...) -> None: ... + def PASS(self, node: ast.AST) -> None: ... + def BOOLOP(self, tree: ast.BoolOp, omit: _OmitType = ...) -> None: ... + def UNARYOP(self, tree: ast.UnaryOp, omit: _OmitType = ...) -> None: ... + def SET(self, tree: ast.Set, omit: _OmitType = ...) -> None: ... + def ATTRIBUTE(self, tree: ast.Attribute, omit: _OmitType = ...) -> None: ... + def STARRED(self, tree: ast.Starred, omit: _OmitType = ...) -> None: ... + def NAMECONSTANT(self, tree: ast.NameConstant, omit: _OmitType = ...) -> None: ... + def NAMEDEXPR(self, tree: _NamedExpr, omit: _OmitType = ...) -> None: ... + def SUBSCRIPT(self, node: ast.Subscript) -> None: ... + def CALL(self, node: ast.Call) -> None: ... + def BINOP(self, node: ast.BinOp) -> None: ... + def CONSTANT(self, node: ast.Constant) -> None: ... + if sys.version_info < (3, 8): + def NUM(self, node: ast.Num) -> None: ... + def BYTES(self, node: ast.Bytes) -> None: ... + def ELLIPSIS(self, node: ast.Ellipsis) -> None: ... + + def STR(self, node: ast.Str) -> None: ... + def SLICE(self, tree: ast.Slice, omit: _OmitType = ...) -> None: ... + def EXTSLICE(self, tree: ast.ExtSlice, omit: _OmitType = ...) -> None: ... + def INDEX(self, tree: ast.Index, omit: _OmitType = ...) -> None: ... + def LOAD(self, node: ast.Load) -> None: ... + def STORE(self, node: ast.Store) -> None: ... + def DEL(self, node: ast.Del) -> None: ... + def AUGLOAD(self, node: ast.AugLoad) -> None: ... + def AUGSTORE(self, node: ast.AugStore) -> None: ... + def PARAM(self, node: ast.Param) -> None: ... + def AND(self, node: ast.And) -> None: ... + def OR(self, node: ast.Or) -> None: ... + def ADD(self, node: ast.Add) -> None: ... + def SUB(self, node: ast.Sub) -> None: ... + def MULT(self, node: ast.Mult) -> None: ... + def DIV(self, node: ast.Div) -> None: ... + def MOD(self, node: ast.Mod) -> None: ... + def POW(self, node: ast.Pow) -> None: ... + def LSHIFT(self, node: ast.LShift) -> None: ... + def RSHIFT(self, node: ast.RShift) -> None: ... + def BITOR(self, node: ast.BitOr) -> None: ... + def BITXOR(self, node: ast.BitXor) -> None: ... + def BITAND(self, node: ast.BitAnd) -> None: ... + def FLOORDIV(self, node: ast.FloorDiv) -> None: ... + def INVERT(self, node: ast.Invert) -> None: ... + def NOT(self, node: ast.Not) -> None: ... + def UADD(self, node: ast.UAdd) -> None: ... + def USUB(self, node: ast.USub) -> None: ... + def EQ(self, node: ast.Eq) -> None: ... + def NOTEQ(self, node: ast.NotEq) -> None: ... + def LT(self, node: ast.Lt) -> None: ... + def LTE(self, node: ast.LtE) -> None: ... + def GT(self, node: ast.Gt) -> None: ... + def GTE(self, node: ast.GtE) -> None: ... + def IS(self, node: ast.Is) -> None: ... + def ISNOT(self, node: ast.IsNot) -> None: ... + def IN(self, node: ast.In) -> None: ... + def NOTIN(self, node: ast.NotIn) -> None: ... + def MATMULT(self, node: ast.MatMult) -> None: ... + def RAISE(self, node: ast.Raise) -> None: ... + def COMPREHENSION(self, tree: ast.comprehension, omit: _OmitType = ...) -> None: ... 
+ def KEYWORD(self, tree: ast.keyword, omit: _OmitType = ...) -> None: ... + def FORMATTEDVALUE(self, tree: ast.FormattedValue, omit: _OmitType = ...) -> None: ... + def JOINEDSTR(self, node: ast.AST) -> None: ... + def DICT(self, node: ast.Dict) -> None: ... + def IF(self, node: ast.If) -> None: ... + def IFEXP(self, node: ast.If) -> None: ... + def ASSERT(self, node: ast.Assert) -> None: ... + def GLOBAL(self, node: ast.Global) -> None: ... + def NONLOCAL(self, node: ast.Nonlocal) -> None: ... + def GENERATOREXP(self, node: ast.GeneratorExp) -> None: ... + def LISTCOMP(self, node: ast.ListComp) -> None: ... + def DICTCOMP(self, node: ast.DictComp) -> None: ... + def SETCOMP(self, node: ast.SetComp) -> None: ... + def NAME(self, node: ast.Name) -> None: ... + def CONTINUE(self, node: ast.Continue) -> None: ... + def BREAK(self, node: ast.Break) -> None: ... + def RETURN(self, node: ast.Return) -> None: ... + def YIELD(self, node: ast.Yield) -> None: ... + def AWAIT(self, node: ast.Await) -> None: ... + def YIELDFROM(self, node: ast.YieldFrom) -> None: ... + def FUNCTIONDEF(self, node: ast.FunctionDef) -> None: ... + def ASYNCFUNCTIONDEF(self, node: ast.AsyncFunctionDef) -> None: ... + def LAMBDA(self, node: ast.Lambda) -> None: ... + def ARGUMENTS(self, node: ast.arguments) -> None: ... + def ARG(self, node: ast.arg) -> None: ... + def CLASSDEF(self, node: ast.ClassDef): ... + def AUGASSIGN(self, node: ast.AugAssign) -> None: ... + def TUPLE(self, node: ast.Tuple) -> None: ... + def LIST(self, node: ast.List) -> None: ... + def IMPORT(self, node: ast.Import) -> None: ... + def IMPORTFROM(self, node: ast.ImportFrom) -> None: ... + def TRY(self, node: ast.Try) -> None: ... + def EXCEPTHANDLER(self, node: ast.ExceptHandler) -> None: ... + def ANNASSIGN(self, node: ast.AnnAssign) -> None: ... + def COMPARE(self, node: ast.Compare) -> None: ... + def MATCH(self, tree: _Match, omit: _OmitType = ...) -> None: ... + def MATCH_CASE(self, tree: _MatchCase, omit: _OmitType = ...) -> None: ... + def MATCHCLASS(self, tree: _MatchClass, omit: _OmitType = ...) -> None: ... + def MATCHOR(self, tree: _MatchOr, omit: _OmitType = ...) -> None: ... + def MATCHSEQUENCE(self, tree: _MatchSequence, omit: _OmitType = ...) -> None: ... + def MATCHSINGLETON(self, tree: _MatchSingleton, omit: _OmitType = ...) -> None: ... + def MATCHVALUE(self, tree: _MatchValue, omit: _OmitType = ...) -> None: ... + def MATCHAS(self, node: _MatchAs) -> None: ... + def MATCHMAPPING(self, node: _MatchMapping) -> None: ... + def MATCHSTAR(self, node: _MatchStar) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/messages.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/messages.pyi new file mode 100644 index 00000000..9cad0698 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/messages.pyi @@ -0,0 +1,144 @@ +import ast +from typing import Any, ClassVar + +class Message: + message: ClassVar[str] + message_args: tuple[Any, ...] + filename: Any + lineno: int + col: int + def __init__(self, filename, loc: ast.AST) -> None: ... + +class UnusedImport(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... + +class RedefinedWhileUnused(Message): + message_args: tuple[Any, int] + def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... 
+ +class ImportShadowedByLoopVar(Message): + message_args: tuple[Any, int] + def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + +class ImportStarNotPermitted(Message): + message_args: Any + def __init__(self, filename, loc, modname) -> None: ... + +class ImportStarUsed(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, modname) -> None: ... + +class ImportStarUsage(Message): + message_args: tuple[Any, Any] + def __init__(self, filename, loc: ast.AST, name, from_list) -> None: ... + +class UndefinedName(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... + +class DoctestSyntaxError(Message): + message_args: tuple[()] + def __init__(self, filename, loc: ast.AST, position: tuple[int, int] | None = ...) -> None: ... + +class UndefinedExport(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... + +class UndefinedLocal(Message): + default: ClassVar[str] + builtin: ClassVar[str] + message_args: tuple[Any, int] + def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + +class DuplicateArgument(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... + +class MultiValueRepeatedKeyLiteral(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, key) -> None: ... + +class MultiValueRepeatedKeyVariable(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, key) -> None: ... + +class LateFutureImport(Message): + message_args: tuple[()] + def __init__(self, filename, loc: ast.AST) -> None: ... + +class FutureFeatureNotDefined(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... + +class UnusedVariable(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, names) -> None: ... + +class UnusedAnnotation(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, names) -> None: ... + +class ReturnOutsideFunction(Message): ... +class YieldOutsideFunction(Message): ... +class ContinueOutsideLoop(Message): ... +class BreakOutsideLoop(Message): ... +class ContinueInFinally(Message): ... +class DefaultExceptNotLast(Message): ... +class TwoStarredExpressions(Message): ... +class TooManyExpressionsInStarredAssignment(Message): ... +class IfTuple(Message): ... +class AssertTuple(Message): ... + +class ForwardAnnotationSyntaxError(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, annotation) -> None: ... + +class RaiseNotImplemented(Message): ... +class InvalidPrintSyntax(Message): ... +class IsLiteral(Message): ... +class FStringMissingPlaceholders(Message): ... + +class StringDotFormatExtraPositionalArguments(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, extra_positions) -> None: ... + +class StringDotFormatExtraNamedArguments(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, extra_keywords) -> None: ... + +class StringDotFormatMissingArgument(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, missing_arguments) -> None: ... + +class StringDotFormatMixingAutomatic(Message): ... + +class StringDotFormatInvalidFormat(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, error) -> None: ... 
+ +class PercentFormatInvalidFormat(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, error) -> None: ... + +class PercentFormatMixedPositionalAndNamed(Message): ... + +class PercentFormatUnsupportedFormatCharacter(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, c) -> None: ... + +class PercentFormatPositionalCountMismatch(Message): + message_args: tuple[int, int] + def __init__(self, filename, loc: ast.AST, n_placeholders: int, n_substitutions: int) -> None: ... + +class PercentFormatExtraNamedArguments(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, extra_keywords) -> None: ... + +class PercentFormatMissingArgument(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, missing_arguments) -> None: ... + +class PercentFormatExpectedMapping(Message): ... +class PercentFormatExpectedSequence(Message): ... +class PercentFormatStarRequiresSequence(Message): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/reporter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/reporter.pyi new file mode 100644 index 00000000..5b15ee96 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyflakes/pyflakes/reporter.pyi @@ -0,0 +1,5 @@ +class Reporter: + def __init__(self, warningStream, errorStream) -> None: ... + def unexpectedError(self, filename, msg) -> None: ... + def syntaxError(self, filename, msg, lineno, offset, text) -> None: ... + def flake(self, message) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..683bb11b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/@tests/stubtest_allowlist.txt @@ -0,0 +1,40 @@ +# fake module, only exists once the app is frozen +pyi_splash + +# Undocumented and clearly not meant to be exposed +PyInstaller.__main__.generate_parser +PyInstaller.__main__.run_build +PyInstaller.__main__.run_makespec +PyInstaller.utils.hooks.conda.lib_dir + +# A mix of modules meant to be private, and shallow incomplete type references for other modules +PyInstaller.building.* +PyInstaller.depend.analysis.* +PyInstaller.isolated._parent.* + +# Most modules are not meant to be used, yet are not marked as private +PyInstaller.archive.* +PyInstaller.config +PyInstaller.configure +PyInstaller.depend.bindepend +PyInstaller.depend.bytecode +PyInstaller.depend.dylib +PyInstaller.depend.imphook +PyInstaller.depend.utils +PyInstaller.exceptions +PyInstaller.hooks.* +PyInstaller.lib.* +PyInstaller.loader.* +PyInstaller.log +PyInstaller.utils.cliutils.* +PyInstaller.utils.conftest +PyInstaller.utils.git +PyInstaller.utils.hooks.django +PyInstaller.utils.hooks.gi +PyInstaller.utils.hooks.qt +PyInstaller.utils.hooks.tcl_tk +PyInstaller.utils.misc +PyInstaller.utils.osx +PyInstaller.utils.run_tests +PyInstaller.utils.tests +PyInstaller.utils.win32.* diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/METADATA.toml new file mode 100644 index 00000000..1f9b6fdb --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/METADATA.toml @@ -0,0 +1,2 @@ +version = "5.8.*" +requires = ["types-setuptools"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/__init__.pyi new file mode 100644 index 00000000..fc98d648 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/__init__.pyi @@ -0,0 +1,11 @@ +from typing_extensions import Final, LiteralString + +from PyInstaller import compat as compat + +__all__ = ("HOMEPATH", "PLATFORM", "__version__", "DEFAULT_DISTPATH", "DEFAULT_SPECPATH", "DEFAULT_WORKPATH") +__version__: Final[str] +HOMEPATH: Final[str] +DEFAULT_SPECPATH: Final[str] +DEFAULT_DISTPATH: Final[str] +DEFAULT_WORKPATH: Final[str] +PLATFORM: Final[LiteralString] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/__main__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/__main__.pyi new file mode 100644 index 00000000..971aa66a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/__main__.pyi @@ -0,0 +1,14 @@ +# https://pyinstaller.org/en/stable/usage.html#running-pyinstaller-from-python-code +import logging +from _typeshed import SupportsKeysAndGetItem +from collections.abc import Iterable +from typing_extensions import TypeAlias + +# Used to update PyInstaller.config.CONF +_PyIConfig: TypeAlias = ( + SupportsKeysAndGetItem[str, bool | str | list[str] | None] | Iterable[tuple[str, bool | str | list[str] | None]] +) + +logger: logging.Logger + +def run(pyi_args: Iterable[str] | None = None, pyi_config: _PyIConfig | None = None) -> None: ... 
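The run() entry point stubbed above corresponds to the documented way of driving PyInstaller from Python code instead of the command line (see the URL cited in the stub). A minimal sketch of that usage, assuming PyInstaller is installed; the script name and flags are placeholder arguments, not part of the stub:

import PyInstaller.__main__

# Roughly equivalent to: pyinstaller myscript.py --onefile --name demo
# "myscript.py" and the option values are hypothetical, for illustration only.
PyInstaller.__main__.run(["myscript.py", "--onefile", "--name", "demo"])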
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/building/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/building/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/building/build_main.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/building/build_main.pyi new file mode 100644 index 00000000..ccde42bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/building/build_main.pyi @@ -0,0 +1,29 @@ +# Referenced in: https://pyinstaller.org/en/stable/hooks.html?highlight=get_hook_config#PyInstaller.utils.hooks.get_hook_config +# Not to be imported during runtime, but is the type reference for hooks and analysis configuration + +from _typeshed import Incomplete, StrPath +from collections.abc import Iterable +from typing import Any + +from PyInstaller.building.datastruct import Target + +class Analysis(Target): + # https://pyinstaller.org/en/stable/hooks-config.html#hook-configuration-options + hooksconfig: dict[str, dict[str, object]] + def __init__( + self, + scripts: Iterable[StrPath], + pathex: Incomplete | None = None, + binaries: Incomplete | None = None, + datas: Incomplete | None = None, + hiddenimports: Incomplete | None = None, + hookspath: Incomplete | None = None, + hooksconfig: dict[str, dict[str, Any]] | None = None, + excludes: Incomplete | None = None, + runtime_hooks: Incomplete | None = None, + cipher: Incomplete | None = None, + win_no_prefer_redirects: bool = False, + win_private_assemblies: bool = False, + noarchive: bool = False, + module_collection_mode: Incomplete | None = None, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/building/datastruct.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/building/datastruct.pyi new file mode 100644 index 00000000..e225e60b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/building/datastruct.pyi @@ -0,0 +1,34 @@ +# https://pyinstaller.org/en/stable/advanced-topics.html#the-toc-and-tree-classes +from collections.abc import Iterable, Sequence +from typing import ClassVar +from typing_extensions import Literal, LiteralString, SupportsIndex, TypeAlias + +_TypeCode: TypeAlias = Literal["DATA", "BINARY", "EXTENSION", "OPTION"] +_TOCTuple: TypeAlias = tuple[str, str | None, _TypeCode | None] + +class TOC(list[_TOCTuple]): + filenames: set[str] + def __init__(self, initlist: Iterable[_TOCTuple] | None = None) -> None: ... + def append(self, entry: _TOCTuple) -> None: ... + def insert(self, pos: SupportsIndex, entry: _TOCTuple) -> None: ... + def extend(self, other: Iterable[_TOCTuple]) -> None: ... + +class Target: + invcnum: ClassVar[int] + tocfilename: LiteralString + tocbasename: LiteralString + dependencies: TOC + +class Tree(Target, TOC): + root: str | None + prefix: str | None + excludes: Sequence[str] + typecode: _TypeCode + def __init__( + self, + root: str | None = None, + prefix: str | None = None, + excludes: Sequence[str] | None = None, + typecode: _TypeCode = "DATA", + ) -> None: ... + def assemble(self) -> None: ... 
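The TOC and Tree types above are the objects a PyInstaller .spec file manipulates. A hedged sketch of how they are typically constructed, assuming a PyInstaller 5.x layout and purely illustrative paths:

from PyInstaller.building.datastruct import TOC, Tree

# TOC entries are (dest_name, src_name, typecode) triples, as typed above.
# The concrete paths are placeholders.
extra_datas = TOC([
    ("config/settings.ini", "/abs/path/to/settings.ini", "DATA"),
])

# Tree expands a directory into the same kind of triples; typecode defaults to "DATA".
assets = Tree("assets", prefix="assets", excludes=["*.tmp"])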
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/compat.pyi new file mode 100644 index 00000000..664a9b2d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/compat.pyi @@ -0,0 +1,81 @@ +# https://pyinstaller.org/en/stable/hooks.html#module-PyInstaller.compat +from _typeshed import FileDescriptorOrPath, GenericPath +from collections.abc import Iterable +from types import ModuleType +from typing import AnyStr, overload +from typing_extensions import Final, Literal + +strict_collect_mode: bool +is_64bits: Final[bool] +is_py35: Final = True +is_py36: Final = True +is_py37: Final[bool] +is_py38: Final[bool] +is_py39: Final[bool] +is_py310: Final[bool] +is_py311: Final[bool] +is_win: Final[bool] +is_win_10: Final[bool] +is_win_wine: Final[bool] +is_cygwin: Final[bool] +is_darwin: Final[bool] +is_linux: Final[bool] +is_solar: Final[bool] +is_aix: Final[bool] +is_freebsd: Final[bool] +is_openbsd: Final[bool] +is_hpux: Final[bool] +is_unix: Final[bool] +is_musl: Final[bool] +is_macos_11_compat: Final[bool] +is_macos_11_native: Final[bool] +is_macos_11: Final[bool] +PYDYLIB_NAMES: Final[set[str]] +base_prefix: Final[str] +is_venv: Final[bool] +is_virtualenv: Final[bool] +is_conda: Final[bool] +is_pure_conda: Final[bool] +python_executable: Final[str] +is_ms_app_store: Final[bool] +BYTECODE_MAGIC: Final[bytes] +EXTENSION_SUFFIXES: Final[list[str]] +ALL_SUFFIXES: Final[list[str]] + +architecture: Final[Literal["64bit", "n32bit", "32bit"]] +system: Final[Literal["Cygwin", "Linux", "Darwin", "Java", "Windows"]] +machine: Final[Literal["sw_64", "loongarch64", "arm", "intel", "ppc", "mips", "riscv", "s390x", "unknown", None]] + +def is_wine_dll(filename: FileDescriptorOrPath) -> bool: ... +@overload +def getenv(name: str, default: str) -> str: ... +@overload +def getenv(name: str, default: None = None) -> str | None: ... +def setenv(name: str, value: str) -> None: ... +def unsetenv(name: str) -> None: ... +def exec_command( + *cmdargs: str, encoding: str | None = None, raise_enoent: bool | None = None, **kwargs: int | bool | Iterable[int] | None +) -> str: ... +def exec_command_rc(*cmdargs: str, **kwargs: float | bool | Iterable[int] | None) -> int: ... +def exec_command_stdout( + *command_args: str, encoding: str | None = None, **kwargs: float | str | bytes | bool | Iterable[int] | None +) -> str: ... +def exec_command_all( + *cmdargs: str, encoding: str | None = None, **kwargs: int | bool | Iterable[int] | None +) -> tuple[int, str, str]: ... +def exec_python(*args: str, **kwargs: str | None) -> str: ... +def exec_python_rc(*args: str, **kwargs: str | None) -> int: ... +def expand_path(path: GenericPath[AnyStr]) -> AnyStr: ... +def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: ... +def importlib_load_source(name: str, pathname: str) -> ModuleType: ... + +PY3_BASE_MODULES: Final[set[str]] +PURE_PYTHON_MODULE_TYPES: Final[set[str]] +SPECIAL_MODULE_TYPES: Final[set[str]] +BINARY_MODULE_TYPES: Final[set[str]] +VALID_MODULE_TYPES: Final[set[str]] +BAD_MODULE_TYPES: Final[set[str]] +ALL_MODULE_TYPES: Final[set[str]] +MODULE_TYPES_TO_TOC_DICT: Final[dict[str, str]] + +def check_requirements() -> None: ... 
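compat.pyi is mostly platform flags plus a few subprocess helpers; hook code usually just branches on the booleans. A tiny sketch, with a made-up library name that is not part of the stub:

from PyInstaller.compat import is_darwin, is_win

# "libexample" is a hypothetical native library used only for illustration.
if is_win:
    lib_name = "libexample.dll"
elif is_darwin:
    lib_name = "libexample.dylib"
else:
    lib_name = "libexample.so"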
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/depend/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/depend/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/depend/analysis.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/depend/analysis.pyi new file mode 100644 index 00000000..9c7f7966 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/depend/analysis.pyi @@ -0,0 +1,27 @@ +# https://pyinstaller.org/en/stable/hooks.html#the-pre-safe-import-module-psim-api-method + +# The documentation explicitely mentions that "Normally you do not need to know about the module-graph." +# However, some PyiModuleGraph typed class attributes are still documented as existing in imphookapi. +from _typeshed import Incomplete, StrPath, SupportsKeysAndGetItem +from collections.abc import Iterable +from typing_extensions import TypeAlias + +from PyInstaller.lib.modulegraph.modulegraph import Alias, Node + +_LazyNode: TypeAlias = Iterable[Node] | Iterable[str] | Alias | None +# from altgraph.Graph import Graph +_Graph: TypeAlias = Incomplete + +class PyiModuleGraph: # incomplete + def __init__( + self, + pyi_homepath: str, + user_hook_dirs: Iterable[StrPath] = ..., + excludes: Iterable[str] = ..., + *, + path: Iterable[str] | None = None, + replace_paths: Iterable[tuple[StrPath, StrPath]] = ..., + implies: SupportsKeysAndGetItem[str, _LazyNode] | Iterable[tuple[str, _LazyNode]] = ..., + graph: _Graph | None = None, + debug: bool = False, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/depend/imphookapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/depend/imphookapi.pyi new file mode 100644 index 00000000..a5fddf4b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/depend/imphookapi.pyi @@ -0,0 +1,70 @@ +# https://pyinstaller.org/en/stable/hooks-config.html#adding-an-option-to-the-hook `hook_api` is a PostGraphAPI +# Nothing in this module is meant to be initialized externally. +# Instances are exposed through hooks during build. + +from _typeshed import StrOrBytesPath +from collections.abc import Generator, Iterable +from types import CodeType +from typing_extensions import Literal + +from PyInstaller.building.build_main import Analysis +from PyInstaller.building.datastruct import TOC +from PyInstaller.depend.analysis import PyiModuleGraph +from PyInstaller.lib.modulegraph.modulegraph import Package + +# https://pyinstaller.org/en/stable/hooks.html#the-pre-safe-import-module-psim-api-method +class PreSafeImportModuleAPI: + module_basename: str + module_name: str + def __init__( + self, module_graph: PyiModuleGraph, module_basename: str, module_name: str, parent_package: Package | None + ) -> None: ... + @property + def module_graph(self) -> PyiModuleGraph: ... + @property + def parent_package(self) -> Package | None: ... + def add_runtime_module(self, module_name: str) -> None: ... + def add_runtime_package(self, package_name: str) -> None: ... + def add_alias_module(self, real_module_name: str, alias_module_name: str) -> None: ... 
+ def append_package_path(self, directory: str) -> None: ... + +# https://pyinstaller.org/en/stable/hooks.html#the-pre-find-module-path-pfmp-api-method +class PreFindModulePathAPI: + search_dirs: Iterable[StrOrBytesPath] + def __init__(self, module_graph: PyiModuleGraph, module_name: str, search_dirs: Iterable[StrOrBytesPath]) -> None: ... + @property + def module_graph(self) -> PyiModuleGraph: ... + @property + def module_name(self) -> str: ... + +# https://pyinstaller.org/en/stable/hooks.html#the-hook-hook-api-function +class PostGraphAPI: + module_graph: PyiModuleGraph + module: Package + def __init__(self, module_name: str, module_graph: PyiModuleGraph, analysis: Analysis) -> None: ... + @property + def __file__(self) -> str: ... + @property + def __path__(self) -> tuple[str, ...] | None: ... + @property + def __name__(self) -> str: ... + # Compiled code. See stdlib.builtins.compile + @property + def co(self) -> CodeType: ... + @property + def analysis(self) -> Analysis: ... + @property + def name(self) -> str: ... + @property + def graph(self) -> PyiModuleGraph: ... + @property + def node(self) -> Package: ... + @property + def imports(self) -> Generator[Package, None, None]: ... + def add_imports(self, *module_names: str) -> None: ... + def del_imports(self, *module_names: str) -> None: ... + def add_binaries(self, list_of_tuples: TOC | Iterable[tuple[StrOrBytesPath, StrOrBytesPath]]) -> None: ... + def add_datas(self, list_of_tuples: TOC | Iterable[tuple[StrOrBytesPath, StrOrBytesPath]]) -> None: ... + def set_module_collection_mode( + self, name: str | None, mode: Literal["pyz", "pyc", "py", "pyz+py", "py+pyz", None] + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/isolated/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/isolated/__init__.pyi new file mode 100644 index 00000000..6f084bfc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/isolated/__init__.pyi @@ -0,0 +1,2 @@ +# https://pyinstaller.org/en/stable/hooks.html#module-PyInstaller.isolated +from PyInstaller.isolated._parent import Python as Python, call as call, decorate as decorate diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/isolated/_parent.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/isolated/_parent.pyi new file mode 100644 index 00000000..2f0b295d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/isolated/_parent.pyi @@ -0,0 +1,19 @@ +from collections.abc import Callable +from types import TracebackType +from typing import TypeVar +from typing_extensions import ParamSpec, Self + +_AC = TypeVar("_AC", bound=Callable[..., object]) +_R = TypeVar("_R") +_P = ParamSpec("_P") + +class Python: + def __init__(self, strict_mode: bool | None = None) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def call(self, function: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... + +def call(function: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... +def decorate(function: _AC) -> _AC: ... 
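PyInstaller.isolated (re-exported in the __init__ stub above) runs a callable in a throw-away child interpreter, so a hook can import a package without contaminating the build process. A hedged sketch of both documented patterns; example_package and its attributes are hypothetical, used only for illustration:

from PyInstaller import isolated

def _get_version() -> str:
    import example_package  # hypothetical; imported inside the child process
    return example_package.__version__

version = isolated.call(_get_version)

# Alternatively, @isolated.decorate makes every call of the function run isolated.
@isolated.decorate
def _get_data_dir() -> str:
    import example_package  # hypothetical
    return example_package.DATA_DIR  # hypothetical attribute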
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/lib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/lib/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/lib/modulegraph/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/lib/modulegraph/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/lib/modulegraph/modulegraph.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/lib/modulegraph/modulegraph.pyi new file mode 100644 index 00000000..ae19c98e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/lib/modulegraph/modulegraph.pyi @@ -0,0 +1,45 @@ +# Partial typing of the vendored modulegraph package. +# We reference the vendored package rather than depending on the original untyped module. +# Anything not referenced in the PyInstaller stubs doesn't need to be added here. + +from types import CodeType +from typing import Protocol + +class _SupportsGraphident(Protocol): + graphident: str + +# code, filename and packagepath are always initialized to None. But they can be given a value later. +class Node: + # Compiled code. See stdlib.builtins.compile + code: CodeType | None + filename: str | None + graphident: str + identifier: str + packagepath: str | None + def __init__(self, identifier: str) -> None: ... + def is_global_attr(self, attr_name: str) -> bool: ... + def is_submodule(self, submodule_basename: str) -> bool: ... + def add_global_attr(self, attr_name: str) -> None: ... + def add_global_attrs_from_module(self, target_module: Node) -> None: ... + def add_submodule(self, submodule_basename: str, submodule_node: Node) -> None: ... + def get_submodule(self, submodule_basename: str) -> Node: ... + def get_submodule_or_none(self, submodule_basename: str) -> Node | None: ... + def remove_global_attr_if_found(self, attr_name: str) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __lt__(self, other: _SupportsGraphident) -> bool: ... + def __le__(self, other: _SupportsGraphident) -> bool: ... + def __gt__(self, other: _SupportsGraphident) -> bool: ... + def __ge__(self, other: _SupportsGraphident) -> bool: ... + def infoTuple(self) -> tuple[str]: ... + +class Alias(str): ... + +class BaseModule(Node): + filename: str + packagepath: str + def __init__(self, name: str, filename: str | None = None, path: str | None = None) -> None: ... + # Returns a tuple of length 0, 1, 2, or 3 + def infoTuple(self) -> tuple[str, ...]: ... # type: ignore[override] + +class Package(BaseModule): ... 
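The imphookapi and modulegraph types above are what PyInstaller hands to hook modules during analysis. A hedged sketch of a pre-safe-import-module hook written against that API; the dotted module names are placeholders:

from PyInstaller.depend.imphookapi import PreSafeImportModuleAPI

def pre_safe_import_module(api: PreSafeImportModuleAPI) -> None:
    # Register a module that only exists at run time, then alias an
    # implementation module to the name importers actually use.
    api.add_runtime_module("example_pkg._generated")
    api.add_alias_module("example_pkg._impl", "example_pkg.compat")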
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi new file mode 100644 index 00000000..b2ee8638 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/hooks/__init__.pyi @@ -0,0 +1,82 @@ +# https://pyinstaller.org/en/stable/hooks.html + +import logging +from _typeshed import StrOrBytesPath, StrPath +from collections.abc import Callable, Iterable +from typing import Any +from typing_extensions import Final, Literal + +import pkg_resources +from PyInstaller import HOMEPATH as HOMEPATH +from PyInstaller.depend.imphookapi import PostGraphAPI +from PyInstaller.utils.hooks import conda +from PyInstaller.utils.hooks.win32 import get_pywin32_module_file_attribute as get_pywin32_module_file_attribute + +conda_support = conda + +logger: logging.Logger +PY_IGNORE_EXTENSIONS: Final[set[str]] +hook_variables: dict[str, str] + +def exec_statement(statement: str) -> str | int: ... +def exec_statement_rc(statement: str) -> str | int: ... +def eval_statement(statement: str) -> Any | Literal[""]: ... +def get_pyextension_imports(module_name: str) -> list[str]: ... +def get_homebrew_path(formula: str = "") -> str | None: ... +def remove_prefix(string: str, prefix: str) -> str: ... +def remove_suffix(string: str, suffix: str) -> str: ... +def remove_file_extension(filename: str) -> str: ... +def can_import_module(module_name: str) -> bool: ... +def get_module_attribute(module_name: str, attr_name: str) -> Any: ... +def get_module_file_attribute(package: str) -> str | None: ... +def is_module_satisfies( + requirements: Iterable[str] | pkg_resources.Requirement, + version: str | pkg_resources.Distribution | None = None, + version_attr: str = "__version__", +) -> bool: ... +def is_package(module_name: str) -> bool: ... +def get_all_package_paths(package: str) -> list[str]: ... +def package_base_path(package_path: str, package: str) -> str: ... +def get_package_paths(package: str) -> tuple[str, str]: ... +def collect_submodules( + package: str, filter: Callable[[str], bool] = ..., on_error: Literal["ignore", "warn once", "warn", "raise"] = "warn once" +) -> list[str]: ... +def is_module_or_submodule(name: str, mod_or_submod: str) -> bool: ... + +PY_DYLIB_PATTERNS: Final[list[str]] + +def collect_dynamic_libs(package: str, destdir: object = None, search_patterns: Iterable[str] = ...) -> list[tuple[str, str]]: ... +def collect_data_files( + package: str, + include_py_files: bool = False, + subdir: StrPath | None = None, + excludes: Iterable[str] | None = None, + includes: Iterable[str] | None = None, +) -> list[tuple[str, str]]: ... +def collect_system_data_files( + path: str, destdir: StrPath | None = None, include_py_files: bool = False +) -> list[tuple[str, str]]: ... +def copy_metadata(package_name: str, recursive: bool = False) -> list[tuple[str, str]]: ... +def get_installer(module: str) -> str | None: ... +def requirements_for_package(package_name: str) -> list[str]: ... 
+def collect_all( + package_name: str, + include_py_files: bool = True, + filter_submodules: Callable[[str], bool] | None = None, + exclude_datas: Iterable[str] | None = None, + include_datas: Iterable[str] | None = None, + on_error: Literal["ignore", "warn once", "warn", "raise"] = "warn once", +) -> tuple[list[tuple[str, str]], list[tuple[str, str]], list[str]]: ... +def collect_entry_point(name: str) -> tuple[tuple[str, str], list[str]]: ... +def get_hook_config(hook_api: PostGraphAPI, module_name: str, key: str) -> None: ... +def include_or_exclude_file( + filename: StrOrBytesPath, + include_list: Iterable[StrOrBytesPath] | None = None, + exclude_list: Iterable[StrOrBytesPath] | None = None, +) -> bool: ... +def collect_delvewheel_libs_directory( + package_name: str, + libdir_name: StrPath | None = None, + datas: list[tuple[str, str]] | None = None, + binaries: list[tuple[str, str]] | None = None, +) -> tuple[list[tuple[str, str]], list[tuple[str, str]]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/hooks/conda.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/hooks/conda.pyi new file mode 100644 index 00000000..fe80e7a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/hooks/conda.pyi @@ -0,0 +1,56 @@ +# https://pyinstaller.org/en/stable/hooks.html?highlight=conda_support#module-PyInstaller.utils.hooks.conda + +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Iterable +from pathlib import Path, PurePosixPath +from typing_extensions import Final, TypedDict + +if sys.version_info >= (3, 8): + from importlib.metadata import PackagePath as _PackagePath +else: + # Same as importlib_metadata.PackagePath + class _PackagePath(PurePosixPath): + def read_text(self, encoding: str = "utf-8") -> str: ... + def read_binary(self) -> str: ... + def locate(self) -> Path: ... + +CONDA_ROOT: Final[Path] +CONDA_META_DIR: Final[Path] +PYTHONPATH_PREFIXES: Final[list[Path]] + +class _RawDict(TypedDict): + name: str + version: str + files: list[StrOrBytesPath] + depends: list[str] + +class Distribution: + raw: _RawDict + name: str + version: str + files: list[PackagePath] + dependencies: list[str] + packages: list[str] + def __init__(self, json_path: str) -> None: ... + @classmethod + def from_name(cls, name: str) -> Distribution: ... + @classmethod + def from_package_name(cls, name: str) -> Distribution: ... + +# distribution and package_distribution are meant to be used and are not internal helpers +distribution = Distribution.from_name +package_distribution = Distribution.from_package_name + +class PackagePath(_PackagePath): + def locate(self) -> Path: ... + +def walk_dependency_tree(initial: str, excludes: Iterable[str] | None = None) -> dict[str, Distribution]: ... +def requires(name: str, strip_versions: bool = False) -> list[str]: ... +def files(name: str, dependencies: bool = False, excludes: Iterable[str] | None = None) -> list[PackagePath]: ... +def collect_dynamic_libs( + name: str, dest: str = ".", dependencies: bool = True, excludes: Iterable[str] | None = None +) -> list[tuple[str, str]]: ... 
+ +distributions: dict[str, Distribution] +distributions_by_package: dict[str | None, Distribution] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/hooks/win32.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/hooks/win32.pyi new file mode 100644 index 00000000..ac5efe75 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/PyInstaller/utils/hooks/win32.pyi @@ -0,0 +1 @@ +def get_pywin32_module_file_attribute(module_name: str) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/pyi_splash/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/pyi_splash/__init__.pyi new file mode 100644 index 00000000..18a31687 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyinstaller/pyi_splash/__init__.pyi @@ -0,0 +1,12 @@ +# Referenced in: https://pyinstaller.org/en/stable/advanced-topics.html#module-pyi_splash +# Source: https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/fake-modules/pyi_splash.py +from typing_extensions import Final + +__all__ = ["CLOSE_CONNECTION", "FLUSH_CHARACTER", "is_alive", "close", "update_text"] + +def is_alive() -> bool: ... +def update_text(msg: str) -> None: ... +def close() -> None: ... + +CLOSE_CONNECTION: Final = b"\x04" +FLUSH_CHARACTER: Final = b"\x0D" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..0fa0e5d5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/@tests/stubtest_allowlist.txt @@ -0,0 +1,16 @@ +# These __init__ methods have *args, **kwargs arguments on some platforms, but not others +pynput.mouse.Controller.__init__ + +# stubtest issues with non-`type` metaclasses, see https://github.com/python/mypy/issues/13316 +pynput.keyboard.Controller._Key +pynput.keyboard._base.Controller._Key +pynput.keyboard._dummy.Controller._Key + +# Platform specific private utils: +pynput._util.xorg_keysyms +pynput._util.xorg +pynput._util.win32_vks +pynput._util.win32 +pynput._util.uinput +pynput._util.darwin_vks +pynput._util.darwin diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..6a32bbd4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,5 @@ +# These __init__ methods have *args, **kwargs arguments on some platforms, but not others +pynput.keyboard.Controller.__init__ + +# Platform specific implementation detail: +pynput.keyboard.Controller.keyboard_mapping diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/@tests/stubtest_allowlist_win32.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/@tests/stubtest_allowlist_win32.txt new file mode 100644 index 00000000..99b19550 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/@tests/stubtest_allowlist_win32.txt @@ -0,0 +1,2 @@ +# These __init__ methods have *args, **kwargs arguments on some platforms, but not others +pynput.keyboard.Controller.__init__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/METADATA.toml new file mode 100644 index 00000000..c1e49ec2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/METADATA.toml @@ -0,0 +1,4 @@ +version = "1.7.*" + +[tool.stubtest] +platforms = ["darwin", "linux", "win32"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/__init__.pyi new file mode 100644 index 00000000..1b92738f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/__init__.pyi @@ -0,0 +1 @@ +from . import keyboard as keyboard, mouse as mouse diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/_info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/_info.pyi new file mode 100644 index 00000000..e6655bdd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/_info.pyi @@ -0,0 +1,2 @@ +__author__: str +__version__: tuple[int, int, int] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/_util.pyi new file mode 100644 index 00000000..87788dd6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/_util.pyi @@ -0,0 +1,71 @@ +import sys +import threading +from collections.abc import Callable +from queue import Queue +from types import ModuleType, TracebackType +from typing import Any, ClassVar, Generic, TypeVar +from typing_extensions import ParamSpec, Self, TypedDict + +_T = TypeVar("_T") +_AbstractListener_T = TypeVar("_AbstractListener_T", bound=AbstractListener) +_P = ParamSpec("_P") + +class _RESOLUTIONS(TypedDict): + darwin: str + uinput: str + xorg: str + +RESOLUTIONS: _RESOLUTIONS + +def backend(package: str) -> ModuleType: ... +def prefix(base: type | tuple[type | tuple[Any, ...], ...], cls: type) -> str | None: ... + +class AbstractListener(threading.Thread): + class StopException(Exception): ... + _HANDLED_EXCEPTIONS: ClassVar[tuple[type | tuple[Any, ...], ...]] # undocumented + _suppress: bool # undocumented + _running: bool # undocumented + _thread: threading.Thread # undocumented + _condition: threading.Condition # undocumented + _ready: bool # undocumented + _queue: Queue[sys._OptExcInfo | None] # undocumented + daemon: bool + def __init__(self, suppress: bool = ..., **kwargs: Callable[..., bool | None] | None) -> None: ... + @property + def suppress(self) -> bool: ... + @property + def running(self) -> bool: ... + def stop(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def wait(self) -> None: ... + def run(self) -> None: ... 
+ @classmethod + def _emitter(cls, f: Callable[_P, _T]) -> Callable[_P, _T]: ... # undocumented + def _mark_ready(self) -> None: ... # undocumented + def _run(self) -> None: ... # undocumented + def _stop_platform(self) -> None: ... # undocumented + def join(self, *args: Any) -> None: ... + +class Events(Generic[_T, _AbstractListener_T]): + _Listener: type[_AbstractListener_T] | None # undocumented + + class Event: + def __eq__(self, other: object) -> bool: ... + _event_queue: Queue[_T] # undocumented + _sentinel: object # undocumented + _listener: _AbstractListener_T # undocumented + start: Callable[[], None] + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def __iter__(self) -> Self: ... + def __next__(self) -> _T: ... + def get(self, timeout: float | None = ...) -> _T | None: ... + def _event_mapper(self, event: Callable[_P, object]) -> Callable[_P, None]: ... + +class NotifierMixin: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/keyboard/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/keyboard/__init__.pyi new file mode 100644 index 00000000..7add8545 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/keyboard/__init__.pyi @@ -0,0 +1,30 @@ +from _typeshed import SupportsItems +from collections.abc import Callable +from typing import Any + +from pynput import _util + +from ._base import Controller as Controller, Key as Key, KeyCode as KeyCode, Listener as Listener + +class Events(_util.Events[Any, Listener]): + class Press(_util.Events.Event): + key: Key | KeyCode | None + def __init__(self, key: Key | KeyCode | None) -> None: ... + + class Release(_util.Events.Event): + key: Key | KeyCode | None + def __init__(self, key: Key | KeyCode | None) -> None: ... + + def __init__(self) -> None: ... + def __next__(self) -> Press | Release: ... + def get(self, timeout: float | None = ...) -> Press | Release | None: ... + +class HotKey: + def __init__(self, keys: list[KeyCode], on_activate: Callable[[], object]) -> None: ... + @staticmethod + def parse(keys: str) -> list[KeyCode]: ... + def press(self, key: Key | KeyCode) -> None: ... + def release(self, key: Key | KeyCode) -> None: ... + +class GlobalHotKeys(Listener): + def __init__(self, hotkeys: SupportsItems[str, Callable[[], None]], *args: Any, **kwargs: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/keyboard/_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/keyboard/_base.pyi new file mode 100644 index 00000000..a67c0d0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/keyboard/_base.pyi @@ -0,0 +1,132 @@ +import contextlib +import enum +import sys +from collections.abc import Callable, Iterable, Iterator +from typing import Any, ClassVar +from typing_extensions import Self + +from pynput._util import AbstractListener + +class KeyCode: + _PLATFORM_EXTENSIONS: ClassVar[Iterable[str]] # undocumented + vk: int | None + char: str | None + is_dead: bool | None + combining: str | None + def __init__(self, vk: str | None = ..., char: str | None = ..., is_dead: bool = ..., **kwargs: str) -> None: ... 
+ def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def join(self, key: Self) -> Self: ... + @classmethod + def from_vk(cls, vk: int, **kwargs: Any) -> Self: ... + @classmethod + def from_char(cls, char: str, **kwargs: Any) -> Self: ... + @classmethod + def from_dead(cls, char: str, **kwargs: Any) -> Self: ... + +class Key(enum.Enum): + alt: int + alt_l: int + alt_r: int + alt_gr: int + backspace: int + caps_lock: int + cmd: int + cmd_l: int + cmd_r: int + ctrl: int + ctrl_l: int + ctrl_r: int + delete: int + down: int + end: int + enter: int + esc: int + f1: int + f2: int + f3: int + f4: int + f5: int + f6: int + f7: int + f8: int + f9: int + f10: int + f11: int + f12: int + f13: int + f14: int + f15: int + f16: int + f17: int + f18: int + f19: int + f20: int + if sys.platform == "win32": + f21: int + f22: int + f23: int + f24: int + home: int + left: int + page_down: int + page_up: int + right: int + shift: int + shift_l: int + shift_r: int + space: int + tab: int + up: int + media_play_pause: int + media_volume_mute: int + media_volume_down: int + media_volume_up: int + media_previous: int + media_next: int + insert: int + menu: int + num_lock: int + pause: int + print_screen: int + scroll_lock: int + +class Controller: + _KeyCode: ClassVar[type[KeyCode]] # undocumented + _Key: ClassVar[type[Key]] # undocumented + + if sys.platform == "linux": + CTRL_MASK: ClassVar[int] + SHIFT_MASK: ClassVar[int] + + class InvalidKeyException(Exception): ... + class InvalidCharacterException(Exception): ... + + def __init__(self) -> None: ... + def press(self, key: str | Key | KeyCode) -> None: ... + def release(self, key: str | Key | KeyCode) -> None: ... + def tap(self, key: str | Key | KeyCode) -> None: ... + def touch(self, key: str | Key | KeyCode, is_press: bool) -> None: ... + @contextlib.contextmanager + def pressed(self, *args: str | Key | KeyCode) -> Iterator[None]: ... + def type(self, string: str) -> None: ... + @property + def modifiers(self) -> contextlib.AbstractContextManager[Iterator[set[Key]]]: ... + @property + def alt_pressed(self) -> bool: ... + @property + def alt_gr_pressed(self) -> bool: ... + @property + def ctrl_pressed(self) -> bool: ... + @property + def shift_pressed(self) -> bool: ... + +class Listener(AbstractListener): + def __init__( + self, + on_press: Callable[[Key | KeyCode | None], None] | None = ..., + on_release: Callable[[Key | KeyCode | None], None] | None = ..., + suppress: bool = ..., + **kwargs: Any, + ) -> None: ... + def canonical(self, key: Key | KeyCode) -> Key | KeyCode: ... 
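Controller and Listener above mirror pynput's documented keyboard API: Controller synthesizes input, Listener runs in its own thread and feeds key events to callbacks. A brief hedged sketch of typical use:

from pynput import keyboard

controller = keyboard.Controller()
controller.press(keyboard.Key.shift)
controller.release(keyboard.Key.shift)
controller.type("hello")

def on_press(key):  # key is Key | KeyCode | None, per the Listener stub above
    print("pressed", key)

# The context manager starts the listener thread; listener.stop()
# (inherited from AbstractListener, stubbed in _util.pyi) ends it.
with keyboard.Listener(on_press=on_press) as listener:
    listener.join()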
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/keyboard/_dummy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/keyboard/_dummy.pyi new file mode 100644 index 00000000..f49ca477 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/keyboard/_dummy.pyi @@ -0,0 +1 @@ +from ._base import Controller as Controller, Key as Key, KeyCode as KeyCode, Listener as Listener diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/mouse/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/mouse/__init__.pyi new file mode 100644 index 00000000..ad9546c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/mouse/__init__.pyi @@ -0,0 +1,29 @@ +from typing import Any + +from pynput import _util + +from ._base import Button as Button, Controller as Controller, Listener as Listener + +class Events(_util.Events[Any, Listener]): + class Move(_util.Events.Event): + x: int + y: int + def __init__(self, x: int, y: int) -> None: ... + + class Click(_util.Events.Event): + x: int + y: int + button: Button + pressed: bool + def __init__(self, x: int, y: int, button: Button, pressed: bool) -> None: ... + + class Scroll(_util.Events.Event): + x: int + y: int + dx: int + dy: int + def __init__(self, x: int, y: int, dx: int, dy: int) -> None: ... + + def __init__(self) -> None: ... + def __next__(self) -> Move | Click | Scroll: ... + def get(self, timeout: float | None = ...) -> Move | Click | Scroll | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/mouse/_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/mouse/_base.pyi new file mode 100644 index 00000000..409da415 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/mouse/_base.pyi @@ -0,0 +1,94 @@ +import enum +import sys +from collections.abc import Callable +from types import TracebackType +from typing import Any +from typing_extensions import Self + +from pynput._util import AbstractListener + +class Button(enum.Enum): + unknown: int + left: int + middle: int + right: int + if sys.platform == "linux": + button8: int + button9: int + button10: int + button11: int + button12: int + button13: int + button14: int + button15: int + button16: int + button17: int + button18: int + button19: int + button20: int + button21: int + button22: int + button23: int + button24: int + button25: int + button26: int + button27: int + button28: int + button29: int + button30: int + scroll_down: int + scroll_left: int + scroll_right: int + scroll_up: int + if sys.platform == "win32": + x1: int + x2: int + +class Controller: + def __init__(self) -> None: ... + @property + def position(self) -> tuple[int, int]: ... + @position.setter + def position(self, position: tuple[int, int]) -> None: ... + def scroll(self, dx: int, dy: int) -> None: ... + def press(self, button: Button) -> None: ... + def release(self, button: Button) -> None: ... + def move(self, dx: int, dy: int) -> None: ... + def click(self, button: Button, count: int = ...) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... 
+ +class Listener(AbstractListener): + if sys.platform == "win32": + WM_LBUTTONDOWN: int + WM_LBUTTONUP: int + WM_MBUTTONDOWN: int + WM_MBUTTONUP: int + WM_MOUSEMOVE: int + WM_MOUSEWHEEL: int + WM_MOUSEHWHEEL: int + WM_RBUTTONDOWN: int + WM_RBUTTONUP: int + WM_XBUTTONDOWN: int + WM_XBUTTONUP: int + + MK_XBUTTON1: int + MK_XBUTTON2: int + + XBUTTON1: int + XBUTTON2: int + + CLICK_BUTTONS: dict[int, tuple[Button, bool]] + X_BUTTONS: dict[int, dict[int, tuple[Button, bool]]] + SCROLL_BUTTONS: dict[int, tuple[int, int]] + + def __init__( + self, + on_move: Callable[[int, int], bool | None] | None = ..., + on_click: Callable[[int, int, Button, bool], bool | None] | None = ..., + on_scroll: Callable[[int, int, int, int], bool | None] | None = ..., + suppress: bool = ..., + **kwargs: Any, + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/mouse/_dummy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/mouse/_dummy.pyi new file mode 100644 index 00000000..c799f582 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pynput/pynput/mouse/_dummy.pyi @@ -0,0 +1 @@ +from ._base import Button as Button, Controller as Controller, Listener as Listener diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..30eebdfe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist.txt @@ -0,0 +1,69 @@ +# Error: failed to import +# ======================= +serial.__main__ # SystemExit +serial.serialcli # (IronPython) ModuleNotFoundError: No module named 'System' +serial.serialjava # No Java Communications API implementation found + +# Error: is inconsistent +# ====================== +# These are positional only argument in the stub because they inherit from io.RawIOBase +# but at runtime they are normal arguments that don't have consistent names. 
+serial.Serial.read +serial.Serial.write +serial.SerialBase.readinto +serial.serialutil.SerialBase.readinto +serial.rfc2217.Serial.read +serial.rfc2217.Serial.write +serial.rs485.RS485.write +serial.urlhandler.protocol_cp2110.Serial.read +serial.urlhandler.protocol_cp2110.Serial.write +serial.urlhandler.protocol_loop.Serial.read +serial.urlhandler.protocol_loop.Serial.write +serial.urlhandler.protocol_rfc2217.Serial.read +serial.urlhandler.protocol_rfc2217.Serial.write +serial.urlhandler.protocol_socket.Serial.read +serial.urlhandler.protocol_socket.Serial.write +serial.urlhandler.protocol_spy.Serial.read +serial.urlhandler.protocol_spy.Serial.write + +# Error: is not present in stub +# ============================= +# Python 2 compatibility +serial.basestring +serial.serialutil.basestring +serial.serialutil.iterbytes +serial.serialutil.to_bytes + +# Deprecated aliases +serial.SerialBase.applySettingsDict +serial.SerialBase.flushInput +serial.SerialBase.flushOutput +serial.SerialBase.getCD +serial.SerialBase.getCTS +serial.SerialBase.getDSR +serial.SerialBase.getRI +serial.SerialBase.getSettingsDict +serial.SerialBase.inWaiting +serial.SerialBase.interCharTimeout +serial.SerialBase.isOpen +serial.SerialBase.sendBreak +serial.SerialBase.setDTR +serial.SerialBase.setPort +serial.SerialBase.setRTS +serial.SerialBase.writeTimeout +serial.serialutil.SerialBase.applySettingsDict +serial.serialutil.SerialBase.flushInput +serial.serialutil.SerialBase.flushOutput +serial.serialutil.SerialBase.getCD +serial.serialutil.SerialBase.getCTS +serial.serialutil.SerialBase.getDSR +serial.serialutil.SerialBase.getRI +serial.serialutil.SerialBase.getSettingsDict +serial.serialutil.SerialBase.inWaiting +serial.serialutil.SerialBase.interCharTimeout +serial.serialutil.SerialBase.isOpen +serial.serialutil.SerialBase.sendBreak +serial.serialutil.SerialBase.setDTR +serial.serialutil.SerialBase.setPort +serial.serialutil.SerialBase.setRTS +serial.serialutil.SerialBase.writeTimeout diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist_darwin.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist_darwin.txt new file mode 100644 index 00000000..aa88b027 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist_darwin.txt @@ -0,0 +1,21 @@ +# Error: failed to import +# ======================= +serial.serialwin32 # Windows only +serial.win32 # Windows only +serial.tools.list_ports_windows # Windows only + +# Error: is inconsistent +# ====================== +# Methods defined with positional-only argument in the stub because they inherit from +# io.RawIOBase but at runtime they are normal arguments that don't have consistent +# names. 
+serial.PosixPollSerial.read +serial.VTIMESerial.read +serial.serialposix.Serial.read +serial.serialposix.Serial.write +serial.serialposix.PosixPollSerial.read +serial.serialposix.VTIMESerial.read + +# intended to be private aliases +serial.tools.list_ports_posix.plat +serial.serialposix.plat diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..2077595e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,23 @@ +# Error: failed to import +# ======================= +serial.serialwin32 # Windows only +serial.win32 # Windows only +serial.tools.list_ports_osx # Mac only +serial.tools.list_ports_windows # Windows only + +# Error: is inconsistent +# ====================== +# Methods defined with positional-only argument in the stub because they inherit from +# io.RawIOBase but at runtime they are normal arguments that don't have consistent +# names. +serial.PosixPollSerial.read +serial.VTIMESerial.read +serial.serialposix.Serial.read +serial.serialposix.Serial.write +serial.serialposix.PosixPollSerial.read +serial.serialposix.VTIMESerial.read + +# Error: is missing from the stub (intended to be private aliases) +# ================================================================ +serial.tools.list_ports_posix.plat +serial.serialposix.plat diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist_win32.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist_win32.txt new file mode 100644 index 00000000..170c6395 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/@tests/stubtest_allowlist_win32.txt @@ -0,0 +1,13 @@ +# Error: failed to import +# ======================= +serial.serialposix # Posix only +serial.tools.list_ports_osx # Mac only +serial.tools.list_ports_posix # Posix only + +# Error: is inconsistent +# ====================== +# Methods defined with positional-only argument in the stub because they inherit from +# io.RawIOBase but at runtime they are normal arguments that don't have consistent +# names. 
+serial.serialwin32.Serial.read +serial.serialwin32.Serial.write diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/METADATA.toml new file mode 100644 index 00000000..1e5d0af9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/METADATA.toml @@ -0,0 +1,5 @@ +version = "3.5.*" + +[tool.stubtest] +platforms = ["darwin", "linux", "win32"] +extras = ["cp2110"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/__init__.pyi new file mode 100644 index 00000000..7677f130 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/__init__.pyi @@ -0,0 +1,30 @@ +import sys + +from serial.serialutil import * + +if sys.platform == "win32": + from serial.serialwin32 import Serial as Serial +else: + from serial.serialposix import PosixPollSerial as PosixPollSerial, Serial as Serial, VTIMESerial as VTIMESerial +# TODO: java? cli? These platforms raise flake8-pyi Y008. Should they be included with a noqa? + +__version__: str +VERSION: str +protocol_handler_packages: list[str] + +def serial_for_url( + url: str | None, + baudrate: int = ..., + bytesize: int = ..., + parity: str = ..., + stopbits: float = ..., + timeout: float | None = ..., + xonxoff: bool = ..., + rtscts: bool = ..., + write_timeout: float | None = ..., + dsrdtr: bool = ..., + inter_byte_timeout: float | None = ..., + exclusive: float | None = ..., + *, + do_not_open: bool = ..., +) -> Serial: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/__main__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/__main__.pyi new file mode 100644 index 00000000..195cbe71 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/__main__.pyi @@ -0,0 +1 @@ +from serial.tools import miniterm as miniterm diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/rfc2217.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/rfc2217.pyi new file mode 100644 index 00000000..01606331 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/rfc2217.pyi @@ -0,0 +1,184 @@ +import logging +from collections.abc import Callable, Generator +from typing import Any + +from serial.serialutil import SerialBase + +LOGGER_LEVELS: dict[str, int] +SE: bytes +NOP: bytes +DM: bytes +BRK: bytes +IP: bytes +AO: bytes +AYT: bytes +EC: bytes +EL: bytes +GA: bytes +SB: bytes +WILL: bytes +WONT: bytes +DO: bytes +DONT: bytes +IAC: bytes +IAC_DOUBLED: bytes +BINARY: bytes +ECHO: bytes +SGA: bytes +COM_PORT_OPTION: bytes +SET_BAUDRATE: bytes +SET_DATASIZE: bytes +SET_PARITY: bytes +SET_STOPSIZE: bytes +SET_CONTROL: bytes +NOTIFY_LINESTATE: bytes +NOTIFY_MODEMSTATE: bytes +FLOWCONTROL_SUSPEND: bytes +FLOWCONTROL_RESUME: bytes +SET_LINESTATE_MASK: bytes +SET_MODEMSTATE_MASK: bytes +PURGE_DATA: bytes +SERVER_SET_BAUDRATE: bytes +SERVER_SET_DATASIZE: bytes +SERVER_SET_PARITY: bytes +SERVER_SET_STOPSIZE: bytes +SERVER_SET_CONTROL: bytes +SERVER_NOTIFY_LINESTATE: bytes +SERVER_NOTIFY_MODEMSTATE: bytes 
+SERVER_FLOWCONTROL_SUSPEND: bytes +SERVER_FLOWCONTROL_RESUME: bytes +SERVER_SET_LINESTATE_MASK: bytes +SERVER_SET_MODEMSTATE_MASK: bytes +SERVER_PURGE_DATA: bytes +RFC2217_ANSWER_MAP: dict[bytes, bytes] +SET_CONTROL_REQ_FLOW_SETTING: bytes +SET_CONTROL_USE_NO_FLOW_CONTROL: bytes +SET_CONTROL_USE_SW_FLOW_CONTROL: bytes +SET_CONTROL_USE_HW_FLOW_CONTROL: bytes +SET_CONTROL_REQ_BREAK_STATE: bytes +SET_CONTROL_BREAK_ON: bytes +SET_CONTROL_BREAK_OFF: bytes +SET_CONTROL_REQ_DTR: bytes +SET_CONTROL_DTR_ON: bytes +SET_CONTROL_DTR_OFF: bytes +SET_CONTROL_REQ_RTS: bytes +SET_CONTROL_RTS_ON: bytes +SET_CONTROL_RTS_OFF: bytes +SET_CONTROL_REQ_FLOW_SETTING_IN: bytes +SET_CONTROL_USE_NO_FLOW_CONTROL_IN: bytes +SET_CONTROL_USE_SW_FLOW_CONTOL_IN: bytes +SET_CONTROL_USE_HW_FLOW_CONTOL_IN: bytes +SET_CONTROL_USE_DCD_FLOW_CONTROL: bytes +SET_CONTROL_USE_DTR_FLOW_CONTROL: bytes +SET_CONTROL_USE_DSR_FLOW_CONTROL: bytes +LINESTATE_MASK_TIMEOUT: int +LINESTATE_MASK_SHIFTREG_EMPTY: int +LINESTATE_MASK_TRANSREG_EMPTY: int +LINESTATE_MASK_BREAK_DETECT: int +LINESTATE_MASK_FRAMING_ERROR: int +LINESTATE_MASK_PARTIY_ERROR: int +LINESTATE_MASK_OVERRUN_ERROR: int +LINESTATE_MASK_DATA_READY: int +MODEMSTATE_MASK_CD: int +MODEMSTATE_MASK_RI: int +MODEMSTATE_MASK_DSR: int +MODEMSTATE_MASK_CTS: int +MODEMSTATE_MASK_CD_CHANGE: int +MODEMSTATE_MASK_RI_CHANGE: int +MODEMSTATE_MASK_DSR_CHANGE: int +MODEMSTATE_MASK_CTS_CHANGE: int +PURGE_RECEIVE_BUFFER: bytes +PURGE_TRANSMIT_BUFFER: bytes +PURGE_BOTH_BUFFERS: bytes +RFC2217_PARITY_MAP: dict[str, int] +RFC2217_REVERSE_PARITY_MAP: dict[int, str] +RFC2217_STOPBIT_MAP: dict[int | float, int] +RFC2217_REVERSE_STOPBIT_MAP: dict[int, int | float] +M_NORMAL: int +M_IAC_SEEN: int +M_NEGOTIATE: int +REQUESTED: str +ACTIVE: str +INACTIVE: str +REALLY_INACTIVE: str + +class TelnetOption: + connection: Serial + name: str + option: bytes + send_yes: bytes + send_no: bytes + ack_yes: bytes + ack_no: bytes + state: str + active: bool + activation_callback: Callable[[], Any] + + def __init__( + self, + connection: Serial, + name: str, + option: bytes, + send_yes: bytes, + send_no: bytes, + ack_yes: bytes, + ack_no: bytes, + initial_state: str, + activation_callback: Callable[[], Any] | None = ..., + ) -> None: ... + def process_incoming(self, command: bytes) -> None: ... + +class TelnetSubnegotiation: + connection: Serial + name: str + option: bytes + value: bytes | None + ack_option: bytes + state: str + def __init__(self, connection: Serial, name: str, option: bytes, ack_option: bytes | None = ...) -> None: ... + def set(self, value: bytes) -> None: ... + def is_ready(self) -> bool: ... + @property + def active(self) -> bool: ... + def wait(self, timeout: float = ...) -> None: ... + def check_answer(self, suboption: bytes) -> None: ... + +class Serial(SerialBase): + logger: logging.Logger | None + def open(self) -> None: ... + def from_url(self, url: str) -> tuple[str, int]: ... + @property + def in_waiting(self) -> int: ... + def reset_input_buffer(self) -> None: ... + def reset_output_buffer(self) -> None: ... + @property + def cts(self) -> bool: ... + @property + def dsr(self) -> bool: ... + @property + def ri(self) -> bool: ... + @property + def cd(self) -> bool: ... + def telnet_send_option(self, action: bytes, option: bytes) -> None: ... + def rfc2217_send_subnegotiation(self, option: bytes, value: bytes = ...) -> None: ... + def rfc2217_send_purge(self, value: bytes) -> None: ... + def rfc2217_set_control(self, value: bytes) -> None: ... 
+ def rfc2217_flow_server_ready(self) -> None: ... + def get_modem_state(self) -> int: ... + +class PortManager: + serial: Serial + connection: Serial + logger: logging.Logger | None + mode: int + suboption: bytes | None + telnet_command: bytes | None + modemstate_mask: int + last_modemstate: int | None + linstate_mask: int + def __init__(self, serial_port: Serial, connection: Serial, logger: logging.Logger | None = ...) -> None: ... + def telnet_send_option(self, action: bytes, option: bytes) -> None: ... + def rfc2217_send_subnegotiation(self, option: bytes, value: bytes = ...) -> None: ... + def check_modem_lines(self, force_notification: bool = ...) -> None: ... + def escape(self, data: bytes) -> Generator[bytes, None, None]: ... + def filter(self, data: bytes) -> Generator[bytes, None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/rs485.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/rs485.pyi new file mode 100644 index 00000000..55d75444 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/rs485.pyi @@ -0,0 +1,18 @@ +import serial + +class RS485Settings: + rts_level_for_tx: bool + rts_level_for_rx: bool + loopback: bool + delay_before_tx: float | None + delay_before_rx: float | None + def __init__( + self, + rts_level_for_tx: bool = ..., + rts_level_for_rx: bool = ..., + loopback: bool = ..., + delay_before_tx: float | None = ..., + delay_before_rx: float | None = ..., + ) -> None: ... + +class RS485(serial.Serial): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialcli.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialcli.pyi new file mode 100644 index 00000000..9acb4140 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialcli.pyi @@ -0,0 +1,22 @@ +from typing import Any + +from serial.serialutil import * + +sab: Any # IronPython object + +def as_byte_array(string: bytes) -> Any: ... # IronPython object + +class Serial(SerialBase): + def open(self) -> None: ... + @property + def in_waiting(self) -> int: ... + def reset_input_buffer(self) -> None: ... + def reset_output_buffer(self) -> None: ... + @property + def cts(self) -> bool: ... + @property + def dsr(self) -> bool: ... + @property + def ri(self) -> bool: ... + @property + def cd(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialjava.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialjava.pyi new file mode 100644 index 00000000..21ced63e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialjava.pyi @@ -0,0 +1,27 @@ +from collections.abc import Iterable +from typing import Any + +from serial.serialutil import * + +def my_import(name: str) -> Any: ... # Java object +def detect_java_comm(names: Iterable[str]) -> Any: ... # Java object + +comm: Any # Java object + +def device(portnumber: int) -> str: ... + +class Serial(SerialBase): + sPort: Any # Java object + def open(self) -> None: ... + @property + def in_waiting(self) -> int: ... + def reset_input_buffer(self) -> None: ... + def reset_output_buffer(self) -> None: ... + @property + def cts(self) -> bool: ... 
+ @property + def dsr(self) -> bool: ... + @property + def ri(self) -> bool: ... + @property + def cd(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialposix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialposix.pyi new file mode 100644 index 00000000..7ef6d794 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialposix.pyi @@ -0,0 +1,90 @@ +import sys +from typing_extensions import Never + +from serial.serialutil import SerialBase + +class PlatformSpecificBase: + BAUDRATE_CONSTANTS: dict[int, int] + def set_low_latency_mode(self, low_latency_settings: bool) -> None: ... + +CMSPAR: int +if sys.platform == "linux": + TCGETS2: int + TCSETS2: int + BOTHER: int + TIOCGRS485: int + TIOCSRS485: int + SER_RS485_ENABLED: int + SER_RS485_RTS_ON_SEND: int + SER_RS485_RTS_AFTER_SEND: int + SER_RS485_RX_DURING_TX: int + + class PlatformSpecific(PlatformSpecificBase): ... + +elif sys.platform == "cygwin": + class PlatformSpecific(PlatformSpecificBase): ... + +elif sys.platform == "darwin": + IOSSIOSPEED: int + + class PlatformSpecific(PlatformSpecificBase): + osx_version: list[str] + TIOCSBRK: int + TIOCCBRK: int + +else: + class PlatformSpecific(PlatformSpecificBase): ... + +TIOCMGET: int +TIOCMBIS: int +TIOCMBIC: int +TIOCMSET: int +TIOCM_DTR: int +TIOCM_RTS: int +TIOCM_CTS: int +TIOCM_CAR: int +TIOCM_RNG: int +TIOCM_DSR: int +TIOCM_CD: int +TIOCM_RI: int +TIOCINQ: int +TIOCOUTQ: int +TIOCM_zero_str: bytes +TIOCM_RTS_str: bytes +TIOCM_DTR_str: bytes +TIOCSBRK: int +TIOCCBRK: int + +class Serial(SerialBase, PlatformSpecific): + fd: int | None + pipe_abort_read_w: int | None + pipe_abort_read_r: int | None + pipe_abort_write_w: int | None + pipe_abort_write_r: int | None + def open(self) -> None: ... + @property + def in_waiting(self) -> int: ... + def cancel_read(self) -> None: ... + def cancel_write(self) -> None: ... + def reset_input_buffer(self) -> None: ... + def reset_output_buffer(self) -> None: ... + def send_break(self, duration: float = ...) -> None: ... + @property + def cts(self) -> bool: ... + @property + def dsr(self) -> bool: ... + @property + def ri(self) -> bool: ... + @property + def cd(self) -> bool: ... + @property + def out_waiting(self) -> int: ... + def set_input_flow_control(self, enable: bool = ...) -> None: ... + def set_output_flow_control(self, enable: bool = ...) -> None: ... + def nonblocking(self) -> None: ... + +class PosixPollSerial(Serial): ... + +class VTIMESerial(Serial): + @property + def cancel_read(self) -> Never: ... 
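An illustrative sketch (not part of the vendored stub files; the device paths are placeholders) of the platform-conditional pattern used in serial/__init__.pyi and serialposix.pyi above: type checkers evaluate the sys.platform branches statically, so linux-only names such as PosixPollSerial are only visible for a non-win32 target, matching the platforms list in the pyserial METADATA.toml.

# Sketch only: hypothetical device paths; uses just the names stubbed above.
import sys

import serial

if sys.platform == "win32":
    # The win32 branch of serial/__init__.pyi re-exports serial.serialwin32.Serial.
    port = serial.Serial("COM3", baudrate=115200, timeout=1.0)
else:
    # PosixPollSerial exists only in the non-win32 branch, mirroring serialposix.pyi.
    port = serial.PosixPollSerial("/dev/ttyUSB0", baudrate=115200, timeout=1.0)

print(port.name)
port.close()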
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialutil.pyi new file mode 100644 index 00000000..ed48e885 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialutil.pyi @@ -0,0 +1,136 @@ +import io +from collections.abc import Callable, Generator +from typing import Any +from typing_extensions import Final + +from serial.rs485 import RS485Settings + +XON: Final = b"\x11" +XOFF: Final = b"\x13" +CR: Final = b"\r" +LF: Final = b"\n" +PARITY_NONE: Final = "N" +PARITY_EVEN: Final = "E" +PARITY_ODD: Final = "O" +PARITY_MARK: Final = "M" +PARITY_SPACE: Final = "S" +STOPBITS_ONE: Final = 1 +STOPBITS_ONE_POINT_FIVE: float +STOPBITS_TWO: Final = 2 +FIVEBITS: Final = 5 +SIXBITS: Final = 6 +SEVENBITS: Final = 7 +EIGHTBITS: Final = 8 +PARITY_NAMES: dict[str, str] + +class SerialException(OSError): ... +class SerialTimeoutException(SerialException): ... + +class PortNotOpenError(SerialException): + def __init__(self) -> None: ... + +class Timeout: + TIME: Callable[[], float] + is_infinite: bool + is_non_blocking: bool + duration: float + target_time: float + def __init__(self, duration: float) -> None: ... + def expired(self) -> bool: ... + def time_left(self) -> float: ... + def restart(self, duration: float) -> None: ... + +class SerialBase(io.RawIOBase): + BAUDRATES: tuple[int, ...] + BYTESIZES: tuple[int, ...] + PARITIES: tuple[str, ...] + STOPBITS: tuple[int, float, int] + is_open: bool + portstr: str | None + name: str | None + def __init__( + self, + port: str | None = ..., + baudrate: int = ..., + bytesize: int = ..., + parity: str = ..., + stopbits: float = ..., + timeout: float | None = ..., + xonxoff: bool = ..., + rtscts: bool = ..., + write_timeout: float | None = ..., + dsrdtr: bool = ..., + inter_byte_timeout: float | None = ..., + exclusive: float | None = ..., + ) -> None: ... + def read(self, __size: int = ...) -> bytes: ... # same as io.RawIOBase.read but always returns bytes + @property + def port(self) -> str | None: ... + @port.setter + def port(self, port: str | None) -> None: ... + @property + def baudrate(self) -> int: ... + @baudrate.setter + def baudrate(self, baudrate: int) -> None: ... + @property + def bytesize(self) -> int: ... + @bytesize.setter + def bytesize(self, bytesize: int) -> None: ... + @property + def exclusive(self) -> bool | None: ... + @exclusive.setter + def exclusive(self, exclusive: bool | None) -> None: ... + @property + def parity(self) -> str: ... + @parity.setter + def parity(self, parity: str) -> None: ... + @property + def stopbits(self) -> float: ... + @stopbits.setter + def stopbits(self, stopbits: float) -> None: ... + @property + def timeout(self) -> float | None: ... + @timeout.setter + def timeout(self, timeout: float | None) -> None: ... + @property + def write_timeout(self) -> float | None: ... + @write_timeout.setter + def write_timeout(self, timeout: float | None) -> None: ... + @property + def inter_byte_timeout(self) -> float | None: ... + @inter_byte_timeout.setter + def inter_byte_timeout(self, ic_timeout: float | None) -> None: ... + @property + def xonxoff(self) -> bool: ... + @xonxoff.setter + def xonxoff(self, xonxoff: bool) -> None: ... + @property + def rtscts(self) -> bool: ... + @rtscts.setter + def rtscts(self, rtscts: bool) -> None: ... + @property + def dsrdtr(self) -> bool: ... 
+ @dsrdtr.setter + def dsrdtr(self, dsrdtr: bool | None = ...) -> None: ... + @property + def rts(self) -> bool: ... + @rts.setter + def rts(self, value: bool) -> None: ... + @property + def dtr(self) -> bool: ... + @dtr.setter + def dtr(self, value: bool) -> None: ... + @property + def break_condition(self) -> bool: ... + @break_condition.setter + def break_condition(self, value: bool) -> None: ... + @property + def rs485_mode(self) -> RS485Settings | None: ... + @rs485_mode.setter + def rs485_mode(self, rs485_settings: RS485Settings | None) -> None: ... + def get_settings(self) -> dict[str, Any]: ... + def apply_settings(self, d: dict[str, Any]) -> None: ... + def send_break(self, duration: float = ...) -> None: ... + def read_all(self) -> bytes | None: ... + def read_until(self, expected: bytes = ..., size: int | None = ...) -> bytes: ... + def iread_until(self, expected: bytes = ..., size: int | None = ...) -> Generator[bytes, None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialwin32.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialwin32.pyi new file mode 100644 index 00000000..df75a56a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/serialwin32.pyi @@ -0,0 +1,22 @@ +from serial.serialutil import SerialBase + +class Serial(SerialBase): + def open(self) -> None: ... + @property + def in_waiting(self) -> int: ... + def reset_input_buffer(self) -> None: ... + def reset_output_buffer(self) -> None: ... + @property + def cts(self) -> bool: ... + @property + def dsr(self) -> bool: ... + @property + def ri(self) -> bool: ... + @property + def cd(self) -> bool: ... + def set_buffer_size(self, rx_size: int = ..., tx_size: int | None = ...) -> None: ... + def set_output_flow_control(self, enable: bool = ...) -> None: ... + @property + def out_waiting(self) -> int: ... + def cancel_read(self) -> None: ... + def cancel_write(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/threaded/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/threaded/__init__.pyi new file mode 100644 index 00000000..95770e26 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/threaded/__init__.pyi @@ -0,0 +1,45 @@ +import threading +from _typeshed import ReadableBuffer +from collections.abc import Callable +from typing_extensions import Self + +from serial import Serial + +class Protocol: + def connection_made(self, transport: ReaderThread) -> None: ... + def data_received(self, data: bytes) -> None: ... + def connection_lost(self, exc: BaseException | None) -> None: ... + +class Packetizer(Protocol): + TERMINATOR: bytes + buffer: bytearray + transport: ReaderThread | None + def handle_packet(self, packet: bytes) -> None: ... + +class FramedPacket(Protocol): + START: bytes + STOP: bytes + packet: bytearray + in_packet: bool + transport: ReaderThread | None + def handle_packet(self, packet: bytes) -> None: ... + def handle_out_of_packet_data(self, data: bytes) -> None: ... + +class LineReader(Packetizer): + ENCODING: str + UNICODE_HANDLING: str + def handle_line(self, line: str) -> None: ... + def write_line(self, text: str) -> None: ... 
+ +class ReaderThread(threading.Thread): + serial: Serial + protocol_factory: Callable[[], Protocol] + alive: bool + protocol: Protocol + def __init__(self, serial_instance: Serial, protocol_factory: Callable[[], Protocol]) -> None: ... + def stop(self) -> None: ... + def write(self, data: ReadableBuffer) -> int: ... + def close(self) -> None: ... + def connect(self) -> tuple[Self, Protocol]: ... + def __enter__(self) -> Protocol: ... + def __exit__(self, __exc_type: object, __exc_val: object, __exc_tb: object) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/hexlify_codec.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/hexlify_codec.pyi new file mode 100644 index 00000000..252a6a5d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/hexlify_codec.pyi @@ -0,0 +1,23 @@ +import codecs +from _typeshed import ReadableBuffer + +HEXDIGITS: str + +def hex_encode(data: str, errors: str = ...) -> tuple[bytes, int]: ... +def hex_decode(data: bytes, errors: str = ...) -> tuple[str, int]: ... + +class Codec(codecs.Codec): + def encode(self, data: str, errors: str = ...) -> tuple[bytes, int]: ... + def decode(self, data: bytes, errors: str = ...) -> tuple[str, int]: ... + +class IncrementalEncoder(codecs.IncrementalEncoder): + state: int + def encode(self, data: str, final: bool = ...) -> bytes: ... + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, data: ReadableBuffer, final: bool = ...) -> str: ... + +class StreamWriter(Codec, codecs.StreamWriter): ... +class StreamReader(Codec, codecs.StreamReader): ... + +def getregentry() -> codecs.CodecInfo: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports.pyi new file mode 100644 index 00000000..0266c39a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports.pyi @@ -0,0 +1,11 @@ +import re +import sys +from collections.abc import Generator + +if sys.platform == "win32": + from serial.tools.list_ports_windows import comports as comports +else: + from serial.tools.list_ports_posix import comports as comports + +def grep(regexp: str | re.Pattern[str], include_links: bool = ...) -> Generator[tuple[str, str, str], None, None]: ... +def main() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_common.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_common.pyi new file mode 100644 index 00000000..ee607525 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_common.pyi @@ -0,0 +1,33 @@ +from collections.abc import Collection +from typing import Any + +def numsplit(text: str) -> list[str | int]: ... 
+ +class ListPortInfo: + device: str + name: str + description: str + hwid: str + # USB specific data: the vid and pid attributes below are specific to USB devices only and + # should be marked as Optional. Since the majority of the serial devices nowadays are USB + # devices, typing them as Optional will be unnecessarily annoying. We type them with as + # `int | Any` so that obvious typing errors like ListPortInfo.pid + "str" are flagged. + # As desired, this will cause a false negative if the value is ever None, but may also cause + # other false negatives from the Any proliferating. The other USB attributes are correctly + # typed as Optional because they may be `None` even for USB devices + # Original discussion at https://github.com/python/typeshed/pull/9347#issuecomment-1358245865. + vid: int | Any + pid: int | Any + serial_number: str | None + location: str | None + manufacturer: str | None + product: str | None + interface: str | None + def __init__(self, device: str, skip_link_detection: bool = ...) -> None: ... + def usb_description(self) -> str: ... + def usb_info(self) -> str: ... + def apply_usb_info(self) -> None: ... + def __lt__(self, other: ListPortInfo) -> bool: ... + def __getitem__(self, index: int) -> str: ... + +def list_links(devices: Collection[str]) -> list[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_linux.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_linux.pyi new file mode 100644 index 00000000..256f46ea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_linux.pyi @@ -0,0 +1,11 @@ +from serial.tools.list_ports_common import ListPortInfo + +class SysFS(ListPortInfo): + usb_device_path: str | None + device_path: str | None + subsystem: str | None + usb_interface_path: str | None + def __init__(self, device: str) -> None: ... + def read_line(self, *args: str) -> str | None: ... + +def comports(include_links: bool = ...) -> list[SysFS]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_osx.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_osx.pyi new file mode 100644 index 00000000..eb193d82 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_osx.pyi @@ -0,0 +1,36 @@ +import ctypes +import sys + +from serial.tools.list_ports_common import ListPortInfo + +if sys.platform == "darwin": + iokit: ctypes.CDLL + cf: ctypes.CDLL + kIOMasterPortDefault: int + kCFAllocatorDefault: ctypes.c_void_p + kCFStringEncodingMacRoman: int + kCFStringEncodingUTF8: int + kUSBVendorString: str + kUSBSerialNumberString: str + io_name_size: int + KERN_SUCCESS: int + kern_return_t = ctypes.c_int + kCFNumberSInt8Type: int + kCFNumberSInt16Type: int + kCFNumberSInt32Type: int + kCFNumberSInt64Type: int + + def get_string_property(device_type: ctypes._CData, property: str) -> str | None: ... + def get_int_property(device_type: ctypes._CData, property: str, cf_number_type: int) -> int | None: ... + def IORegistryEntryGetName(device: ctypes._CData) -> str | None: ... + def IOObjectGetClass(device: ctypes._CData) -> bytes: ... + def GetParentDeviceByType(device: ctypes._CData, parent_type: str) -> ctypes._CData | None: ... 
+ def GetIOServicesByType(service_type: str) -> list[ctypes._CData]: ... + def location_to_string(locationID: int) -> str: ... + + # `SuitableSerialInterface` has required attributes `id: int` and `name: str` but they are not defined on the class + class SuitableSerialInterface: ... + + def scan_interfaces() -> list[SuitableSerialInterface]: ... + def search_for_locationID_in_interfaces(serial_interfaces: list[SuitableSerialInterface], locationID: int) -> str | None: ... + def comports(include_links: bool = ...) -> list[ListPortInfo]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_posix.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_posix.pyi new file mode 100644 index 00000000..969dfd42 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_posix.pyi @@ -0,0 +1,11 @@ +import sys + +from serial.tools.list_ports_common import ListPortInfo + +if sys.platform != "win32": + if sys.platform == "linux": + from serial.tools.list_ports_linux import comports as comports + elif sys.platform == "darwin": + from serial.tools.list_ports_osx import comports as comports + else: + def comports(include_links: bool = ...) -> list[ListPortInfo]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_windows.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_windows.pyi new file mode 100644 index 00000000..ce51d57b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/list_ports_windows.pyi @@ -0,0 +1,76 @@ +import ctypes +import sys +from collections.abc import Generator +from ctypes.wintypes import DWORD + +from serial.tools.list_ports_common import ListPortInfo + +if sys.platform == "win32": + + def ValidHandle( + value: type[ctypes._CData] | None, func: ctypes._FuncPointer, arguments: tuple[ctypes._CData, ...] + ) -> ctypes._CData: ... 
+ + NULL: int + HDEVINFO = ctypes.c_void_p + LPCTSTR = ctypes.c_wchar_p + PCTSTR = ctypes.c_wchar_p + PTSTR = ctypes.c_wchar_p + LPDWORD: ctypes._Pointer[DWORD] + PDWORD: ctypes._Pointer[DWORD] + LPBYTE = ctypes.c_void_p + PBYTE = ctypes.c_void_p + ACCESS_MASK = DWORD + REGSAM = ACCESS_MASK + + class GUID(ctypes.Structure): + Data1: ctypes._CField + Data2: ctypes._CField + Data3: ctypes._CField + Data4: ctypes._CField + + class SP_DEVINFO_DATA(ctypes.Structure): + cbSize: ctypes._CField + ClassGuid: ctypes._CField + DevInst: ctypes._CField + Reserved: ctypes._CField + PSP_DEVINFO_DATA: type[ctypes._Pointer[SP_DEVINFO_DATA]] + PSP_DEVICE_INTERFACE_DETAIL_DATA = ctypes.c_void_p + setupapi: ctypes.WinDLL + SetupDiDestroyDeviceInfoList: ctypes._NamedFuncPointer + SetupDiClassGuidsFromName: ctypes._NamedFuncPointer + SetupDiEnumDeviceInfo: ctypes._NamedFuncPointer + SetupDiGetClassDevs: ctypes._NamedFuncPointer + SetupDiGetDeviceRegistryProperty: ctypes._NamedFuncPointer + SetupDiGetDeviceInstanceId: ctypes._NamedFuncPointer + SetupDiOpenDevRegKey: ctypes._NamedFuncPointer + advapi32: ctypes.WinDLL + RegCloseKey: ctypes._NamedFuncPointer + RegQueryValueEx: ctypes._NamedFuncPointer + cfgmgr32: ctypes.WinDLL + CM_Get_Parent: ctypes._NamedFuncPointer + CM_Get_Device_IDW: ctypes._NamedFuncPointer + CM_MapCrToWin32Err: ctypes._NamedFuncPointer + DIGCF_PRESENT: int + DIGCF_DEVICEINTERFACE: int + INVALID_HANDLE_VALUE: int + ERROR_INSUFFICIENT_BUFFER: int + ERROR_NOT_FOUND: int + SPDRP_HARDWAREID: int + SPDRP_FRIENDLYNAME: int + SPDRP_LOCATION_PATHS: int + SPDRP_MFG: int + DICS_FLAG_GLOBAL: int + DIREG_DEV: int + KEY_READ: int + MAX_USB_DEVICE_TREE_TRAVERSAL_DEPTH: int + + def get_parent_serial_number( + child_devinst: ctypes._CData, + child_vid: int | None, + child_pid: int | None, + depth: int = ..., + last_serial_number: str | None = ..., + ) -> str: ... + def iterate_comports() -> Generator[ListPortInfo, None, None]: ... + def comports(include_links: bool = ...) -> list[ListPortInfo]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/miniterm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/miniterm.pyi new file mode 100644 index 00000000..70b4b49e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/tools/miniterm.pyi @@ -0,0 +1,111 @@ +import codecs +import sys +import threading +from collections.abc import Iterable +from typing import Any, BinaryIO, TextIO +from typing_extensions import Self + +from serial import Serial + +def key_description(character: str) -> str: ... + +class ConsoleBase: + byte_output: BinaryIO + output: codecs.StreamWriter | TextIO + def __init__(self) -> None: ... + def setup(self) -> None: ... + def cleanup(self) -> None: ... + def getkey(self) -> None: ... + def write_bytes(self, byte_string: bytes) -> None: ... + def write(self, text: str) -> None: ... + def cancel(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: object, **kwargs: object) -> None: ... + +if sys.platform == "win32": + class Out: + fd: int + def __init__(self, fd: int) -> None: ... + def flush(self) -> None: ... + def write(self, s: bytes) -> None: ... 
+ + class Console(ConsoleBase): + fncodes: dict[str, str] + navcodes: dict[str, str] + +else: + class Console(ConsoleBase): + fd: int + old: list[Any] # return type of termios.tcgetattr() + enc_stdin: TextIO + +class Transform: + def rx(self, text: str) -> str: ... + def tx(self, text: str) -> str: ... + def echo(self, text: str) -> str: ... + +class CRLF(Transform): ... +class CR(Transform): ... +class LF(Transform): ... + +class NoTerminal(Transform): + REPLACEMENT_MAP: dict[int, int] + +class NoControls(NoTerminal): + REPLACEMENT_MAP: dict[int, int] + +class Printable(Transform): ... + +class Colorize(Transform): + input_color: str + echo_color: str + +class DebugIO(Transform): ... + +EOL_TRANSFORMATIONS: dict[str, type[Transform]] +TRANSFORMATIONS: dict[str, type[Transform]] + +def ask_for_port() -> str: ... + +class Miniterm: + console: Console + serial: Serial + echo: bool + raw: bool + input_encoding: str + output_encoding: str + eol: str + filters: Iterable[str] + exit_character: str + menu_character: str + alive: bool | None + receiver_thread: threading.Thread | None + rx_decoder: codecs.IncrementalDecoder | None + tx_decoder: codecs.IncrementalDecoder | None + tx_encoder: codecs.IncrementalEncoder | None + def __init__(self, serial_instance: Serial, echo: bool = ..., eol: str = ..., filters: Iterable[str] = ...) -> None: ... + transmitter_thread: threading.Thread + def start(self) -> None: ... + def stop(self) -> None: ... + def join(self, transmit_only: bool = ...) -> None: ... + def close(self) -> None: ... + tx_transformations: list[Transform] + rx_transformations: list[Transform] + def update_transformations(self) -> None: ... + def set_rx_encoding(self, encoding: str, errors: str = ...) -> None: ... + def set_tx_encoding(self, encoding: str, errors: str = ...) -> None: ... + def dump_port_settings(self) -> None: ... + def reader(self) -> None: ... + def writer(self) -> None: ... + def handle_menu_key(self, c: str) -> None: ... + def upload_file(self) -> None: ... + def change_filter(self) -> None: ... + def change_encoding(self) -> None: ... + def change_baudrate(self) -> None: ... + def change_port(self) -> None: ... + def suspend_port(self) -> None: ... + def get_help_text(self) -> str: ... + +def main( + default_port: str | None = ..., default_baudrate: int = ..., default_rts: int | None = ..., default_dtr: int | None = ... +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_alt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_alt.pyi new file mode 100644 index 00000000..9711af93 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_alt.pyi @@ -0,0 +1,3 @@ +from serial import Serial + +def serial_class_for_url(url: str) -> tuple[str, Serial]: ... 
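The `int | Any` compromise documented on ListPortInfo.vid / .pid in list_ports_common.pyi above can be seen in a small sketch (not part of the vendored stubs): obvious misuses are still flagged, while the non-USB case, where both values are None at runtime, needs an explicit guard.

# Sketch of the trade-off described in the ListPortInfo comment above;
# comports() comes from serial.tools.list_ports.
from serial.tools.list_ports import comports

for info in comports():
    # A checker still flags obvious mistakes such as `info.pid + "str"`,
    # because the int arm of `int | Any` rejects the addition.
    if isinstance(info.vid, int) and isinstance(info.pid, int):
        # Runtime guard for non-USB ports, where vid/pid are actually None.
        print(f"{info.device}: VID={info.vid:04x} PID={info.pid:04x}")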
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_cp2110.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_cp2110.pyi new file mode 100644 index 00000000..79bceddc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_cp2110.pyi @@ -0,0 +1,9 @@ +from serial.serialutil import SerialBase + +class Serial(SerialBase): + def open(self) -> None: ... + def from_url(self, url: str) -> bytes: ... + @property + def in_waiting(self) -> int: ... + def reset_input_buffer(self) -> None: ... + def reset_output_buffer(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_hwgrep.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_hwgrep.pyi new file mode 100644 index 00000000..93dc1402 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_hwgrep.pyi @@ -0,0 +1,4 @@ +import serial + +class Serial(serial.Serial): + def from_url(self, url: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_loop.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_loop.pyi new file mode 100644 index 00000000..89582bb9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_loop.pyi @@ -0,0 +1,29 @@ +import logging +import queue + +from serial.serialutil import SerialBase + +LOGGER_LEVELS: dict[str, int] + +class Serial(SerialBase): + buffer_size: int + queue: queue.Queue[bytes | None] | None + logger: logging.Logger | None + def open(self) -> None: ... + def from_url(self, url: str) -> None: ... + @property + def in_waiting(self) -> int: ... + def cancel_read(self) -> None: ... + def cancel_write(self) -> None: ... + def reset_input_buffer(self) -> None: ... + def reset_output_buffer(self) -> None: ... + @property + def out_waiting(self) -> int: ... + @property + def cts(self) -> bool: ... + @property + def dsr(self) -> bool: ... + @property + def ri(self) -> bool: ... + @property + def cd(self) -> bool: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_rfc2217.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_rfc2217.pyi new file mode 100644 index 00000000..82903b51 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_rfc2217.pyi @@ -0,0 +1 @@ +from serial.rfc2217 import Serial as Serial diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_socket.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_socket.pyi new file mode 100644 index 00000000..a7257a75 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_socket.pyi @@ -0,0 +1,23 @@ +import logging + +from serial.serialutil import SerialBase + +LOGGER_LEVELS: dict[str, int] +POLL_TIMEOUT: float + +class Serial(SerialBase): + logger: logging.Logger | None + def open(self) -> None: ... + def from_url(self, url: str) -> tuple[str, int]: ... + @property + def in_waiting(self) -> int: ... + def reset_input_buffer(self) -> None: ... + def reset_output_buffer(self) -> None: ... + @property + def cts(self) -> bool: ... + @property + def dsr(self) -> bool: ... + @property + def ri(self) -> bool: ... + @property + def cd(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_spy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_spy.pyi new file mode 100644 index 00000000..7535be49 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/urlhandler/protocol_spy.pyi @@ -0,0 +1,34 @@ +from collections.abc import Generator +from typing import TextIO + +import serial + +def sixteen(data: bytes) -> Generator[tuple[str, str] | tuple[None, None], None, None]: ... +def hexdump(data: bytes) -> Generator[tuple[int, str], None, None]: ... + +class _Formatter: + def rx(self, data: bytes) -> None: ... + def tx(self, data: bytes) -> None: ... + def control(self, name: str, value: str) -> None: ... + +class FormatRaw(_Formatter): + output: TextIO + color: bool + rx_color: str + tx_color: str + def __init__(self, output: TextIO, color: bool) -> None: ... + +class FormatHexdump(_Formatter): + start_time: float + output: TextIO + color: bool + rx_color: str + tx_color: str + control_color: str + def __init__(self, output: TextIO, color: bool) -> None: ... + def write_line(self, timestamp: float, label: str, value: str, value2: str = ...) -> None: ... + +class Serial(serial.Serial): + formatter: FormatRaw | FormatHexdump | None + show_all: bool + def from_url(self, url: str) -> str: ... 
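A short sketch (not part of the vendored stubs) of how the urlhandler stubs above relate to the serial_for_url() factory declared earlier in serial/__init__.pyi: the URL scheme selects one of the handler Serial subclasses, and the factory is annotated to return a Serial. The loop:// scheme used here is pyserial's built-in loopback handler.

# Sketch tying the urlhandler stubs to serial_for_url() from serial/__init__.pyi.
from serial import serial_for_url

port = serial_for_url("loop://", baudrate=115200, timeout=0.5)
port.write(b"ping")   # loop:// echoes written bytes back
print(port.read(4))   # -> b"ping"
port.close()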
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/win32.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/win32.pyi new file mode 100644 index 00000000..fdcc3081 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyserial/serial/win32.pyi @@ -0,0 +1,161 @@ +import sys +from ctypes import Structure, Union, _CField, _NamedFuncPointer, _Pointer, c_int64, c_ulong, c_void_p +from ctypes.wintypes import DWORD +from typing_extensions import TypeAlias + +if sys.platform == "win32": + def is_64bit() -> bool: ... + + ULONG_PTR: type[c_int64 | c_ulong] + + class _SECURITY_ATTRIBUTES(Structure): + nLength: _CField + lpSecurityDescriptor: _CField + bInheritHandle: _CField + LPSECURITY_ATTRIBUTES: type[_Pointer[_SECURITY_ATTRIBUTES]] + CreateEvent: _NamedFuncPointer + CreateFile: _NamedFuncPointer + # The following are included in __all__ but their existence is not guaranteed as + # they are defined in a try/except block. Their aliases above are always defined. + CreateEventW: _NamedFuncPointer + CreateFileW: _NamedFuncPointer + + class _OVERLAPPED(Structure): + Internal: _CField + InternalHigh: _CField + Offset: _CField + OffsetHigh: _CField + Pointer: _CField + hEvent: _CField + OVERLAPPED: TypeAlias = _OVERLAPPED + + class _COMSTAT(Structure): + fCtsHold: _CField + fDsrHold: _CField + fRlsdHold: _CField + fXoffHold: _CField + fXoffSent: _CField + fEof: _CField + fTxim: _CField + fReserved: _CField + cbInQue: _CField + cbOutQue: _CField + COMSTAT: TypeAlias = _COMSTAT + + class _DCB(Structure): + DCBlength: _CField + BaudRate: _CField + fBinary: _CField + fParity: _CField + fOutxCtsFlow: _CField + fOutxDsrFlow: _CField + fDtrControl: _CField + fDsrSensitivity: _CField + fTXContinueOnXoff: _CField + fOutX: _CField + fInX: _CField + fErrorChar: _CField + fNull: _CField + fRtsControl: _CField + fAbortOnError: _CField + fDummy2: _CField + wReserved: _CField + XonLim: _CField + XoffLim: _CField + ByteSize: _CField + Parity: _CField + StopBits: _CField + XonChar: _CField + XoffChar: _CField + ErrorChar: _CField + EofChar: _CField + EvtChar: _CField + wReserved1: _CField + DCB: TypeAlias = _DCB + + class _COMMTIMEOUTS(Structure): + ReadIntervalTimeout: _CField + ReadTotalTimeoutMultiplier: _CField + ReadTotalTimeoutConstant: _CField + WriteTotalTimeoutMultiplier: _CField + WriteTotalTimeoutConstant: _CField + COMMTIMEOUTS: TypeAlias = _COMMTIMEOUTS + + GetLastError: _NamedFuncPointer + LPOVERLAPPED: type[_Pointer[_OVERLAPPED]] + LPDWORD: type[_Pointer[DWORD]] + GetOverlappedResult: _NamedFuncPointer + ResetEvent: _NamedFuncPointer + LPCVOID = c_void_p + WriteFile: _NamedFuncPointer + LPVOID = c_void_p + ReadFile: _NamedFuncPointer + CloseHandle: _NamedFuncPointer + ClearCommBreak: _NamedFuncPointer + LPCOMSTAT: type[_Pointer[_COMSTAT]] + ClearCommError: _NamedFuncPointer + SetupComm: _NamedFuncPointer + EscapeCommFunction: _NamedFuncPointer + GetCommModemStatus: _NamedFuncPointer + LPDCB: type[_Pointer[_DCB]] + GetCommState: _NamedFuncPointer + LPCOMMTIMEOUTS: type[_Pointer[_COMMTIMEOUTS]] + GetCommTimeouts: _NamedFuncPointer + PurgeComm: _NamedFuncPointer + SetCommBreak: _NamedFuncPointer + SetCommMask: _NamedFuncPointer + SetCommState: _NamedFuncPointer + SetCommTimeouts: _NamedFuncPointer + WaitForSingleObject: _NamedFuncPointer + WaitCommEvent: _NamedFuncPointer + CancelIoEx: _NamedFuncPointer + + ONESTOPBIT: int + TWOSTOPBITS: int + NOPARITY: int + ODDPARITY: int 
+ EVENPARITY: int + RTS_CONTROL_HANDSHAKE: int + RTS_CONTROL_ENABLE: int + DTR_CONTROL_HANDSHAKE: int + DTR_CONTROL_ENABLE: int + MS_DSR_ON: int + EV_RING: int + EV_PERR: int + EV_ERR: int + SETXOFF: int + EV_RXCHAR: int + GENERIC_WRITE: int + PURGE_TXCLEAR: int + FILE_FLAG_OVERLAPPED: int + EV_DSR: int + MAXDWORD: int + EV_RLSD: int + ERROR_IO_PENDING: int + MS_CTS_ON: int + EV_EVENT1: int + EV_RX80FULL: int + PURGE_RXABORT: int + FILE_ATTRIBUTE_NORMAL: int + PURGE_TXABORT: int + SETXON: int + OPEN_EXISTING: int + MS_RING_ON: int + EV_TXEMPTY: int + EV_RXFLAG: int + MS_RLSD_ON: int + GENERIC_READ: int + EV_EVENT2: int + EV_CTS: int + EV_BREAK: int + PURGE_RXCLEAR: int + + class N11_OVERLAPPED4DOLLAR_48E(Union): + Offset: _CField + OffsetHigh: _CField + Pointer: _CField + + class N11_OVERLAPPED4DOLLAR_484DOLLAR_49E(Structure): + Offset: _CField + OffsetHigh: _CField + PVOID: TypeAlias = c_void_p diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/METADATA.toml new file mode 100644 index 00000000..26d6d1ca --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.2.*" +requires = ["types-paramiko"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/pysftp/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/pysftp/__init__.pyi new file mode 100644 index 00000000..6e42892a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/pysftp/__init__.pyi @@ -0,0 +1,128 @@ +from collections.abc import Callable, Sequence +from contextlib import AbstractContextManager +from stat import S_IMODE as S_IMODE +from types import TracebackType +from typing import IO +from typing_extensions import Literal, Self, TypeAlias + +import paramiko +from paramiko import AuthenticationException as AuthenticationException +from pysftp.exceptions import ( + ConnectionException as ConnectionException, + CredentialException as CredentialException, + HostKeysException as HostKeysException, +) +from pysftp.helpers import ( + WTCallbacks as WTCallbacks, + _PathCallback, + cd as cd, + known_hosts as known_hosts, + path_advance as path_advance, + path_retreat as path_retreat, + reparent as reparent, + st_mode_to_int as st_mode_to_int, + walktree as walktree, +) + +class CnOpts: + log: bool = ... + compression: bool = ... + ciphers: Sequence[str] | None = ... + hostkeys: paramiko.HostKeys = ... + def __init__(self, knownhosts: str | None = ...) -> None: ... + def get_hostkey(self, host: str) -> paramiko.PKey: ... + +_Callback: TypeAlias = Callable[[int, int], object] +_Path: TypeAlias = str | bytes + +class Connection: + def __init__( + self, + host: str, + username: str | None = ..., + private_key: str | paramiko.RSAKey | paramiko.AgentKey | None = ..., + password: str | None = ..., + port: int = ..., + private_key_pass: str | None = ..., + ciphers: Sequence[str] | None = ..., + log: bool = ..., + cnopts: CnOpts | None = ..., + default_path: _Path | None = ..., + ) -> None: ... + @property + def pwd(self) -> str: ... + def get( + self, remotepath: _Path, localpath: _Path | None = ..., callback: _Callback | None = ..., preserve_mtime: bool = ... + ) -> None: ... + def get_d(self, remotedir: _Path, localdir: _Path, preserve_mtime: bool = ...) -> None: ... 
+ def get_r(self, remotedir: _Path, localdir: _Path, preserve_mtime: bool = ...) -> None: ... + def getfo(self, remotepath: _Path, flo: IO[bytes], callback: _Callback | None = ...) -> int: ... + def put( + self, + localpath: _Path, + remotepath: _Path | None = ..., + callback: _Callback | None = ..., + confirm: bool = ..., + preserve_mtime: bool = ..., + ) -> paramiko.SFTPAttributes: ... + def put_d(self, localpath: _Path, remotepath: _Path, confirm: bool = ..., preserve_mtime: bool = ...) -> None: ... + def put_r(self, localpath: _Path, remotepath: _Path, confirm: bool = ..., preserve_mtime: bool = ...) -> None: ... + def putfo( + self, + flo: IO[bytes], + remotepath: _Path | None = ..., + file_size: int = ..., + callback: _Callback | None = ..., + confirm: bool = ..., + ) -> paramiko.SFTPAttributes: ... + def execute(self, command: str) -> list[str]: ... + def cd(self, remotepath: _Path | None = ...) -> AbstractContextManager[None]: ... # noqa: F811 + def chdir(self, remotepath: _Path) -> None: ... + def cwd(self, remotepath: _Path) -> None: ... + def chmod(self, remotepath: _Path, mode: int = ...) -> None: ... + def chown(self, remotepath: _Path, uid: int | None = ..., gid: int | None = ...) -> None: ... + def getcwd(self) -> str: ... + def listdir(self, remotepath: _Path = ...) -> list[str]: ... + def listdir_attr(self, remotepath: _Path = ...) -> list[paramiko.SFTPAttributes]: ... + def mkdir(self, remotepath: _Path, mode: int = ...) -> None: ... + def normalize(self, remotepath: _Path) -> str: ... + def isdir(self, remotepath: _Path) -> bool: ... + def isfile(self, remotepath: _Path) -> bool: ... + def makedirs(self, remotedir: _Path, mode: int = ...) -> None: ... + def readlink(self, remotelink: _Path) -> str: ... + def remove(self, remotefile: _Path) -> None: ... + def unlink(self, remotefile: _Path) -> None: ... + def rmdir(self, remotepath: _Path) -> None: ... + def rename(self, remote_src: _Path, remote_dest: _Path) -> None: ... + def stat(self, remotepath: _Path) -> paramiko.SFTPAttributes: ... + def lstat(self, remotepath: _Path) -> paramiko.SFTPAttributes: ... + def close(self) -> None: ... + def open(self, remote_file: _Path, mode: str = ..., bufsize: int = ...) -> paramiko.SFTPFile: ... + def exists(self, remotepath: _Path) -> bool: ... + def lexists(self, remotepath: _Path) -> bool: ... + def symlink(self, remote_src: _Path, remote_dest: _Path) -> None: ... + def truncate(self, remotepath: _Path, size: int) -> int: ... + def walktree( # noqa: F811 + self, remotepath: _Path, fcallback: _PathCallback, dcallback: _PathCallback, ucallback: _PathCallback, recurse: bool = ... + ) -> None: ... + @property + def sftp_client(self) -> paramiko.SFTPClient: ... + @property + def active_ciphers(self) -> tuple[str, str]: ... + @property + def active_compression(self) -> tuple[str, str]: ... + @property + def security_options(self) -> paramiko.SecurityOptions: ... + @property + def logfile(self) -> str | Literal[False]: ... + @property + def timeout(self) -> float | None: ... + @timeout.setter + def timeout(self, val: float | None) -> None: ... + @property + def remote_server_key(self) -> paramiko.PKey: ... + def __del__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, etype: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... 
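A brief usage sketch of the Connection API annotated above (host, credentials and paths are placeholders, not part of the vendored stubs): the _Callback alias constrains the progress callback to two ints (bytes transferred so far, total bytes), _Path accepts str or bytes, and __enter__ returns the Connection itself via Self.

# Sketch of the Connection annotations above in use; host, credentials and
# remote paths are placeholders.
import pysftp

def progress(transferred: int, total: int) -> None:
    # Matches the _Callback alias: Callable[[int, int], object].
    print(f"{transferred}/{total} bytes")

with pysftp.Connection("sftp.example.com", username="demo", password="secret") as sftp:
    sftp.get("remote/data.csv", "data.csv", callback=progress)
    names: list[str] = sftp.listdir(".")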
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/pysftp/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/pysftp/exceptions.pyi new file mode 100644 index 00000000..7416456a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/pysftp/exceptions.pyi @@ -0,0 +1,9 @@ +class ConnectionException(Exception): + message: str = ... + def __init__(self, host: str, port: int) -> None: ... + +class CredentialException(Exception): + message: str = ... + def __init__(self, message: str) -> None: ... + +class HostKeysException(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/pysftp/helpers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/pysftp/helpers.pyi new file mode 100644 index 00000000..f792cd3e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pysftp/pysftp/helpers.pyi @@ -0,0 +1,35 @@ +from collections.abc import Callable, Iterator +from contextlib import AbstractContextManager +from typing_extensions import TypeAlias + +def known_hosts() -> str: ... +def st_mode_to_int(val: int) -> int: ... + +class WTCallbacks: + def __init__(self) -> None: ... + def file_cb(self, pathname: str) -> None: ... + def dir_cb(self, pathname: str) -> None: ... + def unk_cb(self, pathname: str) -> None: ... + @property + def flist(self) -> list[str]: ... + @flist.setter + def flist(self, val: list[str]) -> None: ... + @property + def dlist(self) -> list[str]: ... + @dlist.setter + def dlist(self, val: list[str]) -> None: ... + @property + def ulist(self) -> list[str]: ... + @ulist.setter + def ulist(self, val: list[str]) -> None: ... + +def path_advance(thepath: str, sep: str = ...) -> Iterator[str]: ... +def path_retreat(thepath: str, sep: str = ...) -> Iterator[str]: ... +def reparent(newparent: str, oldpath: str) -> str: ... + +_PathCallback: TypeAlias = Callable[[str], object] + +def walktree( + localpath: str, fcallback: _PathCallback, dcallback: _PathCallback, ucallback: _PathCallback, recurse: bool = ... +) -> None: ... +def cd(localpath: str | None = ...) -> AbstractContextManager[None]: ... 
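The helpers stubbed above pair naturally: walktree() takes one _PathCallback per entry kind, and WTCallbacks simply records the paths it is handed. A sketch with a placeholder local directory (not part of the vendored stubs):

# Sketch pairing walktree() with WTCallbacks as annotated above; the local
# directory is a placeholder. Each callback receives a single str path,
# matching the _PathCallback alias.
from pysftp.helpers import WTCallbacks, walktree

cb = WTCallbacks()
walktree("/some/local/dir", fcallback=cb.file_cb, dcallback=cb.dir_cb, ucallback=cb.unk_cb)
print(cb.flist)  # files visited
print(cb.dlist)  # directories visited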
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytest-lazy-fixture/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytest-lazy-fixture/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..24eb54e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytest-lazy-fixture/@tests/stubtest_allowlist.txt @@ -0,0 +1,12 @@ +# Part of the pytest API, which is internal: +pytest_lazyfixture.pytest_.* + +# Internal undocumented API: +pytest_lazyfixture.fillfixtures +pytest_lazyfixture.normalize_call +pytest_lazyfixture.normalize_metafunc_calls +pytest_lazyfixture.sorted_by_dependency +pytest_lazyfixture.copy_metafunc + +# Compat: +pytest_lazyfixture.PY3 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytest-lazy-fixture/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytest-lazy-fixture/METADATA.toml new file mode 100644 index 00000000..03031f1e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytest-lazy-fixture/METADATA.toml @@ -0,0 +1 @@ +version = "0.6.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytest-lazy-fixture/pytest_lazyfixture.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytest-lazy-fixture/pytest_lazyfixture.pyi new file mode 100644 index 00000000..85a357e8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytest-lazy-fixture/pytest_lazyfixture.pyi @@ -0,0 +1,14 @@ +from collections.abc import Iterable +from typing import Any, overload +from typing_extensions import TypeGuard + +class LazyFixture: + name: str + def __init__(self, name: str) -> None: ... + def __eq__(self, other: object) -> bool: ... + +@overload +def lazy_fixture(names: str) -> LazyFixture: ... +@overload +def lazy_fixture(names: Iterable[str]) -> list[LazyFixture] | Any: ... +def is_lazy_fixture(val: object) -> TypeGuard[LazyFixture]: ... 
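The overloads and the TypeGuard above can be read off a short example (not part of the vendored stubs; the fixture names are just illustrative strings): a single name yields one LazyFixture, an iterable of names yields a list, and is_lazy_fixture() narrows an arbitrary object.

# Sketch of the lazy_fixture overloads and the is_lazy_fixture TypeGuard above.
from pytest_lazyfixture import is_lazy_fixture, lazy_fixture

one = lazy_fixture("tmp_path")               # inferred as LazyFixture
many = lazy_fixture(["tmp_path", "capsys"])  # inferred as list[LazyFixture] | Any

def describe(value: object) -> str:
    if is_lazy_fixture(value):
        return value.name   # narrowed to LazyFixture, so .name is allowed
    return repr(value)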
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..ca0f86d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/@tests/stubtest_allowlist.txt @@ -0,0 +1,4 @@ +# Runtime only-hack that doesn't affect typing: +crontabs.CronTabs.__new__ +# stub does not have *args argument "args", but function doesn't actually accept positional args +crontab.CronTab.remove_all diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/METADATA.toml new file mode 100644 index 00000000..5952f2b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/METADATA.toml @@ -0,0 +1 @@ +version = "2.7.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/cronlog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/cronlog.pyi new file mode 100644 index 00000000..f21d4516 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/cronlog.pyi @@ -0,0 +1,33 @@ +from _typeshed import StrOrBytesPath +from codecs import StreamReaderWriter +from collections.abc import Generator, Iterator +from types import TracebackType +from typing_extensions import Self + +MATCHER: str + +class LogReader: + filename: StrOrBytesPath + mass: int + size: int + read: int + pipe: StreamReaderWriter | None + def __init__(self, filename: StrOrBytesPath, mass: int = ...) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, error_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def readlines(self, until: int = ...) -> Generator[tuple[int, str], None, None]: ... + +class CronLog(LogReader): + user: str | None + def __init__(self, filename: StrOrBytesPath = ..., user: str | None = ...) -> None: ... + def for_program(self, command: str) -> ProgramLog: ... + def __iter__(self) -> dict[str, str | None]: ... # type: ignore[override] + +class ProgramLog: + log: CronLog + command: str + def __init__(self, log: CronLog, command: str) -> None: ... + def __iter__(self) -> dict[str, str | None]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/crontab.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/crontab.pyi new file mode 100644 index 00000000..7dbeb53e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/crontab.pyi @@ -0,0 +1,261 @@ +import re +import subprocess +from _typeshed import Incomplete, Unused +from builtins import range as _range +from collections import OrderedDict +from collections.abc import Callable, Generator, Iterable, Iterator +from datetime import datetime +from logging import Logger +from types import TracebackType +from typing import Any +from typing_extensions import Self, SupportsIndex, TypeAlias + +from cronlog import CronLog + +_User: TypeAlias = str | bool | None + +__pkgname__: str +ITEMREX: re.Pattern[str] +SPECREX: re.Pattern[str] +DEVNULL: str +WEEK_ENUM: list[str] +MONTH_ENUM: list[str | None] +SPECIALS: dict[str, str] +SPECIAL_IGNORE: list[str] +S_INFO: list[dict[str, Any]] +WINOS: bool +POSIX: bool +SYSTEMV: bool +ZERO_PAD: bool +LOG: Logger +CRON_COMMAND: str +SHELL: str +current_user: Callable[[], str | None] + +def open_pipe(cmd: str, *args: str, **flags: str) -> subprocess.Popen[Any]: ... + +class CronTab: + lines: list[str | CronItem] | None + crons: list[CronItem] | None + filen: str | None + cron_command: str + env: OrderedVariableList | None + root: bool + intab: str | None + tabfile: str | None + def __init__( + self, user: _User = ..., tab: str | None = ..., tabfile: str | None = ..., log: CronLog | str | None = ... + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + @property + def log(self) -> CronLog: ... + @property + def user(self) -> _User: ... + @property + def user_opt(self) -> dict[str, str]: ... + def read(self, filename: str | None = ...) -> None: ... + def append( + self, + item: CronItem, + line: str = ..., + read: bool = ..., + before: str | re.Pattern[str] | list[CronItem] | tuple[CronItem, ...] | Generator[CronItem, Any, Any] | None = ..., + ) -> None: ... + def write(self, filename: str | None = ..., user: _User = ..., errors: bool = ...) -> None: ... + def write_to_user(self, user: bool | str = ...) -> None: ... + # Usually `kwargs` are just `now: datetime | None`, but technically this can + # work for `CronItem` subclasses, which might define other kwargs. + def run_pending(self, **kwargs: Any) -> Iterator[str]: ... + # There are two known kwargs and others are unused: + def run_scheduler(self, timeout: int = ..., *, warp: object = ..., cadence: int = ..., **kwargs: Unused) -> Iterator[str]: ... + def render(self, errors: bool = ..., specials: bool | None = ...) -> str: ... + def new( + self, + command: str = ..., + comment: str = ..., + user: str | None = ..., + pre_comment: bool = ..., + before: str | re.Pattern[str] | list[CronItem] | tuple[CronItem, ...] | Generator[CronItem, Any, Any] | None = ..., + ) -> CronItem: ... + def find_command(self, command: str | re.Pattern[str]) -> Iterator[CronItem]: ... + def find_comment(self, comment: str | re.Pattern[str]) -> Iterator[CronItem]: ... + def find_time(self, *args: Any) -> Iterator[CronItem]: ... + @property + def commands(self) -> Iterator[str]: ... + @property + def comments(self) -> Iterator[str]: ... 
+ # You cannot actually pass `*args`, it will raise an exception, + # also known kwargs are added: + def remove_all( + self, *, command: str | re.Pattern[str] = ..., comment: str | re.Pattern[str] = ..., time: Any = ..., **kwargs: object + ) -> int: ... + def remove(self, *items: CronItem | Iterable[CronItem]) -> int: ... + def __iter__(self) -> Iterator[CronItem]: ... + def __getitem__(self, i: SupportsIndex) -> CronItem: ... + def __len__(self) -> int: ... + +class CronItem: + cron: CronTab | None + user: _User + valid: bool + enabled: bool + special: bool + comment: str + command: str | None + last_run: datetime | None + env: OrderedVariableList + pre_comment: bool + marker: str | None + stdin: str | None + slices: CronSlices + def __init__(self, command: str = ..., comment: str = ..., user: _User = ..., pre_comment: bool = ...) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + @classmethod + def from_line(cls, line: str, user: str | None = ..., cron: Incomplete | None = ...) -> Self: ... + def delete(self) -> None: ... + def set_command(self, cmd: str, parse_stdin: bool = ...) -> None: ... + def set_comment(self, cmt: str, pre_comment: bool = ...) -> None: ... + def parse(self, line: str) -> None: ... + def enable(self, enabled: bool = ...) -> bool: ... + def is_enabled(self) -> bool: ... + def is_valid(self) -> bool: ... + def render(self, specials: bool = ...) -> str: ... + def every_reboot(self) -> None: ... + def every(self, unit: int = ...) -> Every: ... + def setall(self, *args: Any) -> None: ... + def clear(self) -> None: ... + def frequency(self, year: int | None = ...) -> int: ... + def frequency_per_year(self, year: int | None = ...) -> int: ... + def frequency_per_day(self) -> int: ... + def frequency_per_hour(self) -> int: ... + def run_pending(self, now: datetime | None = ...) -> int | str: ... + def run(self) -> str: ... + # TODO: use types from `croniter` module here: + def schedule(self, date_from: datetime | None = ...) -> Incomplete: ... + # TODO: use types from `cron_descriptor` here: + def description(self, **kw: Incomplete) -> Incomplete: ... + @property + def log(self) -> CronLog: ... + @property + def minute(self) -> int | str: ... + @property + def minutes(self) -> int | str: ... + @property + def hour(self) -> int | str: ... + @property + def hours(self) -> int | str: ... + @property + def day(self) -> int | str: ... + @property + def dom(self) -> int | str: ... + @property + def month(self) -> int | str: ... + @property + def months(self) -> int | str: ... + @property + def dow(self) -> int | str: ... + def __len__(self) -> int: ... + def __getitem__(self, key: int | str) -> int | str: ... + def __lt__(self, value: object) -> bool: ... + def __gt__(self, value: object) -> bool: ... + +class Every: + slices: CronSlices + unit: int + # TODO: add generated attributes + def __init__(self, item: CronSlices, units: int) -> None: ... + def set_attr(self, target: int) -> Callable[[], None]: ... + def year(self) -> None: ... + +class CronSlices(list[CronSlice]): + special: bool | None + def __init__(self, *args: Any) -> None: ... + def is_self_valid(self, *args: Any) -> bool: ... + @classmethod + def is_valid(cls, *args: Any) -> bool: ... + def setall(self, *slices: str) -> None: ... + def clean_render(self) -> str: ... + def render(self, specials: bool = ...) -> str: ... + def clear(self) -> None: ... + def frequency(self, year: int | None = ...) -> int: ... + def frequency_per_year(self, year: int | None = ...) -> int: ... 
+ def frequency_per_day(self) -> int: ... + def frequency_per_hour(self) -> int: ... + def __eq__(self, arg: object) -> bool: ... + +class SundayError(KeyError): ... + +class Also: + obj: CronSlice + def __init__(self, obj: CronSlice) -> None: ... + # These methods actually use `*args` but pass them on to `CronSlice` methods, + # which is why they are typed as `Any`. + def every(self, *a: Any) -> _Part: ... + def on(self, *a: Any) -> list[_Part]: ... + def during(self, *a: Any) -> _Part: ... + +_Part: TypeAlias = int | CronValue | CronRange + +class CronSlice: + min: int | None + max: int | None + name: str | None + enum: list[str | None] | None + parts: list[_Part] + def __init__(self, info: int | dict[str, Any], value: str | None = ...) -> None: ... + def __hash__(self) -> int: ... + def parse(self, value: str | None) -> None: ... + def render(self, resolve: bool = ..., specials: bool = ...) -> str: ... + def __eq__(self, arg: object) -> bool: ... + def every(self, n_value: int, also: bool = ...) -> _Part: ... + # The only known kwarg; others are unused. + # `*args` are passed to `parse_value`, so they are `Any` + def on(self, *n_value: Any, also: bool = ...) -> list[_Part]: ... + def during(self, vfrom: int | str, vto: int | str, also: bool = ...) -> _Part: ... + @property + def also(self) -> Also: ... + def clear(self) -> None: ... + def get_range(self, *vrange: int | str | CronValue) -> list[int | CronRange]: ... + def __iter__(self) -> Iterator[int]: ... + def __len__(self) -> int: ... + def parse_value(self, val: str, sunday: int | None = ...) -> int | CronValue: ... + +def get_cronvalue(value: int, enums: list[str]) -> int | CronValue: ... + +class CronValue: + text: str + value: int + def __init__(self, value: str, enums: list[str]) -> None: ... + def __lt__(self, value: object) -> bool: ... + def __int__(self) -> int: ... + +class CronRange: + dangling: int | None + slice: str + cron: CronTab | None + seq: int + def __init__(self, vslice: str, *vrange: int | str | CronValue) -> None: ... + # Not set in `__init__`: + vfrom: int | CronValue + vto: int | CronValue + def parse(self, value: str) -> None: ... + def all(self) -> None: ... + def render(self, resolve: bool = ...) -> str: ... + def range(self) -> _range: ... + def every(self, value: int | str) -> None: ... + def __lt__(self, value: object) -> bool: ... + def __gt__(self, value: object) -> bool: ... + def __int__(self) -> int: ... + +# TODO: make generic +class OrderedVariableList(OrderedDict[Incomplete, Incomplete]): + job: Incomplete + def __init__(self, *args: Any, **kw: Any) -> None: ... + @property + def previous(self) -> Incomplete: ... + def all(self) -> Self: ... + def __getitem__(self, key: Incomplete) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/crontabs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/crontabs.pyi new file mode 100644 index 00000000..85fd062d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-crontab/crontabs.pyi @@ -0,0 +1,24 @@ +from typing import Any + +from crontab import CronTab + +class UserSpool(list[CronTab]): + def __init__(self, loc: str, tabs: CronTabs | None = ...) -> None: ... + def listdir(self, loc: str) -> list[str]: ... + def get_owner(self, path: str) -> str: ... + def generate(self, loc: str, username: str) -> CronTab: ...
+ +class SystemTab(list[CronTab]): + def __init__(self, loc: str, tabs: CronTabs | None = ...) -> None: ... + +class AnaCronTab(list[CronTab]): + def __init__(self, loc: str, tabs: CronTabs | None = ...) -> None: ... + def add(self, loc: str, item: str, anajob: CronTab) -> CronTab: ... + +KNOWN_LOCATIONS: list[tuple[UserSpool | SystemTab | AnaCronTab, str]] + +class CronTabs(list[UserSpool | SystemTab | AnaCronTab]): + def __init__(self) -> None: ... + def add(self, cls: type[UserSpool | SystemTab | AnaCronTab], *args: Any) -> None: ... + @property + def all(self) -> CronTab: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-datemath/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-datemath/METADATA.toml new file mode 100644 index 00000000..8b3cb47b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-datemath/METADATA.toml @@ -0,0 +1,6 @@ +version = "1.5.*" +# Requires a version of arrow with a `py.typed` file +requires = ["arrow>=1.0.1"] + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-datemath/datemath/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-datemath/datemath/__init__.pyi new file mode 100644 index 00000000..ecb00250 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-datemath/datemath/__init__.pyi @@ -0,0 +1,12 @@ +from datetime import datetime + +import arrow + +from .helpers import DateMathException as DateMathException, parse as parse + +def dm( + expr: str, *, now: arrow.Arrow | None = ..., tz: str = ..., type: str | None = ..., roundDown: bool = ... +) -> arrow.Arrow: ... +def datemath( + expr: str, *, now: arrow.Arrow | None = ..., tz: str = ..., type: str | None = ..., roundDown: bool = ... +) -> datetime: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-datemath/datemath/helpers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-datemath/datemath/helpers.pyi new file mode 100644 index 00000000..52402ea6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-datemath/datemath/helpers.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete + +import arrow + +class DateMathException(Exception): ... + +def parse( + expression: str, now: arrow.Arrow | None = ..., tz: str = ..., type: str | None = ..., roundDown: bool = ... +) -> arrow.Arrow: ... +def __getattr__(name: str) -> Incomplete: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..852c593a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/@tests/stubtest_allowlist.txt @@ -0,0 +1,13 @@ +dateutil.parser._tzparser.__init__ +dateutil.parser.parserinfo.convertyear +dateutil.rrule.weekday.__init__ +dateutil.tz.tz.tzoffset.instance +dateutil.tz.tz.tzstr.instance +dateutil.tz.tzoffset.instance +dateutil.tz.tzstr.instance + +# Metaclass differs: +dateutil.tz.tzoffset +dateutil.tz.tzutc +dateutil.tz.tz.tzoffset +dateutil.tz.tz.tzutc diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/METADATA.toml new file mode 100644 index 00000000..2b4746de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/METADATA.toml @@ -0,0 +1,4 @@ +version = "2.8.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/_common.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/_common.pyi new file mode 100644 index 00000000..dd7eff9b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/_common.pyi @@ -0,0 +1,9 @@ +from typing_extensions import Self + +class weekday: + def __init__(self, weekday: int, n: int | None = ...) -> None: ... + def __call__(self, n: int) -> Self: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + weekday: int + n: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/easter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/easter.pyi new file mode 100644 index 00000000..33e366d4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/easter.pyi @@ -0,0 +1,8 @@ +from datetime import date +from typing_extensions import Literal + +EASTER_JULIAN: Literal[1] +EASTER_ORTHODOX: Literal[2] +EASTER_WESTERN: Literal[3] + +def easter(year: int, method: Literal[1, 2, 3] = ...) -> date: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/parser/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/parser/__init__.pyi new file mode 100644 index 00000000..96fd2247 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/parser/__init__.pyi @@ -0,0 +1,67 @@ +from collections.abc import Callable, Mapping +from datetime import datetime, tzinfo +from typing import IO, Any +from typing_extensions import TypeAlias + +from .isoparser import isoparse as isoparse, isoparser as isoparser + +_FileOrStr: TypeAlias = bytes | str | IO[str] | IO[Any] +_TzData: TypeAlias = tzinfo | int | str | None +_TzInfo: TypeAlias = Mapping[str, _TzData] | Callable[[str, int], _TzData] + +class parserinfo: + JUMP: list[str] + WEEKDAYS: list[tuple[str, ...]] + MONTHS: list[tuple[str, ...]] + HMS: list[tuple[str, str, str]] + AMPM: list[tuple[str, str]] + UTCZONE: list[str] + PERTAIN: list[str] + TZOFFSET: dict[str, int] + def __init__(self, dayfirst: bool = ..., yearfirst: bool = ...) -> None: ... + def jump(self, name: str) -> bool: ... + def weekday(self, name: str) -> int | None: ... + def month(self, name: str) -> int | None: ... + def hms(self, name: str) -> int | None: ... + def ampm(self, name: str) -> int | None: ... + def pertain(self, name: str) -> bool: ... + def utczone(self, name: str) -> bool: ... + def tzoffset(self, name: str) -> int | None: ... + def convertyear(self, year: int) -> int: ... + def validate(self, res: datetime) -> bool: ... + +class parser: + def __init__(self, info: parserinfo | None = ...) -> None: ... + def parse( + self, + timestr: _FileOrStr, + default: datetime | None = ..., + ignoretz: bool = ..., + tzinfos: _TzInfo | None = ..., + *, + dayfirst: bool | None = ..., + yearfirst: bool | None = ..., + fuzzy: bool = ..., + fuzzy_with_tokens: bool = ..., + ) -> datetime: ... + +DEFAULTPARSER: parser + +def parse( + timestr: _FileOrStr, + parserinfo: parserinfo | None = ..., + *, + dayfirst: bool | None = ..., + yearfirst: bool | None = ..., + ignoretz: bool = ..., + fuzzy: bool = ..., + fuzzy_with_tokens: bool = ..., + default: datetime | None = ..., + tzinfos: _TzInfo | None = ..., +) -> datetime: ... + +class _tzparser: ... + +DEFAULTTZPARSER: _tzparser + +class ParserError(ValueError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/parser/isoparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/parser/isoparser.pyi new file mode 100644 index 00000000..4cc86712 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/parser/isoparser.pyi @@ -0,0 +1,15 @@ +from _typeshed import SupportsRead +from datetime import date, datetime, time, tzinfo +from typing_extensions import TypeAlias + +_Readable: TypeAlias = SupportsRead[str | bytes] +_TakesAscii: TypeAlias = str | bytes | _Readable + +class isoparser: + def __init__(self, sep: str | bytes | None = ...): ... + def isoparse(self, dt_str: _TakesAscii) -> datetime: ... + def parse_isodate(self, datestr: _TakesAscii) -> date: ... + def parse_isotime(self, timestr: _TakesAscii) -> time: ... + def parse_tzstr(self, tzstr: _TakesAscii, zero_as_utc: bool = ...) -> tzinfo: ... + +def isoparse(dt_str: _TakesAscii) -> datetime: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/relativedelta.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/relativedelta.pyi new file mode 100644 index 00000000..d0e0bd7e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/relativedelta.pyi @@ -0,0 +1,96 @@ +from datetime import date, datetime, timedelta +from typing import SupportsFloat, TypeVar, overload +from typing_extensions import Self, TypeAlias + +from ._common import weekday + +_DateT = TypeVar("_DateT", date, datetime) +# Work around attribute and type having the same name. +_Weekday: TypeAlias = weekday + +MO: weekday +TU: weekday +WE: weekday +TH: weekday +FR: weekday +SA: weekday +SU: weekday + +class relativedelta: + years: int + months: int + days: int + leapdays: int + hours: int + minutes: int + seconds: int + microseconds: int + year: int | None + month: int | None + weekday: _Weekday | None + day: int | None + hour: int | None + minute: int | None + second: int | None + microsecond: int | None + def __init__( + self, + dt1: date | None = ..., + dt2: date | None = ..., + years: int | None = ..., + months: int | None = ..., + days: int | None = ..., + leapdays: int | None = ..., + weeks: int | None = ..., + hours: int | None = ..., + minutes: int | None = ..., + seconds: int | None = ..., + microseconds: int | None = ..., + year: int | None = ..., + month: int | None = ..., + day: int | None = ..., + weekday: int | _Weekday | None = ..., + yearday: int | None = ..., + nlyearday: int | None = ..., + hour: int | None = ..., + minute: int | None = ..., + second: int | None = ..., + microsecond: int | None = ..., + ) -> None: ... + @property + def weeks(self) -> int: ... + @weeks.setter + def weeks(self, value: int) -> None: ... + def normalized(self) -> Self: ... + # TODO: use Union when mypy will handle it properly in overloaded operator + # methods (#2129, #1442, #1264 in mypy) + @overload + def __add__(self, other: relativedelta) -> Self: ... + @overload + def __add__(self, other: timedelta) -> Self: ... + @overload + def __add__(self, other: _DateT) -> _DateT: ... + @overload + def __radd__(self, other: relativedelta) -> Self: ... + @overload + def __radd__(self, other: timedelta) -> Self: ... + @overload + def __radd__(self, other: _DateT) -> _DateT: ... + @overload + def __rsub__(self, other: relativedelta) -> Self: ... + @overload + def __rsub__(self, other: timedelta) -> Self: ... + @overload + def __rsub__(self, other: _DateT) -> _DateT: ... + def __sub__(self, other: relativedelta) -> Self: ... + def __neg__(self) -> Self: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def __mul__(self, other: SupportsFloat) -> Self: ... + def __rmul__(self, other: SupportsFloat) -> Self: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __div__(self, other: SupportsFloat) -> Self: ... + def __truediv__(self, other: SupportsFloat) -> Self: ... + def __abs__(self) -> Self: ... + def __hash__(self) -> int: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/rrule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/rrule.pyi new file mode 100644 index 00000000..7ae387ce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/rrule.pyi @@ -0,0 +1,112 @@ +import datetime +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import Any +from typing_extensions import TypeAlias + +from ._common import weekday as weekdaybase + +YEARLY: int +MONTHLY: int +WEEKLY: int +DAILY: int +HOURLY: int +MINUTELY: int +SECONDLY: int + +class weekday(weekdaybase): ... + +weekdays: tuple[weekday, weekday, weekday, weekday, weekday, weekday, weekday] +MO: weekday +TU: weekday +WE: weekday +TH: weekday +FR: weekday +SA: weekday +SU: weekday + +class rrulebase: + def __init__(self, cache: bool = ...) -> None: ... + def __iter__(self): ... + def __getitem__(self, item): ... + def __contains__(self, item): ... + def count(self): ... + def before(self, dt, inc: bool = ...): ... + def after(self, dt, inc: bool = ...): ... + def xafter(self, dt, count: Incomplete | None = ..., inc: bool = ...): ... + def between(self, after, before, inc: bool = ..., count: int = ...): ... + +class rrule(rrulebase): + def __init__( + self, + freq, + dtstart: datetime.date | None = ..., + interval: int = ..., + wkst: weekday | int | None = ..., + count: int | None = ..., + until: datetime.date | int | None = ..., + bysetpos: int | Iterable[int] | None = ..., + bymonth: int | Iterable[int] | None = ..., + bymonthday: int | Iterable[int] | None = ..., + byyearday: int | Iterable[int] | None = ..., + byeaster: int | Iterable[int] | None = ..., + byweekno: int | Iterable[int] | None = ..., + byweekday: int | weekday | Iterable[int] | Iterable[weekday] | None = ..., + byhour: int | Iterable[int] | None = ..., + byminute: int | Iterable[int] | None = ..., + bysecond: int | Iterable[int] | None = ..., + cache: bool = ..., + ) -> None: ... + def replace(self, **kwargs): ... + +class _iterinfo: + rrule: Any = ... + def __init__(self, rrule) -> None: ... + yearlen: int = ... + nextyearlen: int = ... + yearordinal: int = ... + yearweekday: int = ... + mmask: Any = ... + mdaymask: Any = ... + nmdaymask: Any = ... + wdaymask: Any = ... + mrange: Any = ... + wnomask: Any = ... + nwdaymask: Any = ... + eastermask: Any = ... + lastyear: int = ... + lastmonth: int = ... + def rebuild(self, year, month): ... + def ydayset(self, year, month, day): ... + def mdayset(self, year, month, day): ... + def wdayset(self, year, month, day): ... + def ddayset(self, year, month, day): ... + def htimeset(self, hour, minute, second): ... + def mtimeset(self, hour, minute, second): ... + def stimeset(self, hour, minute, second): ... + +_RRule: TypeAlias = rrule + +class rruleset(rrulebase): + class _genitem: + dt: Any = ... + genlist: Any = ... + gen: Any = ... + def __init__(self, genlist, gen) -> None: ... + def __next__(self): ... + next: Any = ... + def __lt__(self, other): ... + def __gt__(self, other): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + + def __init__(self, cache: bool = ...) -> None: ... + def rrule(self, rrule: _RRule): ... + def rdate(self, rdate): ... + def exrule(self, exrule): ... + def exdate(self, exdate): ... + +class _rrulestr: + def __call__(self, s, **kwargs) -> rrule | rruleset: ... 
+ +rrulestr: _rrulestr diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/tz/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/tz/__init__.pyi new file mode 100644 index 00000000..334ca482 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/tz/__init__.pyi @@ -0,0 +1,15 @@ +from .tz import ( + datetime_ambiguous as datetime_ambiguous, + datetime_exists as datetime_exists, + gettz as gettz, + resolve_imaginary as resolve_imaginary, + tzfile as tzfile, + tzical as tzical, + tzlocal as tzlocal, + tzoffset as tzoffset, + tzrange as tzrange, + tzstr as tzstr, + tzutc as tzutc, +) + +UTC: tzutc diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/tz/_common.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/tz/_common.pyi new file mode 100644 index 00000000..c9e6ba5d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/tz/_common.pyi @@ -0,0 +1,28 @@ +import abc +from datetime import datetime, timedelta, tzinfo +from typing import ClassVar + +def tzname_in_python2(namefunc): ... +def enfold(dt: datetime, fold: int = ...): ... + +class _DatetimeWithFold(datetime): + @property + def fold(self): ... + +# Doesn't actually have ABCMeta as the metaclass at runtime, +# but mypy complains if we don't have it in the stub. +# See discussion in #8908 +class _tzinfo(tzinfo, metaclass=abc.ABCMeta): + def is_ambiguous(self, dt: datetime) -> bool: ... + def fromutc(self, dt: datetime) -> datetime: ... + +class tzrangebase(_tzinfo): + def __init__(self) -> None: ... + def utcoffset(self, dt: datetime | None) -> timedelta | None: ... + def dst(self, dt: datetime | None) -> timedelta | None: ... + def tzname(self, dt: datetime | None) -> str: ... + def fromutc(self, dt: datetime) -> datetime: ... + def is_ambiguous(self, dt: datetime) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __ne__(self, other): ... + __reduce__ = object.__reduce__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/tz/tz.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/tz/tz.pyi new file mode 100644 index 00000000..a8442528 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/tz/tz.pyi @@ -0,0 +1,116 @@ +import datetime +from _typeshed import Incomplete +from typing import ClassVar, Protocol, TypeVar +from typing_extensions import Literal + +from ..relativedelta import relativedelta +from ._common import _tzinfo as _tzinfo, enfold as enfold, tzname_in_python2 as tzname_in_python2, tzrangebase as tzrangebase + +_DT = TypeVar("_DT", bound=datetime.datetime) + +ZERO: datetime.timedelta +EPOCH: datetime.datetime +EPOCHORDINAL: int + +class tzutc(datetime.tzinfo): + def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def tzname(self, dt: datetime.datetime | None) -> str: ... + def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ... + def fromutc(self, dt: _DT) -> _DT: ... + def __eq__(self, other): ... 
+ __hash__: ClassVar[None] # type: ignore[assignment] + def __ne__(self, other): ... + __reduce__ = object.__reduce__ + +class tzoffset(datetime.tzinfo): + def __init__(self, name, offset) -> None: ... + def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ... + def tzname(self, dt: datetime.datetime | None) -> str: ... + def fromutc(self, dt: _DT) -> _DT: ... + def __eq__(self, other): ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __ne__(self, other): ... + __reduce__ = object.__reduce__ + @classmethod + def instance(cls, name, offset) -> tzoffset: ... + +class tzlocal(_tzinfo): + def __init__(self) -> None: ... + def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def tzname(self, dt: datetime.datetime | None) -> str: ... + def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ... + def __eq__(self, other): ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __ne__(self, other): ... + __reduce__ = object.__reduce__ + +class _ttinfo: + def __init__(self) -> None: ... + def __eq__(self, other): ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __ne__(self, other): ... + +class _TZFileReader(Protocol): + # optional attribute: + # name: str + def read(self, __size: int) -> bytes: ... + def seek(self, __target: int, __whence: Literal[1]) -> object: ... + +class tzfile(_tzinfo): + def __init__(self, fileobj: str | _TZFileReader, filename: str | None = ...) -> None: ... + def is_ambiguous(self, dt: datetime.datetime | None, idx: int | None = ...) -> bool: ... + def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + def tzname(self, dt: datetime.datetime | None) -> str: ... + def __eq__(self, other): ... + __hash__: ClassVar[None] # type: ignore[assignment] + def __ne__(self, other): ... + def __reduce__(self): ... + def __reduce_ex__(self, protocol): ... + +class tzrange(tzrangebase): + hasdst: bool + def __init__( + self, + stdabbr: str, + stdoffset: int | datetime.timedelta | None = ..., + dstabbr: str | None = ..., + dstoffset: int | datetime.timedelta | None = ..., + start: relativedelta | None = ..., + end: relativedelta | None = ..., + ) -> None: ... + def transitions(self, year: int) -> tuple[datetime.datetime, datetime.datetime]: ... + def __eq__(self, other): ... + +class tzstr(tzrange): + hasdst: bool + def __init__(self, s: str, posix_offset: bool = ...) -> None: ... + @classmethod + def instance(cls, name, offset) -> tzoffset: ... + +class _ICalReader(Protocol): + # optional attribute: + # name: str + def read(self) -> str: ... + +class tzical: + def __init__(self, fileobj: str | _ICalReader) -> None: ... + def keys(self): ... + def get(self, tzid: Incomplete | None = ...): ... + +TZFILES: list[str] +TZPATHS: list[str] + +def datetime_exists(dt: datetime.datetime, tz: datetime.tzinfo | None = ...) -> bool: ... +def datetime_ambiguous(dt: datetime.datetime, tz: datetime.tzinfo | None = ...) -> bool: ... +def resolve_imaginary(dt: datetime.datetime) -> datetime.datetime: ... + +class _GetTZ: + def __call__(self, name: str | None = ...) -> datetime.tzinfo | None: ... + def nocache(self, name: str | None) -> datetime.tzinfo | None: ... 
+ +gettz: _GetTZ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/utils.pyi new file mode 100644 index 00000000..6ebd4031 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/utils.pyi @@ -0,0 +1,5 @@ +from datetime import datetime, timedelta, tzinfo + +def default_tzinfo(dt: datetime, tzinfo: tzinfo) -> datetime: ... +def today(tzinfo: tzinfo | None = ...) -> datetime: ... +def within_delta(dt1: datetime, dt2: datetime, delta: timedelta) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi new file mode 100644 index 00000000..59c12273 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/zoneinfo/__init__.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete +from typing import IO +from typing_extensions import TypeAlias + +__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] + +_MetadataType: TypeAlias = dict[str, Incomplete] + +class ZoneInfoFile: + zones: dict[Incomplete, Incomplete] + metadata: _MetadataType | None + def __init__(self, zonefile_stream: IO[bytes] | None = ...) -> None: ... + def get(self, name, default: Incomplete | None = ...): ... + +def get_zonefile_instance(new_instance: bool = ...) -> ZoneInfoFile: ... +def gettz(name): ... +def gettz_db_metadata() -> _MetadataType: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi new file mode 100644 index 00000000..67668644 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-dateutil/dateutil/zoneinfo/rebuild.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete, StrOrBytesPath +from collections.abc import Sequence +from tarfile import TarInfo + +def rebuild( + filename: StrOrBytesPath, + tag: Incomplete | None = ..., + format: str = ..., + zonegroups: Sequence[str | TarInfo] = ..., + metadata: Incomplete | None = ..., +) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-gflags/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-gflags/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..f37b2b81 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-gflags/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +gflags diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-gflags/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-gflags/METADATA.toml new file mode 100644 index 00000000..84307529 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-gflags/METADATA.toml @@ -0,0 +1 @@ +version = "3.1.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-gflags/gflags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-gflags/gflags.pyi new file mode 100644 index 00000000..84da3865 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-gflags/gflags.pyi @@ -0,0 +1,309 @@ +from collections.abc import Callable, Iterable, Iterator, Sequence +from types import ModuleType +from typing import IO, Any + +class Error(Exception): ... + +FlagsError = Error + +class DuplicateFlag(FlagsError): ... +class CantOpenFlagFileError(FlagsError): ... +class DuplicateFlagCannotPropagateNoneToSwig(DuplicateFlag): ... + +class DuplicateFlagError(DuplicateFlag): + def __init__(self, flagname: str, flag_values: FlagValues, other_flag_values: FlagValues = ...) -> None: ... + +class IllegalFlagValueError(FlagsError): ... + +IllegalFlagValue = IllegalFlagValueError + +class UnrecognizedFlag(FlagsError): ... + +class UnrecognizedFlagError(UnrecognizedFlag): + def __init__(self, flagname: str, flagvalue: str = ...) -> None: ... + +def get_help_width() -> int: ... + +GetHelpWidth = get_help_width + +def text_wrap(text: str, length: int = ..., indent: str = ..., firstline_indent: str = ..., tabs: str = ...) -> str: ... + +TextWrap = text_wrap + +def doc_to_help(doc: str) -> str: ... + +DocToHelp = doc_to_help + +class FlagValues: + def UseGnuGetOpt(self, use_gnu_getopt: bool = ...) -> None: ... + def is_gnu_getopt(self) -> bool: ... + IsGnuGetOpt = is_gnu_getopt + # TODO dict type + def FlagDict(self) -> dict[Any, Any]: ... + def flags_by_module_dict(self) -> dict[str, list[Flag]]: ... + FlagsByModuleDict = flags_by_module_dict + def flags_by_module_id_dict(self) -> dict[int, list[Flag]]: ... + FlagsByModuleIdDict = flags_by_module_id_dict + def key_flags_by_module_dict(self) -> dict[str, list[Flag]]: ... + KeyFlagsByModuleDict = key_flags_by_module_dict + def find_module_defining_flag(self, flagname: str, default: str = ...) -> str: ... + FindModuleDefiningFlag = find_module_defining_flag + def find_module_id_defining_flag(self, flagname: str, default: int = ...) -> int: ... + FindModuleIdDefiningFlag = find_module_id_defining_flag + def append_flag_values(self, flag_values: FlagValues) -> None: ... + AppendFlagValues = append_flag_values + def remove_flag_values(self, flag_values: FlagValues) -> None: ... + RemoveFlagValues = remove_flag_values + def __setitem__(self, name: str, flag: Flag) -> None: ... + def __getitem__(self, name: str) -> Flag: ... + def __getattr__(self, name: str) -> Any: ... 
+ def __setattr__(self, name: str, value: Any) -> None: ... + def __delattr__(self, flag_name: str) -> None: ... + def set_default(self, name: str, value: Any) -> None: ... + SetDefault = set_default + def __contains__(self, name: str) -> bool: ... + has_key = __contains__ + def __iter__(self) -> Iterator[str]: ... + def __call__(self, argv: list[str], known_only: bool = ...) -> list[str]: ... + def reset(self) -> None: ... + Reset = reset + def RegisteredFlags(self) -> list[str]: ... + def flag_values_dict(self) -> dict[str, Any]: ... + FlagValuesDict = flag_values_dict + def GetHelp(self, prefix: str = ...) -> str: ... + def module_help(self, module: ModuleType | str) -> str: ... + ModuleHelp = module_help + def main_module_help(self) -> str: ... + MainModuleHelp = main_module_help + def get(self, name: str, default: Any) -> Any: ... + def ShortestUniquePrefixes(self, fl: dict[str, Flag]) -> dict[str, str]: ... + def ExtractFilename(self, flagfile_str: str) -> str: ... + def read_flags_from_files(self, argv: list[str], force_gnu: bool = ...) -> list[str]: ... + ReadFlagsFromFiles = read_flags_from_files + def flags_into_string(self) -> str: ... + FlagsIntoString = flags_into_string + def append_flags_into_file(self, filename: str) -> None: ... + AppendFlagsIntoFile = append_flags_into_file + def write_help_in_xml_format(self, outfile: IO[str] = ...) -> None: ... + WriteHelpInXMLFormat = write_help_in_xml_format + # TODO validator: gflags_validators.Validator + def AddValidator(self, validator: Any) -> None: ... + def is_parsed(self) -> bool: ... + IsParsed = is_parsed + +FLAGS: FlagValues + +class Flag: + name: str + default: Any + default_as_str: str + value: Any + help: str + short_name: str + boolean: bool + present: bool + parser: ArgumentParser + serializer: ArgumentSerializer + allow_override: bool + def __init__( + self, + parser: ArgumentParser, + serializer: ArgumentSerializer, + name: str, + default: str | None, + help_string: str, + short_name: str = ..., + boolean: bool = ..., + allow_override: bool = ..., + ) -> None: ... + def Parse(self, argument: Any) -> Any: ... + def Unparse(self) -> None: ... + def Serialize(self) -> str: ... + def SetDefault(self, value: Any) -> None: ... + def Type(self) -> str: ... + def WriteInfoInXMLFormat(self, outfile: IO[str], module_name: str, is_key: bool = ..., indent: str = ...) -> None: ... + +class ArgumentParser: + syntactic_help: str + # TODO what is this + def parse(self, argument: Any) -> Any: ... + Parser = parse + def flag_type(self) -> str: ... + Type = flag_type + def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ... + +class ArgumentSerializer: + def Serialize(self, value: Any) -> str: ... + +class ListSerializer(ArgumentSerializer): + def __init__(self, list_sep: str) -> None: ... + def Serialize(self, value: list[Any]) -> str: ... + +def register_validator( + flag_name: str, checker: Callable[[Any], bool], message: str = ..., flag_values: FlagValues = ... +) -> None: ... + +RegisterValidator = register_validator + +def mark_flag_as_required(flag_name: str, flag_values: FlagValues = ...) -> None: ... + +MarkFlagAsRequired = mark_flag_as_required + +def mark_flags_as_required(flag_names: Iterable[str], flag_values: FlagValues = ...) -> None: ... + +MarkFlagsAsRequired = mark_flags_as_required + +def mark_flags_as_mutual_exclusive(flag_names: Iterable[str], required: bool = ..., flag_values: FlagValues = ...) -> None: ... 
+ +MarkFlagsAsMutualExclusive = mark_flags_as_mutual_exclusive + +def DEFINE( + parser: ArgumentParser, + name: str, + default: Any, + help: str, + flag_values: FlagValues = ..., + serializer: ArgumentSerializer = ..., + **args: Any, +) -> None: ... +def DEFINE_flag(flag: Flag, flag_values: FlagValues = ...) -> None: ... +def declare_key_flag(flag_name: str, flag_values: FlagValues = ...) -> None: ... + +DECLARE_key_flag = declare_key_flag + +def adopt_module_key_flags(module: ModuleType, flag_values: FlagValues = ...) -> None: ... + +ADOPT_module_key_flags = adopt_module_key_flags + +def DEFINE_string(name: str, default: str | None, help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... + +class BooleanParser(ArgumentParser): + def Convert(self, argument: Any) -> bool: ... + def Parse(self, argument: Any) -> bool: ... + +class BooleanFlag(Flag): + def __init__(self, name: str, default: bool | None, help: str, short_name: str = ..., **args: Any) -> None: ... + +def DEFINE_boolean(name: str, default: bool | None, help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... + +DEFINE_bool = DEFINE_boolean + +class HelpFlag(BooleanFlag): + def __init__(self) -> None: ... + def Parse(self, arg: Any) -> None: ... + +class HelpXMLFlag(BooleanFlag): + def __init__(self) -> None: ... + def Parse(self, arg: Any) -> None: ... + +class HelpshortFlag(BooleanFlag): + def __init__(self) -> None: ... + def Parse(self, arg: Any) -> None: ... + +class NumericParser(ArgumentParser): + def IsOutsideBounds(self, val: float) -> bool: ... + def Parse(self, argument: Any) -> float: ... + def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ... + def Convert(self, argument: Any) -> Any: ... + +class FloatParser(NumericParser): + number_article: str + number_name: str + syntactic_help: str + def __init__(self, lower_bound: float = ..., upper_bound: float = ...) -> None: ... + def Convert(self, argument: Any) -> float: ... + +def DEFINE_float( + name: str, + default: float | None, + help: str, + lower_bound: float = ..., + upper_bound: float = ..., + flag_values: FlagValues = ..., + **args: Any, +) -> None: ... + +class IntegerParser(NumericParser): + number_article: str + number_name: str + syntactic_help: str + def __init__(self, lower_bound: int = ..., upper_bound: int = ...) -> None: ... + def Convert(self, argument: Any) -> int: ... + +def DEFINE_integer( + name: str, + default: int | None, + help: str, + lower_bound: int = ..., + upper_bound: int = ..., + flag_values: FlagValues = ..., + **args: Any, +) -> None: ... + +class EnumParser(ArgumentParser): + def __init__(self, enum_values: list[str]) -> None: ... + def Parse(self, argument: Any) -> Any: ... + +class EnumFlag(Flag): + def __init__( + self, name: str, default: str | None, help: str, enum_values: list[str], short_name: str, **args: Any + ) -> None: ... + +def DEFINE_enum( + name: str, default: str | None, enum_values: Iterable[str], help: str, flag_values: FlagValues = ..., **args: Any +) -> None: ... + +class BaseListParser(ArgumentParser): + def __init__(self, token: str = ..., name: str = ...) -> None: ... + def Parse(self, argument: Any) -> list[Any]: ... + +class ListParser(BaseListParser): + def __init__(self) -> None: ... + +class WhitespaceSeparatedListParser(BaseListParser): + def __init__(self) -> None: ... + +def DEFINE_list(name: str, default: list[str] | None, help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... 
+def DEFINE_spaceseplist(name: str, default: list[str] | None, help: str, flag_values: FlagValues = ..., **args: Any) -> None: ... + +class MultiFlag(Flag): + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def Parse(self, arguments: Any) -> None: ... + def Serialize(self) -> str: ... + +def DEFINE_multi_string( + name: str, default: str | list[str] | None, help: str, flag_values: FlagValues = ..., **args: Any +) -> None: ... + +DEFINE_multistring = DEFINE_multi_string + +def DEFINE_multi_integer( + name: str, + default: int | list[int] | None, + help: str, + lower_bound: int = ..., + upper_bound: int = ..., + flag_values: FlagValues = ..., + **args: Any, +) -> None: ... + +DEFINE_multi_int = DEFINE_multi_integer + +def DEFINE_multi_float( + name: str, + default: float | list[float] | None, + help: str, + lower_bound: float = ..., + upper_bound: float = ..., + flag_values: FlagValues = ..., + **args: Any, +) -> None: ... +def DEFINE_multi_enum( + name: str, + default: Sequence[str] | str | None, + enum_values: Sequence[str], + help: str, + flag_values: FlagValues = ..., + case_sensitive: bool = ..., + **args: Any, +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..efd6e0e1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/@tests/stubtest_allowlist.txt @@ -0,0 +1,6 @@ +jose.backends.cryptography_backend +jose.backends.CryptographyAESKey +jose.backends.CryptographyECKey +jose.backends.CryptographyHMACKey +jose.backends.CryptographyRSAKey +jose.backends.ECDSAECKey diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/METADATA.toml new file mode 100644 index 00000000..5cc80a93 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/METADATA.toml @@ -0,0 +1,2 @@ +version = "3.3.*" +requires = ["types-pyasn1"] # excluding pyrsa, cryptography until typing is available diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/__init__.pyi new file mode 100644 index 00000000..dcd21e1f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/__init__.pyi @@ -0,0 +1,11 @@ +from .exceptions import ( + ExpiredSignatureError as ExpiredSignatureError, + JOSEError as JOSEError, + JWSError as JWSError, + JWTError as JWTError, +) + +__version__: str +__author__: str +__license__: str +__copyright__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/__init__.pyi new file mode 100644 index 00000000..4ce1c431 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/__init__.pyi @@ -0,0 +1,21 @@ +from collections.abc import Callable + +from .base import DIRKey as DIRKey +from .cryptography_backend import ( + CryptographyAESKey as CryptographyAESKey, + 
CryptographyECKey as CryptographyECKey, + CryptographyHMACKey as CryptographyHMACKey, + CryptographyRSAKey as CryptographyRSAKey, +) +from .ecdsa_backend import ECDSAECKey as ECDSAECKey +from .native import HMACKey as NativeHMACKey +from .rsa_backend import RSAKey as BackendRSAKey + +# python-jose relies on importing from cryptography_backend +# then falling back on other imports +# these are all the potential options +AESKey: type[CryptographyAESKey] | None +HMACKey: type[CryptographyHMACKey] | type[NativeHMACKey] +RSAKey: type[CryptographyRSAKey] | type[BackendRSAKey] | None +ECKey: type[CryptographyECKey] | type[ECDSAECKey] +get_random_bytes: Callable[[int], bytes] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/_asn1.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/_asn1.pyi new file mode 100644 index 00000000..b4fce464 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/_asn1.pyi @@ -0,0 +1,17 @@ +from pyasn1.type import namedtype, univ + +RSA_ENCRYPTION_ASN1_OID: str + +class RsaAlgorithmIdentifier(univ.Sequence): + componentType: namedtype.NamedTypes + +class PKCS8PrivateKey(univ.Sequence): + componentType: namedtype.NamedTypes + +class PublicKeyInfo(univ.Sequence): + componentType: namedtype.NamedTypes + +def rsa_private_key_pkcs8_to_pkcs1(pkcs8_key) -> bytes: ... +def rsa_private_key_pkcs1_to_pkcs8(pkcs1_key) -> bytes: ... +def rsa_public_key_pkcs1_to_pkcs8(pkcs1_key) -> bytes: ... +def rsa_public_key_pkcs8_to_pkcs1(pkcs8_key) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/base.pyi new file mode 100644 index 00000000..26313de2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/base.pyi @@ -0,0 +1,23 @@ +from typing import Any +from typing_extensions import Self + +class Key: + # Enable when we can use stubs from installed dependencies, + # as `key` can be of type cryptography.x509.base.Certificate: + # from cryptography.x509 import Certificate + def __init__(self, key, algorithm) -> None: ... + def sign(self, msg: bytes) -> bytes: ... + def verify(self, msg: bytes, sig: bytes) -> bool: ... + def public_key(self) -> Self: ... + def to_pem(self) -> bytes: ... + def to_dict(self) -> dict[str, Any]: ... + def encrypt(self, plain_text: str | bytes, aad: bytes | None = ...) -> tuple[bytes, bytes, bytes | None]: ... + def decrypt( + self, cipher_text: str | bytes, iv: str | bytes | None = ..., aad: bytes | None = ..., tag: bytes | None = ... + ) -> bytes: ... + def wrap_key(self, key_data: bytes) -> bytes: ... + def unwrap_key(self, wrapped_key: bytes) -> bytes: ... + +class DIRKey(Key): + def __init__(self, key_data: str | bytes, algorithm: str) -> None: ... + def to_dict(self) -> dict[str, Any]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/cryptography_backend.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/cryptography_backend.pyi new file mode 100644 index 00000000..d910e72d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/cryptography_backend.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete +from typing import Any + +from .base import Key + +def get_random_bytes(num_bytes: int) -> bytes: ... + +# Enable when we can use stubs from installed dependencies: +# from cryptography.hazmat import backends +class CryptographyECKey(Key): + SHA256: Any + SHA384: Any + SHA512: Any + hash_alg: Any + cryptography_backend: Any + prepared_key: Any + def __init__(self, key, algorithm, cryptography_backend=...) -> None: ... + def sign(self, msg): ... + def verify(self, msg, sig): ... + def is_public(self): ... + def public_key(self): ... + def to_pem(self): ... + def to_dict(self): ... + +class CryptographyRSAKey(Key): + SHA256: Any + SHA384: Any + SHA512: Any + RSA1_5: Any + RSA_OAEP: Any + RSA_OAEP_256: Any + hash_alg: Any + padding: Any + cryptography_backend: Any + prepared_key: Any + def __init__(self, key, algorithm, cryptography_backend=...) -> None: ... + def sign(self, msg): ... + def verify(self, msg, sig): ... + def is_public(self): ... + def public_key(self): ... + def to_pem(self, pem_format: str = ...): ... + def to_dict(self): ... + def wrap_key(self, key_data): ... + def unwrap_key(self, wrapped_key): ... + +class CryptographyAESKey(Key): + KEY_128: Any + KEY_192: Any + KEY_256: Any + KEY_384: Any + KEY_512: Any + AES_KW_ALGS: Any + MODES: Any + def __init__(self, key, algorithm) -> None: ... + def to_dict(self): ... + def encrypt(self, plain_text, aad: Incomplete | None = ...): ... + def decrypt(self, cipher_text, iv: Incomplete | None = ..., aad: Incomplete | None = ..., tag: Incomplete | None = ...): ... + def wrap_key(self, key_data): ... + def unwrap_key(self, wrapped_key): ... + +class CryptographyHMACKey(Key): + ALG_MAP: Any + prepared_key: Any + def __init__(self, key, algorithm) -> None: ... + def to_dict(self): ... + def sign(self, msg): ... + def verify(self, msg, sig): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/ecdsa_backend.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/ecdsa_backend.pyi new file mode 100644 index 00000000..fea488c7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/ecdsa_backend.pyi @@ -0,0 +1,25 @@ +from collections.abc import Callable +from hashlib import _Hash +from typing import Any +from typing_extensions import Self + +from .base import Key + +# Enable when we can use stubs from installed dependencies: +# from ecdsa.curves import Curve +class ECDSAECKey(Key): + SHA256: Callable[[bytes], _Hash] + SHA384: Callable[[bytes], _Hash] + SHA512: Callable[[bytes], _Hash] + CURVE_MAP: Any + CURVE_NAMES: Any + hash_alg: Any + curve: Any + prepared_key: Any + def __init__(self, key, algorithm) -> None: ... + def sign(self, msg): ... + def verify(self, msg, sig): ... + def is_public(self) -> bool: ... + def public_key(self) -> Self: ... + def to_pem(self): ... + def to_dict(self) -> dict[str, Any]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/native.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/native.pyi new file mode 100644 index 00000000..8c1626b5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/native.pyi @@ -0,0 +1,21 @@ +from _typeshed import ReadableBuffer +from collections.abc import Callable +from hashlib import _Hash +from typing import Any + +from .base import Key + +def get_random_bytes(num_bytes: int) -> bytes: ... + +class HMACKey(Key): + HASHES: dict[str, Callable[[bytes], _Hash]] + prepared_key: bytes + def __init__( + self, + # explicitly checks for key_data as dict instance, instead of a Mapping + key: str | bytes | dict[str, Any], + algorithm: str, + ) -> None: ... + def sign(self, msg: ReadableBuffer | None) -> bytes: ... + def verify(self, msg: ReadableBuffer | None, sig: str | bytes) -> bool: ... + def to_dict(self) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/rsa_backend.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/rsa_backend.pyi new file mode 100644 index 00000000..39e9b171 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/backends/rsa_backend.pyi @@ -0,0 +1,27 @@ +from typing import Any +from typing_extensions import Self + +from .base import Key + +LEGACY_INVALID_PKCS8_RSA_HEADER: bytes +ASN1_SEQUENCE_ID: bytes +RSA_ENCRYPTION_ASN1_OID: str + +# Enable when we can use stubs from installed dependencies: +# from rsa import PublicKey +def pem_to_spki(pem, fmt: str = ...): ... + +class RSAKey(Key): + SHA256: str + SHA384: str + SHA512: str + hash_alg: str + def __init__(self, key, algorithm) -> None: ... + def sign(self, msg: bytes) -> bytes: ... + def verify(self, msg: bytes, sig: bytes) -> bool: ... + def is_public(self) -> bool: ... + def public_key(self) -> Self: ... + def to_pem(self, pem_format: str = ...) -> bytes: ... + def to_dict(self) -> dict[str, Any]: ... + def wrap_key(self, key_data: bytes) -> bytes: ... + def unwrap_key(self, wrapped_key: bytes) -> bytes: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/constants.pyi new file mode 100644 index 00000000..209a9056 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/constants.pyi @@ -0,0 +1,72 @@ +from collections.abc import Callable, Mapping +from hashlib import _Hash + +from .backends.base import Key + +class Algorithms: + NONE: str + HS256: str + HS384: str + HS512: str + RS256: str + RS384: str + RS512: str + ES256: str + ES384: str + ES512: str + A128CBC_HS256: str + A192CBC_HS384: str + A256CBC_HS512: str + A128GCM: str + A192GCM: str + A256GCM: str + A128CBC: str + A192CBC: str + A256CBC: str + DIR: str + RSA1_5: str + RSA_OAEP: str + RSA_OAEP_256: str + A128KW: str + A192KW: str + A256KW: str + ECDH_ES: str + ECDH_ES_A128KW: str + ECDH_ES_A192KW: str + ECDH_ES_A256KW: str + A128GCMKW: str + A192GCMKW: str + A256GCMKW: str + PBES2_HS256_A128KW: str + PBES2_HS384_A192KW: str + PBES2_HS512_A256KW: str + DEF: str + HMAC: set[str] + RSA_DS: set[str] + RSA_KW: set[str] + RSA: set[str] + EC_DS: set[str] + EC_KW: set[str] + EC: set[str] + AES_PSEUDO: set[str] + AES_JWE_ENC: set[str] + AES_ENC: set[str] + AES_KW: set[str] + AEC_GCM_KW: set[str] + AES: set[str] + PBES2_KW: set[str] + HMAC_AUTH_TAG: set[str] + GCM: set[str] + SUPPORTED: set[str] + ALL: set[str] + HASHES: Mapping[str, Callable[[bytes], _Hash]] + KEYS: Mapping[str, type[Key]] + +ALGORITHMS: Algorithms + +class Zips: + DEF: str + NONE: None + SUPPORTED: set[str | None] + +ZIPS: Zips diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/exceptions.pyi new file mode 100644 index 00000000..d7ab2176 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/exceptions.pyi @@ -0,0 +1,12 @@ +class JOSEError(Exception): ... +class JWSError(JOSEError): ... +class JWSSignatureError(JWSError): ... +class JWSAlgorithmError(JWSError): ... +class JWTError(JOSEError): ... +class JWTClaimsError(JWTError): ... +class ExpiredSignatureError(JWTError): ... +class JWKError(JOSEError): ... +class JWEError(JOSEError): ... +class JWEParseError(JWEError): ... +class JWEInvalidAuth(JWEError): ... +class JWEAlgorithmUnsupportedError(JWEError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jwe.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jwe.pyi new file mode 100644 index 00000000..82368e04 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jwe.pyi @@ -0,0 +1,22 @@ +from typing import Any + +from .backends.base import Key + +def encrypt( + plaintext: str | bytes, + # Internally it's passed down to jwk.construct(), which explicitly checks for + # key as dict instance, instead of a Mapping + key: str | bytes | dict[str, Any] | Key, + encryption: str = ..., + algorithm: str = ..., + zip: str | None = ..., + cty: str | None = ..., + kid: str | None = ..., +) -> bytes: ... 
+def decrypt( + jwe_str: str | bytes, + # Internally it's passed down to jwk.construct(), which explicitly checks for + # key as dict instance, instead of a Mapping + key: str | bytes | dict[str, Any] | Key, +) -> bytes | None: ... +def get_unverified_header(jwe_str: str | bytes | None) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jwk.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jwk.pyi new file mode 100644 index 00000000..c8113d1b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jwk.pyi @@ -0,0 +1,13 @@ +from typing import Any +from typing_extensions import Literal + +from .backends import AESKey as AESKey, ECKey as ECKey, HMACKey as HMACKey, RSAKey as RSAKey +from .backends.base import DIRKey as DIRKey, Key + +def get_key(algorithm: str) -> type[Key] | None: ... +def register_key(algorithm: str, key_class: type[Key]) -> Literal[True]: ... +def construct( + # explicitly checks for key_data as dict instance, instead of a Mapping + key_data: str | bytes | dict[str, Any] | Key, + algorithm: str | None = ..., +) -> Key: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jws.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jws.pyi new file mode 100644 index 00000000..0898a14e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jws.pyi @@ -0,0 +1,24 @@ +from collections.abc import Container, Mapping +from typing import Any + +from .backends.base import Key + +def sign( + payload: bytes | Mapping[str, Any], + # Internally it's passed down to jwk.construct(), which explicitly checks for + # key as dict instance, instead of a Mapping + key: str | bytes | dict[str, Any] | Key, + headers: Mapping[str, Any] | None = ..., + algorithm: str = ..., +) -> str: ... +def verify( + token: str | bytes, + key: str | bytes | Mapping[str, Any] | Key, + # Callers of this function, like jwt.decode(), and functions called internally, + # like jws._verify_signature(), use and accept algorithms=None + algorithms: str | Container[str] | None, + verify: bool = ..., +) -> bytes: ... +def get_unverified_header(token: str | bytes) -> dict[str, Any]: ... +def get_unverified_headers(token: str | bytes) -> dict[str, Any]: ... +def get_unverified_claims(token: str | bytes) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jwt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jwt.pyi new file mode 100644 index 00000000..4a297cb1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/jwt.pyi @@ -0,0 +1,26 @@ +from collections.abc import Container, Iterable, Mapping, MutableMapping +from typing import Any + +from .backends.base import Key + +def encode( + claims: MutableMapping[str, Any], + # Internally it calls jws.sign() that expects a key dict instance instead of Mapping + key: str | bytes | dict[str, Any] | Key, + algorithm: str = ..., + headers: Mapping[str, Any] | None = ..., + access_token: str | None = ..., +) -> str: ... 
+def decode( + token: str | bytes, + key: str | bytes | Mapping[str, Any] | Key, + algorithms: str | Container[str] | None = ..., + options: Mapping[str, Any] | None = ..., + audience: str | None = ..., + issuer: str | Iterable[str] | None = ..., + subject: str | None = ..., + access_token: str | None = ..., +) -> dict[str, Any]: ... +def get_unverified_header(token: str | bytes) -> dict[str, Any]: ... +def get_unverified_headers(token: str | bytes) -> dict[str, Any]: ... +def get_unverified_claims(token: str | bytes) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/utils.pyi new file mode 100644 index 00000000..de0a0153 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-jose/jose/utils.pyi @@ -0,0 +1,14 @@ +from collections.abc import Callable, Iterable +from datetime import timedelta +from hashlib import _Hash +from typing import Any + +def long_to_bytes(n: int, blocksize: int | None = ...) -> bytes: ... +def long_to_base64(data: int, size: int | None = ...) -> bytes: ... +def int_arr_to_long(arr: Iterable[Any]) -> int: ... +def base64_to_long(data: str | bytes) -> int: ... +def calculate_at_hash(access_token: str, hash_alg: Callable[[bytes], _Hash]) -> str: ... +def base64url_decode(input: bytes) -> bytes: ... +def base64url_encode(input: bytes) -> bytes: ... +def timedelta_total_seconds(delta: timedelta) -> int: ... +def ensure_binary(s: str | bytes) -> bytes: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..7e144572 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +nmap.test_nmap diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/METADATA.toml new file mode 100644 index 00000000..7431acfe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/METADATA.toml @@ -0,0 +1 @@ +version = "0.7.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/nmap/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/nmap/__init__.pyi new file mode 100644 index 00000000..d2e6aef6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/nmap/__init__.pyi @@ -0,0 +1,2 @@ +from .nmap import * +from .nmap import __author__ as __author__, __last_modification__ as __last_modification__, __version__ as __version__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/nmap/nmap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/nmap/nmap.pyi new file mode 100644 index 00000000..ae8e48ba --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-nmap/nmap/nmap.pyi @@ -0,0 +1,132 @@ +from collections.abc import Callable, Iterable, Iterator +from typing import Any, TypeVar +from 
typing_extensions import TypeAlias, TypedDict + +_T = TypeVar("_T") +_Callback: TypeAlias = Callable[[str, _Result], object] + +class _Result(TypedDict): + nmap: _ResultNmap + scan: dict[str, PortScannerHostDict] + +class _ResultNmap(TypedDict): + command_line: str + scaninfo: _ResultNmapInfo + scanstats: _ResultNampStats + +class _ResultNmapInfo(TypedDict, total=False): + error: str + warning: str + protocol: _ResultNampInfoProtocol + +class _ResultNampInfoProtocol(TypedDict): + method: str + services: str + +class _ResultNampStats(TypedDict): + timestr: str + elapsed: str + uphosts: str + downhosts: str + totalhosts: str + +class _ResulHostUptime(TypedDict): + seconds: str + lastboot: str + +class _ResultHostNames(TypedDict): + type: str + name: str + +class _ResultHostPort(TypedDict): + conf: str + cpe: str + extrainfo: str + name: str + product: str + reason: str + state: str + version: str + +__last_modification__: str +__author__: str +__version__: str + +class PortScanner: + def __init__(self, nmap_search_path: Iterable[str] = ...) -> None: ... + def get_nmap_last_output(self) -> str: ... + def nmap_version(self) -> tuple[int, int]: ... + def listscan(self, hosts: str = ...) -> list[str]: ... + def scan( + self, hosts: str = ..., ports: str | None = ..., arguments: str = ..., sudo: bool = ..., timeout: int = ... + ) -> _Result: ... + def analyse_nmap_xml_scan( + self, + nmap_xml_output: str | None = ..., + nmap_err: str = ..., + nmap_err_keep_trace: str = ..., + nmap_warn_keep_trace: str = ..., + ) -> _Result: ... + def __getitem__(self, host: str) -> PortScannerHostDict: ... + def all_hosts(self) -> list[str]: ... + def command_line(self) -> str: ... + def scaninfo(self) -> _ResultNmapInfo: ... + def scanstats(self) -> _ResultNampStats: ... + def has_host(self, host: str) -> bool: ... + def csv(self) -> str: ... + +def __scan_progressive__( + self: object, hosts: str, ports: str, arguments: str, callback: _Callback | None, sudo: bool, timeout: int +) -> None: ... + +class PortScannerAsync: + def __init__(self) -> None: ... + def __del__(self) -> None: ... + def scan( + self, + hosts: str = ..., + ports: str | None = ..., + arguments: str = ..., + callback: _Callback | None = ..., + sudo: bool = ..., + timeout: int = ..., + ) -> None: ... + def stop(self) -> None: ... + def wait(self, timeout: int | None = ...) -> None: ... + def still_scanning(self) -> bool: ... + +class PortScannerYield(PortScannerAsync): + def __init__(self) -> None: ... + def scan( # type: ignore[override] + self, hosts: str = ..., ports: str | None = ..., arguments: str = ..., sudo: bool = ..., timeout: int = ... + ) -> Iterator[tuple[str, _Result]]: ... + def stop(self) -> None: ... + def wait(self, timeout: int | None = ...) -> None: ... + def still_scanning(self) -> None: ... # type: ignore[override] + +class PortScannerHostDict(dict[str, Any]): + def hostnames(self) -> list[_ResultHostNames]: ... + def hostname(self) -> str: ... + def state(self) -> str: ... + def uptime(self) -> _ResulHostUptime: ... + def all_protocols(self) -> list[str]: ... + def all_tcp(self) -> list[int]: ... + def has_tcp(self, port: int) -> bool: ... + def tcp(self, port: int) -> _ResultHostPort: ... + def all_udp(self) -> list[int]: ... + def has_udp(self, port: int) -> bool: ... + def udp(self, port: int) -> _ResultHostPort: ... + def all_ip(self) -> list[int]: ... + def has_ip(self, port: int) -> bool: ... + def ip(self, port: int) -> _ResultHostPort: ... + def all_sctp(self) -> list[int]: ... 
+ def has_sctp(self, port: int) -> bool: ... + def sctp(self, port: int) -> _ResultHostPort: ... + +class PortScannerError(Exception): + value: str + def __init__(self, value: str) -> None: ... + +class PortScannerTimeout(PortScannerError): ... + +def convert_nmap_output_to_encoding(value: _T, code: str = ...) -> _T: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..e77adcae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +slugify.__main__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/METADATA.toml new file mode 100644 index 00000000..629f910b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/METADATA.toml @@ -0,0 +1 @@ +version = "8.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/__init__.pyi new file mode 100644 index 00000000..2d77c5af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/__init__.pyi @@ -0,0 +1,12 @@ +from .__version__ import ( + __author__ as __author__, + __author_email__ as __author_email__, + __copyright__ as __copyright__, + __description__ as __description__, + __license__ as __license__, + __title__ as __title__, + __url__ as __url__, + __version__ as __version__, +) +from .slugify import * +from .special import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/__version__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/__version__.pyi new file mode 100644 index 00000000..ed1591f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/__version__.pyi @@ -0,0 +1,8 @@ +__title__: str +__author__: str +__author_email__: str +__description__: str +__url__: str +__license__: str +__copyright__: str +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/slugify.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/slugify.pyi new file mode 100644 index 00000000..ce2276c0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/slugify.pyi @@ -0,0 +1,20 @@ +from collections.abc import Iterable + +def smart_truncate( + string: str, max_length: int = ..., word_boundary: bool = ..., separator: str = ..., save_order: bool = ... +) -> str: ... 
+def slugify( + text: str, + entities: bool = ..., + decimal: bool = ..., + hexadecimal: bool = ..., + max_length: int = ..., + word_boundary: bool = ..., + separator: str = ..., + save_order: bool = ..., + stopwords: Iterable[str] = ..., + regex_pattern: str | None = ..., + lowercase: bool = ..., + replacements: Iterable[Iterable[str]] = ..., + allow_unicode: bool = ..., +) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/special.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/special.pyi new file mode 100644 index 00000000..2d1da58e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-slugify/slugify/special.pyi @@ -0,0 +1,8 @@ +from collections.abc import Sequence + +def add_uppercase_char(char_list: Sequence[tuple[str, str]]) -> Sequence[tuple[str, str]]: ... + +CYRILLIC: Sequence[tuple[str, str]] +GERMAN: Sequence[tuple[str, str]] +GREEK: Sequence[tuple[str, str]] +PRE_TRANSLATIONS: Sequence[tuple[str, str]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..6bc56055 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/@tests/stubtest_allowlist.txt @@ -0,0 +1,24 @@ +# Type helpers +Xlib._typing + +# __all__ is a map at runtime +# https://github.com/python-xlib/python-xlib/pull/238 +Xlib.ext(\.__all__)? + +# These will unconditionally fail at runtime +# See: https://github.com/python-xlib/python-xlib/issues/253 +Xlib.protocol.rq.DictWrapper.__gt__ +Xlib.protocol.rq.DictWrapper.__lt__ +Xlib.protocol.rq.Event.__gt__ +Xlib.protocol.rq.Event.__lt__ + +# Can be None or str once instanciated +Xlib.protocol.rq.*.structcode +# Should only ever be str once instanciated +Xlib.protocol.rq.*.name + +# Structs generate their attributes (@type_check_only) +Xlib.protocol.rq.Struct.__getattr__ + +# Iteration variable that bleeds into the global scope +Xlib.protocol.rq.c diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/@tests/stubtest_allowlist_win32.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/@tests/stubtest_allowlist_win32.txt new file mode 100644 index 00000000..db181acb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/@tests/stubtest_allowlist_win32.txt @@ -0,0 +1,3 @@ +# failed to import, ModuleNotFoundError: No module named 'fcntl' +# https://github.com/python-xlib/python-xlib/pull/217 +Xlib.support.unix_connect diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/METADATA.toml new file mode 100644 index 00000000..86e974f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.33.*" +requires = ["types-Pillow"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/X.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/X.pyi new file mode 100644 index 
00000000..30261ec9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/X.pyi @@ -0,0 +1,345 @@ +NONE: int +ParentRelative: int +CopyFromParent: int +PointerWindow: int +InputFocus: int +PointerRoot: int +AnyPropertyType: int +AnyKey: int +AnyButton: int +AllTemporary: int +CurrentTime: int +NoSymbol: int +NoEventMask: int +KeyPressMask: int +KeyReleaseMask: int +ButtonPressMask: int +ButtonReleaseMask: int +EnterWindowMask: int +LeaveWindowMask: int +PointerMotionMask: int +PointerMotionHintMask: int +Button1MotionMask: int +Button2MotionMask: int +Button3MotionMask: int +Button4MotionMask: int +Button5MotionMask: int +ButtonMotionMask: int +KeymapStateMask: int +ExposureMask: int +VisibilityChangeMask: int +StructureNotifyMask: int +ResizeRedirectMask: int +SubstructureNotifyMask: int +SubstructureRedirectMask: int +FocusChangeMask: int +PropertyChangeMask: int +ColormapChangeMask: int +OwnerGrabButtonMask: int +KeyPress: int +KeyRelease: int +ButtonPress: int +ButtonRelease: int +MotionNotify: int +EnterNotify: int +LeaveNotify: int +FocusIn: int +FocusOut: int +KeymapNotify: int +Expose: int +GraphicsExpose: int +NoExpose: int +VisibilityNotify: int +CreateNotify: int +DestroyNotify: int +UnmapNotify: int +MapNotify: int +MapRequest: int +ReparentNotify: int +ConfigureNotify: int +ConfigureRequest: int +GravityNotify: int +ResizeRequest: int +CirculateNotify: int +CirculateRequest: int +PropertyNotify: int +SelectionClear: int +SelectionRequest: int +SelectionNotify: int +ColormapNotify: int +ClientMessage: int +MappingNotify: int +LASTEvent: int +ShiftMask: int +LockMask: int +ControlMask: int +Mod1Mask: int +Mod2Mask: int +Mod3Mask: int +Mod4Mask: int +Mod5Mask: int +ShiftMapIndex: int +LockMapIndex: int +ControlMapIndex: int +Mod1MapIndex: int +Mod2MapIndex: int +Mod3MapIndex: int +Mod4MapIndex: int +Mod5MapIndex: int +Button1Mask: int +Button2Mask: int +Button3Mask: int +Button4Mask: int +Button5Mask: int +AnyModifier: int +Button1: int +Button2: int +Button3: int +Button4: int +Button5: int +NotifyNormal: int +NotifyGrab: int +NotifyUngrab: int +NotifyWhileGrabbed: int +NotifyHint: int +NotifyAncestor: int +NotifyVirtual: int +NotifyInferior: int +NotifyNonlinear: int +NotifyNonlinearVirtual: int +NotifyPointer: int +NotifyPointerRoot: int +NotifyDetailNone: int +VisibilityUnobscured: int +VisibilityPartiallyObscured: int +VisibilityFullyObscured: int +PlaceOnTop: int +PlaceOnBottom: int +FamilyInternet: int +FamilyDECnet: int +FamilyChaos: int +FamilyServerInterpreted: int +FamilyInternetV6: int +PropertyNewValue: int +PropertyDelete: int +ColormapUninstalled: int +ColormapInstalled: int +GrabModeSync: int +GrabModeAsync: int +GrabSuccess: int +AlreadyGrabbed: int +GrabInvalidTime: int +GrabNotViewable: int +GrabFrozen: int +AsyncPointer: int +SyncPointer: int +ReplayPointer: int +AsyncKeyboard: int +SyncKeyboard: int +ReplayKeyboard: int +AsyncBoth: int +SyncBoth: int +RevertToNone: int +RevertToPointerRoot: int +RevertToParent: int +Success: int +BadRequest: int +BadValue: int +BadWindow: int +BadPixmap: int +BadAtom: int +BadCursor: int +BadFont: int +BadMatch: int +BadDrawable: int +BadAccess: int +BadAlloc: int +BadColor: int +BadGC: int +BadIDChoice: int +BadName: int +BadLength: int +BadImplementation: int +FirstExtensionError: int +LastExtensionError: int +InputOutput: int +InputOnly: int +CWBackPixmap: int +CWBackPixel: int +CWBorderPixmap: int +CWBorderPixel: int +CWBitGravity: int +CWWinGravity: int 
+CWBackingStore: int +CWBackingPlanes: int +CWBackingPixel: int +CWOverrideRedirect: int +CWSaveUnder: int +CWEventMask: int +CWDontPropagate: int +CWColormap: int +CWCursor: int +CWX: int +CWY: int +CWWidth: int +CWHeight: int +CWBorderWidth: int +CWSibling: int +CWStackMode: int +ForgetGravity: int +NorthWestGravity: int +NorthGravity: int +NorthEastGravity: int +WestGravity: int +CenterGravity: int +EastGravity: int +SouthWestGravity: int +SouthGravity: int +SouthEastGravity: int +StaticGravity: int +UnmapGravity: int +NotUseful: int +WhenMapped: int +Always: int +IsUnmapped: int +IsUnviewable: int +IsViewable: int +SetModeInsert: int +SetModeDelete: int +DestroyAll: int +RetainPermanent: int +RetainTemporary: int +Above: int +Below: int +TopIf: int +BottomIf: int +Opposite: int +RaiseLowest: int +LowerHighest: int +PropModeReplace: int +PropModePrepend: int +PropModeAppend: int +GXclear: int +GXand: int +GXandReverse: int +GXcopy: int +GXandInverted: int +GXnoop: int +GXxor: int +GXor: int +GXnor: int +GXequiv: int +GXinvert: int +GXorReverse: int +GXcopyInverted: int +GXorInverted: int +GXnand: int +GXset: int +LineSolid: int +LineOnOffDash: int +LineDoubleDash: int +CapNotLast: int +CapButt: int +CapRound: int +CapProjecting: int +JoinMiter: int +JoinRound: int +JoinBevel: int +FillSolid: int +FillTiled: int +FillStippled: int +FillOpaqueStippled: int +EvenOddRule: int +WindingRule: int +ClipByChildren: int +IncludeInferiors: int +Unsorted: int +YSorted: int +YXSorted: int +YXBanded: int +CoordModeOrigin: int +CoordModePrevious: int +Complex: int +Nonconvex: int +Convex: int +ArcChord: int +ArcPieSlice: int +GCFunction: int +GCPlaneMask: int +GCForeground: int +GCBackground: int +GCLineWidth: int +GCLineStyle: int +GCCapStyle: int +GCJoinStyle: int +GCFillStyle: int +GCFillRule: int +GCTile: int +GCStipple: int +GCTileStipXOrigin: int +GCTileStipYOrigin: int +GCFont: int +GCSubwindowMode: int +GCGraphicsExposures: int +GCClipXOrigin: int +GCClipYOrigin: int +GCClipMask: int +GCDashOffset: int +GCDashList: int +GCArcMode: int +GCLastBit: int +FontLeftToRight: int +FontRightToLeft: int +FontChange: int +XYBitmap: int +XYPixmap: int +ZPixmap: int +AllocNone: int +AllocAll: int +DoRed: int +DoGreen: int +DoBlue: int +CursorShape: int +TileShape: int +StippleShape: int +AutoRepeatModeOff: int +AutoRepeatModeOn: int +AutoRepeatModeDefault: int +LedModeOff: int +LedModeOn: int +KBKeyClickPercent: int +KBBellPercent: int +KBBellPitch: int +KBBellDuration: int +KBLed: int +KBLedMode: int +KBKey: int +KBAutoRepeatMode: int +MappingSuccess: int +MappingBusy: int +MappingFailed: int +MappingModifier: int +MappingKeyboard: int +MappingPointer: int +DontPreferBlanking: int +PreferBlanking: int +DefaultBlanking: int +DisableScreenSaver: int +DisableScreenInterval: int +DontAllowExposures: int +AllowExposures: int +DefaultExposures: int +ScreenSaverReset: int +ScreenSaverActive: int +HostInsert: int +HostDelete: int +EnableAccess: int +DisableAccess: int +StaticGray: int +GrayScale: int +StaticColor: int +PseudoColor: int +TrueColor: int +DirectColor: int +LSBFirst: int +MSBFirst: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/XK.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/XK.pyi new file mode 100644 index 00000000..01ad5481 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/XK.pyi @@ -0,0 +1,7 @@ +from Xlib.keysymdef.latin1 
import * +from Xlib.keysymdef.miscellany import * +from Xlib.X import NoSymbol as NoSymbol + +def string_to_keysym(keysym: str) -> int: ... +def load_keysym_group(group: str) -> None: ... +def keysym_to_string(keysym: int) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/Xatom.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/Xatom.pyi new file mode 100644 index 00000000..e0c1e0a9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/Xatom.pyi @@ -0,0 +1,69 @@ +PRIMARY: int +SECONDARY: int +ARC: int +ATOM: int +BITMAP: int +CARDINAL: int +COLORMAP: int +CURSOR: int +CUT_BUFFER0: int +CUT_BUFFER1: int +CUT_BUFFER2: int +CUT_BUFFER3: int +CUT_BUFFER4: int +CUT_BUFFER5: int +CUT_BUFFER6: int +CUT_BUFFER7: int +DRAWABLE: int +FONT: int +INTEGER: int +PIXMAP: int +POINT: int +RECTANGLE: int +RESOURCE_MANAGER: int +RGB_COLOR_MAP: int +RGB_BEST_MAP: int +RGB_BLUE_MAP: int +RGB_DEFAULT_MAP: int +RGB_GRAY_MAP: int +RGB_GREEN_MAP: int +RGB_RED_MAP: int +STRING: int +VISUALID: int +WINDOW: int +WM_COMMAND: int +WM_HINTS: int +WM_CLIENT_MACHINE: int +WM_ICON_NAME: int +WM_ICON_SIZE: int +WM_NAME: int +WM_NORMAL_HINTS: int +WM_SIZE_HINTS: int +WM_ZOOM_HINTS: int +MIN_SPACE: int +NORM_SPACE: int +MAX_SPACE: int +END_SPACE: int +SUPERSCRIPT_X: int +SUPERSCRIPT_Y: int +SUBSCRIPT_X: int +SUBSCRIPT_Y: int +UNDERLINE_POSITION: int +UNDERLINE_THICKNESS: int +STRIKEOUT_ASCENT: int +STRIKEOUT_DESCENT: int +ITALIC_ANGLE: int +X_HEIGHT: int +QUAD_WIDTH: int +WEIGHT: int +POINT_SIZE: int +RESOLUTION: int +COPYRIGHT: int +NOTICE: int +FONT_NAME: int +FAMILY_NAME: int +FULL_NAME: int +CAP_HEIGHT: int +WM_CLASS: int +WM_TRANSIENT_FOR: int +LAST_PREDEFINED: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/Xcursorfont.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/Xcursorfont.pyi new file mode 100644 index 00000000..43116488 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/Xcursorfont.pyi @@ -0,0 +1,78 @@ +num_glyphs: int +X_cursor: int +arrow: int +based_arrow_down: int +based_arrow_up: int +boat: int +bogosity: int +bottom_left_corner: int +bottom_right_corner: int +bottom_side: int +bottom_tee: int +box_spiral: int +center_ptr: int +circle: int +clock: int +coffee_mug: int +cross: int +cross_reverse: int +crosshair: int +diamond_cross: int +dot: int +dotbox: int +double_arrow: int +draft_large: int +draft_small: int +draped_box: int +exchange: int +fleur: int +gobbler: int +gumby: int +hand1: int +hand2: int +heart: int +icon: int +iron_cross: int +left_ptr: int +left_side: int +left_tee: int +leftbutton: int +ll_angle: int +lr_angle: int +man: int +middlebutton: int +mouse: int +pencil: int +pirate: int +plus: int +question_arrow: int +right_ptr: int +right_side: int +right_tee: int +rightbutton: int +rtl_logo: int +sailboat: int +sb_down_arrow: int +sb_h_double_arrow: int +sb_left_arrow: int +sb_right_arrow: int +sb_up_arrow: int +sb_v_double_arrow: int +shuttle: int +sizing: int +spider: int +spraycan: int +star: int +target: int +tcross: int +top_left_arrow: int +top_left_corner: int +top_right_corner: int +top_side: int +top_tee: int +trek: int +ul_angle: int +umbrella: int +ur_angle: int +watch: int +xterm: int diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/Xutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/Xutil.pyi new file mode 100644 index 00000000..e30c44b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/Xutil.pyi @@ -0,0 +1,57 @@ +NoValue: int +XValue: int +YValue: int +WidthValue: int +HeightValue: int +AllValues: int +XNegative: int +YNegative: int +USPosition: int +USSize: int +PPosition: int +PSize: int +PMinSize: int +PMaxSize: int +PResizeInc: int +PAspect: int +PBaseSize: int +PWinGravity: int +PAllHints: int +InputHint: int +StateHint: int +IconPixmapHint: int +IconWindowHint: int +IconPositionHint: int +IconMaskHint: int +WindowGroupHint: int +MessageHint: int +UrgencyHint: int +AllHints: int +WithdrawnState: int +NormalState: int +IconicState: int +DontCareState: int +ZoomState: int +InactiveState: int +RectangleOut: int +RectangleIn: int +RectanglePart: int +VisualNoMask: int +VisualIDMask: int +VisualScreenMask: int +VisualDepthMask: int +VisualClassMask: int +VisualRedMaskMask: int +VisualGreenMaskMask: int +VisualBlueMaskMask: int +VisualColormapSizeMask: int +VisualBitsPerRGBMask: int +VisualAllMask: int +ReleaseByFreeingColormap: int +BitmapSuccess: int +BitmapOpenFailed: int +BitmapFileInvalid: int +BitmapNoMemory: int +XCSUCCESS: int +XCNOMEM: int +XCNOENT: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/__init__.pyi new file mode 100644 index 00000000..988134c0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/__init__.pyi @@ -0,0 +1,14 @@ +from Xlib import ( + XK as XK, + X as X, + Xatom as Xatom, + Xcursorfont as Xcursorfont, + Xutil as Xutil, + display as display, + error as error, + rdb as rdb, +) + +__all__ = ["X", "XK", "Xatom", "Xcursorfont", "Xutil", "display", "error", "rdb"] + +# Shared types throughout the stub diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/_typing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/_typing.pyi new file mode 100644 index 00000000..ff701619 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/_typing.pyi @@ -0,0 +1,10 @@ +from collections.abc import Callable +from typing import TypeVar +from typing_extensions import TypeAlias + +from Xlib.error import XError +from Xlib.protocol.rq import Request + +_T = TypeVar("_T") +ErrorHandler: TypeAlias = Callable[[XError, Request | None], _T] +Unused: TypeAlias = object diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/display.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/display.pyi new file mode 100644 index 00000000..b307a68e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/display.pyi @@ -0,0 +1,162 @@ +from collections.abc import Callable, Iterable, Sequence +from re import Pattern +from types import FunctionType, MethodType +from typing import Any, overload +from typing_extensions import Literal, TypeAlias, TypedDict + +from Xlib import error +from Xlib._typing import ErrorHandler 
+from Xlib.protocol import display, request, rq +from Xlib.xobject import colormap, cursor, drawable, fontable, resource + +_ResourceBaseClass: TypeAlias = ( + resource.Resource + | drawable.Drawable + | drawable.Window + | drawable.Pixmap + | fontable.Fontable + | fontable.Font + | fontable.GC + | colormap.Colormap + | cursor.Cursor +) + +# Is the type of the `_resource_baseclasses` variable, defined in this file at runtime +class _ResourceBaseClassesType(TypedDict): # noqa: Y049 + resource: type[resource.Resource] + drawable: type[drawable.Drawable] + window: type[drawable.Window] + pixmap: type[drawable.Pixmap] + fontable: type[fontable.Fontable] + font: type[fontable.Font] + gc: type[fontable.GC] + colormap: type[colormap.Colormap] + cursor: type[cursor.Cursor] + +class _BaseDisplay(display.Display): + def __init__(self, display: str | None = ...) -> None: ... + def get_atom(self, atomname: str, only_if_exists: bool = ...) -> int: ... + +class Display: + display: _BaseDisplay + keysym_translations: dict[int, str] + extensions: list[str] + class_extension_dicts: dict[str, dict[str, FunctionType]] + display_extension_methods: dict[str, Callable[..., Any]] + extension_event: rq.DictWrapper + def __init__(self, display: str | None = ...) -> None: ... + def get_display_name(self) -> str: ... + def fileno(self) -> int: ... + def close(self) -> None: ... + def set_error_handler(self, handler: ErrorHandler[object] | None) -> None: ... + def flush(self) -> None: ... + def sync(self) -> None: ... + def next_event(self) -> rq.Event: ... + def pending_events(self) -> int: ... + def has_extension(self, extension: str) -> bool: ... + @overload + def create_resource_object(self, type: Literal["resource"], id: int) -> resource.Resource: ... + @overload + def create_resource_object(self, type: Literal["drawable"], id: int) -> drawable.Drawable: ... + @overload + def create_resource_object(self, type: Literal["window"], id: int) -> drawable.Window: ... + @overload + def create_resource_object(self, type: Literal["pixmap"], id: int) -> drawable.Pixmap: ... + @overload + def create_resource_object(self, type: Literal["fontable"], id: int) -> fontable.Fontable: ... + @overload + def create_resource_object(self, type: Literal["font"], id: int) -> fontable.Font: ... + @overload + def create_resource_object(self, type: Literal["gc"], id: int) -> fontable.GC: ... + @overload + def create_resource_object(self, type: Literal["colormap"], id: int) -> colormap.Colormap: ... + @overload + def create_resource_object(self, type: Literal["cursor"], id: int) -> cursor.Cursor: ... + @overload + def create_resource_object(self, type: str, id: int) -> resource.Resource: ... + def __getattr__(self, attr: str) -> MethodType: ... + def screen(self, sno: int | None = ...) -> rq.Struct: ... + def screen_count(self) -> int: ... + def get_default_screen(self) -> int: ... + def extension_add_method(self, object: str, name: str, function: Callable[..., Any]) -> None: ... + def extension_add_event(self, code: int, evt: type, name: str | None = ...) -> None: ... + def extension_add_subevent(self, code: int, subcode: int | None, evt: type[rq.Event], name: str | None = ...) -> None: ... + def extension_add_error(self, code: int, err: type[error.XError]) -> None: ... + def keycode_to_keysym(self, keycode: int, index: int) -> int: ... + def keysym_to_keycode(self, keysym: int) -> int: ... + def keysym_to_keycodes(self, keysym: int) -> Iterable[tuple[int, int]]: ... + def refresh_keyboard_mapping(self, evt: rq.Event) -> None: ... 
+ def lookup_string(self, keysym: int) -> str | None: ... + def rebind_string(self, keysym: int, newstring: str | None) -> None: ... + def intern_atom(self, name: str, only_if_exists: bool = ...) -> int: ... + def get_atom(self, atom: str, only_if_exists: bool = ...) -> int: ... + def get_atom_name(self, atom: int) -> str: ... + def get_selection_owner(self, selection: int) -> int: ... + def send_event( + self, + destination: int, + event: rq.Event, + event_mask: int = ..., + propagate: bool = ..., + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def ungrab_pointer(self, time: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def change_active_pointer_grab( + self, event_mask: int, cursor: cursor.Cursor, time: int, onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def ungrab_keyboard(self, time: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def allow_events(self, mode: int, time: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def grab_server(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def ungrab_server(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def warp_pointer( + self, + x: int, + y: int, + src_window: int = ..., + src_x: int = ..., + src_y: int = ..., + src_width: int = ..., + src_height: int = ..., + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def set_input_focus(self, focus: int, revert_to: int, time: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_input_focus(self) -> request.GetInputFocus: ... + def query_keymap(self) -> bytes: ... # TODO: Validate if this is correct + def open_font(self, name: str) -> _ResourceBaseClass | None: ... + def list_fonts(self, pattern: Pattern[str] | str, max_names: int) -> list[str]: ... + def list_fonts_with_info(self, pattern: Pattern[str] | str, max_names: int) -> request.ListFontsWithInfo: ... + def set_font_path(self, path: Sequence[str], onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_font_path(self) -> list[str]: ... + def query_extension(self, name: str) -> request.QueryExtension | None: ... + def list_extensions(self) -> list[str]: ... + def change_keyboard_mapping( + self, first_keycode: int, keysyms: Sequence[Sequence[int]], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def get_keyboard_mapping(self, first_keycode: int, count: int) -> list[tuple[int, ...]]: ... + def change_keyboard_control(self, onerror: ErrorHandler[object] | None = ..., **keys: object) -> None: ... + def get_keyboard_control(self) -> request.GetKeyboardControl: ... + def bell(self, percent: int = ..., onerror: ErrorHandler[object] | None = ...) -> None: ... + def change_pointer_control( + self, accel: tuple[int, int] | None = ..., threshold: int | None = ..., onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def get_pointer_control(self) -> request.GetPointerControl: ... + def set_screen_saver( + self, timeout: int, interval: int, prefer_blank: int, allow_exposures: int, onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def get_screen_saver(self) -> request.GetScreenSaver: ... + def change_hosts( + self, + mode: int, + host_family: int, + host: Sequence[int] | Sequence[bytes], # TODO: validate + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def list_hosts(self) -> request.ListHosts: ... + def set_access_control(self, mode: int, onerror: ErrorHandler[object] | None = ...) -> None: ... 
+ def set_close_down_mode(self, mode: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def force_screen_saver(self, mode: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def set_pointer_mapping(self, map: Sequence[int]) -> int: ... + def get_pointer_mapping(self) -> list[int]: ... + def set_modifier_mapping(self, keycodes: rq._ModifierMappingList8Elements) -> int: ... + def get_modifier_mapping(self) -> Sequence[Sequence[int]]: ... + def no_operation(self, onerror: ErrorHandler[object] | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/error.pyi new file mode 100644 index 00000000..aa846f83 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/error.pyi @@ -0,0 +1,57 @@ +from _typeshed import SliceableBuffer +from typing_extensions import Literal + +from Xlib.protocol import display, rq + +class DisplayError(Exception): + display: object + def __init__(self, display: object) -> None: ... + +class DisplayNameError(DisplayError): ... + +class DisplayConnectionError(DisplayError): + display: object + msg: object + def __init__(self, display: object, msg: object) -> None: ... + +class ConnectionClosedError(Exception): + whom: object + def __init__(self, whom: object) -> None: ... + +class XauthError(Exception): ... +class XNoAuthError(Exception): ... +class ResourceIDError(Exception): ... + +class XError(rq.GetAttrData, Exception): + def __init__(self, display: display.Display, data: SliceableBuffer) -> None: ... + +class XResourceError(XError): ... +class BadRequest(XError): ... +class BadValue(XError): ... +class BadWindow(XResourceError): ... +class BadPixmap(XResourceError): ... +class BadAtom(XError): ... +class BadCursor(XResourceError): ... +class BadFont(XResourceError): ... +class BadMatch(XError): ... +class BadDrawable(XResourceError): ... +class BadAccess(XError): ... +class BadAlloc(XError): ... +class BadColor(XResourceError): ... +class BadGC(XResourceError): ... +class BadIDChoice(XResourceError): ... +class BadName(XError): ... +class BadLength(XError): ... +class BadImplementation(XError): ... + +xerror_class: dict[int, type[XError]] + +class CatchError: + error_types: tuple[type[XError], ...] + error: XError | None + request: rq.Request | None + def __init__(self, *errors: type[XError]) -> None: ... + def __call__(self, error: XError, request: rq.Request | None) -> Literal[0, 1]: ... + def get_error(self) -> XError | None: ... + def get_request(self) -> rq.Request | None: ... + def reset(self) -> None: ... 
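error.pyi above types CatchError as a callable error handler paired with get_error()/get_request()/reset(). A short sketch of the usual pattern, passing it as the onerror argument of a request and inspecting it after a sync, assuming a reachable X server and the python-xlib runtime (the window id is hypothetical):

# Illustrative only -- CatchError used as an onerror handler, matching the
# signatures above. Assumes a reachable X server; 0x1234 is a made-up window id.
from Xlib import display, error

d = display.Display()
ec = error.CatchError(error.BadWindow)          # collect only BadWindow errors
w = d.create_resource_object("window", 0x1234)  # hypothetical resource id
w.configure(onerror=ec, x=0, y=0)               # request that may fail for a bad id
d.sync()
if ec.get_error() is not None:                  # XError | None
    print("configure failed:", ec.get_error())
    ec.reset()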
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/__init__.pyi new file mode 100644 index 00000000..17afcdca --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/__init__.pyi @@ -0,0 +1,35 @@ +from Xlib.ext import ( + composite as composite, + damage as damage, + dpms as dpms, + ge as ge, + nvcontrol as nvcontrol, + randr as randr, + record as record, + res as res, + screensaver as screensaver, + security as security, + shape as shape, + xfixes as xfixes, + xinerama as xinerama, + xinput as xinput, + xtest as xtest, +) + +__all__ = [ + "ge", + "xtest", + "shape", + "xinerama", + "record", + "composite", + "randr", + "xfixes", + "security", + "xinput", + "nvcontrol", + "damage", + "dpms", + "res", + "screensaver", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/composite.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/composite.pyi new file mode 100644 index 00000000..f974c136 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/composite.pyi @@ -0,0 +1,47 @@ +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +from Xlib._typing import ErrorHandler, Unused +from Xlib.display import Display +from Xlib.protocol import rq +from Xlib.xobject import drawable, resource + +_Update: TypeAlias = Callable[[rq.DictWrapper | dict[str, Any]], object] + +extname: str +RedirectAutomatic: int +RedirectManual: int + +class QueryVersion(rq.ReplyRequest): ... + +def query_version(self: Display | resource.Resource) -> QueryVersion: ... + +class RedirectWindow(rq.Request): ... + +def redirect_window(self: drawable.Window, update: _Update, onerror: ErrorHandler[object] | None = ...) -> None: ... + +class RedirectSubwindows(rq.Request): ... + +def redirect_subwindows(self: drawable.Window, update: _Update, onerror: ErrorHandler[object] | None = ...) -> None: ... + +class UnredirectWindow(rq.Request): ... + +def unredirect_window(self: drawable.Window, update: _Update, onerror: ErrorHandler[object] | None = ...) -> None: ... + +class UnredirectSubindows(rq.Request): ... + +def unredirect_subwindows(self: drawable.Window, update: _Update, onerror: ErrorHandler[object] | None = ...) -> None: ... + +class CreateRegionFromBorderClip(rq.Request): ... + +def create_region_from_border_clip(self: drawable.Window, onerror: ErrorHandler[object] | None = ...) -> int: ... + +class NameWindowPixmap(rq.Request): ... + +def name_window_pixmap(self: Display | resource.Resource, onerror: ErrorHandler[object] | None = ...) -> drawable.Pixmap: ... + +class GetOverlayWindow(rq.ReplyRequest): ... + +def get_overlay_window(self: Display) -> GetOverlayWindow: ... +def init(disp: Display, info: Unused) -> None: ... 
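composite.pyi above pairs each request class with a plain function whose annotated self parameter is the Display (or resource) it is issued for. The sketch below gates on the extension and issues the version query through those module-level functions; it assumes an X server that advertises Composite and the python-xlib runtime:

# Illustrative only -- minimal Composite usage matching the stub signatures above;
# assumes the X server advertises the Composite extension.
from Xlib import display
from Xlib.ext import composite

d = display.Display()
if d.has_extension(composite.extname):  # extname names the Composite extension
    reply = composite.query_version(d)  # typed to return a QueryVersion reply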
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/damage.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/damage.pyi new file mode 100644 index 00000000..35d8b719 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/damage.pyi @@ -0,0 +1,41 @@ +from Xlib.display import Display +from Xlib.error import XError +from Xlib.protocol import request, rq +from Xlib.xobject import resource + +extname: str +DamageNotifyCode: int +BadDamageCode: int + +class BadDamageError(XError): ... + +DamageReportRawRectangles: int +DamageReportDeltaRectangles: int +DamageReportBoundingBox: int +DamageReportNonEmpty: int +DamageReportLevel: tuple[int, int, int, int] +DAMAGE = rq.Card32 + +class QueryVersion(rq.ReplyRequest): ... + +def query_version(self: Display | resource.Resource) -> QueryVersion: ... + +class DamageCreate(rq.Request): ... + +def damage_create(self: Display | resource.Resource, level: int) -> int: ... + +class DamageDestroy(rq.Request): ... + +def damage_destroy(self: Display | resource.Resource, damage: int) -> None: ... + +class DamageSubtract(rq.Request): ... + +def damage_subtract(self: Display | resource.Resource, damage: int, repair: int = ..., parts: int = ...) -> None: ... + +class DamageAdd(rq.Request): ... + +def damage_add(self: Display | resource.Resource, repair: int, parts: int) -> None: ... + +class DamageNotify(rq.Event): ... + +def init(disp: Display, info: request.QueryExtension) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/dpms.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/dpms.pyi new file mode 100644 index 00000000..35c6b920 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/dpms.pyi @@ -0,0 +1,46 @@ +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import rq +from Xlib.xobject import resource + +extname: str +DPMSModeOn: int +DPMSModeStandby: int +DPMSModeSuspend: int +DPMSModeOff: int +DPMSPowerLevel: tuple[int, int, int, int] + +class DPMSGetVersion(rq.ReplyRequest): ... + +def get_version(self: Display | resource.Resource) -> DPMSGetVersion: ... + +class DPMSCapable(rq.ReplyRequest): ... + +def capable(self: Display | resource.Resource) -> DPMSCapable: ... + +class DPMSGetTimeouts(rq.ReplyRequest): ... + +def get_timeouts(self: Display | resource.Resource) -> DPMSGetTimeouts: ... + +class DPMSSetTimeouts(rq.Request): ... + +def set_timeouts( + self: Display | resource.Resource, standby_timeout: int, suspend_timeout: int, off_timeout: int +) -> DPMSSetTimeouts: ... + +class DPMSEnable(rq.Request): ... + +def enable(self: Display | resource.Resource) -> DPMSEnable: ... + +class DPMSDisable(rq.Request): ... + +def disable(self: Display | resource.Resource) -> DPMSDisable: ... + +class DPMSForceLevel(rq.Request): ... + +def force_level(self: Display | resource.Resource, power_level: int) -> DPMSForceLevel: ... + +class DPMSInfo(rq.ReplyRequest): ... + +def info(self: Display | resource.Resource) -> DPMSInfo: ... +def init(disp: Display, _info: Unused) -> None: ... 
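damage.pyi and dpms.pyi above follow the same request/function pairing. As a hedged sketch, the DPMS calls can be driven through a Display the same way, assuming the server supports the DPMS extension:

# Illustrative only -- DPMS calls as typed above; assumes the X server
# supports the DPMS extension and python-xlib is installed.
from Xlib import display
from Xlib.ext import dpms

d = display.Display()
if d.has_extension(dpms.extname):              # extname names the DPMS extension
    dpms.enable(d)                             # turn display power management on
    dpms.force_level(d, dpms.DPMSModeStandby)  # power_level: int constant from the stub
    info = dpms.info(d)                        # DPMSInfo reply request
d.sync()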
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/ge.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/ge.pyi new file mode 100644 index 00000000..790c0a85 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/ge.pyi @@ -0,0 +1,16 @@ +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import rq +from Xlib.xobject import resource + +extname: str +GenericEventCode: int + +class GEQueryVersion(rq.ReplyRequest): ... + +def query_version(self: Display | resource.Resource) -> GEQueryVersion: ... + +class GenericEvent(rq.Event): ... + +def add_event_data(self: Display | resource.Resource, extension: int, evtype: int, estruct: int) -> None: ... +def init(disp: Display, info: Unused) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/nvcontrol.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/nvcontrol.pyi new file mode 100644 index 00000000..edae4e22 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/nvcontrol.pyi @@ -0,0 +1,1063 @@ +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import rq +from Xlib.xobject import resource + +extname: str + +def query_target_count(self: Display | resource.Resource, target: Target) -> int: ... +def query_int_attribute(self: Display | resource.Resource, target: Target, display_mask: int, attr: int) -> int | None: ... +def set_int_attribute(self: Display | resource.Resource, target: Target, display_mask: int, attr: int, value: int) -> bool: ... +def query_string_attribute(self: Display | resource.Resource, target: Target, display_mask: int, attr: int) -> str | None: ... +def query_valid_attr_values( + self: Display | resource.Resource, target: Target, display_mask: int, attr: int +) -> tuple[int, int] | None: ... +def query_binary_data(self: Display | resource.Resource, target: Target, display_mask: int, attr: int) -> bytes | None: ... +def get_coolers_used_by_gpu(self: Display | resource.Resource, target: Target) -> list[int] | None: ... +def get_gpu_count(self: Display | resource.Resource) -> int: ... +def get_name(self: Display | resource.Resource, target: Target) -> str | None: ... +def get_driver_version(self: Display | resource.Resource, target: Target) -> str | None: ... +def get_vbios_version(self: Display | resource.Resource, target: Target) -> str | None: ... +def get_gpu_uuid(self: Display | resource.Resource, target: Target) -> str | None: ... +def get_utilization_rates(self: Display | resource.Resource, target: Target) -> dict[str, str | int]: ... +def get_performance_modes(self: Display | resource.Resource, target: Target) -> list[dict[str, str | int]]: ... +def get_clock_info(self: Display | resource.Resource, target: Target) -> dict[str, str | int]: ... +def get_vram(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_irq(self: Display | resource.Resource, target: Target) -> int | None: ... +def supports_framelock(self: Display | resource.Resource, target: Target) -> int | None: ... +def gvo_supported(self: Display | resource.Resource, screen: Target) -> int | None: ... +def get_core_temp(self: Display | resource.Resource, target: Target) -> int | None: ... 
+def get_core_threshold(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_default_core_threshold(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_max_core_threshold(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_ambient_temp(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_cuda_cores(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_memory_bus_width(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_total_dedicated_gpu_memory(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_used_dedicated_gpu_memory(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_curr_pcie_link_width(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_max_pcie_link_width(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_curr_pcie_link_generation(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_encoder_utilization(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_decoder_utilization(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_current_performance_level(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_gpu_nvclock_offset(self: Display | resource.Resource, target: Target, perf_level: int) -> int | None: ... +def set_gpu_nvclock_offset(self: Display | resource.Resource, target: Target, perf_level: int, offset: int) -> bool: ... +def set_gpu_nvclock_offset_all_levels(self: Display | resource.Resource, target: Target, offset: int) -> bool: ... +def get_gpu_nvclock_offset_range( + self: Display | resource.Resource, target: Target, perf_level: int +) -> tuple[int, int] | None: ... +def get_mem_transfer_rate_offset(self: Display | resource.Resource, target: Target, perf_level: int) -> int | None: ... +def set_mem_transfer_rate_offset(self: Display | resource.Resource, target: Target, perf_level: int, offset: int) -> bool: ... +def set_mem_transfer_rate_offset_all_levels(self: Display | resource.Resource, target: Target, offset: int) -> bool: ... +def get_mem_transfer_rate_offset_range( + self: Display | resource.Resource, target: Target, perf_level: int +) -> tuple[int, int] | None: ... +def get_cooler_manual_control_enabled(self: Display | resource.Resource, target: Target) -> int | None: ... +def set_cooler_manual_control_enabled(self: Display | resource.Resource, target: Target, enabled: bool) -> bool: ... +def get_fan_duty(self: Display | resource.Resource, target: Target) -> int | None: ... +def set_fan_duty(self: Display | resource.Resource, cooler: Target, speed: int) -> bool: ... +def get_fan_rpm(self: Display | resource.Resource, target: Target) -> int | None: ... +def get_max_displays(self: Display | resource.Resource, target: Target) -> int | None: ... +def init(disp: Display, info: Unused) -> None: ... 
+ +NV_CTRL_FLATPANEL_SCALING: int +NV_CTRL_FLATPANEL_SCALING_DEFAULT: int +NV_CTRL_FLATPANEL_SCALING_NATIVE: int +NV_CTRL_FLATPANEL_SCALING_SCALED: int +NV_CTRL_FLATPANEL_SCALING_CENTERED: int +NV_CTRL_FLATPANEL_SCALING_ASPECT_SCALED: int +NV_CTRL_FLATPANEL_DITHERING: int +NV_CTRL_FLATPANEL_DITHERING_DEFAULT: int +NV_CTRL_FLATPANEL_DITHERING_ENABLED: int +NV_CTRL_FLATPANEL_DITHERING_DISABLED: int +NV_CTRL_DITHERING: int +NV_CTRL_DITHERING_AUTO: int +NV_CTRL_DITHERING_ENABLED: int +NV_CTRL_DITHERING_DISABLED: int +NV_CTRL_DIGITAL_VIBRANCE: int +NV_CTRL_BUS_TYPE: int +NV_CTRL_BUS_TYPE_AGP: int +NV_CTRL_BUS_TYPE_PCI: int +NV_CTRL_BUS_TYPE_PCI_EXPRESS: int +NV_CTRL_BUS_TYPE_INTEGRATED: int +NV_CTRL_TOTAL_GPU_MEMORY: int +NV_CTRL_VIDEO_RAM: int +NV_CTRL_IRQ: int +NV_CTRL_OPERATING_SYSTEM: int +NV_CTRL_OPERATING_SYSTEM_LINUX: int +NV_CTRL_OPERATING_SYSTEM_FREEBSD: int +NV_CTRL_OPERATING_SYSTEM_SUNOS: int +NV_CTRL_SYNC_TO_VBLANK: int +NV_CTRL_SYNC_TO_VBLANK_OFF: int +NV_CTRL_SYNC_TO_VBLANK_ON: int +NV_CTRL_LOG_ANISO: int +NV_CTRL_FSAA_MODE: int +NV_CTRL_FSAA_MODE_NONE: int +NV_CTRL_FSAA_MODE_2x: int +NV_CTRL_FSAA_MODE_2x_5t: int +NV_CTRL_FSAA_MODE_15x15: int +NV_CTRL_FSAA_MODE_2x2: int +NV_CTRL_FSAA_MODE_4x: int +NV_CTRL_FSAA_MODE_4x_9t: int +NV_CTRL_FSAA_MODE_8x: int +NV_CTRL_FSAA_MODE_16x: int +NV_CTRL_FSAA_MODE_8xS: int +NV_CTRL_FSAA_MODE_8xQ: int +NV_CTRL_FSAA_MODE_16xS: int +NV_CTRL_FSAA_MODE_16xQ: int +NV_CTRL_FSAA_MODE_32xS: int +NV_CTRL_FSAA_MODE_32x: int +NV_CTRL_FSAA_MODE_64xS: int +NV_CTRL_FSAA_MODE_MAX: int +NV_CTRL_UBB: int +NV_CTRL_UBB_OFF: int +NV_CTRL_UBB_ON: int +NV_CTRL_OVERLAY: int +NV_CTRL_OVERLAY_OFF: int +NV_CTRL_OVERLAY_ON: int +NV_CTRL_STEREO: int +NV_CTRL_STEREO_OFF: int +NV_CTRL_STEREO_DDC: int +NV_CTRL_STEREO_BLUELINE: int +NV_CTRL_STEREO_DIN: int +NV_CTRL_STEREO_PASSIVE_EYE_PER_DPY: int +NV_CTRL_STEREO_VERTICAL_INTERLACED: int +NV_CTRL_STEREO_COLOR_INTERLACED: int +NV_CTRL_STEREO_HORIZONTAL_INTERLACED: int +NV_CTRL_STEREO_CHECKERBOARD_PATTERN: int +NV_CTRL_STEREO_INVERSE_CHECKERBOARD_PATTERN: int +NV_CTRL_STEREO_3D_VISION: int +NV_CTRL_STEREO_3D_VISION_PRO: int +NV_CTRL_STEREO_HDMI_3D: int +NV_CTRL_STEREO_TRIDELITY_SL: int +NV_CTRL_STEREO_INBAND_STEREO_SIGNALING: int +NV_CTRL_STEREO_MAX: int +NV_CTRL_EMULATE: int +NV_CTRL_EMULATE_NONE: int +NV_CTRL_TWINVIEW: int +NV_CTRL_TWINVIEW_NOT_ENABLED: int +NV_CTRL_TWINVIEW_ENABLED: int +NV_CTRL_CONNECTED_DISPLAYS: int +NV_CTRL_ENABLED_DISPLAYS: int +NV_CTRL_FRAMELOCK: int +NV_CTRL_FRAMELOCK_NOT_SUPPORTED: int +NV_CTRL_FRAMELOCK_SUPPORTED: int +NV_CTRL_FRAMELOCK_MASTER: int +NV_CTRL_FRAMELOCK_MASTER_FALSE: int +NV_CTRL_FRAMELOCK_MASTER_TRUE: int +NV_CTRL_FRAMELOCK_POLARITY: int +NV_CTRL_FRAMELOCK_POLARITY_RISING_EDGE: int +NV_CTRL_FRAMELOCK_POLARITY_FALLING_EDGE: int +NV_CTRL_FRAMELOCK_POLARITY_BOTH_EDGES: int +NV_CTRL_FRAMELOCK_SYNC_DELAY: int +NV_CTRL_FRAMELOCK_SYNC_DELAY_MAX: int +NV_CTRL_FRAMELOCK_SYNC_DELAY_FACTOR: float +NV_CTRL_FRAMELOCK_SYNC_INTERVAL: int +NV_CTRL_FRAMELOCK_PORT0_STATUS: int +NV_CTRL_FRAMELOCK_PORT0_STATUS_INPUT: int +NV_CTRL_FRAMELOCK_PORT0_STATUS_OUTPUT: int +NV_CTRL_FRAMELOCK_PORT1_STATUS: int +NV_CTRL_FRAMELOCK_PORT1_STATUS_INPUT: int +NV_CTRL_FRAMELOCK_PORT1_STATUS_OUTPUT: int +NV_CTRL_FRAMELOCK_HOUSE_STATUS: int +NV_CTRL_FRAMELOCK_HOUSE_STATUS_NOT_DETECTED: int +NV_CTRL_FRAMELOCK_HOUSE_STATUS_DETECTED: int +NV_CTRL_FRAMELOCK_SYNC: int +NV_CTRL_FRAMELOCK_SYNC_DISABLE: int +NV_CTRL_FRAMELOCK_SYNC_ENABLE: int +NV_CTRL_FRAMELOCK_SYNC_READY: int +NV_CTRL_FRAMELOCK_SYNC_READY_FALSE: int 
+NV_CTRL_FRAMELOCK_SYNC_READY_TRUE: int +NV_CTRL_FRAMELOCK_STEREO_SYNC: int +NV_CTRL_FRAMELOCK_STEREO_SYNC_FALSE: int +NV_CTRL_FRAMELOCK_STEREO_SYNC_TRUE: int +NV_CTRL_FRAMELOCK_TEST_SIGNAL: int +NV_CTRL_FRAMELOCK_TEST_SIGNAL_DISABLE: int +NV_CTRL_FRAMELOCK_TEST_SIGNAL_ENABLE: int +NV_CTRL_FRAMELOCK_ETHERNET_DETECTED: int +NV_CTRL_FRAMELOCK_ETHERNET_DETECTED_NONE: int +NV_CTRL_FRAMELOCK_ETHERNET_DETECTED_PORT0: int +NV_CTRL_FRAMELOCK_ETHERNET_DETECTED_PORT1: int +NV_CTRL_FRAMELOCK_VIDEO_MODE: int +NV_CTRL_FRAMELOCK_VIDEO_MODE_NONE: int +NV_CTRL_FRAMELOCK_VIDEO_MODE_TTL: int +NV_CTRL_FRAMELOCK_VIDEO_MODE_NTSCPALSECAM: int +NV_CTRL_FRAMELOCK_VIDEO_MODE_HDTV: int +NV_CTRL_FRAMELOCK_VIDEO_MODE_COMPOSITE_AUTO: int +NV_CTRL_FRAMELOCK_VIDEO_MODE_COMPOSITE_BI_LEVEL: int +NV_CTRL_FRAMELOCK_VIDEO_MODE_COMPOSITE_TRI_LEVEL: int +NV_CTRL_FRAMELOCK_SYNC_RATE: int +NV_CTRL_FORCE_GENERIC_CPU: int +NV_CTRL_FORCE_GENERIC_CPU_DISABLE: int +NV_CTRL_FORCE_GENERIC_CPU_ENABLE: int +NV_CTRL_OPENGL_AA_LINE_GAMMA: int +NV_CTRL_OPENGL_AA_LINE_GAMMA_DISABLE: int +NV_CTRL_OPENGL_AA_LINE_GAMMA_ENABLE: int +NV_CTRL_FRAMELOCK_TIMING: int +NV_CTRL_FRAMELOCK_TIMING_FALSE: int +NV_CTRL_FRAMELOCK_TIMING_TRUE: int +NV_CTRL_FLIPPING_ALLOWED: int +NV_CTRL_FLIPPING_ALLOWED_FALSE: int +NV_CTRL_FLIPPING_ALLOWED_TRUE: int +NV_CTRL_ARCHITECTURE: int +NV_CTRL_ARCHITECTURE_X86: int +NV_CTRL_ARCHITECTURE_X86_64: int +NV_CTRL_ARCHITECTURE_IA64: int +NV_CTRL_ARCHITECTURE_ARM: int +NV_CTRL_ARCHITECTURE_AARCH64: int +NV_CTRL_ARCHITECTURE_PPC64LE: int +NV_CTRL_TEXTURE_CLAMPING: int +NV_CTRL_TEXTURE_CLAMPING_EDGE: int +NV_CTRL_TEXTURE_CLAMPING_SPEC: int +NV_CTRL_CURSOR_SHADOW: int +NV_CTRL_CURSOR_SHADOW_DISABLE: int +NV_CTRL_CURSOR_SHADOW_ENABLE: int +NV_CTRL_CURSOR_SHADOW_ALPHA: int +NV_CTRL_CURSOR_SHADOW_RED: int +NV_CTRL_CURSOR_SHADOW_GREEN: int +NV_CTRL_CURSOR_SHADOW_BLUE: int +NV_CTRL_CURSOR_SHADOW_X_OFFSET: int +NV_CTRL_CURSOR_SHADOW_Y_OFFSET: int +NV_CTRL_FSAA_APPLICATION_CONTROLLED: int +NV_CTRL_FSAA_APPLICATION_CONTROLLED_ENABLED: int +NV_CTRL_FSAA_APPLICATION_CONTROLLED_DISABLED: int +NV_CTRL_LOG_ANISO_APPLICATION_CONTROLLED: int +NV_CTRL_LOG_ANISO_APPLICATION_CONTROLLED_ENABLED: int +NV_CTRL_LOG_ANISO_APPLICATION_CONTROLLED_DISABLED: int +NV_CTRL_IMAGE_SHARPENING: int +NV_CTRL_TV_OVERSCAN: int +NV_CTRL_TV_FLICKER_FILTER: int +NV_CTRL_TV_BRIGHTNESS: int +NV_CTRL_TV_HUE: int +NV_CTRL_TV_CONTRAST: int +NV_CTRL_TV_SATURATION: int +NV_CTRL_TV_RESET_SETTINGS: int +NV_CTRL_GPU_CORE_TEMPERATURE: int +NV_CTRL_GPU_CORE_THRESHOLD: int +NV_CTRL_GPU_DEFAULT_CORE_THRESHOLD: int +NV_CTRL_GPU_MAX_CORE_THRESHOLD: int +NV_CTRL_AMBIENT_TEMPERATURE: int +NV_CTRL_PBUFFER_SCANOUT_SUPPORTED: int +NV_CTRL_PBUFFER_SCANOUT_FALSE: int +NV_CTRL_PBUFFER_SCANOUT_TRUE: int +NV_CTRL_PBUFFER_SCANOUT_XID: int +NV_CTRL_GVO_SUPPORTED: int +NV_CTRL_GVO_SUPPORTED_FALSE: int +NV_CTRL_GVO_SUPPORTED_TRUE: int +NV_CTRL_GVO_SYNC_MODE: int +NV_CTRL_GVO_SYNC_MODE_FREE_RUNNING: int +NV_CTRL_GVO_SYNC_MODE_GENLOCK: int +NV_CTRL_GVO_SYNC_MODE_FRAMELOCK: int +NV_CTRL_GVO_SYNC_SOURCE: int +NV_CTRL_GVO_SYNC_SOURCE_COMPOSITE: int +NV_CTRL_GVO_SYNC_SOURCE_SDI: int +NV_CTRL_GVIO_REQUESTED_VIDEO_FORMAT: int +NV_CTRL_GVIO_VIDEO_FORMAT_NONE: int +NV_CTRL_GVIO_VIDEO_FORMAT_487I_59_94_SMPTE259_NTSC: int +NV_CTRL_GVIO_VIDEO_FORMAT_576I_50_00_SMPTE259_PAL: int +NV_CTRL_GVIO_VIDEO_FORMAT_720P_59_94_SMPTE296: int +NV_CTRL_GVIO_VIDEO_FORMAT_720P_60_00_SMPTE296: int +NV_CTRL_GVIO_VIDEO_FORMAT_1035I_59_94_SMPTE260: int +NV_CTRL_GVIO_VIDEO_FORMAT_1035I_60_00_SMPTE260: int 
+NV_CTRL_GVIO_VIDEO_FORMAT_1080I_50_00_SMPTE295: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_50_00_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_59_94_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_60_00_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_23_976_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_24_00_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_25_00_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_29_97_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_30_00_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_720P_50_00_SMPTE296: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_48_00_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_47_96_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_720P_30_00_SMPTE296: int +NV_CTRL_GVIO_VIDEO_FORMAT_720P_29_97_SMPTE296: int +NV_CTRL_GVIO_VIDEO_FORMAT_720P_25_00_SMPTE296: int +NV_CTRL_GVIO_VIDEO_FORMAT_720P_24_00_SMPTE296: int +NV_CTRL_GVIO_VIDEO_FORMAT_720P_23_98_SMPTE296: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080PSF_25_00_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080PSF_29_97_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080PSF_30_00_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080PSF_24_00_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080PSF_23_98_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_30_00_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_29_97_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_60_00_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_59_94_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_25_00_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_50_00_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_24_00_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_23_98_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_48_00_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_47_96_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_50_00_3G_LEVEL_A_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_59_94_3G_LEVEL_A_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_60_00_3G_LEVEL_A_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_60_00_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_60_00_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_60_00_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_50_00_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_50_00_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_50_00_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_30_00_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_30_00_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_25_00_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_25_00_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_24_00_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_24_00_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_48_00_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_48_00_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_59_94_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_59_94_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_59_94_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_29_97_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_29_97_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080P_23_98_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048P_23_98_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVIO_VIDEO_FORMAT_1080I_47_96_3G_LEVEL_B_SMPTE274: int +NV_CTRL_GVIO_VIDEO_FORMAT_2048I_47_96_3G_LEVEL_B_SMPTE372: int +NV_CTRL_GVO_OUTPUT_VIDEO_FORMAT: int +NV_CTRL_GVO_VIDEO_FORMAT_NONE: int +NV_CTRL_GVO_VIDEO_FORMAT_487I_59_94_SMPTE259_NTSC: int +NV_CTRL_GVO_VIDEO_FORMAT_576I_50_00_SMPTE259_PAL: int 
+NV_CTRL_GVO_VIDEO_FORMAT_720P_59_94_SMPTE296: int +NV_CTRL_GVO_VIDEO_FORMAT_720P_60_00_SMPTE296: int +NV_CTRL_GVO_VIDEO_FORMAT_1035I_59_94_SMPTE260: int +NV_CTRL_GVO_VIDEO_FORMAT_1035I_60_00_SMPTE260: int +NV_CTRL_GVO_VIDEO_FORMAT_1080I_50_00_SMPTE295: int +NV_CTRL_GVO_VIDEO_FORMAT_1080I_50_00_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080I_59_94_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080I_60_00_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080P_23_976_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080P_24_00_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080P_25_00_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080P_29_97_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080P_30_00_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_720P_50_00_SMPTE296: int +NV_CTRL_GVO_VIDEO_FORMAT_1080I_48_00_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080I_47_96_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_720P_30_00_SMPTE296: int +NV_CTRL_GVO_VIDEO_FORMAT_720P_29_97_SMPTE296: int +NV_CTRL_GVO_VIDEO_FORMAT_720P_25_00_SMPTE296: int +NV_CTRL_GVO_VIDEO_FORMAT_720P_24_00_SMPTE296: int +NV_CTRL_GVO_VIDEO_FORMAT_720P_23_98_SMPTE296: int +NV_CTRL_GVO_VIDEO_FORMAT_1080PSF_25_00_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080PSF_29_97_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080PSF_30_00_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080PSF_24_00_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_1080PSF_23_98_SMPTE274: int +NV_CTRL_GVO_VIDEO_FORMAT_2048P_30_00_SMPTE372: int +NV_CTRL_GVO_VIDEO_FORMAT_2048P_29_97_SMPTE372: int +NV_CTRL_GVO_VIDEO_FORMAT_2048I_60_00_SMPTE372: int +NV_CTRL_GVO_VIDEO_FORMAT_2048I_59_94_SMPTE372: int +NV_CTRL_GVO_VIDEO_FORMAT_2048P_25_00_SMPTE372: int +NV_CTRL_GVO_VIDEO_FORMAT_2048I_50_00_SMPTE372: int +NV_CTRL_GVO_VIDEO_FORMAT_2048P_24_00_SMPTE372: int +NV_CTRL_GVO_VIDEO_FORMAT_2048P_23_98_SMPTE372: int +NV_CTRL_GVO_VIDEO_FORMAT_2048I_48_00_SMPTE372: int +NV_CTRL_GVO_VIDEO_FORMAT_2048I_47_96_SMPTE372: int +NV_CTRL_GVIO_DETECTED_VIDEO_FORMAT: int +NV_CTRL_GVO_INPUT_VIDEO_FORMAT: int +NV_CTRL_GVO_DATA_FORMAT: int +NV_CTRL_GVO_DATA_FORMAT_R8G8B8_TO_YCRCB444: int +NV_CTRL_GVO_DATA_FORMAT_R8G8B8A8_TO_YCRCBA4444: int +NV_CTRL_GVO_DATA_FORMAT_R8G8B8Z10_TO_YCRCBZ4444: int +NV_CTRL_GVO_DATA_FORMAT_R8G8B8_TO_YCRCB422: int +NV_CTRL_GVO_DATA_FORMAT_R8G8B8A8_TO_YCRCBA4224: int +NV_CTRL_GVO_DATA_FORMAT_R8G8B8Z10_TO_YCRCBZ4224: int +NV_CTRL_GVO_DATA_FORMAT_R8G8B8_TO_RGB444: int +NV_CTRL_GVO_DATA_FORMAT_X8X8X8_444_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_R8G8B8A8_TO_RGBA4444: int +NV_CTRL_GVO_DATA_FORMAT_X8X8X8A8_4444_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_R8G8B8Z10_TO_RGBZ4444: int +NV_CTRL_GVO_DATA_FORMAT_X8X8X8Z8_4444_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_Y10CR10CB10_TO_YCRCB444: int +NV_CTRL_GVO_DATA_FORMAT_X10X10X10_444_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_Y10CR8CB8_TO_YCRCB444: int +NV_CTRL_GVO_DATA_FORMAT_X10X8X8_444_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_Y10CR8CB8A10_TO_YCRCBA4444: int +NV_CTRL_GVO_DATA_FORMAT_X10X8X8A10_4444_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_Y10CR8CB8Z10_TO_YCRCBZ4444: int +NV_CTRL_GVO_DATA_FORMAT_X10X8X8Z10_4444_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_DUAL_R8G8B8_TO_DUAL_YCRCB422: int +NV_CTRL_GVO_DATA_FORMAT_DUAL_Y8CR8CB8_TO_DUAL_YCRCB422: int +NV_CTRL_GVO_DATA_FORMAT_DUAL_X8X8X8_TO_DUAL_422_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_R10G10B10_TO_YCRCB422: int +NV_CTRL_GVO_DATA_FORMAT_R10G10B10_TO_YCRCB444: int +NV_CTRL_GVO_DATA_FORMAT_Y12CR12CB12_TO_YCRCB444: int +NV_CTRL_GVO_DATA_FORMAT_X12X12X12_444_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_R12G12B12_TO_YCRCB444: int +NV_CTRL_GVO_DATA_FORMAT_X8X8X8_422_PASSTHRU: int 
+NV_CTRL_GVO_DATA_FORMAT_X8X8X8A8_4224_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_X8X8X8Z8_4224_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_X10X10X10_422_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_X10X8X8_422_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_X10X8X8A10_4224_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_X10X8X8Z10_4224_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_X12X12X12_422_PASSTHRU: int +NV_CTRL_GVO_DATA_FORMAT_R12G12B12_TO_YCRCB422: int +NV_CTRL_GVO_DISPLAY_X_SCREEN: int +NV_CTRL_GVO_DISPLAY_X_SCREEN_ENABLE: int +NV_CTRL_GVO_DISPLAY_X_SCREEN_DISABLE: int +NV_CTRL_GVO_COMPOSITE_SYNC_INPUT_DETECTED: int +NV_CTRL_GVO_COMPOSITE_SYNC_INPUT_DETECTED_FALSE: int +NV_CTRL_GVO_COMPOSITE_SYNC_INPUT_DETECTED_TRUE: int +NV_CTRL_GVO_COMPOSITE_SYNC_INPUT_DETECT_MODE: int +NV_CTRL_GVO_COMPOSITE_SYNC_INPUT_DETECT_MODE_AUTO: int +NV_CTRL_GVO_COMPOSITE_SYNC_INPUT_DETECT_MODE_BI_LEVEL: int +NV_CTRL_GVO_COMPOSITE_SYNC_INPUT_DETECT_MODE_TRI_LEVEL: int +NV_CTRL_GVO_SDI_SYNC_INPUT_DETECTED: int +NV_CTRL_GVO_SDI_SYNC_INPUT_DETECTED_NONE: int +NV_CTRL_GVO_SDI_SYNC_INPUT_DETECTED_HD: int +NV_CTRL_GVO_SDI_SYNC_INPUT_DETECTED_SD: int +NV_CTRL_GVO_VIDEO_OUTPUTS: int +NV_CTRL_GVO_VIDEO_OUTPUTS_NONE: int +NV_CTRL_GVO_VIDEO_OUTPUTS_VIDEO1: int +NV_CTRL_GVO_VIDEO_OUTPUTS_VIDEO2: int +NV_CTRL_GVO_VIDEO_OUTPUTS_VIDEO_BOTH: int +NV_CTRL_GVO_FIRMWARE_VERSION: int +NV_CTRL_GVO_SYNC_DELAY_PIXELS: int +NV_CTRL_GVO_SYNC_DELAY_LINES: int +NV_CTRL_GVO_INPUT_VIDEO_FORMAT_REACQUIRE: int +NV_CTRL_GVO_INPUT_VIDEO_FORMAT_REACQUIRE_FALSE: int +NV_CTRL_GVO_INPUT_VIDEO_FORMAT_REACQUIRE_TRUE: int +NV_CTRL_GVO_GLX_LOCKED: int +NV_CTRL_GVO_GLX_LOCKED_FALSE: int +NV_CTRL_GVO_GLX_LOCKED_TRUE: int +NV_CTRL_GVIO_VIDEO_FORMAT_WIDTH: int +NV_CTRL_GVIO_VIDEO_FORMAT_HEIGHT: int +NV_CTRL_GVIO_VIDEO_FORMAT_REFRESH_RATE: int +NV_CTRL_GVO_VIDEO_FORMAT_WIDTH: int +NV_CTRL_GVO_VIDEO_FORMAT_HEIGHT: int +NV_CTRL_GVO_VIDEO_FORMAT_REFRESH_RATE: int +NV_CTRL_GVO_X_SCREEN_PAN_X: int +NV_CTRL_GVO_X_SCREEN_PAN_Y: int +NV_CTRL_GPU_OVERCLOCKING_STATE: int +NV_CTRL_GPU_OVERCLOCKING_STATE_NONE: int +NV_CTRL_GPU_OVERCLOCKING_STATE_MANUAL: int +NV_CTRL_GPU_2D_CLOCK_FREQS: int +NV_CTRL_GPU_3D_CLOCK_FREQS: int +NV_CTRL_GPU_DEFAULT_2D_CLOCK_FREQS: int +NV_CTRL_GPU_DEFAULT_3D_CLOCK_FREQS: int +NV_CTRL_GPU_CURRENT_CLOCK_FREQS: int +NV_CTRL_GPU_OPTIMAL_CLOCK_FREQS: int +NV_CTRL_GPU_OPTIMAL_CLOCK_FREQS_INVALID: int +NV_CTRL_GPU_OPTIMAL_CLOCK_FREQS_DETECTION: int +NV_CTRL_GPU_OPTIMAL_CLOCK_FREQS_DETECTION_START: int +NV_CTRL_GPU_OPTIMAL_CLOCK_FREQS_DETECTION_CANCEL: int +NV_CTRL_GPU_OPTIMAL_CLOCK_FREQS_DETECTION_STATE: int +NV_CTRL_GPU_OPTIMAL_CLOCK_FREQS_DETECTION_STATE_IDLE: int +NV_CTRL_GPU_OPTIMAL_CLOCK_FREQS_DETECTION_STATE_BUSY: int +NV_CTRL_FLATPANEL_CHIP_LOCATION: int +NV_CTRL_FLATPANEL_CHIP_LOCATION_INTERNAL: int +NV_CTRL_FLATPANEL_CHIP_LOCATION_EXTERNAL: int +NV_CTRL_FLATPANEL_LINK: int +NV_CTRL_FLATPANEL_LINK_SINGLE: int +NV_CTRL_FLATPANEL_LINK_DUAL: int +NV_CTRL_FLATPANEL_LINK_QUAD: int +NV_CTRL_FLATPANEL_SIGNAL: int +NV_CTRL_FLATPANEL_SIGNAL_LVDS: int +NV_CTRL_FLATPANEL_SIGNAL_TMDS: int +NV_CTRL_FLATPANEL_SIGNAL_DISPLAYPORT: int +NV_CTRL_USE_HOUSE_SYNC: int +NV_CTRL_USE_HOUSE_SYNC_DISABLED: int +NV_CTRL_USE_HOUSE_SYNC_INPUT: int +NV_CTRL_USE_HOUSE_SYNC_OUTPUT: int +NV_CTRL_USE_HOUSE_SYNC_FALSE: int +NV_CTRL_USE_HOUSE_SYNC_TRUE: int +NV_CTRL_EDID_AVAILABLE: int +NV_CTRL_EDID_AVAILABLE_FALSE: int +NV_CTRL_EDID_AVAILABLE_TRUE: int +NV_CTRL_FORCE_STEREO: int +NV_CTRL_FORCE_STEREO_FALSE: int +NV_CTRL_FORCE_STEREO_TRUE: int +NV_CTRL_IMAGE_SETTINGS: int 
+NV_CTRL_IMAGE_SETTINGS_HIGH_QUALITY: int +NV_CTRL_IMAGE_SETTINGS_QUALITY: int +NV_CTRL_IMAGE_SETTINGS_PERFORMANCE: int +NV_CTRL_IMAGE_SETTINGS_HIGH_PERFORMANCE: int +NV_CTRL_XINERAMA: int +NV_CTRL_XINERAMA_OFF: int +NV_CTRL_XINERAMA_ON: int +NV_CTRL_XINERAMA_STEREO: int +NV_CTRL_XINERAMA_STEREO_FALSE: int +NV_CTRL_XINERAMA_STEREO_TRUE: int +NV_CTRL_BUS_RATE: int +NV_CTRL_GPU_PCIE_MAX_LINK_WIDTH: int +NV_CTRL_SHOW_SLI_VISUAL_INDICATOR: int +NV_CTRL_SHOW_SLI_VISUAL_INDICATOR_FALSE: int +NV_CTRL_SHOW_SLI_VISUAL_INDICATOR_TRUE: int +NV_CTRL_SHOW_SLI_HUD: int +NV_CTRL_SHOW_SLI_HUD_FALSE: int +NV_CTRL_SHOW_SLI_HUD_TRUE: int +NV_CTRL_XV_SYNC_TO_DISPLAY: int +NV_CTRL_GVIO_REQUESTED_VIDEO_FORMAT2: int +NV_CTRL_GVO_OUTPUT_VIDEO_FORMAT2: int +NV_CTRL_GVO_OVERRIDE_HW_CSC: int +NV_CTRL_GVO_OVERRIDE_HW_CSC_FALSE: int +NV_CTRL_GVO_OVERRIDE_HW_CSC_TRUE: int +NV_CTRL_GVO_CAPABILITIES: int +NV_CTRL_GVO_CAPABILITIES_APPLY_CSC_IMMEDIATELY: int +NV_CTRL_GVO_CAPABILITIES_APPLY_CSC_TO_X_SCREEN: int +NV_CTRL_GVO_CAPABILITIES_COMPOSITE_TERMINATION: int +NV_CTRL_GVO_CAPABILITIES_SHARED_SYNC_BNC: int +NV_CTRL_GVO_CAPABILITIES_MULTIRATE_SYNC: int +NV_CTRL_GVO_CAPABILITIES_ADVANCE_SYNC_SKEW: int +NV_CTRL_GVO_COMPOSITE_TERMINATION: int +NV_CTRL_GVO_COMPOSITE_TERMINATION_ENABLE: int +NV_CTRL_GVO_COMPOSITE_TERMINATION_DISABLE: int +NV_CTRL_ASSOCIATED_DISPLAY_DEVICES: int +NV_CTRL_FRAMELOCK_SLAVES: int +NV_CTRL_FRAMELOCK_MASTERABLE: int +NV_CTRL_PROBE_DISPLAYS: int +NV_CTRL_REFRESH_RATE: int +NV_CTRL_GVO_FLIP_QUEUE_SIZE: int +NV_CTRL_CURRENT_SCANLINE: int +NV_CTRL_INITIAL_PIXMAP_PLACEMENT: int +NV_CTRL_INITIAL_PIXMAP_PLACEMENT_FORCE_SYSMEM: int +NV_CTRL_INITIAL_PIXMAP_PLACEMENT_SYSMEM: int +NV_CTRL_INITIAL_PIXMAP_PLACEMENT_VIDMEM: int +NV_CTRL_INITIAL_PIXMAP_PLACEMENT_RESERVED: int +NV_CTRL_INITIAL_PIXMAP_PLACEMENT_GPU_SYSMEM: int +NV_CTRL_PCI_BUS: int +NV_CTRL_PCI_DEVICE: int +NV_CTRL_PCI_FUNCTION: int +NV_CTRL_FRAMELOCK_FPGA_REVISION: int +NV_CTRL_MAX_SCREEN_WIDTH: int +NV_CTRL_MAX_SCREEN_HEIGHT: int +NV_CTRL_MAX_DISPLAYS: int +NV_CTRL_DYNAMIC_TWINVIEW: int +NV_CTRL_MULTIGPU_DISPLAY_OWNER: int +NV_CTRL_GPU_SCALING: int +NV_CTRL_GPU_SCALING_TARGET_INVALID: int +NV_CTRL_GPU_SCALING_TARGET_FLATPANEL_BEST_FIT: int +NV_CTRL_GPU_SCALING_TARGET_FLATPANEL_NATIVE: int +NV_CTRL_GPU_SCALING_METHOD_INVALID: int +NV_CTRL_GPU_SCALING_METHOD_STRETCHED: int +NV_CTRL_GPU_SCALING_METHOD_CENTERED: int +NV_CTRL_GPU_SCALING_METHOD_ASPECT_SCALED: int +NV_CTRL_FRONTEND_RESOLUTION: int +NV_CTRL_BACKEND_RESOLUTION: int +NV_CTRL_FLATPANEL_NATIVE_RESOLUTION: int +NV_CTRL_FLATPANEL_BEST_FIT_RESOLUTION: int +NV_CTRL_GPU_SCALING_ACTIVE: int +NV_CTRL_DFP_SCALING_ACTIVE: int +NV_CTRL_FSAA_APPLICATION_ENHANCED: int +NV_CTRL_FSAA_APPLICATION_ENHANCED_ENABLED: int +NV_CTRL_FSAA_APPLICATION_ENHANCED_DISABLED: int +NV_CTRL_FRAMELOCK_SYNC_RATE_4: int +NV_CTRL_GVO_LOCK_OWNER: int +NV_CTRL_GVO_LOCK_OWNER_NONE: int +NV_CTRL_GVO_LOCK_OWNER_GLX: int +NV_CTRL_GVO_LOCK_OWNER_CLONE: int +NV_CTRL_GVO_LOCK_OWNER_X_SCREEN: int +NV_CTRL_HWOVERLAY: int +NV_CTRL_HWOVERLAY_FALSE: int +NV_CTRL_HWOVERLAY_TRUE: int +NV_CTRL_NUM_GPU_ERRORS_RECOVERED: int +NV_CTRL_REFRESH_RATE_3: int +NV_CTRL_ONDEMAND_VBLANK_INTERRUPTS: int +NV_CTRL_ONDEMAND_VBLANK_INTERRUPTS_OFF: int +NV_CTRL_ONDEMAND_VBLANK_INTERRUPTS_ON: int +NV_CTRL_GPU_POWER_SOURCE: int +NV_CTRL_GPU_POWER_SOURCE_AC: int +NV_CTRL_GPU_POWER_SOURCE_BATTERY: int +NV_CTRL_GPU_CURRENT_PERFORMANCE_MODE: int +NV_CTRL_GPU_CURRENT_PERFORMANCE_MODE_DESKTOP: int +NV_CTRL_GPU_CURRENT_PERFORMANCE_MODE_MAXPERF: int 
+NV_CTRL_GLYPH_CACHE: int +NV_CTRL_GLYPH_CACHE_DISABLED: int +NV_CTRL_GLYPH_CACHE_ENABLED: int +NV_CTRL_GPU_CURRENT_PERFORMANCE_LEVEL: int +NV_CTRL_GPU_ADAPTIVE_CLOCK_STATE: int +NV_CTRL_GPU_ADAPTIVE_CLOCK_STATE_DISABLED: int +NV_CTRL_GPU_ADAPTIVE_CLOCK_STATE_ENABLED: int +NV_CTRL_GVO_OUTPUT_VIDEO_LOCKED: int +NV_CTRL_GVO_OUTPUT_VIDEO_LOCKED_FALSE: int +NV_CTRL_GVO_OUTPUT_VIDEO_LOCKED_TRUE: int +NV_CTRL_GVO_SYNC_LOCK_STATUS: int +NV_CTRL_GVO_SYNC_LOCK_STATUS_UNLOCKED: int +NV_CTRL_GVO_SYNC_LOCK_STATUS_LOCKED: int +NV_CTRL_GVO_ANC_TIME_CODE_GENERATION: int +NV_CTRL_GVO_ANC_TIME_CODE_GENERATION_DISABLE: int +NV_CTRL_GVO_ANC_TIME_CODE_GENERATION_ENABLE: int +NV_CTRL_GVO_COMPOSITE: int +NV_CTRL_GVO_COMPOSITE_DISABLE: int +NV_CTRL_GVO_COMPOSITE_ENABLE: int +NV_CTRL_GVO_COMPOSITE_ALPHA_KEY: int +NV_CTRL_GVO_COMPOSITE_ALPHA_KEY_DISABLE: int +NV_CTRL_GVO_COMPOSITE_ALPHA_KEY_ENABLE: int +NV_CTRL_GVO_COMPOSITE_LUMA_KEY_RANGE: int +NV_CTRL_GVO_COMPOSITE_CR_KEY_RANGE: int +NV_CTRL_GVO_COMPOSITE_CB_KEY_RANGE: int +NV_CTRL_GVO_COMPOSITE_NUM_KEY_RANGES: int +NV_CTRL_SWITCH_TO_DISPLAYS: int +NV_CTRL_NOTEBOOK_DISPLAY_CHANGE_LID_EVENT: int +NV_CTRL_NOTEBOOK_INTERNAL_LCD: int +NV_CTRL_DEPTH_30_ALLOWED: int +NV_CTRL_MODE_SET_EVENT: int +NV_CTRL_OPENGL_AA_LINE_GAMMA_VALUE: int +NV_CTRL_VCSC_HIGH_PERF_MODE: int +NV_CTRL_VCSC_HIGH_PERF_MODE_DISABLE: int +NV_CTRL_VCSC_HIGH_PERF_MODE_ENABLE: int +NV_CTRL_DISPLAYPORT_LINK_RATE: int +NV_CTRL_DISPLAYPORT_LINK_RATE_DISABLED: int +NV_CTRL_DISPLAYPORT_LINK_RATE_1_62GBPS: int +NV_CTRL_DISPLAYPORT_LINK_RATE_2_70GBPS: int +NV_CTRL_STEREO_EYES_EXCHANGE: int +NV_CTRL_STEREO_EYES_EXCHANGE_OFF: int +NV_CTRL_STEREO_EYES_EXCHANGE_ON: int +NV_CTRL_NO_SCANOUT: int +NV_CTRL_NO_SCANOUT_DISABLED: int +NV_CTRL_NO_SCANOUT_ENABLED: int +NV_CTRL_GVO_CSC_CHANGED_EVENT: int +NV_CTRL_FRAMELOCK_SLAVEABLE: int +NV_CTRL_GVO_SYNC_TO_DISPLAY: int +NV_CTRL_GVO_SYNC_TO_DISPLAY_DISABLE: int +NV_CTRL_GVO_SYNC_TO_DISPLAY_ENABLE: int +NV_CTRL_X_SERVER_UNIQUE_ID: int +NV_CTRL_PIXMAP_CACHE: int +NV_CTRL_PIXMAP_CACHE_DISABLE: int +NV_CTRL_PIXMAP_CACHE_ENABLE: int +NV_CTRL_PIXMAP_CACHE_ROUNDING_SIZE_KB: int +NV_CTRL_IS_GVO_DISPLAY: int +NV_CTRL_IS_GVO_DISPLAY_FALSE: int +NV_CTRL_IS_GVO_DISPLAY_TRUE: int +NV_CTRL_PCI_ID: int +NV_CTRL_GVO_FULL_RANGE_COLOR: int +NV_CTRL_GVO_FULL_RANGE_COLOR_DISABLED: int +NV_CTRL_GVO_FULL_RANGE_COLOR_ENABLED: int +NV_CTRL_SLI_MOSAIC_MODE_AVAILABLE: int +NV_CTRL_SLI_MOSAIC_MODE_AVAILABLE_FALSE: int +NV_CTRL_SLI_MOSAIC_MODE_AVAILABLE_TRUE: int +NV_CTRL_GVO_ENABLE_RGB_DATA: int +NV_CTRL_GVO_ENABLE_RGB_DATA_DISABLE: int +NV_CTRL_GVO_ENABLE_RGB_DATA_ENABLE: int +NV_CTRL_IMAGE_SHARPENING_DEFAULT: int +NV_CTRL_PCI_DOMAIN: int +NV_CTRL_GVI_NUM_JACKS: int +NV_CTRL_GVI_MAX_LINKS_PER_STREAM: int +NV_CTRL_GVI_DETECTED_CHANNEL_BITS_PER_COMPONENT: int +NV_CTRL_GVI_BITS_PER_COMPONENT_UNKNOWN: int +NV_CTRL_GVI_BITS_PER_COMPONENT_8: int +NV_CTRL_GVI_BITS_PER_COMPONENT_10: int +NV_CTRL_GVI_BITS_PER_COMPONENT_12: int +NV_CTRL_GVI_REQUESTED_STREAM_BITS_PER_COMPONENT: int +NV_CTRL_GVI_DETECTED_CHANNEL_COMPONENT_SAMPLING: int +NV_CTRL_GVI_COMPONENT_SAMPLING_UNKNOWN: int +NV_CTRL_GVI_COMPONENT_SAMPLING_4444: int +NV_CTRL_GVI_COMPONENT_SAMPLING_4224: int +NV_CTRL_GVI_COMPONENT_SAMPLING_444: int +NV_CTRL_GVI_COMPONENT_SAMPLING_422: int +NV_CTRL_GVI_COMPONENT_SAMPLING_420: int +NV_CTRL_GVI_REQUESTED_STREAM_COMPONENT_SAMPLING: int +NV_CTRL_GVI_REQUESTED_STREAM_CHROMA_EXPAND: int +NV_CTRL_GVI_CHROMA_EXPAND_FALSE: int +NV_CTRL_GVI_CHROMA_EXPAND_TRUE: int +NV_CTRL_GVI_DETECTED_CHANNEL_COLOR_SPACE: int 
+NV_CTRL_GVI_COLOR_SPACE_UNKNOWN: int +NV_CTRL_GVI_COLOR_SPACE_GBR: int +NV_CTRL_GVI_COLOR_SPACE_GBRA: int +NV_CTRL_GVI_COLOR_SPACE_GBRD: int +NV_CTRL_GVI_COLOR_SPACE_YCBCR: int +NV_CTRL_GVI_COLOR_SPACE_YCBCRA: int +NV_CTRL_GVI_COLOR_SPACE_YCBCRD: int +NV_CTRL_GVI_DETECTED_CHANNEL_LINK_ID: int +NV_CTRL_GVI_LINK_ID_UNKNOWN: int +NV_CTRL_GVI_DETECTED_CHANNEL_SMPTE352_IDENTIFIER: int +NV_CTRL_GVI_GLOBAL_IDENTIFIER: int +NV_CTRL_FRAMELOCK_SYNC_DELAY_RESOLUTION: int +NV_CTRL_GPU_COOLER_MANUAL_CONTROL: int +NV_CTRL_GPU_COOLER_MANUAL_CONTROL_FALSE: int +NV_CTRL_GPU_COOLER_MANUAL_CONTROL_TRUE: int +NV_CTRL_THERMAL_COOLER_LEVEL: int +NV_CTRL_THERMAL_COOLER_LEVEL_SET_DEFAULT: int +NV_CTRL_THERMAL_COOLER_CONTROL_TYPE: int +NV_CTRL_THERMAL_COOLER_CONTROL_TYPE_NONE: int +NV_CTRL_THERMAL_COOLER_CONTROL_TYPE_TOGGLE: int +NV_CTRL_THERMAL_COOLER_CONTROL_TYPE_VARIABLE: int +NV_CTRL_THERMAL_COOLER_TARGET: int +NV_CTRL_THERMAL_COOLER_TARGET_NONE: int +NV_CTRL_THERMAL_COOLER_TARGET_GPU: int +NV_CTRL_THERMAL_COOLER_TARGET_MEMORY: int +NV_CTRL_THERMAL_COOLER_TARGET_POWER_SUPPLY: int +NV_CTRL_THERMAL_COOLER_TARGET_GPU_RELATED: int +NV_CTRL_GPU_ECC_SUPPORTED: int +NV_CTRL_GPU_ECC_SUPPORTED_FALSE: int +NV_CTRL_GPU_ECC_SUPPORTED_TRUE: int +NV_CTRL_GPU_ECC_STATUS: int +NV_CTRL_GPU_ECC_STATUS_DISABLED: int +NV_CTRL_GPU_ECC_STATUS_ENABLED: int +NV_CTRL_GPU_ECC_CONFIGURATION_SUPPORTED: int +NV_CTRL_GPU_ECC_CONFIGURATION_SUPPORTED_FALSE: int +NV_CTRL_GPU_ECC_CONFIGURATION_SUPPORTED_TRUE: int +NV_CTRL_GPU_ECC_CONFIGURATION: int +NV_CTRL_GPU_ECC_CONFIGURATION_DISABLED: int +NV_CTRL_GPU_ECC_CONFIGURATION_ENABLED: int +NV_CTRL_GPU_ECC_DEFAULT_CONFIGURATION: int +NV_CTRL_GPU_ECC_DEFAULT_CONFIGURATION_DISABLED: int +NV_CTRL_GPU_ECC_DEFAULT_CONFIGURATION_ENABLED: int +NV_CTRL_GPU_ECC_SINGLE_BIT_ERRORS: int +NV_CTRL_GPU_ECC_DOUBLE_BIT_ERRORS: int +NV_CTRL_GPU_ECC_AGGREGATE_SINGLE_BIT_ERRORS: int +NV_CTRL_GPU_ECC_AGGREGATE_DOUBLE_BIT_ERRORS: int +NV_CTRL_GPU_ECC_RESET_ERROR_STATUS: int +NV_CTRL_GPU_ECC_RESET_ERROR_STATUS_VOLATILE: int +NV_CTRL_GPU_ECC_RESET_ERROR_STATUS_AGGREGATE: int +NV_CTRL_GPU_POWER_MIZER_MODE: int +NV_CTRL_GPU_POWER_MIZER_MODE_ADAPTIVE: int +NV_CTRL_GPU_POWER_MIZER_MODE_PREFER_MAXIMUM_PERFORMANCE: int +NV_CTRL_GPU_POWER_MIZER_MODE_AUTO: int +NV_CTRL_GPU_POWER_MIZER_MODE_PREFER_CONSISTENT_PERFORMANCE: int +NV_CTRL_GVI_SYNC_OUTPUT_FORMAT: int +NV_CTRL_GVI_MAX_CHANNELS_PER_JACK: int +NV_CTRL_GVI_MAX_STREAMS: int +NV_CTRL_GVI_NUM_CAPTURE_SURFACES: int +NV_CTRL_OVERSCAN_COMPENSATION: int +NV_CTRL_GPU_PCIE_GENERATION: int +NV_CTRL_GPU_PCIE_GENERATION1: int +NV_CTRL_GPU_PCIE_GENERATION2: int +NV_CTRL_GPU_PCIE_GENERATION3: int +NV_CTRL_GVI_BOUND_GPU: int +NV_CTRL_GVIO_REQUESTED_VIDEO_FORMAT3: int +NV_CTRL_ACCELERATE_TRAPEZOIDS: int +NV_CTRL_ACCELERATE_TRAPEZOIDS_DISABLE: int +NV_CTRL_ACCELERATE_TRAPEZOIDS_ENABLE: int +NV_CTRL_GPU_CORES: int +NV_CTRL_GPU_MEMORY_BUS_WIDTH: int +NV_CTRL_GVI_TEST_MODE: int +NV_CTRL_GVI_TEST_MODE_DISABLE: int +NV_CTRL_GVI_TEST_MODE_ENABLE: int +NV_CTRL_COLOR_SPACE: int +NV_CTRL_COLOR_SPACE_RGB: int +NV_CTRL_COLOR_SPACE_YCbCr422: int +NV_CTRL_COLOR_SPACE_YCbCr444: int +NV_CTRL_COLOR_RANGE: int +NV_CTRL_COLOR_RANGE_FULL: int +NV_CTRL_COLOR_RANGE_LIMITED: int +NV_CTRL_GPU_SCALING_DEFAULT_TARGET: int +NV_CTRL_GPU_SCALING_DEFAULT_METHOD: int +NV_CTRL_DITHERING_MODE: int +NV_CTRL_DITHERING_MODE_AUTO: int +NV_CTRL_DITHERING_MODE_DYNAMIC_2X2: int +NV_CTRL_DITHERING_MODE_STATIC_2X2: int +NV_CTRL_DITHERING_MODE_TEMPORAL: int +NV_CTRL_CURRENT_DITHERING: int +NV_CTRL_CURRENT_DITHERING_DISABLED: int 
+NV_CTRL_CURRENT_DITHERING_ENABLED: int +NV_CTRL_CURRENT_DITHERING_MODE: int +NV_CTRL_CURRENT_DITHERING_MODE_NONE: int +NV_CTRL_CURRENT_DITHERING_MODE_DYNAMIC_2X2: int +NV_CTRL_CURRENT_DITHERING_MODE_STATIC_2X2: int +NV_CTRL_CURRENT_DITHERING_MODE_TEMPORAL: int +NV_CTRL_THERMAL_SENSOR_READING: int +NV_CTRL_THERMAL_SENSOR_PROVIDER: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_NONE: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_GPU_INTERNAL: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_ADM1032: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_ADT7461: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_MAX6649: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_MAX1617: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_LM99: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_LM89: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_LM64: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_G781: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_ADT7473: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_SBMAX6649: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_VBIOSEVT: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_OS: int +NV_CTRL_THERMAL_SENSOR_PROVIDER_UNKNOWN: int +NV_CTRL_THERMAL_SENSOR_TARGET: int +NV_CTRL_THERMAL_SENSOR_TARGET_NONE: int +NV_CTRL_THERMAL_SENSOR_TARGET_GPU: int +NV_CTRL_THERMAL_SENSOR_TARGET_MEMORY: int +NV_CTRL_THERMAL_SENSOR_TARGET_POWER_SUPPLY: int +NV_CTRL_THERMAL_SENSOR_TARGET_BOARD: int +NV_CTRL_THERMAL_SENSOR_TARGET_UNKNOWN: int +NV_CTRL_SHOW_MULTIGPU_VISUAL_INDICATOR: int +NV_CTRL_SHOW_MULTIGPU_VISUAL_INDICATOR_FALSE: int +NV_CTRL_SHOW_MULTIGPU_VISUAL_INDICATOR_TRUE: int +NV_CTRL_GPU_CURRENT_PROCESSOR_CLOCK_FREQS: int +NV_CTRL_GVIO_VIDEO_FORMAT_FLAGS: int +NV_CTRL_GVIO_VIDEO_FORMAT_FLAGS_NONE: int +NV_CTRL_GVIO_VIDEO_FORMAT_FLAGS_INTERLACED: int +NV_CTRL_GVIO_VIDEO_FORMAT_FLAGS_PROGRESSIVE: int +NV_CTRL_GVIO_VIDEO_FORMAT_FLAGS_PSF: int +NV_CTRL_GVIO_VIDEO_FORMAT_FLAGS_3G_LEVEL_A: int +NV_CTRL_GVIO_VIDEO_FORMAT_FLAGS_3G_LEVEL_B: int +NV_CTRL_GVIO_VIDEO_FORMAT_FLAGS_3G: int +NV_CTRL_GVIO_VIDEO_FORMAT_FLAGS_3G_1080P_NO_12BPC: int +NV_CTRL_GPU_PCIE_MAX_LINK_SPEED: int +NV_CTRL_3D_VISION_PRO_RESET_TRANSCEIVER_TO_FACTORY_SETTINGS: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_CHANNEL: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_MODE: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_MODE_INVALID: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_MODE_LOW_RANGE: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_MODE_MEDIUM_RANGE: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_MODE_HIGH_RANGE: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_MODE_COUNT: int +NV_CTRL_SYNCHRONOUS_PALETTE_UPDATES: int +NV_CTRL_SYNCHRONOUS_PALETTE_UPDATES_DISABLE: int +NV_CTRL_SYNCHRONOUS_PALETTE_UPDATES_ENABLE: int +NV_CTRL_DITHERING_DEPTH: int +NV_CTRL_DITHERING_DEPTH_AUTO: int +NV_CTRL_DITHERING_DEPTH_6_BITS: int +NV_CTRL_DITHERING_DEPTH_8_BITS: int +NV_CTRL_CURRENT_DITHERING_DEPTH: int +NV_CTRL_CURRENT_DITHERING_DEPTH_NONE: int +NV_CTRL_CURRENT_DITHERING_DEPTH_6_BITS: int +NV_CTRL_CURRENT_DITHERING_DEPTH_8_BITS: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_CHANNEL_FREQUENCY: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_CHANNEL_QUALITY: int +NV_CTRL_3D_VISION_PRO_TRANSCEIVER_CHANNEL_COUNT: int +NV_CTRL_3D_VISION_PRO_PAIR_GLASSES: int +NV_CTRL_3D_VISION_PRO_PAIR_GLASSES_STOP: int +NV_CTRL_3D_VISION_PRO_PAIR_GLASSES_BEACON: int +NV_CTRL_3D_VISION_PRO_UNPAIR_GLASSES: int +NV_CTRL_3D_VISION_PRO_DISCOVER_GLASSES: int +NV_CTRL_3D_VISION_PRO_IDENTIFY_GLASSES: int +NV_CTRL_3D_VISION_PRO_GLASSES_SYNC_CYCLE: int +NV_CTRL_3D_VISION_PRO_GLASSES_MISSED_SYNC_CYCLES: int +NV_CTRL_3D_VISION_PRO_GLASSES_BATTERY_LEVEL: int +NV_CTRL_GVO_ANC_PARITY_COMPUTATION: int +NV_CTRL_GVO_ANC_PARITY_COMPUTATION_AUTO: int 
+NV_CTRL_GVO_ANC_PARITY_COMPUTATION_ON: int +NV_CTRL_GVO_ANC_PARITY_COMPUTATION_OFF: int +NV_CTRL_3D_VISION_PRO_GLASSES_PAIR_EVENT: int +NV_CTRL_3D_VISION_PRO_GLASSES_UNPAIR_EVENT: int +NV_CTRL_GPU_PCIE_CURRENT_LINK_WIDTH: int +NV_CTRL_GPU_PCIE_CURRENT_LINK_SPEED: int +NV_CTRL_GVO_AUDIO_BLANKING: int +NV_CTRL_GVO_AUDIO_BLANKING_DISABLE: int +NV_CTRL_GVO_AUDIO_BLANKING_ENABLE: int +NV_CTRL_CURRENT_METAMODE_ID: int +NV_CTRL_DISPLAY_ENABLED: int +NV_CTRL_DISPLAY_ENABLED_TRUE: int +NV_CTRL_DISPLAY_ENABLED_FALSE: int +NV_CTRL_FRAMELOCK_INCOMING_HOUSE_SYNC_RATE: int +NV_CTRL_FXAA: int +NV_CTRL_FXAA_DISABLE: int +NV_CTRL_FXAA_ENABLE: int +NV_CTRL_DISPLAY_RANDR_OUTPUT_ID: int +NV_CTRL_FRAMELOCK_DISPLAY_CONFIG: int +NV_CTRL_FRAMELOCK_DISPLAY_CONFIG_DISABLED: int +NV_CTRL_FRAMELOCK_DISPLAY_CONFIG_CLIENT: int +NV_CTRL_FRAMELOCK_DISPLAY_CONFIG_SERVER: int +NV_CTRL_TOTAL_DEDICATED_GPU_MEMORY: int +NV_CTRL_USED_DEDICATED_GPU_MEMORY: int +NV_CTRL_GPU_DOUBLE_PRECISION_BOOST_IMMEDIATE: int +NV_CTRL_GPU_DOUBLE_PRECISION_BOOST_IMMEDIATE_DISABLED: int +NV_CTRL_GPU_DOUBLE_PRECISION_BOOST_IMMEDIATE_ENABLED: int +NV_CTRL_GPU_DOUBLE_PRECISION_BOOST_REBOOT: int +NV_CTRL_GPU_DOUBLE_PRECISION_BOOST_REBOOT_DISABLED: int +NV_CTRL_GPU_DOUBLE_PRECISION_BOOST_REBOOT_ENALED: int +NV_CTRL_DPY_HDMI_3D: int +NV_CTRL_DPY_HDMI_3D_DISABLED: int +NV_CTRL_DPY_HDMI_3D_ENABLED: int +NV_CTRL_BASE_MOSAIC: int +NV_CTRL_BASE_MOSAIC_DISABLED: int +NV_CTRL_BASE_MOSAIC_FULL: int +NV_CTRL_BASE_MOSAIC_LIMITED: int +NV_CTRL_MULTIGPU_MASTER_POSSIBLE: int +NV_CTRL_MULTIGPU_MASTER_POSSIBLE_FALSE: int +NV_CTRL_MULTIGPU_MASTER_POSSIBLE_TRUE: int +NV_CTRL_GPU_POWER_MIZER_DEFAULT_MODE: int +NV_CTRL_XV_SYNC_TO_DISPLAY_ID: int +NV_CTRL_XV_SYNC_TO_DISPLAY_ID_AUTO: int +NV_CTRL_BACKLIGHT_BRIGHTNESS: int +NV_CTRL_GPU_LOGO_BRIGHTNESS: int +NV_CTRL_GPU_SLI_LOGO_BRIGHTNESS: int +NV_CTRL_THERMAL_COOLER_SPEED: int +NV_CTRL_PALETTE_UPDATE_EVENT: int +NV_CTRL_VIDEO_ENCODER_UTILIZATION: int +NV_CTRL_GSYNC_ALLOWED: int +NV_CTRL_GSYNC_ALLOWED_FALSE: int +NV_CTRL_GSYNC_ALLOWED_TRUE: int +NV_CTRL_GPU_NVCLOCK_OFFSET: int +NV_CTRL_GPU_MEM_TRANSFER_RATE_OFFSET: int +NV_CTRL_VIDEO_DECODER_UTILIZATION: int +NV_CTRL_GPU_OVER_VOLTAGE_OFFSET: int +NV_CTRL_GPU_CURRENT_CORE_VOLTAGE: int +NV_CTRL_CURRENT_COLOR_SPACE: int +NV_CTRL_CURRENT_COLOR_SPACE_RGB: int +NV_CTRL_CURRENT_COLOR_SPACE_YCbCr422: int +NV_CTRL_CURRENT_COLOR_SPACE_YCbCr444: int +NV_CTRL_CURRENT_COLOR_SPACE_YCbCr420: int +NV_CTRL_CURRENT_COLOR_RANGE: int +NV_CTRL_CURRENT_COLOR_RANGE_FULL: int +NV_CTRL_CURRENT_COLOR_RANGE_LIMITED: int +NV_CTRL_SHOW_GSYNC_VISUAL_INDICATOR: int +NV_CTRL_SHOW_GSYNC_VISUAL_INDICATOR_FALSE: int +NV_CTRL_SHOW_GSYNC_VISUAL_INDICATOR_TRUE: int +NV_CTRL_THERMAL_COOLER_CURRENT_LEVEL: int +NV_CTRL_STEREO_SWAP_MODE: int +NV_CTRL_STEREO_SWAP_MODE_APPLICATION_CONTROL: int +NV_CTRL_STEREO_SWAP_MODE_PER_EYE: int +NV_CTRL_STEREO_SWAP_MODE_PER_EYE_PAIR: int +NV_CTRL_CURRENT_XV_SYNC_TO_DISPLAY_ID: int +NV_CTRL_GPU_FRAMELOCK_FIRMWARE_UNSUPPORTED: int +NV_CTRL_GPU_FRAMELOCK_FIRMWARE_UNSUPPORTED_FALSE: int +NV_CTRL_GPU_FRAMELOCK_FIRMWARE_UNSUPPORTED_TRUE: int +NV_CTRL_DISPLAYPORT_CONNECTOR_TYPE: int +NV_CTRL_DISPLAYPORT_CONNECTOR_TYPE_UNKNOWN: int +NV_CTRL_DISPLAYPORT_CONNECTOR_TYPE_DISPLAYPORT: int +NV_CTRL_DISPLAYPORT_CONNECTOR_TYPE_HDMI: int +NV_CTRL_DISPLAYPORT_CONNECTOR_TYPE_DVI: int +NV_CTRL_DISPLAYPORT_CONNECTOR_TYPE_VGA: int +NV_CTRL_DISPLAYPORT_IS_MULTISTREAM: int +NV_CTRL_DISPLAYPORT_SINK_IS_AUDIO_CAPABLE: int +NV_CTRL_GPU_NVCLOCK_OFFSET_ALL_PERFORMANCE_LEVELS: int 
+NV_CTRL_GPU_MEM_TRANSFER_RATE_OFFSET_ALL_PERFORMANCE_LEVELS: int +NV_CTRL_FRAMELOCK_FIRMWARE_VERSION: int +NV_CTRL_FRAMELOCK_FIRMWARE_MINOR_VERSION: int +NV_CTRL_SHOW_GRAPHICS_VISUAL_INDICATOR: int +NV_CTRL_SHOW_GRAPHICS_VISUAL_INDICATOR_FALSE: int +NV_CTRL_SHOW_GRAPHICS_VISUAL_INDICATOR_TRUE: int +NV_CTRL_LAST_ATTRIBUTE: int +NV_CTRL_STRING_PRODUCT_NAME: int +NV_CTRL_STRING_VBIOS_VERSION: int +NV_CTRL_STRING_NVIDIA_DRIVER_VERSION: int +NV_CTRL_STRING_DISPLAY_DEVICE_NAME: int +NV_CTRL_STRING_TV_ENCODER_NAME: int +NV_CTRL_STRING_GVIO_FIRMWARE_VERSION: int +NV_CTRL_STRING_GVO_FIRMWARE_VERSION: int +NV_CTRL_STRING_CURRENT_MODELINE: int +NV_CTRL_STRING_ADD_MODELINE: int +NV_CTRL_STRING_DELETE_MODELINE: int +NV_CTRL_STRING_CURRENT_METAMODE: int +NV_CTRL_STRING_CURRENT_METAMODE_VERSION_1: int +NV_CTRL_STRING_ADD_METAMODE: int +NV_CTRL_STRING_DELETE_METAMODE: int +NV_CTRL_STRING_VCSC_PRODUCT_NAME: int +NV_CTRL_STRING_VCSC_PRODUCT_ID: int +NV_CTRL_STRING_VCSC_SERIAL_NUMBER: int +NV_CTRL_STRING_VCSC_BUILD_DATE: int +NV_CTRL_STRING_VCSC_FIRMWARE_VERSION: int +NV_CTRL_STRING_VCSC_FIRMWARE_REVISION: int +NV_CTRL_STRING_VCSC_HARDWARE_VERSION: int +NV_CTRL_STRING_VCSC_HARDWARE_REVISION: int +NV_CTRL_STRING_MOVE_METAMODE: int +NV_CTRL_STRING_VALID_HORIZ_SYNC_RANGES: int +NV_CTRL_STRING_VALID_VERT_REFRESH_RANGES: int +NV_CTRL_STRING_SCREEN_RECTANGLE: int +NV_CTRL_STRING_XINERAMA_SCREEN_INFO: int +NV_CTRL_STRING_NVIDIA_XINERAMA_INFO_ORDER: int +NV_CTRL_STRING_TWINVIEW_XINERAMA_INFO_ORDER: int +NV_CTRL_STRING_SLI_MODE: int +NV_CTRL_STRING_PERFORMANCE_MODES: int +NV_CTRL_STRING_VCSC_FAN_STATUS: int +NV_CTRL_STRING_VCSC_TEMPERATURES: int +NV_CTRL_STRING_VCSC_PSU_INFO: int +NV_CTRL_STRING_GVIO_VIDEO_FORMAT_NAME: int +NV_CTRL_STRING_GVO_VIDEO_FORMAT_NAME: int +NV_CTRL_STRING_GPU_CURRENT_CLOCK_FREQS: int +NV_CTRL_STRING_3D_VISION_PRO_TRANSCEIVER_HARDWARE_REVISION: int +NV_CTRL_STRING_3D_VISION_PRO_TRANSCEIVER_FIRMWARE_VERSION_A: int +NV_CTRL_STRING_3D_VISION_PRO_TRANSCEIVER_FIRMWARE_DATE_A: int +NV_CTRL_STRING_3D_VISION_PRO_TRANSCEIVER_FIRMWARE_VERSION_B: int +NV_CTRL_STRING_3D_VISION_PRO_TRANSCEIVER_FIRMWARE_DATE_B: int +NV_CTRL_STRING_3D_VISION_PRO_TRANSCEIVER_ADDRESS: int +NV_CTRL_STRING_3D_VISION_PRO_GLASSES_FIRMWARE_VERSION_A: int +NV_CTRL_STRING_3D_VISION_PRO_GLASSES_FIRMWARE_DATE_A: int +NV_CTRL_STRING_3D_VISION_PRO_GLASSES_ADDRESS: int +NV_CTRL_STRING_3D_VISION_PRO_GLASSES_NAME: int +NV_CTRL_STRING_CURRENT_METAMODE_VERSION_2: int +NV_CTRL_STRING_DISPLAY_NAME_TYPE_BASENAME: int +NV_CTRL_STRING_DISPLAY_NAME_TYPE_ID: int +NV_CTRL_STRING_DISPLAY_NAME_DP_GUID: int +NV_CTRL_STRING_DISPLAY_NAME_EDID_HASH: int +NV_CTRL_STRING_DISPLAY_NAME_TARGET_INDEX: int +NV_CTRL_STRING_DISPLAY_NAME_RANDR: int +NV_CTRL_STRING_GPU_UUID: int +NV_CTRL_STRING_GPU_UTILIZATION: int +NV_CTRL_STRING_MULTIGPU_MODE: int +NV_CTRL_STRING_PRIME_OUTPUTS_DATA: int +NV_CTRL_STRING_LAST_ATTRIBUTE: int +NV_CTRL_BINARY_DATA_EDID: int +NV_CTRL_BINARY_DATA_MODELINES: int +NV_CTRL_BINARY_DATA_METAMODES: int +NV_CTRL_BINARY_DATA_METAMODES_VERSION_1: int +NV_CTRL_BINARY_DATA_XSCREENS_USING_GPU: int +NV_CTRL_BINARY_DATA_GPUS_USED_BY_XSCREEN: int +NV_CTRL_BINARY_DATA_GPUS_USING_FRAMELOCK: int +NV_CTRL_BINARY_DATA_DISPLAY_VIEWPORT: int +NV_CTRL_BINARY_DATA_FRAMELOCKS_USED_BY_GPU: int +NV_CTRL_BINARY_DATA_GPUS_USING_VCSC: int +NV_CTRL_BINARY_DATA_VCSCS_USED_BY_GPU: int +NV_CTRL_BINARY_DATA_COOLERS_USED_BY_GPU: int +NV_CTRL_BINARY_DATA_GPUS_USED_BY_LOGICAL_XSCREEN: int +NV_CTRL_BINARY_DATA_THERMAL_SENSORS_USED_BY_GPU: int 
+NV_CTRL_BINARY_DATA_GLASSES_PAIRED_TO_3D_VISION_PRO_TRANSCEIVER: int +NV_CTRL_BINARY_DATA_DISPLAY_TARGETS: int +NV_CTRL_BINARY_DATA_DISPLAYS_CONNECTED_TO_GPU: int +NV_CTRL_BINARY_DATA_METAMODES_VERSION_2: int +NV_CTRL_BINARY_DATA_DISPLAYS_ENABLED_ON_XSCREEN: int +NV_CTRL_BINARY_DATA_DISPLAYS_ASSIGNED_TO_XSCREEN: int +NV_CTRL_BINARY_DATA_GPU_FLAGS: int +NV_CTRL_BINARY_DATA_GPU_FLAGS_STEREO_DISPLAY_TRANSFORM_EXCLUSIVE: int +NV_CTRL_BINARY_DATA_GPU_FLAGS_OVERLAY_DISPLAY_TRANSFORM_EXCLUSIVE: int +NV_CTRL_BINARY_DATA_GPU_FLAGS_DEPTH_8_DISPLAY_TRANSFORM_EXCLUSIVE: int +NV_CTRL_BINARY_DATA_DISPLAYS_ON_GPU: int +NV_CTRL_BINARY_DATA_LAST_ATTRIBUTE: int +NV_CTRL_STRING_OPERATION_ADD_METAMODE: int +NV_CTRL_STRING_OPERATION_GTF_MODELINE: int +NV_CTRL_STRING_OPERATION_CVT_MODELINE: int +NV_CTRL_STRING_OPERATION_BUILD_MODEPOOL: int +NV_CTRL_STRING_OPERATION_GVI_CONFIGURE_STREAMS: int +NV_CTRL_STRING_OPERATION_PARSE_METAMODE: int +NV_CTRL_STRING_OPERATION_LAST_ATTRIBUTE: int +X_nvCtrlQueryExtension: int +X_nvCtrlQueryAttribute: int +X_nvCtrlQueryStringAttribute: int +X_nvCtrlQueryValidAttributeValues: int +X_nvCtrlSetStringAttribute: int +X_nvCtrlSetAttributeAndGetStatus: int +X_nvCtrlQueryBinaryData: int +X_nvCtrlQueryTargetCount: int +X_nvCtrlStringOperation: int +ATTRIBUTE_TYPE_UNKNOWN: int +ATTRIBUTE_TYPE_INTEGER: int +ATTRIBUTE_TYPE_BITMASK: int +ATTRIBUTE_TYPE_BOOL: int +ATTRIBUTE_TYPE_RANGE: int +ATTRIBUTE_TYPE_INT_BITS: int +ATTRIBUTE_TYPE_READ: int +ATTRIBUTE_TYPE_WRITE: int +ATTRIBUTE_TYPE_DISPLAY: int +ATTRIBUTE_TYPE_GPU: int +ATTRIBUTE_TYPE_FRAMELOCK: int +ATTRIBUTE_TYPE_X_SCREEN: int +ATTRIBUTE_TYPE_XINERAMA: int +ATTRIBUTE_TYPE_VCSC: int +NV_CTRL_TARGET_TYPE_X_SCREEN: int +NV_CTRL_TARGET_TYPE_GPU: int +NV_CTRL_TARGET_TYPE_FRAMELOCK: int +NV_CTRL_TARGET_TYPE_VCSC: int +NV_CTRL_TARGET_TYPE_GVI: int +NV_CTRL_TARGET_TYPE_COOLER: int +NV_CTRL_TARGET_TYPE_THERMAL_SENSOR: int +NV_CTRL_TARGET_TYPE_3D_VISION_PRO_TRANSCEIVER: int +NV_CTRL_TARGET_TYPE_DISPLAY: int + +class Target: + def id(self) -> int: ... + def type(self) -> int: ... + +class Gpu(Target): + def __init__(self, ngpu: int = ...) -> None: ... + +class Screen(Target): + def __init__(self, nscr: int = ...) -> None: ... + +class Cooler(Target): + def __init__(self, nfan: int = ...) -> None: ... + +class NVCtrlQueryTargetCountReplyRequest(rq.ReplyRequest): ... +class NVCtrlQueryAttributeReplyRequest(rq.ReplyRequest): ... +class NVCtrlSetAttributeAndGetStatusReplyRequest(rq.ReplyRequest): ... +class NVCtrlQueryStringAttributeReplyRequest(rq.ReplyRequest): ... +class NVCtrlQueryValidAttributeValuesReplyRequest(rq.ReplyRequest): ... +class NVCtrlQueryBinaryDataReplyRequest(rq.ReplyRequest): ... +class NVCtrlQueryListCard32ReplyRequest(rq.ReplyRequest): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/randr.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/randr.pyi new file mode 100644 index 00000000..f9188ddf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/randr.pyi @@ -0,0 +1,261 @@ +from collections.abc import Sequence +from typing_extensions import TypeAlias + +from Xlib.display import Display +from Xlib.protocol import request, rq +from Xlib.xobject import drawable, resource + +_RandRModeInfo13IntSequence: TypeAlias = Sequence[int] + +extname: str +RRScreenChangeNotify: int +RRNotify: int +RRNotify_CrtcChange: int +RRNotify_OutputChange: int +RRNotify_OutputProperty: int +RRScreenChangeNotifyMask: int +RRCrtcChangeNotifyMask: int +RROutputChangeNotifyMask: int +RROutputPropertyNotifyMask: int +SetConfigSuccess: int +SetConfigInvalidConfigTime: int +SetConfigInvalidTime: int +SetConfigFailed: int +Rotate_0: int +Rotate_90: int +Rotate_180: int +Rotate_270: int +Reflect_X: int +Reflect_Y: int +HSyncPositive: int +HSyncNegative: int +VSyncPositive: int +VSyncNegative: int +Interlace: int +DoubleScan: int +CSync: int +CSyncPositive: int +CSyncNegative: int +HSkewPresent: int +BCast: int +PixelMultiplex: int +DoubleClock: int +ClockDivideBy2: int +Connected: int +Disconnected: int +UnknownConnection: int +PROPERTY_RANDR_EDID: str +PROPERTY_SIGNAL_FORMAT: str +PROPERTY_SIGNAL_PROPERTIES: str +PROPERTY_CONNECTOR_TYPE: str +PROPERTY_CONNECTOR_NUMBER: str +PROPERTY_COMPATIBILITY_LIST: str +PROPERTY_CLONE_LIST: str +SubPixelUnknown: int +SubPixelHorizontalRGB: int +SubPixelHorizontalBGR: int +SubPixelVerticalRGB: int +SubPixelVerticalBGR: int +SubPixelNone: int +BadRROutput: int +BadRRCrtc: int +BadRRMode: int + +class BadRROutputError(Exception): ... +class BadRRCrtcError(Exception): ... +class BadRRModeError(Exception): ... + +RandR_ScreenSizes: rq.Struct +RandR_ModeInfo: rq.Struct +RandR_Rates: rq.Struct +Render_Transform: rq.Struct +MonitorInfo: rq.Struct + +class QueryVersion(rq.ReplyRequest): ... + +def query_version(self: Display | resource.Resource) -> QueryVersion: ... + +class _1_0SetScreenConfig(rq.ReplyRequest): ... +class SetScreenConfig(rq.ReplyRequest): ... + +def set_screen_config( + self: drawable.Drawable, size_id: int, rotation: int, config_timestamp: int, rate: int = ..., timestamp: int = ... +) -> SetScreenConfig: ... + +class SelectInput(rq.Request): ... + +def select_input(self: drawable.Window, mask: int) -> SelectInput: ... + +class GetScreenInfo(rq.ReplyRequest): ... + +def get_screen_info(self: drawable.Window) -> GetScreenInfo: ... + +class GetScreenSizeRange(rq.ReplyRequest): ... + +def get_screen_size_range(self: drawable.Window) -> GetScreenSizeRange: ... + +class SetScreenSize(rq.Request): ... + +def set_screen_size( + self: drawable.Window, + width: int, + height: int, + width_in_millimeters: int | None = ..., + height_in_millimeters: int | None = ..., +) -> SetScreenSize: ... + +class GetScreenResources(rq.ReplyRequest): ... + +def get_screen_resources(self: drawable.Window) -> GetScreenResources: ... + +class GetOutputInfo(rq.ReplyRequest): ... + +def get_output_info(self: Display | resource.Resource, output: int, config_timestamp: int) -> GetOutputInfo: ... + +class ListOutputProperties(rq.ReplyRequest): ... + +def list_output_properties(self: Display | resource.Resource, output: int) -> ListOutputProperties: ... 
+ +class QueryOutputProperty(rq.ReplyRequest): ... + +def query_output_property(self: Display | resource.Resource, output: int, property: int) -> QueryOutputProperty: ... + +class ConfigureOutputProperty(rq.Request): ... + +def configure_output_property(self: Display | resource.Resource, output: int, property: int) -> ConfigureOutputProperty: ... + +class ChangeOutputProperty(rq.Request): ... + +def change_output_property( + self: Display | resource.Resource, output: int, property: int, type: int, mode: int, value: Sequence[float] | Sequence[str] +) -> ChangeOutputProperty: ... + +class DeleteOutputProperty(rq.Request): ... + +def delete_output_property(self: Display | resource.Resource, output: int, property: int) -> DeleteOutputProperty: ... + +class GetOutputProperty(rq.ReplyRequest): ... + +def get_output_property( + self: Display | resource.Resource, + output: int, + property: int, + type: int, + long_offset: int, + long_length: int, + delete: bool = ..., + pending: bool = ..., +) -> GetOutputProperty: ... + +class CreateMode(rq.ReplyRequest): ... + +def create_mode(self: drawable.Window, mode: _RandRModeInfo13IntSequence, name: str) -> CreateMode: ... + +class DestroyMode(rq.Request): ... + +def destroy_mode(self: Display | resource.Resource, mode: int) -> DestroyMode: ... + +class AddOutputMode(rq.Request): ... + +def add_output_mode(self: Display | resource.Resource, output: int, mode: int) -> AddOutputMode: ... + +class DeleteOutputMode(rq.Request): ... + +def delete_output_mode(self: Display | resource.Resource, output: int, mode: int) -> DeleteOutputMode: ... + +class GetCrtcInfo(rq.ReplyRequest): ... + +def get_crtc_info(self: Display | resource.Resource, crtc: int, config_timestamp: int) -> GetCrtcInfo: ... + +class SetCrtcConfig(rq.ReplyRequest): ... + +def set_crtc_config( + self: Display | resource.Resource, + crtc: int, + config_timestamp: int, + x: int, + y: int, + mode: int, + rotation: int, + outputs: Sequence[int], + timestamp: int = ..., +) -> SetCrtcConfig: ... + +class GetCrtcGammaSize(rq.ReplyRequest): ... + +def get_crtc_gamma_size(self: Display | resource.Resource, crtc: int) -> GetCrtcGammaSize: ... + +class GetCrtcGamma(rq.ReplyRequest): ... + +def get_crtc_gamma(self: Display | resource.Resource, crtc: int) -> GetCrtcGamma: ... + +class SetCrtcGamma(rq.Request): ... + +def set_crtc_gamma( + self: Display | resource.Resource, crtc: int, size: int, red: Sequence[int], green: Sequence[int], blue: Sequence[int] +) -> SetCrtcGamma: ... + +class GetScreenResourcesCurrent(rq.ReplyRequest): ... + +def get_screen_resources_current(self: drawable.Window) -> GetScreenResourcesCurrent: ... + +class SetCrtcTransform(rq.Request): ... + +def set_crtc_transform(self: Display | resource.Resource, crtc: int, n_bytes_filter: Sequence[int]) -> SetCrtcTransform: ... + +class GetCrtcTransform(rq.ReplyRequest): ... + +def get_crtc_transform(self: Display | resource.Resource, crtc: int) -> GetCrtcTransform: ... + +class GetPanning(rq.ReplyRequest): ... + +def get_panning(self: Display | resource.Resource, crtc: int) -> GetPanning: ... + +class SetPanning(rq.ReplyRequest): ... + +def set_panning( + self: Display | resource.Resource, + crtc: int, + left: int, + top: int, + width: int, + height: int, + track_left: int, + track_top: int, + track_width: int, + track_height: int, + border_left: int, + border_top: int, + border_width: int, + border_height: int, + timestamp: int = ..., +) -> SetPanning: ... + +class SetOutputPrimary(rq.Request): ... 
+ +def set_output_primary(self: drawable.Window, output: int) -> SetOutputPrimary: ... + +class GetOutputPrimary(rq.ReplyRequest): ... + +def get_output_primary(self: drawable.Window) -> GetOutputPrimary: ... + +class GetMonitors(rq.ReplyRequest): ... + +def get_monitors(self: drawable.Window, is_active: bool = ...) -> GetMonitors: ... + +class SetMonitor(rq.Request): ... + +def set_monitor( + self: drawable.Window, monitor_info: tuple[int, bool, bool, Sequence[int], int, int, int, int, int] +) -> SetMonitor: ... + +class DeleteMonitor(rq.Request): ... + +def delete_monitor(self: Display | resource.Resource, name: str) -> DeleteMonitor: ... + +class ScreenChangeNotify(rq.Event): ... +class CrtcChangeNotify(rq.Event): ... +class OutputChangeNotify(rq.Event): ... +class OutputPropertyNotify(rq.Event): ... + +def init(disp: Display, info: request.QueryExtension) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/record.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/record.pyi new file mode 100644 index 00000000..bfbb971f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/record.pyi @@ -0,0 +1,113 @@ +from collections.abc import Callable, Sequence, Sized +from typing import Any, TypeVar +from typing_extensions import Literal + +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import display, rq +from Xlib.xobject import resource + +_T = TypeVar("_T") +_S = TypeVar("_S", bound=Sized) + +extname: str +FromServerTime: int +FromClientTime: int +FromClientSequence: int +CurrentClients: int +FutureClients: int +AllClients: int +FromServer: int +FromClient: int +ClientStarted: int +ClientDied: int +StartOfData: int +EndOfData: int +Record_Range8: rq.Struct +Record_Range16: rq.Struct +Record_ExtRange: rq.Struct +Record_Range: rq.Struct +Record_ClientInfo: rq.Struct + +class RawField(rq.ValueField): + structcode: None + def pack_value(self, val: _S) -> tuple[_S, int, None]: ... # type: ignore[override] + def parse_binary_value(self, data: _T, display: Unused, length: Unused, format: Unused) -> tuple[_T, Literal[""]]: ... # type: ignore[override] # See: https://github.com/python-xlib/python-xlib/pull/249 + +class GetVersion(rq.ReplyRequest): ... + +def get_version(self: Display | resource.Resource, major: int, minor: int) -> GetVersion: ... + +class CreateContext(rq.Request): ... + +def create_context( + self: Display | resource.Resource, + datum_flags: int, + clients: Sequence[int], + ranges: Sequence[ + tuple[ + tuple[int, int], + tuple[int, int], + tuple[int, int], + tuple[int, int], + tuple[int, int], + tuple[int, int], + tuple[int, int], + bool, + bool, + ] + ], +) -> int: ... + +class RegisterClients(rq.Request): ... + +def register_clients( + self: Display | resource.Resource, + context: int, + element_header: int, + clients: int, + ranges: Sequence[ + tuple[ + tuple[int, int], + tuple[int, int], + tuple[int, int], + tuple[int, int], + tuple[int, int], + tuple[int, int], + tuple[int, int], + bool, + bool, + ] + ], +) -> None: ... + +class UnregisterClients(rq.Request): ... + +def unregister_clients(self: Display | resource.Resource, context: int, clients: Sequence[int]) -> None: ... + +class GetContext(rq.ReplyRequest): ... + +def get_context(self: Display | resource.Resource, context: int) -> GetContext: ... 
+ +class EnableContext(rq.ReplyRequest): + def __init__( + self, + callback: Callable[[rq.DictWrapper | dict[str, Any]], Any], + display: display.Display, + defer: bool = ..., + *args: object | bool, + **keys: object | bool, + ) -> None: ... + +def enable_context( + self: Display | resource.Resource, context: int, callback: Callable[[rq.DictWrapper | dict[str, Any]], Any] +) -> None: ... + +class DisableContext(rq.Request): ... + +def disable_context(self: Display | resource.Resource, context: int) -> None: ... + +class FreeContext(rq.Request): ... + +def free_context(self: Display | resource.Resource, context: int) -> None: ... +def init(disp: Display, info: Unused) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/res.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/res.pyi new file mode 100644 index 00000000..48ecc7c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/res.pyi @@ -0,0 +1,61 @@ +from collections.abc import Sequence + +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import rq +from Xlib.xobject import resource + +RES_MAJOR_VERSION: int +RES_MINOR_VERSION: int +extname: str +ResQueryVersion: int +ResQueryClients: int +ResQueryClientResources: int +ResQueryClientPixmapBytes: int +ResQueryClientIds: int +ResQueryResourceBytes: int + +class QueryVersion(rq.ReplyRequest): ... + +def query_version(self: Display | resource.Resource, client_major: int = ..., client_minor: int = ...) -> QueryVersion: ... + +Client: rq.Struct + +class QueryClients(rq.ReplyRequest): ... + +def query_clients(self: Display | resource.Resource) -> QueryClients: ... + +Type: rq.Struct + +class QueryClientResources(rq.ReplyRequest): ... + +def query_client_resources(self: Display | resource.Resource, client: int) -> QueryClientResources: ... + +class QueryClientPixmapBytes(rq.ReplyRequest): ... + +def query_client_pixmap_bytes(self: Display | resource.Resource, client: int) -> QueryClientPixmapBytes: ... + +class SizeOf(rq.LengthOf): + item_size: int + def __init__(self, name: str | list[str] | tuple[str, ...], size: int, item_size: int) -> None: ... + def parse_value(self, length: int, display: Unused) -> int: ... # type: ignore[override] + +ClientXIDMask: int +LocalClientPIDMask: int +ClientIdSpec: rq.Struct +ClientIdValue: rq.Struct + +class QueryClientIds(rq.ReplyRequest): ... + +def query_client_ids(self: Display | resource.Resource, specs: Sequence[tuple[int, int]]) -> QueryClientIds: ... + +ResourceIdSpec: rq.Struct +ResourceSizeSpec: rq.Struct +ResourceSizeValue: rq.Struct + +class QueryResourceBytes(rq.ReplyRequest): ... + +def query_resource_bytes( + self: Display | resource.Resource, client: int, specs: Sequence[tuple[int, int]] +) -> QueryResourceBytes: ... +def init(disp: Display, info: Unused) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/screensaver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/screensaver.pyi new file mode 100644 index 00000000..9ccf5cbc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/screensaver.pyi @@ -0,0 +1,50 @@ +from Xlib._typing import ErrorHandler +from Xlib.display import Display +from Xlib.protocol import request, rq +from Xlib.xobject import drawable + +extname: str +NotifyMask: int +CycleMask: int +StateOff: int +StateOn: int +StateCycle: int +KindBlanked: int +KindInternal: int +KindExternal: int + +class QueryVersion(rq.ReplyRequest): ... + +def query_version(self: drawable.Drawable) -> QueryVersion: ... + +class QueryInfo(rq.ReplyRequest): ... + +def query_info(self: drawable.Drawable) -> QueryInfo: ... + +class SelectInput(rq.Request): ... + +def select_input(self: drawable.Drawable, mask: int) -> SelectInput: ... + +class SetAttributes(rq.Request): ... + +def set_attributes( + self: drawable.Drawable, + x: int, + y: int, + width: int, + height: int, + border_width: int, + window_class: int = ..., + depth: int = ..., + visual: int = ..., + onerror: ErrorHandler[object] | None = ..., + **keys: object, +) -> SetAttributes: ... + +class UnsetAttributes(rq.Request): ... + +def unset_attributes(self: drawable.Drawable, onerror: ErrorHandler[object] | None = ...) -> UnsetAttributes: ... + +class Notify(rq.Event): ... + +def init(disp: Display, info: request.QueryExtension) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/security.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/security.pyi new file mode 100644 index 00000000..93fd4b0b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/security.pyi @@ -0,0 +1,31 @@ +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import rq +from Xlib.xobject import resource + +extname: str +SecurityClientTrusted: int +SecurityClientUntrusted: int +SecurityAuthorizationRevokedMask: int +AUTHID = rq.Card32 + +class QueryVersion(rq.ReplyRequest): ... + +def query_version(self: Display | resource.Resource) -> QueryVersion: ... + +class SecurityGenerateAuthorization(rq.ReplyRequest): ... + +def generate_authorization( + self: Display | resource.Resource, + auth_proto: str, + auth_data: bytes | bytearray = ..., + timeout: int | None = ..., + trust_level: int | None = ..., + group: int | None = ..., + event_mask: int | None = ..., +) -> SecurityGenerateAuthorization: ... + +class SecurityRevokeAuthorization(rq.Request): ... + +def revoke_authorization(self: Display | resource.Resource, authid: int) -> SecurityRevokeAuthorization: ... +def init(disp: Display, info: Unused) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/shape.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/shape.pyi new file mode 100644 index 00000000..67d0ed77 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/shape.pyi @@ -0,0 +1,61 @@ +from collections.abc import Sequence + +from Xlib.display import Display +from Xlib.protocol import request, rq +from Xlib.protocol.structs import _Rectangle4IntSequence +from Xlib.xobject import drawable, resource + +extname: str +OP = rq.Card8 + +class SO: + Set: int + Union: int + Intersect: int + Subtract: int + Invert: int + +class SK: + Bounding: int + Clip: int + Input: int + +class KIND(rq.Set): + def __init__(self, name: str) -> None: ... + +class NotifyEventData(rq.Event): ... +class QueryVersion(rq.ReplyRequest): ... +class Rectangles(rq.Request): ... +class Mask(rq.Request): ... +class Combine(rq.Request): ... +class Offset(rq.Request): ... +class QueryExtents(rq.ReplyRequest): ... +class SelectInput(rq.Request): ... +class InputSelected(rq.ReplyRequest): ... +class GetRectangles(rq.ReplyRequest): ... + +class Event: + Notify: int + +def combine( + self: drawable.Window, operation: int, destination_kind: int, source_kind: int, x_offset: int, y_offset: int +) -> None: ... +def get_rectangles(self: drawable.Window, source_kind: int) -> GetRectangles: ... +def input_selected(self: drawable.Window) -> InputSelected: ... +def mask( + self: drawable.Window, operation: int, destination_kind: int, x_offset: int, y_offset: int, source_bitmap: int +) -> None: ... +def offset(self: drawable.Window, destination_kind: int, x_offset: int, y_offset: int) -> None: ... +def query_extents(self: drawable.Window) -> QueryExtents: ... +def query_version(self: Display | resource.Resource) -> QueryVersion: ... +def rectangles( + self: drawable.Window, + operation: int, + destination_kind: int, + ordering: int, + x_offset: int, + y_offset: int, + rectangles: Sequence[_Rectangle4IntSequence], +) -> None: ... +def select_input(self: drawable.Window, enable: int) -> None: ... +def init(disp: Display, info: request.QueryExtension) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xfixes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xfixes.pyi new file mode 100644 index 00000000..98509f7f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xfixes.pyi @@ -0,0 +1,48 @@ +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import request, rq +from Xlib.xobject import drawable, resource + +extname: str +XFixesSelectionNotify: int +XFixesCursorNotify: int +XFixesSetSelectionOwnerNotifyMask: int +XFixesSelectionWindowDestroyNotifyMask: int +XFixesSelectionClientCloseNotifyMask: int +XFixesDisplayCursorNotifyMask: int +XFixesSetSelectionOwnerNotify: int +XFixesSelectionWindowDestroyNotify: int +XFixesSelectionClientCloseNotify: int +XFixesDisplayCursorNotify: int + +class QueryVersion(rq.ReplyRequest): ... + +def query_version(self: Display | resource.Resource) -> QueryVersion: ... + +class HideCursor(rq.Request): ... + +def hide_cursor(self: drawable.Window) -> None: ... + +class ShowCursor(rq.Request): ... + +def show_cursor(self: drawable.Window) -> None: ... 
+ +class SelectSelectionInput(rq.Request): ... + +def select_selection_input(self: Display | resource.Resource, window: int, selection: int, mask: int) -> SelectSelectionInput: ... + +class SelectionNotify(rq.Event): ... +class SetSelectionOwnerNotify(SelectionNotify): ... +class SelectionWindowDestroyNotify(SelectionNotify): ... +class SelectionClientCloseNotify(SelectionNotify): ... +class SelectCursorInput(rq.Request): ... + +def select_cursor_input(self: Display | resource.Resource, window: int, mask: int) -> SelectCursorInput: ... + +class GetCursorImage(rq.ReplyRequest): ... + +def get_cursor_image(self: Display | resource.Resource, window: Unused) -> GetCursorImage: ... + +class DisplayCursorNotify(rq.Event): ... + +def init(disp: Display, info: request.QueryExtension) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xinerama.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xinerama.pyi new file mode 100644 index 00000000..004f7cdf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xinerama.pyi @@ -0,0 +1,35 @@ +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import rq +from Xlib.xobject import drawable, resource + +extname: str + +class QueryVersion(rq.ReplyRequest): ... + +def query_version(self: Display | resource.Resource) -> QueryVersion: ... + +class GetState(rq.ReplyRequest): ... + +def get_state(self: drawable.Window) -> GetState: ... + +class GetScreenCount(rq.ReplyRequest): ... + +def get_screen_count(self: drawable.Window) -> GetScreenCount: ... + +class GetScreenSize(rq.ReplyRequest): ... + +def get_screen_size(self: drawable.Window, screen_no: int) -> GetScreenSize: ... + +class IsActive(rq.ReplyRequest): ... + +def is_active(self: Display | resource.Resource) -> int: ... + +class QueryScreens(rq.ReplyRequest): ... + +def query_screens(self: Display | resource.Resource) -> QueryScreens: ... + +class GetInfo(rq.ReplyRequest): ... + +def get_info(self: Display | resource.Resource, visual: int) -> None: ... +def init(disp: Display, info: Unused) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xinput.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xinput.pyi new file mode 100644 index 00000000..ef3a5486 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xinput.pyi @@ -0,0 +1,252 @@ +from _typeshed import ReadableBuffer, SliceableBuffer +from collections.abc import Iterable, Sequence +from typing import SupportsFloat, TypeVar +from typing_extensions import SupportsIndex, TypeAlias + +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import display, request, rq +from Xlib.xobject import drawable, resource + +_T = TypeVar("_T") +_Floatable: TypeAlias = SupportsFloat | SupportsIndex | str | ReadableBuffer + +extname: str +PropertyDeleted: int +PropertyCreated: int +PropertyModified: int +NotifyNormal: int +NotifyGrab: int +NotifyUngrab: int +NotifyWhileGrabbed: int +NotifyPassiveGrab: int +NotifyPassiveUngrab: int +NotifyAncestor: int +NotifyVirtual: int +NotifyInferior: int +NotifyNonlinear: int +NotifyNonlinearVirtual: int +NotifyPointer: int +NotifyPointerRoot: int +NotifyDetailNone: int +GrabtypeButton: int +GrabtypeKeycode: int +GrabtypeEnter: int +GrabtypeFocusIn: int +GrabtypeTouchBegin: int +AnyModifier: int +AnyButton: int +AnyKeycode: int +AsyncDevice: int +SyncDevice: int +ReplayDevice: int +AsyncPairedDevice: int +AsyncPair: int +SyncPair: int +SlaveSwitch: int +DeviceChange: int +MasterAdded: int +MasterRemoved: int +SlaveAdded: int +SlaveRemoved: int +SlaveAttached: int +SlaveDetached: int +DeviceEnabled: int +DeviceDisabled: int +AddMaster: int +RemoveMaster: int +AttachSlave: int +DetachSlave: int +AttachToMaster: int +Floating: int +ModeRelative: int +ModeAbsolute: int +MasterPointer: int +MasterKeyboard: int +SlavePointer: int +SlaveKeyboard: int +FloatingSlave: int +KeyClass: int +ButtonClass: int +ValuatorClass: int +ScrollClass: int +TouchClass: int +KeyRepeat: int +AllDevices: int +AllMasterDevices: int +DeviceChanged: int +KeyPress: int +KeyRelease: int +ButtonPress: int +ButtonRelease: int +Motion: int +Enter: int +Leave: int +FocusIn: int +FocusOut: int +HierarchyChanged: int +PropertyEvent: int +RawKeyPress: int +RawKeyRelease: int +RawButtonPress: int +RawButtonRelease: int +RawMotion: int +DeviceChangedMask: int +KeyPressMask: int +KeyReleaseMask: int +ButtonPressMask: int +ButtonReleaseMask: int +MotionMask: int +EnterMask: int +LeaveMask: int +FocusInMask: int +FocusOutMask: int +HierarchyChangedMask: int +PropertyEventMask: int +RawKeyPressMask: int +RawKeyReleaseMask: int +RawButtonPressMask: int +RawButtonReleaseMask: int +RawMotionMask: int +GrabModeSync: int +GrabModeAsync: int +GrabModeTouch: int +DEVICEID = rq.Card16 +DEVICE = rq.Card16 +DEVICEUSE = rq.Card8 +PROPERTY_TYPE_FLOAT: str + +# ignore[override] because of Liskov substitution principle violations +class FP1616(rq.Int32): + def check_value(self, value: float) -> int: ... # type: ignore[override] + def parse_value(self, value: _Floatable, display: Unused) -> float: ... # type: ignore[override] + +class FP3232(rq.ValueField): + structcode: str + def check_value(self, value: _T) -> _T: ... # type: ignore[override] + def parse_value(self, value: tuple[_Floatable, _Floatable], display: Unused) -> float: ... # type: ignore[override] + +class XIQueryVersion(rq.ReplyRequest): ... 
+ +def query_version(self: Display | resource.Resource) -> XIQueryVersion: ... + +class Mask(rq.List): + def __init__(self, name: str) -> None: ... + def pack_value(self, val: int | Iterable[int]) -> tuple[bytes, int, None]: ... # type: ignore[override] + +EventMask: rq.Struct + +class XISelectEvents(rq.Request): ... + +def select_events(self: drawable.Window, event_masks: Sequence[tuple[int, Sequence[int]]]) -> XISelectEvents: ... + +AnyInfo: rq.Struct + +class ButtonMask: + def __init__(self, value: int, length: int) -> None: ... + def __getitem__(self, key: int) -> int: ... + def __len__(self) -> int: ... + +class ButtonState(rq.ValueField): + structcode: None + def __init__(self, name: str) -> None: ... + def parse_binary_value( # type: ignore[override] # length: None will error. See: https://github.com/python-xlib/python-xlib/pull/248 + self, data: SliceableBuffer, display: Unused, length: int, fmt: Unused + ) -> tuple[ButtonMask, SliceableBuffer]: ... + +ButtonInfo: rq.Struct +KeyInfo: rq.Struct +ValuatorInfo: rq.Struct +ScrollInfo: rq.Struct +TouchInfo: rq.Struct +INFO_CLASSES: dict[int, rq.Struct] + +class ClassInfoClass: + structcode: None + def parse_binary(self, data: SliceableBuffer, display: display.Display | None) -> tuple[rq.DictWrapper, SliceableBuffer]: ... + +ClassInfo: ClassInfoClass +DeviceInfo: rq.Struct + +class XIQueryDevice(rq.ReplyRequest): ... + +def query_device(self: Display | resource.Resource, deviceid: int) -> XIQueryDevice: ... + +class XIListProperties(rq.ReplyRequest): ... + +def list_device_properties(self: Display | resource.Resource, deviceid: int) -> XIListProperties: ... + +class XIGetProperty(rq.ReplyRequest): ... + +def get_device_property( + self: Display | resource.Resource, deviceid: int, property: int, type: int, offset: int, length: int, delete: int = ... +) -> XIGetProperty: ... + +class XIChangeProperty(rq.Request): ... + +def change_device_property( + self: Display | resource.Resource, deviceid: int, property: int, type: int, mode: int, value: Sequence[float] | Sequence[str] +) -> XIChangeProperty: ... + +class XIDeleteProperty(rq.Request): ... + +def delete_device_property(self: Display | resource.Resource, deviceid: int, property: int) -> XIDeleteProperty: ... + +class XIGrabDevice(rq.ReplyRequest): ... + +def grab_device( + self: drawable.Window, + deviceid: int, + time: int, + grab_mode: int, + paired_device_mode: int, + owner_events: bool, + event_mask: Sequence[int], +) -> XIGrabDevice: ... + +class XIUngrabDevice(rq.Request): ... + +def ungrab_device(self: Display | resource.Resource, deviceid: int, time: int) -> XIUngrabDevice: ... + +class XIPassiveGrabDevice(rq.ReplyRequest): ... + +def passive_grab_device( + self: drawable.Window, + deviceid: int, + time: int, + detail: int, + grab_type: int, + grab_mode: int, + paired_device_mode: int, + owner_events: bool, + event_mask: Sequence[int], + modifiers: Sequence[int], +) -> XIPassiveGrabDevice: ... +def grab_keycode( + self: drawable.Window, + deviceid: int, + time: int, + keycode: int, + grab_mode: int, + paired_device_mode: int, + owner_events: bool, + event_mask: Sequence[int], + modifiers: Sequence[int], +) -> XIPassiveGrabDevice: ... + +class XIPassiveUngrabDevice(rq.Request): ... + +def passive_ungrab_device( + self: drawable.Window, deviceid: int, detail: int, grab_type: int, modifiers: Sequence[int] +) -> XIPassiveUngrabDevice: ... +def ungrab_keycode(self: drawable.Window, deviceid: int, keycode: int, modifiers: Sequence[int]) -> XIPassiveUngrabDevice: ... 
+ +HierarchyInfo: rq.Struct +HierarchyEventData: rq.Struct +ModifierInfo: rq.Struct +GroupInfo: rq.Struct +DeviceEventData: rq.Struct +DeviceChangedEventData: rq.Struct +PropertyEventData: rq.Struct + +def init(disp: Display, info: request.QueryExtension) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xtest.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xtest.pyi new file mode 100644 index 00000000..d2505b77 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/ext/xtest.pyi @@ -0,0 +1,32 @@ +from Xlib._typing import Unused +from Xlib.display import Display +from Xlib.protocol import rq +from Xlib.xobject import resource + +extname: str +CurrentCursor: int + +class GetVersion(rq.ReplyRequest): ... + +def get_version(self: Display | resource.Resource, major: int, minor: int) -> GetVersion: ... + +class CompareCursor(rq.ReplyRequest): ... + +def compare_cursor(self: Display | resource.Resource, cursor: int) -> int: ... + +class FakeInput(rq.Request): ... + +def fake_input( + self: Display | resource.Resource, + event_type: int, + detail: int = ..., + time: int = ..., + root: int = ..., + x: int = ..., + y: int = ..., +) -> None: ... + +class GrabControl(rq.Request): ... + +def grab_control(self: Display | resource.Resource, impervious: bool) -> None: ... +def init(disp: Display, info: Unused) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/__init__.pyi new file mode 100644 index 00000000..ca535d0e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/__init__.pyi @@ -0,0 +1,43 @@ +from Xlib.keysymdef import ( + apl as apl, + arabic as arabic, + cyrillic as cyrillic, + greek as greek, + hebrew as hebrew, + katakana as katakana, + korean as korean, + latin1 as latin1, + latin2 as latin2, + latin3 as latin3, + latin4 as latin4, + miscellany as miscellany, + publishing as publishing, + special as special, + technical as technical, + thai as thai, + xf86 as xf86, + xk3270 as xk3270, + xkb as xkb, +) + +__all__ = [ + "apl", + "arabic", + "cyrillic", + "greek", + "hebrew", + "katakana", + "korean", + "latin1", + "latin2", + "latin3", + "latin4", + "miscellany", + "publishing", + "special", + "technical", + "thai", + "xf86", + "xk3270", + "xkb", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/apl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/apl.pyi new file mode 100644 index 00000000..a4404d76 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/apl.pyi @@ -0,0 +1,19 @@ +XK_leftcaret: int +XK_rightcaret: int +XK_downcaret: int +XK_upcaret: int +XK_overbar: int +XK_downtack: int +XK_upshoe: int +XK_downstile: int +XK_underbar: int +XK_jot: int +XK_quad: int +XK_uptack: int +XK_circle: int +XK_upstile: int +XK_downshoe: int +XK_rightshoe: int +XK_leftshoe: int +XK_lefttack: int +XK_righttack: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/arabic.pyi 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/arabic.pyi new file mode 100644 index 00000000..ab9a7b0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/arabic.pyi @@ -0,0 +1,50 @@ +XK_Arabic_comma: int +XK_Arabic_semicolon: int +XK_Arabic_question_mark: int +XK_Arabic_hamza: int +XK_Arabic_maddaonalef: int +XK_Arabic_hamzaonalef: int +XK_Arabic_hamzaonwaw: int +XK_Arabic_hamzaunderalef: int +XK_Arabic_hamzaonyeh: int +XK_Arabic_alef: int +XK_Arabic_beh: int +XK_Arabic_tehmarbuta: int +XK_Arabic_teh: int +XK_Arabic_theh: int +XK_Arabic_jeem: int +XK_Arabic_hah: int +XK_Arabic_khah: int +XK_Arabic_dal: int +XK_Arabic_thal: int +XK_Arabic_ra: int +XK_Arabic_zain: int +XK_Arabic_seen: int +XK_Arabic_sheen: int +XK_Arabic_sad: int +XK_Arabic_dad: int +XK_Arabic_tah: int +XK_Arabic_zah: int +XK_Arabic_ain: int +XK_Arabic_ghain: int +XK_Arabic_tatweel: int +XK_Arabic_feh: int +XK_Arabic_qaf: int +XK_Arabic_kaf: int +XK_Arabic_lam: int +XK_Arabic_meem: int +XK_Arabic_noon: int +XK_Arabic_ha: int +XK_Arabic_heh: int +XK_Arabic_waw: int +XK_Arabic_alefmaksura: int +XK_Arabic_yeh: int +XK_Arabic_fathatan: int +XK_Arabic_dammatan: int +XK_Arabic_kasratan: int +XK_Arabic_fatha: int +XK_Arabic_damma: int +XK_Arabic_kasra: int +XK_Arabic_shadda: int +XK_Arabic_sukun: int +XK_Arabic_switch: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/cyrillic.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/cyrillic.pyi new file mode 100644 index 00000000..a4accc99 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/cyrillic.pyi @@ -0,0 +1,107 @@ +XK_Serbian_dje: int +XK_Macedonia_gje: int +XK_Cyrillic_io: int +XK_Ukrainian_ie: int +XK_Ukranian_je: int +XK_Macedonia_dse: int +XK_Ukrainian_i: int +XK_Ukranian_i: int +XK_Ukrainian_yi: int +XK_Ukranian_yi: int +XK_Cyrillic_je: int +XK_Serbian_je: int +XK_Cyrillic_lje: int +XK_Serbian_lje: int +XK_Cyrillic_nje: int +XK_Serbian_nje: int +XK_Serbian_tshe: int +XK_Macedonia_kje: int +XK_Byelorussian_shortu: int +XK_Cyrillic_dzhe: int +XK_Serbian_dze: int +XK_numerosign: int +XK_Serbian_DJE: int +XK_Macedonia_GJE: int +XK_Cyrillic_IO: int +XK_Ukrainian_IE: int +XK_Ukranian_JE: int +XK_Macedonia_DSE: int +XK_Ukrainian_I: int +XK_Ukranian_I: int +XK_Ukrainian_YI: int +XK_Ukranian_YI: int +XK_Cyrillic_JE: int +XK_Serbian_JE: int +XK_Cyrillic_LJE: int +XK_Serbian_LJE: int +XK_Cyrillic_NJE: int +XK_Serbian_NJE: int +XK_Serbian_TSHE: int +XK_Macedonia_KJE: int +XK_Byelorussian_SHORTU: int +XK_Cyrillic_DZHE: int +XK_Serbian_DZE: int +XK_Cyrillic_yu: int +XK_Cyrillic_a: int +XK_Cyrillic_be: int +XK_Cyrillic_tse: int +XK_Cyrillic_de: int +XK_Cyrillic_ie: int +XK_Cyrillic_ef: int +XK_Cyrillic_ghe: int +XK_Cyrillic_ha: int +XK_Cyrillic_i: int +XK_Cyrillic_shorti: int +XK_Cyrillic_ka: int +XK_Cyrillic_el: int +XK_Cyrillic_em: int +XK_Cyrillic_en: int +XK_Cyrillic_o: int +XK_Cyrillic_pe: int +XK_Cyrillic_ya: int +XK_Cyrillic_er: int +XK_Cyrillic_es: int +XK_Cyrillic_te: int +XK_Cyrillic_u: int +XK_Cyrillic_zhe: int +XK_Cyrillic_ve: int +XK_Cyrillic_softsign: int +XK_Cyrillic_yeru: int +XK_Cyrillic_ze: int +XK_Cyrillic_sha: int +XK_Cyrillic_e: int +XK_Cyrillic_shcha: int +XK_Cyrillic_che: int +XK_Cyrillic_hardsign: int +XK_Cyrillic_YU: int +XK_Cyrillic_A: int 
+XK_Cyrillic_BE: int +XK_Cyrillic_TSE: int +XK_Cyrillic_DE: int +XK_Cyrillic_IE: int +XK_Cyrillic_EF: int +XK_Cyrillic_GHE: int +XK_Cyrillic_HA: int +XK_Cyrillic_I: int +XK_Cyrillic_SHORTI: int +XK_Cyrillic_KA: int +XK_Cyrillic_EL: int +XK_Cyrillic_EM: int +XK_Cyrillic_EN: int +XK_Cyrillic_O: int +XK_Cyrillic_PE: int +XK_Cyrillic_YA: int +XK_Cyrillic_ER: int +XK_Cyrillic_ES: int +XK_Cyrillic_TE: int +XK_Cyrillic_U: int +XK_Cyrillic_ZHE: int +XK_Cyrillic_VE: int +XK_Cyrillic_SOFTSIGN: int +XK_Cyrillic_YERU: int +XK_Cyrillic_ZE: int +XK_Cyrillic_SHA: int +XK_Cyrillic_E: int +XK_Cyrillic_SHCHA: int +XK_Cyrillic_CHE: int +XK_Cyrillic_HARDSIGN: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/greek.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/greek.pyi new file mode 100644 index 00000000..7460ea2e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/greek.pyi @@ -0,0 +1,74 @@ +XK_Greek_ALPHAaccent: int +XK_Greek_EPSILONaccent: int +XK_Greek_ETAaccent: int +XK_Greek_IOTAaccent: int +XK_Greek_IOTAdiaeresis: int +XK_Greek_OMICRONaccent: int +XK_Greek_UPSILONaccent: int +XK_Greek_UPSILONdieresis: int +XK_Greek_OMEGAaccent: int +XK_Greek_accentdieresis: int +XK_Greek_horizbar: int +XK_Greek_alphaaccent: int +XK_Greek_epsilonaccent: int +XK_Greek_etaaccent: int +XK_Greek_iotaaccent: int +XK_Greek_iotadieresis: int +XK_Greek_iotaaccentdieresis: int +XK_Greek_omicronaccent: int +XK_Greek_upsilonaccent: int +XK_Greek_upsilondieresis: int +XK_Greek_upsilonaccentdieresis: int +XK_Greek_omegaaccent: int +XK_Greek_ALPHA: int +XK_Greek_BETA: int +XK_Greek_GAMMA: int +XK_Greek_DELTA: int +XK_Greek_EPSILON: int +XK_Greek_ZETA: int +XK_Greek_ETA: int +XK_Greek_THETA: int +XK_Greek_IOTA: int +XK_Greek_KAPPA: int +XK_Greek_LAMDA: int +XK_Greek_LAMBDA: int +XK_Greek_MU: int +XK_Greek_NU: int +XK_Greek_XI: int +XK_Greek_OMICRON: int +XK_Greek_PI: int +XK_Greek_RHO: int +XK_Greek_SIGMA: int +XK_Greek_TAU: int +XK_Greek_UPSILON: int +XK_Greek_PHI: int +XK_Greek_CHI: int +XK_Greek_PSI: int +XK_Greek_OMEGA: int +XK_Greek_alpha: int +XK_Greek_beta: int +XK_Greek_gamma: int +XK_Greek_delta: int +XK_Greek_epsilon: int +XK_Greek_zeta: int +XK_Greek_eta: int +XK_Greek_theta: int +XK_Greek_iota: int +XK_Greek_kappa: int +XK_Greek_lamda: int +XK_Greek_lambda: int +XK_Greek_mu: int +XK_Greek_nu: int +XK_Greek_xi: int +XK_Greek_omicron: int +XK_Greek_pi: int +XK_Greek_rho: int +XK_Greek_sigma: int +XK_Greek_finalsmallsigma: int +XK_Greek_tau: int +XK_Greek_upsilon: int +XK_Greek_phi: int +XK_Greek_chi: int +XK_Greek_psi: int +XK_Greek_omega: int +XK_Greek_switch: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/hebrew.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/hebrew.pyi new file mode 100644 index 00000000..5ad28bf8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/hebrew.pyi @@ -0,0 +1,40 @@ +XK_hebrew_doublelowline: int +XK_hebrew_aleph: int +XK_hebrew_bet: int +XK_hebrew_beth: int +XK_hebrew_gimel: int +XK_hebrew_gimmel: int +XK_hebrew_dalet: int +XK_hebrew_daleth: int +XK_hebrew_he: int +XK_hebrew_waw: int +XK_hebrew_zain: int +XK_hebrew_zayin: int +XK_hebrew_chet: int +XK_hebrew_het: int +XK_hebrew_tet: int +XK_hebrew_teth: int 
+XK_hebrew_yod: int +XK_hebrew_finalkaph: int +XK_hebrew_kaph: int +XK_hebrew_lamed: int +XK_hebrew_finalmem: int +XK_hebrew_mem: int +XK_hebrew_finalnun: int +XK_hebrew_nun: int +XK_hebrew_samech: int +XK_hebrew_samekh: int +XK_hebrew_ayin: int +XK_hebrew_finalpe: int +XK_hebrew_pe: int +XK_hebrew_finalzade: int +XK_hebrew_finalzadi: int +XK_hebrew_zade: int +XK_hebrew_zadi: int +XK_hebrew_qoph: int +XK_hebrew_kuf: int +XK_hebrew_resh: int +XK_hebrew_shin: int +XK_hebrew_taw: int +XK_hebrew_taf: int +XK_Hebrew_switch: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/katakana.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/katakana.pyi new file mode 100644 index 00000000..2480a14c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/katakana.pyi @@ -0,0 +1,70 @@ +XK_overline: int +XK_kana_fullstop: int +XK_kana_openingbracket: int +XK_kana_closingbracket: int +XK_kana_comma: int +XK_kana_conjunctive: int +XK_kana_middledot: int +XK_kana_WO: int +XK_kana_a: int +XK_kana_i: int +XK_kana_u: int +XK_kana_e: int +XK_kana_o: int +XK_kana_ya: int +XK_kana_yu: int +XK_kana_yo: int +XK_kana_tsu: int +XK_kana_tu: int +XK_prolongedsound: int +XK_kana_A: int +XK_kana_I: int +XK_kana_U: int +XK_kana_E: int +XK_kana_O: int +XK_kana_KA: int +XK_kana_KI: int +XK_kana_KU: int +XK_kana_KE: int +XK_kana_KO: int +XK_kana_SA: int +XK_kana_SHI: int +XK_kana_SU: int +XK_kana_SE: int +XK_kana_SO: int +XK_kana_TA: int +XK_kana_CHI: int +XK_kana_TI: int +XK_kana_TSU: int +XK_kana_TU: int +XK_kana_TE: int +XK_kana_TO: int +XK_kana_NA: int +XK_kana_NI: int +XK_kana_NU: int +XK_kana_NE: int +XK_kana_NO: int +XK_kana_HA: int +XK_kana_HI: int +XK_kana_FU: int +XK_kana_HU: int +XK_kana_HE: int +XK_kana_HO: int +XK_kana_MA: int +XK_kana_MI: int +XK_kana_MU: int +XK_kana_ME: int +XK_kana_MO: int +XK_kana_YA: int +XK_kana_YU: int +XK_kana_YO: int +XK_kana_RA: int +XK_kana_RI: int +XK_kana_RU: int +XK_kana_RE: int +XK_kana_RO: int +XK_kana_WA: int +XK_kana_N: int +XK_voicedsound: int +XK_semivoicedsound: int +XK_kana_switch: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/korean.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/korean.pyi new file mode 100644 index 00000000..c3bb3127 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/korean.pyi @@ -0,0 +1,107 @@ +XK_Hangul: int +XK_Hangul_Start: int +XK_Hangul_End: int +XK_Hangul_Hanja: int +XK_Hangul_Jamo: int +XK_Hangul_Romaja: int +XK_Hangul_Codeinput: int +XK_Hangul_Jeonja: int +XK_Hangul_Banja: int +XK_Hangul_PreHanja: int +XK_Hangul_PostHanja: int +XK_Hangul_SingleCandidate: int +XK_Hangul_MultipleCandidate: int +XK_Hangul_PreviousCandidate: int +XK_Hangul_Special: int +XK_Hangul_switch: int +XK_Hangul_Kiyeog: int +XK_Hangul_SsangKiyeog: int +XK_Hangul_KiyeogSios: int +XK_Hangul_Nieun: int +XK_Hangul_NieunJieuj: int +XK_Hangul_NieunHieuh: int +XK_Hangul_Dikeud: int +XK_Hangul_SsangDikeud: int +XK_Hangul_Rieul: int +XK_Hangul_RieulKiyeog: int +XK_Hangul_RieulMieum: int +XK_Hangul_RieulPieub: int +XK_Hangul_RieulSios: int +XK_Hangul_RieulTieut: int +XK_Hangul_RieulPhieuf: int +XK_Hangul_RieulHieuh: int +XK_Hangul_Mieum: int +XK_Hangul_Pieub: int +XK_Hangul_SsangPieub: int 
+XK_Hangul_PieubSios: int +XK_Hangul_Sios: int +XK_Hangul_SsangSios: int +XK_Hangul_Ieung: int +XK_Hangul_Jieuj: int +XK_Hangul_SsangJieuj: int +XK_Hangul_Cieuc: int +XK_Hangul_Khieuq: int +XK_Hangul_Tieut: int +XK_Hangul_Phieuf: int +XK_Hangul_Hieuh: int +XK_Hangul_A: int +XK_Hangul_AE: int +XK_Hangul_YA: int +XK_Hangul_YAE: int +XK_Hangul_EO: int +XK_Hangul_E: int +XK_Hangul_YEO: int +XK_Hangul_YE: int +XK_Hangul_O: int +XK_Hangul_WA: int +XK_Hangul_WAE: int +XK_Hangul_OE: int +XK_Hangul_YO: int +XK_Hangul_U: int +XK_Hangul_WEO: int +XK_Hangul_WE: int +XK_Hangul_WI: int +XK_Hangul_YU: int +XK_Hangul_EU: int +XK_Hangul_YI: int +XK_Hangul_I: int +XK_Hangul_J_Kiyeog: int +XK_Hangul_J_SsangKiyeog: int +XK_Hangul_J_KiyeogSios: int +XK_Hangul_J_Nieun: int +XK_Hangul_J_NieunJieuj: int +XK_Hangul_J_NieunHieuh: int +XK_Hangul_J_Dikeud: int +XK_Hangul_J_Rieul: int +XK_Hangul_J_RieulKiyeog: int +XK_Hangul_J_RieulMieum: int +XK_Hangul_J_RieulPieub: int +XK_Hangul_J_RieulSios: int +XK_Hangul_J_RieulTieut: int +XK_Hangul_J_RieulPhieuf: int +XK_Hangul_J_RieulHieuh: int +XK_Hangul_J_Mieum: int +XK_Hangul_J_Pieub: int +XK_Hangul_J_PieubSios: int +XK_Hangul_J_Sios: int +XK_Hangul_J_SsangSios: int +XK_Hangul_J_Ieung: int +XK_Hangul_J_Jieuj: int +XK_Hangul_J_Cieuc: int +XK_Hangul_J_Khieuq: int +XK_Hangul_J_Tieut: int +XK_Hangul_J_Phieuf: int +XK_Hangul_J_Hieuh: int +XK_Hangul_RieulYeorinHieuh: int +XK_Hangul_SunkyeongeumMieum: int +XK_Hangul_SunkyeongeumPieub: int +XK_Hangul_PanSios: int +XK_Hangul_KkogjiDalrinIeung: int +XK_Hangul_SunkyeongeumPhieuf: int +XK_Hangul_YeorinHieuh: int +XK_Hangul_AraeA: int +XK_Hangul_AraeAE: int +XK_Hangul_J_PanSios: int +XK_Hangul_J_KkogjiDalrinIeung: int +XK_Hangul_J_YeorinHieuh: int +XK_Korean_Won: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin1.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin1.pyi new file mode 100644 index 00000000..6a80d591 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin1.pyi @@ -0,0 +1,195 @@ +XK_space: int +XK_exclam: int +XK_quotedbl: int +XK_numbersign: int +XK_dollar: int +XK_percent: int +XK_ampersand: int +XK_apostrophe: int +XK_quoteright: int +XK_parenleft: int +XK_parenright: int +XK_asterisk: int +XK_plus: int +XK_comma: int +XK_minus: int +XK_period: int +XK_slash: int +XK_0: int +XK_1: int +XK_2: int +XK_3: int +XK_4: int +XK_5: int +XK_6: int +XK_7: int +XK_8: int +XK_9: int +XK_colon: int +XK_semicolon: int +XK_less: int +XK_equal: int +XK_greater: int +XK_question: int +XK_at: int +XK_A: int +XK_B: int +XK_C: int +XK_D: int +XK_E: int +XK_F: int +XK_G: int +XK_H: int +XK_I: int +XK_J: int +XK_K: int +XK_L: int +XK_M: int +XK_N: int +XK_O: int +XK_P: int +XK_Q: int +XK_R: int +XK_S: int +XK_T: int +XK_U: int +XK_V: int +XK_W: int +XK_X: int +XK_Y: int +XK_Z: int +XK_bracketleft: int +XK_backslash: int +XK_bracketright: int +XK_asciicircum: int +XK_underscore: int +XK_grave: int +XK_quoteleft: int +XK_a: int +XK_b: int +XK_c: int +XK_d: int +XK_e: int +XK_f: int +XK_g: int +XK_h: int +XK_i: int +XK_j: int +XK_k: int +XK_l: int +XK_m: int +XK_n: int +XK_o: int +XK_p: int +XK_q: int +XK_r: int +XK_s: int +XK_t: int +XK_u: int +XK_v: int +XK_w: int +XK_x: int +XK_y: int +XK_z: int +XK_braceleft: int +XK_bar: int +XK_braceright: int +XK_asciitilde: int +XK_nobreakspace: int +XK_exclamdown: int +XK_cent: int +XK_sterling: int 
+XK_currency: int +XK_yen: int +XK_brokenbar: int +XK_section: int +XK_diaeresis: int +XK_copyright: int +XK_ordfeminine: int +XK_guillemotleft: int +XK_notsign: int +XK_hyphen: int +XK_registered: int +XK_macron: int +XK_degree: int +XK_plusminus: int +XK_twosuperior: int +XK_threesuperior: int +XK_acute: int +XK_mu: int +XK_paragraph: int +XK_periodcentered: int +XK_cedilla: int +XK_onesuperior: int +XK_masculine: int +XK_guillemotright: int +XK_onequarter: int +XK_onehalf: int +XK_threequarters: int +XK_questiondown: int +XK_Agrave: int +XK_Aacute: int +XK_Acircumflex: int +XK_Atilde: int +XK_Adiaeresis: int +XK_Aring: int +XK_AE: int +XK_Ccedilla: int +XK_Egrave: int +XK_Eacute: int +XK_Ecircumflex: int +XK_Ediaeresis: int +XK_Igrave: int +XK_Iacute: int +XK_Icircumflex: int +XK_Idiaeresis: int +XK_ETH: int +XK_Eth: int +XK_Ntilde: int +XK_Ograve: int +XK_Oacute: int +XK_Ocircumflex: int +XK_Otilde: int +XK_Odiaeresis: int +XK_multiply: int +XK_Ooblique: int +XK_Ugrave: int +XK_Uacute: int +XK_Ucircumflex: int +XK_Udiaeresis: int +XK_Yacute: int +XK_THORN: int +XK_Thorn: int +XK_ssharp: int +XK_agrave: int +XK_aacute: int +XK_acircumflex: int +XK_atilde: int +XK_adiaeresis: int +XK_aring: int +XK_ae: int +XK_ccedilla: int +XK_egrave: int +XK_eacute: int +XK_ecircumflex: int +XK_ediaeresis: int +XK_igrave: int +XK_iacute: int +XK_icircumflex: int +XK_idiaeresis: int +XK_eth: int +XK_ntilde: int +XK_ograve: int +XK_oacute: int +XK_ocircumflex: int +XK_otilde: int +XK_odiaeresis: int +XK_division: int +XK_oslash: int +XK_ugrave: int +XK_uacute: int +XK_ucircumflex: int +XK_udiaeresis: int +XK_yacute: int +XK_thorn: int +XK_ydiaeresis: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin2.pyi new file mode 100644 index 00000000..308f7277 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin2.pyi @@ -0,0 +1,57 @@ +XK_Aogonek: int +XK_breve: int +XK_Lstroke: int +XK_Lcaron: int +XK_Sacute: int +XK_Scaron: int +XK_Scedilla: int +XK_Tcaron: int +XK_Zacute: int +XK_Zcaron: int +XK_Zabovedot: int +XK_aogonek: int +XK_ogonek: int +XK_lstroke: int +XK_lcaron: int +XK_sacute: int +XK_caron: int +XK_scaron: int +XK_scedilla: int +XK_tcaron: int +XK_zacute: int +XK_doubleacute: int +XK_zcaron: int +XK_zabovedot: int +XK_Racute: int +XK_Abreve: int +XK_Lacute: int +XK_Cacute: int +XK_Ccaron: int +XK_Eogonek: int +XK_Ecaron: int +XK_Dcaron: int +XK_Dstroke: int +XK_Nacute: int +XK_Ncaron: int +XK_Odoubleacute: int +XK_Rcaron: int +XK_Uring: int +XK_Udoubleacute: int +XK_Tcedilla: int +XK_racute: int +XK_abreve: int +XK_lacute: int +XK_cacute: int +XK_ccaron: int +XK_eogonek: int +XK_ecaron: int +XK_dcaron: int +XK_dstroke: int +XK_nacute: int +XK_ncaron: int +XK_odoubleacute: int +XK_udoubleacute: int +XK_rcaron: int +XK_uring: int +XK_tcedilla: int +XK_abovedot: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin3.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin3.pyi new file mode 100644 index 00000000..dd803081 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin3.pyi @@ -0,0 +1,22 @@ +XK_Hstroke: int +XK_Hcircumflex: int +XK_Iabovedot: int 
+XK_Gbreve: int +XK_Jcircumflex: int +XK_hstroke: int +XK_hcircumflex: int +XK_idotless: int +XK_gbreve: int +XK_jcircumflex: int +XK_Cabovedot: int +XK_Ccircumflex: int +XK_Gabovedot: int +XK_Gcircumflex: int +XK_Ubreve: int +XK_Scircumflex: int +XK_cabovedot: int +XK_ccircumflex: int +XK_gabovedot: int +XK_gcircumflex: int +XK_ubreve: int +XK_scircumflex: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin4.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin4.pyi new file mode 100644 index 00000000..278d7cc9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/latin4.pyi @@ -0,0 +1,36 @@ +XK_kra: int +XK_kappa: int +XK_Rcedilla: int +XK_Itilde: int +XK_Lcedilla: int +XK_Emacron: int +XK_Gcedilla: int +XK_Tslash: int +XK_rcedilla: int +XK_itilde: int +XK_lcedilla: int +XK_emacron: int +XK_gcedilla: int +XK_tslash: int +XK_ENG: int +XK_eng: int +XK_Amacron: int +XK_Iogonek: int +XK_Eabovedot: int +XK_Imacron: int +XK_Ncedilla: int +XK_Omacron: int +XK_Kcedilla: int +XK_Uogonek: int +XK_Utilde: int +XK_Umacron: int +XK_amacron: int +XK_iogonek: int +XK_eabovedot: int +XK_imacron: int +XK_ncedilla: int +XK_omacron: int +XK_kcedilla: int +XK_uogonek: int +XK_utilde: int +XK_umacron: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/miscellany.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/miscellany.pyi new file mode 100644 index 00000000..ecf77b84 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/miscellany.pyi @@ -0,0 +1,169 @@ +XK_BackSpace: int +XK_Tab: int +XK_Linefeed: int +XK_Clear: int +XK_Return: int +XK_Pause: int +XK_Scroll_Lock: int +XK_Sys_Req: int +XK_Escape: int +XK_Delete: int +XK_Multi_key: int +XK_SingleCandidate: int +XK_MultipleCandidate: int +XK_PreviousCandidate: int +XK_Kanji: int +XK_Muhenkan: int +XK_Henkan_Mode: int +XK_Henkan: int +XK_Romaji: int +XK_Hiragana: int +XK_Katakana: int +XK_Hiragana_Katakana: int +XK_Zenkaku: int +XK_Hankaku: int +XK_Zenkaku_Hankaku: int +XK_Touroku: int +XK_Massyo: int +XK_Kana_Lock: int +XK_Kana_Shift: int +XK_Eisu_Shift: int +XK_Eisu_toggle: int +XK_Zen_Koho: int +XK_Mae_Koho: int +XK_Home: int +XK_Left: int +XK_Up: int +XK_Right: int +XK_Down: int +XK_Prior: int +XK_Page_Up: int +XK_Next: int +XK_Page_Down: int +XK_End: int +XK_Begin: int +XK_Select: int +XK_Print: int +XK_Execute: int +XK_Insert: int +XK_Undo: int +XK_Redo: int +XK_Menu: int +XK_Find: int +XK_Cancel: int +XK_Help: int +XK_Break: int +XK_Mode_switch: int +XK_script_switch: int +XK_Num_Lock: int +XK_KP_Space: int +XK_KP_Tab: int +XK_KP_Enter: int +XK_KP_F1: int +XK_KP_F2: int +XK_KP_F3: int +XK_KP_F4: int +XK_KP_Home: int +XK_KP_Left: int +XK_KP_Up: int +XK_KP_Right: int +XK_KP_Down: int +XK_KP_Prior: int +XK_KP_Page_Up: int +XK_KP_Next: int +XK_KP_Page_Down: int +XK_KP_End: int +XK_KP_Begin: int +XK_KP_Insert: int +XK_KP_Delete: int +XK_KP_Equal: int +XK_KP_Multiply: int +XK_KP_Add: int +XK_KP_Separator: int +XK_KP_Subtract: int +XK_KP_Decimal: int +XK_KP_Divide: int +XK_KP_0: int +XK_KP_1: int +XK_KP_2: int +XK_KP_3: int +XK_KP_4: int +XK_KP_5: int +XK_KP_6: int +XK_KP_7: int +XK_KP_8: int +XK_KP_9: int +XK_F1: int +XK_F2: int +XK_F3: int +XK_F4: int +XK_F5: int +XK_F6: int 
+XK_F7: int +XK_F8: int +XK_F9: int +XK_F10: int +XK_F11: int +XK_L1: int +XK_F12: int +XK_L2: int +XK_F13: int +XK_L3: int +XK_F14: int +XK_L4: int +XK_F15: int +XK_L5: int +XK_F16: int +XK_L6: int +XK_F17: int +XK_L7: int +XK_F18: int +XK_L8: int +XK_F19: int +XK_L9: int +XK_F20: int +XK_L10: int +XK_F21: int +XK_R1: int +XK_F22: int +XK_R2: int +XK_F23: int +XK_R3: int +XK_F24: int +XK_R4: int +XK_F25: int +XK_R5: int +XK_F26: int +XK_R6: int +XK_F27: int +XK_R7: int +XK_F28: int +XK_R8: int +XK_F29: int +XK_R9: int +XK_F30: int +XK_R10: int +XK_F31: int +XK_R11: int +XK_F32: int +XK_R12: int +XK_F33: int +XK_R13: int +XK_F34: int +XK_R14: int +XK_F35: int +XK_R15: int +XK_Shift_L: int +XK_Shift_R: int +XK_Control_L: int +XK_Control_R: int +XK_Caps_Lock: int +XK_Shift_Lock: int +XK_Meta_L: int +XK_Meta_R: int +XK_Alt_L: int +XK_Alt_R: int +XK_Super_L: int +XK_Super_R: int +XK_Hyper_L: int +XK_Hyper_R: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/publishing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/publishing.pyi new file mode 100644 index 00000000..50c1f020 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/publishing.pyi @@ -0,0 +1,83 @@ +XK_emspace: int +XK_enspace: int +XK_em3space: int +XK_em4space: int +XK_digitspace: int +XK_punctspace: int +XK_thinspace: int +XK_hairspace: int +XK_emdash: int +XK_endash: int +XK_signifblank: int +XK_ellipsis: int +XK_doubbaselinedot: int +XK_onethird: int +XK_twothirds: int +XK_onefifth: int +XK_twofifths: int +XK_threefifths: int +XK_fourfifths: int +XK_onesixth: int +XK_fivesixths: int +XK_careof: int +XK_figdash: int +XK_leftanglebracket: int +XK_decimalpoint: int +XK_rightanglebracket: int +XK_marker: int +XK_oneeighth: int +XK_threeeighths: int +XK_fiveeighths: int +XK_seveneighths: int +XK_trademark: int +XK_signaturemark: int +XK_trademarkincircle: int +XK_leftopentriangle: int +XK_rightopentriangle: int +XK_emopencircle: int +XK_emopenrectangle: int +XK_leftsinglequotemark: int +XK_rightsinglequotemark: int +XK_leftdoublequotemark: int +XK_rightdoublequotemark: int +XK_prescription: int +XK_minutes: int +XK_seconds: int +XK_latincross: int +XK_hexagram: int +XK_filledrectbullet: int +XK_filledlefttribullet: int +XK_filledrighttribullet: int +XK_emfilledcircle: int +XK_emfilledrect: int +XK_enopencircbullet: int +XK_enopensquarebullet: int +XK_openrectbullet: int +XK_opentribulletup: int +XK_opentribulletdown: int +XK_openstar: int +XK_enfilledcircbullet: int +XK_enfilledsqbullet: int +XK_filledtribulletup: int +XK_filledtribulletdown: int +XK_leftpointer: int +XK_rightpointer: int +XK_club: int +XK_diamond: int +XK_heart: int +XK_maltesecross: int +XK_dagger: int +XK_doubledagger: int +XK_checkmark: int +XK_ballotcross: int +XK_musicalsharp: int +XK_musicalflat: int +XK_malesymbol: int +XK_femalesymbol: int +XK_telephone: int +XK_telephonerecorder: int +XK_phonographcopyright: int +XK_caret: int +XK_singlelowquotemark: int +XK_doublelowquotemark: int +XK_cursor: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/special.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/special.pyi new file mode 100644 index 00000000..6376279e --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/special.pyi @@ -0,0 +1,24 @@ +XK_blank: int +XK_soliddiamond: int +XK_checkerboard: int +XK_ht: int +XK_ff: int +XK_cr: int +XK_lf: int +XK_nl: int +XK_vt: int +XK_lowrightcorner: int +XK_uprightcorner: int +XK_upleftcorner: int +XK_lowleftcorner: int +XK_crossinglines: int +XK_horizlinescan1: int +XK_horizlinescan3: int +XK_horizlinescan5: int +XK_horizlinescan7: int +XK_horizlinescan9: int +XK_leftt: int +XK_rightt: int +XK_bott: int +XK_topt: int +XK_vertbar: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/technical.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/technical.pyi new file mode 100644 index 00000000..6b60c278 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/technical.pyi @@ -0,0 +1,49 @@ +XK_leftradical: int +XK_topleftradical: int +XK_horizconnector: int +XK_topintegral: int +XK_botintegral: int +XK_vertconnector: int +XK_topleftsqbracket: int +XK_botleftsqbracket: int +XK_toprightsqbracket: int +XK_botrightsqbracket: int +XK_topleftparens: int +XK_botleftparens: int +XK_toprightparens: int +XK_botrightparens: int +XK_leftmiddlecurlybrace: int +XK_rightmiddlecurlybrace: int +XK_topleftsummation: int +XK_botleftsummation: int +XK_topvertsummationconnector: int +XK_botvertsummationconnector: int +XK_toprightsummation: int +XK_botrightsummation: int +XK_rightmiddlesummation: int +XK_lessthanequal: int +XK_notequal: int +XK_greaterthanequal: int +XK_integral: int +XK_therefore: int +XK_variation: int +XK_infinity: int +XK_nabla: int +XK_approximate: int +XK_similarequal: int +XK_ifonlyif: int +XK_implies: int +XK_identical: int +XK_radical: int +XK_includedin: int +XK_includes: int +XK_intersection: int +XK_union: int +XK_logicaland: int +XK_logicalor: int +XK_partialderivative: int +XK_function: int +XK_leftarrow: int +XK_uparrow: int +XK_rightarrow: int +XK_downarrow: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/thai.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/thai.pyi new file mode 100644 index 00000000..71f2c910 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/thai.pyi @@ -0,0 +1,84 @@ +XK_Thai_kokai: int +XK_Thai_khokhai: int +XK_Thai_khokhuat: int +XK_Thai_khokhwai: int +XK_Thai_khokhon: int +XK_Thai_khorakhang: int +XK_Thai_ngongu: int +XK_Thai_chochan: int +XK_Thai_choching: int +XK_Thai_chochang: int +XK_Thai_soso: int +XK_Thai_chochoe: int +XK_Thai_yoying: int +XK_Thai_dochada: int +XK_Thai_topatak: int +XK_Thai_thothan: int +XK_Thai_thonangmontho: int +XK_Thai_thophuthao: int +XK_Thai_nonen: int +XK_Thai_dodek: int +XK_Thai_totao: int +XK_Thai_thothung: int +XK_Thai_thothahan: int +XK_Thai_thothong: int +XK_Thai_nonu: int +XK_Thai_bobaimai: int +XK_Thai_popla: int +XK_Thai_phophung: int +XK_Thai_fofa: int +XK_Thai_phophan: int +XK_Thai_fofan: int +XK_Thai_phosamphao: int +XK_Thai_moma: int +XK_Thai_yoyak: int +XK_Thai_rorua: int +XK_Thai_ru: int +XK_Thai_loling: int +XK_Thai_lu: int +XK_Thai_wowaen: int +XK_Thai_sosala: int +XK_Thai_sorusi: int +XK_Thai_sosua: int +XK_Thai_hohip: int +XK_Thai_lochula: int +XK_Thai_oang: int +XK_Thai_honokhuk: int 
+XK_Thai_paiyannoi: int +XK_Thai_saraa: int +XK_Thai_maihanakat: int +XK_Thai_saraaa: int +XK_Thai_saraam: int +XK_Thai_sarai: int +XK_Thai_saraii: int +XK_Thai_saraue: int +XK_Thai_sarauee: int +XK_Thai_sarau: int +XK_Thai_sarauu: int +XK_Thai_phinthu: int +XK_Thai_maihanakat_maitho: int +XK_Thai_baht: int +XK_Thai_sarae: int +XK_Thai_saraae: int +XK_Thai_sarao: int +XK_Thai_saraaimaimuan: int +XK_Thai_saraaimaimalai: int +XK_Thai_lakkhangyao: int +XK_Thai_maiyamok: int +XK_Thai_maitaikhu: int +XK_Thai_maiek: int +XK_Thai_maitho: int +XK_Thai_maitri: int +XK_Thai_maichattawa: int +XK_Thai_thanthakhat: int +XK_Thai_nikhahit: int +XK_Thai_leksun: int +XK_Thai_leknung: int +XK_Thai_leksong: int +XK_Thai_leksam: int +XK_Thai_leksi: int +XK_Thai_lekha: int +XK_Thai_lekhok: int +XK_Thai_lekchet: int +XK_Thai_lekpaet: int +XK_Thai_lekkao: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/xf86.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/xf86.pyi new file mode 100644 index 00000000..73b36fc7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/xf86.pyi @@ -0,0 +1,184 @@ +XK_XF86_ModeLock: int +XK_XF86_MonBrightnessUp: int +XK_XF86_MonBrightnessDown: int +XK_XF86_KbdLightOnOff: int +XK_XF86_KbdBrightnessUp: int +XK_XF86_KbdBrightnessDown: int +XK_XF86_MonBrightnessCycle: int +XK_XF86_Standby: int +XK_XF86_AudioLowerVolume: int +XK_XF86_AudioMute: int +XK_XF86_AudioRaiseVolume: int +XK_XF86_AudioPlay: int +XK_XF86_AudioStop: int +XK_XF86_AudioPrev: int +XK_XF86_AudioNext: int +XK_XF86_HomePage: int +XK_XF86_Mail: int +XK_XF86_Start: int +XK_XF86_Search: int +XK_XF86_AudioRecord: int +XK_XF86_Calculator: int +XK_XF86_Memo: int +XK_XF86_ToDoList: int +XK_XF86_Calendar: int +XK_XF86_PowerDown: int +XK_XF86_ContrastAdjust: int +XK_XF86_RockerUp: int +XK_XF86_RockerDown: int +XK_XF86_RockerEnter: int +XK_XF86_Back: int +XK_XF86_Forward: int +XK_XF86_Stop: int +XK_XF86_Refresh: int +XK_XF86_PowerOff: int +XK_XF86_WakeUp: int +XK_XF86_Eject: int +XK_XF86_ScreenSaver: int +XK_XF86_WWW: int +XK_XF86_Sleep: int +XK_XF86_Favorites: int +XK_XF86_AudioPause: int +XK_XF86_AudioMedia: int +XK_XF86_MyComputer: int +XK_XF86_VendorHome: int +XK_XF86_LightBulb: int +XK_XF86_Shop: int +XK_XF86_History: int +XK_XF86_OpenURL: int +XK_XF86_AddFavorite: int +XK_XF86_HotLinks: int +XK_XF86_BrightnessAdjust: int +XK_XF86_Finance: int +XK_XF86_Community: int +XK_XF86_AudioRewind: int +XK_XF86_XF86BackForward: int +XK_XF86_Launch0: int +XK_XF86_Launch1: int +XK_XF86_Launch2: int +XK_XF86_Launch3: int +XK_XF86_Launch4: int +XK_XF86_Launch5: int +XK_XF86_Launch6: int +XK_XF86_Launch7: int +XK_XF86_Launch8: int +XK_XF86_Launch9: int +XK_XF86_LaunchA: int +XK_XF86_LaunchB: int +XK_XF86_LaunchC: int +XK_XF86_LaunchD: int +XK_XF86_LaunchE: int +XK_XF86_LaunchF: int +XK_XF86_ApplicationLeft: int +XK_XF86_ApplicationRight: int +XK_XF86_Book: int +XK_XF86_CD: int +XK_XF86_Calculater: int +XK_XF86_Clear: int +XK_XF86_Close: int +XK_XF86_Copy: int +XK_XF86_Cut: int +XK_XF86_Display: int +XK_XF86_DOS: int +XK_XF86_Documents: int +XK_XF86_Excel: int +XK_XF86_Explorer: int +XK_XF86_Game: int +XK_XF86_Go: int +XK_XF86_iTouch: int +XK_XF86_LogOff: int +XK_XF86_Market: int +XK_XF86_Meeting: int +XK_XF86_MenuKB: int +XK_XF86_MenuPB: int +XK_XF86_MySites: int +XK_XF86_New: int +XK_XF86_News: int +XK_XF86_OfficeHome: int +XK_XF86_Open: int 
+XK_XF86_Option: int +XK_XF86_Paste: int +XK_XF86_Phone: int +XK_XF86_Q: int +XK_XF86_Reply: int +XK_XF86_Reload: int +XK_XF86_RotateWindows: int +XK_XF86_RotationPB: int +XK_XF86_RotationKB: int +XK_XF86_Save: int +XK_XF86_ScrollUp: int +XK_XF86_ScrollDown: int +XK_XF86_ScrollClick: int +XK_XF86_Send: int +XK_XF86_Spell: int +XK_XF86_SplitScreen: int +XK_XF86_Support: int +XK_XF86_TaskPane: int +XK_XF86_Terminal: int +XK_XF86_Tools: int +XK_XF86_Travel: int +XK_XF86_UserPB: int +XK_XF86_User1KB: int +XK_XF86_User2KB: int +XK_XF86_Video: int +XK_XF86_WheelButton: int +XK_XF86_Word: int +XK_XF86_Xfer: int +XK_XF86_ZoomIn: int +XK_XF86_ZoomOut: int +XK_XF86_Away: int +XK_XF86_Messenger: int +XK_XF86_WebCam: int +XK_XF86_MailForward: int +XK_XF86_Pictures: int +XK_XF86_Music: int +XK_XF86_Battery: int +XK_XF86_Bluetooth: int +XK_XF86_WLAN: int +XK_XF86_UWB: int +XK_XF86_AudioForward: int +XK_XF86_AudioRepeat: int +XK_XF86_AudioRandomPlay: int +XK_XF86_Subtitle: int +XK_XF86_AudioCycleTrack: int +XK_XF86_CycleAngle: int +XK_XF86_FrameBack: int +XK_XF86_FrameForward: int +XK_XF86_Time: int +XK_XF86_Select: int +XK_XF86_View: int +XK_XF86_TopMenu: int +XK_XF86_Red: int +XK_XF86_Green: int +XK_XF86_Yellow: int +XK_XF86_Blue: int +XK_XF86_Suspend: int +XK_XF86_Hibernate: int +XK_XF86_TouchpadToggle: int +XK_XF86_TouchpadOn: int +XK_XF86_TouchpadOff: int +XK_XF86_AudioMicMute: int +XK_XF86_Keyboard: int +XK_XF86_WWAN: int +XK_XF86_RFKill: int +XK_XF86_AudioPreset: int +XK_XF86_RotationLockToggle: int +XK_XF86_FullScreen: int +XK_XF86_Switch_VT_1: int +XK_XF86_Switch_VT_2: int +XK_XF86_Switch_VT_3: int +XK_XF86_Switch_VT_4: int +XK_XF86_Switch_VT_5: int +XK_XF86_Switch_VT_6: int +XK_XF86_Switch_VT_7: int +XK_XF86_Switch_VT_8: int +XK_XF86_Switch_VT_9: int +XK_XF86_Switch_VT_10: int +XK_XF86_Switch_VT_11: int +XK_XF86_Switch_VT_12: int +XK_XF86_Ungrab: int +XK_XF86_ClearGrab: int +XK_XF86_Next_VMode: int +XK_XF86_Prev_VMode: int +XK_XF86_LogWindowTree: int +XK_XF86_LogGrabInfo: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/xk3270.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/xk3270.pyi new file mode 100644 index 00000000..5fe41b94 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/xk3270.pyi @@ -0,0 +1,30 @@ +XK_3270_Duplicate: int +XK_3270_FieldMark: int +XK_3270_Right2: int +XK_3270_Left2: int +XK_3270_BackTab: int +XK_3270_EraseEOF: int +XK_3270_EraseInput: int +XK_3270_Reset: int +XK_3270_Quit: int +XK_3270_PA1: int +XK_3270_PA2: int +XK_3270_PA3: int +XK_3270_Test: int +XK_3270_Attn: int +XK_3270_CursorBlink: int +XK_3270_AltCursor: int +XK_3270_KeyClick: int +XK_3270_Jump: int +XK_3270_Ident: int +XK_3270_Rule: int +XK_3270_Copy: int +XK_3270_Play: int +XK_3270_Setup: int +XK_3270_Record: int +XK_3270_ChangeScreen: int +XK_3270_DeleteWord: int +XK_3270_ExSelect: int +XK_3270_CursorSelect: int +XK_3270_PrintScreen: int +XK_3270_Enter: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/xkb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/xkb.pyi new file mode 100644 index 00000000..dbfa43c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/keysymdef/xkb.pyi @@ -0,0 +1,100 @@ +XK_ISO_Lock: int +XK_ISO_Level2_Latch: 
int +XK_ISO_Level3_Shift: int +XK_ISO_Level3_Latch: int +XK_ISO_Level3_Lock: int +XK_ISO_Group_Shift: int +XK_ISO_Group_Latch: int +XK_ISO_Group_Lock: int +XK_ISO_Next_Group: int +XK_ISO_Next_Group_Lock: int +XK_ISO_Prev_Group: int +XK_ISO_Prev_Group_Lock: int +XK_ISO_First_Group: int +XK_ISO_First_Group_Lock: int +XK_ISO_Last_Group: int +XK_ISO_Last_Group_Lock: int +XK_ISO_Left_Tab: int +XK_ISO_Move_Line_Up: int +XK_ISO_Move_Line_Down: int +XK_ISO_Partial_Line_Up: int +XK_ISO_Partial_Line_Down: int +XK_ISO_Partial_Space_Left: int +XK_ISO_Partial_Space_Right: int +XK_ISO_Set_Margin_Left: int +XK_ISO_Set_Margin_Right: int +XK_ISO_Release_Margin_Left: int +XK_ISO_Release_Margin_Right: int +XK_ISO_Release_Both_Margins: int +XK_ISO_Fast_Cursor_Left: int +XK_ISO_Fast_Cursor_Right: int +XK_ISO_Fast_Cursor_Up: int +XK_ISO_Fast_Cursor_Down: int +XK_ISO_Continuous_Underline: int +XK_ISO_Discontinuous_Underline: int +XK_ISO_Emphasize: int +XK_ISO_Center_Object: int +XK_ISO_Enter: int +XK_dead_grave: int +XK_dead_acute: int +XK_dead_circumflex: int +XK_dead_tilde: int +XK_dead_macron: int +XK_dead_breve: int +XK_dead_abovedot: int +XK_dead_diaeresis: int +XK_dead_abovering: int +XK_dead_doubleacute: int +XK_dead_caron: int +XK_dead_cedilla: int +XK_dead_ogonek: int +XK_dead_iota: int +XK_dead_voiced_sound: int +XK_dead_semivoiced_sound: int +XK_dead_belowdot: int +XK_First_Virtual_Screen: int +XK_Prev_Virtual_Screen: int +XK_Next_Virtual_Screen: int +XK_Last_Virtual_Screen: int +XK_Terminate_Server: int +XK_AccessX_Enable: int +XK_AccessX_Feedback_Enable: int +XK_RepeatKeys_Enable: int +XK_SlowKeys_Enable: int +XK_BounceKeys_Enable: int +XK_StickyKeys_Enable: int +XK_MouseKeys_Enable: int +XK_MouseKeys_Accel_Enable: int +XK_Overlay1_Enable: int +XK_Overlay2_Enable: int +XK_AudibleBell_Enable: int +XK_Pointer_Left: int +XK_Pointer_Right: int +XK_Pointer_Up: int +XK_Pointer_Down: int +XK_Pointer_UpLeft: int +XK_Pointer_UpRight: int +XK_Pointer_DownLeft: int +XK_Pointer_DownRight: int +XK_Pointer_Button_Dflt: int +XK_Pointer_Button1: int +XK_Pointer_Button2: int +XK_Pointer_Button3: int +XK_Pointer_Button4: int +XK_Pointer_Button5: int +XK_Pointer_DblClick_Dflt: int +XK_Pointer_DblClick1: int +XK_Pointer_DblClick2: int +XK_Pointer_DblClick3: int +XK_Pointer_DblClick4: int +XK_Pointer_DblClick5: int +XK_Pointer_Drag_Dflt: int +XK_Pointer_Drag1: int +XK_Pointer_Drag2: int +XK_Pointer_Drag3: int +XK_Pointer_Drag4: int +XK_Pointer_Drag5: int +XK_Pointer_EnableKeys: int +XK_Pointer_Accelerate: int +XK_Pointer_DfltBtnNext: int +XK_Pointer_DfltBtnPrev: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/__init__.pyi new file mode 100644 index 00000000..1252a6dc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/__init__.pyi @@ -0,0 +1,3 @@ +from Xlib.protocol import display as display, event as event, request as request, rq as rq, structs as structs + +__all__ = ["display", "event", "request", "rq", "structs"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/display.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/display.pyi new file mode 100644 index 00000000..e125e966 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/display.pyi @@ -0,0 +1,118 @@ +from _typeshed import _BufferWithLen +from socket import socket +from typing import TypeVar, overload +from typing_extensions import Literal + +from Xlib import error +from Xlib._typing import ErrorHandler +from Xlib.display import _ResourceBaseClass, _ResourceBaseClassesType +from Xlib.protocol import rq +from Xlib.support import lock +from Xlib.xobject import colormap, cursor, drawable, fontable, resource + +_T = TypeVar("_T") + +class bytesview: + view: memoryview + @overload + def __init__(self, data: bytes | bytesview, offset: int, size: int) -> None: ... + @overload + def __init__(self, data: _BufferWithLen, offset: int = ..., size: int | None = ...) -> None: ... + @overload + def __getitem__(self, key: slice) -> bytes: ... + @overload + def __getitem__(self, key: int) -> int: ... + def __len__(self) -> int: ... + +class Display: + extension_major_opcodes: dict[str, int] + error_classes: dict[int, type[error.XError]] + event_classes: dict[int, type[rq.Event] | dict[int, type[rq.Event]]] + resource_classes: _ResourceBaseClassesType | None + display_name: str + default_screen: int + socket: socket + socket_error_lock: lock._DummyLock + socket_error: Exception | None + event_queue_read_lock: lock._DummyLock + event_queue_write_lock: lock._DummyLock + event_queue: list[rq.Event] + request_queue_lock: lock._DummyLock + request_serial: int + request_queue: list[tuple[rq.Request | rq.ReplyRequest | ConnectionSetupRequest, int]] + send_recv_lock: lock._DummyLock + send_active: int + recv_active: int + event_waiting: int + event_wait_lock: lock._DummyLock + request_waiting: int + request_wait_lock: lock._DummyLock + recv_buffer_size: int + sent_requests: list[rq.Request | rq.ReplyRequest | ConnectionSetupRequest] + recv_packet_len: int + data_send: bytes + data_recv: bytes + data_sent_bytes: int + resource_id_lock: lock._DummyLock + resource_ids: dict[int, None] + last_resource_id: int + error_handler: ErrorHandler[object] | None + big_endian: bool + info: ConnectionSetupRequest + def __init__(self, display: str | None = ...) -> None: ... + def get_display_name(self) -> str: ... + def get_default_screen(self) -> int: ... + def fileno(self) -> int: ... + def next_event(self) -> rq.Event: ... + def pending_events(self) -> int: ... + def flush(self) -> None: ... + def close(self) -> None: ... + def set_error_handler(self, handler: ErrorHandler[object] | None) -> None: ... + def allocate_resource_id(self) -> int: ... + def free_resource_id(self, rid: int) -> None: ... + @overload + def get_resource_class(self, class_name: Literal["resource"], default: object = ...) -> type[resource.Resource]: ... + @overload + def get_resource_class(self, class_name: Literal["drawable"], default: object = ...) -> type[drawable.Drawable]: ... + @overload + def get_resource_class(self, class_name: Literal["window"], default: object = ...) -> type[drawable.Window]: ... + @overload + def get_resource_class(self, class_name: Literal["pixmap"], default: object = ...) -> type[drawable.Pixmap]: ... + @overload + def get_resource_class(self, class_name: Literal["fontable"], default: object = ...) -> type[fontable.Fontable]: ... + @overload + def get_resource_class(self, class_name: Literal["font"], default: object = ...) -> type[fontable.Font]: ... + @overload + def get_resource_class(self, class_name: Literal["gc"], default: object = ...) -> type[fontable.GC]: ... 
+ @overload + def get_resource_class(self, class_name: Literal["colormap"], default: object = ...) -> type[colormap.Colormap]: ... + @overload + def get_resource_class(self, class_name: Literal["cursor"], default: object) -> type[cursor.Cursor]: ... + @overload + def get_resource_class(self, class_name: str, default: _T) -> type[_ResourceBaseClass] | _T: ... + @overload + def get_resource_class(self, class_name: str, default: None = ...) -> type[_ResourceBaseClass] | None: ... + def set_extension_major(self, extname: str, major: int) -> None: ... + def get_extension_major(self, extname: str) -> int: ... + def add_extension_event(self, code: int, evt: type[rq.Event], subcode: int | None = ...) -> None: ... + def add_extension_error(self, code: int, err: type[error.XError]) -> None: ... + def check_for_error(self) -> None: ... + def send_request(self, request: rq.Request | rq.ReplyRequest | ConnectionSetupRequest, wait_for_response: bool) -> None: ... + def close_internal(self, whom: object) -> None: ... + def send_and_recv(self, flush: bool = ..., event: bool = ..., request: int | None = ..., recv: bool = ...) -> None: ... + def parse_response(self, request: int) -> bool: ... + def parse_error_response(self, request: int) -> bool: ... + def default_error_handler(self, err: object) -> None: ... + def parse_request_response(self, request: int) -> bool: ... + def parse_event_response(self, etype: int) -> None: ... + def get_waiting_request(self, sno: int) -> rq.ReplyRequest | ConnectionSetupRequest | None: ... + def get_waiting_replyrequest(self) -> rq.ReplyRequest | ConnectionSetupRequest: ... + def parse_connection_setup(self) -> bool: ... + +PixmapFormat: rq.Struct +VisualType: rq.Struct +Depth: rq.Struct +Screen: rq.Struct + +class ConnectionSetupRequest(rq.GetAttrData): + def __init__(self, display: Display, *args: object, **keys: object) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/event.pyi new file mode 100644 index 00000000..cfb82544 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/event.pyi @@ -0,0 +1,80 @@ +from typing_extensions import TypeAlias + +from Xlib.protocol import rq + +class AnyEvent(rq.Event): ... +class KeyButtonPointer(rq.Event): ... +class KeyPress(KeyButtonPointer): ... +class KeyRelease(KeyButtonPointer): ... +class ButtonPress(KeyButtonPointer): ... +class ButtonRelease(KeyButtonPointer): ... +class MotionNotify(KeyButtonPointer): ... +class EnterLeave(rq.Event): ... +class EnterNotify(EnterLeave): ... +class LeaveNotify(EnterLeave): ... +class Focus(rq.Event): ... +class FocusIn(Focus): ... +class FocusOut(Focus): ... +class Expose(rq.Event): ... +class GraphicsExpose(rq.Event): ... +class NoExpose(rq.Event): ... +class VisibilityNotify(rq.Event): ... +class CreateNotify(rq.Event): ... +class DestroyNotify(rq.Event): ... +class UnmapNotify(rq.Event): ... +class MapNotify(rq.Event): ... +class MapRequest(rq.Event): ... +class ReparentNotify(rq.Event): ... +class ConfigureNotify(rq.Event): ... +class ConfigureRequest(rq.Event): ... +class GravityNotify(rq.Event): ... +class ResizeRequest(rq.Event): ... +class Circulate(rq.Event): ... +class CirculateNotify(Circulate): ... +class CirculateRequest(Circulate): ... +class PropertyNotify(rq.Event): ... +class SelectionClear(rq.Event): ... 
+class SelectionRequest(rq.Event): ... +class SelectionNotify(rq.Event): ... +class ColormapNotify(rq.Event): ... +class MappingNotify(rq.Event): ... +class ClientMessage(rq.Event): ... +class KeymapNotify(rq.Event): ... + +_EventClass: TypeAlias = dict[ + int, + type[KeyPress] + | type[KeyRelease] + | type[ButtonPress] + | type[ButtonRelease] + | type[MotionNotify] + | type[EnterNotify] + | type[LeaveNotify] + | type[FocusIn] + | type[FocusOut] + | type[KeymapNotify] + | type[Expose] + | type[GraphicsExpose] + | type[NoExpose] + | type[VisibilityNotify] + | type[CreateNotify] + | type[DestroyNotify] + | type[UnmapNotify] + | type[MapNotify] + | type[MapRequest] + | type[ReparentNotify] + | type[ConfigureNotify] + | type[ConfigureRequest] + | type[GravityNotify] + | type[ResizeRequest] + | type[CirculateNotify] + | type[CirculateRequest] + | type[PropertyNotify] + | type[SelectionClear] + | type[SelectionRequest] + | type[SelectionNotify] + | type[ColormapNotify] + | type[ClientMessage] + | type[MappingNotify], +] +event_class: _EventClass diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/request.pyi new file mode 100644 index 00000000..a336f5c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/request.pyi @@ -0,0 +1,133 @@ +from typing import NoReturn + +from Xlib import display +from Xlib.protocol import rq + +class CreateWindow(rq.Request): ... +class ChangeWindowAttributes(rq.Request): ... +class GetWindowAttributes(rq.ReplyRequest): ... +class DestroyWindow(rq.Request): ... +class DestroySubWindows(rq.Request): ... +class ChangeSaveSet(rq.Request): ... +class ReparentWindow(rq.Request): ... +class MapWindow(rq.Request): ... +class MapSubwindows(rq.Request): ... +class UnmapWindow(rq.Request): ... +class UnmapSubwindows(rq.Request): ... +class ConfigureWindow(rq.Request): ... +class CirculateWindow(rq.Request): ... +class GetGeometry(rq.ReplyRequest): ... +class QueryTree(rq.ReplyRequest): ... +class InternAtom(rq.ReplyRequest): ... +class GetAtomName(rq.ReplyRequest): ... +class ChangeProperty(rq.Request): ... +class DeleteProperty(rq.Request): ... +class GetProperty(rq.ReplyRequest): ... +class ListProperties(rq.ReplyRequest): ... +class SetSelectionOwner(rq.Request): ... +class GetSelectionOwner(rq.ReplyRequest): ... +class ConvertSelection(rq.Request): ... +class SendEvent(rq.Request): ... +class GrabPointer(rq.ReplyRequest): ... +class UngrabPointer(rq.Request): ... +class GrabButton(rq.Request): ... +class UngrabButton(rq.Request): ... +class ChangeActivePointerGrab(rq.Request): ... +class GrabKeyboard(rq.ReplyRequest): ... +class UngrabKeyboard(rq.Request): ... +class GrabKey(rq.Request): ... +class UngrabKey(rq.Request): ... +class AllowEvents(rq.Request): ... +class GrabServer(rq.Request): ... +class UngrabServer(rq.Request): ... +class QueryPointer(rq.ReplyRequest): ... +class GetMotionEvents(rq.ReplyRequest): ... +class TranslateCoords(rq.ReplyRequest): ... +class WarpPointer(rq.Request): ... +class SetInputFocus(rq.Request): ... +class GetInputFocus(rq.ReplyRequest): ... +class QueryKeymap(rq.ReplyRequest): ... +class OpenFont(rq.Request): ... +class CloseFont(rq.Request): ... +class QueryFont(rq.ReplyRequest): ... +class QueryTextExtents(rq.ReplyRequest): ... +class ListFonts(rq.ReplyRequest): ... 
+ +class ListFontsWithInfo(rq.ReplyRequest): + def __init__(self, display: display.Display, defer: bool = ..., *args: object, **keys: object) -> None: ... + def __getattr__(self, attr: object) -> NoReturn: ... + def __getitem__(self, item: str) -> object: ... + def __len__(self) -> int: ... + +class SetFontPath(rq.Request): ... +class GetFontPath(rq.ReplyRequest): ... +class CreatePixmap(rq.Request): ... +class FreePixmap(rq.Request): ... +class CreateGC(rq.Request): ... +class ChangeGC(rq.Request): ... +class CopyGC(rq.Request): ... +class SetDashes(rq.Request): ... +class SetClipRectangles(rq.Request): ... +class FreeGC(rq.Request): ... +class ClearArea(rq.Request): ... +class CopyArea(rq.Request): ... +class CopyPlane(rq.Request): ... +class PolyPoint(rq.Request): ... +class PolyLine(rq.Request): ... +class PolySegment(rq.Request): ... +class PolyRectangle(rq.Request): ... +class PolyArc(rq.Request): ... +class FillPoly(rq.Request): ... +class PolyFillRectangle(rq.Request): ... +class PolyFillArc(rq.Request): ... +class PutImage(rq.Request): ... +class GetImage(rq.ReplyRequest): ... +class PolyText8(rq.Request): ... +class PolyText16(rq.Request): ... +class ImageText8(rq.Request): ... +class ImageText16(rq.Request): ... +class CreateColormap(rq.Request): ... +class FreeColormap(rq.Request): ... +class CopyColormapAndFree(rq.Request): ... +class InstallColormap(rq.Request): ... +class UninstallColormap(rq.Request): ... +class ListInstalledColormaps(rq.ReplyRequest): ... +class AllocColor(rq.ReplyRequest): ... +class AllocNamedColor(rq.ReplyRequest): ... +class AllocColorCells(rq.ReplyRequest): ... +class AllocColorPlanes(rq.ReplyRequest): ... +class FreeColors(rq.Request): ... +class StoreColors(rq.Request): ... +class StoreNamedColor(rq.Request): ... +class QueryColors(rq.ReplyRequest): ... +class LookupColor(rq.ReplyRequest): ... +class CreateCursor(rq.Request): ... +class CreateGlyphCursor(rq.Request): ... +class FreeCursor(rq.Request): ... +class RecolorCursor(rq.Request): ... +class QueryBestSize(rq.ReplyRequest): ... +class QueryExtension(rq.ReplyRequest): ... +class ListExtensions(rq.ReplyRequest): ... +class ChangeKeyboardMapping(rq.Request): ... +class GetKeyboardMapping(rq.ReplyRequest): ... +class ChangeKeyboardControl(rq.Request): ... +class GetKeyboardControl(rq.ReplyRequest): ... +class Bell(rq.Request): ... +class ChangePointerControl(rq.Request): ... +class GetPointerControl(rq.ReplyRequest): ... +class SetScreenSaver(rq.Request): ... +class GetScreenSaver(rq.ReplyRequest): ... +class ChangeHosts(rq.Request): ... +class ListHosts(rq.ReplyRequest): ... +class SetAccessControl(rq.Request): ... +class SetCloseDownMode(rq.Request): ... +class KillClient(rq.Request): ... +class RotateProperties(rq.Request): ... +class ForceScreenSaver(rq.Request): ... +class SetPointerMapping(rq.ReplyRequest): ... +class GetPointerMapping(rq.ReplyRequest): ... +class SetModifierMapping(rq.ReplyRequest): ... +class GetModifierMapping(rq.ReplyRequest): ... +class NoOperation(rq.Request): ... 
+ +major_codes: dict[int, type[rq.Request]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/rq.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/rq.pyi new file mode 100644 index 00000000..8b5b8ac8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/rq.pyi @@ -0,0 +1,402 @@ +from _typeshed import ReadableBuffer, SliceableBuffer, SupportsTrunc +from array import array + +# Avoid name collision with List.type +from builtins import type as Type +from collections.abc import Callable, Iterable, Sequence +from typing import Any, SupportsInt, TypeVar, overload, type_check_only +from typing_extensions import Literal, LiteralString, SupportsIndex, TypeAlias + +from Xlib._typing import ErrorHandler, Unused +from Xlib.display import _ResourceBaseClass +from Xlib.error import XError +from Xlib.ext.xinput import ClassInfoClass +from Xlib.protocol import display + +_T = TypeVar("_T") +_IntNew: TypeAlias = str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc +_ModifierMappingList8Elements: TypeAlias = Sequence[Sequence[int]] + +# Workaround for pytype crash. Should be Xlib.display._BaseDisplay +@type_check_only +class _BaseDisplay(display.Display): + def __init__(self, display: str | None = ...) -> None: ... + def get_atom(self, atomname: str, only_if_exists: bool = ...) -> int: ... + +def decode_string(bs: bytes | bytearray) -> str: ... +def encode_array(a: array[Any] | memoryview) -> str: ... + +class BadDataError(Exception): ... + +signed_codes: dict[int, str] +unsigned_codes: dict[int, str] +array_unsigned_codes: dict[int, LiteralString] +struct_to_array_codes: dict[str, LiteralString] +size: int + +class Field: + name: str + default: int | None + pack_value: Callable[[Any], tuple[Any, int | None, int | None]] | None + structcode: str | None + structvalues: int + check_value: Callable[[Any], Any] | None + parse_value: Callable[[Any, Any], Any] | None + keyword_args: int + + def parse_binary_value( + self, data: SliceableBuffer, display: display.Display | None, length: int | None, format: int + ) -> tuple[Any, SliceableBuffer]: ... + +class Pad(Field): + size: int + value: bytes + structcode: str + def __init__(self, size: int) -> None: ... + +class ConstantField(Field): + value: int + def __init__(self, value: int) -> None: ... + +class Opcode(ConstantField): + structcode: str + +class ReplyCode(ConstantField): + structcode: str + value: int + def __init__(self) -> None: ... + +class LengthField(Field): + structcode: str + other_fields: list[str] | tuple[str, ...] | None + def calc_length(self, length: int) -> int: ... + +class TotalLengthField(LengthField): ... +class RequestLength(TotalLengthField): ... +class ReplyLength(TotalLengthField): ... + +class LengthOf(LengthField): + other_fields: list[str] | tuple[str, ...] | None + def __init__(self, name: str | list[str] | tuple[str, ...], size: int) -> None: ... + +class OddLength(LengthField): + def __init__(self, name: str) -> None: ... + def parse_value(self, value: int, display: Unused) -> Literal["even", "odd"]: ... # type: ignore[override] + +class FormatField(Field): + structcode: str + def __init__(self, name: str, size: int) -> None: ... + +Format = FormatField + +class ValueField(Field): + def __init__(self, name: str, default: int | None = ...) -> None: ... 
+ +class Int8(ValueField): + structcode: str + +class Int16(ValueField): + structcode: str + +class Int32(ValueField): + structcode: str + +class Card8(ValueField): + structcode: str + +class Card16(ValueField): + structcode: str + +class Card32(ValueField): + structcode: str + +class Resource(Card32): + cast_function: str + class_name: str + codes: tuple[int, ...] + def __init__(self, name: str, codes: tuple[int, ...] = ..., default: int | None = ...) -> None: ... + @overload # type: ignore[override] + def check_value(self, value: Callable[[], _T]) -> _T: ... + @overload + def check_value(self, value: _T) -> _T: ... + def parse_value(self, value: int, display: _BaseDisplay) -> int: ... # type: ignore[override] # display: None will error. See: https://github.com/python-xlib/python-xlib/pull/248 + +class Window(Resource): + cast_function: str + class_name: str + +class Pixmap(Resource): + cast_function: str + class_name: str + +class Drawable(Resource): + cast_function: str + class_name: str + +class Fontable(Resource): + cast_function: str + class_name: str + +class Font(Resource): + cast_function: str + class_name: str + +class GC(Resource): + cast_function: str + class_name: str + +class Colormap(Resource): + cast_function: str + class_name: str + +class Cursor(Resource): + cast_function: str + class_name: str + +class Bool(ValueField): + structcode: str + def check_value(self, value: object) -> bool: ... # type: ignore[override] + +class Set(ValueField): + structcode: str + values: Sequence[object] + def __init__(self, name: str, size: int, values: Sequence[object], default: int | None = ...) -> None: ... + def check_value(self, val: _T) -> _T: ... # type: ignore[override] + +class Gravity(Set): + def __init__(self, name: str) -> None: ... + +class FixedBinary(ValueField): + structcode: str + def __init__(self, name: str, size: int) -> None: ... + +class Binary(ValueField): + structcode: None + pad: int + def __init__(self, name: str, pad: int = ...) -> None: ... + def pack_value( # type: ignore[override] # Override Callable + self, val: bytes | bytearray + ) -> tuple[bytes | bytearray, int, None]: ... + @overload # type: ignore[override] # Overload for specific values + def parse_binary_value(self, data: _T, display: Unused, length: None, format: Unused) -> tuple[_T, Literal[b""]]: ... + @overload + def parse_binary_value( + self, data: SliceableBuffer, display: Unused, length: int, format: Unused + ) -> tuple[SliceableBuffer, SliceableBuffer]: ... + +class String8(ValueField): + structcode: None + pad: int + def __init__(self, name: str, pad: int = ...) -> None: ... + def pack_value(self, val: bytes | str) -> tuple[bytes, int, None]: ... # type: ignore[override] # Override Callable + @overload # type: ignore[override] # Overload for specific values + def parse_binary_value( + self, data: bytes | bytearray, display: Unused, length: None, format: Unused + ) -> tuple[str, Literal[b""]]: ... + @overload + def parse_binary_value( + self, data: SliceableBuffer, display: Unused, length: int, format: Unused + ) -> tuple[str, SliceableBuffer]: ... + +class String16(ValueField): + structcode: None + pad: int + def __init__(self, name: str, pad: int = ...) -> None: ... + def pack_value(self, val: Sequence[object]) -> tuple[bytes, int, None]: ... # type: ignore[override] # Override Callable + def parse_binary_value( # type: ignore[override] # length: None will error. 
See: https://github.com/python-xlib/python-xlib/pull/248 + self, data: SliceableBuffer, display: Unused, length: int | Literal["odd", "even"], format: Unused + ) -> tuple[tuple[Any, ...], SliceableBuffer]: ... + +class List(ValueField): + structcode: None + type: Struct | ScalarObj | ResourceObj | ClassInfoClass | type[ValueField] + pad: int + def __init__( + self, name: str, type: Struct | ScalarObj | ResourceObj | ClassInfoClass | Type[ValueField], pad: int = ... + ) -> None: ... + def parse_binary_value( + self, data: SliceableBuffer, display: display.Display | None, length: SupportsIndex | None, format: Unused + ) -> tuple[list[DictWrapper | None], SliceableBuffer]: ... + def pack_value( # type: ignore[override] # Override Callable + self, val: Sequence[object] | dict[str, Any] + ) -> tuple[bytes, int, None]: ... + +class FixedList(List): + size: int + def __init__(self, name: str, size: int, type: Struct | ScalarObj, pad: int = ...) -> None: ... + def parse_binary_value( + self, data: SliceableBuffer, display: display.Display | None, length: Unused, format: Unused + ) -> tuple[list[DictWrapper | None], SliceableBuffer]: ... + +class Object(ValueField): + type: Struct + structcode: str | None + def __init__(self, name: str, type: Struct, default: int | None = ...) -> None: ... + def parse_binary_value( + self, data: SliceableBuffer, display: display.Display | None, length: Unused, format: Unused + ) -> tuple[DictWrapper, SliceableBuffer]: ... + def parse_value(self, val: SliceableBuffer, display: display.Display | None) -> DictWrapper: ... # type: ignore[override] + def pack_value( # type: ignore[override] # Override Callable + self, val: tuple[object, ...] | dict[str, Any] | DictWrapper + ) -> bytes: ... + def check_value(self, val: tuple[_T, ...] | dict[str, _T] | DictWrapper) -> list[_T]: ... # type: ignore[override] + +class PropertyData(ValueField): + structcode: None + def parse_binary_value( + self, data: SliceableBuffer, display: Unused, length: _IntNew | None, format: int + ) -> tuple[tuple[int, SliceableBuffer] | None, SliceableBuffer]: ... + def pack_value( # type: ignore[override] # Override Callable + self, value: tuple[int, Sequence[float] | Sequence[str]] + ) -> tuple[bytes, int, Literal[8, 16, 32]]: ... + +class FixedPropertyData(PropertyData): + size: int + def __init__(self, name: str, size: int) -> None: ... + +class ValueList(Field): + structcode: None + keyword_args: int + default: str # type: ignore[assignment] # Actually different from base class + maskcode: bytes + maskcodelen: int + fields: list[tuple[Field, int]] + def __init__(self, name: str, mask: int, pad: int, *fields: Field) -> None: ... + def pack_value( # type: ignore[override] # Override Callable + self, arg: str | dict[str, Any], keys: dict[str, Any] + ) -> tuple[bytes, None, None]: ... + def parse_binary_value( + self, data: SliceableBuffer, display: display.Display | None, length: Unused, format: Unused + ) -> tuple[DictWrapper, SliceableBuffer]: ... + +class KeyboardMapping(ValueField): + structcode: None + def parse_binary_value( + self, data: SliceableBuffer, display: Unused, length: int | None, format: int + ) -> tuple[list[int], SliceableBuffer]: ... + def pack_value( # type: ignore[override] # Override Callable + self, value: Sequence[Sequence[object]] + ) -> tuple[bytes, int, int]: ... 
+ +class ModifierMapping(ValueField): + structcode: None + def parse_binary_value( + self, data: SliceableBuffer, display: Unused, length: Unused, format: int + ) -> tuple[list[array[int]], SliceableBuffer]: ... + def pack_value( # type: ignore[override] # Override Callable + self, value: _ModifierMappingList8Elements + ) -> tuple[bytes, int, int]: ... + +class EventField(ValueField): + structcode: None + def pack_value(self, value: Event) -> tuple[SliceableBuffer, None, None]: ... # type: ignore[override] # Override Callable + def parse_binary_value( # type: ignore[override] + self, data: SliceableBuffer, display: display.Display, length: Unused, format: Unused + ) -> tuple[Event, SliceableBuffer]: ... + +class ScalarObj: + structcode: str + structvalues: int + parse_value: None + check_value: None + def __init__(self, code: str) -> None: ... + +Card8Obj: ScalarObj +Card16Obj: ScalarObj +Card32Obj: ScalarObj + +class ResourceObj: + structcode: str + structvalues: int + class_name: str + check_value: None + def __init__(self, class_name: str) -> None: ... + def parse_value(self, value: int, display: _BaseDisplay) -> int | _ResourceBaseClass: ... + +WindowObj: ResourceObj +ColormapObj: ResourceObj + +class StrClass: + structcode: None + def pack_value(self, val: str) -> bytes: ... + def parse_binary(self, data: bytes | bytearray, display: Unused) -> tuple[str, bytes | bytearray]: ... + +Str: StrClass + +class Struct: + name: str + check_value: Callable[[Any], Any] | None + keyword_args: bool + fields: tuple[Field] + static_codes: str + static_values: int + static_fields: list[Field] + static_size: int + var_fields: list[Field] + structcode: str | None + structvalues: int + def __init__(self, *fields: Field) -> None: ... + def to_binary(self, *varargs: object, **keys: object) -> bytes: ... + def pack_value(self, value: tuple[object, ...] | dict[str, Any] | DictWrapper) -> bytes: ... + @overload + def parse_value(self, val: SliceableBuffer, display: display.Display | None, rawdict: Literal[True]) -> dict[str, Any]: ... + @overload + def parse_value( + self, val: SliceableBuffer, display: display.Display | None, rawdict: Literal[False] = ... + ) -> DictWrapper: ... + @overload + def parse_binary( + self, data: SliceableBuffer, display: display.Display | None, rawdict: Literal[True] + ) -> tuple[dict[str, Any], SliceableBuffer]: ... + @overload + def parse_binary( + self, data: SliceableBuffer, display: display.Display | None, rawdict: Literal[False] = ... + ) -> tuple[DictWrapper, SliceableBuffer]: ... + # Structs generate their attributes + # TODO: Create a specific type-only class for all instances of `Struct` + @type_check_only + def __getattr__(self, __name: str) -> Any: ... + +class TextElements8(ValueField): + string_textitem: Struct + def pack_value( # type: ignore[override] # Override Callable + self, value: Iterable[Field | str | bytes | tuple[Sequence[object], ...] | dict[str, Sequence[object]] | DictWrapper] + ) -> tuple[bytes, None, None]: ... + def parse_binary_value( # type: ignore[override] # See: https://github.com/python-xlib/python-xlib/pull/249 + self, data: SliceableBuffer, display: display.Display | None, length: Unused, format: Unused + ) -> tuple[list[DictWrapper], Literal[""]]: ... + +class TextElements16(TextElements8): + string_textitem: Struct + +class GetAttrData: + # GetAttrData classes get their attributes dynamically + # TODO: Complete all classes inheriting from GetAttrData + def __getattr__(self, attr: str) -> Any: ... 
+ @type_check_only + def __setattr__(self, __name: str, __value: Any) -> None: ... + +class DictWrapper(GetAttrData): + def __init__(self, dict: dict[str, Any]) -> None: ... + def __getitem__(self, key: str) -> object: ... + def __setitem__(self, key: str, value: object) -> None: ... + def __delitem__(self, key: str) -> None: ... + def __setattr__(self, key: str, value: object) -> None: ... + +class Request: + def __init__( + self, display: _BaseDisplay, onerror: ErrorHandler[object] | None = ..., *args: object, **keys: object + ) -> None: ... + +class ReplyRequest(GetAttrData): + def __init__(self, display: display.Display, defer: int = ..., *args: object, **keys: object) -> None: ... + def reply(self) -> None: ... + +class Event(GetAttrData): + def __init__( + self, binarydata: SliceableBuffer | None = ..., display: display.Display | None = ..., **keys: object + ) -> None: ... + +def call_error_handler( + handler: Callable[[XError, Request | None], _T], error: XError, request: Request | None +) -> _T | Literal[0]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/structs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/structs.pyi new file mode 100644 index 00000000..60e004c4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/protocol/structs.pyi @@ -0,0 +1,26 @@ +from collections.abc import Iterable, Sequence +from typing_extensions import TypeAlias + +from Xlib.protocol import rq + +# Aliases used in other modules +_RGB3IntIterable: TypeAlias = Iterable[int] # noqa: Y047 +_Rectangle4IntSequence: TypeAlias = Sequence[int] # noqa: Y047 +_Segment4IntSequence: TypeAlias = Sequence[int] # noqa: Y047 +_Arc6IntSequence: TypeAlias = Sequence[int] # noqa: Y047 + +# TODO: Complete all classes using WindowValues and GCValues +# Currently *object is used to represent the ValueList instead of the possible attribute types +def WindowValues(arg: str) -> rq.ValueList: ... +def GCValues(arg: str) -> rq.ValueList: ... + +TimeCoord: rq.Struct +Host: rq.Struct +CharInfo: rq.Struct +FontProp: rq.Struct +ColorItem: rq.Struct +RGB: rq.Struct +Point: rq.Struct +Segment: rq.Struct +Rectangle: rq.Struct +Arc: rq.Struct diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/rdb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/rdb.pyi new file mode 100644 index 00000000..4fb7a666 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/rdb.pyi @@ -0,0 +1,97 @@ +from _typeshed import SupportsDunderGT, SupportsDunderLT, SupportsRead +from collections.abc import Iterable, Mapping, Sequence +from re import Pattern +from typing import Any, Protocol, TypeVar, overload +from typing_extensions import TypeAlias + +from Xlib.display import Display +from Xlib.support.lock import _DummyLock + +_T = TypeVar("_T") +_T_contra = TypeVar("_T_contra", contravariant=True) + +_DB: TypeAlias = dict[str, tuple[_DB, ...]] +# A recursive type can be a bit annoying due to dict invariance, +# so this is a slightly less precise version of the _DB alias for parameter annotations +_DB_Param: TypeAlias = dict[str, Any] + +class _SupportsComparisons(SupportsDunderLT[_T_contra], SupportsDunderGT[_T_contra], Protocol[_T_contra]): ... 
+ +comment_re: Pattern[str] +resource_spec_re: Pattern[str] +value_escape_re: Pattern[str] +resource_parts_re: Pattern[str] +NAME_MATCH: int +CLASS_MATCH: int +WILD_MATCH: int +MATCH_SKIP: int + +class OptionError(Exception): ... + +class ResourceDB: + db: _DB + lock: _DummyLock + def __init__( + self, + file: bytes | SupportsRead[str] | None = ..., + string: str | None = ..., + resources: Iterable[tuple[str, object]] | None = ..., + ) -> None: ... + def insert_file(self, file: bytes | SupportsRead[str]) -> None: ... + def insert_string(self, data: str) -> None: ... + def insert_resources(self, resources: Iterable[tuple[str, object]]) -> None: ... + def insert(self, resource: str, value: object) -> None: ... + def __getitem__(self, keys_tuple: tuple[str, str]) -> Any: ... + @overload + def get(self, res: str, cls: str, default: None = ...) -> Any: ... + @overload + def get(self, res: str, cls: str, default: _T) -> _T: ... + def update(self, db: ResourceDB) -> None: ... + def output(self) -> str: ... + def getopt(self, name: str, argv: Sequence[str], opts: Mapping[str, Option]) -> Sequence[str]: ... + +def bin_insert(list: list[_SupportsComparisons[_T]], element: _SupportsComparisons[_T]) -> None: ... +def update_db(dest: _DB_Param, src: _DB_Param) -> None: ... +def copy_group(group: tuple[_DB_Param, ...]) -> tuple[_DB, ...]: ... +def copy_db(db: _DB_Param) -> _DB: ... +def output_db(prefix: str, db: _DB_Param) -> str: ... +def output_escape(value: object) -> str: ... + +class Option: + def parse(self, name: str, db: ResourceDB, args: Sequence[_T]) -> Sequence[_T]: ... + +class NoArg(Option): + specifier: str + value: object + def __init__(self, specifier: str, value: object) -> None: ... + +class IsArg(Option): + specifier: str + def __init__(self, specifier: str) -> None: ... + +class SepArg(Option): + specifier: str + def __init__(self, specifier: str) -> None: ... + +class ResArgClass(Option): + def parse(self, name: str, db: ResourceDB, args: Sequence[str]) -> Sequence[str]: ... # type: ignore[override] + +ResArg: ResArgClass + +class SkipArgClass(Option): ... + +SkipArg: SkipArgClass + +class SkipLineClass(Option): ... + +SkipLine: SkipLineClass + +class SkipNArgs(Option): + count: int + def __init__(self, count: int) -> None: ... + +def get_display_opts( + options: Mapping[str, Option], argv: Sequence[str] = ... +) -> tuple[Display, str, ResourceDB, Sequence[str]]: ... 
+ +stdopts: dict[str, SepArg | NoArg | ResArgClass] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/__init__.pyi new file mode 100644 index 00000000..63adc819 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/__init__.pyi @@ -0,0 +1,3 @@ +from Xlib.support import connect as connect, lock as lock + +__all__ = ["lock", "connect"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/connect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/connect.pyi new file mode 100644 index 00000000..53552b31 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/connect.pyi @@ -0,0 +1,4 @@ +# Ignore OpenVMS in typeshed +from Xlib.support.unix_connect import get_auth as get_auth, get_display as get_display, get_socket as get_socket + +platform: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/lock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/lock.pyi new file mode 100644 index 00000000..236e96e5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/lock.pyi @@ -0,0 +1,8 @@ +from collections.abc import Callable + +class _DummyLock: + acquire: Callable[..., None] + release: Callable[..., None] + locked: Callable[..., None] + +def allocate_lock() -> _DummyLock: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/unix_connect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/unix_connect.pyi new file mode 100644 index 00000000..4c2af3b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/unix_connect.pyi @@ -0,0 +1,25 @@ +import sys +from _socket import _Address +from platform import uname_result +from re import Pattern +from socket import socket +from typing_extensions import Literal, TypeAlias + +from Xlib._typing import Unused + +if sys.platform == "darwin": + SUPPORTED_PROTOCOLS: tuple[None, Literal["tcp"], Literal["unix"], Literal["darwin"]] + _Protocol: TypeAlias = Literal[None, "tcp", "unix", "darwin"] + DARWIN_DISPLAY_RE: Pattern[str] +else: + SUPPORTED_PROTOCOLS: tuple[None, Literal["tcp"], Literal["unix"]] + _Protocol: TypeAlias = Literal[None, "tcp", "unix"] +uname: uname_result +DISPLAY_RE: Pattern[str] + +def get_display(display: str | None) -> tuple[str, str | None, str | None, int, int]: ... +def get_socket(dname: _Address, protocol: _Protocol, host: _Address | None, dno: int) -> socket: ... +def new_get_auth(sock: socket, dname: Unused, protocol: _Protocol, host: Unused, dno: int) -> tuple[bytes, bytes]: ... +def old_get_auth(sock: Unused, dname: _Address, host: Unused, dno: Unused) -> tuple[str | Literal[b""], bytes]: ... 
+ +get_auth = new_get_auth diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/vms_connect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/vms_connect.pyi new file mode 100644 index 00000000..3bbbd766 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/support/vms_connect.pyi @@ -0,0 +1,11 @@ +from _socket import _Address +from re import Pattern +from socket import socket + +from Xlib._typing import Unused + +display_re: Pattern[str] + +def get_display(display: str | None) -> tuple[str, None, str, int, int]: ... +def get_socket(dname: _Address, protocol: Unused, host: _Address, dno: int) -> socket: ... +def get_auth(sock: Unused, dname: Unused, host: Unused, dno: Unused) -> tuple[str, str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/threaded.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/threaded.pyi new file mode 100644 index 00000000..c7314799 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/threaded.pyi @@ -0,0 +1,4 @@ +# This isn't just a re-export from from Xlib.support import lock +# Importing from this module will cause the lock.allocate_lock function to +# return a basic Python lock, instead of the default dummy lock +from Xlib.support import lock as lock diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xauth.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xauth.pyi new file mode 100644 index 00000000..d0dd0970 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xauth.pyi @@ -0,0 +1,17 @@ +from _typeshed import FileDescriptorOrPath + +FamilyInternet: int +FamilyDECnet: int +FamilyChaos: int +FamilyServerInterpreted: int +FamilyInternetV6: int +FamilyLocal: int + +class Xauthority: + entries: list[tuple[bytes, bytes, bytes, bytes, bytes]] + def __init__(self, filename: FileDescriptorOrPath | None = ...) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, i: int) -> tuple[bytes, bytes, bytes, bytes, bytes]: ... + def get_best_auth( + self, family: bytes, address: bytes, dispno: bytes, types: tuple[bytes, ...] = ... + ) -> tuple[bytes, bytes]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/__init__.pyi new file mode 100644 index 00000000..5d06a9d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/__init__.pyi @@ -0,0 +1,10 @@ +from Xlib.xobject import ( + colormap as colormap, + cursor as cursor, + drawable as drawable, + fontable as fontable, + icccm as icccm, + resource as resource, +) + +__all__ = ["colormap", "cursor", "drawable", "fontable", "icccm", "resource"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/colormap.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/colormap.pyi new file mode 100644 index 00000000..a3ebc934 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/colormap.pyi @@ -0,0 +1,24 @@ +from collections.abc import Sequence +from re import Pattern + +from Xlib._typing import ErrorHandler +from Xlib.protocol import request, rq +from Xlib.xobject import resource + +rgb_res: list[Pattern[str]] + +class Colormap(resource.Resource): + __colormap__ = resource.Resource.__resource__ + def free(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def copy_colormap_and_free(self, scr_cmap: int) -> Colormap: ... + def install_colormap(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def uninstall_colormap(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def alloc_color(self, red: int, green: int, blue: int) -> request.AllocColor: ... + def alloc_named_color(self, name: str) -> request.AllocColor | request.AllocNamedColor | None: ... + def alloc_color_cells(self, contiguous: bool, colors: int, planes: int) -> request.AllocColorCells: ... + def alloc_color_planes(self, contiguous: bool, colors: int, red: int, green: int, blue: int) -> request.AllocColorPlanes: ... + def free_colors(self, pixels: Sequence[int], plane_mask: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def store_colors(self, items: dict[str, int], onerror: ErrorHandler[object] | None = ...) -> None: ... + def store_named_color(self, name: str, pixel: int, flags: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def query_colors(self, pixels: Sequence[int]) -> rq.Struct: ... + def lookup_color(self, name: str) -> request.LookupColor: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/cursor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/cursor.pyi new file mode 100644 index 00000000..aeb773dc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/cursor.pyi @@ -0,0 +1,10 @@ +from Xlib._typing import ErrorHandler +from Xlib.protocol.structs import _RGB3IntIterable +from Xlib.xobject import resource + +class Cursor(resource.Resource): + __cursor__ = resource.Resource.__resource__ + def free(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def recolor( + self, foreground: _RGB3IntIterable, background: _RGB3IntIterable, onerror: ErrorHandler[object] | None = ... + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/drawable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/drawable.pyi new file mode 100644 index 00000000..da6615aa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/drawable.pyi @@ -0,0 +1,263 @@ +from collections.abc import Iterable, Sequence +from typing import Any + +from PIL import Image +from Xlib._typing import ErrorHandler +from Xlib.protocol import request, rq +from Xlib.protocol.structs import _Arc6IntSequence, _Rectangle4IntSequence, _RGB3IntIterable, _Segment4IntSequence +from Xlib.xobject import colormap, cursor, fontable, resource + +class Drawable(resource.Resource): + __drawable__ = resource.Resource.__resource__ + def get_geometry(self) -> request.GetGeometry: ... + def create_pixmap(self, width: int, height: int, depth: int) -> Pixmap: ... + def create_gc(self, **keys: object) -> fontable.GC: ... + def copy_area( + self, + gc: int, + src_drawable: int, + src_x: int, + src_y: int, + width: int, + height: int, + dst_x: int, + dst_y: int, + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def copy_plane( + self, + gc: int, + src_drawable: int, + src_x: int, + src_y: int, + width: int, + height: int, + dst_x: int, + dst_y: int, + bit_plane: int, + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def poly_point( + self, gc: int, coord_mode: int, points: Sequence[tuple[int, int]], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def point(self, gc: int, x: int, y: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def poly_line( + self, gc: int, coord_mode: int, points: Sequence[tuple[int, int]], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def line(self, gc: int, x1: int, y1: int, x2: int, y2: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def poly_segment( + self, gc: int, segments: Sequence[_Segment4IntSequence], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def poly_rectangle( + self, gc: int, rectangles: Sequence[_Rectangle4IntSequence], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def rectangle(self, gc: int, x: int, y: int, width: int, height: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def poly_arc(self, gc: int, arcs: Sequence[_Arc6IntSequence], onerror: ErrorHandler[object] | None = ...) -> None: ... + def arc( + self, + gc: int, + x: int, + y: int, + width: int, + height: int, + angle1: int, + angle2: int, + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def fill_poly( + self, gc: int, shape: int, coord_mode: int, points: Sequence[tuple[int, int]], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def poly_fill_rectangle( + self, gc: int, rectangles: Sequence[_Rectangle4IntSequence], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def fill_rectangle( + self, gc: int, x: int, y: int, width: int, height: int, onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def poly_fill_arc(self, gc: int, arcs: Sequence[_Arc6IntSequence], onerror: ErrorHandler[object] | None = ...) -> None: ... + def fill_arc( + self, + gc: int, + x: int, + y: int, + width: int, + height: int, + angle1: int, + angle2: int, + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... 
+ def put_image( + self, + gc: int, + x: int, + y: int, + width: int, + height: int, + format: int, + depth: int, + left_pad: int, + data: bytes | bytearray, + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def put_pil_image(self, gc: int, x: int, y: int, image: Image.Image, onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_image(self, x: int, y: int, width: int, height: int, format: int, plane_mask: int) -> request.GetImage: ... + def draw_text( + self, gc: int, x: int, y: int, text: dict[str, str | int], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def poly_text( + self, gc: int, x: int, y: int, items: Sequence[dict[str, str | int]], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def poly_text_16( + self, gc: int, x: int, y: int, items: Sequence[dict[str, str | int]], onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def image_text(self, gc: int, x: int, y: int, string: str, onerror: ErrorHandler[object] | None = ...) -> None: ... + def image_text_16(self, gc: int, x: int, y: int, string: str, onerror: ErrorHandler[object] | None = ...) -> None: ... + def query_best_size(self, item_class: int, width: int, height: int) -> request.QueryBestSize: ... + +class Window(Drawable): + __window__ = resource.Resource.__resource__ + def create_window( + self, + x: int, + y: int, + width: int, + height: int, + border_width: int, + depth: int, + window_class: int = ..., + visual: int = ..., + onerror: ErrorHandler[object] | None = ..., + **keys: object, + ) -> Window: ... + def change_attributes(self, onerror: ErrorHandler[object] | None = ..., **keys: object) -> None: ... + def get_attributes(self) -> request.GetWindowAttributes: ... + def destroy(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def destroy_sub_windows(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def change_save_set(self, mode: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def reparent(self, parent: int, x: int, y: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def map(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def map_sub_windows(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def unmap(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def unmap_sub_windows(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def configure(self, onerror: ErrorHandler[object] | None = ..., **keys: object) -> None: ... + def circulate(self, direction: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def raise_window(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def query_tree(self) -> request.QueryTree: ... + def change_property( + self, + property: int, + property_type: int, + format: int, + data: Sequence[float] | Sequence[str], + mode: int = ..., + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def change_text_property( + self, property: int, property_type: int, data: bytes | str, mode: int = ..., onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def delete_property(self, property: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_property( + self, property: int, property_type: int, offset: int, length: int, delete: bool = ... + ) -> request.GetProperty | None: ... + def get_full_property(self, property: int, property_type: int, sizehint: int = ...) -> request.GetProperty | None: ... 
+ def get_full_text_property(self, property: int, property_type: int = ..., sizehint: int = ...) -> str | None: ... + def list_properties(self) -> list[int]: ... + def set_selection_owner(self, selection: int, time: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def convert_selection( + self, selection: int, target: int, property: int, time: int, onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def send_event( + self, event: rq.Event, event_mask: int = ..., propagate: bool = ..., onerror: ErrorHandler[object] | None = ... + ) -> None: ... + def grab_pointer( + self, owner_events: bool, event_mask: int, pointer_mode: int, keyboard_mode: int, confine_to: int, cursor: int, time: int + ) -> int: ... + def grab_button( + self, + button: int, + modifiers: int, + owner_events: bool, + event_mask: int, + pointer_mode: int, + keyboard_mode: int, + confine_to: int, + cursor: int, + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def ungrab_button(self, button: int, modifiers: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def grab_keyboard(self, owner_events: bool, pointer_mode: int, keyboard_mode: int, time: int) -> int: ... + def grab_key( + self, + key: int, + modifiers: int, + owner_events: bool, + pointer_mode: int, + keyboard_mode: int, + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def ungrab_key(self, key: int, modifiers: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def query_pointer(self) -> request.QueryPointer: ... + def get_motion_events(self, start: int, stop: int) -> rq.Struct: ... + def translate_coords(self, src_window: int, src_x: int, src_y: int) -> request.TranslateCoords: ... + def warp_pointer( + self, + x: int, + y: int, + src_window: int = ..., + src_x: int = ..., + src_y: int = ..., + src_width: int = ..., + src_height: int = ..., + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def set_input_focus(self, revert_to: int, time: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def clear_area( + self, + x: int = ..., + y: int = ..., + width: int = ..., + height: int = ..., + exposures: bool = ..., + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def create_colormap(self, visual: int, alloc: int) -> colormap.Colormap: ... + def list_installed_colormaps(self) -> list[colormap.Colormap]: ... + def rotate_properties(self, properties: Sequence[int], delta: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def set_wm_name(self, name: bytes | str, onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_wm_name(self) -> str | None: ... + def set_wm_icon_name(self, name: bytes | str, onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_wm_icon_name(self) -> str | None: ... + def set_wm_class(self, inst: str, cls: str, onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_wm_class(self) -> tuple[str, str] | None: ... + def set_wm_transient_for(self, window: Window, onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_wm_transient_for(self) -> Window | None: ... + def set_wm_protocols(self, protocols: Iterable[int], onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_wm_protocols(self) -> list[int]: ... + def set_wm_colormap_windows(self, windows: Iterable[Window], onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_wm_colormap_windows(self) -> Iterable[Window]: ... 
+ def set_wm_client_machine(self, name: bytes | str, onerror: ErrorHandler[object] | None = ...) -> None: ... + def get_wm_client_machine(self) -> str | None: ... + def set_wm_normal_hints( + self, hints: rq.DictWrapper | dict[str, Any] = ..., onerror: ErrorHandler[object] | None = ..., **keys: object + ) -> None: ... + def get_wm_normal_hints(self) -> rq.DictWrapper | None: ... + def set_wm_hints( + self, hints: rq.DictWrapper | dict[str, Any] = ..., onerror: ErrorHandler[object] | None = ..., **keys: object + ) -> None: ... + def get_wm_hints(self) -> rq.DictWrapper | None: ... + def set_wm_state( + self, hints: rq.DictWrapper | dict[str, Any] = ..., onerror: ErrorHandler[object] | None = ..., **keys: object + ) -> None: ... + def get_wm_state(self) -> rq.DictWrapper | None: ... + def set_wm_icon_size( + self, hints: rq.DictWrapper | dict[str, Any] = ..., onerror: ErrorHandler[object] | None = ..., **keys: object + ) -> None: ... + def get_wm_icon_size(self) -> rq.DictWrapper | None: ... + +class Pixmap(Drawable): + __pixmap__ = resource.Resource.__resource__ + def free(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def create_cursor( + self, mask: int, foreground: _RGB3IntIterable, background: _RGB3IntIterable, x: int, y: int + ) -> cursor.Cursor: ... + +def roundup(value: int, unit: int) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/fontable.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/fontable.pyi new file mode 100644 index 00000000..614c19de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/fontable.pyi @@ -0,0 +1,33 @@ +from collections.abc import Sequence + +from Xlib._typing import ErrorHandler +from Xlib.protocol import request +from Xlib.protocol.structs import _RGB3IntIterable +from Xlib.xobject import cursor, resource + +class Fontable(resource.Resource): + __fontable__ = resource.Resource.__resource__ + def query(self) -> request.QueryFont: ... + def query_text_extents(self, string: str) -> request.QueryTextExtents: ... + +class GC(Fontable): + __gc__ = resource.Resource.__resource__ + def change(self, onerror: ErrorHandler[object] | None = ..., **keys: object) -> None: ... + def copy(self, src_gc: int, mask: int, onerror: ErrorHandler[object] | None = ...) -> None: ... + def set_dashes(self, offset: int, dashes: Sequence[int], onerror: ErrorHandler[object] | None = ...) -> None: ... + def set_clip_rectangles( + self, + x_origin: int, + y_origin: int, + rectangles: Sequence[dict[str, int]], + ordering: int, + onerror: ErrorHandler[object] | None = ..., + ) -> None: ... + def free(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + +class Font(Fontable): + __font__ = resource.Resource.__resource__ + def close(self, onerror: ErrorHandler[object] | None = ...) -> None: ... + def create_glyph_cursor( + self, mask: Font, source_char: int, mask_char: int, foreground: _RGB3IntIterable, background: _RGB3IntIterable + ) -> cursor.Cursor: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/icccm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/icccm.pyi new file mode 100644 index 00000000..8f64f1b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/icccm.pyi @@ -0,0 +1,7 @@ +from Xlib.protocol import rq + +Aspect: rq.Struct +WMNormalHints: rq.Struct +WMHints: rq.Struct +WMState: rq.Struct +WMIconSize: rq.Struct diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/resource.pyi new file mode 100644 index 00000000..81577e09 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/python-xlib/Xlib/xobject/resource.pyi @@ -0,0 +1,10 @@ +from Xlib._typing import ErrorHandler +from Xlib.display import _BaseDisplay + +class Resource: + display: _BaseDisplay + id: int + owner: int + def __init__(self, display: _BaseDisplay, rid: int, owner: int = ...) -> None: ... + def __resource__(self) -> int: ... + def kill_client(self, onerror: ErrorHandler[object] | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..c25eb828 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/@tests/stubtest_allowlist.txt @@ -0,0 +1,5 @@ +# "Abstract" methods, see the .pyi file for more details. +pytz.BaseTzInfo.localize +pytz.BaseTzInfo.normalize +pytz.tzinfo.BaseTzInfo.localize +pytz.tzinfo.BaseTzInfo.normalize diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/METADATA.toml new file mode 100644 index 00000000..78850407 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/METADATA.toml @@ -0,0 +1 @@ +version = "2022.7.1" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/__init__.pyi new file mode 100644 index 00000000..e6b4d99a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/__init__.pyi @@ -0,0 +1,46 @@ +import datetime +from _typeshed import Unused +from collections.abc import Mapping +from typing import ClassVar + +from .exceptions import ( + AmbiguousTimeError as AmbiguousTimeError, + InvalidTimeError as InvalidTimeError, + NonExistentTimeError as NonExistentTimeError, + UnknownTimeZoneError as UnknownTimeZoneError, +) +from .tzinfo import BaseTzInfo as BaseTzInfo, DstTzInfo, StaticTzInfo + +# Actually named UTC and then masked with a singleton with the same name +class _UTCclass(BaseTzInfo): + def localize(self, dt: datetime.datetime, is_dst: bool | None = ...) -> datetime.datetime: ... + def normalize(self, dt: datetime.datetime) -> datetime.datetime: ... + def tzname(self, dt: datetime.datetime | None) -> str: ... + def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta: ... 
+ def dst(self, dt: datetime.datetime | None) -> datetime.timedelta: ... + +utc: _UTCclass +UTC: _UTCclass + +def timezone(zone: str) -> _UTCclass | StaticTzInfo | DstTzInfo: ... + +class _FixedOffset(datetime.tzinfo): + zone: ClassVar[None] + def __init__(self, minutes: int) -> None: ... + def utcoffset(self, dt: Unused) -> datetime.timedelta | None: ... + def dst(self, dt: Unused) -> datetime.timedelta: ... + def tzname(self, dt: Unused) -> None: ... + def localize(self, dt: datetime.datetime, is_dst: bool = ...) -> datetime.datetime: ... + def normalize(self, dt: datetime.datetime, is_dst: bool = ...) -> datetime.datetime: ... + +def FixedOffset(offset: int, _tzinfos: dict[int, _FixedOffset] = ...) -> _UTCclass | _FixedOffset: ... + +all_timezones: list[str] +all_timezones_set: set[str] +common_timezones: list[str] +common_timezones_set: set[str] +country_timezones: Mapping[str, list[str]] +country_names: Mapping[str, str] +ZERO: datetime.timedelta +HOUR: datetime.timedelta +VERSION: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/exceptions.pyi new file mode 100644 index 00000000..1880e442 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/exceptions.pyi @@ -0,0 +1,7 @@ +__all__ = ["UnknownTimeZoneError", "InvalidTimeError", "AmbiguousTimeError", "NonExistentTimeError"] + +class Error(Exception): ... +class UnknownTimeZoneError(KeyError, Error): ... +class InvalidTimeError(Error): ... +class AmbiguousTimeError(InvalidTimeError): ... +class NonExistentTimeError(InvalidTimeError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/lazy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/lazy.pyi new file mode 100644 index 00000000..84a7d2e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/lazy.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete +from collections.abc import Iterator, Mapping as DictMixin + +class LazyDict(DictMixin[str, Incomplete]): + data: dict[str, Incomplete] | None + def __getitem__(self, key: str) -> Incomplete: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + +class LazyList(list[Incomplete]): + # does not return `Self` type: + def __new__(cls, fill_iter: Incomplete | None = ...) -> LazyList: ... # noqa: Y034 + +class LazySet(set[Incomplete]): + # does not return `Self` type: + def __new__(cls, fill_iter: Incomplete | None = ...) -> LazySet: ... # noqa: Y034 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/reference.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/reference.pyi new file mode 100644 index 00000000..cf9ceeeb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/reference.pyi @@ -0,0 +1,38 @@ +import datetime + +from pytz import UTC as UTC + +class FixedOffset(datetime.tzinfo): + def __init__(self, offset: float, name: str) -> None: ... + def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta: ... + def tzname(self, dt: datetime.datetime | None) -> str: ... + def dst(self, dt: datetime.datetime | None) -> datetime.timedelta: ... 
+ +STDOFFSET: datetime.timedelta +DSTOFFSET: datetime.timedelta + +class LocalTimezone(datetime.tzinfo): + def utcoffset(self, dt: datetime.datetime) -> datetime.timedelta: ... # type: ignore[override] + def dst(self, dt: datetime.datetime) -> datetime.timedelta: ... # type: ignore[override] + def tzname(self, dt: datetime.datetime) -> str: ... # type: ignore[override] + +Local: LocalTimezone +DSTSTART: datetime.datetime +DSTEND: datetime.datetime + +def first_sunday_on_or_after(dt: datetime.datetime) -> datetime.datetime: ... + +class USTimeZone(datetime.tzinfo): + stdoffset: datetime.timedelta + reprname: str + stdname: str + dstname: str + def __init__(self, hours: float, reprname: str, stdname: str, dstname: str) -> None: ... + def tzname(self, dt: datetime.datetime | None) -> str: ... + def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta: ... + def dst(self, dt: datetime.datetime | None) -> datetime.timedelta: ... + +Eastern: USTimeZone +Central: USTimeZone +Mountain: USTimeZone +Pacific: USTimeZone diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/tzfile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/tzfile.pyi new file mode 100644 index 00000000..db28b757 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/tzfile.pyi @@ -0,0 +1,5 @@ +from typing import IO + +from pytz.tzinfo import DstTzInfo + +def build_tzinfo(zone: str, fp: IO[bytes]) -> DstTzInfo: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/tzinfo.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/tzinfo.pyi new file mode 100644 index 00000000..a744a7ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pytz/pytz/tzinfo.pyi @@ -0,0 +1,39 @@ +import datetime +from abc import abstractmethod +from typing import Any, overload + +class BaseTzInfo(datetime.tzinfo): + zone: str | None # Actually None but should be set on concrete subclasses + # The following abstract methods don't exist in the implementation, but + # are implemented by all sub-classes. + @abstractmethod + def localize(self, dt: datetime.datetime) -> datetime.datetime: ... + @abstractmethod + def normalize(self, dt: datetime.datetime) -> datetime.datetime: ... + @abstractmethod + def tzname(self, dt: datetime.datetime | None) -> str: ... + @abstractmethod + def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + @abstractmethod + def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + +class StaticTzInfo(BaseTzInfo): + def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ... + def localize(self, dt: datetime.datetime, is_dst: bool | None = ...) -> datetime.datetime: ... + def normalize(self, dt: datetime.datetime, is_dst: bool | None = ...) -> datetime.datetime: ... + def tzname(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> str: ... + def utcoffset(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta: ... + def dst(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta: ... + +class DstTzInfo(BaseTzInfo): + def __init__(self, _inf: Any = ..., _tzinfos: Any = ...) -> None: ... + def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ... + def localize(self, dt: datetime.datetime, is_dst: bool | None = ...) 
+    def normalize(self, dt: datetime.datetime) -> datetime.datetime: ...
+    def tzname(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> str: ...
+    # https://github.com/python/mypy/issues/12379
+    @overload  # type: ignore[override]
+    def utcoffset(self, dt: None, is_dst: bool | None = ...) -> None: ...
+    @overload
+    def utcoffset(self, dt: datetime.datetime, is_dst: bool | None = ...) -> datetime.timedelta: ...
+    def dst(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta | None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/@tests/stubtest_allowlist.txt
new file mode 100644
index 00000000..b14e3eb3
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/@tests/stubtest_allowlist.txt
@@ -0,0 +1,8 @@
+pyVmomi.vim
+pyVmomi.vim.event
+pyVmomi.vim.fault
+pyVmomi.vim.option
+pyVmomi.vim.view
+pyVmomi.vmodl
+pyVmomi.vmodl.fault
+pyVmomi.vmodl.query
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/METADATA.toml
new file mode 100644
index 00000000..035acd27
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/METADATA.toml
@@ -0,0 +1,4 @@
+version = "8.0.*"
+
+[tool.stubtest]
+ignore_missing_stub = true
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/__init__.pyi
new file mode 100644
index 00000000..e69de29b
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/__init__.pyi
new file mode 100644
index 00000000..4a243eb0
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/__init__.pyi
@@ -0,0 +1,73 @@
+from _typeshed import Incomplete
+from datetime import datetime
+from enum import Enum
+from typing import Any
+
+from ..vmodl.query import PropertyCollector
+from .event import EventManager
+from .option import OptionManager
+from .view import ViewManager
+
+def __getattr__(name: str) -> Incomplete: ...
+
+class ManagedObject: ...
+
+class ManagedEntity(ManagedObject):
+    _moId: str
+    obj: None
+    name: str
+    def __getattr__(self, name: str) -> Incomplete: ...
+
+class ServiceInstanceContent:
+    setting: OptionManager
+    propertyCollector: PropertyCollector
+    rootFolder: Folder
+    viewManager: ViewManager
+    perfManager: PerformanceManager
+    eventManager: EventManager
+    def __getattr__(self, name: str) -> Incomplete: ...
+
+class ServiceInstance:
+    content: ServiceInstanceContent
+    def CurrentTime(self) -> datetime: ...
+    def __getattr__(self, name: str) -> Incomplete: ...
+
+class PerformanceManager:
+    class MetricId:
+        counterId: int
+        instance: str
+        def __init__(self, counterId: int, instance: str) -> None: ...
+
+    class PerfCounterInfo:
+        key: int
+        groupInfo: Any
+        nameInfo: Any
+        rollupType: Any
+        def __getattr__(self, name: str) -> Incomplete: ...
+
+    class QuerySpec:
+        entity: ManagedEntity
+        metricId: list[PerformanceManager.MetricId]
+        intervalId: int
+        maxSample: int
+        startTime: datetime
+        def __getattr__(self, name: str) -> Incomplete: ...
+
+    class EntityMetricBase:
+        entity: ManagedEntity
+    def QueryPerfCounterByLevel(self, collection_level: int) -> list[PerformanceManager.PerfCounterInfo]: ...
+    def QueryPerf(self, querySpec: list[PerformanceManager.QuerySpec]) -> list[PerformanceManager.EntityMetricBase]: ...
+    def __getattr__(self, name: str) -> Incomplete: ...
+
+class ClusterComputeResource(ManagedEntity): ...
+class ComputeResource(ManagedEntity): ...
+class Datacenter(ManagedEntity): ...
+class Datastore(ManagedEntity): ...
+class Folder(ManagedEntity): ...
+class HostSystem(ManagedEntity): ...
+class VirtualMachine(ManagedEntity): ...
+
+class VirtualMachinePowerState(Enum):
+    poweredOff: int
+    poweredOn: int
+    suspended: int
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/event.pyi
new file mode 100644
index 00000000..92bb9396
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/event.pyi
@@ -0,0 +1,16 @@
+from _typeshed import Incomplete
+from datetime import datetime
+
+def __getattr__(name: str) -> Incomplete: ...
+
+class Event:
+    createdTime: datetime
+
+class EventFilterSpec:
+    class ByTime:
+        def __init__(self, beginTime: datetime) -> None: ...
+    time: EventFilterSpec.ByTime
+
+class EventManager:
+    latestEvent: Event
+    def QueryEvents(self, filer: EventFilterSpec) -> list[Event]: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/fault.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/fault.pyi
new file mode 100644
index 00000000..35be4044
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/fault.pyi
@@ -0,0 +1,7 @@
+from _typeshed import Incomplete
+
+def __getattr__(name: str) -> Incomplete: ...
+
+class InvalidName(Exception): ...
+class RestrictedByAdministrator(Exception): ...
+class NoPermission(Exception): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/option.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/option.pyi
new file mode 100644
index 00000000..0c7b5926
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/option.pyi
@@ -0,0 +1,11 @@
+from _typeshed import Incomplete
+from typing import Any
+
+def __getattr__(name: str) -> Incomplete: ...
+
+class OptionManager:
+    def QueryOptions(self, name: str) -> list[OptionValue]: ...
+
+class OptionValue:
+    value: Any
+    key: str
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/view.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/view.pyi
new file mode 100644
index 00000000..dbb397bd
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vim/view.pyi
@@ -0,0 +1,15 @@
+from _typeshed import Incomplete
+
+from pyVmomi.vim import ManagedEntity
+
+def __getattr__(name: str) -> Incomplete: ...
+
+class ContainerView:
+    def Destroy(self) -> None: ...
+
+class ViewManager:
+    # Doc says the `type` parameter of CreateContainerView is a `list[str]`,
+    # but in practice it seems to be `list[Type[ManagedEntity]]`
+    # Source: https://pubs.vmware.com/vi-sdk/visdk250/ReferenceGuide/vim.view.ViewManager.html
+    @staticmethod
+    def CreateContainerView(container: ManagedEntity, type: list[type[ManagedEntity]], recursive: bool) -> ContainerView: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vmodl/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vmodl/__init__.pyi
new file mode 100644
index 00000000..cf469c1e
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vmodl/__init__.pyi
@@ -0,0 +1,27 @@
+from typing import Any
+
+from .fault import *
+from .query import *
+
+class DynamicData: ...
+
+class DynamicProperty:
+    def __init__(self, *, name: str = ..., val: Any = ...) -> None: ...
+    name: str
+    val: Any
+
+class ManagedObject: ...
+
+class KeyAnyValue(DynamicData):
+    key: str
+    value: Any
+
+class LocalizableMessage(DynamicData):
+    key: str
+    arg: list[KeyAnyValue] | None
+    message: str | None
+
+class MethodFault(DynamicData, Exception):
+    msg: str | None
+    faultCause: MethodFault | None
+    faultMessage: list[LocalizableMessage] | None
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vmodl/fault.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vmodl/fault.pyi
new file mode 100644
index 00000000..8aac5d51
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vmodl/fault.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+
+from pyVmomi.vmodl import ManagedObject
+
+def __getattr__(name: str) -> Incomplete: ...
+
+class InvalidArgument(Exception): ...
+
+class ManagedObjectNotFound(Exception):
+    obj: ManagedObject
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vmodl/query.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vmodl/query.pyi
new file mode 100644
index 00000000..d9c464c5
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pyvmomi/pyVmomi/vmodl/query.pyi
@@ -0,0 +1,62 @@
+from _typeshed import Incomplete
+from typing import Any
+
+from pyVmomi.vim import ManagedEntity
+from pyVmomi.vim.view import ContainerView
+from pyVmomi.vmodl import DynamicProperty
+
+class PropertyCollector:
+    class PropertySpec:
+        def __init__(self, *, all: bool = ..., type: type[ManagedEntity] = ..., pathSet: list[str] = ...) -> None: ...
+        all: bool
+        type: type[ManagedEntity]
+        pathSet: list[str]
+
+    class TraversalSpec:
+        def __init__(
+            self, *, path: str = ..., skip: bool = ..., type: type[ContainerView] = ..., **kwargs: Incomplete
+        ) -> None: ...
+        path: str
+        skip: bool
+        type: type[ContainerView]
+        def __getattr__(self, name: str) -> Incomplete: ...
+
+    class RetrieveOptions:
+        def __init__(self, *, maxObjects: int | None = ...) -> None: ...
+        maxObjects: int | None
+
+    class ObjectSpec:
+        def __init__(
+            self, *, skip: bool = ..., selectSet: list[PropertyCollector.TraversalSpec] = ..., obj: Any = ...
+        ) -> None: ...
+        skip: bool
+        selectSet: list[PropertyCollector.TraversalSpec]
+        obj: Any
+
+    class FilterSpec:
+        def __init__(
+            self,
+            *,
+            propSet: list[PropertyCollector.PropertySpec] = ...,
+            objectSet: list[PropertyCollector.ObjectSpec] = ...,
+            **kwargs: Incomplete,
+        ) -> None: ...
+        propSet: list[PropertyCollector.PropertySpec]
+        objectSet: list[PropertyCollector.ObjectSpec]
+        def __getattr__(self, name: str) -> Incomplete: ...
+
+    class ObjectContent:
+        def __init__(self, *, obj: ManagedEntity = ..., propSet: list[DynamicProperty] = ..., **kwargs: Incomplete) -> None: ...
+        obj: ManagedEntity
+        propSet: list[DynamicProperty]
+        def __getattr__(self, name: str) -> Incomplete: ...
+
+    class RetrieveResult:
+        def __init__(self, *, objects: list[PropertyCollector.ObjectContent] = ..., token: str | None = ...) -> None: ...
+        objects: list[PropertyCollector.ObjectContent]
+        token: str | None
+    def RetrievePropertiesEx(
+        self, specSet: list[PropertyCollector.FilterSpec], options: PropertyCollector.RetrieveOptions
+    ) -> PropertyCollector.RetrieveResult: ...
+    def ContinueRetrievePropertiesEx(self, token: str) -> PropertyCollector.RetrieveResult: ...
+    def __getattr__(self, name: str) -> Incomplete: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/@tests/stubtest_allowlist_win32.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/@tests/stubtest_allowlist_win32.txt
new file mode 100644
index 00000000..e144d280
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/@tests/stubtest_allowlist_win32.txt
@@ -0,0 +1,73 @@
+# Not available at runtime. Contains type definitions that are otherwise not exposed
+_win32typing
+win32com(ext)?.mapi._exchdapi
+win32._wincerapi
+
+# PyWin tool / debugger
+pythonwin.pywin.*
+win32com.client.combrowse
+win32com.client.tlbrowse
+
+# Utilities to generate python bindings
+win32com.client.build.*
+win32com.client.CLSIDToClass
+win32com.client.connect
+# "dynamic.CDispatch" is necessary for mypy to not throw AssertionError
+win32com.client.dynamic.*
+win32com.client.gencache.*
+win32com.client.genpy
+win32com.client.makepy
+win32com.client.selecttlb
+win32com.client.util
+win32com.makegw.*
+(win32.lib.)?pywintypes.__import_pywin32_system_module__
+
+# COM object servers scripts
+win32com.server.factory
+win32com.server.localserver
+win32com.server.register
+win32com.servers.*
+# Active X Scripts
+win32com(ext)?.axscript.client.framework
+win32com(ext)?.axscript.client.pyscript_rexec
+# "pyscript.pyi" is necessary for mypy to not fail due to missing stub
+win32com(ext)?.axscript.client.pyscript.*
+win32com(ext)?.axscript.client.scriptdispatch
+# Other scripts
+isapi.install
+
+# Demos, tests and debugging
+win32com.demos.*
+win32com.servers.test_pycomtest
+win32com.test.*
+win32com(ext)?.axdebug.codecontainer
+win32com(ext)?.axdebug.dump
+win32com(ext)?.axdebug.debugger
+win32com(ext)?.axscript.client.debug
+win32com(ext)?.axscript.client.pydumper
+win32com(ext)?.directsound.test.*
+
+# Deprecated and obsolete
+pythoncom.MakeIID
+pythoncom.MakeTime
+(win32.lib.)?win32pdhquery.Query.addperfcounter
+# Deprecated and makes a buffer of random junk. Use something like `b"\x00" * bufferSize` instead
+(win32.)?win(32|xp)gui.PyMakeBuffer
+
+# failed to import, ImportError: DLL load failed while importing axdebug: The specified module could not be found.
+win32com.axdebug.axdebug +win32com(ext)?.axdebug.codecontainer +# failed to import, ModuleNotFoundError: No module named 'gateways' +win32com(ext)?.axdebug.contexts +# failed to import, ModuleNotFoundError: No module named 'axdebug' +win32com(ext)?.axdebug.adb +win32com(ext)?.axdebug.documents +win32com(ext)?.axdebug.expressions +# failed to import, ModuleNotFoundError: No module named 'expressions' +win32com(ext)?.axdebug.stackframe +# Axdebug is not built on Python 3.11: https://github.com/mhammond/pywin32/blob/main/setup.py#L405 +# failed to import, ImportError: cannot import name 'axdebug' from 'win32com.axdebug' +win32com.axdebug.gateways +win32comext.axdebug.gateways +# failed to import, ModuleNotFoundError: No module named 'win32comext.axdebug.axdebug' +win32comext.axdebug.axdebug diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/METADATA.toml new file mode 100644 index 00000000..0e4489b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/METADATA.toml @@ -0,0 +1,4 @@ +version = "305.*" + +[tool.stubtest] +platforms = ["win32"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/_win32typing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/_win32typing.pyi new file mode 100644 index 00000000..9d730bbd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/_win32typing.pyi @@ -0,0 +1,6069 @@ +# Not available at runtime. Contains type definitions that are otherwise not exposed and not part of a specific module. +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import overload +from typing_extensions import Literal, Self, final + +class ArgNotFound: ... +class PyOleEmpty: ... +class PyOleMissing: ... +class PyOleNothing: ... + +class PyDSCAPSType: + @property + def dwFlags(self): ... + @property + def dwFreeHw3DAllBuffers(self): ... + @property + def dwFreeHw3DStaticBuffers(self): ... + @property + def dwFreeHw3DStreamingBuffers(self): ... + @property + def dwFreeHwMemBytes(self): ... + @property + def dwFreeHwMixingAllBuffers(self): ... + @property + def dwFreeHwMixingStaticBuffers(self): ... + @property + def dwFreeHwMixingStreamingBuffers(self): ... + @property + def dwMaxContigFreeHwMemBytes(self): ... + @property + def dwMaxHw3DAllBuffers(self): ... + @property + def dwMaxHw3DStaticBuffers(self): ... + @property + def dwMaxHw3DStreamingBuffers(self): ... + @property + def dwMaxHwMixingAllBuffers(self): ... + @property + def dwMaxHwMixingStaticBuffers(self): ... + @property + def dwMaxHwMixingStreamingBuffers(self): ... + @property + def dwMaxSecondarySampleRate(self): ... + @property + def dwMinSecondarySampleRate(self): ... + @property + def dwPlayCpuOverheadSwBuffers(self): ... + @property + def dwPrimaryBuffers(self): ... + @property + def dwTotalHwMemBytes(self): ... + @property + def dwUnlockTransferRateHwBuffers(self): ... + +class PyDSCBCAPSType: + @property + def dwBufferBytes(self): ... + @property + def dwFlags(self): ... + +class PyDSCCAPSType: + @property + def dwChannels(self): ... + @property + def dwFlags(self): ... + @property + def dwFormats(self): ... + +@final +class PyNCB: + @property + def Bufflen(self): ... + @property + def Callname(self): ... + @property + def Cmd_cplt(self): ... 
+ @property + def Command(self): ... + @property + def Event(self): ... + @property + def Lana_num(self): ... + @property + def Lsn(self): ... + @property + def Name(self): ... + @property + def Num(self): ... + @property + def Post(self): ... + def Reset(self, *args, **kwargs): ... # incomplete + @property + def Retcode(self): ... + @property + def Rto(self): ... + @property + def Sto(self): ... + +class COMMTIMEOUTS: ... +class CopyProgressRoutine: ... + +class DOCINFO: + @property + def DocName(self) -> str: ... + @property + def Output(self) -> str: ... + @property + def DataType(self) -> str: ... + @property + def Type(self): ... + +class ExportCallback: ... + +class FORM_INFO_1: + @property + def Flags(self): ... + @property + def Name(self) -> str: ... + @property + def Size(self): ... + @property + def ImageableArea(self): ... + +class ImportCallback: ... +class LARGE_INTEGER: ... + +class NCB: + @property + def Command(self): ... + @property + def Retcode(self): ... + @property + def Lsn(self): ... + @property + def Num(self): ... + @property + def Bufflen(self): ... + @property + def Callname(self) -> str: ... + @property + def Name(self) -> str: ... + @property + def Rto(self) -> str: ... + @property + def Sto(self) -> str: ... + @property + def Lana_num(self): ... + @property + def Cmd_cplt(self): ... + @property + def Event(self): ... + @property + def Post(self): ... + +class PRINTER_DEFAULTS: + @property + def pDatatype(self) -> str: ... + @property + def pDevMode(self) -> PyDEVMODE: ... + @property + def DesiredAccess(self): ... + +class PyACL: + def Initialize(self) -> None: ... + def IsValid(self) -> bool: ... + @overload + def AddAccessAllowedAce(self, __access: int, __sid: PySID) -> None: ... + @overload + def AddAccessAllowedAce(self, __revision: int, __access: int, __sid: PySID) -> None: ... + def AddAccessAllowedAceEx(self, __revision: int, __aceflags: int, __access: int, __sid: PySID) -> None: ... + def AddAccessAllowedObjectAce( + self, AceRevision, AceFlags, AccessMask, ObjectTypeGuid: PyIID, InheritedObjectTypeGuid: PyIID, sid: PySID + ) -> None: ... + def AddAccessDeniedAce(self, __revision: int, __access: int, __sid: PySID, __access1: int, __sid1: PySID) -> None: ... + def AddAccessDeniedAceEx(self, __revision: int, __aceflags: int, __access: int, __sid: PySID) -> None: ... + def AddMandatoryAce(self, AceRevision, AceFlags, MandatoryPolicy, LabelSid: PySID) -> None: ... + def AddAuditAccessAce(self, dwAceRevision, dwAccessMask, sid: PySID, bAuditSuccess, bAuditFailure) -> None: ... + def AddAuditAccessAceEx(self, dwAceRevision, AceFlags, dwAccessMask, sid: PySID, bAuditSuccess, bAuditFailure) -> None: ... + def AddAuditAccessObjectAce( + self, + dwAceRevision, + AceFlags, + dwAccessMask, + ObjectTypeGuid: PyIID, + InheritedObjectTypeGuid: PyIID, + sid: PySID, + bAuditSuccess, + bAuditFailure, + ) -> None: ... + def GetAclSize(self): ... + def GetAclRevision(self): ... + def GetAceCount(self) -> int: ... + def GetAce(self, __index: int) -> tuple[tuple[int, int], int, PySID]: ... + def DeleteAce(self, __index: int) -> None: ... + def GetEffectiveRightsFromAcl(self, __trustee: PyTRUSTEE | dict[str, int | PySID]) -> int: ... + def GetAuditedPermissionsFromAcl(self, trustee: PyTRUSTEE) -> tuple[Incomplete, Incomplete]: ... + def SetEntriesInAcl(self, __obexpl_list: tuple[dict[str, int | dict[str, int | PySID]], ...]) -> PyACL: ... + def GetExplicitEntriesFromAcl(self) -> tuple[dict[str, int | dict[str, int | PySID]]] | None: ... 
+ +class PyBITMAP: + @property + def bmType(self) -> int: ... + @property + def bmWidth(self) -> int: ... + @property + def bmHeight(self) -> int: ... + @property + def bmWidthBytes(self) -> int: ... + @property + def bmPlanes(self) -> int: ... + +class PyBLENDFUNCTION: ... +class PyCEHANDLE: ... + +class PyCERTSTORE: + @property + def HCERTSTORE(self): ... + # Flags argument is deprecated. + # The underlying function is now always called with `CERT_CLOSE_STORE_CHECK_FLAG`, + # and support for this param will be dropped at some point in the future. + def CertCloseStore(self, Flags: int = ...) -> None: ... + def CertControlStore(self, Flags, CtrlType, CtrlPara: int) -> None: ... + def CertEnumCertificatesInStore(self) -> list[PyCERT_CONTEXT]: ... + def CertEnumCTLsInStore(self) -> list[PyCTL_CONTEXT]: ... + def CertSaveStore(self, MsgAndCertEncodingType, SaveAs, SaveTo, SaveToPara: str | int, Flags=...) -> None: ... + def CertAddEncodedCertificateToStore(self, CertEncodingType, CertEncoded, AddDisposition) -> PyCERT_CONTEXT: ... + def CertAddCertificateContextToStore(self, CertContext: PyCERT_CONTEXT, AddDisposition) -> PyCERT_CONTEXT: ... + def CertAddCertificateLinkToStore(self, CertContext: PyCERT_CONTEXT, AddDisposition) -> PyCERT_CONTEXT: ... + def CertAddCTLContextToStore(self, CtlContext: PyCTL_CONTEXT, AddDisposition) -> PyCTL_CONTEXT: ... + def CertAddCTLLinkToStore(self, CtlContext: PyCTL_CONTEXT, AddDisposition) -> PyCTL_CONTEXT: ... + def CertAddStoreToCollection(self, SiblingStore: PyCERTSTORE, UpdateFlag: int = ..., Priority: int = ...) -> None: ... + def CertRemoveStoreFromCollection(self, SiblingStore: PyCERTSTORE) -> None: ... + def PFXExportCertStoreEx(self, Flags, Password: Incomplete | None = ...): ... + +class PyCERT_ALT_NAME_ENTRY: ... +class PyCERT_ALT_NAME_INFO: ... + +class PyCERT_AUTHORITY_KEY_ID_INFO: + @property + def KeyId(self): ... + @property + def CertIssuer(self): ... + @property + def CertSerialNumber(self): ... + +class PyCERT_BASIC_CONSTRAINTS2_INFO: + @property + def fCA(self): ... + @property + def fPathLenConstraint(self): ... + @property + def PathLenConstraint(self): ... + +class PyCERT_BASIC_CONSTRAINTS_INFO: + @property + def SubjectType(self) -> PyCRYPT_BIT_BLOB: ... + @property + def fPathLenConstraint(self): ... + @property + def PathLenConstraint(self): ... + @property + def SubtreesConstraint(self): ... + +class PyCERT_CONTEXT: + @property + def HANDLE(self): ... + @property + def CertStore(self) -> PyCERTSTORE: ... + @property + def CertEncoded(self): ... + @property + def CertEncodingType(self): ... + @property + def Version(self): ... + @property + def Subject(self) -> str: ... + @property + def Issuer(self) -> str: ... + @property + def NotBefore(self) -> PyTime: ... + @property + def NotAfter(self) -> PyTime: ... + @property + def SignatureAlgorithm(self): ... + @property + def Extension(self) -> tuple[PyCERT_EXTENSION, ...]: ... + @property + def SubjectPublicKeyInfo(self) -> PyCERT_PUBLIC_KEY_INFO: ... + @property + def SerialNumber(self): ... + def CertFreeCertificateContext(self) -> None: ... + def CertEnumCertificateContextProperties(self) -> list[Incomplete]: ... + def CryptAcquireCertificatePrivateKey(self, Flags: int = ...) -> tuple[Incomplete, PyCRYPTPROV]: ... + def CertGetIntendedKeyUsage(self): ... + def CertGetEnhancedKeyUsage(self, Flags: int = ...): ... + def CertSerializeCertificateStoreElement(self, Flags: int = ...) -> str: ... + def CertVerifySubjectCertificateContext(self, Issuer: PyCERT_CONTEXT, Flags): ... 
+ def CertDeleteCertificateFromStore(self) -> None: ... + def CertGetCertificateContextProperty(self, PropId): ... + def CertSetCertificateContextProperty(self, PropId, Data, Flags: int = ...) -> None: ... + +class PyCERT_EXTENSION: + @property + def ObjId(self): ... + @property + def Critical(self): ... + @property + def Value(self): ... + +class PyCERT_KEY_ATTRIBUTES_INFO: + @property + def KeyId(self): ... + @property + def IntendedKeyUsage(self) -> PyCRYPT_BIT_BLOB: ... + @property + def PrivateKeyUsagePeriod(self): ... + +class PyCERT_NAME_INFO: ... +class PyCERT_NAME_VALUE: ... +class PyCERT_OTHER_NAME: ... + +class PyCERT_POLICY_INFO: + @property + def PolicyIdentifier(self): ... + @property + def PolicyQualifier(self): ... + +class PyCERT_PUBLIC_KEY_INFO: + @property + def Algorithm(self) -> PyCRYPT_ALGORITHM_IDENTIFIER: ... + @property + def PublicKey(self) -> PyCRYPT_BIT_BLOB: ... + +class PyCOMSTAT: + @property + def cbInQue(self) -> int: ... + @property + def cbOutQue(self) -> int: ... + @property + def fCtsHold(self) -> int: ... + @property + def fDsrHold(self) -> int: ... + @property + def fRlsdHold(self) -> int: ... + @property + def fXoffHold(self) -> int: ... + @property + def fXoffSent(self) -> int: ... + @property + def fEof(self) -> int: ... + @property + def fTxim(self) -> int: ... + @property + def fReserved(self) -> int: ... + +@final +class PyCOORD: + @property + def X(self): ... + @property + def Y(self): ... + +class PyCREDENTIAL: + @property + def Flags(self): ... + @property + def Type(self): ... + @property + def TargetName(self) -> str: ... + @property + def Comment(self) -> str: ... + @property + def LastWritten(self) -> PyTime: ... + @property + def CredentialBlob(self) -> str: ... + @property + def Persist(self): ... + @property + def Attributes(self): ... + @property + def TargetAlias(self) -> str: ... + @property + def UserName(self) -> str: ... + +class PyCREDENTIAL_ATTRIBUTE: + @property + def Keyword(self) -> str: ... + @property + def Flags(self): ... + @property + def Value(self): ... + +class PyCREDENTIAL_TARGET_INFORMATION: + @property + def TargetName(self) -> str: ... + @property + def NetbiosServerName(self) -> str: ... + @property + def DnsServerName(self) -> str: ... + @property + def NetbiosDomainName(self) -> str: ... + @property + def DnsDomainName(self) -> str: ... + @property + def DnsTreeName(self) -> str: ... + @property + def PackageName(self) -> str: ... + @property + def Flags(self): ... + @property + def CredTypes(self) -> tuple[Incomplete, ...]: ... + +class PyCREDUI_INFO: + @property + def Parent(self) -> int: ... + @property + def MessageText(self) -> str: ... + @property + def CaptionText(self) -> str: ... + @property + def Banner(self) -> int: ... + +class PyCRYPTHASH: + def CryptDestroyHash(self) -> None: ... + def CryptDuplicateHash(self, Flags: int = ...) -> PyCRYPTHASH: ... + def CryptHashData(self, Data: str, Flags: int = ...) -> None: ... + def CryptHashSessionKey(self, Key: PyCRYPTKEY, Flags: int = ...) -> None: ... + def CryptSignHash(self, KeySpec, Flags: int = ...) -> str: ... + def CryptVerifySignature(self, Signature: str, PubKey: PyCRYPTKEY, Flags: int = ...) -> None: ... + def CryptGetHashParam(self, Param, Flags: int = ...): ... + +class PyCRYPTKEY: + @property + def HCRYPTPROV(self): ... + @property + def HCRYPTKEY(self): ... + def CryptDestroyKey(self) -> None: ... + def CryptExportKey(self, ExpKey: PyCRYPTKEY, BlobType, Flags: int = ...): ... + def CryptGetKeyParam(self, Param, Flags: int = ...): ... 
+ def CryptDuplicateKey(self, Reserved: int = ..., Flags: int = ...) -> PyCRYPTKEY: ... + def CryptEncrypt(self, Final, Data, Hash: PyCRYPTHASH | None = ..., Flags: int = ...): ... + def CryptDecrypt(self, Final, Data, Hash: PyCRYPTHASH | None = ..., Flags: int = ...): ... + +class PyCRYPTMSG: + @property + def HCRYPTMSG(self): ... + def CryptMsgClose(self) -> None: ... + +class PyCRYPTPROTECT_PROMPTSTRUCT: ... + +class PyCRYPTPROV: + def CryptReleaseContext(self, Flags: int = ...) -> None: ... + def CryptGenKey(self, Algid, Flags, KeyLen: int = ...) -> PyCRYPTKEY: ... + def CryptGetProvParam(self, Param, Flags: int = ...) -> None: ... + def CryptGetUserKey(self, KeySpec) -> PyCRYPTKEY: ... + def CryptGenRandom(self, Len, SeedData: str | None = ...) -> str: ... + def CryptCreateHash(self, Algid, Key: PyCRYPTKEY | None = ..., Flags: int = ...) -> PyCRYPTHASH: ... + def CryptImportKey(self, Data, PubKey: PyCRYPTKEY | None = ..., Flags: int = ...) -> PyCRYPTKEY: ... + def CryptExportPublicKeyInfo(self, KeySpec, CertEncodingType) -> PyCERT_PUBLIC_KEY_INFO: ... + def CryptImportPublicKeyInfo(self, Info, CertEncodingType) -> PyCRYPTKEY: ... + +class PyCRYPT_ALGORITHM_IDENTIFIER: + @property + def ObjId(self): ... + @property + def Parameters(self): ... + +class PyCRYPT_ATTRIBUTE: + @property + def ObjId(self): ... + @property + def Value(self) -> tuple[Incomplete, ...]: ... + +class PyCRYPT_BIT_BLOB: + @property + def Data(self): ... + @property + def UnusedBits(self): ... + +class PyCRYPT_DECRYPT_MESSAGE_PARA: + @property + def CertStores(self) -> tuple[Incomplete, ...]: ... + @property + def MsgAndCertEncodingType(self): ... + @property + def Flags(self): ... + +class PyCRYPT_ENCRYPT_MESSAGE_PARA: + @property + def ContentEncryptionAlgorithm(self) -> PyCRYPT_ALGORITHM_IDENTIFIER: ... + @property + def CryptProv(self) -> PyCRYPTPROV: ... + @property + def EncryptionAuxInfo(self): ... + @property + def Flags(self): ... + @property + def InnerContentType(self): ... + @property + def MsgEncodingType(self): ... + +class PyCRYPT_SIGN_MESSAGE_PARA: + @property + def SigningCert(self) -> PyCERT_CONTEXT: ... + @property + def HashAlgorithm(self) -> PyCRYPT_ALGORITHM_IDENTIFIER: ... + @property + def HashAuxInfo(self): ... + @property + def MsgCert(self) -> tuple[PyCERT_CONTEXT, ...]: ... + @property + def MsgCrl(self) -> tuple[Incomplete, ...]: ... + @property + def AuthAttr(self) -> tuple[PyCRYPT_ATTRIBUTE, ...]: ... + @property + def UnauthAttr(self) -> tuple[PyCRYPT_ATTRIBUTE, ...]: ... + @property + def Flags(self): ... + @property + def InnerContentType(self): ... + @property + def MsgEncodingType(self): ... + +class PyCRYPT_VERIFY_MESSAGE_PARA: + @property + def MsgAndCertEncodingType(self): ... + @property + def CryptProv(self) -> PyCRYPTPROV: ... + @property + def PyGetSignerCertificate(self): ... + @property + def GetArg(self): ... + +class PyCTL_CONTEXT: + @property + def HCTL_CONTEXT(self): ... + def CertFreeCTLContext(self) -> None: ... + def CertEnumCTLContextProperties(self) -> tuple[Incomplete, ...]: ... + def CertEnumSubjectInSortedCTL(self) -> tuple[tuple[Incomplete, Incomplete], ...]: ... + def CertDeleteCTLFromStore(self) -> None: ... + def CertSerializeCTLStoreElement(self, Flags: int = ...) -> str: ... + +class PyCTL_USAGE: ... + +@final +class PyConsoleScreenBuffer: + def SetConsoleActiveScreenBuffer(self) -> None: ... + def GetConsoleCursorInfo(self) -> tuple[Incomplete, Incomplete]: ... + def SetConsoleCursorInfo(self, Size, Visible) -> None: ... + def GetConsoleMode(self): ... 
+ def SetConsoleMode(self, Mode) -> None: ... + def ReadConsole(self, NumberOfCharsToRead): ... + def WriteConsole(self, Buffer): ... + def FlushConsoleInputBuffer(self) -> None: ... + def SetConsoleTextAttribute(self, __Attributes: int) -> None: ... + def SetConsoleCursorPosition(self, CursorPosition: PyCOORD) -> None: ... + def SetConsoleScreenBufferSize(self, Size: PyCOORD) -> None: ... + def SetConsoleWindowInfo(self, Absolute, ConsoleWindow: PySMALL_RECT) -> None: ... + def GetConsoleScreenBufferInfo(self): ... + def GetLargestConsoleWindowSize(self) -> PyCOORD: ... + def FillConsoleOutputAttribute(self, Attribute, Length, WriteCoord: PyCOORD): ... + def FillConsoleOutputCharacter(self, Character, Length, WriteCoord: PyCOORD): ... + def ReadConsoleOutputCharacter(self, Length, ReadCoord: PyCOORD) -> str: ... + def ReadConsoleOutputAttribute(self, Length, ReadCoord: PyCOORD) -> tuple[Incomplete, ...]: ... + def WriteConsoleOutputCharacter(self, Characters, WriteCoord: PyCOORD): ... + def WriteConsoleOutputAttribute(self, Attributes: tuple[Incomplete, ...], WriteCoord: PyCOORD): ... + def ScrollConsoleScreenBuffer( + self, ScrollRectangle: PySMALL_RECT, ClipRectangle: PySMALL_RECT, DestinationOrigin: PyCOORD, FillCharacter, FillAttribute + ) -> None: ... + def GetCurrentConsoleFont(self, MaximumWindow: bool = ...) -> tuple[Incomplete, PyCOORD]: ... + def GetConsoleFontSize(self, Font) -> PyCOORD: ... + def SetConsoleFont(self, Font) -> None: ... + def SetStdHandle(self, StdHandle) -> None: ... + def SetConsoleDisplayMode(self, Flags, NewScreenBufferDimensions: PyCOORD) -> None: ... + def WriteConsoleInput(self, __Buffer: Iterable[PyINPUT_RECORD]): ... + def ReadConsoleInput(self, Length) -> tuple[PyINPUT_RECORD, ...]: ... + def PeekConsoleInput(self, Length) -> tuple[PyINPUT_RECORD, ...]: ... + def GetNumberOfConsoleInputEvents(self): ... + def Close(self, *args, **kwargs): ... # incomplete + def Detach(self, *args, **kwargs): ... # incomplete + +class PyCredHandle: + def Detach(self): ... + def FreeCredentialsHandle(self) -> None: ... + def QueryCredentialsAttributes(self, Attribute) -> None: ... + +class PyCtxtHandle: + def Detach(self): ... + def CompleteAuthToken(self, Token: PySecBufferDesc) -> None: ... + def QueryContextAttributes(self, Attribute) -> None: ... + def DeleteSecurityContext(self) -> None: ... + def QuerySecurityContextToken(self): ... + def MakeSignature(self, fqop, Message: PySecBufferDesc, MessageSeqNo) -> None: ... + def VerifySignature(self, Message: PySecBufferDesc, MessageSeqNo) -> None: ... + def EncryptMessage(self, fqop, Message: PySecBufferDesc, MessageSeqNo) -> None: ... + def DecryptMessage(self, Message: PySecBufferDesc, MessageSeqNo) -> None: ... + def ImpersonateSecurityContext(self) -> None: ... + def RevertSecurityContext(self) -> None: ... + +class PyDCB: + @property + def BaudRate(self) -> int: ... + @property + def wReserved(self) -> int: ... + @property + def XonLim(self) -> int: ... + @property + def XoffLim(self) -> int: ... + @property + def ByteSize(self) -> int: ... + @property + def Parity(self) -> int: ... + @property + def StopBits(self) -> int: ... + @property + def XonChar(self) -> str: ... + @property + def XoffChar(self) -> str: ... + @property + def ErrorChar(self) -> str: ... + @property + def EofChar(self) -> str: ... + @property + def EvtChar(self) -> str: ... + @property + def wReserved1(self) -> int: ... + @property + def fBinary(self) -> int: ... + @property + def fParity(self) -> int: ... 
+ @property + def fOutxCtsFlow(self) -> int: ... + @property + def fOutxDsrFlow(self) -> int: ... + @property + def fDtrControl(self) -> int: ... + @property + def fDsrSensitivity(self) -> int: ... + @property + def fTXContinueOnXoff(self) -> int: ... + @property + def fOutX(self) -> int: ... + @property + def fInX(self) -> int: ... + @property + def fErrorChar(self) -> int: ... + @property + def fNull(self) -> int: ... + @property + def fRtsControl(self) -> int: ... + @property + def fAbortOnError(self) -> int: ... + @property + def fDummy2(self) -> int: ... + +class PyDEVMODE: + @property + def SpecVersion(self) -> int: ... + @property + def DriverVersion(self) -> int: ... + @property + def Size(self) -> int: ... + @property + def DriverExtra(self) -> int: ... + @property + def Fields(self) -> int: ... + @property + def Orientation(self) -> int: ... + @property + def PaperSize(self) -> int: ... + @property + def PaperLength(self) -> int: ... + @property + def PaperWidth(self) -> int: ... + @property + def Position_x(self) -> int: ... + @property + def Position_y(self) -> int: ... + @property + def DisplayOrientation(self) -> int: ... + @property + def DisplayFixedOutput(self) -> int: ... + @property + def Scale(self) -> int: ... + @property + def Copies(self) -> int: ... + @property + def DefaultSource(self) -> int: ... + @property + def PrintQuality(self) -> int: ... + @property + def Color(self) -> int: ... + @property + def Duplex(self) -> int: ... + @property + def YResolution(self) -> int: ... + @property + def TTOption(self) -> int: ... + @property + def Collate(self) -> int: ... + @property + def LogPixels(self) -> int: ... + @property + def BitsPerPel(self) -> int: ... + @property + def PelsWidth(self) -> int: ... + @property + def PelsHeight(self) -> int: ... + @property + def DisplayFlags(self) -> int: ... + @property + def DisplayFrequency(self) -> int: ... + @property + def ICMMethod(self) -> int: ... + @property + def ICMIntent(self) -> int: ... + @property + def MediaType(self) -> int: ... + @property + def DitherType(self) -> int: ... + @property + def Reserved1(self) -> int: ... + @property + def Reserved2(self) -> int: ... + @property + def Nup(self) -> int: ... + @property + def PanningWidth(self) -> int: ... + @property + def PanningHeight(self) -> int: ... + @property + def DeviceName(self) -> str: ... + @property + def FormName(self) -> str: ... + @property + def DriverData(self) -> Incomplete | None: ... + def Clear(self) -> None: ... + +class PyDEVMODEW: + @property + def SpecVersion(self) -> int: ... + @property + def DriverVersion(self) -> int: ... + @property + def Size(self) -> int: ... + @property + def DriverExtra(self) -> int: ... + @property + def Fields(self) -> int: ... + @property + def Orientation(self) -> int: ... + @property + def PaperSize(self) -> int: ... + @property + def PaperLength(self) -> int: ... + @property + def PaperWidth(self) -> int: ... + @property + def Position_x(self) -> int: ... + @property + def Position_y(self) -> int: ... + @property + def DisplayOrientation(self) -> int: ... + @property + def DisplayFixedOutput(self) -> int: ... + @property + def Scale(self) -> int: ... + @property + def Copies(self) -> int: ... + @property + def DefaultSource(self) -> int: ... + @property + def PrintQuality(self) -> int: ... + @property + def Color(self) -> int: ... + @property + def Duplex(self) -> int: ... + @property + def YResolution(self) -> int: ... + @property + def TTOption(self) -> int: ... + @property + def Collate(self) -> int: ... 
+ @property + def LogPixels(self) -> int: ... + @property + def BitsPerPel(self) -> int: ... + @property + def PelsWidth(self) -> int: ... + @property + def PelsHeight(self) -> int: ... + @property + def DisplayFlags(self) -> int: ... + @property + def DisplayFrequency(self) -> int: ... + @property + def ICMMethod(self) -> int: ... + @property + def ICMIntent(self) -> int: ... + @property + def MediaType(self) -> int: ... + @property + def DitherType(self) -> int: ... + @property + def Reserved1(self) -> int: ... + @property + def Reserved2(self) -> int: ... + @property + def Nup(self) -> int: ... + @property + def PanningWidth(self) -> int: ... + @property + def PanningHeight(self) -> int: ... + @property + def DeviceName(self) -> str: ... + @property + def FormName(self) -> str: ... + @property + def DriverData(self) -> Incomplete | None: ... + +class PyDISPLAY_DEVICE: + @property + def Size(self) -> int: ... + @property + def DeviceName(self) -> str: ... + @property + def DeviceString(self) -> str: ... + @property + def StateFlags(self) -> int: ... + @property + def DeviceID(self) -> str: ... + @property + def DeviceKey(self) -> str: ... + def Clear(self) -> None: ... + +class PyDLGITEMTEMPLATE: ... +class PyDLGTEMPLATE: ... +class PyDS_HANDLE: ... +class PyDS_NAME_RESULT_ITEM: ... + +class PyDateTime: + def Format(self): ... + +class PyDialogTemplate: ... +class PyEVTLOG_HANDLE: ... +class PyEVT_HANDLE: ... +class PyEVT_RPC_LOGIN: ... + +class PyEventLogRecord: + @property + def Reserved(self) -> int: ... + @property + def RecordNumber(self) -> int: ... + @property + def TimeGenerated(self) -> PyTime: ... + @property + def TimeWritten(self) -> PyTime: ... + @property + def EventID(self) -> int: ... + @property + def EventType(self) -> int: ... + @property + def EventCategory(self) -> int: ... + @property + def ReservedFlags(self) -> int: ... + @property + def ClosingRecordNumber(self) -> int: ... + @property + def SourceName(self) -> str: ... + @property + def StringInserts(self) -> tuple[str, ...]: ... + @property + def Sid(self) -> PySID | None: ... + @property + def Data(self) -> str: ... + @property + def ComputerName(self) -> str: ... + +class PyGROUP_INFO_0: + @property + def name(self) -> str: ... + +class PyGROUP_INFO_1: + @property + def name(self) -> str: ... + @property + def comment(self) -> str: ... + +class PyGROUP_INFO_1002: + @property + def comment(self) -> str: ... + +class PyGROUP_INFO_1005: + @property + def attributes(self): ... + +class PyGROUP_INFO_2: + @property + def name(self) -> str: ... + @property + def comment(self) -> str: ... + @property + def group_id(self): ... + @property + def attributes(self): ... + +class PyGROUP_USERS_INFO_0: + @property + def name(self) -> str: ... + +class PyGROUP_USERS_INFO_1: + @property + def name(self) -> str: ... + @property + def attributes(self): ... + +class PyGdiHANDLE: ... +class PyGetSignerCertificate: ... + +class PyHANDLE: + @property + def handle(self) -> int: ... + def Close(self) -> None: ... + def close(self) -> None: ... + def Detach(self) -> Self: ... + +@final +class PyHDESK: + def SetThreadDesktop(self) -> None: ... + def EnumDesktopWindows(self) -> tuple[int, ...]: ... + def SwitchDesktop(self) -> None: ... + def CloseDesktop(self) -> None: ... + def Detach(self, *args, **kwargs): ... # incomplete + +class PyHDEVNOTIFY: ... + +class PyHHNTRACK: + @property + def action(self): ... + @property + def hdr(self): ... + @property + def curUrl(self) -> str: ... + @property + def winType(self): ... 
+ +class PyHHN_NOTIFY: + @property + def hdr(self): ... + @property + def url(self) -> str: ... + +class PyHH_AKLINK: + @property + def indexOnFail(self): ... + @property + def keywords(self) -> str: ... + @property + def url(self) -> str: ... + @property + def msgText(self) -> str: ... + @property + def msgTitle(self) -> str: ... + @property + def window(self) -> str: ... + +class PyHH_FTS_QUERY: + @property + def uniCodeStrings(self): ... + @property + def proximity(self): ... + @property + def stemmedSearch(self): ... + @property + def titleOnly(self): ... + @property + def execute(self): ... + @property + def searchQuery(self) -> str: ... + +class PyHH_POPUP: + @property + def hinst(self): ... + @property + def idString(self): ... + @property + def clrForeground(self): ... + @property + def clrBackground(self): ... + @property + def text(self) -> str: ... + @property + def font(self) -> str: ... + @property + def pt(self): ... + @property + def margins(self): ... + +class PyHH_WINTYPE: + @property + def uniCodeStrings(self): ... + @property + def validMembers(self): ... + @property + def winProperties(self): ... + @property + def styles(self): ... + @property + def exStyles(self): ... + @property + def showState(self): ... + @property + def hwndHelp(self): ... + @property + def hwndCaller(self): ... + @property + def hwndToolBar(self): ... + @property + def hwndNavigation(self): ... + @property + def hwndHTML(self): ... + @property + def navWidth(self): ... + @property + def toolBarFlags(self): ... + @property + def notExpanded(self): ... + @property + def curNavType(self): ... + @property + def idNotify(self): ... + @property + def typeName(self) -> str: ... + @property + def caption(self) -> str: ... + @property + def windowPos(self): ... + @property + def HTMLPos(self): ... + @property + def toc(self) -> str: ... + @property + def index(self) -> str: ... + @property + def file(self) -> str: ... + @property + def home(self) -> str: ... + @property + def jump1(self) -> str: ... + @property + def jump2(self) -> str: ... + @property + def urlJump1(self) -> str: ... + @property + def urlJump2(self) -> str: ... + +class PyHINTERNET: ... + +class PyHKEY: + def Close(self): ... + +class PyHTHEME: ... + +@final +class PyHWINSTA: + def EnumDesktops(self) -> tuple[Incomplete, ...]: ... + def SetProcessWindowStation(self) -> None: ... + def CloseWindowStation(self) -> None: ... + def Detach(self, *args, **kwargs): ... # incomplete + +class PyICONINFO: ... + +@final +class PyIID: ... + +@final +class PyINPUT_RECORD: + EventType: int + KeyDown: int | bool + RepeatCount: int + VirtualKeyCode: int + VirtualScanCode: Incomplete + Char: str + ControlKeyState: int + ButtonState: int + EventFlags: int + MousePosition: PyCOORD + Size: PyCOORD + SetFocus: Incomplete + CommandId: Incomplete + +class PyLOCALGROUP_INFO_0: + @property + def name(self) -> str: ... + +class PyLOCALGROUP_INFO_1: + @property + def name(self) -> str: ... + @property + def comment(self) -> str: ... + +class PyLOCALGROUP_INFO_1002: + @property + def comment(self) -> str: ... + +class PyLOCALGROUP_MEMBERS_INFO_0: + @property + def sid(self) -> PySID: ... + +class PyLOCALGROUP_MEMBERS_INFO_1: + @property + def sid(self) -> PySID: ... + @property + def sidusage(self): ... + @property + def name(self) -> str: ... + +class PyLOCALGROUP_MEMBERS_INFO_2: + @property + def sid(self) -> PySID: ... + @property + def sidusage(self): ... + @property + def domainandname(self) -> str: ... 
+ +class PyLOCALGROUP_MEMBERS_INFO_3: + @property + def domainandname(self) -> str: ... + +class PyLOGBRUSH: + @property + def Style(self): ... + @property + def Color(self): ... + @property + def Hatch(self) -> int: ... + +class PyLOGFONT: + @property + def lfHeight(self) -> int: ... + @property + def lfWidth(self) -> int: ... + @property + def lfEscapement(self) -> int: ... + @property + def lfOrientation(self) -> int: ... + @property + def lfWeight(self) -> int: ... + @property + def lfItalic(self) -> int: ... + @property + def lfUnderline(self) -> int: ... + @property + def lfStrikeOut(self) -> int: ... + @property + def lfCharSet(self) -> int: ... + @property + def lfOutPrecision(self) -> int: ... + @property + def lfClipPrecision(self) -> int: ... + @property + def lfQuality(self) -> int: ... + @property + def lfPitchAndFamily(self) -> int: ... + @property + def lfFaceName(self) -> str: ... + +class PyLSA_HANDLE: ... +class PyLUID_AND_ATTRIBUTES: ... +class PyLsaLogon_HANDLE: ... +class PyMSG: ... + +@final +class PyNETRESOURCE: + @property + def dwScope(self) -> int: ... + @property + def dwType(self) -> int: ... + @property + def dwDisplayType(self) -> int: ... + @property + def dwUsage(self) -> int: ... + @property + def lpComment(self): ... + @property + def lpLocalName(self): ... + @property + def lpProvider(self): ... + @property + def lpRemoteName(self): ... + +class PyNET_VALIDATE_AUTHENTICATION_INPUT_ARG: ... +class PyNET_VALIDATE_PASSWORD_CHANGE_INPUT_ARG: ... +class PyNET_VALIDATE_PERSISTED_FIELDS: ... + +class PyNMHDR: + @property + def hwndFrom(self): ... + @property + def idFrom(self): ... + @property + def code(self): ... + +class PyNOTIFYICONDATA: ... + +class PyOVERLAPPED: + Offset: int + OffsetHigh: int + object: object + dword: int + hEvent: int + Internal: int + InternalHigh: int + +class PyOVERLAPPEDReadBuffer: ... + +class PyPERF_COUNTER_DEFINITION: + @property + def DefaultScale(self) -> int: ... + @property + def DetailLevel(self) -> int: ... + @property + def CounterType(self) -> int: ... + @property + def CounterNameTitleIndex(self) -> int: ... + @property + def CounterHelpTitleIndex(self) -> int: ... + def Increment(self) -> None: ... + def Decrement(self) -> None: ... + def Set(self) -> None: ... + def Get(self) -> None: ... + +class PyPERF_OBJECT_TYPE: + @property + def ObjectNameTitleIndex(self) -> int: ... + @property + def ObjectHelpTitleIndex(self) -> int: ... + @property + def DefaultCounterIndex(self) -> int: ... + def Close(self) -> None: ... + +class PyPOINT: ... + +class PyPROFILEINFO: + @property + def UserName(self) -> str: ... + @property + def Flags(self): ... + @property + def ProfilePath(self) -> str: ... + @property + def DefaultPath(self) -> str: ... + @property + def ServerName(self) -> str: ... + @property + def PolicyPath(self) -> str: ... + @property + def Profile(self) -> PyHKEY: ... + +class PyPerfMonManager: + def Close(self) -> None: ... + +class PyPrinterHANDLE: ... +class PyRECT: ... +class PyResourceId: ... +class PySCROLLINFO: ... +class PySC_HANDLE: ... + +class PySECURITY_ATTRIBUTES: + bInheritHandle: int + SECURITY_DESCRIPTOR: PySECURITY_DESCRIPTOR + +class PySECURITY_DESCRIPTOR: + def Initialize(self) -> None: ... + def GetSecurityDescriptorOwner(self) -> PySID: ... + def GetSecurityDescriptorDacl(self) -> PyACL: ... + def GetSecurityDescriptorSacl(self) -> PyACL: ... + def GetSecurityDescriptorControl(self) -> tuple[Incomplete, Incomplete]: ... 
+ def SetSecurityDescriptorOwner(self, __sid: PySID, __bOwnerDefaulted: int | bool) -> None: ... + def SetSecurityDescriptorGroup(self, sid: PySID, bOwnerDefaulted): ... + def SetSecurityDescriptorDacl(self, __bSaclPresent: int | bool, __SACL: PyACL, __bSaclDefaulted: int | bool) -> None: ... + def SetSecurityDescriptorSacl(self, bSaclPresent, SACL: PyACL, bSaclDefaulted) -> None: ... + def SetSecurityDescriptorControl(self, ControlBitsOfInterest, ControlBitsToSet) -> None: ... + def IsValid(self) -> bool: ... + def GetLength(self) -> None: ... + def IsSelfRelative(self) -> bool: ... + +class PySERVER_INFO_100: + @property + def platform_id(self): ... + @property + def name(self) -> str: ... + +class PySERVER_INFO_101: + @property + def platform_id(self): ... + @property + def name(self) -> str: ... + @property + def version_major(self): ... + @property + def version_minor(self): ... + @property + def type(self): ... + @property + def comment(self) -> str: ... + +class PySERVER_INFO_102: + @property + def platform_id(self): ... + @property + def name(self) -> str: ... + @property + def version_major(self): ... + @property + def version_minor(self): ... + @property + def type(self): ... + @property + def comment(self) -> str: ... + @property + def users(self): ... + @property + def disc(self): ... + @property + def hidden(self): ... + @property + def announce(self): ... + @property + def anndelta(self): ... + @property + def userpath(self) -> str: ... + +class PySERVER_INFO_402: + @property + def ulist_mtime(self): ... + @property + def glist_mtime(self): ... + @property + def alist_mtime(self): ... + @property + def security(self): ... + @property + def numadmin(self): ... + @property + def lanmask(self): ... + @property + def guestacct(self) -> str: ... + @property + def chdevs(self): ... + @property + def chdevq(self): ... + @property + def chdevjobs(self): ... + @property + def connections(self): ... + @property + def shares(self): ... + @property + def openfiles(self): ... + @property + def sessopens(self): ... + @property + def sessvcs(self): ... + @property + def sessreqs(self): ... + @property + def opensearch(self): ... + @property + def activelocks(self): ... + @property + def numreqbuf(self): ... + @property + def sizreqbuf(self): ... + @property + def numbigbuf(self): ... + @property + def numfiletasks(self): ... + @property + def alertsched(self): ... + @property + def erroralert(self): ... + @property + def logonalert(self): ... + @property + def accessalert(self): ... + @property + def diskalert(self): ... + @property + def netioalert(self): ... + @property + def maxauditsz(self): ... + @property + def srvheuristics(self) -> str: ... + +class PySERVER_INFO_403: + @property + def ulist_mtime(self): ... + @property + def glist_mtime(self): ... + @property + def alist_mtime(self): ... + @property + def security(self): ... + @property + def numadmin(self): ... + @property + def lanmask(self): ... + @property + def guestacct(self) -> str: ... + @property + def chdevs(self): ... + @property + def chdevq(self): ... + @property + def chdevjobs(self): ... + @property + def connections(self): ... + @property + def shares(self): ... + @property + def openfiles(self): ... + @property + def sessopens(self): ... + @property + def sessvcs(self): ... + @property + def sessreqs(self): ... + @property + def opensearch(self): ... + @property + def activelocks(self): ... + @property + def numreqbuf(self): ... + @property + def sizreqbuf(self): ... + @property + def numbigbuf(self): ... 
+ @property + def numfiletasks(self): ... + @property + def alertsched(self): ... + @property + def erroralert(self): ... + @property + def logonalert(self): ... + @property + def accessalert(self): ... + @property + def diskalert(self): ... + @property + def netioalert(self): ... + @property + def maxauditsz(self): ... + @property + def srvheuristics(self) -> str: ... + @property + def auditedevents(self): ... + @property + def autoprofile(self): ... + @property + def autopath(self) -> str: ... + +class PySERVER_INFO_502: + @property + def sessopens(self): ... + @property + def sessvcs(self): ... + @property + def opensearch(self): ... + @property + def sizreqbuf(self): ... + @property + def initworkitems(self): ... + @property + def maxworkitems(self): ... + @property + def rawworkitems(self): ... + @property + def irpstacksize(self): ... + @property + def maxrawbuflen(self): ... + @property + def sessusers(self): ... + @property + def sessconns(self): ... + @property + def maxpagedmemoryusage(self): ... + @property + def maxnonpagedmemoryusage(self): ... + @property + def enableforcedlogoff(self): ... + @property + def timesource(self): ... + @property + def acceptdownlevelapis(self): ... + @property + def lmannounce(self): ... + +class PySERVER_INFO_503: + @property + def sessopens(self): ... + @property + def sessvcs(self): ... + @property + def opensearch(self): ... + @property + def sizreqbuf(self): ... + @property + def initworkitems(self): ... + @property + def maxworkitems(self): ... + @property + def rawworkitems(self): ... + @property + def irpstacksize(self): ... + @property + def maxrawbuflen(self): ... + @property + def sessusers(self): ... + @property + def sessconns(self): ... + @property + def maxpagedmemoryusage(self): ... + @property + def maxnonpagedmemoryusage(self): ... + @property + def enableforcedlogoff(self): ... + @property + def timesource(self): ... + @property + def acceptdownlevelapis(self): ... + @property + def lmannounce(self): ... + @property + def domain(self) -> str: ... + @property + def maxkeepsearch(self): ... + @property + def scavtimeout(self): ... + @property + def minrcvqueue(self): ... + @property + def minfreeworkitems(self): ... + @property + def xactmemsize(self): ... + @property + def threadpriority(self): ... + @property + def maxmpxct(self): ... + @property + def oplockbreakwait(self): ... + @property + def oplockbreakresponsewait(self): ... + @property + def enableoplocks(self): ... + @property + def enablefcbopens(self): ... + @property + def enableraw(self): ... + @property + def enablesharednetdrives(self): ... + @property + def minfreeconnections(self): ... + @property + def maxfreeconnections(self): ... + +class PySHARE_INFO_0: + @property + def netname(self) -> str: ... + +class PySHARE_INFO_1: + @property + def netname(self) -> str: ... + @property + def type(self): ... + @property + def remark(self) -> str: ... + +class PySHARE_INFO_2: + @property + def netname(self) -> str: ... + @property + def type(self): ... + @property + def remark(self) -> str: ... + @property + def permissions(self): ... + @property + def max_uses(self): ... + @property + def current_uses(self): ... + @property + def path(self) -> str: ... + @property + def passwd(self) -> str: ... + +class PySHARE_INFO_501: + @property + def netname(self) -> str: ... + @property + def type(self): ... + @property + def remark(self) -> str: ... + @property + def flags(self): ... + +class PySHARE_INFO_502: + @property + def netname(self) -> str: ... 
+ @property + def type(self): ... + @property + def remark(self) -> str: ... + @property + def permissions(self): ... + @property + def max_uses(self): ... + @property + def current_uses(self): ... + @property + def path(self) -> str: ... + @property + def passwd(self) -> str: ... + @property + def reserved(self): ... + @property + def security_descriptor(self) -> PySECURITY_DESCRIPTOR: ... + +class PySID: + def Initialize(self, idAuthority, numSubauthorities) -> None: ... + def IsValid(self) -> bool: ... + def SetSubAuthority(self, index, val) -> None: ... + def GetLength(self): ... + def GetSubAuthorityCount(self): ... + def GetSubAuthority(self): ... + def GetSidIdentifierAuthority(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete, Incomplete, Incomplete]: ... + +class PySID_AND_ATTRIBUTES: ... +class PySIZE: ... + +@final +class PySMALL_RECT: + @property + def Left(self): ... + @property + def Top(self): ... + @property + def Right(self): ... + @property + def Bottom(self): ... + +class PySTARTUPINFO: + dwX: int + dwY: int + dwXSize: int + dwYSize: int + dwXCountChars: int + dwYCountChars: int + dwFillAttribute: int + dwFlags: int + wShowWindow: int + hStdInput: int + hStdOutput: int + hStdError: int + lpDesktop: str + lpTitle: str + +class PySecBuffer: + @property + def BufferType(self): ... + @property + def Buffer(self) -> str: ... + @property + def BufferSize(self): ... + @property + def MaxBufferSize(self): ... + def Clear(self) -> None: ... + +class PySecBufferDesc: + Version: Incomplete + Buffer: Incomplete + def append(self, buffer) -> None: ... + +class PyTOKEN_GROUPS: ... +class PyTOKEN_PRIVILEGES: ... + +class PyTRIVERTEX: + @property + def x(self): ... + @property + def y(self): ... + @property + def Red(self): ... + @property + def Green(self): ... + @property + def Blue(self): ... + @property + def Alpha(self): ... + +# Properties Multiple* are ignored +class PyTRUSTEE: + @property + def TrusteeForm(self) -> int: ... + @property + def TrusteeType(self) -> int: ... + @property + def Identifier(self) -> PySID: ... + @property + def MultipleTrustee(self) -> None: ... + @property + def MultipleTrusteeOperation(self) -> Literal[0]: ... + +class PyTS_HANDLE: ... + +class PyTime: + @property + def year(self): ... + @property + def month(self): ... + @property + def weekday(self): ... + @property + def day(self): ... + @property + def hour(self): ... + @property + def minute(self): ... + @property + def second(self): ... + @property + def msec(self): ... + def Format(self, _format: str) -> str: ... + +class PyUSER_INFO_0: + @property + def name(self) -> str: ... + +class PyUSER_INFO_1: + @property + def name(self) -> str: ... + @property + def password(self) -> str: ... + @property + def password_age(self): ... + @property + def priv(self): ... + @property + def home_dir(self) -> str: ... + @property + def comment(self) -> str: ... + @property + def flags(self): ... + @property + def script_path(self) -> str: ... + +class PyUSER_INFO_10: + @property + def name(self) -> str: ... + @property + def comment(self) -> str: ... + @property + def usr_comment(self) -> str: ... + @property + def full_name(self) -> str: ... + +class PyUSER_INFO_1003: + @property + def password(self) -> str: ... + +class PyUSER_INFO_1005: + @property + def priv(self): ... + +class PyUSER_INFO_1006: + @property + def home_dir(self) -> str: ... + +class PyUSER_INFO_1007: + @property + def comment(self) -> str: ... + +class PyUSER_INFO_1008: + @property + def flags(self): ... 
+ +class PyUSER_INFO_1009: + @property + def script_path(self) -> str: ... + +class PyUSER_INFO_1010: + @property + def auth_flags(self): ... + +class PyUSER_INFO_1011: + @property + def full_name(self) -> str: ... + +class PyUSER_INFO_11: + @property + def name(self) -> str: ... + @property + def comment(self) -> str: ... + @property + def usr_comment(self) -> str: ... + @property + def full_name(self) -> str: ... + @property + def priv(self): ... + @property + def auth_flags(self): ... + @property + def password_age(self): ... + @property + def home_dir(self) -> str: ... + @property + def parms(self) -> str: ... + @property + def last_logon(self): ... + @property + def last_logoff(self): ... + @property + def bad_pw_count(self): ... + @property + def num_logons(self): ... + @property + def logon_server(self) -> str: ... + @property + def country_code(self): ... + @property + def workstations(self) -> str: ... + @property + def max_storage(self): ... + @property + def units_per_week(self): ... + @property + def logon_hours(self) -> str: ... + @property + def code_page(self): ... + +class PyUSER_INFO_2: + @property + def name(self) -> str: ... + @property + def password(self) -> str: ... + @property + def password_age(self): ... + @property + def priv(self): ... + @property + def home_dir(self) -> str: ... + @property + def comment(self) -> str: ... + @property + def flags(self): ... + @property + def script_path(self) -> str: ... + @property + def auth_flags(self): ... + @property + def full_name(self) -> str: ... + @property + def usr_comment(self) -> str: ... + @property + def parms(self) -> str: ... + @property + def workstations(self) -> str: ... + @property + def last_logon(self): ... + @property + def last_logoff(self): ... + @property + def acct_expires(self): ... + @property + def max_storage(self): ... + @property + def units_per_week(self): ... + @property + def logon_hours(self) -> str: ... + @property + def bad_pw_count(self): ... + @property + def num_logons(self): ... + @property + def logon_server(self) -> str: ... + @property + def country_code(self): ... + @property + def code_page(self): ... + +class PyUSER_INFO_20: + @property + def name(self) -> str: ... + @property + def full_name(self) -> str: ... + @property + def comment(self) -> str: ... + @property + def flags(self): ... + @property + def user_id(self): ... + +class PyUSER_INFO_3: + @property + def name(self) -> str: ... + @property + def password(self) -> str: ... + @property + def password_age(self): ... + @property + def priv(self): ... + @property + def home_dir(self) -> str: ... + @property + def comment(self) -> str: ... + @property + def flags(self): ... + @property + def script_path(self) -> str: ... + @property + def auth_flags(self): ... + @property + def full_name(self) -> str: ... + @property + def usr_comment(self) -> str: ... + @property + def parms(self) -> str: ... + @property + def workstations(self) -> str: ... + @property + def last_logon(self): ... + @property + def last_logoff(self): ... + @property + def acct_expires(self): ... + @property + def max_storage(self): ... + @property + def units_per_week(self): ... + @property + def logon_hours(self) -> str: ... + @property + def bad_pw_count(self): ... + @property + def num_logons(self): ... + @property + def logon_server(self) -> str: ... + @property + def country_code(self): ... + @property + def code_page(self): ... + @property + def user_id(self): ... + @property + def primary_group_id(self): ... + @property + def profile(self) -> str: ... 
+ @property + def home_dir_drive(self) -> str: ... + @property + def password_expired(self): ... + +class PyUSER_INFO_4: + @property + def name(self) -> str: ... + @property + def password(self) -> str: ... + @property + def password_age(self): ... + @property + def priv(self): ... + @property + def home_dir(self) -> str: ... + @property + def comment(self) -> str: ... + @property + def flags(self): ... + @property + def script_path(self) -> str: ... + @property + def auth_flags(self): ... + @property + def full_name(self) -> str: ... + @property + def usr_comment(self) -> str: ... + @property + def parms(self) -> str: ... + @property + def workstations(self) -> str: ... + @property + def last_logon(self): ... + @property + def last_logoff(self): ... + @property + def acct_expires(self): ... + @property + def max_storage(self): ... + @property + def units_per_week(self): ... + @property + def logon_hours(self) -> str: ... + @property + def bad_pw_count(self): ... + @property + def num_logons(self): ... + @property + def logon_server(self) -> str: ... + @property + def country_code(self): ... + @property + def code_page(self): ... + @property + def user_sid(self) -> PySID: ... + @property + def primary_group_id(self): ... + @property + def profile(self) -> str: ... + @property + def home_dir_drive(self) -> str: ... + @property + def password_expired(self): ... + +class PyUSER_MODALS_INFO_0: + @property + def min_passwd_len(self): ... + @property + def max_passwd_age(self): ... + @property + def min_passwd_age(self): ... + @property + def force_logoff(self): ... + @property + def password_hist_len(self): ... + +class PyUSER_MODALS_INFO_1: + @property + def role(self): ... + @property + def primary(self) -> str: ... + +class PyUSER_MODALS_INFO_2: + @property + def domain_name(self) -> str: ... + @property + def domain_id(self) -> PySID: ... + +class PyUSER_MODALS_INFO_3: + @property + def lockout_duration(self): ... + @property + def lockout_observation_window(self): ... + @property + def usrmod3_lockout_threshold(self): ... + +class PyUSE_INFO_0: + @property + def local(self) -> str: ... + @property + def remote(self) -> str: ... + +class PyUSE_INFO_1: + @property + def local(self) -> str: ... + @property + def remote(self) -> str: ... + @property + def password(self) -> str: ... + @property + def status(self): ... + @property + def asg_type(self): ... + @property + def refcount(self): ... + @property + def usecount(self): ... + +class PyUSE_INFO_2: + @property + def local(self) -> str: ... + @property + def remote(self) -> str: ... + @property + def password(self) -> str: ... + @property + def status(self): ... + @property + def asg_type(self): ... + @property + def refcount(self): ... + @property + def usecount(self): ... + @property + def username(self) -> str: ... + @property + def domainname(self) -> str: ... + +class PyUSE_INFO_3: + @property + def local(self) -> str: ... + @property + def remote(self) -> str: ... + @property + def password(self) -> str: ... + @property + def status(self): ... + @property + def asg_type(self): ... + @property + def refcount(self): ... + @property + def usecount(self): ... + @property + def username(self) -> str: ... + @property + def domainname(self) -> str: ... + @property + def flags(self): ... + +class PyUnicode: ... +class PyUrlCacheHANDLE: ... + +class PyWAVEFORMATEX: + @property + def wFormatTag(self) -> int: ... + @property + def nChannels(self) -> int: ... + @property + def nSamplesPerSec(self) -> int: ... 
+ @property + def nAvgBytesPerSec(self) -> int: ... + @property + def nBlockAlign(self) -> int: ... + @property + def wBitsPerSample(self) -> int: ... + +class PyWINHTTP_AUTOPROXY_OPTIONS: ... +class PyWINHTTP_PROXY_INFO: ... + +class PyWKSTA_INFO_100: + @property + def platform_id(self): ... + @property + def computername(self) -> str: ... + @property + def langroup(self) -> str: ... + @property + def ver_major(self): ... + @property + def ver_minor(self): ... + +class PyWKSTA_INFO_101: + @property + def platform_id(self): ... + @property + def computername(self) -> str: ... + @property + def langroup(self) -> str: ... + @property + def ver_major(self): ... + @property + def ver_minor(self): ... + @property + def lanroot(self) -> str: ... + +class PyWKSTA_INFO_102: + @property + def platform_id(self): ... + @property + def computername(self) -> str: ... + @property + def langroup(self) -> str: ... + @property + def ver_major(self): ... + @property + def ver_minor(self): ... + @property + def lanroot(self) -> str: ... + @property + def logged_on_users(self): ... + +class PyWKSTA_INFO_302: + @property + def char_wait(self): ... + @property + def collection_time(self): ... + @property + def maximum_collection_count(self): ... + @property + def keep_conn(self): ... + @property + def keep_search(self): ... + @property + def max_cmds(self): ... + @property + def num_work_buf(self): ... + @property + def siz_work_buf(self): ... + @property + def max_wrk_cache(self): ... + @property + def siz_error(self): ... + @property + def num_alerts(self): ... + @property + def num_services(self): ... + @property + def errlog_sz(self): ... + @property + def print_buf_time(self): ... + @property + def num_char_buf(self): ... + @property + def siz_char_buf(self): ... + @property + def wrk_heuristics(self) -> str: ... + @property + def mailslots(self): ... + @property + def num_dgram_buf(self): ... + +class PyWKSTA_INFO_402: + @property + def char_wait(self): ... + @property + def collection_time(self): ... + @property + def maximum_collection_count(self) -> str: ... + @property + def keep_conn(self): ... + @property + def keep_search(self): ... + @property + def max_cmds(self): ... + @property + def num_work_buf(self): ... + @property + def siz_work_buf(self): ... + @property + def max_wrk_cache(self): ... + @property + def sess_timeout(self): ... + @property + def siz_error(self): ... + @property + def num_alerts(self): ... + @property + def num_services(self): ... + @property + def errlog_sz(self): ... + @property + def print_buf_time(self): ... + @property + def num_char_buf(self): ... + @property + def siz_char_buf(self): ... + @property + def mailslots(self): ... + @property + def num_dgram_buf(self): ... + @property + def max_threads(self): ... + +class PyWKSTA_INFO_502: + @property + def char_wait(self): ... + @property + def collection_time(self): ... + @property + def maximum_collection_count(self): ... + @property + def keep_conn(self): ... + @property + def max_cmds(self): ... + @property + def max_wrk_cache(self): ... + @property + def siz_char_buf(self): ... + @property + def lock_quota(self): ... + @property + def lock_increment(self): ... + @property + def lock_maximum(self): ... + @property + def pipe_increment(self): ... + @property + def pipe_maximum(self): ... + @property + def cache_file_timeout(self): ... + @property + def dormant_file_limit(self): ... + @property + def read_ahead_throughput(self): ... + @property + def num_mailslot_buffers(self): ... 
+ @property + def num_srv_announce_buffers(self): ... + @property + def max_illegal_datagram_events(self): ... + @property + def illegal_datagram_event_reset_frequency(self): ... + @property + def log_election_packets(self): ... + @property + def use_opportunistic_locking(self): ... + @property + def use_unlock_behind(self): ... + @property + def use_close_behind(self): ... + @property + def buf_named_pipes(self): ... + @property + def use_lock_read_unlock(self): ... + @property + def utilize_nt_caching(self): ... + @property + def use_raw_read(self): ... + @property + def use_raw_write(self): ... + @property + def use_write_raw_data(self): ... + @property + def use_encryption(self): ... + @property + def buf_files_deny_write(self): ... + @property + def buf_read_only_files(self): ... + @property + def force_core_create_mode(self): ... + @property + def use_512_byte_max_transfer(self): ... + +class PyWKSTA_TRANSPORT_INFO_0: + @property + def quality_of_service(self): ... + @property + def number_of_vcs(self): ... + @property + def transport_name(self) -> str: ... + @property + def transport_address(self) -> str: ... + @property + def wan_ish(self): ... + +class PyWKSTA_USER_INFO_0: + @property + def username(self) -> str: ... + +class PyWKSTA_USER_INFO_1: + @property + def username(self) -> str: ... + @property + def logon_domain(self) -> str: ... + @property + def oth_domains(self) -> str: ... + @property + def logon_server(self) -> str: ... + +class PyWNDCLASS: + @property + def style(self) -> int: ... + @property + def cbWndExtra(self) -> int: ... + @property + def hInstance(self) -> int: ... + @property + def hIcon(self) -> int: ... + @property + def hCursor(self) -> int: ... + @property + def hbrBackground(self) -> int: ... + @property + def lpszMenuName(self) -> str: ... + @property + def lpszClassName(self) -> str: ... + @property + def lpfnWndProc(self): ... + def SetDialogProc(self) -> None: ... + +class PyXFORM: + @property + def M11(self) -> float: ... + @property + def M12(self) -> float: ... + @property + def M21(self) -> float: ... + @property + def M22(self) -> float: ... + @property + def Dx(self) -> float: ... + @property + def Dy(self) -> float: ... + +class Pymmapfile: + def close(self) -> None: ... + def find(self, needle, start): ... + def flush(self, offset: int = ..., size: int = ...) -> None: ... + def move(self, dest, src, count) -> None: ... + def read(self, num_bytes): ... + def read_byte(self): ... + def read_line(self): ... + def resize(self, MaximumSize, FileOffset: int = ..., NumberOfBytesToMap: int = ...) -> None: ... + def seek(self, dist, how: int = ...) -> None: ... + def size(self): ... + def tell(self): ... + def write(self, data) -> None: ... + def write_byte(self, char) -> None: ... + +class RASDIALEXTENSIONS: + @property + def dwfOptions(self) -> int: ... + @property + def hwndParent(self) -> int: ... + @property + def reserved(self) -> int: ... + @property + def reserved1(self) -> int: ... + @property + def RasEapInfo(self): ... + +class RASDIALPARAMS: ... + +class SC_ACTION: + @property + def Type(self): ... + @property + def Delay(self): ... + +class SERVICE_FAILURE_ACTIONS: + @property + def ResetPeriod(self): ... + @property + def RebootMsg(self) -> str: ... + @property + def Command(self) -> str: ... + @property + def Actions(self): ... + +class SERVICE_STATUS: + def __getitem__(self, __i: int) -> int: ... + +class TRACKMOUSEEVENT: ... +class ULARGE_INTEGER: ... +class WIN32_FIND_DATA: ... +class com_error: ... 
+ +class connection: + def setautocommit(self, c) -> None: ... + def commit(self) -> None: ... + def rollback(self) -> None: ... + def cursor(self) -> None: ... + def close(self) -> None: ... + +class cursor: + def close(self) -> None: ... + def execute(self, sql: str, arg): ... + def fetchone(self): ... + def fetchmany(self) -> list[Incomplete]: ... + def fetchall(self) -> list[Incomplete]: ... + def setinputsizes(self) -> None: ... + def setoutputsize(self) -> None: ... + +class error(Exception): ... + +class COMPONENT: + @property + def ID(self): ... + @property + def ComponentType(self): ... + @property + def Checked(self): ... + @property + def fDirty(self): ... + @property + def NoScroll(self): ... + @property + def Pos(self): ... + @property + def FriendlyName(self): ... + @property + def Source(self): ... + @property + def SubscribedURL(self): ... + @property + def CurItemState(self): ... + @property + def Original(self): ... + @property + def Restored(self): ... + @property + def Size(self): ... + +class COMPONENTSOPT: + @property + def EnableComponents(self): ... + @property + def ActiveDesktop(self): ... + @property + def Size(self): ... + +class COMPPOS: + @property + def Left(self): ... + @property + def Top(self): ... + @property + def Width(self): ... + @property + def Height(self): ... + @property + def Index(self): ... + @property + def CanResize(self): ... + @property + def CanResizeX(self): ... + @property + def CanResizeY(self): ... + @property + def PreferredLeftPercent(self): ... + @property + def PreferredTopPercent(self): ... + @property + def Size(self): ... + +class COMPSTATEINFO: + @property + def Left(self): ... + @property + def Top(self): ... + @property + def Width(self): ... + @property + def Height(self): ... + @property + def dwItemState(self): ... + @property + def Size(self): ... + +class DEFCONTENTMENU: ... +class ELEMDESC: ... + +class EXP_DARWIN_LINK: + @property + def Signature(self): ... + @property + def DarwinID(self): ... + @property + def wDarwinID(self): ... + @property + def Size(self): ... + +class EXP_SPECIAL_FOLDER: + @property + def Signature(self): ... + @property + def idSpecialFolder(self): ... + @property + def Offset(self): ... + @property + def Size(self): ... + +class EXP_SZ_LINK: + @property + def Signature(self): ... + @property + def Target(self): ... + @property + def wTarget(self): ... + @property + def Size(self): ... + +class FUNCDESC: + @property + def memid(self) -> int: ... + @property + def scodeArray(self) -> tuple[Incomplete, ...]: ... + @property + def args(self) -> tuple[ELEMDESC, ...]: ... + @property + def funckind(self): ... + @property + def invkind(self): ... + @property + def callconv(self): ... + @property + def cParamsOpt(self): ... + @property + def oVft(self): ... + @property + def rettype(self) -> ELEMDESC: ... + @property + def wFuncFlags(self): ... + +class IDLDESC: ... +class MAPIINIT_0: ... + +class NT_CONSOLE_PROPS: + @property + def Signature(self): ... + @property + def FillAttribute(self): ... + @property + def PopupFillAttribute(self): ... + @property + def ScreenBufferSize(self) -> tuple[Incomplete, Incomplete]: ... + @property + def WindowSize(self) -> tuple[Incomplete, Incomplete]: ... + @property + def WindowOrigin(self) -> tuple[Incomplete, Incomplete]: ... + @property + def nFont(self): ... + @property + def InputBufferSize(self): ... + @property + def FontSize(self) -> tuple[Incomplete, Incomplete]: ... + @property + def FontFamily(self): ... + @property + def FontWeight(self): ... 
+ @property + def FaceName(self): ... + @property + def CursorSize(self): ... + @property + def FullScreen(self): ... + @property + def QuickEdit(self): ... + @property + def InsertMode(self): ... + @property + def AutoPosition(self): ... + @property + def HistoryBufferSize(self): ... + @property + def NumberOfHistoryBuffers(self): ... + @property + def HistoryNoDup(self): ... + @property + def ColorTable(self): ... + @property + def Size(self): ... + +class NT_FE_CONSOLE_PROPS: + @property + def Signature(self): ... + @property + def CodePage(self): ... + @property + def Size(self): ... + +class PROPSPEC: ... +class PyADSVALUE: ... + +class PyADS_ATTR_INFO: + @property + def AttrName(self): ... + @property + def ControlCode(self) -> int: ... + @property + def ADsType(self) -> int: ... + @property + def Values(self) -> list[Incomplete]: ... + +class PyADS_OBJECT_INFO: + @property + def RDN(self): ... + @property + def ObjectDN(self): ... + @property + def ParentDN(self): ... + @property + def ClassName(self): ... + +class PyADS_SEARCHPREF_INFO: ... + +class PyBIND_OPTS: + @property + def Flags(self): ... + @property + def Mode(self): ... + @property + def TickCountDeadline(self): ... + @property + def cbStruct(self): ... + +class PyCMINVOKECOMMANDINFO: ... + +class PyDSBCAPS: + @property + def dwFlags(self) -> int: ... + @property + def dwUnlockTransferRate(self) -> int: ... + @property + def dwBufferBytes(self): ... + @property + def dwPlayCpuOverhead(self): ... + +class PyDSBUFFERDESC: + @property + def dwFlags(self) -> int: ... + @property + def dwBufferBytes(self) -> int: ... + @property + def lpwfxFormat(self): ... + +class PyDSCAPS: + @property + def dwFlags(self) -> int: ... + @property + def dwMinSecondarySampleRate(self) -> int: ... + @property + def dwMaxSecondarySampleRate(self) -> int: ... + @property + def dwPrimaryBuffers(self) -> int: ... + @property + def dwMaxHwMixingAllBuffers(self) -> int: ... + @property + def dwMaxHwMixingStaticBuffers(self) -> int: ... + @property + def dwMaxHwMixingStreamingBuffers(self) -> int: ... + @property + def dwFreeHwMixingAllBuffers(self) -> int: ... + @property + def dwFreeHwMixingStaticBuffers(self) -> int: ... + @property + def dwFreeHwMixingStreamingBuffers(self) -> int: ... + @property + def dwMaxHw3DAllBuffers(self) -> int: ... + @property + def dwMaxHw3DStaticBuffers(self) -> int: ... + @property + def dwMaxHw3DStreamingBuffers(self) -> int: ... + @property + def dwFreeHw3DAllBuffers(self) -> int: ... + @property + def dwFreeHw3DStaticBuffers(self) -> int: ... + @property + def dwFreeHw3DStreamingBuffers(self) -> int: ... + @property + def dwTotalHwMemBytes(self) -> int: ... + @property + def dwFreeHwMemBytes(self) -> int: ... + @property + def dwMaxContigFreeHwMemBytes(self) -> int: ... + @property + def dwUnlockTransferRateHwBuffers(self) -> int: ... + @property + def dwPlayCpuOverheadSwBuffers(self) -> int: ... + +class PyDSCBCAPS: + @property + def dwFlags(self) -> int: ... + @property + def dwBufferBytes(self) -> int: ... + +class PyDSCBUFFERDESC: + @property + def dwFlags(self) -> int: ... + @property + def dwBufferBytes(self) -> int: ... + @property + def lpwfxFormat(self): ... + +class PyDSCCAPS: + @property + def dwFlags(self) -> int: ... + @property + def dwFormats(self) -> int: ... + @property + def dwChannels(self) -> int: ... + +class PyDSOP_FILTER_FLAGS: + @property + def uplevel(self) -> PyDSOP_UPLEVEL_FILTER_FLAGS: ... + @property + def downlevel(self): ... 
+ +class PyDSOP_SCOPE_INIT_INFO: + @property + def type(self): ... + @property + def scope(self): ... + @property + def hr(self): ... + @property + def dcName(self) -> str: ... + @property + def filterFlags(self) -> PyDSOP_FILTER_FLAGS: ... + +class PyDSOP_SCOPE_INIT_INFOs: + def __new__(cls, size): ... + +class PyDSOP_UPLEVEL_FILTER_FLAGS: + @property + def bothModes(self): ... + @property + def mixedModeOnly(self): ... + @property + def nativeModeOnly(self): ... + +class PyFORMATETC: ... + +class PyGFileOperationProgressSink: + def StartOperations(self) -> None: ... + def FinishOperations(self, Result) -> None: ... + def PreRenameItem(self, Flags, Item: PyIShellItem, NewName) -> None: ... + def PostRenameItem(self, Flags, Item: PyIShellItem, NewName, hrRename, NewlyCreated: PyIShellItem) -> None: ... + def PreMoveItem(self, Flags, Item: PyIShellItem, DestinationFolder: PyIShellItem, NewName) -> None: ... + def PostMoveItem( + self, Flags, Item: PyIShellItem, DestinationFolder: PyIShellItem, NewName, hrMove, NewlyCreated: PyIShellItem + ) -> None: ... + def PreCopyItem(self, Flags, Item: PyIShellItem, DestinationFolder: PyIShellItem, NewName) -> None: ... + def PostCopyItem( + self, Flags, Item: PyIShellItem, DestinationFolder: PyIShellItem, NewName, hrCopy, NewlyCreated: PyIShellItem + ) -> None: ... + def PreDeleteItem(self, Flags, Item: PyIShellItem) -> None: ... + def PostDeleteItem(self, Flags, Item: PyIShellItem, hrDelete, NewlyCreated: PyIShellItem) -> None: ... + def PreNewItem(self, Flags, DestinationFolder: PyIShellItem, NewName) -> None: ... + def PostNewItem( + self, Flags, DestinationFolder: PyIShellItem, NewName, TemplateName, FileAttributes, hrNew, NewItem: PyIShellItem + ) -> None: ... + def UpdateProgress(self, WorkTotal, WorkSoFar) -> None: ... + def ResetTimer(self) -> None: ... + def PauseTimer(self) -> None: ... + def ResumeTimer(self) -> None: ... + +class PyGSecurityInformation: + def GetObjectInformation(self) -> SI_OBJECT_INFO: ... + def GetSecurity(self, RequestedInformation, Default) -> PySECURITY_DESCRIPTOR: ... + def SetSecurity(self, SecurityInformation, SecurityDescriptor: PySECURITY_DESCRIPTOR) -> None: ... + def GetAccessRights(self, ObjectType: PyIID, Flags) -> tuple[SI_ACCESS, Incomplete]: ... + def MapGeneric(self, ObjectType: PyIID, AceFlags, Mask): ... + def GetInheritTypes(self) -> tuple[SI_INHERIT_TYPE, ...]: ... + def PropertySheetPageCallback(self, hwnd: int, Msg, Page) -> None: ... + +class PyIADesktopP2: + def UpdateAllDesktopSubscriptions(self) -> None: ... + +class PyIADs: + @property + def ADsPath(self) -> str: ... + @property + def AdsPath(self) -> str: ... + @property + def Class(self) -> str: ... + @property + def GUID(self) -> str: ... + @property + def Name(self) -> str: ... + @property + def Parent(self) -> str: ... + @property + def Schema(self) -> str: ... + def GetInfo(self) -> None: ... + def SetInfo(self) -> None: ... + def Get(self, prop: str): ... + def Put(self, _property: str, val) -> None: ... + def get(self, prop: str): ... + def put(self, _property: str, val) -> None: ... + +class PyIADsContainer: + def GetObject(self, _class: str, relativeName: str) -> PyIDispatch: ... + def get_Count(self): ... + def get_Filter(self): ... + def put_Filter(self, val) -> None: ... + def get_Hints(self): ... + def put_Hints(self, val) -> None: ... + +class PyIADsUser: + def get_AccountDisabled(self): ... + def put_AccountDisabled(self, val) -> None: ... + def get_AccountExpirationDate(self): ... 
+ def put_AccountExpirationDate(self, val: PyTime) -> None: ... + def get_BadLoginAddress(self): ... + def get_BadLoginCount(self): ... + def get_Department(self): ... + def put_Department(self, val) -> None: ... + def get_Description(self): ... + def put_Description(self, val) -> None: ... + def get_Division(self): ... + def put_Division(self, val) -> None: ... + def get_EmailAddress(self): ... + def put_EmailAddress(self, val) -> None: ... + def get_EmployeeID(self): ... + def put_EmployeeID(self, val) -> None: ... + def get_FirstName(self): ... + def put_FirstName(self, val) -> None: ... + def get_FullName(self): ... + def put_FullName(self, val) -> None: ... + def get_HomeDirectory(self): ... + def put_HomeDirectory(self, val) -> None: ... + def get_HomePage(self): ... + def put_HomePage(self, val) -> None: ... + def get_LoginScript(self): ... + def put_LoginScript(self, val) -> None: ... + def SetPassword(self, val) -> None: ... + def ChangePassword(self, oldval, newval) -> None: ... + +class PyIActiveDesktop: + def ApplyChanges(self, Flags) -> None: ... + def GetWallpaper(self, cchWallpaper, Reserved: int = ...): ... + def SetWallpaper(self, Wallpaper, Reserved: int = ...) -> None: ... + def GetWallpaperOptions(self, Reserved: int = ...): ... + def SetWallpaperOptions(self, Style, Reserved: int = ...) -> None: ... + def GetPattern(self, cchPattern: int = ..., Reserved: int = ...) -> None: ... + def SetPattern(self, Pattern, Reserved: int = ...) -> None: ... + def GetDesktopItemOptions(self): ... + def SetDesktopItemOptions(self, comp, Reserved: int = ...) -> None: ... + def AddDesktopItem(self, comp, Reserved: int = ...) -> None: ... + def AddDesktopItemWithUI(self, hwnd: int, comp, Flags) -> None: ... + def ModifyDesktopItem(self, comp, Flags) -> None: ... + def RemoveDesktopItem(self, comp, Reserved: int = ...) -> None: ... + def GetDesktopItemCount(self) -> None: ... + def GetDesktopItem(self, Component, Reserved: int = ...): ... + def GetDesktopItemByID(self, ID, reserved: int = ...): ... + def GenerateDesktopItemHtml(self, FileName, comp, Reserved: int = ...) -> None: ... + def AddUrl(self, hwnd: int, Source, comp, Flags) -> None: ... + def GetDesktopItemBySource(self, Source, Reserved: int = ...): ... + +class PyIActiveDesktopP: + def SetSafeMode(self, Flags) -> None: ... + +class PyIActiveScriptDebug: + def GetScriptTextAttributes(self, pstrCode: str, pstrDelimiter: str, dwFlags) -> tuple[Incomplete, ...]: ... + def GetScriptletTextAttributes(self, pstrCode: str, pstrDelimiter: str, dwFlags) -> None: ... + def EnumCodeContextsOfPosition(self, dwSourceContext, uCharacterOffset, uNumChars) -> None: ... + +class PyIActiveScriptError: + def GetExceptionInfo(self) -> None: ... + def GetSourcePosition(self) -> None: ... + def GetSourceLineText(self) -> None: ... + +class PyIActiveScriptErrorDebug: + def GetDocumentContext(self) -> None: ... + def GetStackFrame(self) -> None: ... + +class PyIActiveScriptParseProcedure: + def ParseProcedureText( + self, + pstrCode, + pstrFormalParams, + pstrProcedureName, + pstrItemName, + punkContext: PyIUnknown, + pstrDelimiter, + dwSourceContextCookie, + ulStartingLineNumber, + dwFlags, + ) -> None: ... + +class PyIActiveScriptSite: + def GetLCID(self): ... + def GetItemInfo(self): ... + def GetDocVersionString(self): ... + def OnStateChange(self): ... + def OnEnterScript(self): ... + def OnLeaveScript(self): ... + def OnScriptError(self): ... + def OnScriptTerminate(self): ... 
+ +class PyIActiveScriptSiteDebug: + def GetDocumentContextFromPosition(self, dwSourceContext, uCharacterOffset, uNumChars) -> None: ... + def GetApplication(self) -> None: ... + def GetRootApplicationNode(self) -> None: ... + def OnScriptErrorDebug(self) -> tuple[Incomplete, Incomplete]: ... + +class PyIAddrBook: + def ResolveName(self, uiParm, flags, entryTitle: str, ADRlist) -> None: ... + def OpenEntry(self, entryId: str, iid: PyIID, flags): ... + def CompareEntryIDs(self, entryId: str, entryId1: str, flags: int = ...): ... + +class PyIApplicationDebugger: + def QueryAlive(self) -> None: ... + def CreateInstanceAtDebugger(self, rclsid: PyIID, pUnkOuter: PyIUnknown, dwClsContext, riid: PyIID) -> None: ... + def onDebugOutput(self, pstr) -> None: ... + def onHandleBreakPoint(self, prpt: PyIRemoteDebugApplicationThread, br, pError) -> None: ... + def onClose(self) -> None: ... + def onDebuggerEvent(self, guid: PyIID, uUnknown: PyIUnknown) -> None: ... + +class PyIApplicationDestinations: + def SetAppID(self, AppID) -> None: ... + def RemoveDestination(self, punk: PyIUnknown) -> None: ... + def RemoveAllDestinations(self) -> None: ... + +class PyIApplicationDocumentlists: + def SetAppID(self, AppID) -> None: ... + def Getlist(self, listType, riid: PyIID, ItemsDesired: int = ...) -> PyIEnumObjects: ... + +class PyIAsyncOperation: + def SetAsyncMode(self, fDoOpAsync) -> None: ... + def GetAsyncMode(self): ... + def StartOperation(self, pbcReserved: PyIBindCtx) -> None: ... + def InOperation(self) -> None: ... + def EndOperation(self, hResult, pbcReserved: PyIBindCtx, dwEffects) -> None: ... + +class PyIAttach: + def GetLastError(self, hr, flags): ... + +class PyIBindCtx: + def GetRunningObjectTable(self) -> PyIRunningObjectTable: ... + def GetBindOptions(self) -> PyBIND_OPTS: ... + def SetBindOptions(self, bindopts) -> None: ... + def RegisterObjectParam(self, Key: str, punk: PyIUnknown) -> None: ... + def RevokeObjectParam(self, Key: str) -> None: ... + def GetObjectParam(self, Key: str) -> PyIUnknown: ... + def EnumObjectParam(self) -> PyIEnumString: ... + +class PyIBrowserFrameOptions: + def GetFrameOptions(self, dwMask) -> None: ... + +class PyICancelMethodCalls: + def Cancel(self, Seconds) -> None: ... + def TestCancel(self): ... + +class PyICatInformation: + def EnumCategories(self, lcid: int = ...) -> PyIEnumCATEGORYINFO: ... + def GetCategoryDesc(self, lcid: int = ...) -> str: ... + def EnumClassesOfCategories( + self, listIIdImplemented: list[PyIID] | None = ..., listIIdRequired: Incomplete | None = ... + ) -> PyIEnumGUID: ... + +class PyICatRegister: + def RegisterCategories(self, arg: list[tuple[PyIID, Incomplete, str]]) -> None: ... + def UnRegisterCategories(self, arg: list[PyIID]) -> None: ... + def RegisterClassImplCategories(self, clsid: PyIID, arg: list[PyIID]) -> None: ... + def UnRegisterClassImplCategories(self, clsid: PyIID, arg: list[PyIID]) -> None: ... + def RegisterClassReqCategories(self, clsid: PyIID, arg: list[PyIID]) -> None: ... + def UnRegisterClassReqCategories(self, clsid: PyIID, arg: list[PyIID]) -> None: ... + +class PyICategoryProvider: + def CanCategorizeOnSCID(self, pscid) -> None: ... + def GetDefaultCategory(self) -> None: ... + def GetCategoryForSCID(self, pscid) -> None: ... + def EnumCategories(self) -> None: ... + def GetCategoryName(self, guid: PyIID) -> None: ... + def CreateCategory(self, guid: PyIID, riid: PyIID) -> None: ... + +class PyIClassFactory: + def CreateInstance(self, outerUnknown: PyIUnknown, iid: PyIID) -> PyIUnknown: ... 
+ def LockServer(self, bInc) -> None: ... + +class PyIClientSecurity: + def QueryBlanket(self, Proxy: PyIUnknown): ... + def SetBlanket( + self, Proxy: PyIUnknown, AuthnSvc, AuthzSvc, ServerPrincipalName: str, AuthnLevel, ImpLevel, AuthInfo, Capabilities + ) -> None: ... + def CopyProxy(self, Proxy: PyIUnknown) -> PyIUnknown: ... + +class PyIColumnProvider: + def Initialize(self, psci) -> None: ... + def GetColumnInfo(self, dwIndex) -> None: ... + def GetItemData(self, pscid, pscd) -> None: ... + +class PyIConnectionPoint: + def GetConnectionInterface(self) -> PyIID: ... + def GetConnectionPointContainer(self) -> PyIConnectionPointContainer: ... + def Advise(self, unk: PyIUnknown): ... + def Unadvise(self, cookie) -> None: ... + def EnumConnections(self) -> PyIEnumConnections: ... + +class PyIConnectionPointContainer: + def EnumConnectionPoints(self) -> PyIEnumConnectionPoints: ... + def FindConnectionPoint(self, iid: PyIID) -> PyIConnectionPoint: ... + +class PyIContext: + def SetProperty(self, rpolicyId: PyIID, flags, pUnk: PyIUnknown) -> None: ... + def RemoveProperty(self, rPolicyId: PyIID) -> None: ... + def GetProperty(self, rGuid: PyIID) -> tuple[Incomplete, PyIUnknown]: ... + def EnumContextProps(self) -> PyIEnumContextProps: ... + +class PyIContextMenu: + def QueryContextMenu(self, hmenu: int, indexMenu, idCmdFirst, idCmdLast, uFlags): ... + def InvokeCommand(self, pici: PyCMINVOKECOMMANDINFO) -> None: ... + def GetCommandString(self, idCmd, uType, cchMax: int = ...): ... + +class PyICopyHookA: + def CopyCallback(self, hwnd: int, wFunc, wFlags, srcFile: str, srcAttribs, destFile: str, destAttribs) -> None: ... + +class PyICopyHookW: + def CopyCallback(self, hwnd: int, wFunc, wFlags, srcFile: str, srcAttribs, destFile: str, destAttribs) -> None: ... + +class PyICreateTypeInfo: + def SetGuid(self, guid: PyIID) -> None: ... + def SetTypeFlags(self, uTypeFlags) -> None: ... + def SetDocString(self, pStrDoc) -> None: ... + def SetHelpContext(self, dwHelpContext) -> None: ... + def SetVersion(self, wMajorVerNum, wMinorVerNum) -> None: ... + def AddRefTypeInfo(self, pTInfo: PyITypeInfo) -> None: ... + def AddFuncDesc(self, index) -> None: ... + def AddImplType(self, index, hRefType) -> None: ... + def SetImplTypeFlags(self, index, implTypeFlags) -> None: ... + def SetAlignment(self, cbAlignment) -> None: ... + def SetSchema(self, pStrSchema) -> None: ... + def AddVarDesc(self, index) -> None: ... + def SetFuncAndParamNames(self, index, rgszNames: tuple[Incomplete, ...]) -> None: ... + def SetVarName(self, index, szName) -> None: ... + def SetTypeDescAlias(self) -> None: ... + def DefineFuncAsDllEntry(self, index, szDllName, szProcName) -> None: ... + def SetFuncDocString(self, index, szDocString) -> None: ... + def SetVarDocString(self, index, szDocString) -> None: ... + def SetFuncHelpContext(self, index, dwHelpContext) -> None: ... + def SetVarHelpContext(self, index, dwHelpContext) -> None: ... + def SetMops(self, index, bstrMops) -> None: ... + def LayOut(self) -> None: ... + +class PyICreateTypeLib: + def CreateTypeInfo(self, szName) -> None: ... + def SetName(self, szName) -> None: ... + def SetVersion(self, wMajorVerNum, wMinorVerNum) -> None: ... + def SetGuid(self, guid: PyIID) -> None: ... + def SetDocString(self, szDoc) -> None: ... + def SetHelpFileName(self, szHelpFileName) -> None: ... + def SetHelpContext(self, dwHelpContext) -> None: ... + def SetLcid(self) -> None: ... + def SetLibFlags(self, uLibFlags) -> None: ... + def SaveAllChanges(self) -> None: ... 
+ +class PyICreateTypeLib2: + def CreateTypeInfo(self, szName) -> None: ... + def SetName(self, szName) -> None: ... + def SetVersion(self, wMajorVerNum, wMinorVerNum) -> None: ... + def SetGuid(self, guid: PyIID) -> None: ... + def SetDocString(self, szDoc) -> None: ... + def SetHelpFileName(self, szHelpFileName) -> None: ... + def SetHelpContext(self, dwHelpContext) -> None: ... + def SetLcid(self) -> None: ... + def SetLibFlags(self, uLibFlags) -> None: ... + def SaveAllChanges(self) -> None: ... + +class PyICurrentItem: ... + +class PyICustomDestinationlist: + def SetAppID(self, AppID) -> None: ... + def Beginlist(self, riid: PyIID) -> tuple[Incomplete, PyIObjectArray]: ... + def AppendCategory(self, Category, Items: PyIObjectArray) -> None: ... + def AppendKnownCategory(self, Category) -> None: ... + def AddUserTasks(self, Items: PyIObjectArray) -> None: ... + def Commitlist(self) -> None: ... + def GetRemovedDestinations(self, riid: PyIID) -> PyIObjectArray: ... + def Deletelist(self, AppID: Incomplete | None = ...) -> None: ... + def Abortlist(self) -> None: ... + +class PyIDL: ... + +class PyIDataObject: + def GetData(self, pformatetcIn: PyFORMATETC) -> PySTGMEDIUM: ... + def GetDataHere(self, pformatetcIn: PyFORMATETC) -> PySTGMEDIUM: ... + def QueryGetData(self, pformatetc: PyFORMATETC) -> None: ... + def GetCanonicalFormatEtc(self, pformatectIn: PyFORMATETC) -> PyFORMATETC: ... + def SetData(self, pformatetc: PyFORMATETC, pmedium: PySTGMEDIUM, fRelease) -> None: ... + def EnumFormatEtc(self, dwDirection) -> PyIEnumFORMATETC: ... + def DAdvise(self, pformatetc: PyFORMATETC, advf, pAdvSink): ... + def DUnadvise(self, dwConnection) -> None: ... + def EnumDAdvise(self): ... + +class PyIDebugApplication: + def SetName(self, pstrName) -> None: ... + def StepOutComplete(self) -> None: ... + def DebugOutput(self, pstr) -> None: ... + def StartDebugSession(self) -> None: ... + def HandleBreakPoint(self, br): ... + def Close(self) -> None: ... + def GetBreakFlags(self): ... + def GetCurrentThread(self) -> PyIDebugApplicationThread: ... + def CreateAsyncDebugOperation(self, psdo: PyIDebugSyncOperation) -> None: ... + def AddStackFrameSniffer(self, pdsfs: PyIDebugStackFrameSniffer): ... + def RemoveStackFrameSniffer(self, dwCookie) -> None: ... + def QueryCurrentThreadIsDebuggerThread(self) -> None: ... + def SynchronousCallInDebuggerThread(self, pptc, dwParam1, dwParam2, dwParam3) -> None: ... + def CreateApplicationNode(self) -> PyIDebugApplicationNode: ... + def FireDebuggerEvent(self, guid, unknown: PyIUnknown) -> None: ... + def HandleRuntimeError(self, pErrorDebug: PyIActiveScriptErrorDebug, pScriptSite: PyIActiveScriptSite) -> None: ... + def FCanJitDebug(self) -> None: ... + def FIsAutoJitDebugEnabled(self) -> None: ... + def AddGlobalExpressionContextProvider(self, pdsfs: PyIProvideExpressionContexts) -> None: ... + def RemoveGlobalExpressionContextProvider(self, dwCookie) -> None: ... + +class PyIDebugApplicationNode: + def EnumChildren(self) -> None: ... + def GetParent(self) -> PyIDebugApplicationNode: ... + def SetDocumentProvider(self, pddp: PyIDebugDocumentProvider) -> None: ... + def Close(self) -> None: ... + def Attach(self, pdanParent: PyIDebugApplicationNode) -> None: ... + def Detach(self) -> None: ... + +class PyIDebugApplicationNodeEvents: + def onAddChild(self, prddpChild: PyIDebugApplicationNode) -> None: ... + def onRemoveChild(self, prddpChild: PyIDebugApplicationNode) -> None: ... + def onDetach(self) -> None: ... 
+ def onAttach(self, prddpParent: PyIDebugApplicationNode) -> None: ... + +class PyIDebugApplicationThread: + def SynchronousCallIntoThread(self, pstcb, dwParam1, dwParam2, dwParam3) -> None: ... + def QueryIsCurrentThread(self) -> None: ... + def QueryIsDebuggerThread(self) -> None: ... + +class PyIDebugCodeContext: + def GetDocumentContext(self) -> None: ... + def SetBreakPoint(self, bps) -> None: ... + +class PyIDebugDocument: ... + +class PyIDebugDocumentContext: + def GetDocument(self) -> None: ... + def EnumCodeContexts(self) -> None: ... + +class PyIDebugDocumentHelper: + def Init(self, pda: PyIDebugApplication, pszShortName, pszLongName, docAttr) -> None: ... + def Attach(self, pddhParent: PyIDebugDocumentHelper) -> None: ... + def Detach(self) -> None: ... + def AddUnicodeText(self, pszText) -> None: ... + def AddDBCSText(self) -> None: ... + def SetDebugDocumentHost(self, pddh: PyIDebugDocumentHost) -> None: ... + def AddDeferredText(self, cChars, dwTextStartCookie) -> None: ... + def DefineScriptBlock(self, ulCharOffset, cChars, pas, fScriptlet) -> None: ... + def SetDefaultTextAttr(self, staTextAttr) -> None: ... + def SetTextAttributes(self, ulCharOffset, obAttr) -> None: ... + def SetLongName(self, pszLongName) -> None: ... + def SetShortName(self, pszShortName) -> None: ... + def SetDocumentAttr(self, pszAttributes) -> None: ... + def GetDebugApplicationNode(self) -> None: ... + def GetScriptBlockInfo(self, dwSourceContext) -> None: ... + def CreateDebugDocumentContext(self, iCharPos, cChars) -> None: ... + def BringDocumentToTop(self) -> None: ... + def BringDocumentContextToTop(self, pddc: PyIDebugDocumentContext) -> None: ... + +class PyIDebugDocumentHost: + def GetDeferredText(self, dwTextStartCookie, cMaxChars) -> None: ... + def GetScriptTextAttributes(self, pstrCode, pstrDelimiter, dwFlags) -> None: ... + def OnCreateDocumentContext(self) -> None: ... + def GetPathName(self) -> None: ... + def GetFileName(self) -> None: ... + def NotifyChanged(self) -> None: ... + +class PyIDebugDocumentInfo: + def GetName(self) -> None: ... + def GetDocumentClassId(self) -> PyIID: ... + +class PyIDebugDocumentProvider: + def GetDocument(self) -> PyIDebugDocument: ... + +class PyIDebugDocumentText: + def GetDocumentAttributes(self) -> None: ... + def GetSize(self) -> None: ... + def GetPositionOfLine(self, cLineNumber) -> None: ... + def GetLineOfPosition(self, cCharacterPosition) -> None: ... + def GetText(self, cCharacterPosition, cMaxChars, bWantAttr: int = ...) -> None: ... + def GetPositionOfContext(self, psc: PyIDebugDocumentContext) -> None: ... + def GetContextOfPosition(self, cCharacterPosition, cNumChars) -> None: ... + +class PyIDebugDocumentTextAuthor: + def InsertText(self, cCharacterPosition, cNumToInsert, pcharText) -> None: ... + def RemoveText(self, cCharacterPosition, cNumToRemove) -> None: ... + def ReplaceText(self, cCharacterPosition, cNumToReplace, pcharText) -> None: ... + +class PyIDebugDocumentTextEvents: + def onDestroy(self) -> None: ... + def onInsertText(self, cCharacterPosition, cNumToInsert) -> None: ... + def onRemoveText(self, cCharacterPosition, cNumToRemove) -> None: ... + def onReplaceText(self, cCharacterPosition, cNumToReplace) -> None: ... + def onUpdateTextAttributes(self, cCharacterPosition, cNumToUpdate) -> None: ... + def onUpdateDocumentAttributes(self, textdocattr) -> None: ... + +class PyIDebugDocumentTextExternalAuthor: + def GetPathName(self) -> None: ... + def GetFileName(self) -> None: ... + def NotifyChanged(self) -> None: ... 
+ +class PyIDebugExpression: + def Start(self, pdecb: PyIDebugExpressionCallBack) -> None: ... + def Abort(self) -> None: ... + def QueryIsComplete(self) -> None: ... + def GetResultAsString(self) -> None: ... + def GetResultAsDebugProperties(self) -> None: ... + +class PyIDebugExpressionCallBack: + def onComplete(self) -> None: ... + +class PyIDebugExpressionContext: + def ParseLanguageText(self, pstrCode, nRadix, pstrDelimiter, dwFlags) -> None: ... + def GetLanguageInfo(self) -> None: ... + +class PyIDebugProperty: + def GetPropertyInfo(self, dwFieldSpec, nRadix) -> None: ... + def GetExtendedInfo(self) -> None: ... + def SetValueAsString(self, pszValue, nRadix) -> None: ... + def EnumMembers(self, dwFieldSpec, nRadix, refiid: PyIID) -> None: ... + def GetParent(self) -> None: ... + +class PyIDebugSessionProvider: + def StartDebugSession(self, pda: PyIRemoteDebugApplication) -> None: ... + +class PyIDebugStackFrame: + def GetCodeContext(self) -> None: ... + def GetDescriptionString(self, fLong): ... + def GetLanguageString(self, fLong): ... + def GetThread(self) -> PyIDebugApplicationThread: ... + +class PyIDebugStackFrameSniffer: + def EnumStackFrames(self) -> None: ... + +class PyIDebugStackFrameSnifferEx: + def EnumStackFramesEx(self) -> None: ... + +class PyIDebugSyncOperation: + def GetTargetThread(self) -> None: ... + def Execute(self) -> None: ... + def InProgressAbort(self) -> None: ... + +class PyIDefaultExtractIconInit: + def SetFlags(self, uFlags) -> None: ... + def SetKey(self, hkey: PyHKEY) -> None: ... + def SetNormalIcon(self, pszFile, iIcon) -> None: ... + def SetOpenIcon(self, pszFile, iIcon) -> None: ... + def SetShortcutIcon(self, pszFile, iIcon) -> None: ... + def SetDefaultIcon(self, pszFile, iIcon) -> None: ... + +class PyIDirectSound: + def Initialize(self, guid: PyIID) -> None: ... + def SetCooperativeLevel(self, hwnd: int, level) -> None: ... + def CreateSoundBuffer(self, lpDSCBufferDesc: PyDSCBUFFERDESC, unk: Incomplete | None = ...) -> None: ... + def GetCaps(self) -> None: ... + def Compact(self) -> None: ... + +class PyIDirectSoundBuffer: + def Initialize(self) -> None: ... + def GetStatus(self) -> None: ... + def GetCaps(self) -> None: ... + def Restore(self) -> None: ... + def GetCurrentPosition(self) -> None: ... + def Play(self) -> None: ... + def SetCurrentPosition(self) -> None: ... + def Stop(self) -> None: ... + def GetFrequency(self) -> None: ... + def GetPan(self) -> None: ... + def GetVolume(self) -> None: ... + def SetFrequency(self) -> None: ... + def SetPan(self) -> None: ... + def SetVolume(self) -> None: ... + +class PyIDirectSoundCapture: + def Initialize(self) -> None: ... + def GetCaps(self) -> None: ... + +class PyIDirectSoundCaptureBuffer: + def Initialize(self) -> None: ... + def GetStatus(self) -> None: ... + def GetCurrentPosition(self) -> None: ... + def Stop(self) -> None: ... + +class PyIDirectSoundNotify: ... + +class PyIDirectoryObject: + def GetObjectInformation(self) -> PyADS_OBJECT_INFO: ... + def GetObjectAttributes(self, names: tuple[str, ...]) -> tuple[PyADS_ATTR_INFO, ...]: ... + def SetObjectAttributes(self, attrs: tuple[PyADS_ATTR_INFO, ...]): ... + def CreateDSObject(self, rdn: str, attrs: tuple[PyADS_ATTR_INFO, ...]) -> PyIDispatch: ... + def DeleteDSObject(self, rdn: str) -> None: ... + +class PyIDirectorySearch: + def SetSearchPreference(self, prefs) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def ExecuteSearch(self, _filter: str, attrNames: list[str]): ... + def GetNextRow(self, handle): ... 
+ def GetFirstRow(self, handle): ... + def GetPreviousRow(self, handle): ... + def CloseSearchHandle(self, handle) -> None: ... + def AdandonSearch(self, handle) -> None: ... + def GetColumn(self, handle, name: str) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def GetNextColumnName(self) -> None: ... + +@final +class PyIDispatch: + def Invoke(self, dispid, lcid, flags, bResultWanted, arg: tuple[Incomplete, ...]): ... + def InvokeTypes( + self, dispid, lcid, wFlags, resultTypeDesc, typeDescs: tuple[Incomplete, ...], args: tuple[Incomplete, ...] + ): ... + def GetIDsOfNames(self, name: str, arg) -> tuple[Incomplete, Incomplete]: ... + def GetTypeInfo(self, locale, index: int = ...) -> PyITypeInfo: ... + def GetTypeInfoCount(self): ... + +class PyIDispatchEx: + def GetDispID(self, name: str, fdex): ... + def InvokeEx( + self, + dispid, + lcid, + flags, + args: list[Incomplete], + types: list[Incomplete] | None = ..., + returnDesc: int = ..., + serviceProvider: PyIServiceProvider | None = ..., + ): ... + def DeleteMemberByName(self, name: str, fdex) -> None: ... + def DeleteMemberByDispID(self, dispid) -> None: ... + def GetMemberProperties(self, dispid, fdex): ... + def GetMemberName(self, dispid): ... + def GetNextDispID(self, fdex, dispid): ... + +class PyIDisplayItem: ... + +class PyIDocHostUIHandler: + def ShowContextMenu( + self, dwID, pt: tuple[Incomplete, Incomplete], pcmdtReserved: PyIUnknown, pdispReserved: PyIDispatch + ) -> None: ... + def GetHostInfo(self) -> None: ... + def ShowUI( + self, + dwID, + pActiveObject: PyIOleInPlaceActiveObject, + pCommandTarget: PyIOleCommandTarget, + pFrame: PyIOleInPlaceFrame, + pDoc: PyIOleInPlaceUIWindow, + ) -> None: ... + def HideUI(self) -> None: ... + def UpdateUI(self) -> None: ... + def EnableModeless(self, fEnable) -> None: ... + def OnDocWindowActivate(self, fActivate) -> None: ... + def OnFrameWindowActivate(self, fActivate) -> None: ... + def ResizeBorder( + self, prcBorder: tuple[Incomplete, Incomplete, Incomplete, Incomplete], pUIWindow: PyIOleInPlaceUIWindow, fRameWindow + ) -> None: ... + def TranslateAccelerator(self, lpMsg, pguidCmdGroup: PyIID, nCmdID) -> None: ... + def GetOptionKeyPath(self, dw) -> None: ... + def GetDropTarget(self, pDropTarget: PyIDropTarget) -> None: ... + def GetExternal(self) -> None: ... + def TranslateUrl(self, dwTranslate, pchURLIn) -> None: ... + def FilterDataObject(self, pDO: PyIDataObject) -> None: ... + +class PyIDropSource: + def QueryContinueDrag(self, fEscapePressed, grfKeyState) -> None: ... + def GiveFeedback(self, dwEffect) -> None: ... + +class PyIDropTarget: + def DragEnter(self, pDataObj: PyIDataObject, grfKeyState, pt: tuple[Incomplete, Incomplete], pdwEffect): ... + def DragOver(self, grfKeyState, pt: tuple[Incomplete, Incomplete], pdwEffect): ... + def DragLeave(self) -> None: ... + def Drop(self, pDataObj: PyIDataObject, grfKeyState, pt: tuple[Incomplete, Incomplete], dwEffect): ... + +class PyIDropTargetHelper: + def DragEnter(self, hwnd: int, pDataObj: PyIDataObject, pt: tuple[Incomplete, Incomplete], dwEffect) -> None: ... + def DragOver(self, hwnd: int, pt: tuple[Incomplete, Incomplete], pdwEffect) -> None: ... + def DragLeave(self) -> None: ... + def Drop(self, pDataObj: PyIDataObject, pt: tuple[Incomplete, Incomplete], dwEffect) -> None: ... + +class PyIDsObjectPicker: + def Initialize( + self, targetComputer: str, scopeInfos: PyDSOP_SCOPE_INIT_INFOs, options: int = ..., attrNames: list[str] | None = ... + ) -> None: ... 
+ def InvokeDialog(self, hwnd: int) -> PyIDataObject: ... + +class PyIEmptyVolumeCache: ... +class PyIEmptyVolumeCache2: ... + +class PyIEmptyVolumeCacheCallBack: + def ScanProgress(self, dwlSpaceUsed, dwFlags, pcwszStatus) -> None: ... + def PurgeProgress(self, dwlSpaceFreed, spaceFreed, spaceToFree, flags, status) -> None: ... + +class PyIEnumCATEGORYINFO: + def Next(self, num: int = ...) -> tuple[tuple[PyIID, Incomplete, str], ...]: ... + def Skip(self, num) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumCATEGORYINFO: ... + +class PyIEnumConnectionPoints: + def Next(self, num: int = ...) -> tuple[PyIConnectionPoint, ...]: ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumConnectionPoints: ... + +class PyIEnumConnections: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumConnections: ... + +class PyIEnumContextProps: + def Next(self, num: int = ...) -> tuple[tuple[PyIID, Incomplete, PyIUnknown], ...]: ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumContextProps: ... + +class PyIEnumDebugApplicationNodes: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumDebugApplicationNodes: ... + +class PyIEnumDebugCodeContexts: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumDebugCodeContexts: ... + +class PyIEnumDebugExpressionContexts: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumDebugExpressionContexts: ... + +class PyIEnumDebugPropertyInfo: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumDebugPropertyInfo: ... + def GetCount(self): ... + +class PyIEnumDebugStackFrames: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumDebugStackFrames: ... + +class PyIEnumExplorerCommand: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumExplorerCommand: ... + +class PyIEnumFORMATETC: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumFORMATETC: ... + +class PyIEnumGUID: + def Next(self, num: int = ...) -> tuple[PyIID, ...]: ... + def Skip(self, num) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumGUID: ... + +class PyIEnumIDlist: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumIDlist: ... + +class PyIEnumMoniker: + def Next(self, num: int = ...) -> PyIMoniker: ... + def Skip(self, num) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumMoniker: ... + +class PyIEnumObjects: + def Next(self, riid: PyIID, num: int = ...) -> tuple[PyIUnknown, ...]: ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumObjects: ... + +class PyIEnumRemoteDebugApplicationThreads: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumRemoteDebugApplicationThreads: ... + +class PyIEnumRemoteDebugApplications: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... 
+ def Reset(self) -> None: ... + def Clone(self) -> PyIEnumRemoteDebugApplications: ... + +class PyIEnumResources: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumResources: ... + +class PyIEnumSTATPROPSETSTG: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumSTATPROPSETSTG: ... + +class PyIEnumSTATPROPSTG: + def Next(self, num: int = ...): ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumSTATPROPSTG: ... + +class PyIEnumSTATSTG: + def Next(self, num: int = ...) -> tuple[STATSTG, ...]: ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumSTATSTG: ... + +class PyIEnumShellItems: + def Next(self, num: int = ...) -> tuple[PyIShellItem, ...]: ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumShellItems: ... + +class PyIEnumString: + def Next(self, num: int = ...) -> tuple[str, ...]: ... + def Skip(self) -> None: ... + def Reset(self) -> None: ... + def Clone(self) -> PyIEnumString: ... + +class PyIErrorLog: + def AddError(self, propName: str, excepInfo: Incomplete | None = ...) -> None: ... + +class PyIExplorerBrowser: + def Initialize(self, hwndParent, prc: PyRECT, pfs) -> None: ... + def Destroy(self) -> None: ... + def SetRect(self, hdwp, rcBrowser: PyRECT) -> int: ... + def SetPropertyBag(self, PropertyBag) -> None: ... + def SetEmptyText(self, EmptyText) -> None: ... + def SetFolderSettings(self, pfs) -> None: ... + def Advise(self, psbe: PyIExplorerBrowserEvents): ... + def Unadvise(self, dwCookie) -> None: ... + def SetOptions(self, dwFlag) -> None: ... + def GetOptions(self): ... + def BrowseToIDlist(self, pidl, uFlags) -> None: ... + def BrowseToObject(self, punk: PyIUnknown, uFlags) -> None: ... + def FillFromObject(self, punk: PyIUnknown, dwFlags) -> None: ... + def RemoveAll(self) -> None: ... + def GetCurrentView(self, riid: PyIID) -> PyIUnknown: ... + +class PyIExplorerBrowserEvents: + def OnNavigationPending(self, pidlFolder) -> None: ... + def OnViewCreated(self, psv: PyIShellView) -> None: ... + def OnNavigationComplete(self, pidlFolder) -> None: ... + def OnNavigationFailed(self, pidlFolder) -> None: ... + +class PyIExplorerCommand: + def GetTitle(self, psiItemArray: PyIShellItemArray): ... + def GetIcon(self, psiItemArray: PyIShellItemArray): ... + def GetToolTip(self, psiItemArray: PyIShellItemArray): ... + def GetCanonicalName(self) -> PyIID: ... + def GetState(self, psiItemArray: PyIShellItemArray, fOkToBeSlow): ... + def Invoke(self, psiItemArray: PyIShellItemArray, pbc: PyIBindCtx) -> None: ... + def GetFlags(self): ... + def EnumSubCommands(self) -> PyIEnumExplorerCommand: ... + +class PyIExplorerCommandProvider: ... +class PyIExplorerPaneVisibility: ... + +class PyIExternalConnection: + def AddConnection(self, extconn, reserved: int = ...): ... + def ReleaseConnection(self, extconn, reserved, fLastReleaseCloses): ... + +class PyIExtractIcon: + def Extract(self, pszFile, nIconIndex, nIconSize) -> None: ... + def GetIconLocation(self, uFlags, cchMax) -> None: ... + +class PyIExtractIconW: + def Extract(self, pszFile, nIconIndex, nIconSize) -> None: ... + def GetIconLocation(self, uFlags, cchMax) -> None: ... + +class PyIExtractImage: + def GetLocation(self, dwPriority, size: tuple[Incomplete, Incomplete], dwRecClrDepth, pdwFlags) -> None: ... + def Extract(self) -> None: ... 
+ +class PyIFileOperation: + def Advise(self, Sink: PyGFileOperationProgressSink): ... + def Unadvise(self, Cookie) -> None: ... + def SetOperationFlags(self, OperationFlags) -> None: ... + def SetProgressMessage(self, Message) -> None: ... + def SetProgressDialog(self, popd) -> None: ... + def SetProperties(self, proparray: PyIPropertyChangeArray) -> None: ... + def SetOwnerWindow(self, Owner: int) -> None: ... + def ApplyPropertiesToItem(self, Item: PyIShellItem) -> None: ... + def ApplyPropertiesToItems(self, Items: PyIUnknown) -> None: ... + def RenameItem(self, Item: PyIShellItem, NewName, Sink: PyGFileOperationProgressSink | None = ...) -> None: ... + def RenameItems(self, pUnkItems: PyIUnknown, NewName) -> None: ... + def MoveItem( + self, + Item: PyIShellItem, + DestinationFolder: PyIShellItem, + pszNewName: Incomplete | None = ..., + Sink: PyGFileOperationProgressSink | None = ..., + ) -> None: ... + def MoveItems(self, Items: PyIUnknown, DestinationFolder: PyIShellItem) -> None: ... + def CopyItem( + self, + Item: PyIShellItem, + DestinationFolder: PyIShellItem, + CopyName: Incomplete | None = ..., + Sink: PyGFileOperationProgressSink | None = ..., + ) -> None: ... + def CopyItems(self, Items: PyIUnknown, DestinationFolder: PyIShellItem) -> None: ... + def DeleteItem(self, Item: PyIShellItem, Sink: PyGFileOperationProgressSink | None = ...) -> None: ... + def DeleteItems(self, Items: PyIUnknown) -> None: ... + def NewItem( + self, + DestinationFolder: PyIShellItem, + FileAttributes, + Name, + TemplateName: Incomplete | None = ..., + Sink: PyGFileOperationProgressSink | None = ..., + ) -> None: ... + def PerformOperations(self) -> None: ... + def GetAnyOperationsAborted(self): ... + +class PyIIdentityName: ... + +class PyIInitializeWithFile: + def Initialize(self, FilePath, Mode) -> None: ... + +class PyIInitializeWithStream: + def Initialize(self, Stream: PyIStream, Mode) -> None: ... + +class PyIInputObject: + def TranslateAccelerator(self, pmsg) -> None: ... + def UIActivate(self, uState) -> None: ... + def HasFocusIO(self) -> None: ... + +class PyIInternetBindInfo: + def GetBindInfo(self) -> None: ... + def GetBindString(self) -> None: ... + +class PyIInternetPriority: + def SetPriority(self, nPriority) -> None: ... + def GetPriority(self) -> None: ... + +class PyIInternetProtocol: + def Read(self, cb) -> None: ... + def Seek(self, dlibMove: LARGE_INTEGER, dwOrigin) -> None: ... + def LockRequest(self, dwOptions) -> None: ... + def UnlockRequest(self) -> None: ... + +class PyIInternetProtocolInfo: + def ParseUrl(self, pwzUrl, ParseAction, dwParseFlags, cchResult, dwReserved) -> None: ... + def CombineUrl(self, pwzBaseUrl, pwzRelativeUrl, dwCombineFlags, cchResult, dwReserved) -> None: ... + def CompareUrl(self, pwzUrl1, pwzUrl2, dwCompareFlags) -> None: ... + def QueryInfo(self, pwzUrl, OueryOption, dwQueryFlags, cbBuffer, dwReserved): ... + +class PyIInternetProtocolRoot: + def Start(self, szUrl, pOIProtSink: PyIInternetProtocolSink, pOIBindInfo: PyIInternetBindInfo, grfPI, dwReserved) -> None: ... + def Continue(self) -> None: ... + def Abort(self, hrReason, dwOptions) -> None: ... + def Terminate(self, dwOptions) -> None: ... + def Suspend(self) -> None: ... + def Resume(self) -> None: ... + +class PyIInternetProtocolSink: + def Switch(self) -> None: ... + def ReportProgress(self, ulStatusCode, szStatusText) -> None: ... + def ReportData(self, grfBSCF, ulProgress, ulProgressMax) -> None: ... + def ReportResult(self, hrResult, dwError, szResult) -> None: ... 
+ +class PyIInternetSecurityManager: + def SetSecuritySite(self, pSite) -> None: ... + def GetSecuritySite(self) -> None: ... + def MapUrlToZone(self, pwszUrl, dwFlags) -> None: ... + def GetSecurityId(self, pwszUrl, pcbSecurityId) -> None: ... + def ProcessUrlAction(self, pwszUrl, dwAction, context, dwFlags) -> None: ... + def SetZoneMapping(self, dwZone, lpszPattern, dwFlags) -> None: ... + def GetZoneMappings(self, dwZone, dwFlags) -> None: ... + +class PyIKnownFolder: + def GetId(self) -> PyIID: ... + def GetCategory(self): ... + def GetShellItem(self, riid: PyIID, Flags: int = ...) -> PyIShellItem: ... + def GetPath(self, Flags: int = ...): ... + def SetPath(self, Flags, Path) -> None: ... + def GetIDlist(self, Flags) -> PyIDL: ... + def GetFolderType(self) -> PyIID: ... + def GetRedirectionCapabilities(self): ... + def GetFolderDefinition(self): ... + +class PyIKnownFolderManager: + def FolderIdFromCsidl(self, Csidl) -> PyIID: ... + def FolderIdToCsidl(self, _id: PyIID): ... + def GetFolderIds(self) -> tuple[PyIID, ...]: ... + def GetFolder(self, _id: PyIID) -> PyIKnownFolder: ... + def GetFolderByName(self, Name) -> PyIKnownFolder: ... + def RegisterFolder(self, _id: PyIID, Definition) -> None: ... + def UnregisterFolder(self, _id: PyIID) -> None: ... + def FindFolderFromPath(self, Path, Mode) -> PyIKnownFolder: ... + def FindFolderFromIDlist(self, pidl: PyIDL) -> PyIKnownFolder: ... + def Redirect(self, _id: PyIID, hwnd: int, flags, TargetPath, Exclusion: tuple[PyIID, ...]) -> None: ... + +class PyILockBytes: + def ReadAt(self, ulOffset: ULARGE_INTEGER, cb) -> str: ... + def WriteAt(self, ulOffset: ULARGE_INTEGER, data: str): ... + def Flush(self) -> None: ... + def SetSize(self, cb: ULARGE_INTEGER) -> None: ... + def LockRegion(self, libOffset: ULARGE_INTEGER, cb: ULARGE_INTEGER, dwLockType) -> None: ... + def UnlockRegion(self, libOffset: ULARGE_INTEGER, cb: ULARGE_INTEGER, dwLockType) -> None: ... + def Stat(self, grfStatFlag) -> STATSTG: ... + +class PyIMAPIContainer: + def OpenEntry(self, entryId: str, iid: PyIID, flags): ... + def GetContentsTable(self, flags) -> PyIMAPITable: ... + def GetHierarchyTable(self, flags) -> PyIMAPITable: ... + +class PyIMAPIFolder: + def GetLastError(self, hr, flags): ... + def CreateFolder( + self, folderType, folderName: str, folderComment: str | None = ..., iid: PyIID | None = ..., flags=... + ) -> PyIMAPIFolder: ... + def CreateMessage(self, iid: PyIID, flags) -> PyIMessage: ... + def CopyMessages(self, msgs: PySBinaryArray, iid: PyIID, folder: PyIMAPIFolder, ulUIParam, progress, flags): ... + def DeleteFolder(self, entryId: str, uiParam, progress) -> None: ... + def DeleteMessages(self, msgs: PySBinaryArray, uiParam, progress, flags): ... + def EmptyFolder(self, uiParam, progress, flags): ... + def SetReadFlags(self, msgs: PySBinaryArray, uiParam, progress, flag) -> None: ... + +class PyIMAPIProp: + def GetProps(self, proplist: PySPropTagArray, flags: int = ...) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def DeleteProps(self, proplist: PySPropTagArray, wantProblems: bool = ...) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def SetProps( + self, proplist: tuple[Incomplete, Incomplete], wantProblems: bool = ... + ) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def CopyTo( + self, + IIDExcludelist: tuple[Incomplete, Incomplete], + propTags: PySPropTagArray, + uiParam, + progress, + resultIID: PyIID, + dest: PyIMAPIProp, + flags, + wantProblems: bool = ..., + ) -> tuple[Incomplete, Incomplete, Incomplete]: ... 
+ def CopyProps( + self, propTags: PySPropTagArray, uiParam, progress, resultIID: PyIID, dest: PyIMAPIProp, flags, wantProblems: bool = ... + ) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def OpenProperty(self, propTag, iid: PyIID, interfaceOptions, flags) -> PyIUnknown: ... + def GetIDsFromNames(self, nameIds: PyMAPINAMEIDArray, flags: int = ...) -> PySPropTagArray: ... + def GetNamesFromIDs( + self, propTags: PySPropTagArray, propSetGuid: PyIID | None = ..., flags=... + ) -> tuple[Incomplete, PySPropTagArray, PyMAPINAMEIDArray]: ... + def GetLastError(self, hr, flags): ... + def SaveChanges(self, flags) -> None: ... + def GetProplist(self, flags) -> PySPropTagArray: ... + +class PyIMAPISession: + def OpenEntry(self, entryId: str, iid: PyIID, flags): ... + def OpenMsgStore(self, uiParam, entryId: str, iid: PyIID, flags) -> PyIUnknown: ... + def QueryIdentity(self) -> str: ... + def Advise(self, entryId: str, mask, sink): ... + def Unadvise(self, connection) -> None: ... + def CompareEntryIDs(self, entryId: str, entryId1: str, flags: int = ...): ... + def GetLastError(self, hr, flags): ... + def GetMsgStoresTable(self, flags) -> PyIMAPITable: ... + def GetStatusTable(self, flags) -> PyIMAPITable: ... + def Logoff(self, uiParm, flags, reserved) -> None: ... + def OpenAddressBook(self, uiParm, iid: PyIID, flags) -> PyIAddrBook: ... + def OpenProfileSection(self, iidSection: PyIID, iid: PyIID, flags): ... + def AdminServices(self, flags: int = ...) -> PyIMsgServiceAdmin: ... + +class PyIMAPIStatus: + def ChangePassword(self, oldPassword, newPassword, ulFlags) -> None: ... + def SettingsDialog(self, ulUIParam, ulFlags) -> None: ... + def ValidateState(self, ulUIParam, ulFlags) -> None: ... + def FlushQueues(self, ulUIParam, transport: str, ulFlags) -> None: ... + +class PyIMAPITable: + def GetLastError(self, hr, flags): ... + def Advise(self, eventMask, adviseSink): ... + def SeekRow(self, bookmark, rowCount): ... + def SeekRowApprox(self, numerator, denominator) -> None: ... + def GetRowCount(self, flags): ... + def QueryRows(self, rowCount, flags): ... + def SetColumns(self, propTags, flags) -> None: ... + def GetStatus(self) -> None: ... + def QueryPosition(self) -> None: ... + def QueryColumns(self, flags): ... + def Abort(self) -> None: ... + def FreeBookmark(self, bookmark) -> None: ... + def CreateBookmark(self): ... + def Restrict(self, restriction: PySRestriction, flags) -> None: ... + def FindRow(self, restriction: PySRestriction, bookmarkOrigin, flags) -> None: ... + def SortTable(self, sortOrderSet: PySSortOrderSet, flags) -> None: ... + def Unadvise(self, handle) -> None: ... + +class PyIMachineDebugManager: + def AddApplication(self, pda: PyIRemoteDebugApplication) -> None: ... + def RemoveApplication(self, dwAppCookie) -> None: ... + def EnumApplications(self) -> None: ... + +class PyIMachineDebugManagerEvents: + def onAddApplication(self, pda: PyIRemoteDebugApplication, dwAppCookie) -> None: ... + def onRemoveApplication(self, pda: PyIRemoteDebugApplication, dwAppCookie) -> None: ... + +class PyIMessage: + def SetReadFlag(self, flag) -> None: ... + def GetAttachmentTable(self, flags) -> PyIMAPITable: ... + def OpenAttach(self, attachmentNum, interface: PyIID, flags) -> PyIAttach: ... + def CreateAttach(self, interface: PyIID, flags) -> tuple[Incomplete, PyIAttach]: ... + def DeleteAttach(self, attachmentNum, ulUIParam, interface, flags) -> None: ... + def ModifyRecipients(self, flags, mods) -> None: ... + def GetRecipientTable(self, flags) -> PyIMAPITable: ... 
+ def SubmitMessage(self, flags) -> None: ... + +class PyIMoniker: + def BindToObject(self, bindCtx: PyIBindCtx, moniker: PyIMoniker, iidResult) -> PyIUnknown: ... + def BindToStorage(self, bindCtx: PyIBindCtx, moniker: PyIMoniker, iidResult) -> PyIUnknown: ... + def GetDisplayName(self, bindCtx: PyIBindCtx, moniker: PyIMoniker) -> str: ... + def ComposeWith(self, mkRight: PyIMoniker, fOnlyIfNotGeneric) -> PyIMoniker: ... + def Enum(self, fForward: bool = ...) -> PyIEnumMoniker: ... + def IsEqual(self, other: PyIMoniker) -> bool: ... + def IsSystemMoniker(self) -> bool: ... + def Hash(self): ... + +class PyIMsgServiceAdmin: + def GetLastError(self, hr, flags): ... + def CreateMsgService(self, serviceName: str, displayName: str, flags, uiParam: int = ...) -> None: ... + def ConfigureMsgService(self, iid: PyIID, ulUIParam, ulFlags, arg: list[Incomplete]) -> None: ... + def GetMsgServiceTable(self, flags) -> PyIMAPITable: ... + def GetProviderTable(self, flags) -> PyIMAPITable: ... + def DeleteMsgService(self, uuid: PyIID) -> None: ... + def RenameMsgService(self, uuid: PyIID, flags, newName: str) -> None: ... + def OpenProfileSection(self, uuid: PyIID, iid: PyIID, flags): ... + def AdminProviders(self, uuid: PyIID, flags): ... + +class PyIMsgStore: + def OpenEntry(self, entryId: str, iid: PyIID, flags): ... + def GetReceiveFolder(self, messageClass: str | None = ..., flags: int = ...) -> tuple[PyIID, str]: ... + def GetReceiveFolderTable(self, flags) -> PyIMAPITable: ... + def CompareEntryIDs(self, entryId: str, entryId1: str, flags: int = ...): ... + def GetLastError(self, hr, flags): ... + def AbortSubmit(self, entryId: str, flags: int = ...): ... + def Advise(self, entryId: str, eventMask, adviseSink) -> None: ... + def Unadvise(self, connection) -> None: ... + +class PyINameSpaceTreeControl: + def Initialize(self, hwndParent, prc: tuple[Incomplete, Incomplete, Incomplete, Incomplete], nsctsFlags) -> None: ... + def TreeAdvise(self, punk: PyIUnknown) -> None: ... + def TreeUnadvise(self, dwCookie) -> None: ... + def AppendRoot(self, psiRoot: PyIShellItem, grfEnumFlags, grfRootStyle, pif) -> None: ... + def InsertRoot(self, iIndex, psiRoot: PyIShellItem, grfEnumFlags, grfRootStyle, pif) -> None: ... + def RemoveRoot(self, psiRoot: PyIShellItem) -> None: ... + def RemoveAllRoots(self) -> None: ... + def GetRootItems(self) -> None: ... + def SetItemState(self, psi: PyIShellItem, nstcisMask, nstcisFlags) -> None: ... + def GetItemState(self, psi: PyIShellItem, nstcisMask) -> None: ... + def GetSelectedItems(self) -> None: ... + def GetItemCustomState(self, psi: PyIShellItem) -> None: ... + def SetItemCustomState(self, psi: PyIShellItem, iStateNumber) -> None: ... + def EnsureItemVisible(self, psi: PyIShellItem) -> None: ... + def SetTheme(self, pszTheme) -> None: ... + def GetNextItem(self, psi: PyIShellItem, nstcgi) -> None: ... + def HitTest(self, pt: tuple[Incomplete, Incomplete]) -> None: ... + def GetItemRect(self) -> None: ... + def CollapseAll(self) -> None: ... + +class PyINamedPropertyStore: + def GetNamedValue(self, Name) -> PyPROPVARIANT: ... + def SetNamedValue(self, propvar) -> None: ... + def GetNameCount(self): ... + def GetNameAt(self, Index): ... + +class PyIObjectArray: + def GetCount(self): ... + def GetAt(self, Index, riid: PyIID) -> PyIUnknown: ... + +class PyIObjectCollection: + def AddObject(self, punk: PyIUnknown) -> None: ... + def AddFromArray(self, Source: PyIObjectArray) -> None: ... + def RemoveObjectAt(self, Index) -> None: ... + def Clear(self) -> None: ... 
+ +class PyIObjectWithPropertyKey: + def SetPropertyKey(self, key: PyPROPERTYKEY) -> None: ... + def GetPropertyKey(self) -> PyPROPERTYKEY: ... + +class PyIObjectWithSite: + def SetSite(self, pUnkSite) -> None: ... + def GetSite(self, riid: PyIID) -> None: ... + +class PyIOleClientSite: + def SaveObject(self) -> None: ... + def GetMoniker(self, dwAssign, dwWhichMoniker) -> None: ... + def GetContainer(self) -> None: ... + def ShowObject(self) -> None: ... + def OnShowWindow(self, fShow) -> None: ... + def RequestNewObjectLayout(self) -> None: ... + +class PyIOleCommandTarget: + def QueryStatus(self) -> None: ... + def Exec(self) -> None: ... + +class PyIOleControl: + def GetControlInfo(self) -> None: ... + def OnMnemonic(self, msg) -> None: ... + def OnAmbientPropertyChange(self, dispID) -> None: ... + def FreezeEvents(self, bFreeze) -> None: ... + +class PyIOleControlSite: + def OnControlInfoChanged(self) -> None: ... + def LockInPlaceActive(self, fLock) -> None: ... + def GetExtendedControl(self) -> None: ... + def TransformCoords( + self, PtlHimetric: tuple[Incomplete, Incomplete], pPtfContainer: tuple[float, float], dwFlags + ) -> None: ... + def TranslateAccelerator(self, pMsg: PyMSG, grfModifiers) -> None: ... + def OnFocus(self, fGotFocus) -> None: ... + def ShowPropertyFrame(self) -> None: ... + +class PyIOleInPlaceActiveObject: + def TranslateAccelerator(self, lpmsg: PyMSG) -> None: ... + def OnFrameWindowActivate(self, fActivate) -> None: ... + def OnDocWindowActivate(self, fActivate) -> None: ... + def ResizeBorder( + self, rcBorder: tuple[Incomplete, Incomplete, Incomplete, Incomplete], pUIWindow: PyIOleInPlaceUIWindow, fFrameWindow + ) -> None: ... + def EnableModeless(self, fEnable) -> None: ... + +class PyIOleInPlaceFrame: + def InsertMenus(self, hmenuShared, menuWidths: PyOLEMENUGROUPWIDTHS) -> None: ... + def SetMenu(self, hmenuShared, holemenu, hwndActiveObject) -> None: ... + def RemoveMenus(self, hmenuShared) -> None: ... + def SetStatusText(self, pszStatusText) -> None: ... + def EnableModeless(self, fEnable) -> None: ... + def TranslateAccelerator(self, lpmsg: PyMSG, wID) -> None: ... + +class PyIOleInPlaceObject: + def InPlaceDeactivate(self) -> None: ... + def UIDeactivate(self) -> None: ... + def SetObjectRects(self) -> None: ... + def ReactivateAndUndo(self) -> None: ... + +class PyIOleInPlaceSite: + def CanInPlaceActivate(self) -> None: ... + def OnInPlaceActivate(self) -> None: ... + def OnUIActivate(self) -> None: ... + def GetWindowContext(self) -> None: ... + def Scroll(self) -> None: ... + def OnUIDeactivate(self, fUndoable) -> None: ... + def OnInPlaceDeactivate(self) -> None: ... + def DiscardUndoState(self) -> None: ... + def DeactivateAndUndo(self) -> None: ... + def OnPosRectChange(self) -> None: ... + +class PyIOleInPlaceSiteEx: + def OnInPlaceActivateEx(self, dwFlags) -> None: ... + def OnInPlaceDeactivateEx(self, fNoRedraw) -> None: ... + def RequestUIActivate(self) -> None: ... + +class PyIOleInPlaceSiteWindowless: + def CanWindowlessActivate(self) -> None: ... + def GetCapture(self) -> None: ... + def SetCapture(self, fCapture) -> None: ... + def GetFocus(self) -> None: ... + def SetFocus(self, fFocus) -> None: ... + def GetDC(self, grfFlags, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete]) -> None: ... + def ReleaseDC(self, hDC) -> None: ... + def InvalidateRect(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], fErase) -> None: ... + def InvalidateRgn(self, hRgn, fErase) -> None: ... 
+ def ScrollRect(self, dx, dy) -> None: ... + def AdjustRect(self) -> None: ... + def OnDefWindowMessage(self, msg, wParam, lParam) -> None: ... + +class PyIOleInPlaceUIWindow: + def GetBorder(self) -> None: ... + def RequestBorderSpace(self, borderwidths: tuple[Incomplete, Incomplete, Incomplete, Incomplete]) -> None: ... + def SetBorderSpace(self, borderwidths: tuple[Incomplete, Incomplete, Incomplete, Incomplete]) -> None: ... + def SetActiveObject(self, pActiveObject: PyIOleInPlaceActiveObject, pszObjName) -> None: ... + +class PyIOleObject: + def SetClientSite(self, pClientSite: PyIOleClientSite) -> None: ... + def GetClientSite(self) -> None: ... + def SetHostNames(self, szContainerApp, szContainerObj) -> None: ... + def Close(self, dwSaveOption) -> None: ... + def SetMoniker(self, dwWhichMoniker, pmk: PyIMoniker) -> None: ... + def GetMoniker(self, dwAssign, dwWhichMoniker) -> None: ... + def InitFromData(self, pDataObject: PyIDataObject, fCreation, dwReserved) -> None: ... + def GetClipboardData(self, dwReserved) -> None: ... + def DoVerb( + self, + iVerb, + msg: PyMSG, + pActiveSite: PyIOleClientSite, + lindex, + hwndParent, + rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + ) -> None: ... + def EnumVerbs(self) -> None: ... + def Update(self) -> None: ... + def IsUpToDate(self) -> bool: ... + def GetUserClassID(self) -> None: ... + def GetUserType(self, dwFormOfType) -> None: ... + def SetExtent(self, dwDrawAspect, size: tuple[Incomplete, Incomplete]) -> None: ... + def GetExtent(self, dwDrawAspect, size: tuple[Incomplete, Incomplete]) -> None: ... + def Advise(self, pAdvSink) -> None: ... + def Unadvise(self, dwConnection) -> None: ... + def EnumAdvise(self) -> None: ... + def GetMiscStatus(self, dwAspect) -> None: ... + def SetColorScheme(self) -> None: ... + +class PyIOleWindow: + def GetWindow(self) -> None: ... + def ContextSensitiveHelp(self, fEnterMode) -> None: ... + +class PyIPersist: + def GetClassID(self) -> PyIID: ... + +class PyIPersistFile: + def IsDirty(self) -> bool: ... + def Load(self, FileName, Mode) -> None: ... + def Save(self, FileName, fRemember) -> None: ... + def SaveCompleted(self, FileName) -> None: ... + def GetCurFile(self): ... + +class PyIPersistFolder: + def Initialize(self, pidl: PyIDL) -> None: ... + +class PyIPersistFolder2: + def GetCurFolder(self) -> None: ... + +class PyIPersistPropertyBag: + def InitNew(self) -> None: ... + def Load(self, bag: PyIPropertyBag, log: PyIErrorLog | None = ...) -> None: ... + def Save(self, bag: PyIPropertyBag, clearDirty, saveProperties) -> None: ... + +class PyIPersistSerializedPropStorage: + def SetFlags(self, flags) -> None: ... + def SetPropertyStorage(self, ps) -> None: ... + def GetPropertyStorage(self): ... + +class PyIPersistStorage: + def IsDirty(self) -> bool: ... + def InitNew(self, PyIStorage: PyIStorage) -> None: ... + def Load(self, storage: PyIStorage) -> None: ... + def Save(self, PyIStorage: PyIStorage, _int) -> None: ... + def SaveCompleted(self, PyIStorage: PyIStorage) -> None: ... + def HandsOffStorage(self) -> None: ... + +class PyIPersistStream: + def IsDirty(self) -> bool: ... + def Load(self, stream: PyIStream) -> None: ... + def Save(self, stream: PyIStream, bClearDirty) -> None: ... + def GetSizeMax(self) -> ULARGE_INTEGER: ... + +class PyIPersistStreamInit: + def InitNew(self) -> None: ... + +class PyIProcessDebugManager: + def CreateApplication(self) -> None: ... + def GetDefaultApplication(self) -> None: ... 
+ def AddApplication(self, pda: PyIDebugApplication) -> None: ... + def RemoveApplication(self, dwAppCookie) -> None: ... + def CreateDebugDocumentHelper(self, unkOuter) -> None: ... + +class PyIProfAdmin: + def GetLastError(self, hr, flags): ... + def CreateProfile(self, oldProfileName: str, Password: str, uiParam: int = ..., flags: int = ...) -> None: ... + def DeleteProfile(self, oldProfileName: str, flags: int = ...) -> None: ... + def CopyProfile(self, oldProfileName: str, Password: str, newProfileName: str, uiParam: int = ..., flags=...) -> None: ... + def RenameProfile(self, oldProfileName: str, Password: str, newProfileName: str, uiParam: int = ..., flags=...) -> None: ... + def SetDefaultProfile(self, profileName: str, flags: int = ...) -> None: ... + def AdminServices(self, profileName: str, Password: str | None = ..., uiParam: int = ..., flags=...) -> PyIProfAdmin: ... + +class PyIPropertyBag: + def Read(self, propName, propType, errorLog: PyIErrorLog | None = ...): ... + def Write(self, propName, value) -> None: ... + +class PyIPropertyChange: + def ApplyToPropVariant(self, OrigVal: PyPROPVARIANT) -> PyPROPVARIANT: ... + +class PyIPropertyChangeArray: + def GetCount(self): ... + def GetAt(self, Index, riid: PyIID) -> PyIPropertyChange: ... + def InsertAt(self, Index, PropChange: PyIPropertyChange) -> None: ... + def Append(self, PropChange: PyIPropertyChange) -> None: ... + def AppendOrReplace(self, PropChange: PyIPropertyChange) -> None: ... + def RemoveAt(self, Index) -> None: ... + def IsKeyInArray(self, key: PyPROPERTYKEY) -> bool: ... + +class PyIPropertyDescription: + def GetPropertyKey(self) -> PyPROPERTYKEY: ... + def GetCanonicalName(self): ... + def GetPropertyType(self): ... + def GetDisplayName(self): ... + def GetEditInvitation(self): ... + def GetTypeFlags(self, mask): ... + def GetViewFlags(self): ... + def GetDefaultColumnWidth(self): ... + def GetDisplayType(self): ... + def GetColumnState(self): ... + def GetGroupingRange(self): ... + def GetRelativeDescriptionType(self): ... + def GetRelativeDescription(self, var1: PyPROPVARIANT, var2: PyPROPVARIANT) -> tuple[Incomplete, Incomplete]: ... + def GetSortDescription(self): ... + def GetSortDescriptionLabel(self, Descending): ... + def GetAggregationType(self): ... + def GetConditionType(self) -> tuple[Incomplete, Incomplete]: ... + def GetEnumTypelist(self, riid: PyIID) -> PyIPropertyEnumTypelist: ... + def CoerceToCanonicalValue(self, Value: PyPROPVARIANT): ... + def FormatForDisplay(self, Value: PyPROPVARIANT, Flags): ... + def IsValueCanonical(self, Value) -> bool: ... + +class PyIPropertyDescriptionAliasInfo: + def GetSortByAlias(self, riid: PyIID) -> PyIPropertyDescription: ... + def GetAdditionalSortByAliases(self, riid: PyIID) -> PyIPropertyDescriptionlist: ... + +class PyIPropertyDescriptionlist: + def GetCount(self): ... + def GetAt(self, Elem, riid: PyIID) -> PyIPropertyDescription: ... + +class PyIPropertyDescriptionSearchInfo: + def GetSearchInfoFlags(self): ... + def GetColumnIndexType(self): ... + def GetProjectionString(self): ... + def GetMaxSize(self): ... + +class PyIPropertyEnumType: + def GetEnumType(self): ... + def GetValue(self) -> PyPROPVARIANT: ... + def GetRangeMinValue(self) -> PyPROPVARIANT: ... + def GetRangeSetValue(self) -> PyPROPVARIANT: ... + def GetDisplayText(self) -> None: ... + +class PyIPropertyEnumTypelist: + def GetCount(self): ... + def GetAt(self, itype, riid: PyIID) -> PyIPropertyEnumType: ... + def FindMatchingIndex(self, Cmp: PyPROPVARIANT): ... 
+ +class PyIPropertySetStorage: + def Create(self, fmtid: PyIID, clsid: PyIID, Flags, Mode) -> PyIPropertyStorage: ... + def Open(self, fmtid: PyIID, Mode) -> PyIPropertyStorage: ... + def Delete(self, fmtid: PyIID) -> None: ... + def Enum(self) -> PyIEnumSTATPROPSETSTG: ... + +class PyIPropertyStorage: + def ReadMultiple(self, props: tuple[PROPSPEC, ...]) -> tuple[Incomplete, ...]: ... + def WriteMultiple(self, props: tuple[PROPSPEC, ...], values: tuple[Incomplete, ...], propidNameFirst: int = ...) -> None: ... + def DeleteMultiple(self, props: tuple[PROPSPEC, ...]) -> None: ... + def ReadPropertyNames(self, props: tuple[Incomplete, ...]) -> tuple[Incomplete, ...]: ... + def WritePropertyNames(self, props: tuple[Incomplete, ...], names: tuple[str, ...]) -> None: ... + def DeletePropertyNames(self, props: tuple[Incomplete, ...]) -> None: ... + def Commit(self, CommitFlags) -> None: ... + def Revert(self) -> None: ... + def Enum(self) -> PyIEnumSTATPROPSTG: ... + def SetTimes(self, ctime: PyTime, atime: PyTime, mtime: PyTime) -> None: ... + def SetClass(self, clsid: PyIID) -> None: ... + def Stat(self): ... + +class PyIPropertyStore: + def GetCount(self): ... + def GetAt(self, iProp) -> PyPROPERTYKEY: ... + def GetValue(self, Key: PyPROPERTYKEY) -> PyPROPVARIANT: ... + def SetValue(self, Key: PyPROPERTYKEY, Value: PyPROPVARIANT) -> None: ... + def Commit(self) -> None: ... + +class PyIPropertyStoreCache: + def GetState(self, key: PyPROPERTYKEY): ... + def GetValueAndState(self, key: PyPROPERTYKEY) -> tuple[PyPROPVARIANT, Incomplete]: ... + def SetState(self, key: PyPROPERTYKEY, state) -> None: ... + def SetValueAndState(self, key: PyPROPERTYKEY, value: PyPROPVARIANT, state) -> None: ... + +class PyIPropertyStoreCapabilities: + def IsPropertyWritable(self, key: PyPROPERTYKEY) -> bool: ... + +class PyIPropertySystem: + def GetPropertyDescription(self, Key: PyPROPERTYKEY, riid: PyIID) -> PyIPropertyDescription: ... + def GetPropertyDescriptionByName(self, CanonicalName, riid: PyIID) -> PyIPropertyDescription: ... + def GetPropertyDescriptionlistFromString(self, Proplist, riid: PyIID) -> PyIPropertyDescriptionlist: ... + def EnumeratePropertyDescriptions(self, Filter, riid: PyIID) -> PyIPropertyDescriptionlist: ... + def FormatForDisplay(self, Key: PyPROPERTYKEY, Value: PyPROPVARIANT, Flags): ... + def RegisterPropertySchema(self, Path) -> None: ... + def UnregisterPropertySchema(self, Path) -> None: ... + def RefreshPropertySchema(self) -> None: ... + +class PyIProvideClassInfo: + def GetClassInfo(self) -> PyITypeInfo: ... + +class PyIProvideClassInfo2: + def GetGUID(self, flags) -> PyIID: ... + +class PyIProvideExpressionContexts: + def EnumExpressionContexts(self) -> None: ... + +class PyIProvideTaskPage: + def GetPage(self, tpType, PersistChanges) -> None: ... + +class PyIQueryAssociations: + def Init(self, flags, assoc: str, hkeyProgId: PyHKEY | None = ..., hwnd: int | None = ...) -> None: ... + def GetKey(self, flags, assocKey, arg: str): ... + def GetString(self, flags, assocStr, arg: str): ... + +class PyIRelatedItem: + def GetItemIDlist(self) -> PyIDL: ... + def GetItem(self) -> PyIShellItem: ... + +class PyIRemoteDebugApplication: + def ResumeFromBreakPoint(self, prptFocus: PyIRemoteDebugApplicationThread, bra, era) -> None: ... + def CauseBreak(self) -> None: ... + def ConnectDebugger(self, pad: PyIApplicationDebugger) -> None: ... + def DisconnectDebugger(self) -> None: ... + def GetDebugger(self) -> PyIApplicationDebugger: ... 
+ def CreateInstanceAtApplication(self, rclsid: PyIID, pUnkOuter: PyIUnknown, dwClsContext, riid: PyIID) -> PyIUnknown: ... + def QueryAlive(self) -> None: ... + def EnumThreads(self) -> PyIEnumRemoteDebugApplicationThreads: ... + def GetName(self) -> None: ... + def GetRootNode(self) -> PyIDebugApplicationNode: ... + def EnumGlobalExpressionContexts(self): ... + +class PyIRemoteDebugApplicationEvents: + def OnConnectDebugger(self, pad: PyIApplicationDebugger) -> None: ... + def OnDisconnectDebugger(self) -> None: ... + def OnSetName(self, pstrName) -> None: ... + def OnDebugOutput(self, pstr) -> None: ... + def OnClose(self) -> None: ... + def OnEnterBreakPoint(self, prdat: PyIRemoteDebugApplicationThread) -> None: ... + def OnLeaveBreakPoint(self, prdat: PyIRemoteDebugApplicationThread) -> None: ... + def OnCreateThread(self, prdat: PyIRemoteDebugApplicationThread) -> None: ... + def OnDestroyThread(self, prdat: PyIRemoteDebugApplicationThread) -> None: ... + def OnBreakFlagChange(self, abf, prdatSteppingThread: PyIRemoteDebugApplicationThread) -> None: ... + +class PyIRemoteDebugApplicationThread: + def GetSystemThreadId(self) -> None: ... + def GetApplication(self) -> None: ... + def EnumStackFrames(self) -> None: ... + def GetDescription(self) -> None: ... + def SetNextStatement(self, pStackFrame: PyIDebugStackFrame, pCodeContext: PyIDebugCodeContext) -> None: ... + def GetState(self) -> None: ... + def Suspend(self) -> None: ... + def Resume(self) -> None: ... + def GetSuspendCount(self) -> None: ... + +class PyIRunningObjectTable: + def Register(self): ... + def Revoke(self): ... + def IsRunning(self, objectName: PyIMoniker) -> bool: ... + def GetObject(self, objectName: PyIMoniker) -> PyIUnknown: ... + def EnumRunning(self) -> PyIEnumMoniker: ... + +class PyIScheduledWorkItem: + def CreateTrigger(self) -> tuple[Incomplete, PyITaskTrigger]: ... + def DeleteTrigger(self, Trigger) -> None: ... + def GetTriggerCount(self): ... + def GetTrigger(self, iTrigger) -> PyITaskTrigger: ... + def GetTriggerString(self): ... + def GetRunTimes(self, Count, Begin: PyTime, End: PyTime) -> tuple[PyTime, Incomplete, Incomplete, Incomplete]: ... + def GetNextRunTime(self) -> PyTime: ... + def SetIdleWait(self, wIdleMinutes, wDeadlineMinutes) -> None: ... + def GetIdleWait(self) -> tuple[Incomplete, Incomplete]: ... + def Run(self) -> None: ... + def Terminate(self) -> None: ... + def EditWorkItem(self, hParent: int, dwReserved) -> None: ... + def GetMostRecentRunTime(self) -> PyTime: ... + def GetStatus(self): ... + def GetExitCode(self) -> tuple[Incomplete, Incomplete]: ... + def SetComment(self, Comment) -> None: ... + def GetComment(self) -> str: ... + def SetCreator(self, Creator) -> None: ... + def GetCreator(self) -> None: ... + def SetWorkItemData(self, Data: str) -> None: ... + def GetWorkItemData(self) -> str: ... + def SetErrorRetryCount(self, wRetryCount) -> None: ... + def GetErrorRetryCount(self) -> None: ... + def SetErrorRetryInterval(self, RetryInterval) -> None: ... + def GetErrorRetryInterval(self) -> None: ... + def SetFlags(self, dwFlags) -> None: ... + def GetFlags(self): ... + def SetAccountInformation(self, AccountName, Password) -> None: ... + def GetAccountInformation(self): ... + +class PyIServerSecurity: + def QueryBlanket(self, Capabilities: int = ...): ... + def ImpersonateClient(self) -> None: ... + def RevertToSelf(self) -> None: ... + def IsImpersonating(self) -> bool: ... + +class PyIServiceProvider: + def QueryService(self, clsid: PyIID, iid: PyIID) -> PyIUnknown: ... 
+ +class PyIShellBrowser: + def InsertMenusSB(self, hmenuShared: int, lpMenuWidths: PyOLEMENUGROUPWIDTHS) -> PyOLEMENUGROUPWIDTHS: ... + def SetMenuSB(self, hmenuShared: int, holemenuRes: int, hwndActiveObject: int) -> None: ... + def RemoveMenusSB(self, hmenuShared: int) -> None: ... + def SetStatusTextSB(self, pszStatusText) -> None: ... + def EnableModelessSB(self, fEnable) -> None: ... + def TranslateAcceleratorSB(self, pmsg: PyMSG, wID) -> None: ... + def BrowseObject(self, pidl: PyIDL, wFlags) -> None: ... + def GetViewStateStream(self, grfMode) -> PyIStream: ... + def GetControlWindow(self, _id) -> None: ... + def SendControlMsg(self, _id, uMsg, wParam, lParam): ... + def QueryActiveShellView(self) -> PyIShellView: ... + def OnViewWindowActive(self, pshv: PyIShellView) -> None: ... + def SetToolbarItems(self, lpButtons, uFlags) -> None: ... + +class PyIShellExtInit: + def Initialize(self, pFolder: PyIDL, pDataObject: PyIDataObject, hkey: int) -> None: ... + +class PyIShellFolder: + def ParseDisplayName(self, hwndOwner: int, pbc: PyIBindCtx, DisplayName, Attributes: int = ...): ... + def EnumObjects(self, grfFlags, hwndOwner: int | None = ...) -> PyIEnumIDlist: ... + def BindToObject(self, pidl: PyIDL, pbc: PyIBindCtx, riid: PyIID) -> PyIShellFolder: ... + def BindToStorage(self, pidl: PyIDL, pbc: PyIBindCtx, riid: PyIID): ... + def CompareIDs(self, lparam, pidl1: PyIDL, pidl2: PyIDL): ... + def CreateViewObject(self, hwndOwner, riid: PyIID) -> PyIShellView: ... + def GetAttributesOf(self, pidl: tuple[PyIDL, ...], rgfInOut): ... + def GetUIObjectOf( + self, hwndOwner: int, pidl: tuple[PyIDL, ...], riid: PyIID, iidout: PyIID, Reserved=... + ) -> tuple[Incomplete, PyIUnknown]: ... + def GetDisplayNameOf(self, pidl: PyIDL, uFlags): ... + def SetNameOf(self, hwndOwner, pidl: PyIDL, Name, Flags) -> PyIDL: ... + +class PyIShellFolder2: + def GetDefaultSearchGUID(self, pguid: PyIID) -> PyIID: ... + def EnumSearches(self): ... + def GetDefaultColumn(self) -> tuple[Incomplete, Incomplete]: ... + def GetDefaultColumnState(self, iColumn): ... + def GetDetailsEx(self, pidl: PyIDL, pscid): ... + def GetDetailsOf(self, pidl: PyIDL, iColumn) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def MapColumnToSCID(self, Column): ... + +class PyIShellIcon: + def GetIconOf(self, pidl: PyIDL) -> None: ... + +class PyIShellIconOverlay: + def GetOverlayIndex(self, pidl: PyIDL) -> None: ... + def GetOverlayIconIndex(self, pidl: PyIDL) -> None: ... + +class PyIShellIconOverlayIdentifier: + def IsMemberOf(self, path: str, attrib) -> bool: ... + def GetOverlayInfo(self) -> tuple[str, Incomplete, Incomplete]: ... + def GetPriority(self): ... + +class PyIShellIconOverlayManager: + def GetFileOverlayInfo(self, path, attrib, flags): ... + def GetReservedOverlayInfo(self, path, attrib, flags, ireservedID) -> None: ... + def RefreshOverlayImages(self, flags) -> None: ... + def LoadNonloadedOverlayIdentifiers(self) -> None: ... + def OverlayIndexFromImageIndex(self, iImage, fAdd) -> None: ... + +class PyIShellItem: + def BindToHandler(self, pbc: PyIBindCtx, bhid: PyIID, riid: PyIID): ... + def GetParent(self) -> PyIShellItem: ... + def GetDisplayName(self, sigdnName): ... + def GetAttributes(self, Mask): ... + def Compare(self, psi: PyIShellItem, hint): ... + +class PyIShellItem2: + def GetPropertyStore(self, Flags, riid: PyIID) -> PyIPropertyStore: ... + def GetPropertyStoreForKeys(self, Keys: tuple[Incomplete, ...], Flags, riid: PyIID) -> PyIPropertyStore: ... 
+ def GetPropertyStoreWithCreateObject(self, Flags, CreateObject: PyIUnknown, riid: PyIID) -> PyIPropertyStore: ... + def GetPropertyDescriptionlist(self, Type: PyPROPERTYKEY, riid: PyIID) -> PyIPropertyDescriptionlist: ... + def Update(self, BindCtx: Incomplete | None = ...) -> None: ... + def GetProperty(self, key: PyPROPERTYKEY): ... + def GetCLSID(self, key: PyPROPERTYKEY) -> PyIID: ... + def GetFileTime(self, key: PyPROPERTYKEY) -> PyTime: ... + def GetInt32(self, key: PyPROPERTYKEY): ... + def GetString(self, key: PyPROPERTYKEY): ... + def GetUInt32(self, key: PyPROPERTYKEY): ... + def GetUInt64(self, key: PyPROPERTYKEY): ... + def GetBool(self, key: PyPROPERTYKEY): ... + +class PyIShellItemArray: + def BindToHandler(self, pbc: PyIBindCtx, rbhid: PyIID, riid: PyIID): ... + def GetPropertyStore(self, flags, riid: PyIID) -> PyIPropertyStore: ... + def GetPropertyDescriptionlist(self, Type: PyPROPERTYKEY, riid: PyIID) -> PyIPropertyDescriptionlist: ... + def GetAttributes(self, AttribFlags, Mask): ... + def GetCount(self): ... + def GetItemAt(self, dwIndex) -> PyIShellItem: ... + def EnumItems(self) -> PyIEnumShellItems: ... + +class PyIShellItemResources: + def GetAttributes(self) -> None: ... + def GetSize(self): ... + def GetTimes(self) -> None: ... + def SetTimes(self, pftCreation: PyTime, pftWrite: PyTime, pftAccess: PyTime) -> None: ... + def GetResourceDescription(self, pcsir: PySHELL_ITEM_RESOURCE) -> None: ... + def EnumResources(self) -> PyIEnumResources: ... + def SupportsResource(self, pcsir: PySHELL_ITEM_RESOURCE): ... + def OpenResource(self, pcsir: PySHELL_ITEM_RESOURCE, riid: PyIID) -> PyIUnknown: ... + def CreateResource(self, sir: PySHELL_ITEM_RESOURCE, riid: PyIID): ... + def MarkForDelete(self) -> None: ... + +class PyIShellLibrary: + def LoadLibraryFromItem(self, Library: PyIShellItem, Mode) -> None: ... + def LoadLibraryFromKnownFolder(self, Library: PyIID, Mode) -> None: ... + def AddFolder(self, Location: PyIShellItem) -> None: ... + def RemoveFolder(self, Location: PyIShellItem) -> None: ... + def GetFolders(self, Filter, riid: PyIID) -> PyIShellItemArray: ... + def ResolveFolder(self, FolderToResolve: PyIShellItem, Timeout, riid: PyIID) -> PyIShellItem: ... + def GetDefaultSaveFolder(self, Type, riid: PyIID) -> PyIShellItem: ... + def SetDefaultSaveFolder(self, Type, SaveFolder: PyIShellItem) -> None: ... + def GetOptions(self): ... + def SetOptions(self, Mask, Options) -> None: ... + def GetFolderType(self) -> PyIID: ... + def SetFolderType(self, Type: PyIID) -> None: ... + def GetIcon(self): ... + def SetIcon(self, Icon) -> None: ... + def Commit(self) -> None: ... + def Save(self, FolderToSaveIn: PyIShellItem, LibraryName, Flags) -> PyIShellItem: ... + def SaveInKnownFolder(self, FolderToSaveIn: PyIID, LibraryName, Flags) -> PyIShellItem: ... + +class PyIShellLink: + def GetPath(self, fFlags, cchMaxPath) -> tuple[Incomplete, WIN32_FIND_DATA]: ... + def GetIDlist(self) -> PyIDL: ... + def SetIDlist(self, pidl: PyIDL) -> None: ... + def GetDescription(self, cchMaxName: int = ...): ... + def SetDescription(self, Name) -> None: ... + def GetWorkingDirectory(self, cchMaxName: int = ...): ... + def SetWorkingDirectory(self, Dir) -> None: ... + def GetArguments(self, cchMaxName: int = ...): ... + def SetArguments(self, args) -> None: ... + def GetHotkey(self): ... + def SetHotkey(self, wHotkey) -> None: ... + def GetShowCmd(self): ... + def SetShowCmd(self, iShowCmd) -> None: ... + def GetIconLocation(self, cchMaxPath): ... 
+ def SetIconLocation(self, iconPath: str, iIcon) -> None: ... + def SetRelativePath(self, relPath: str, reserved: int = ...) -> None: ... + def Resolve(self, hwnd: int, fFlags) -> None: ... + def SetPath(self, path: str) -> None: ... + +class PyIShellLinkDatalist: + def AddDataBlock(self, DataBlock) -> None: ... + def CopyDataBlock(self, Sig): ... + def GetFlags(self): ... + def RemoveDataBlock(self, Sig) -> None: ... + def SetFlags(self, Flags) -> None: ... + +class PyIShellView: + def TranslateAccelerator(self, pmsg): ... + def EnableModeless(self, fEnable) -> None: ... + def UIActivate(self, uState) -> None: ... + def Refresh(self) -> None: ... + def CreateViewWindow( + self, + psvPrevious: PyIShellView, + pfs: tuple[Incomplete, Incomplete], + psb: PyIShellBrowser, + prcView: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + ): ... + def DestroyViewWindow(self) -> None: ... + def GetCurrentInfo(self): ... + def SaveViewState(self) -> None: ... + def SelectItem(self, pidlItem: PyIDL, uFlags) -> None: ... + def GetItemObject(self, uItem, riid: PyIID) -> PyIUnknown: ... + +class PyISpecifyPropertyPages: + def GetPages(self) -> None: ... + +class PyIStorage: + def CreateStream(self, Name, Mode, reserved1: int = ..., reserved2: int = ...) -> PyIStream: ... + def OpenStream(self, Name, reserved1, Mode, reserved2: int = ...) -> PyIStream: ... + def CreateStorage(self, Name, Mode, StgFmt, reserved2: int = ...) -> PyIStorage: ... + def OpenStorage(self, Name, Priority: PyIStorage, Mode, snbExclude, reserved=...) -> PyIStorage: ... + def CopyTo(self, rgiidExclude: tuple[Incomplete, Incomplete], snbExclude, stgDest: PyIStorage) -> None: ... + def MoveElementTo(self, Name, stgDest: PyIStorage, NewName, Flags) -> None: ... + def Commit(self, grfCommitFlags) -> None: ... + def Revert(self) -> None: ... + def EnumElements(self, reserved1: int = ..., reserved2: Incomplete | None = ..., reserved3: int = ...) -> PyIEnumSTATSTG: ... + def DestroyElement(self, name: str) -> None: ... + def RenameElement(self, OldName, NewName) -> None: ... + def SetElementTimes(self, name, ctime: PyTime, atime: PyTime, mtime: PyTime) -> None: ... + def SetClass(self, clsid: PyIID) -> None: ... + def SetStateBits(self, grfStateBits, grfMask) -> None: ... + def Stat(self, grfStatFlag) -> STATSTG: ... + +class PyIStream: + def Read(self, numBytes) -> str: ... + def read(self, numBytes) -> str: ... + def Write(self, data: str) -> None: ... + def write(self, data: str) -> None: ... + def Seek(self, offset, origin) -> ULARGE_INTEGER: ... + def SetSize(self, newSize: ULARGE_INTEGER) -> None: ... + def CopyTo(self, stream: PyIStream, cb: ULARGE_INTEGER) -> ULARGE_INTEGER: ... + def Commit(self, flags) -> None: ... + def Revert(self) -> None: ... + def LockRegion(self, offset: ULARGE_INTEGER, cb: ULARGE_INTEGER, lockType) -> None: ... + def UnLockRegion(self, offset: ULARGE_INTEGER, cb: ULARGE_INTEGER, lockType) -> None: ... + def Clone(self) -> PyIStream: ... + def Stat(self, grfStatFlag: int = ...) -> STATSTG: ... + +class PyITask: + def SetApplicationName(self, ApplicationName) -> None: ... + def GetApplicationName(self): ... + def SetParameters(self, Parameters) -> None: ... + def GetParameters(self): ... + def SetWorkingDirectory(self, WorkingDirectory) -> None: ... + def GetWorkingDirectory(self): ... + def SetPriority(self, Priority) -> None: ... + def GetPriority(self): ... + def SetTaskFlags(self, dwFlags) -> None: ... + def GetTaskFlags(self): ... + def SetMaxRunTime(self, MaxRunTimeMS) -> None: ... 
+ def GetMaxRunTime(self): ... + +class PyITaskScheduler: + def SetTargetComputer(self, Computer) -> None: ... + def GetTargetComputer(self): ... + def Enum(self) -> tuple[str, ...]: ... + def Activate(self, Name, riid: PyIID) -> PyITask: ... + def Delete(self, TaskName) -> None: ... + def NewWorkItem(self, TaskName, rclsid: PyIID, riid: PyIID) -> PyITask: ... + def AddWorkItem(self, TaskName, WorkItem: PyITask) -> None: ... + def IsOfType(self, Name, riid: PyIID) -> bool: ... + +class PyITaskTrigger: + def SetTrigger(self, Trigger: PyTASK_TRIGGER) -> None: ... + def GetTrigger(self) -> PyTASK_TRIGGER: ... + def GetTriggerString(self) -> str: ... + +class PyITaskbarlist: + def HrInit(self) -> None: ... + def AddTab(self, hwnd: int) -> None: ... + def DeleteTab(self, hwnd: int) -> None: ... + def ActivateTab(self, hwnd: int) -> None: ... + def SetActiveAlt(self, hwnd: int) -> None: ... + +class PyITransferAdviseSink: + def UpdateProgress(self, SizeCurrent, SizeTotal, FilesCurrent, FilesTotal, FoldersCurrent, FoldersTotal) -> None: ... + def UpdateTransferState(self, State) -> None: ... + def ConfirmOverwrite(self, Source: PyIShellItem, DestParent: PyIShellItem, Name): ... + def ConfirmEncryptionLoss(self, Source: PyIShellItem): ... + def FileFailure(self, Item: PyIShellItem, ItemName, Error) -> tuple[Incomplete, Incomplete]: ... + def SubStreamFailure(self, Item: PyIShellItem, StreamName, Error): ... + def PropertyFailure(self, Item: PyIShellItem, key: PyPROPERTYKEY, Error): ... + +class PyITransferDestination: + def Advise(self, Sink: PyITransferAdviseSink): ... + def Unadvise(self, Cookie) -> None: ... + def CreateItem( + self, Name, Attributes, Size, Flags, riidItem: PyIID, riidResources: PyIID + ) -> tuple[Incomplete, Incomplete, Incomplete]: ... + +class PyITransferMediumItem: ... + +class PyITransferSource: + def Advise(self, Sink: PyITransferAdviseSink): ... + def Unadvise(self, Cookie) -> None: ... + def SetProperties(self, proparray: PyIPropertyChangeArray) -> None: ... + def OpenItem(self, Item: PyIShellItem, flags, riid: PyIID) -> tuple[Incomplete, PyIShellItemResources]: ... + def MoveItem(self, Item: PyIShellItem, ParentDst: PyIShellItem, NameDst, flags) -> tuple[Incomplete, PyIShellItem]: ... + def RecycleItem(self, Source: PyIShellItem, ParentDest: PyIShellItem, flags) -> tuple[Incomplete, PyIShellItem]: ... + def RemoveItem(self, Source: PyIShellItem, flags): ... + def RenameItem(self, Source: PyIShellItem, NewName, flags) -> tuple[Incomplete, PyIShellItem]: ... + def LinkItem(self, Source: PyIShellItem, ParentDest: PyIShellItem, NewName, flags) -> tuple[Incomplete, PyIShellItem]: ... + def ApplyPropertiesToItem(self, Source: PyIShellItem) -> PyIShellItem: ... + def GetDefaultDestinationName(self, Source: PyIShellItem, ParentDest: PyIShellItem): ... + def EnterFolder(self, ChildFolderDest: PyIShellItem): ... + def LeaveFolder(self, ChildFolderDest: PyIShellItem): ... + +class PyITypeComp: + def Bind(self, szName: str, wflags: int = ...): ... + def BindType(self, szName: str): ... + +class PyITypeInfo: + def GetContainingTypeLib(self) -> tuple[PyITypeLib, Incomplete]: ... + def GetDocumentation(self, memberId) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def GetFuncDesc(self, memberId) -> FUNCDESC: ... + def GetImplTypeFlags(self, index): ... + def GetIDsOfNames(self): ... + def GetNames(self, memberId): ... + def GetTypeAttr(self) -> TYPEATTR: ... + def GetRefTypeInfo(self, hRefType) -> PyITypeInfo: ... + def GetRefTypeOfImplType(self, hRefType): ... 
+ def GetVarDesc(self, memberId) -> VARDESC: ... + def GetTypeComp(self) -> PyITypeComp: ... + +class PyITypeLib: + def GetDocumentation(self, index): ... + def GetLibAttr(self) -> TLIBATTR: ... + def GetTypeComp(self) -> PyITypeComp: ... + def GetTypeInfo(self, index) -> PyITypeInfo: ... + def GetTypeInfoCount(self): ... + def GetTypeInfoOfGuid(self, iid: PyIID) -> PyITypeInfo: ... + def GetTypeInfoType(self, index): ... + +class PyIUniformResourceLocator: + def GetURL(self): ... + def SetURL(self, URL, InFlags: int = ...) -> None: ... + def InvokeCommand(self, Verb, Flags: int = ..., hwndParent: int = ...): ... + +@final +class PyIUnknown: + def QueryInterface(self, iid, useIID: Incomplete | None = ...) -> PyIUnknown: ... + +class PyIViewObject: + def Draw( + self, + dwDrawAspect, + lindex, + aspectFlags, + hdcTargetDev, + hdcDraw, + arg: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + arg1: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + funcContinue, + obContinue, + ) -> None: ... + def GetColorSet(self, dwDrawAspect, lindex, aspectFlags, hicTargetDev) -> None: ... + def Freeze(self, dwDrawAspect, lindex, aspectFlags) -> None: ... + def Unfreeze(self, dwFreeze) -> None: ... + def SetAdvise(self, aspects, advf, pAdvSink) -> None: ... + def GetAdvise(self) -> None: ... + +class PyIViewObject2: + def GetExtent(self, dwDrawAspect, lindex, targetDevice) -> None: ... + +class PyMAPINAMEIDArray: ... +class PyOLEMENUGROUPWIDTHS: ... +class PyPROPERTYKEY: ... + +@final +class PyPROPVARIANT: + @property + def vt(self): ... + def GetValue(self): ... + def ToString(self): ... + def ChangeType(self, Type, Flags: int = ...) -> PyPROPVARIANT: ... + +class PySAndRestriction: ... +class PySBinaryArray: ... +class PySBitMaskRestriction: ... +class PySContentRestriction: ... +class PySExistRestriction: ... +class PySHELL_ITEM_RESOURCE: ... +class PySNotRestriction: ... +class PySOrRestriction: ... +class PySPropTagArray: ... +class PySPropValue: ... +class PySPropValueArray: ... +class PySPropertyRestriction: ... +class PySRestriction: ... +class PySRow: ... +class PySRowSet: ... +class PySSortOrderItem: ... +class PySSortOrderSet: ... + +class PySTGMEDIUM: + @property + def tymed(self): ... + @property + def data(self): ... + @property + def data_handle(self): ... + def set(self, tymed, data) -> None: ... + +class PyTASK_TRIGGER: ... +class RTF_WCSINFO: ... +class SHFILEINFO: ... +class SHFILEOPSTRUCT: ... +class SI_ACCESS: ... +class SI_INHERIT_TYPE: ... +class SI_OBJECT_INFO: ... +class STATSTG: ... +class TLIBATTR: ... + +class TYPEATTR: + @property + def iid(self) -> PyIID: ... + @property + def lcid(self): ... + @property + def memidConstructor(self): ... + @property + def memidDestructor(self): ... + @property + def cbSizeInstance(self): ... + @property + def typekind(self): ... + @property + def cFuncs(self): ... + @property + def cVars(self): ... + @property + def cImplTypes(self): ... + @property + def cbSizeVft(self): ... + @property + def cbAlignment(self): ... + @property + def wTypeFlags(self): ... + @property + def wMajorVerNum(self): ... + @property + def wMinorVerNum(self): ... + @property + def tdescAlias(self) -> TYPEDESC: ... + @property + def idldeskType(self) -> IDLDESC: ... + +class TYPEDESC: ... + +class VARDESC: + @property + def memid(self): ... + @property + def value(self): ... + @property + def elemdescVar(self) -> ELEMDESC: ... + @property + def varFlags(self): ... + @property + def varkind(self): ... + +class CHARFORMAT: ... +class CREATESTRUCT: ... 
+class LV_COLUMN: ... +class LV_ITEM: ... +class PARAFORMAT: ... +class PyAssocCObject: ... + +class PyAssocObject: + def AttachObject(self) -> None: ... + def GetAttachedObject(self): ... + +class PyCBitmap: + def CreateCompatibleBitmap(self, dc: PyCDC, width, height) -> None: ... + def GetSize(self) -> tuple[Incomplete, Incomplete]: ... + def GetHandle(self) -> PyGdiHANDLE: ... + def LoadBitmap(self, idRes, obDLL: PyDLL | None = ...) -> None: ... + def LoadBitmapFile(self, fileObject) -> None: ... + def LoadPPMFile(self, fileObject, cols, rows) -> None: ... + def Paint( + self, + dcObject: PyCDC, + arg: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + arg1: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + ) -> None: ... + def GetInfo(self): ... + def GetBitmapBits(self, asString: int = ...) -> str: ... + def SaveBitmapFile(self, dcObject: PyCDC, Filename: str): ... + +class PyCBrush: + def CreateSolidBrush(self) -> None: ... + def GetSafeHandle(self): ... + +class PyCButton: + def CreateWindow( + self, caption: str, style, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], parent: PyCWnd, _id + ) -> None: ... + def GetBitmap(self): ... + def SetBitmap(self, hBitmap: int = ...): ... + def GetCheck(self): ... + def SetCheck(self, idCheck) -> None: ... + def GetState(self): ... + def SetState(self, bHighlight): ... + def GetButtonStyle(self): ... + def SetButtonStyle(self, style, bRedraw: int = ...): ... + +class PyCCmdTarget: + def BeginWaitCursor(self) -> None: ... + def EndWaitCursor(self) -> None: ... + def HookCommand(self, obHandler, _id): ... + def HookCommandUpdate(self, obHandler, _id): ... + def HookOleEvent(self): ... + def HookNotify(self, obHandler, _id): ... + def RestoreWaitCursor(self) -> None: ... + +class PyCCmdUI: + @property + def m_nIndex(self): ... + @property + def m_nID(self): ... + @property + def m_pMenu(self) -> PyCMenu: ... + @property + def m_pSubMenu(self) -> PyCMenu: ... + def Enable(self, bEnable: int = ...) -> None: ... + def SetCheck(self, state: int = ...) -> None: ... + def SetRadio(self, bOn: int = ...) -> None: ... + def SetText(self, text: str) -> None: ... + def ContinueRouting(self) -> None: ... + +class PyCColorDialog: + def GetColor(self): ... + def DoModal(self): ... + def GetSavedCustomColors(self): ... + def SetCurrentColor(self, color) -> None: ... + def SetCustomColors(self) -> None: ... + def GetCustomColors(self) -> tuple[Incomplete, ...]: ... + +class PyCComboBox: + def AddString(self, _object): ... + def DeleteString(self, pos): ... + def Dir(self, attr, wild: str): ... + def GetCount(self): ... + def GetCurSel(self): ... + def GetEditSel(self): ... + def GetExtendedUI(self): ... + def GetItemData(self, item): ... + def GetItemValue(self, item): ... + def GetLBText(self, index) -> str: ... + def GetLBTextLen(self, index): ... + def InsertString(self, pos, _object): ... + def LimitText(self, _max): ... + def ResetContent(self) -> None: ... + def SelectString(self, after, string: str) -> None: ... + def SetCurSel(self, index) -> None: ... + def SetEditSel(self, start, end) -> None: ... + def SetExtendedUI(self, bExtended: int = ...) -> None: ... + def SetItemData(self, item, Data): ... + def SetItemValue(self, item, data): ... + def ShowDropDown(self, bShowIt: int = ...) -> None: ... + +class PyCCommonDialog: ... +class PyCControl: ... + +class PyCControlBar: + @property + def dockSite(self) -> PyCFrameWnd: ... + @property + def dockBar(self) -> PyCWnd: ... + @property + def dockContext(self) -> PyCDockContext: ... 
+ @property + def dwStyle(self): ... + @property + def dwDockStyle(self): ... + def CalcDynamicLayout(self, length, dwMode): ... + def CalcFixedLayout(self, bStretch, bHorz): ... + def EnableDocking(self, style) -> None: ... + def EraseNonClient(self) -> None: ... + def GetBarStyle(self): ... + def GetCount(self): ... + def GetDockingFrame(self) -> PyCFrameWnd: ... + def IsFloating(self) -> bool: ... + def SetBarStyle(self, style) -> None: ... + def ShowWindow(self): ... + +class PyCCtrlView: + def OnCommand(self, wparam, lparam) -> None: ... + +class PyCDC: + def AbortDoc(self) -> None: ... + def Arc( + self, + rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + pointStart: tuple[Incomplete, Incomplete], + pointEnd: tuple[Incomplete, Incomplete], + ) -> None: ... + def BeginPath(self) -> None: ... + def BitBlt( + self, + destPos: tuple[Incomplete, Incomplete], + size: tuple[Incomplete, Incomplete], + dc: PyCDC, + srcPos: tuple[Incomplete, Incomplete], + rop, + ) -> None: ... + def Chord( + self, + rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + pointStart: tuple[Incomplete, Incomplete], + pointEnd: tuple[Incomplete, Incomplete], + ) -> None: ... + def CreateCompatibleDC(self, dcFrom: PyCDC | None = ...) -> PyCDC: ... + def CreatePrinterDC(self, printerName: str | None = ...) -> None: ... + def DeleteDC(self) -> None: ... + def DPtoLP(self, point: tuple[Incomplete, Incomplete], x, y) -> tuple[Incomplete, Incomplete]: ... + def Draw3dRect(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], colorTopLeft, colorBotRight) -> None: ... + def DrawFocusRect(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete]) -> None: ... + def DrawFrameControl(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], typ, state) -> None: ... + def DrawIcon(self, point: tuple[Incomplete, Incomplete], hIcon: int) -> None: ... + def DrawText( + self, s: str, _tuple: tuple[Incomplete, Incomplete, Incomplete, Incomplete], _format + ) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def Ellipse(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete]) -> None: ... + def EndDoc(self) -> None: ... + def EndPage(self) -> None: ... + def EndPath(self) -> None: ... + def ExtTextOut( + self, + _int, + _int1, + _int2, + rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + string, + _tuple: tuple[tuple[Incomplete, Incomplete], ...], + ) -> None: ... + def FillPath(self) -> None: ... + def FillRect(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], brush: PyCBrush) -> None: ... + def FillSolidRect(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], color) -> None: ... + def FrameRect(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], brush: PyCBrush) -> None: ... + def GetBrushOrg(self) -> tuple[Incomplete, Incomplete]: ... + def GetClipBox(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def GetCurrentPosition(self) -> tuple[Incomplete, Incomplete]: ... + def GetDeviceCaps(self, index): ... + def GetHandleAttrib(self): ... + def GetHandleOutput(self): ... + def GetMapMode(self): ... + def GetNearestColor(self, color): ... + def GetPixel(self, x, y) -> None: ... + def GetSafeHdc(self): ... + def GetTextExtent(self, text: str) -> tuple[Incomplete, Incomplete]: ... + def GetTextExtentPoint(self, text: str) -> tuple[Incomplete, Incomplete]: ... + def GetTextFace(self) -> str: ... + def GetTextMetrics(self): ... + def GetViewportExt(self) -> tuple[Incomplete, Incomplete]: ... 
+ def GetViewportOrg(self) -> tuple[Incomplete, Incomplete]: ... + def GetWindowExt(self) -> tuple[Incomplete, Incomplete]: ... + def GetWindowOrg(self) -> tuple[Incomplete, Incomplete]: ... + def IntersectClipRect(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete]) -> None: ... + def IsPrinting(self) -> bool: ... + def LineTo(self, point: tuple[Incomplete, Incomplete], x, y) -> None: ... + def LPtoDP(self, point: tuple[Incomplete, Incomplete], x, y) -> tuple[Incomplete, Incomplete]: ... + def MoveTo(self, point: tuple[Incomplete, Incomplete], x, y) -> tuple[Incomplete, Incomplete]: ... + def OffsetWindowOrg(self, arg: tuple[Incomplete, Incomplete]) -> tuple[Incomplete, Incomplete]: ... + def OffsetViewportOrg(self, arg: tuple[Incomplete, Incomplete]) -> tuple[Incomplete, Incomplete]: ... + def PatBlt(self, destPos: tuple[Incomplete, Incomplete], size: tuple[Incomplete, Incomplete], rop) -> None: ... + def Pie(self, x1, y1, x2, y2, x3, y3, x4, y4) -> None: ... + def PolyBezier(self) -> None: ... + def Polygon(self) -> None: ... + def Polyline(self, points: list[tuple[Incomplete, Incomplete]]) -> None: ... + def RealizePalette(self): ... + def Rectangle(self): ... + def RectVisible(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete]): ... + def RestoreDC(self, saved) -> None: ... + def SaveDC(self): ... + def ScaleWindowExt(self) -> tuple[Incomplete, Incomplete]: ... + def ScaleViewportExt(self) -> tuple[Incomplete, Incomplete]: ... + def SelectClipRgn(self): ... + def SelectObject(self, ob): ... + def SetBkColor(self, color): ... + def SetBkMode(self, mode): ... + def SetBrushOrg(self, point: tuple[Incomplete, Incomplete]) -> tuple[Incomplete, Incomplete]: ... + def SetGraphicsMode(self, mode): ... + def SetMapMode(self, newMode): ... + def SetPixel(self, x, y, color) -> None: ... + def SetPolyFillMode(self, point: tuple[Incomplete, Incomplete]): ... + def SetROP2(self, mode): ... + def SetTextAlign(self, newFlags): ... + def SetTextColor(self, color): ... + def SetWindowExt(self, size: tuple[Incomplete, Incomplete]) -> tuple[Incomplete, Incomplete]: ... + def SetWindowOrg(self, arg: tuple[Incomplete, Incomplete]) -> tuple[Incomplete, Incomplete]: ... + def SetViewportExt(self, size: tuple[Incomplete, Incomplete]) -> tuple[Incomplete, Incomplete]: ... + def SetViewportOrg(self, arg: tuple[Incomplete, Incomplete]) -> tuple[Incomplete, Incomplete]: ... + def SetWorldTransform(self): ... + def StartDoc(self, docName: str, outputFile: str) -> None: ... + def StartPage(self) -> None: ... + def StretchBlt( + self, + destPos: tuple[Incomplete, Incomplete], + size: tuple[Incomplete, Incomplete], + dc: PyCDC, + srcPos: tuple[Incomplete, Incomplete], + size1: tuple[Incomplete, Incomplete], + rop, + ) -> None: ... + def StrokeAndFillPath(self) -> None: ... + def StrokePath(self) -> None: ... + def TextOut(self, _int, _int1, string) -> None: ... + +class PyCDialog: + def CreateWindow(self, obParent: PyCWnd | None = ...) -> None: ... + def DoModal(self): ... + def EndDialog(self, result) -> None: ... + def GotoDlgCtrl(self, control: PyCWnd) -> None: ... + def MapDialogRect( + self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete] + ) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def OnCancel(self) -> None: ... + def OnOK(self) -> None: ... + def OnInitDialog(self): ... + +class PyCDialogBar: + def CreateWindow(self, parent: PyCWnd, template: PyResourceId, style, _id) -> None: ... 
+ +class PyCDocTemplate: + def DoCreateDoc(self, fileName: str | None = ...) -> PyCDocument: ... + def FindOpenDocument(self, fileName: str) -> PyCDocument: ... + def GetDocString(self, docIndex) -> str: ... + def GetDocumentlist(self): ... + def GetResourceID(self) -> None: ... + def GetSharedMenu(self) -> PyCMenu: ... + def InitialUpdateFrame( + self, frame: PyCFrameWnd | None = ..., doc: PyCDocument | None = ..., bMakeVisible: int = ... + ) -> None: ... + def SetContainerInfo(self, _id) -> None: ... + def SetDocStrings(self, docStrings: str) -> None: ... + def OpenDocumentFile(self, filename: str, bMakeVisible: int = ...) -> None: ... + +class PyCDockContext: + @property + def ptLast(self) -> tuple[Incomplete, Incomplete]: ... + @property + def rectLast(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + @property + def sizeLast(self) -> tuple[Incomplete, Incomplete]: ... + @property + def bDitherLast(self): ... + @property + def rectDragHorz(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + @property + def rectDragVert(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + @property + def rectFrameDragHorz(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + @property + def rectFrameDragVert(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + @property + def dwDockStyle(self): ... + @property + def dwOverDockStyle(self): ... + @property + def dwStyle(self): ... + @property + def bFlip(self): ... + @property + def bForceFrame(self): ... + @property + def bDragging(self): ... + @property + def nHitTest(self): ... + @property + def uMRUDockID(self): ... + @property + def rectMRUDockPos(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + @property + def dwMRUFloatStyle(self): ... + @property + def ptMRUFloatPos(self) -> tuple[Incomplete, Incomplete]: ... + def EndDrag(self): ... + def StartDrag(self, pt: tuple[Incomplete, Incomplete]): ... + def EndResize(self): ... + def StartResize(self, hittest, pt: tuple[Incomplete, Incomplete]): ... + def ToggleDocking(self): ... + +class PyCDocument: + def DeleteContents(self) -> None: ... + def DoSave(self, fileName: str, bReplace: int = ...) -> None: ... + def DoFileSave(self) -> None: ... + def GetDocTemplate(self) -> PyCDocTemplate: ... + def GetAllViews(self) -> list[Incomplete]: ... + def GetFirstView(self) -> PyCView: ... + def GetPathName(self) -> str: ... + def GetTitle(self) -> str: ... + def IsModified(self) -> bool: ... + def OnChangedViewlist(self) -> None: ... + def OnCloseDocument(self) -> None: ... + def OnNewDocument(self) -> None: ... + def OnOpenDocument(self, pathName: str) -> None: ... + def OnSaveDocument(self, pathName: str) -> None: ... + def SetModifiedFlag(self, bModified: int = ...) -> None: ... + def SaveModified(self): ... + def SetPathName(self, path: str) -> None: ... + def SetTitle(self, title: str) -> None: ... + def UpdateAllViews(self, sender: PyCView, hint: Incomplete | None = ...) -> None: ... + +class PyCEdit: + def CreateWindow(self, style, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], parent: PyCWnd, _id) -> None: ... + def Clear(self): ... + def Copy(self) -> None: ... + def Cut(self) -> None: ... + def FmtLines(self, bAddEOL): ... + def GetFirstVisibleLine(self): ... + def GetSel(self) -> tuple[Incomplete, Incomplete]: ... + def GetLine(self, lineNo): ... + def GetLineCount(self): ... + def LimitText(self, nChars: int = ...) -> None: ... + def LineFromChar(self, charNo: int = ...): ... 
+ def LineIndex(self, lineNo: int = ...): ... + def LineScroll(self, nLines, nChars: int = ...): ... + def Paste(self) -> None: ... + def ReplaceSel(self, text: str) -> None: ... + def SetReadOnly(self, bReadOnly: int = ...) -> None: ... + def SetSel(self, start, end, arg, bNoScroll1, bNoScroll: int = ...) -> None: ... + +class PyCEditView: + def IsModified(self) -> bool: ... + def LoadFile(self, fileName: str) -> None: ... + def SetModifiedFlag(self, bModified: int = ...) -> None: ... + def GetEditCtrl(self): ... + def PreCreateWindow(self, createStruct): ... + def SaveFile(self, fileName: str) -> None: ... + def OnCommand(self, wparam, lparam) -> None: ... + +class PyCFileDialog: + def GetPathName(self) -> str: ... + def GetFileName(self) -> str: ... + def GetFileExt(self) -> str: ... + def GetFileTitle(self) -> str: ... + def GetPathNames(self) -> str: ... + def GetReadOnlyPref(self): ... + def SetOFNTitle(self, title: str) -> None: ... + def SetOFNInitialDir(self, title: str) -> None: ... + +class PyCFont: + def GetSafeHandle(self): ... + +class PyCFontDialog: + def DoModal(self): ... + def GetCurrentFont(self): ... + def GetCharFormat(self): ... + def GetColor(self): ... + def GetFaceName(self) -> str: ... + def GetStyleName(self) -> str: ... + def GetSize(self): ... + def GetWeight(self): ... + def IsStrikeOut(self) -> bool: ... + def IsUnderline(self) -> bool: ... + def IsBold(self) -> bool: ... + def IsItalic(self) -> bool: ... + +class PyCFormView: + def OnCommand(self, wparam, lparam) -> None: ... + +class PyCFrameWnd: + def BeginModalState(self) -> None: ... + def CreateWindow( + self, + wndClass: str, + title: str, + style, + PyCWnd, + menuId, + styleEx, + rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete] | None = ..., + createContext: Incomplete | None = ..., + ): ... + def EndModalState(self) -> None: ... + def DockControlBar( + self, controlBar: PyCControlBar, arg: tuple[Incomplete, Incomplete, Incomplete, Incomplete], dockBarId: int = ... + ) -> None: ... + def EnableDocking(self, style) -> None: ... + def FloatControlBar(self, controlBar: PyCControlBar, arg: tuple[Incomplete, Incomplete], style) -> None: ... + def GetActiveDocument(self) -> PyCDocument: ... + def GetControlBar(self, _id) -> PyCControlBar: ... + def GetMessageString(self, _id) -> str: ... + def GetMessageBar(self) -> PyCWnd: ... + def IsTracking(self) -> bool: ... + def InModalState(self): ... + def LoadAccelTable(self, _id: PyResourceId) -> None: ... + def LoadFrame( + self, idResource, style: int = ..., wndParent: PyCWnd | None = ..., context: Incomplete | None = ... + ) -> None: ... + def LoadBarState(self, profileName: str) -> None: ... + def PreCreateWindow(self, createStruct): ... + def SaveBarState(self, profileName: str) -> None: ... + def ShowControlBar(self, controlBar: PyCControlBar, bShow, bDelay) -> None: ... + def RecalcLayout(self, bNotify: int = ...) -> None: ... + def GetActiveView(self) -> PyCView: ... + def OnBarCheck(self, _id): ... + def OnUpdateControlBarMenu(self, cmdUI: PyCCmdUI): ... + def SetActiveView(self, view: PyCView, bNotify: int = ...) -> None: ... + +class PyCGdiObject: ... + +class PyCImagelist: + def Add(self, arg: tuple[Incomplete, Incomplete], bitmap, color, hIcon): ... + def Destroy(self) -> None: ... + def DeleteImagelist(self) -> None: ... + def GetBkColor(self): ... + def GetSafeHandle(self): ... + def GetImageCount(self): ... + def GetImageInfo(self, index): ... + def SetBkColor(self, color) -> None: ... 
+ +class PyClistBox: + def AddString(self, _object): ... + def DeleteString(self, pos): ... + def Dir(self, attr, wild: str): ... + def GetCaretIndex(self): ... + def GetCount(self): ... + def GetCurSel(self): ... + def GetItemData(self, item): ... + def GetItemValue(self, item): ... + def GetSel(self, index): ... + def GetSelCount(self): ... + def GetSelItems(self): ... + def GetSelTextItems(self): ... + def GetTopIndex(self): ... + def GetText(self, index) -> str: ... + def GetTextLen(self, index): ... + def InsertString(self, pos, _object): ... + def ResetContent(self) -> None: ... + def SetCaretIndex(self, index, bScroll: int = ...) -> None: ... + def SelectString(self, after, string: str) -> None: ... + def SelItemRange(self, bSel, start, end) -> None: ... + def SetCurSel(self, index) -> None: ... + def SetItemData(self, item, Data): ... + def SetItemValue(self, item, data): ... + def SetSel(self, index, bSel: int = ...) -> None: ... + def SetTabStops(self, eachTabStop, tabStops) -> None: ... + def SetTopIndex(self, index) -> None: ... + +class PyClistCtrl: + def Arrange(self, code) -> None: ... + def CreateWindow(self, style, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], PyCWnd, _id) -> None: ... + def DeleteAllItems(self) -> None: ... + def DeleteItem(self, item) -> None: ... + def GetTextColor(self): ... + def SetTextColor(self, color) -> None: ... + def GetBkColor(self): ... + def SetBkColor(self, color) -> None: ... + def GetItem(self, item, sub) -> LV_ITEM: ... + def GetItemCount(self): ... + def GetItemRect(self, item, bTextOnly) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def GetEditControl(self) -> PyCEdit: ... + def EditLabel(self, item) -> PyCEdit: ... + def EnsureVisible(self, item, bPartialOK): ... + def CreateDragImage(self, item) -> tuple[PyCImagelist, Incomplete, Incomplete]: ... + def GetImagelist(self, nImagelist) -> PyCImagelist: ... + def GetNextItem(self, item, flags): ... + def InsertColumn(self, colNo, item: LV_COLUMN): ... + def InsertItem(self, item: LV_ITEM, item1, text, image, item2, text1): ... + def SetImagelist(self, imagelist: PyCImagelist, imageType): ... + def GetColumn(self, column) -> LV_COLUMN: ... + def GetTextBkColor(self): ... + def SetTextBkColor(self, color) -> None: ... + def GetTopIndex(self): ... + def GetCountPerPage(self): ... + def GetSelectedCount(self): ... + def SetItem(self, item: LV_ITEM): ... + def SetItemState(self, item, state, mask): ... + def GetItemState(self, item, mask): ... + def SetItemData(self, item, Data): ... + def GetItemData(self, item): ... + def SetItemCount(self, count) -> None: ... + def SetItemText(self, item, sub, text: str): ... + def GetItemText(self, item, sub): ... + def RedrawItems(self, first, first1): ... + def Update(self, item) -> None: ... + def SetColumn(self, colNo, item: LV_COLUMN): ... + def DeleteColumn(self, first): ... + def GetColumnWidth(self, first): ... + def SetColumnWidth(self, first, first1): ... + def GetStringWidth(self, first): ... + def HitTest(self, arg) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def GetItemPosition(self, item) -> tuple[Incomplete, Incomplete]: ... + +class PyClistView: + def PreCreateWindow(self, createStruct): ... + def GetlistCtrl(self) -> PyClistCtrl: ... + def OnCommand(self, wparam, lparam) -> None: ... + +class PyCMDIChildWnd: + def ActivateFrame(self, cmdShow: int = ...) -> None: ... 
+ def CreateWindow( + self, + wndClass: str, + title: str, + style, + PyCWnd, + rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete] | None = ..., + createContext: Incomplete | None = ..., + ): ... + def GetMDIFrame(self) -> None: ... + def MDIActivate(self, cmdShow: int = ...) -> None: ... + def PreCreateWindow(self, createStruct): ... + def PreTranslateMessage(self) -> None: ... + def OnCommand(self, wparam, lparam) -> None: ... + def OnClose(self) -> None: ... + +class PyCMDIFrameWnd: + def GetMDIClient(self) -> PyCMDIFrameWnd: ... + def MDIGetActive(self) -> tuple[PyCMDIChildWnd, Incomplete]: ... + def MDIActivate(self, window: PyCWnd) -> PyCMDIFrameWnd: ... + def MDINext(self, fNext: int = ...) -> None: ... + def PreCreateWindow(self, createStruct): ... + def PreTranslateMessage(self) -> None: ... + def OnCommand(self, wparam, lparam) -> None: ... + def OnContextHelp(self): ... + def OnClose(self) -> None: ... + +class PyCMenu: + def AppendMenu(self, flags, _id: int = ..., value: str | None = ...) -> None: ... + def DeleteMenu(self, _id, flags) -> str: ... + def EnableMenuItem(self, _id, flags): ... + def GetMenuItemCount(self): ... + def GetMenuItemID(self, pos): ... + def GetMenuString(self, _id, arg) -> str: ... + def GetSubMenu(self, pos) -> PyCMenu: ... + def InsertMenu(self, pos, flags, _id: PyCMenu | int = ..., value: str | None = ...) -> None: ... + def ModifyMenu(self, pos, flags, _id: int = ..., value: str | None = ...) -> None: ... + def TrackPopupMenu(self, arg: tuple[Incomplete, Incomplete], arg1, arg2: PyCWnd) -> None: ... + +class PyCOleClientItem: + def CreateNewItem(self) -> None: ... + def Close(self) -> None: ... + def DoVerb(self) -> None: ... + def Draw(self) -> None: ... + def GetActiveView(self) -> PyCView: ... + def GetDocument(self) -> PyCDocument: ... + def GetInPlaceWindow(self) -> PyCWnd: ... + def GetItemState(self) -> None: ... + def GetObject(self) -> PyIUnknown: ... + def GetStorage(self) -> None: ... + def OnActivate(self) -> None: ... + def OnChange(self) -> None: ... + def OnChangeItemPosition(self): ... + def OnDeactivateUI(self): ... + def Run(self) -> None: ... + def SetItemRects(self) -> None: ... + +class PyCOleDialog: ... + +class PyCOleDocument: + def EnableCompoundFile(self, bEnable: int = ...) -> None: ... + def GetStartPosition(self): ... + def GetNextItem(self, pos) -> tuple[Incomplete, PyCOleClientItem]: ... + def GetInPlaceActiveItem(self, wnd: PyCWnd) -> PyCOleClientItem: ... + +class PyCOleInsertDialog: + def GetClassID(self): ... + def GetSelectionType(self): ... + def GetPathName(self): ... + +class PyCPrintDialog: ... + +class PyCPrintInfo: + def DocObject(self) -> None: ... + def GetDwFlags(self) -> None: ... + def SetDwFlags(self) -> None: ... + def GetDocOffsetPage(self) -> None: ... + def SetDocOffsetPage(self) -> None: ... + def SetPrintDialog(self) -> None: ... + def GetDirect(self) -> None: ... + def SetDirect(self) -> None: ... + def GetPreview(self) -> None: ... + def SetPreview(self) -> None: ... + def GetContinuePrinting(self) -> None: ... + def SetContinuePrinting(self) -> None: ... + def GetCurPage(self) -> None: ... + def SetCurPage(self) -> None: ... + def GetNumPreviewPages(self) -> None: ... + def SetNumPreviewPages(self) -> None: ... + def GetUserData(self) -> None: ... + def SetUserData(self) -> None: ... + def GetDraw(self) -> None: ... + def SetDraw(self) -> None: ... + def GetPageDesc(self) -> None: ... + def SetPageDesc(self) -> None: ... + def GetMinPage(self) -> None: ... 
+ def SetMinPage(self) -> None: ... + def GetMaxPage(self) -> None: ... + def SetMaxPage(self) -> None: ... + def GetOffsetPage(self) -> None: ... + def GetFromPage(self) -> None: ... + def GetToPage(self) -> None: ... + def SetHDC(self, hdc) -> None: ... + def CreatePrinterDC(self) -> None: ... + def DoModal(self) -> None: ... + def GetCopies(self) -> None: ... + def GetDefaults(self) -> None: ... + def FreeDefaults(self) -> None: ... + def GetDeviceName(self) -> None: ... + def GetDriverName(self) -> None: ... + def GetDlgFromPage(self) -> None: ... + def GetDlgToPage(self) -> None: ... + def GetPortName(self) -> None: ... + def GetPrinterDC(self) -> None: ... + def PrintAll(self) -> None: ... + def PrintCollate(self) -> None: ... + def PrintRange(self) -> None: ... + def PrintSelection(self) -> None: ... + def GetHDC(self) -> None: ... + def GetFlags(self) -> None: ... + def SetFlags(self) -> None: ... + def SetFromPage(self) -> None: ... + def SetToPage(self) -> None: ... + def GetPRINTDLGMinPage(self) -> None: ... + def SetPRINTDLGMinPage(self) -> None: ... + def GetPRINTDLGCopies(self) -> None: ... + def SetPRINTDLGCopies(self) -> None: ... + +class PyCProgressCtrl: + def CreateWindow(self, style, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], parent: PyCWnd, _id) -> None: ... + def SetRange(self, nLower: int = ..., nUpper: int = ...) -> None: ... + def SetPos(self, nPos: int = ...): ... + def OffsetPos(self, nPos: int = ...): ... + def SetStep(self, nStep: int = ...): ... + def StepIt(self): ... + +class PyCPropertyPage: + def CancelToClose(self) -> None: ... + def OnCancel(self) -> None: ... + def OnOK(self) -> None: ... + def OnApply(self) -> None: ... + def OnReset(self) -> None: ... + def OnQueryCancel(self) -> None: ... + def OnWizardBack(self) -> None: ... + def OnWizardNext(self) -> None: ... + def OnWizardFinish(self) -> None: ... + def OnSetActive(self): ... + def OnKillActive(self): ... + def SetModified(self, bChanged: int = ...) -> None: ... + def SetPSPBit(self, bitMask, bitValue) -> None: ... + +class PyCPropertySheet: + def AddPage(self, page: PyCPropertyPage) -> None: ... + def CreateWindow(self, style, exStyle, parent: PyCWnd | None = ...) -> None: ... + def DoModal(self): ... + def EnableStackedTabs(self, stacked) -> PyCPropertyPage: ... + def EndDialog(self, result) -> None: ... + def GetActiveIndex(self): ... + def GetActivePage(self) -> PyCPropertyPage: ... + def GetPage(self, pageNo) -> PyCPropertyPage: ... + def GetPageIndex(self, page: PyCPropertyPage): ... + def GetPageCount(self): ... + def GetTabCtrl(self) -> PyCTabCtrl: ... + def OnInitDialog(self): ... + def PressButton(self, button) -> None: ... + def RemovePage(self, offset, page) -> None: ... + def SetActivePage(self, page: PyCPropertyPage) -> None: ... + def SetTitle(self, title: str) -> None: ... + def SetFinishText(self, text: str) -> None: ... + def SetWizardMode(self) -> None: ... + def SetWizardButtons(self, flags) -> None: ... + def SetPSHBit(self, bitMask, bitValue) -> None: ... + +class PyCRect: ... +class PyCRgn: ... + +class PyCRichEditCtrl: + def Clear(self): ... + def Copy(self) -> None: ... + def CreateWindow(self, style, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], parent: PyCWnd, _id) -> None: ... + def Cut(self) -> None: ... + def FindText(self, charPos) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def GetCharPos(self, charPos): ... + def GetDefaultCharFormat(self): ... + def GetEventMask(self): ... + def GetSelectionCharFormat(self): ... 
+ def GetFirstVisibleLine(self): ... + def GetParaFormat(self): ... + def GetSel(self) -> tuple[Incomplete, Incomplete]: ... + def GetSelText(self) -> str: ... + def GetTextLength(self): ... + def GetLine(self, lineNo): ... + def GetModify(self): ... + def GetLineCount(self): ... + def LimitText(self, nChars: int = ...) -> None: ... + def LineFromChar(self, charNo: int = ...): ... + def LineIndex(self, lineNo: int = ...): ... + def LineScroll(self, nLines, nChars: int = ...): ... + def Paste(self) -> None: ... + def ReplaceSel(self, text: str) -> None: ... + def SetBackgroundColor(self, bSysColor, cr: int = ...): ... + def SetDefaultCharFormat(self, charFormat) -> None: ... + def SetEventMask(self, eventMask): ... + def SetSelectionCharFormat(self, charFormat) -> None: ... + def SetModify(self, modified: int = ...) -> None: ... + def SetOptions(self, op, flags) -> None: ... + def SetParaFormat(self, paraFormat): ... + def SetReadOnly(self, bReadOnly: int = ...) -> None: ... + def SetSel(self, start, end, arg) -> None: ... + def SetSelAndCharFormat(self, charFormat) -> None: ... + def SetTargetDevice(self, dc: PyCDC, lineWidth) -> None: ... + def StreamIn(self, _format, method) -> tuple[Incomplete, Incomplete]: ... + def StreamOut(self, _format, method) -> tuple[Incomplete, Incomplete]: ... + +class PyCRichEditDoc: + def OnCloseDocument(self) -> None: ... + +class PyCRichEditDocTemplate: + def DoCreateRichEditDoc(self, fileName: str | None = ...) -> PyCRichEditDoc: ... + +class PyCRichEditView: + def GetRichEditCtrl(self) -> PyCRichEditCtrl: ... + def SetWordWrap(self, wordWrap): ... + def WrapChanged(self): ... + def SaveTextFile(self, FileName): ... + +class PyCScrollView: + def GetDeviceScrollPosition(self) -> tuple[Incomplete, Incomplete]: ... + def GetDC(self) -> PyCDC: ... + def GetScrollPosition(self) -> tuple[Incomplete, Incomplete]: ... + def GetTotalSize(self) -> tuple[Incomplete, Incomplete]: ... + def OnCommand(self, wparam, lparam) -> None: ... + def ResizeParentToFit(self, bShrinkOnly: int = ...): ... + def SetScaleToFitSize(self, size: tuple[Incomplete, Incomplete]) -> None: ... + def ScrollToPosition(self, position: tuple[Incomplete, Incomplete]) -> None: ... + def SetScrollSizes( + self, + mapMode, + sizeTotal: tuple[Incomplete, Incomplete], + arg: tuple[Incomplete, Incomplete], + arg1: tuple[Incomplete, Incomplete], + ) -> None: ... + def UpdateBars(self) -> None: ... + +class PyCSliderCtrl: + def CreateWindow(self, style, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], parent: PyCWnd, _id) -> None: ... + def GetLineSize(self): ... + def SetLineSize(self, nLineSize: int = ...): ... + def GetPageSize(self): ... + def SetPageSize(self, nPageSize: int = ...): ... + def GetRangeMax(self): ... + def GetRangeMin(self): ... + def GetRange(self): ... + def SetRange(self, nRangeMin: int = ..., nRangeMax: int = ..., bRedraw: int = ...): ... + def GetSelection(self): ... + def SetSelection(self, nRangeMin: int = ..., nRangeMax: int = ...): ... + def GetChannelRect(self): ... + def GetThumbRect(self): ... + def GetPos(self): ... + def SetPos(self, nPos: int = ...): ... + def GetNumTics(self): ... + def GetTicArray(self): ... + def GetTic(self, nTic: int = ...): ... + def GetTicPos(self, nTic: int = ...): ... + def SetTic(self, nTic: int = ...): ... + def SetTicFreq(self, nFreq: int = ...): ... + def ClearSel(self, bRedraw: int = ...): ... + def VerifyPos(self): ... + def ClearTics(self, bRedraw: int = ...): ... + +class PyCSpinButtonCtrl: + def GetPos(self): ... 
+ def SetPos(self, pos): ... + def SetRange(self): ... + def SetRange32(self): ... + +class PyCSplitterWnd: + def GetPane(self, row, col) -> PyCWnd: ... + def CreateView(self, view: PyCView, row, col, arg: tuple[Incomplete, Incomplete]) -> None: ... + def CreateStatic(self, parent, rows, cols, style, _id) -> None: ... + def SetColumnInfo(self, column, ideal, _min) -> None: ... + def SetRowInfo(self, row, ideal, _min) -> None: ... + def IdFromRowCol(self, row, col) -> None: ... + def DoKeyboardSplit(self): ... + +class PyCStatusBar: + def GetPaneInfo(self, index) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def GetStatusBarCtrl(self) -> PyCStatusBarCtrl: ... + def SetIndicators(self, indicators) -> None: ... + def SetPaneInfo(self, index, _id, style, width) -> None: ... + +class PyCStatusBarCtrl: + def CreateWindow(self, style, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], parent: PyCWnd, _id) -> None: ... + def GetBorders(self) -> tuple[Incomplete, Incomplete, Incomplete]: ... + def GetParts(self, nParts): ... + def GetRect(self, nPane) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def GetText(self, nPane): ... + def GetTextAttr(self, nPane): ... + def GetTextLength(self, nPane): ... + def SetMinHeight(self, nHeight) -> None: ... + def SetParts(self, coord) -> None: ... + def SetText(self, text: str, nPane, nType) -> None: ... + def SetTipText(self, nPane, text: str) -> None: ... + +class PyCTabCtrl: + def GetCurSel(self): ... + def GetItemCountl(self): ... + def SetCurSel(self, index): ... + +class PyCToolBar: + def GetButtonStyle(self, index) -> None: ... + def GetButtonText(self, index) -> str: ... + def GetItemID(self, index) -> None: ... + def SetButtonInfo(self, index, ID, style, imageIx) -> None: ... + def GetToolBarCtrl(self) -> PyCToolBarCtrl: ... + def LoadBitmap(self, _id: PyResourceId) -> None: ... + def LoadToolBar(self, _id: PyResourceId) -> None: ... + def SetBarStyle(self, style) -> None: ... + def SetBitmap(self, hBitmap) -> None: ... + def SetButtons(self, buttons, numButtons) -> None: ... + def SetButtonStyle(self, index, style) -> None: ... + def SetHeight(self, height) -> None: ... + def SetSizes(self, sizeButton: tuple[Incomplete, Incomplete], sizeButton1: tuple[Incomplete, Incomplete]) -> None: ... + +class PyCToolBarCtrl: + def AddBitmap(self, numButtons, bitmap): ... + def AddButtons(self): ... + def AddStrings(self, strings): ... + def AutoSize(self) -> None: ... + def CheckButton(self, nID, bCheck: int = ...): ... + def CommandToIndex(self, nID): ... + def CreateWindow(self, style, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], parent: PyCWnd, _id) -> None: ... + def Customize(self) -> None: ... + def DeleteButton(self, nID) -> None: ... + def EnableButton(self, nID, bEnable: int = ...) -> None: ... + def GetBitmapFlags(self): ... + def GetButton(self, nID): ... + def GetButtonCount(self): ... + def GetItemRect(self, nID) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def GetRows(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def HideButton(self, nID, bEnable: int = ...) -> None: ... + def Indeterminate(self, nID, bEnable: int = ...) -> None: ... + def InsertButton(self, nID, button: PyCToolBarCtrl): ... + def IsButtonChecked(self, nID) -> bool: ... + def IsButtonEnabled(self, nID) -> bool: ... + def IsButtonHidden(self, nID) -> bool: ... + def IsButtonIndeterminate(self, nID) -> bool: ... + def IsButtonPressed(self, nID) -> bool: ... 
+ def PressButton(self, nID, bEnable: int = ...) -> None: ... + def SetBitmapSize(self, width1, height1, width: int = ..., height: int = ...) -> None: ... + def SetButtonSize(self, width1, height1, width: int = ..., height: int = ...) -> None: ... + def SetCmdID(self, nIndex, nID) -> None: ... + def SetRows(self, nRows, bLarger) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + +class PyCToolTipCtrl: + def CreateWindow(self, parent: PyCWnd, style) -> None: ... + def UpdateTipText(self, text: str, wnd: PyCWnd, _id) -> None: ... + def AddTool( + self, wnd: PyCWnd, text: str, _id, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete] | None = ... + ) -> None: ... + def SetMaxTipWidth(self, width): ... + +class PyCTreeCtrl: + def CreateWindow(self, style, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], PyCWnd, _id) -> None: ... + def GetCount(self): ... + def GetIndent(self): ... + def SetIndent(self, indent) -> None: ... + def GetImagelist(self, nImagelist) -> PyCImagelist: ... + def SetImagelist(self, imagelist: PyCImagelist, imageType): ... + def GetNextItem(self, item, code): ... + def ItemHasChildren(self, item): ... + def GetChildItem(self, item): ... + def GetNextSiblingItem(self, item): ... + def GetPrevSiblingItem(self, item): ... + def GetParentItem(self, item): ... + def GetFirstVisibleItem(self): ... + def GetNextVisibleItem(self, item): ... + def GetSelectedItem(self): ... + def GetDropHilightItem(self): ... + def GetRootItem(self): ... + def GetToolTips(self): ... + def GetItem(self, item, arg) -> TV_ITEM: ... + def SetItem(self, item: TV_ITEM): ... + def GetItemState(self, item, stateMask) -> tuple[Incomplete, Incomplete]: ... + def SetItemState(self, item, state, stateMask) -> None: ... + def GetItemImage(self, item) -> tuple[Incomplete, Incomplete]: ... + def SetItemImage(self, item, iImage, iSelectedImage) -> None: ... + def SetItemText(self, item, text: str): ... + def GetItemText(self, item): ... + def GetItemData(self, item): ... + def SetItemData(self, item, Data): ... + def GetItemRect(self, item, bTextOnly) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def GetEditControl(self) -> PyCEdit: ... + def GetVisibleCount(self): ... + def InsertItem( + self, + hParent, + hInsertAfter, + item: TV_ITEM, + mask, + text, + image, + selectedImage, + state, + stateMask, + lParam, + parent, + parent1, + text1, + image1, + selectedImage1, + parent2, + insertAfter, + text2, + parent3, + parent4, + ): ... + def DeleteItem(self, item) -> None: ... + def DeleteAllItems(self): ... + def Expand(self, item, code) -> None: ... + def Select(self, item, code) -> None: ... + def SelectItem(self, item) -> None: ... + def SelectDropTarget(self, item) -> None: ... + def SelectSetFirstVisible(self, item) -> None: ... + def EditLabel(self, item) -> PyCEdit: ... + def CreateDragImage(self, item) -> PyCImagelist: ... + def SortChildren(self, item) -> None: ... + def EnsureVisible(self, item): ... + def HitTest(self, arg) -> tuple[Incomplete, Incomplete]: ... + +class PyCTreeView: + def PreCreateWindow(self, createStruct): ... + def GetTreeCtrl(self) -> PyCTreeCtrl: ... + def OnCommand(self, wparam, lparam) -> None: ... + +class PyCView: + def CreateWindow(self, parent: PyCWnd, arg, arg1, arg2: tuple[Incomplete, Incomplete, Incomplete, Incomplete]) -> None: ... + def GetDocument(self) -> PyCDocument: ... + def OnActivateView(self, activate, activateView: PyCView, DeactivateView: PyCView): ... + def OnInitialUpdate(self) -> None: ... 
+ def OnMouseActivate(self, wnd: PyCWnd, hittest, message): ... + def PreCreateWindow(self, createStruct): ... + def OnFilePrint(self) -> None: ... + def DoPreparePrinting(self): ... + def OnBeginPrinting(self) -> None: ... + def OnEndPrinting(self) -> None: ... + +class PyCWinApp: + def AddDocTemplate(self, template: PyCDocTemplate) -> None: ... + def FindOpenDocument(self, fileName: str) -> PyCDocument: ... + def GetDocTemplatelist(self) -> list[Incomplete]: ... + def InitDlgInstance(self, dialog: PyCDialog) -> None: ... + def LoadCursor(self, cursorId: PyResourceId): ... + def LoadStandardCursor(self, cursorId: PyResourceId): ... + def LoadOEMCursor(self, cursorId): ... + def LoadIcon(self, idResource: int) -> int: ... + def LoadStandardIcon(self, resourceName: PyResourceId): ... + def OpenDocumentFile(self, fileName: str) -> None: ... + def OnFileNew(self) -> None: ... + def OnFileOpen(self) -> None: ... + def RemoveDocTemplate(self, template: PyCDocTemplate) -> None: ... + def Run(self): ... + def IsInproc(self) -> bool: ... + +class PyCWinThread: + def CreateThread(self) -> None: ... + def PumpIdle(self) -> None: ... + def PumpMessages(self) -> None: ... + def Run(self): ... + def SetMainFrame(self, mainFrame: PyCWnd) -> None: ... + def SetThreadPriority(self, priority: PyCWnd) -> None: ... + +class PyCWnd: + def ActivateFrame(self, cmdShow) -> None: ... + def BringWindowToTop(self) -> None: ... + def BeginPaint(self) -> tuple[PyCDC, Incomplete]: ... + def CalcWindowRect( + self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], nAdjustType + ) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def CenterWindow(self, altwin: PyCWnd | None = ...) -> None: ... + def CheckRadioButton(self, idFirst, idLast, idCheck) -> None: ... + def ChildWindowFromPoint(self, x, y, flag: int = ...) -> PyCWnd: ... + def ClientToScreen( + self, point: tuple[Incomplete, Incomplete], rect + ) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete, Incomplete]: ... + def CreateWindow( + self, + classId: str, + windowName: str, + style, + rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + parent: PyCWnd, + _id, + context: Incomplete | None = ..., + ) -> None: ... + def CreateWindowEx( + self, + styleEx, + classId: str, + windowName: str, + style, + rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + parent: PyCWnd, + _id, + createStruct1, + createStruct: CREATESTRUCT | None = ..., + ) -> None: ... + def DefWindowProc(self, message, idLast, idCheck): ... + def DestroyWindow(self) -> None: ... + def DlgDirlist(self, defPath: str, idlistbox, idStaticPath, fileType) -> None: ... + def DlgDirlistComboBox(self) -> None: ... + def DlgDirSelect(self, idlistbox) -> str: ... + def DlgDirSelectComboBox(self, idlistbox) -> str: ... + def DragAcceptFiles(self, bAccept: int = ...) -> None: ... + def DrawMenuBar(self) -> None: ... + def EnableWindow(self, bEnable: int = ...): ... + def EndModalLoop(self, result) -> None: ... + def EndPaint(self, paintStruct) -> None: ... + def GetCheckedRadioButton(self, idFirst, idLast): ... + def GetClientRect(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def GetDC(self) -> PyCDC: ... + def GetDCEx(self) -> PyCDC: ... + def GetDlgCtrlID(self): ... + def GetDlgItem(self, idControl) -> PyCWnd: ... + def GetDlgItemInt(self, idControl, bUnsigned: int = ...): ... + def GetDlgItemText(self, idControl) -> str: ... + def GetLastActivePopup(self) -> PyCWnd: ... + def GetMenu(self) -> PyCMenu: ... 
+ def GetParent(self) -> PyCWnd: ... + def GetParentFrame(self) -> PyCWnd: ... + def GetSafeHwnd(self): ... + def GetScrollInfo(self, nBar, mask): ... + def GetScrollPos(self, nBar): ... + def GetStyle(self): ... + def GetExStyle(self): ... + def GetSystemMenu(self) -> PyCMenu: ... + def GetTopLevelFrame(self) -> PyCWnd: ... + def GetTopLevelOwner(self) -> PyCWnd: ... + def GetTopLevelParent(self) -> PyCWnd: ... + def GetTopWindow(self) -> PyCWnd: ... + def GetWindow(self, _type) -> PyCWnd: ... + def GetWindowDC(self) -> PyCDC: ... + def GetWindowPlacement(self): ... + def GetWindowRect(self) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... + def GetWindowText(self) -> str: ... + def HideCaret(self) -> None: ... + def HookAllKeyStrokes(self, obHandler) -> None: ... + def HookKeyStroke(self, obHandler, ch): ... + def HookMessage(self, obHandler, message): ... + def InvalidateRect(self, arg: tuple[Incomplete, Incomplete, Incomplete, Incomplete], bErase: int = ...) -> None: ... + def InvalidateRgn(self, region: PyCRgn, bErase: int = ...) -> None: ... + def IsChild(self, obWnd: PyCWnd) -> bool: ... + def IsDlgButtonChecked(self, idCtl) -> bool: ... + def IsIconic(self) -> bool: ... + def IsZoomed(self) -> bool: ... + def IsWindow(self) -> bool: ... + def IsWindowVisible(self) -> bool: ... + def KillTimer(self): ... + def LockWindowUpdate(self) -> None: ... + def MapWindowPoints(self, wnd: PyCWnd, points: list[tuple[Incomplete, Incomplete]]) -> None: ... + def MouseCaptured(self): ... + def MessageBox(self, message: str, arg, title: str | None = ...) -> None: ... + def ModifyStyle(self, remove, add, flags: int = ...): ... + def ModifyStyleEx(self, remove, add, flags: int = ...): ... + def MoveWindow(self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], bRepaint: int = ...) -> None: ... + def OnClose(self): ... + def OnCtlColor(self, dc: PyCDC, control, _type): ... + def OnEraseBkgnd(self, dc: PyCDC): ... + def OnNcHitTest(self, arg: tuple[Incomplete, Incomplete]): ... + def OnPaint(self): ... + def OnQueryDragIcon(self): ... + def OnQueryNewPalette(self): ... + def OnSetCursor(self, wnd: PyCWnd, hittest, message): ... + def OnMouseActivate(self, wnd: PyCWnd, hittest, message): ... + def OnWndMsg(self, msg, wParam, lParam) -> tuple[Incomplete, Incomplete]: ... + def PreCreateWindow(self, createStruct): ... + def PumpWaitingMessages(self, firstMsg, lastMsg) -> None: ... + def RedrawWindow( + self, _object: PyCRgn, flags, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete] | None = ... + ) -> None: ... + def ReleaseCapture(self) -> None: ... + def ReleaseDC(self, dc: PyCDC) -> None: ... + def RepositionBars(self, idFirst, idLast, idLeftOver) -> None: ... + def RunModalLoop(self, flags): ... + def PostMessage(self, idMessage, wParam: int = ..., lParam: int = ...) -> None: ... + def SendMessageToDescendants(self, idMessage, wParam: int = ..., lParam: int = ..., bDeep: int = ...) -> None: ... + def SendMessage(self, idMessage, idMessage1, ob, wParam: int = ..., lParam: int = ...) -> None: ... + def SetActiveWindow(self) -> PyCWnd: ... + def SetForegroundWindow(self) -> None: ... + def SetWindowPos(self, hWndInsertAfter, position: tuple[Incomplete, Incomplete, Incomplete, Incomplete], flags) -> None: ... + def ScreenToClient( + self, rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete, Incomplete], pnt + ) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete, Incomplete]: ... + def SetCapture(self) -> None: ... 
+ def SetDlgItemText(self, idControl, text: str) -> None: ... + def SetFocus(self) -> None: ... + def SetFont(self, font: PyCFont, bRedraw: int = ...) -> None: ... + def SetIcon(self): ... + def SetMenu(self, menuObj: PyCMenu) -> None: ... + def SetRedraw(self, bState: int = ...) -> None: ... + def SetScrollPos(self, nBar, nPos, redraw: int = ...): ... + def SetScrollInfo(self, nBar, ScrollInfo, redraw: int = ...): ... + def SetTimer(self, idEvent, elapse): ... + def SetWindowPlacement(self, placement) -> None: ... + def SetWindowText(self, text: str) -> None: ... + def ShowCaret(self) -> None: ... + def ShowScrollBar(self, nBar, bShow: int = ...) -> None: ... + def ShowWindow(self, arg): ... + def UnLockWindowUpdate(self) -> None: ... + def UpdateData(self, bSaveAndValidate: int = ...): ... + def UpdateDialogControls(self, pTarget: PyCCmdTarget, disableIfNoHandler): ... + def UpdateWindow(self) -> None: ... + +class PyDDEConv: + def ConnectTo(self, service: str, topic: str) -> None: ... + def Connected(self) -> None: ... + def Exec(self, Cmd: str) -> None: ... + def Request(self) -> None: ... + def Poke(self) -> None: ... + +class PyDDEServer: + def AddTopic(self, topic: PyDDETopic) -> None: ... + def Create(self, name: str, filterFlags: int = ...) -> None: ... + def Destroy(self) -> None: ... + def GetLastError(self): ... + def Shutdown(self) -> None: ... + +class PyDDEStringItem: + def SetData(self, data: str) -> None: ... + +class PyDDETopic: + def AddItem(self, item) -> None: ... + def Destroy(self) -> None: ... + +class PyDLL: + def GetFileName(self) -> str: ... + def AttachToMFC(self) -> None: ... + +class SCROLLINFO: ... +class TV_ITEM: ... + +class EXTENSION_CONTROL_BLOCK: + @property + def Version(self) -> int: ... + @property + def TotalBytes(self): ... + @property + def AvailableBytes(self): ... + @property + def HttpStatusCode(self): ... + @property + def Method(self): ... + @property + def ConnID(self): ... + @property + def QueryString(self): ... + @property + def PathInfo(self): ... + @property + def PathTranslated(self): ... + @property + def AvailableData(self): ... + @property + def ContentType(self): ... + @property + def LogData(self): ... + def WriteClient(self, data: str, reserved: int = ...): ... + def GetServerVariable(self, variable: str, default) -> str: ... + def ReadClient(self, nbytes) -> str: ... + def SendResponseHeaders(self, reply: str, headers: str, keepAlive: bool = ...) -> None: ... + def SetFlushFlag(self, flag) -> None: ... + def TransmitFile(self, callback, param, hFile, statusCode: str, BytesToWrite, Offset, head: str, tail: str, flags): ... + def MapURLToPath(self) -> None: ... + def DoneWithSession(self, status) -> None: ... + def Redirect(self, url: str) -> None: ... + def IsKeepAlive(self) -> bool: ... + def GetAnonymousToken(self, metabase_path: str): ... + def GetImpersonationToken(self): ... + def IsKeepConn(self) -> bool: ... + def ExecURL(self, url: str, method: str, clientHeaders: str, info, entity, flags): ... + def GetExecURLStatus(self): ... + def IOCompletion(self, func, arg: Incomplete | None = ...): ... + def ReportUnhealthy(self, reason: str | None = ...): ... + def IOCallback(self, ecb: EXTENSION_CONTROL_BLOCK, arg, cbIO, dwError): ... + +class HSE_VERSION_INFO: + @property + def ExtensionDesc(self) -> str: ... + +class HTTP_FILTER_AUTHENT: + @property + def User(self) -> str: ... + @property + def Password(self) -> str: ... + +class HTTP_FILTER_CONTEXT: + @property + def Revision(self): ... 
+ @property + def fIsSecurePort(self): ... + @property + def NotificationType(self): ... + @property + def FilterContext(self): ... + def GetData(self): ... + def GetServerVariable(self, variable: str, default) -> str: ... + def WriteClient(self, data: str, reserverd: int = ...) -> None: ... + def AddResponseHeaders(self, data: str, reserverd: int = ...) -> None: ... + def SendResponseHeader(self, status: str, header: str) -> None: ... + def DisableNotifications(self, flags) -> None: ... + +class HTTP_FILTER_LOG: + @property + def ClientHostName(self) -> str: ... + @property + def ClientUserName(self) -> str: ... + @property + def ServerName(self) -> str: ... + @property + def Operation(self) -> str: ... + @property + def Target(self) -> str: ... + @property + def Parameters(self) -> str: ... + @property + def HttpStatus(self): ... + +class HTTP_FILTER_PREPROC_HEADERS: + def GetHeader(self, header: str, default) -> str: ... + def SetHeader(self, name: str, val: str) -> None: ... + def AddHeader(self) -> None: ... + +class HTTP_FILTER_RAW_DATA: + @property + def InData(self) -> str: ... + +class HTTP_FILTER_URL_MAP: + @property + def URL(self) -> str: ... + @property + def PhysicalPath(self) -> str: ... + +class HTTP_FILTER_VERSION: + @property + def ServerFilterVersion(self): ... + @property + def FilterVersion(self): ... + @property + def Flags(self): ... + @property + def FilterDesc(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/afxres.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/afxres.pyi new file mode 100644 index 00000000..0d3239c0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/afxres.pyi @@ -0,0 +1 @@ +from win32.lib.afxres import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/commctrl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/commctrl.pyi new file mode 100644 index 00000000..603c5703 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/commctrl.pyi @@ -0,0 +1 @@ +from win32.lib.commctrl import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/dde.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/dde.pyi new file mode 100644 index 00000000..90a5be0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/dde.pyi @@ -0,0 +1 @@ +from pythonwin.dde import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/__init__.pyi new file mode 100644 index 00000000..87815a67 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/__init__.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +class ISAPIError(Exception): + errno: Incomplete + strerror: Incomplete + funcname: Incomplete + def __init__(self, errno, strerror: Incomplete | None = ..., funcname: Incomplete | None = ...) -> None: ... + +class FilterError(ISAPIError): ... +class ExtensionError(ISAPIError): ... +class InternalReloadException(Exception): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/isapicon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/isapicon.pyi new file mode 100644 index 00000000..33dc5d62 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/isapicon.pyi @@ -0,0 +1,86 @@ +from _typeshed import Incomplete + +HTTP_CONTINUE: int +HTTP_SWITCHING_PROTOCOLS: int +HTTP_PROCESSING: int +HTTP_OK: int +HTTP_CREATED: int +HTTP_ACCEPTED: int +HTTP_NON_AUTHORITATIVE: int +HTTP_NO_CONTENT: int +HTTP_RESET_CONTENT: int +HTTP_PARTIAL_CONTENT: int +HTTP_MULTI_STATUS: int +HTTP_MULTIPLE_CHOICES: int +HTTP_MOVED_PERMANENTLY: int +HTTP_MOVED_TEMPORARILY: int +HTTP_SEE_OTHER: int +HTTP_NOT_MODIFIED: int +HTTP_USE_PROXY: int +HTTP_TEMPORARY_REDIRECT: int +HTTP_BAD_REQUEST: int +HTTP_UNAUTHORIZED: int +HTTP_PAYMENT_REQUIRED: int +HTTP_FORBIDDEN: int +HTTP_NOT_FOUND: int +HTTP_METHOD_NOT_ALLOWED: int +HTTP_NOT_ACCEPTABLE: int +HTTP_PROXY_AUTHENTICATION_REQUIRED: int +HTTP_REQUEST_TIME_OUT: int +HTTP_CONFLICT: int +HTTP_GONE: int +HTTP_LENGTH_REQUIRED: int +HTTP_PRECONDITION_FAILED: int +HTTP_REQUEST_ENTITY_TOO_LARGE: int +HTTP_REQUEST_URI_TOO_LARGE: int +HTTP_UNSUPPORTED_MEDIA_TYPE: int +HTTP_RANGE_NOT_SATISFIABLE: int +HTTP_EXPECTATION_FAILED: int +HTTP_UNPROCESSABLE_ENTITY: int +HTTP_INTERNAL_SERVER_ERROR: int +HTTP_NOT_IMPLEMENTED: int +HTTP_BAD_GATEWAY: int +HTTP_SERVICE_UNAVAILABLE: int +HTTP_GATEWAY_TIME_OUT: int +HTTP_VERSION_NOT_SUPPORTED: int +HTTP_VARIANT_ALSO_VARIES: int +HSE_STATUS_SUCCESS: int +HSE_STATUS_SUCCESS_AND_KEEP_CONN: int +HSE_STATUS_PENDING: int +HSE_STATUS_ERROR: int +SF_NOTIFY_SECURE_PORT: int +SF_NOTIFY_NONSECURE_PORT: int +SF_NOTIFY_READ_RAW_DATA: int +SF_NOTIFY_PREPROC_HEADERS: int +SF_NOTIFY_AUTHENTICATION: int +SF_NOTIFY_URL_MAP: int +SF_NOTIFY_ACCESS_DENIED: int +SF_NOTIFY_SEND_RESPONSE: int +SF_NOTIFY_SEND_RAW_DATA: int +SF_NOTIFY_LOG: int +SF_NOTIFY_END_OF_REQUEST: int +SF_NOTIFY_END_OF_NET_SESSION: int +SF_NOTIFY_ORDER_HIGH: int +SF_NOTIFY_ORDER_MEDIUM: int +SF_NOTIFY_ORDER_LOW: int +SF_NOTIFY_ORDER_DEFAULT: int +SF_NOTIFY_ORDER_MASK: Incomplete +SF_STATUS_REQ_FINISHED: int +SF_STATUS_REQ_FINISHED_KEEP_CONN: Incomplete +SF_STATUS_REQ_NEXT_NOTIFICATION: Incomplete +SF_STATUS_REQ_HANDLED_NOTIFICATION: Incomplete +SF_STATUS_REQ_ERROR: Incomplete +SF_STATUS_REQ_READ_NEXT: Incomplete +HSE_IO_SYNC: int +HSE_IO_ASYNC: int +HSE_IO_DISCONNECT_AFTER_SEND: int +HSE_IO_SEND_HEADERS: int +HSE_IO_NODELAY: int +HSE_IO_FINAL_SEND: int +HSE_IO_CACHE_RESPONSE: int +HSE_EXEC_URL_NO_HEADERS: int +HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR: int +HSE_EXEC_URL_IGNORE_VALIDATION_AND_RANGE: int +HSE_EXEC_URL_DISABLE_CUSTOM_ERROR: int +HSE_EXEC_URL_SSI_CMD: int +HSE_EXEC_URL_HTTP_CACHE_ELIGIBLE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/simple.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/simple.pyi new file mode 100644 index 00000000..7e9bc1b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/simple.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete + +class SimpleExtension: + def GetExtensionVersion(self, vi) -> None: ... + def HttpExtensionProc(self, control_block) -> None: ... + def TerminateExtension(self, status) -> None: ... 
+ +class SimpleFilter: + filter_flags: Incomplete + def GetFilterVersion(self, fv) -> None: ... + def HttpFilterProc(self, fc) -> None: ... + def TerminateFilter(self, status) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/threaded_extension.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/threaded_extension.pyi new file mode 100644 index 00000000..502351d2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/isapi/threaded_extension.pyi @@ -0,0 +1,29 @@ +import threading +from _typeshed import Incomplete + +import isapi.simple +from isapi import ExtensionError as ExtensionError, isapicon as isapicon +from win32event import INFINITE as INFINITE + +ISAPI_REQUEST: int +ISAPI_SHUTDOWN: int + +class WorkerThread(threading.Thread): + running: bool + io_req_port: Incomplete + extension: Incomplete + def __init__(self, extension, io_req_port) -> None: ... + def call_handler(self, cblock) -> None: ... + +class ThreadPoolExtension(isapi.simple.SimpleExtension): + max_workers: int + worker_shutdown_wait: int + workers: Incomplete + dispatch_map: Incomplete + io_req_port: Incomplete + def GetExtensionVersion(self, vi) -> None: ... + def HttpExtensionProc(self, control_block): ... + def TerminateExtension(self, status) -> None: ... + def DispatchConnection(self, errCode, bytes, key, overlapped) -> None: ... + def Dispatch(self, ecb) -> None: ... + def HandleDispatchError(self, ecb) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/mmapfile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/mmapfile.pyi new file mode 100644 index 00000000..0b18e760 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/mmapfile.pyi @@ -0,0 +1 @@ +from win32.mmapfile import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/mmsystem.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/mmsystem.pyi new file mode 100644 index 00000000..600475d2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/mmsystem.pyi @@ -0,0 +1 @@ +from win32.lib.mmsystem import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/ntsecuritycon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/ntsecuritycon.pyi new file mode 100644 index 00000000..0b237548 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/ntsecuritycon.pyi @@ -0,0 +1 @@ +from win32.lib.ntsecuritycon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/odbc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/odbc.pyi new file mode 100644 index 00000000..4671d862 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/odbc.pyi @@ -0,0 +1 @@ +from win32.odbc import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/perfmon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/perfmon.pyi new file mode 100644 index 00000000..eae890e0 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/perfmon.pyi @@ -0,0 +1 @@ +from win32.perfmon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythoncom.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythoncom.pyi new file mode 100644 index 00000000..30a0f46a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythoncom.pyi @@ -0,0 +1,470 @@ +# Can't generate with stubgen because: +# "Critical error during semantic analysis: mypy: can't decode file '.venv\Lib\site-packages\pywin32_system32\pythoncom39.dll': 'utf-8' codec can't decode byte 0x90 in position 2: invalid start byte" +# https://github.com/python/mypy/issues/13822 +from _typeshed import Incomplete +from typing_extensions import TypeAlias + +import _win32typing +from win32.lib.pywintypes import com_error as com_error + +error: TypeAlias = com_error # noqa: Y042 + +class internal_error(Exception): ... + +def CoCreateFreeThreadedMarshaler(unk: _win32typing.PyIUnknown) -> _win32typing.PyIUnknown: ... +def CoCreateInstanceEx( + clsid: _win32typing.PyIID, + unkOuter: _win32typing.PyIUnknown, + context, + serverInfo: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + iids: list[_win32typing.PyIID], +) -> _win32typing.PyIUnknown: ... +def CoCreateInstance( + __clsid: _win32typing.PyIID, __unkOuter: _win32typing.PyIUnknown | None, __context: int, __iid: _win32typing.PyIID +) -> _win32typing.PyIUnknown: ... +def CoFreeUnusedLibraries() -> None: ... +def CoInitialize() -> None: ... +def CoInitializeEx(flags) -> None: ... +def CoInitializeSecurity( + sd: _win32typing.PySECURITY_DESCRIPTOR, authSvc, reserved1, authnLevel, impLevel, authInfo, capabilities, reserved2 +) -> None: ... +def CoGetInterfaceAndReleaseStream(stream: _win32typing.PyIStream, iid: _win32typing.PyIID) -> _win32typing.PyIUnknown: ... +def CoMarshalInterThreadInterfaceInStream(iid: _win32typing.PyIID, unk: _win32typing.PyIUnknown) -> _win32typing.PyIStream: ... +def CoMarshalInterface( + Stm: _win32typing.PyIStream, riid: _win32typing.PyIID, Unk: _win32typing.PyIUnknown, DestContext, flags +) -> None: ... +def CoUnmarshalInterface(Stm: _win32typing.PyIStream, riid: _win32typing.PyIID): ... +def CoReleaseMarshalData(Stm: _win32typing.PyIStream) -> None: ... +def CoGetObject(name: str, iid: _win32typing.PyIID, bindOpts: Incomplete | None = ...) -> _win32typing.PyIUnknown: ... +def CoUninitialize() -> None: ... +def CoRegisterClassObject(iid: _win32typing.PyIID, factory: _win32typing.PyIUnknown, context, flags): ... +def CoResumeClassObjects() -> None: ... +def CoRevokeClassObject(reg) -> None: ... +def CoTreatAsClass(clsidold: _win32typing.PyIID, clsidnew: _win32typing.PyIID) -> None: ... +def CoWaitForMultipleHandles(Flags, Timeout, Handles: list[int]): ... +def Connect(cls) -> _win32typing.PyIDispatch: ... +def CreateGuid() -> _win32typing.PyIID: ... +def CreateBindCtx() -> _win32typing.PyIBindCtx: ... +def CreateFileMoniker(filename: str) -> _win32typing.PyIMoniker: ... +def CreateItemMoniker(delim: str, item: str) -> _win32typing.PyIMoniker: ... +def CreatePointerMoniker(IUnknown: _win32typing.PyIUnknown) -> _win32typing.PyIMoniker: ... +def CreateTypeLib(): ... +def CreateTypeLib2(): ... +def CreateStreamOnHGlobal(hGlobal: int | None = ..., DeleteOnRelease: bool = ...) -> _win32typing.PyIStream: ... +def CreateILockBytesOnHGlobal(hGlobal: int | None = ..., DeleteOnRelease: bool = ...) 
-> _win32typing.PyILockBytes: ... +def EnableQuitMessage(threadId) -> None: ... +def FUNCDESC() -> _win32typing.FUNCDESC: ... +def GetActiveObject(cls) -> _win32typing.PyIUnknown: ... +def GetClassFile(fileName) -> _win32typing.PyIID: ... +def GetFacilityString(scode) -> str: ... +def GetRecordFromGuids( + iid: _win32typing.PyIID, verMajor, verMinor, lcid, infoIID: _win32typing.PyIID, data: Incomplete | None = ... +): ... +def GetRecordFromTypeInfo(TypeInfo: _win32typing.PyITypeInfo): ... +def GetRunningObjectTable(reserved: int = ...) -> _win32typing.PyIRunningObjectTable: ... +def GetScodeString(scode) -> str: ... +def GetScodeRangeString(scode) -> str: ... +def GetSeverityString(scode) -> str: ... +def IsGatewayRegistered(__iid: _win32typing.PyIID | None) -> int: ... +def LoadRegTypeLib(iid: _win32typing.PyIID, versionMajor, versionMinor, lcid) -> _win32typing.PyITypeLib: ... +def LoadTypeLib(libFileName: str) -> _win32typing.PyITypeLib: ... +def MakePyFactory(iid: _win32typing.PyIID) -> _win32typing.PyIClassFactory: ... +def MkParseDisplayName( + displayName: str, bindCtx: _win32typing.PyIBindCtx | None = ... +) -> tuple[_win32typing.PyIMoniker, Incomplete, _win32typing.PyIBindCtx]: ... +def New(cls) -> _win32typing.PyIDispatch: ... +def ObjectFromAddress(address, iid: _win32typing.PyIID) -> _win32typing.PyIUnknown: ... +def ObjectFromLresult(lresult, iid: _win32typing.PyIID, wparm) -> _win32typing.PyIUnknown: ... +def OleInitialize() -> None: ... +def OleGetClipboard() -> _win32typing.PyIDataObject: ... +def OleFlushClipboard() -> None: ... +def OleIsCurrentClipboard(dataObj: _win32typing.PyIDataObject): ... +def OleSetClipboard(dataObj: _win32typing.PyIDataObject) -> None: ... +def OleLoadFromStream(stream: _win32typing.PyIStream, iid: _win32typing.PyIID) -> None: ... +def OleSaveToStream(persist: _win32typing.PyIPersistStream, stream: _win32typing.PyIStream) -> None: ... +def OleLoad(storage: _win32typing.PyIStorage, iid: _win32typing.PyIID, site: _win32typing.PyIOleClientSite) -> None: ... +def ProgIDFromCLSID(clsid) -> str: ... +def PumpWaitingMessages(__firstMessage: int = ..., __lastMessage: int = ...) -> int: ... +def PumpMessages() -> None: ... +def QueryPathOfRegTypeLib(iid: _win32typing.PyIID, versionMajor, versionMinor, lcid) -> str: ... +def ReadClassStg(storage: _win32typing.PyIStorage) -> _win32typing.PyIID: ... +def ReadClassStm(Stm: _win32typing.PyIStream) -> _win32typing.PyIID: ... +def RegisterTypeLib(typelib: _win32typing.PyITypeLib, fullPath: str, lcid, helpDir: str | None = ...) -> None: ... +def UnRegisterTypeLib(iid: _win32typing.PyIID, versionMajor, versionMinor, lcid, syskind) -> str: ... +def RegisterActiveObject(obUnknown: _win32typing.PyIUnknown, clsid: _win32typing.PyIID, flags): ... +def RevokeActiveObject(handle) -> None: ... +def RegisterDragDrop(hwnd: int, dropTarget: _win32typing.PyIDropTarget) -> None: ... +def RevokeDragDrop(hwnd: int) -> None: ... +def DoDragDrop() -> None: ... +def StgCreateDocfile(name: str, mode, reserved: int = ...) -> _win32typing.PyIStorage: ... +def StgCreateDocfileOnILockBytes(lockBytes: _win32typing.PyILockBytes, mode, reserved=...) -> _win32typing.PyIStorage: ... +def StgOpenStorageOnILockBytes( + lockBytes: _win32typing.PyILockBytes, + stgPriority: _win32typing.PyIStorage, + snbExclude: Incomplete | None = ..., + reserved: int = ..., +) -> _win32typing.PyIStorage: ... +def StgIsStorageFile(name: str): ... +def STGMEDIUM() -> _win32typing.PySTGMEDIUM: ... 
+def StgOpenStorage( + name: str, other: _win32typing.PyIStorage, mode, snbExclude: Incomplete | None = ..., reserved=... +) -> _win32typing.PyIStorage: ... +def StgOpenStorageEx( + Name: str, Mode, stgfmt, Attrs, riid: _win32typing.PyIID, StgOptions: Incomplete | None = ... +) -> _win32typing.PyIStorage: ... +def StgCreateStorageEx( + Name: str, + Mode, + stgfmt, + Attrs, + riid: _win32typing.PyIID, + StgOptions: Incomplete | None = ..., + SecurityDescriptor: _win32typing.PySECURITY_DESCRIPTOR | None = ..., +) -> _win32typing.PyIStorage: ... +def TYPEATTR() -> _win32typing.TYPEATTR: ... +def VARDESC() -> _win32typing.VARDESC: ... +def WrapObject(ob, gatewayIID: _win32typing.PyIID, interfaceIID: _win32typing.PyIID) -> _win32typing.PyIUnknown: ... +def WriteClassStg(storage: _win32typing.PyIStorage, iid: _win32typing.PyIID) -> None: ... +def WriteClassStm(Stm: _win32typing.PyIStream, clsid: _win32typing.PyIID) -> None: ... +def UnwrapObject(ob: _win32typing.PyIUnknown) -> _win32typing.PyIDispatch: ... +def FmtIdToPropStgName(fmtid: _win32typing.PyIID): ... +def PropStgNameToFmtId(Name: str) -> _win32typing.PyIID: ... +def CoGetCallContext(riid: _win32typing.PyIID) -> _win32typing.PyIServerSecurity: ... +def CoGetObjectContext(riid: _win32typing.PyIID) -> _win32typing.PyIContext: ... +def CoGetCancelObject(riid: _win32typing.PyIID, ThreadID: int = ...) -> _win32typing.PyICancelMethodCalls: ... +def CoSetCancelObject(Unk: _win32typing.PyIUnknown) -> None: ... +def CoEnableCallCancellation() -> None: ... +def CoDisableCallCancellation() -> None: ... +def CreateURLMonikerEx(*args, **kwargs): ... # incomplete +def new(__iid: _win32typing.PyIID | str): ... + +ACTIVEOBJECT_STRONG: int +ACTIVEOBJECT_WEAK: int +ArgNotFound: _win32typing.ArgNotFound +CLSCTX_ALL: int +CLSCTX_INPROC: int +CLSCTX_INPROC_HANDLER: int +CLSCTX_INPROC_SERVER: int +CLSCTX_LOCAL_SERVER: int +CLSCTX_REMOTE_SERVER: int +CLSCTX_SERVER: int +CLSID_DCOMAccessControl: _win32typing.PyIID +CLSID_StdComponentCategoriesMgr: _win32typing.PyIID +CLSID_StdGlobalInterfaceTable: _win32typing.PyIID +COINIT_APARTMENTTHREADED: int +COINIT_DISABLE_OLE1DDE: int +COINIT_MULTITHREADED: int +COINIT_SPEED_OVER_MEMORY: int +COWAIT_ALERTABLE: int +COWAIT_WAITALL: int + +DATADIR_GET: int +DATADIR_SET: int +DESCKIND_FUNCDESC: int +DESCKIND_VARDESC: int +DISPATCH_METHOD: int +DISPATCH_PROPERTYGET: int +DISPATCH_PROPERTYPUT: int +DISPATCH_PROPERTYPUTREF: int +DISPID_COLLECT: int +DISPID_CONSTRUCTOR: int +DISPID_DESTRUCTOR: int +DISPID_EVALUATE: int +DISPID_NEWENUM: int +DISPID_PROPERTYPUT: int +DISPID_STARTENUM: int +DISPID_THIS: int +DISPID_UNKNOWN: int +DISPID_VALUE: int +DVASPECT_CONTENT: int +DVASPECT_DOCPRINT: int +DVASPECT_ICON: int +DVASPECT_THUMBNAIL: int +EOAC_ACCESS_CONTROL: int +EOAC_ANY_AUTHORITY: int +EOAC_APPID: int +EOAC_AUTO_IMPERSONATE: int +EOAC_DEFAULT: int +EOAC_DISABLE_AAA: int +EOAC_DYNAMIC: int +EOAC_DYNAMIC_CLOAKING: int +EOAC_MAKE_FULLSIC: int +EOAC_MUTUAL_AUTH: int +EOAC_NONE: int +EOAC_NO_CUSTOM_MARSHAL: int +EOAC_REQUIRE_FULLSIC: int +EOAC_SECURE_REFS: int +EOAC_STATIC_CLOAKING: int +EXTCONN_CALLABLE: int +EXTCONN_STRONG: int +EXTCONN_WEAK: int +Empty: _win32typing.PyOleEmpty +FMTID_DocSummaryInformation: _win32typing.PyIID +FMTID_SummaryInformation: _win32typing.PyIID +FMTID_UserDefinedProperties: _win32typing.PyIID +FUNCFLAG_FBINDABLE: int +FUNCFLAG_FDEFAULTBIND: int +FUNCFLAG_FDISPLAYBIND: int +FUNCFLAG_FHIDDEN: int +FUNCFLAG_FREQUESTEDIT: int +FUNCFLAG_FRESTRICTED: int +FUNCFLAG_FSOURCE: int +FUNCFLAG_FUSESGETLASTERROR: int 
+FUNC_DISPATCH: int +FUNC_NONVIRTUAL: int +FUNC_PUREVIRTUAL: int +FUNC_STATIC: int +FUNC_VIRTUAL: int +IDLFLAG_FIN: int +IDLFLAG_FLCID: int +IDLFLAG_FOUT: int +IDLFLAG_FRETVAL: int +IDLFLAG_NONE: int +IID_IBindCtx: _win32typing.PyIID +IID_ICancelMethodCalls: _win32typing.PyIID +IID_ICatInformation: _win32typing.PyIID +IID_ICatRegister: _win32typing.PyIID +IID_IClassFactory: _win32typing.PyIID +IID_IClientSecurity: _win32typing.PyIID +IID_IConnectionPoint: _win32typing.PyIID +IID_IConnectionPointContainer: _win32typing.PyIID +IID_IContext: _win32typing.PyIID +IID_ICreateTypeInfo: _win32typing.PyIID +IID_ICreateTypeLib: _win32typing.PyIID +IID_ICreateTypeLib2: _win32typing.PyIID +IID_IDataObject: _win32typing.PyIID +IID_IDispatch: _win32typing.PyIID +IID_IDispatchEx: _win32typing.PyIID +IID_IDropSource: _win32typing.PyIID +IID_IDropTarget: _win32typing.PyIID +IID_IEnumCATEGORYINFO: _win32typing.PyIID +IID_IEnumConnectionPoints: _win32typing.PyIID +IID_IEnumConnections: _win32typing.PyIID +IID_IEnumContextProps: _win32typing.PyIID +IID_IEnumFORMATETC: _win32typing.PyIID +IID_IEnumGUID: _win32typing.PyIID +IID_IEnumMoniker: _win32typing.PyIID +IID_IEnumSTATPROPSETSTG: _win32typing.PyIID +IID_IEnumSTATPROPSTG: _win32typing.PyIID +IID_IEnumSTATSTG: _win32typing.PyIID +IID_IEnumString: _win32typing.PyIID +IID_IEnumVARIANT: _win32typing.PyIID +IID_IErrorLog: _win32typing.PyIID +IID_IExternalConnection: _win32typing.PyIID +IID_IGlobalInterfaceTable: _win32typing.PyIID +IID_ILockBytes: _win32typing.PyIID +IID_IMarshal: _win32typing.PyIID +IID_IMoniker: _win32typing.PyIID +IID_IOleWindow: _win32typing.PyIID +IID_IPersist: _win32typing.PyIID +IID_IPersistFile: _win32typing.PyIID +IID_IPersistPropertyBag: _win32typing.PyIID +IID_IPersistStorage: _win32typing.PyIID +IID_IPersistStream: _win32typing.PyIID +IID_IPersistStreamInit: _win32typing.PyIID +IID_IPropertyBag: _win32typing.PyIID +IID_IPropertySetStorage: _win32typing.PyIID +IID_IPropertyStorage: _win32typing.PyIID +IID_IProvideClassInfo: _win32typing.PyIID +IID_IProvideClassInfo2: _win32typing.PyIID +IID_IRunningObjectTable: _win32typing.PyIID +IID_IServerSecurity: _win32typing.PyIID +IID_IServiceProvider: _win32typing.PyIID +IID_IStdMarshalInfo: _win32typing.PyIID +IID_IStorage: _win32typing.PyIID +IID_IStream: _win32typing.PyIID +IID_ITypeComp: _win32typing.PyIID +IID_ITypeInfo: _win32typing.PyIID +IID_ITypeLib: _win32typing.PyIID +IID_IUnknown: _win32typing.PyIID +IID_NULL: _win32typing.PyIID +IID_StdOle: _win32typing.PyIID +IMPLTYPEFLAG_FDEFAULT: int +IMPLTYPEFLAG_FRESTRICTED: int +IMPLTYPEFLAG_FSOURCE: int +INVOKE_FUNC: int +INVOKE_PROPERTYGET: int +INVOKE_PROPERTYPUT: int +INVOKE_PROPERTYPUTREF: int +InterfaceNames: dict[str, _win32typing.PyIID] +MKSYS_ANTIMONIKER: int +MKSYS_CLASSMONIKER: int +MKSYS_FILEMONIKER: int +MKSYS_GENERICCOMPOSITE: int +MKSYS_ITEMMONIKER: int +MKSYS_NONE: int +MKSYS_POINTERMONIKER: int +MSHCTX_DIFFERENTMACHINE: int +MSHCTX_INPROC: int +MSHCTX_LOCAL: int +MSHCTX_NOSHAREDMEM: int +MSHLFLAGS_NOPING: int +MSHLFLAGS_NORMAL: int +MSHLFLAGS_TABLESTRONG: int +MSHLFLAGS_TABLEWEAK: int +Missing: _win32typing.PyOleMissing +Nothing: _win32typing.PyOleNothing +PARAMFLAG_FHASDEFAULT: int +PARAMFLAG_FIN: int +PARAMFLAG_FLCID: int +PARAMFLAG_FOPT: int +PARAMFLAG_FOUT: int +PARAMFLAG_FRETVAL: int +PARAMFLAG_NONE: int +REGCLS_MULTIPLEUSE: int +REGCLS_MULTI_SEPARATE: int +REGCLS_SINGLEUSE: int +REGCLS_SUSPENDED: int +ROTFLAGS_ALLOWANYCLIENT: int +ROTFLAGS_REGISTRATIONKEEPSALIVE: int +RPC_C_AUTHN_DCE_PRIVATE: int 
+RPC_C_AUTHN_DCE_PUBLIC: int +RPC_C_AUTHN_DEC_PUBLIC: int +RPC_C_AUTHN_DEFAULT: int +RPC_C_AUTHN_DPA: int +RPC_C_AUTHN_GSS_KERBEROS: int +RPC_C_AUTHN_GSS_NEGOTIATE: int +RPC_C_AUTHN_GSS_SCHANNEL: int +RPC_C_AUTHN_LEVEL_CALL: int +RPC_C_AUTHN_LEVEL_CONNECT: int +RPC_C_AUTHN_LEVEL_DEFAULT: int +RPC_C_AUTHN_LEVEL_NONE: int +RPC_C_AUTHN_LEVEL_PKT: int +RPC_C_AUTHN_LEVEL_PKT_INTEGRITY: int +RPC_C_AUTHN_LEVEL_PKT_PRIVACY: int +RPC_C_AUTHN_MQ: int +RPC_C_AUTHN_MSN: int +RPC_C_AUTHN_NONE: int +RPC_C_AUTHN_WINNT: int +RPC_C_AUTHZ_DCE: int +RPC_C_AUTHZ_DEFAULT: int +RPC_C_AUTHZ_NAME: int +RPC_C_AUTHZ_NONE: int +RPC_C_IMP_LEVEL_ANONYMOUS: int +RPC_C_IMP_LEVEL_DEFAULT: int +RPC_C_IMP_LEVEL_DELEGATE: int +RPC_C_IMP_LEVEL_IDENTIFY: int +RPC_C_IMP_LEVEL_IMPERSONATE: int +STDOLE2_LCID: int +STDOLE2_MAJORVERNUM: int +STDOLE2_MINORVERNUM: int +STDOLE_LCID: int +STDOLE_MAJORVERNUM: int +STDOLE_MINORVERNUM: int +STREAM_SEEK_CUR: int +STREAM_SEEK_END: int +STREAM_SEEK_SET: int +SYS_MAC: int +SYS_WIN16: int +SYS_WIN32: int +ServerInterfaces: dict[_win32typing.PyIID, bytes] +TKIND_ALIAS: int +TKIND_COCLASS: int +TKIND_DISPATCH: int +TKIND_ENUM: int +TKIND_INTERFACE: int +TKIND_MODULE: int +TKIND_RECORD: int +TKIND_UNION: int +TYMED_ENHMF: int +TYMED_FILE: int +TYMED_GDI: int +TYMED_HGLOBAL: int +TYMED_ISTORAGE: int +TYMED_ISTREAM: int +TYMED_MFPICT: int +TYMED_NULL: int +TYPEFLAG_FAGGREGATABLE: int +TYPEFLAG_FAPPOBJECT: int +TYPEFLAG_FCANCREATE: int +TYPEFLAG_FCONTROL: int +TYPEFLAG_FDISPATCHABLE: int +TYPEFLAG_FDUAL: int +TYPEFLAG_FHIDDEN: int +TYPEFLAG_FLICENSED: int +TYPEFLAG_FNONEXTENSIBLE: int +TYPEFLAG_FOLEAUTOMATION: int +TYPEFLAG_FPREDECLID: int +TYPEFLAG_FREPLACEABLE: int +TYPEFLAG_FRESTRICTED: int +TYPEFLAG_FREVERSEBIND: int +TypeIIDs: dict[_win32typing.PyIID, type] +URL_MK_LEGACY: int +URL_MK_UNIFORM: int +VARFLAG_FREADONLY: int +VAR_CONST: int +VAR_DISPATCH: int +VAR_PERINSTANCE: int +VAR_STATIC: int +VT_ARRAY: int +VT_BLOB: int +VT_BLOB_OBJECT: int +VT_BOOL: int +VT_BSTR: int +VT_BSTR_BLOB: int +VT_BYREF: int +VT_CARRAY: int +VT_CF: int +VT_CLSID: int +VT_CY: int +VT_DATE: int +VT_DECIMAL: int +VT_DISPATCH: int +VT_EMPTY: int +VT_ERROR: int +VT_FILETIME: int +VT_HRESULT: int +VT_I1: int +VT_I2: int +VT_I4: int +VT_I8: int +VT_ILLEGAL: int +VT_ILLEGALMASKED: int +VT_INT: int +VT_LPSTR: int +VT_LPWSTR: int +VT_NULL: int +VT_PTR: int +VT_R4: int +VT_R8: int +VT_RECORD: int +VT_RESERVED: int +VT_SAFEARRAY: int +VT_STORAGE: int +VT_STORED_OBJECT: int +VT_STREAM: int +VT_STREAMED_OBJECT: int +VT_TYPEMASK: int +VT_UI1: int +VT_UI2: int +VT_UI4: int +VT_UI8: int +VT_UINT: int +VT_UNKNOWN: int +VT_USERDEFINED: int +VT_VARIANT: int +VT_VECTOR: int +VT_VOID: int + +def connect(*args, **kwargs): ... 
# incomplete + +dcom: int +fdexNameCaseInsensitive: int +fdexNameCaseSensitive: int +fdexNameEnsure: int +fdexNameImplicit: int +fdexPropCanCall: int +fdexPropCanConstruct: int +fdexPropCanGet: int +fdexPropCanPut: int +fdexPropCanPutRef: int +fdexPropCanSourceEvents: int +fdexPropCannotCall: int +fdexPropCannotConstruct: int +fdexPropCannotGet: int +fdexPropCannotPut: int +fdexPropCannotPutRef: int +fdexPropCannotSourceEvents: int +fdexPropDynamicType: int +fdexPropNoSideEffects: int +frozen: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/dde.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/dde.pyi new file mode 100644 index 00000000..ac041d3c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/dde.pyi @@ -0,0 +1,33 @@ +# Can't generate with stubgen because: +# "ImportError: This must be an MFC application - try 'import win32ui' first" +APPCLASS_MONITOR: int +APPCLASS_STANDARD: int +APPCMD_CLIENTONLY: int +APPCMD_FILTERINITS: int +CBF_FAIL_ADVISES: int +CBF_FAIL_ALLSVRXACTIONS: int +CBF_FAIL_CONNECTIONS: int +CBF_FAIL_EXECUTES: int +CBF_FAIL_POKES: int +CBF_FAIL_REQUESTS: int +CBF_FAIL_SELFCONNECTIONS: int +CBF_SKIP_ALLNOTIFICATIONS: int +CBF_SKIP_CONNECT_CONFIRMS: int +CBF_SKIP_DISCONNECTS: int +CBF_SKIP_REGISTRATIONS: int + +def CreateConversation(*args, **kwargs): ... # incomplete +def CreateServer(*args, **kwargs): ... # incomplete +def CreateServerSystemTopic(*args, **kwargs): ... # incomplete +def CreateStringItem(*args, **kwargs): ... # incomplete +def CreateTopic(*args, **kwargs): ... # incomplete + +MF_CALLBACKS: int +MF_CONV: int +MF_ERRORS: int +MF_HSZ_INFO: int +MF_LINKS: int +MF_POSTMSGS: int +MF_SENDMSGS: int + +class error(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/win32ui.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/win32ui.pyi new file mode 100644 index 00000000..ab07c2db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/win32ui.pyi @@ -0,0 +1,369 @@ +from _typeshed import Incomplete + +import _win32typing + +class error(Exception): ... + +def ComparePath(path1: str, path2: str): ... +def CreateMDIFrame() -> _win32typing.PyCMDIFrameWnd: ... +def CreateMDIChild() -> _win32typing.PyCMDIChildWnd: ... +def CreateBitmap() -> _win32typing.PyCBitmap: ... +def CreateBitmapFromHandle(): ... +def CreateBrush() -> _win32typing.PyCBrush: ... +def CreateButton() -> _win32typing.PyCButton: ... +def CreateColorDialog( + initColor: int = ..., flags: int = ..., parent: _win32typing.PyCWnd | None = ... +) -> _win32typing.PyCColorDialog: ... +def CreateControl( + classId: str, + windowName: str, + style, + rect: tuple[Incomplete, Incomplete, Incomplete, Incomplete], + parent: _win32typing.PyCWnd, + _id, + bStorage, + obPersist: Incomplete | None = ..., + licKey: str | None = ..., +) -> _win32typing.PyCWnd: ... +def CreateControlBar() -> _win32typing.PyCControlBar: ... +def CreateCtrlView(doc: _win32typing.PyCDocument, className: str, style: int = ...) 
-> _win32typing.PyCCtrlView: ... +def CreateDC() -> None: ... +def CreateDCFromHandle(hwnd: int) -> _win32typing.PyCDC: ... +def CreateDialog(idRes, dll: _win32typing.PyDLL | None = ...) -> _win32typing.PyCDialog: ... +def CreateDialogBar() -> _win32typing.PyCDialogBar: ... +def CreateDialogIndirect(oblist) -> _win32typing.PyCDialog: ... +def CreatePrintDialog( + idRes, bPrintSetupOnly, dwFlags, parent: _win32typing.PyCWnd | None = ..., dll: _win32typing.PyDLL | None = ... +) -> _win32typing.PyCPrintDialog: ... +def CreateDocTemplate(idRes) -> _win32typing.PyCDocTemplate: ... +def CreateEdit() -> _win32typing.PyCEdit: ... +def CreateFileDialog( + bFileOpen, + arg, + defExt: str | None = ..., + fileName: str | None = ..., + _filter: str | None = ..., + parent: _win32typing.PyCWnd | None = ..., +) -> _win32typing.PyCFileDialog: ... +def CreateFontDialog( + arg, font: Incomplete | None = ..., dcPrinter: _win32typing.PyCDC | None = ..., parent: _win32typing.PyCWnd | None = ... +) -> _win32typing.PyCFontDialog: ... +def CreateFormView(doc: _win32typing.PyCDocument, Template) -> _win32typing.PyCFormView: ... +def CreateFrame(): ... +def CreateTreeCtrl() -> _win32typing.PyCTreeCtrl: ... +def CreateTreeView(doc: _win32typing.PyCDocument) -> _win32typing.PyCTreeView: ... +def CreatePalette(lp): ... +def CreatePopupMenu() -> _win32typing.PyCMenu: ... +def CreateMenu() -> _win32typing.PyCMenu: ... +def CreatePen(style, width, color): ... +def CreateProgressCtrl() -> _win32typing.PyCProgressCtrl: ... +def CreatePropertyPage(resource: _win32typing.PyResourceId, caption: int = ...) -> _win32typing.PyCPropertyPage: ... +def CreatePropertyPageIndirect(resourcelist: _win32typing.PyDialogTemplate, caption=...) -> _win32typing.PyCPropertyPage: ... +def CreatePropertySheet( + caption: _win32typing.PyResourceId, parent: _win32typing.PyCWnd | None = ..., select=... +) -> _win32typing.PyCPropertySheet: ... +def CreateRgn() -> _win32typing.PyCRgn: ... +def CreateRichEditCtrl() -> _win32typing.PyCRichEditCtrl: ... +def CreateRichEditDocTemplate(idRes) -> _win32typing.PyCRichEditDocTemplate: ... +def CreateRichEditView(doc: _win32typing.PyCDocument | None = ...) -> _win32typing.PyCRichEditView: ... +def CreateSliderCtrl() -> _win32typing.PyCSliderCtrl: ... +def CreateSplitter() -> _win32typing.PyCSplitterWnd: ... +def CreateStatusBar(parent: _win32typing.PyCWnd, arg, arg1, ctrlStype=...) -> _win32typing.PyCStatusBar: ... +def CreateStatusBarCtrl() -> _win32typing.PyCStatusBarCtrl: ... +def CreateFont(properties) -> _win32typing.PyCFont: ... +def CreateToolBar(parent: _win32typing.PyCWnd, style, arg) -> _win32typing.PyCToolBar: ... +def CreateToolBarCtrl() -> _win32typing.PyCToolBarCtrl: ... +def CreateToolTipCtrl() -> _win32typing.PyCToolTipCtrl: ... +def CreateThread() -> _win32typing.PyCWinThread: ... +def CreateView(doc: _win32typing.PyCDocument) -> _win32typing.PyCScrollView: ... +def CreateEditView(doc: _win32typing.PyCDocument) -> _win32typing.PyCEditView: ... +def CreateDebuggerThread() -> None: ... +def CreateWindowFromHandle(hwnd: int) -> _win32typing.PyCWnd: ... +def CreateWnd() -> _win32typing.PyCWnd: ... +def DestroyDebuggerThread() -> None: ... +def DoWaitCursor(code) -> None: ... +def DisplayTraceback() -> None: ... +def Enable3dControls(): ... +def FindWindow(className: str, windowName: str) -> _win32typing.PyCWnd: ... +def FindWindowEx( + parentWindow: _win32typing.PyCWnd, childAfter: _win32typing.PyCWnd, className: str, windowName: str +) -> _win32typing.PyCWnd: ... 
+def FullPath(path: str) -> str: ... +def GetActiveWindow() -> _win32typing.PyCWnd: ... +def GetApp() -> _win32typing.PyCWinApp: ... +def GetAppName(): ... +def GetAppRegistryKey() -> None: ... +def GetBytes(address, size) -> str: ... +def GetCommandLine() -> str: ... +def GetDeviceCaps(hdc, index): ... +def GetFileTitle(fileName: str) -> str: ... +def GetFocus() -> _win32typing.PyCWnd: ... +def GetForegroundWindow() -> _win32typing.PyCWnd: ... +def GetHalftoneBrush() -> _win32typing.PyCBrush: ... +def GetInitialStateRequest(): ... +def GetMainFrame() -> _win32typing.PyCWnd: ... +def GetName() -> str: ... +def GetProfileFileName() -> str: ... +def GetProfileVal(section: str, entry: str, defValue: str) -> str: ... +def GetResource() -> _win32typing.PyDLL: ... +def GetThread() -> _win32typing.PyCWinApp: ... +def GetType(): ... +def InitRichEdit() -> str: ... +def InstallCallbackCaller(): ... +def IsDebug() -> int: ... +def IsWin32s() -> int: ... +def IsObject(__o: object) -> bool: ... +def LoadDialogResource(idRes, dll: _win32typing.PyDLL | None = ...): ... +def LoadLibrary(fileName: str) -> _win32typing.PyDLL: ... +def LoadMenu(_id, dll: _win32typing.PyDLL | None = ...) -> _win32typing.PyCMenu: ... +def LoadStdProfileSettings(maxFiles) -> None: ... +def LoadString(stringId) -> str: ... +def MessageBox(message: str, arg, title: str | None = ...): ... +def OutputDebugString(msg: str) -> None: ... +def EnableControlContainer(): ... +def PrintTraceback(tb, output) -> None: ... +def PumpWaitingMessages(__firstMessage: int = ..., __lastMessage: int = ...) -> int: ... +def RegisterWndClass(style, hCursor: int = ..., hBrush: int = ..., hIcon=...) -> str: ... +def RemoveRecentFile(index: int = ...) -> None: ... +def SetAppHelpPath(): ... +def SetAppName(appName: str): ... +def SetCurrentInstanceHandle(newVal): ... +def SetCurrentResourceHandle(newVal): ... +def SetDialogBkColor(arg, arg1): ... +def SetProfileFileName(filename: str) -> None: ... +def SetRegistryKey(key: str) -> None: ... +def SetResource(dll) -> _win32typing.PyDLL: ... +def SetStatusText(msg: str, bForce: int = ...) -> None: ... +def StartDebuggerPump() -> None: ... +def StopDebuggerPump() -> None: ... +def TranslateMessage(): ... +def TranslateVirtualKey(vk) -> str: ... +def WinHelp(arg, data: str) -> None: ... +def WriteProfileVal(section: str, entry: str, value: str) -> None: ... +def AddToRecentFileList(*args, **kwargs): ... # incomplete +def CreateImageList(*args, **kwargs): ... # incomplete +def CreateListCtrl(*args, **kwargs): ... # incomplete +def CreateListView(*args, **kwargs): ... # incomplete +def CreateRectRgn(*args, **kwargs): ... # incomplete +def GetRecentFileList(*args, **kwargs): ... # incomplete +def OutputDebug(*args, **kwargs): ... 
# incomplete + +AFX_IDW_PANE_FIRST: int +AFX_IDW_PANE_LAST: int +AFX_WS_DEFAULT_VIEW: int +CDocTemplate_Confidence_maybeAttemptForeign: int +CDocTemplate_Confidence_maybeAttemptNative: int +CDocTemplate_Confidence_noAttempt: int +CDocTemplate_Confidence_yesAlreadyOpen: int +CDocTemplate_Confidence_yesAttemptForeign: int +CDocTemplate_Confidence_yesAttemptNative: int +CDocTemplate_docName: int +CDocTemplate_fileNewName: int +CDocTemplate_filterExt: int +CDocTemplate_filterName: int +CDocTemplate_regFileTypeId: int +CDocTemplate_regFileTypeName: int +CDocTemplate_windowTitle: int +CRichEditView_WrapNone: int +CRichEditView_WrapToTargetDevice: int +CRichEditView_WrapToWindow: int +debug: int +FWS_ADDTOTITLE: int +FWS_PREFIXTITLE: int +FWS_SNAPTOBARS: int +ID_APP_ABOUT: int +ID_APP_EXIT: int +ID_EDIT_CLEAR: int +ID_EDIT_CLEAR_ALL: int +ID_EDIT_COPY: int +ID_EDIT_CUT: int +ID_EDIT_FIND: int +ID_EDIT_GOTO_LINE: int +ID_EDIT_PASTE: int +ID_EDIT_REDO: int +ID_EDIT_REPEAT: int +ID_EDIT_REPLACE: int +ID_EDIT_SELECT_ALL: int +ID_EDIT_SELECT_BLOCK: int +ID_EDIT_UNDO: int +ID_FILE_CHECK: int +ID_FILE_CLOSE: int +ID_FILE_IMPORT: int +ID_FILE_LOCATE: int +ID_FILE_MRU_FILE1: int +ID_FILE_MRU_FILE2: int +ID_FILE_MRU_FILE3: int +ID_FILE_MRU_FILE4: int +ID_FILE_NEW: int +ID_FILE_OPEN: int +ID_FILE_PAGE_SETUP: int +ID_FILE_PRINT: int +ID_FILE_PRINT_PREVIEW: int +ID_FILE_PRINT_SETUP: int +ID_FILE_RUN: int +ID_FILE_SAVE: int +ID_FILE_SAVE_ALL: int +ID_FILE_SAVE_AS: int +ID_HELP_GUI_REF: int +ID_HELP_OTHER: int +ID_HELP_PYTHON: int +ID_INDICATOR_COLNUM: int +ID_INDICATOR_LINENUM: int +ID_NEXT_PANE: int +ID_PREV_PANE: int +ID_SEPARATOR: int +ID_VIEW_BROWSE: int +ID_VIEW_EOL: int +ID_VIEW_FIXED_FONT: int +ID_VIEW_FOLD_COLLAPSE: int +ID_VIEW_FOLD_COLLAPSE_ALL: int +ID_VIEW_FOLD_EXPAND: int +ID_VIEW_FOLD_EXPAND_ALL: int +ID_VIEW_INDENTATIONGUIDES: int +ID_VIEW_INTERACTIVE: int +ID_VIEW_OPTIONS: int +ID_VIEW_RIGHT_EDGE: int +ID_VIEW_STATUS_BAR: int +ID_VIEW_TOOLBAR: int +ID_VIEW_TOOLBAR_DBG: int +ID_VIEW_WHITESPACE: int +ID_WINDOW_ARRANGE: int +ID_WINDOW_CASCADE: int +ID_WINDOW_NEW: int +ID_WINDOW_SPLIT: int +ID_WINDOW_TILE_HORZ: int +ID_WINDOW_TILE_VERT: int +IDB_BROWSER_HIER: int +IDB_DEBUGGER_HIER: int +IDB_HIERFOLDERS: int +IDC_ABOUT_VERSION: int +IDC_AUTO_RELOAD: int +IDC_AUTOCOMPLETE: int +IDC_BUTTON1: int +IDC_BUTTON2: int +IDC_BUTTON3: int +IDC_BUTTON4: int +IDC_CALLTIPS: int +IDC_CHECK1: int +IDC_CHECK2: int +IDC_CHECK3: int +IDC_COMBO1: int +IDC_COMBO2: int +IDC_EDIT1: int +IDC_EDIT2: int +IDC_EDIT3: int +IDC_EDIT4: int +IDC_EDIT_TABS: int +IDC_INDENT_SIZE: int +IDC_KEYBOARD_CONFIG: int +IDC_PROMPT1: int +IDC_PROMPT2: int +IDC_PROMPT3: int +IDC_PROMPT4: int +IDC_PROMPT_TABS: int +IDC_RADIO1: int +IDC_RADIO2: int +IDC_RIGHTEDGE_COLUMN: int +IDC_RIGHTEDGE_DEFINE: int +IDC_RIGHTEDGE_ENABLE: int +IDC_RIGHTEDGE_SAMPLE: int +IDC_SPIN1: int +IDC_SPIN2: int +IDC_SPIN3: int +IDC_TAB_SIZE: int +IDC_USE_SMART_TABS: int +IDC_USE_TABS: int +IDC_VIEW_WHITESPACE: int +IDC_VSS_INTEGRATE: int +IDD_ABOUTBOX: int +IDD_DUMMYPROPPAGE: int +IDD_GENERAL_STATUS: int +IDD_LARGE_EDIT: int +IDD_PP_DEBUGGER: int +IDD_PP_EDITOR: int +IDD_PP_FORMAT: int +IDD_PP_IDE: int +IDD_PP_TABS: int +IDD_PP_TOOLMENU: int +IDD_PROPDEMO1: int +IDD_PROPDEMO2: int +IDD_RUN_SCRIPT: int +IDD_SET_TABSTOPS: int +IDD_SIMPLE_INPUT: int +IDD_TREE: int +IDD_TREE_MB: int +IDR_CNTR_INPLACE: int +IDR_DEBUGGER: int +IDR_MAINFRAME: int +IDR_PYTHONCONTYPE: int +IDR_PYTHONTYPE: int +IDR_PYTHONTYPE_CNTR_IP: int +IDR_TEXTTYPE: int +LM_COMMIT: int +LM_HORZ: int 
+LM_HORZDOCK: int +LM_LENGTHY: int +LM_MRUWIDTH: int +LM_STRETCH: int +LM_VERTDOCK: int +MFS_4THICKFRAME: int +MFS_BLOCKSYSMENU: int +MFS_MOVEFRAME: int +MFS_SYNCACTIVE: int +MFS_THICKFRAME: int +PD_ALLPAGES: int +PD_COLLATE: int +PD_DISABLEPRINTTOFILE: int +PD_ENABLEPRINTHOOK: int +PD_ENABLEPRINTTEMPLATE: int +PD_ENABLEPRINTTEMPLATEHANDLE: int +PD_ENABLESETUPHOOK: int +PD_ENABLESETUPTEMPLATE: int +PD_ENABLESETUPTEMPLATEHANDLE: int +PD_HIDEPRINTTOFILE: int +PD_NONETWORKBUTTON: int +PD_NOPAGENUMS: int +PD_NOSELECTION: int +PD_NOWARNING: int +PD_PAGENUMS: int +PD_PRINTSETUP: int +PD_PRINTTOFILE: int +PD_RETURNDC: int +PD_RETURNDEFAULT: int +PD_RETURNIC: int +PD_SELECTION: int +PD_SHOWHELP: int +PD_USEDEVMODECOPIES: int +PD_USEDEVMODECOPIESANDCOLLATE: int +PSWIZB_BACK: int +PSWIZB_DISABLEDFINISH: int +PSWIZB_FINISH: int +PSWIZB_NEXT: int +IDC_DBG_ADD: int +IDC_DBG_BREAKPOINTS: int +IDC_DBG_CLEAR: int +IDC_DBG_CLOSE: int +IDC_DBG_GO: int +IDC_DBG_STACK: int +IDC_DBG_STEP: int +IDC_DBG_STEPOUT: int +IDC_DBG_STEPOVER: int +IDC_DBG_WATCH: int +IDC_EDITOR_COLOR: int +IDC_FOLD_ENABLE: int +IDC_FOLD_ON_OPEN: int +IDC_FOLD_SHOW_LINES: int +IDC_LIST1: int +IDC_MARGIN_FOLD: int +IDC_MARGIN_LINENUMBER: int +IDC_MARGIN_MARKER: int +IDC_TABTIMMY_BG: int +IDC_TABTIMMY_IND: int +IDC_TABTIMMY_NONE: int +IDC_VIEW_EOL: int +IDC_VIEW_INDENTATIONGUIDES: int +ID_VIEW_FOLD_TOPLEVEL: int +UNICODE: int +copyright: str +dllhandle: int +types: dict[str, type] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/win32uiole.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/win32uiole.pyi new file mode 100644 index 00000000..5d30d75e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pythonwin/win32uiole.pyi @@ -0,0 +1,25 @@ +import _win32typing + +def AfxOleInit(enabled) -> None: ... +def CreateInsertDialog() -> _win32typing.PyCOleInsertDialog: ... +def CreateOleClientItem() -> _win32typing.PyCOleClientItem: ... +def CreateOleDocument(template: _win32typing.PyCDocTemplate, fileName: str | None = ...) -> _win32typing.PyCOleDocument: ... +def DaoGetEngine() -> _win32typing.PyIDispatch: ... +def GetIDispatchForWindow() -> _win32typing.PyIDispatch: ... +def OleGetUserCtrl(): ... +def OleSetUserCtrl(bUserCtrl): ... +def SetMessagePendingDelay(delay) -> None: ... +def EnableNotRespondingDialog(enabled) -> None: ... +def EnableBusyDialog(*args, **kwargs): ... 
# incomplete + +COleClientItem_activeState: int +COleClientItem_activeUIState: int +COleClientItem_emptyState: int +COleClientItem_loadedState: int +COleClientItem_openState: int +OLE_CHANGED: int +OLE_CHANGED_ASPECT: int +OLE_CHANGED_STATE: int +OLE_CLOSED: int +OLE_RENAMED: int +OLE_SAVED: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pywintypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pywintypes.pyi new file mode 100644 index 00000000..64c9f845 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/pywintypes.pyi @@ -0,0 +1 @@ +from win32.lib.pywintypes import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/regutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/regutil.pyi new file mode 100644 index 00000000..ef4d7a48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/regutil.pyi @@ -0,0 +1 @@ +from win32.lib.regutil import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/servicemanager.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/servicemanager.pyi new file mode 100644 index 00000000..91dbd289 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/servicemanager.pyi @@ -0,0 +1 @@ +from win32.servicemanager import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/sspicon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/sspicon.pyi new file mode 100644 index 00000000..0618e191 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/sspicon.pyi @@ -0,0 +1 @@ +from win32.lib.sspicon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/timer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/timer.pyi new file mode 100644 index 00000000..d3d28016 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/timer.pyi @@ -0,0 +1 @@ +from win32.timer import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win2kras.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win2kras.pyi new file mode 100644 index 00000000..e9b12664 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win2kras.pyi @@ -0,0 +1 @@ +from win32.lib.win2kras import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/_wincerapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/_wincerapi.pyi new file mode 100644 index 00000000..a8834cc8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/_wincerapi.pyi @@ -0,0 +1,71 @@ +from _typeshed import Incomplete + +import _win32typing + +def CeRapiInit() -> None: ... 
+def CeRapiUninit() -> None: ... +def CreateProcess( + __appName: str | None, + __commandLine: str, + __processAttributes: _win32typing.PySECURITY_ATTRIBUTES | None, + __threadAttributes: _win32typing.PySECURITY_ATTRIBUTES | None, + __bInheritHandles: int | bool, + __dwCreationFlags: int, + __newEnvironment: dict[str, str] | None, + __currentDirectory: str | None, + __startupinfo: _win32typing.PySTARTUPINFO, +) -> tuple[int, int, Incomplete, Incomplete]: ... +def CeRapiInitEx(): ... +def CeCopyFile(_from: str, to: str, bFailIfExists) -> None: ... +def CeCheckPassword(password: str) -> None: ... +def CeCreateFile( + fileName: str, + desiredAccess, + shareMode, + attributes: _win32typing.PySECURITY_ATTRIBUTES, + creationDisposition, + flagsAndAttributes, + hTemplateFile: int, +) -> _win32typing.PyCEHANDLE: ... +def CeDeleteFile(fileName: str) -> None: ... +def CeMoveFile(existingFileName: str, newFileName: str) -> None: ... +def CeCreateDirectory(name: str, sa: _win32typing.PySECURITY_ATTRIBUTES) -> None: ... +def CeRemoveDirectory(lpPathName: str) -> None: ... +def CeGetTempPath() -> str: ... +def CeGetSystemInfo(): ... +def CeGetDesktopDeviceCaps(): ... +def CeGetSystemMetrics(): ... +def CeGetSpecialFolderPath() -> str: ... +def CeGetStoreInformation() -> tuple[Incomplete, Incomplete]: ... +def CeGetSystemPowerStatusEx(): ... +def CeSHCreateShortcut() -> None: ... +def CeSHGetShortcutTarget(): ... +def CeGetVersionEx() -> tuple[Incomplete, Incomplete, Incomplete, Incomplete, str]: ... +def CeGlobalMemoryStatus(): ... +def FindFiles(fileSpec: str): ... +def CeGetFileAttributes(fileName: str): ... +def CeSetFileAttributes(filename: str, newAttributes) -> None: ... +def CeGetFileSize(): ... +def CeReadFile(hFile: int, bufSize) -> str: ... +def WriteFile(__hFile: int, __data: str | bytes | _win32typing.PyOVERLAPPEDReadBuffer) -> tuple[int, int]: ... + +CSIDL_BITBUCKET = ... +CSIDL_COMMON_DESKTOPDIRECTORY = ... +CSIDL_COMMON_PROGRAMS = ... +CSIDL_COMMON_STARTMENU = ... +CSIDL_COMMON_STARTUP = ... +CSIDL_CONTROLS = ... +CSIDL_DESKTOP = ... +CSIDL_DESKTOPDIRECTORY = ... +CSIDL_DRIVES = ... +CSIDL_FONTS = ... +CSIDL_NETHOOD = ... +CSIDL_NETWORK = ... +CSIDL_PERSONAL = ... +CSIDL_PRINTERS = ... +CSIDL_PROGRAMS = ... +CSIDL_RECENT = ... +CSIDL_SENDTO = ... +CSIDL_STARTMENU = ... +CSIDL_STARTUP = ... +CSIDL_TEMPLATES = ... 
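The _wincerapi.pyi stub that ends here mixes two declaration styles: annotated module-level names (as in `CLSCTX_ALL: int` in the pythoncom stub earlier in this diff) and bare `CSIDL_* = ...` assignments with no annotation. A stub-statistics pass over vendored typeshed data, which is the kind of workload the typeshed_stats benchmark represents, would normally distinguish the two forms when counting annotated symbols. The sketch below is a minimal, hypothetical illustration using only the standard-library ast module; it is not code from this PR, and the helper name is invented for illustration.

    import ast

    # Hypothetical helper (not part of this PR): count annotated vs. bare
    # module-level assignments in stub source, e.g. "CLSCTX_ALL: int"
    # versus "CSIDL_BITBUCKET = ...".
    def count_module_assignments(stub_source: str) -> tuple[int, int]:
        annotated = bare = 0
        for node in ast.parse(stub_source).body:
            if isinstance(node, ast.AnnAssign):   # NAME: int
                annotated += 1
            elif isinstance(node, ast.Assign):    # NAME = ...
                bare += 1
        return annotated, bare

    # Example: one annotated name, one bare assignment -> (1, 1)
    print(count_module_assignments("CLSCTX_ALL: int\nCSIDL_BITBUCKET = ...\n"))

Since .pyi files are syntactically ordinary Python, ast.parse handles them directly; a real statistics tool would walk every stub file under the vendored data/typeshed directory rather than a single source string.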
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/afxres.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/afxres.pyi new file mode 100644 index 00000000..58a20869 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/afxres.pyi @@ -0,0 +1,499 @@ +from _typeshed import Incomplete + +TCS_MULTILINE: int +CBRS_ALIGN_LEFT: int +CBRS_ALIGN_TOP: int +CBRS_ALIGN_RIGHT: int +CBRS_ALIGN_BOTTOM: int +CBRS_ALIGN_ANY: int +CBRS_BORDER_LEFT: int +CBRS_BORDER_TOP: int +CBRS_BORDER_RIGHT: int +CBRS_BORDER_BOTTOM: int +CBRS_BORDER_ANY: int +CBRS_TOOLTIPS: int +CBRS_FLYBY: int +CBRS_FLOAT_MULTI: int +CBRS_BORDER_3D: int +CBRS_HIDE_INPLACE: int +CBRS_SIZE_DYNAMIC: int +CBRS_SIZE_FIXED: int +CBRS_FLOATING: int +CBRS_GRIPPER: int +CBRS_ORIENT_HORZ: Incomplete +CBRS_ORIENT_VERT: Incomplete +CBRS_ORIENT_ANY: Incomplete +CBRS_ALL: int +CBRS_NOALIGN: int +CBRS_LEFT: Incomplete +CBRS_TOP: Incomplete +CBRS_RIGHT: Incomplete +CBRS_BOTTOM: Incomplete +SBPS_NORMAL: int +SBPS_NOBORDERS: int +SBPS_POPOUT: int +SBPS_OWNERDRAW: int +SBPS_DISABLED: int +SBPS_STRETCH: int +ID_INDICATOR_EXT: int +ID_INDICATOR_CAPS: int +ID_INDICATOR_NUM: int +ID_INDICATOR_SCRL: int +ID_INDICATOR_OVR: int +ID_INDICATOR_REC: int +ID_INDICATOR_KANA: int +ID_SEPARATOR: int +AFX_IDW_CONTROLBAR_FIRST: int +AFX_IDW_CONTROLBAR_LAST: int +AFX_IDW_TOOLBAR: int +AFX_IDW_STATUS_BAR: int +AFX_IDW_PREVIEW_BAR: int +AFX_IDW_RESIZE_BAR: int +AFX_IDW_DOCKBAR_TOP: int +AFX_IDW_DOCKBAR_LEFT: int +AFX_IDW_DOCKBAR_RIGHT: int +AFX_IDW_DOCKBAR_BOTTOM: int +AFX_IDW_DOCKBAR_FLOAT: int + +def AFX_CONTROLBAR_MASK(nIDC): ... 
+ +AFX_IDW_PANE_FIRST: int +AFX_IDW_PANE_LAST: int +AFX_IDW_HSCROLL_FIRST: int +AFX_IDW_VSCROLL_FIRST: int +AFX_IDW_SIZE_BOX: int +AFX_IDW_PANE_SAVE: int +AFX_IDS_APP_TITLE: int +AFX_IDS_IDLEMESSAGE: int +AFX_IDS_HELPMODEMESSAGE: int +AFX_IDS_APP_TITLE_EMBEDDING: int +AFX_IDS_COMPANY_NAME: int +AFX_IDS_OBJ_TITLE_INPLACE: int +ID_FILE_NEW: int +ID_FILE_OPEN: int +ID_FILE_CLOSE: int +ID_FILE_SAVE: int +ID_FILE_SAVE_AS: int +ID_FILE_PAGE_SETUP: int +ID_FILE_PRINT_SETUP: int +ID_FILE_PRINT: int +ID_FILE_PRINT_DIRECT: int +ID_FILE_PRINT_PREVIEW: int +ID_FILE_UPDATE: int +ID_FILE_SAVE_COPY_AS: int +ID_FILE_SEND_MAIL: int +ID_FILE_MRU_FIRST: int +ID_FILE_MRU_FILE1: int +ID_FILE_MRU_FILE2: int +ID_FILE_MRU_FILE3: int +ID_FILE_MRU_FILE4: int +ID_FILE_MRU_FILE5: int +ID_FILE_MRU_FILE6: int +ID_FILE_MRU_FILE7: int +ID_FILE_MRU_FILE8: int +ID_FILE_MRU_FILE9: int +ID_FILE_MRU_FILE10: int +ID_FILE_MRU_FILE11: int +ID_FILE_MRU_FILE12: int +ID_FILE_MRU_FILE13: int +ID_FILE_MRU_FILE14: int +ID_FILE_MRU_FILE15: int +ID_FILE_MRU_FILE16: int +ID_FILE_MRU_LAST: int +ID_EDIT_CLEAR: int +ID_EDIT_CLEAR_ALL: int +ID_EDIT_COPY: int +ID_EDIT_CUT: int +ID_EDIT_FIND: int +ID_EDIT_PASTE: int +ID_EDIT_PASTE_LINK: int +ID_EDIT_PASTE_SPECIAL: int +ID_EDIT_REPEAT: int +ID_EDIT_REPLACE: int +ID_EDIT_SELECT_ALL: int +ID_EDIT_UNDO: int +ID_EDIT_REDO: int +ID_WINDOW_NEW: int +ID_WINDOW_ARRANGE: int +ID_WINDOW_CASCADE: int +ID_WINDOW_TILE_HORZ: int +ID_WINDOW_TILE_VERT: int +ID_WINDOW_SPLIT: int +AFX_IDM_WINDOW_FIRST: int +AFX_IDM_WINDOW_LAST: int +AFX_IDM_FIRST_MDICHILD: int +ID_APP_ABOUT: int +ID_APP_EXIT: int +ID_HELP_INDEX: int +ID_HELP_FINDER: int +ID_HELP_USING: int +ID_CONTEXT_HELP: int +ID_HELP: int +ID_DEFAULT_HELP: int +ID_NEXT_PANE: int +ID_PREV_PANE: int +ID_FORMAT_FONT: int +ID_OLE_INSERT_NEW: int +ID_OLE_EDIT_LINKS: int +ID_OLE_EDIT_CONVERT: int +ID_OLE_EDIT_CHANGE_ICON: int +ID_OLE_EDIT_PROPERTIES: int +ID_OLE_VERB_FIRST: int +ID_OLE_VERB_LAST: int +AFX_ID_PREVIEW_CLOSE: int +AFX_ID_PREVIEW_NUMPAGE: int +AFX_ID_PREVIEW_NEXT: int +AFX_ID_PREVIEW_PREV: int +AFX_ID_PREVIEW_PRINT: int +AFX_ID_PREVIEW_ZOOMIN: int +AFX_ID_PREVIEW_ZOOMOUT: int +ID_VIEW_TOOLBAR: int +ID_VIEW_STATUS_BAR: int +ID_RECORD_FIRST: int +ID_RECORD_LAST: int +ID_RECORD_NEXT: int +ID_RECORD_PREV: int +IDC_STATIC: int +AFX_IDS_SCFIRST: int +AFX_IDS_SCSIZE: int +AFX_IDS_SCMOVE: int +AFX_IDS_SCMINIMIZE: int +AFX_IDS_SCMAXIMIZE: int +AFX_IDS_SCNEXTWINDOW: int +AFX_IDS_SCPREVWINDOW: int +AFX_IDS_SCCLOSE: int +AFX_IDS_SCRESTORE: int +AFX_IDS_SCTASKLIST: int +AFX_IDS_MDICHILD: int +AFX_IDS_DESKACCESSORY: int +AFX_IDS_OPENFILE: int +AFX_IDS_SAVEFILE: int +AFX_IDS_ALLFILTER: int +AFX_IDS_UNTITLED: int +AFX_IDS_SAVEFILECOPY: int +AFX_IDS_PREVIEW_CLOSE: int +AFX_IDS_UNNAMED_FILE: int +AFX_IDS_ABOUT: int +AFX_IDS_HIDE: int +AFX_IDP_NO_ERROR_AVAILABLE: int +AFX_IDS_NOT_SUPPORTED_EXCEPTION: int +AFX_IDS_RESOURCE_EXCEPTION: int +AFX_IDS_MEMORY_EXCEPTION: int +AFX_IDS_USER_EXCEPTION: int +AFX_IDS_PRINTONPORT: int +AFX_IDS_ONEPAGE: int +AFX_IDS_TWOPAGE: int +AFX_IDS_PRINTPAGENUM: int +AFX_IDS_PREVIEWPAGEDESC: int +AFX_IDS_PRINTDEFAULTEXT: int +AFX_IDS_PRINTDEFAULT: int +AFX_IDS_PRINTFILTER: int +AFX_IDS_PRINTCAPTION: int +AFX_IDS_PRINTTOFILE: int +AFX_IDS_OBJECT_MENUITEM: int +AFX_IDS_EDIT_VERB: int +AFX_IDS_ACTIVATE_VERB: int +AFX_IDS_CHANGE_LINK: int +AFX_IDS_AUTO: int +AFX_IDS_MANUAL: int +AFX_IDS_FROZEN: int +AFX_IDS_ALL_FILES: int +AFX_IDS_SAVE_MENU: int +AFX_IDS_UPDATE_MENU: int +AFX_IDS_SAVE_AS_MENU: int +AFX_IDS_SAVE_COPY_AS_MENU: int +AFX_IDS_EXIT_MENU: 
int +AFX_IDS_UPDATING_ITEMS: int +AFX_IDS_METAFILE_FORMAT: int +AFX_IDS_DIB_FORMAT: int +AFX_IDS_BITMAP_FORMAT: int +AFX_IDS_LINKSOURCE_FORMAT: int +AFX_IDS_EMBED_FORMAT: int +AFX_IDS_PASTELINKEDTYPE: int +AFX_IDS_UNKNOWNTYPE: int +AFX_IDS_RTF_FORMAT: int +AFX_IDS_TEXT_FORMAT: int +AFX_IDS_INVALID_CURRENCY: int +AFX_IDS_INVALID_DATETIME: int +AFX_IDS_INVALID_DATETIMESPAN: int +AFX_IDP_INVALID_FILENAME: int +AFX_IDP_FAILED_TO_OPEN_DOC: int +AFX_IDP_FAILED_TO_SAVE_DOC: int +AFX_IDP_ASK_TO_SAVE: int +AFX_IDP_FAILED_TO_CREATE_DOC: int +AFX_IDP_FILE_TOO_LARGE: int +AFX_IDP_FAILED_TO_START_PRINT: int +AFX_IDP_FAILED_TO_LAUNCH_HELP: int +AFX_IDP_INTERNAL_FAILURE: int +AFX_IDP_COMMAND_FAILURE: int +AFX_IDP_FAILED_MEMORY_ALLOC: int +AFX_IDP_PARSE_INT: int +AFX_IDP_PARSE_REAL: int +AFX_IDP_PARSE_INT_RANGE: int +AFX_IDP_PARSE_REAL_RANGE: int +AFX_IDP_PARSE_STRING_SIZE: int +AFX_IDP_PARSE_RADIO_BUTTON: int +AFX_IDP_PARSE_BYTE: int +AFX_IDP_PARSE_UINT: int +AFX_IDP_PARSE_DATETIME: int +AFX_IDP_PARSE_CURRENCY: int +AFX_IDP_FAILED_INVALID_FORMAT: int +AFX_IDP_FAILED_INVALID_PATH: int +AFX_IDP_FAILED_DISK_FULL: int +AFX_IDP_FAILED_ACCESS_READ: int +AFX_IDP_FAILED_ACCESS_WRITE: int +AFX_IDP_FAILED_IO_ERROR_READ: int +AFX_IDP_FAILED_IO_ERROR_WRITE: int +AFX_IDP_STATIC_OBJECT: int +AFX_IDP_FAILED_TO_CONNECT: int +AFX_IDP_SERVER_BUSY: int +AFX_IDP_BAD_VERB: int +AFX_IDP_FAILED_TO_NOTIFY: int +AFX_IDP_FAILED_TO_LAUNCH: int +AFX_IDP_ASK_TO_UPDATE: int +AFX_IDP_FAILED_TO_UPDATE: int +AFX_IDP_FAILED_TO_REGISTER: int +AFX_IDP_FAILED_TO_AUTO_REGISTER: int +AFX_IDP_FAILED_TO_CONVERT: int +AFX_IDP_GET_NOT_SUPPORTED: int +AFX_IDP_SET_NOT_SUPPORTED: int +AFX_IDP_ASK_TO_DISCARD: int +AFX_IDP_FAILED_TO_CREATE: int +AFX_IDP_FAILED_MAPI_LOAD: int +AFX_IDP_INVALID_MAPI_DLL: int +AFX_IDP_FAILED_MAPI_SEND: int +AFX_IDP_FILE_NONE: int +AFX_IDP_FILE_GENERIC: int +AFX_IDP_FILE_NOT_FOUND: int +AFX_IDP_FILE_BAD_PATH: int +AFX_IDP_FILE_TOO_MANY_OPEN: int +AFX_IDP_FILE_ACCESS_DENIED: int +AFX_IDP_FILE_INVALID_FILE: int +AFX_IDP_FILE_REMOVE_CURRENT: int +AFX_IDP_FILE_DIR_FULL: int +AFX_IDP_FILE_BAD_SEEK: int +AFX_IDP_FILE_HARD_IO: int +AFX_IDP_FILE_SHARING: int +AFX_IDP_FILE_LOCKING: int +AFX_IDP_FILE_DISKFULL: int +AFX_IDP_FILE_EOF: int +AFX_IDP_ARCH_NONE: int +AFX_IDP_ARCH_GENERIC: int +AFX_IDP_ARCH_READONLY: int +AFX_IDP_ARCH_ENDOFFILE: int +AFX_IDP_ARCH_WRITEONLY: int +AFX_IDP_ARCH_BADINDEX: int +AFX_IDP_ARCH_BADCLASS: int +AFX_IDP_ARCH_BADSCHEMA: int +AFX_IDS_OCC_SCALEUNITS_PIXELS: int +AFX_IDS_STATUS_FONT: int +AFX_IDS_TOOLTIP_FONT: int +AFX_IDS_UNICODE_FONT: int +AFX_IDS_MINI_FONT: int +AFX_IDP_SQL_FIRST: int +AFX_IDP_SQL_CONNECT_FAIL: int +AFX_IDP_SQL_RECORDSET_FORWARD_ONLY: int +AFX_IDP_SQL_EMPTY_COLUMN_LIST: int +AFX_IDP_SQL_FIELD_SCHEMA_MISMATCH: int +AFX_IDP_SQL_ILLEGAL_MODE: int +AFX_IDP_SQL_MULTIPLE_ROWS_AFFECTED: int +AFX_IDP_SQL_NO_CURRENT_RECORD: int +AFX_IDP_SQL_NO_ROWS_AFFECTED: int +AFX_IDP_SQL_RECORDSET_READONLY: int +AFX_IDP_SQL_SQL_NO_TOTAL: int +AFX_IDP_SQL_ODBC_LOAD_FAILED: int +AFX_IDP_SQL_DYNASET_NOT_SUPPORTED: int +AFX_IDP_SQL_SNAPSHOT_NOT_SUPPORTED: int +AFX_IDP_SQL_API_CONFORMANCE: int +AFX_IDP_SQL_SQL_CONFORMANCE: int +AFX_IDP_SQL_NO_DATA_FOUND: int +AFX_IDP_SQL_ROW_UPDATE_NOT_SUPPORTED: int +AFX_IDP_SQL_ODBC_V2_REQUIRED: int +AFX_IDP_SQL_NO_POSITIONED_UPDATES: int +AFX_IDP_SQL_LOCK_MODE_NOT_SUPPORTED: int +AFX_IDP_SQL_DATA_TRUNCATED: int +AFX_IDP_SQL_ROW_FETCH: int +AFX_IDP_SQL_INCORRECT_ODBC: int +AFX_IDP_SQL_UPDATE_DELETE_FAILED: int +AFX_IDP_SQL_DYNAMIC_CURSOR_NOT_SUPPORTED: int +AFX_IDP_DAO_FIRST: 
int +AFX_IDP_DAO_ENGINE_INITIALIZATION: int +AFX_IDP_DAO_DFX_BIND: int +AFX_IDP_DAO_OBJECT_NOT_OPEN: int +AFX_IDP_DAO_ROWTOOSHORT: int +AFX_IDP_DAO_BADBINDINFO: int +AFX_IDP_DAO_COLUMNUNAVAILABLE: int +AFX_IDC_LISTBOX: int +AFX_IDC_CHANGE: int +AFX_IDC_PRINT_DOCNAME: int +AFX_IDC_PRINT_PRINTERNAME: int +AFX_IDC_PRINT_PORTNAME: int +AFX_IDC_PRINT_PAGENUM: int +ID_APPLY_NOW: int +ID_WIZBACK: int +ID_WIZNEXT: int +ID_WIZFINISH: int +AFX_IDC_TAB_CONTROL: int +AFX_IDD_FILEOPEN: int +AFX_IDD_FILESAVE: int +AFX_IDD_FONT: int +AFX_IDD_COLOR: int +AFX_IDD_PRINT: int +AFX_IDD_PRINTSETUP: int +AFX_IDD_FIND: int +AFX_IDD_REPLACE: int +AFX_IDD_NEWTYPEDLG: int +AFX_IDD_PRINTDLG: int +AFX_IDD_PREVIEW_TOOLBAR: int +AFX_IDD_PREVIEW_SHORTTOOLBAR: int +AFX_IDD_INSERTOBJECT: int +AFX_IDD_CHANGEICON: int +AFX_IDD_CONVERT: int +AFX_IDD_PASTESPECIAL: int +AFX_IDD_EDITLINKS: int +AFX_IDD_FILEBROWSE: int +AFX_IDD_BUSY: int +AFX_IDD_OBJECTPROPERTIES: int +AFX_IDD_CHANGESOURCE: int +AFX_IDC_CONTEXTHELP: int +AFX_IDC_MAGNIFY: int +AFX_IDC_SMALLARROWS: int +AFX_IDC_HSPLITBAR: int +AFX_IDC_VSPLITBAR: int +AFX_IDC_NODROPCRSR: int +AFX_IDC_TRACKNWSE: int +AFX_IDC_TRACKNESW: int +AFX_IDC_TRACKNS: int +AFX_IDC_TRACKWE: int +AFX_IDC_TRACK4WAY: int +AFX_IDC_MOVE4WAY: int +AFX_IDB_MINIFRAME_MENU: int +AFX_IDB_CHECKLISTBOX_NT: int +AFX_IDB_CHECKLISTBOX_95: int +AFX_IDR_PREVIEW_ACCEL: int +AFX_IDI_STD_MDIFRAME: int +AFX_IDI_STD_FRAME: int +AFX_IDC_FONTPROP: int +AFX_IDC_FONTNAMES: int +AFX_IDC_FONTSTYLES: int +AFX_IDC_FONTSIZES: int +AFX_IDC_STRIKEOUT: int +AFX_IDC_UNDERLINE: int +AFX_IDC_SAMPLEBOX: int +AFX_IDC_COLOR_BLACK: int +AFX_IDC_COLOR_WHITE: int +AFX_IDC_COLOR_RED: int +AFX_IDC_COLOR_GREEN: int +AFX_IDC_COLOR_BLUE: int +AFX_IDC_COLOR_YELLOW: int +AFX_IDC_COLOR_MAGENTA: int +AFX_IDC_COLOR_CYAN: int +AFX_IDC_COLOR_GRAY: int +AFX_IDC_COLOR_LIGHTGRAY: int +AFX_IDC_COLOR_DARKRED: int +AFX_IDC_COLOR_DARKGREEN: int +AFX_IDC_COLOR_DARKBLUE: int +AFX_IDC_COLOR_LIGHTBROWN: int +AFX_IDC_COLOR_DARKMAGENTA: int +AFX_IDC_COLOR_DARKCYAN: int +AFX_IDC_COLORPROP: int +AFX_IDC_SYSTEMCOLORS: int +AFX_IDC_PROPNAME: int +AFX_IDC_PICTURE: int +AFX_IDC_BROWSE: int +AFX_IDC_CLEAR: int +AFX_IDD_PROPPAGE_COLOR: int +AFX_IDD_PROPPAGE_FONT: int +AFX_IDD_PROPPAGE_PICTURE: int +AFX_IDB_TRUETYPE: int +AFX_IDS_PROPPAGE_UNKNOWN: int +AFX_IDS_COLOR_DESKTOP: int +AFX_IDS_COLOR_APPWORKSPACE: int +AFX_IDS_COLOR_WNDBACKGND: int +AFX_IDS_COLOR_WNDTEXT: int +AFX_IDS_COLOR_MENUBAR: int +AFX_IDS_COLOR_MENUTEXT: int +AFX_IDS_COLOR_ACTIVEBAR: int +AFX_IDS_COLOR_INACTIVEBAR: int +AFX_IDS_COLOR_ACTIVETEXT: int +AFX_IDS_COLOR_INACTIVETEXT: int +AFX_IDS_COLOR_ACTIVEBORDER: int +AFX_IDS_COLOR_INACTIVEBORDER: int +AFX_IDS_COLOR_WNDFRAME: int +AFX_IDS_COLOR_SCROLLBARS: int +AFX_IDS_COLOR_BTNFACE: int +AFX_IDS_COLOR_BTNSHADOW: int +AFX_IDS_COLOR_BTNTEXT: int +AFX_IDS_COLOR_BTNHIGHLIGHT: int +AFX_IDS_COLOR_DISABLEDTEXT: int +AFX_IDS_COLOR_HIGHLIGHT: int +AFX_IDS_COLOR_HIGHLIGHTTEXT: int +AFX_IDS_REGULAR: int +AFX_IDS_BOLD: int +AFX_IDS_ITALIC: int +AFX_IDS_BOLDITALIC: int +AFX_IDS_SAMPLETEXT: int +AFX_IDS_DISPLAYSTRING_FONT: int +AFX_IDS_DISPLAYSTRING_COLOR: int +AFX_IDS_DISPLAYSTRING_PICTURE: int +AFX_IDS_PICTUREFILTER: int +AFX_IDS_PICTYPE_UNKNOWN: int +AFX_IDS_PICTYPE_NONE: int +AFX_IDS_PICTYPE_BITMAP: int +AFX_IDS_PICTYPE_METAFILE: int +AFX_IDS_PICTYPE_ICON: int +AFX_IDS_COLOR_PPG: int +AFX_IDS_COLOR_PPG_CAPTION: int +AFX_IDS_FONT_PPG: int +AFX_IDS_FONT_PPG_CAPTION: int +AFX_IDS_PICTURE_PPG: int +AFX_IDS_PICTURE_PPG_CAPTION: int +AFX_IDS_PICTUREBROWSETITLE: int 
+AFX_IDS_BORDERSTYLE_0: int +AFX_IDS_BORDERSTYLE_1: int +AFX_IDS_VERB_EDIT: int +AFX_IDS_VERB_PROPERTIES: int +AFX_IDP_PICTURECANTOPEN: int +AFX_IDP_PICTURECANTLOAD: int +AFX_IDP_PICTURETOOLARGE: int +AFX_IDP_PICTUREREADFAILED: int +AFX_IDP_E_ILLEGALFUNCTIONCALL: int +AFX_IDP_E_OVERFLOW: int +AFX_IDP_E_OUTOFMEMORY: int +AFX_IDP_E_DIVISIONBYZERO: int +AFX_IDP_E_OUTOFSTRINGSPACE: int +AFX_IDP_E_OUTOFSTACKSPACE: int +AFX_IDP_E_BADFILENAMEORNUMBER: int +AFX_IDP_E_FILENOTFOUND: int +AFX_IDP_E_BADFILEMODE: int +AFX_IDP_E_FILEALREADYOPEN: int +AFX_IDP_E_DEVICEIOERROR: int +AFX_IDP_E_FILEALREADYEXISTS: int +AFX_IDP_E_BADRECORDLENGTH: int +AFX_IDP_E_DISKFULL: int +AFX_IDP_E_BADRECORDNUMBER: int +AFX_IDP_E_BADFILENAME: int +AFX_IDP_E_TOOMANYFILES: int +AFX_IDP_E_DEVICEUNAVAILABLE: int +AFX_IDP_E_PERMISSIONDENIED: int +AFX_IDP_E_DISKNOTREADY: int +AFX_IDP_E_PATHFILEACCESSERROR: int +AFX_IDP_E_PATHNOTFOUND: int +AFX_IDP_E_INVALIDPATTERNSTRING: int +AFX_IDP_E_INVALIDUSEOFNULL: int +AFX_IDP_E_INVALIDFILEFORMAT: int +AFX_IDP_E_INVALIDPROPERTYVALUE: int +AFX_IDP_E_INVALIDPROPERTYARRAYINDEX: int +AFX_IDP_E_SETNOTSUPPORTEDATRUNTIME: int +AFX_IDP_E_SETNOTSUPPORTED: int +AFX_IDP_E_NEEDPROPERTYARRAYINDEX: int +AFX_IDP_E_SETNOTPERMITTED: int +AFX_IDP_E_GETNOTSUPPORTEDATRUNTIME: int +AFX_IDP_E_GETNOTSUPPORTED: int +AFX_IDP_E_PROPERTYNOTFOUND: int +AFX_IDP_E_INVALIDCLIPBOARDFORMAT: int +AFX_IDP_E_INVALIDPICTURE: int +AFX_IDP_E_PRINTERERROR: int +AFX_IDP_E_CANTSAVEFILETOTEMP: int +AFX_IDP_E_SEARCHTEXTNOTFOUND: int +AFX_IDP_E_REPLACEMENTSTOOLONG: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/commctrl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/commctrl.pyi new file mode 100644 index 00000000..d1e76bcb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/commctrl.pyi @@ -0,0 +1,1522 @@ +from _typeshed import Incomplete + +WM_USER: int +ICC_LISTVIEW_CLASSES: int +ICC_TREEVIEW_CLASSES: int +ICC_BAR_CLASSES: int +ICC_TAB_CLASSES: int +ICC_UPDOWN_CLASS: int +ICC_PROGRESS_CLASS: int +ICC_HOTKEY_CLASS: int +ICC_ANIMATE_CLASS: int +ICC_WIN95_CLASSES: int +ICC_DATE_CLASSES: int +ICC_USEREX_CLASSES: int +ICC_COOL_CLASSES: int +ICC_INTERNET_CLASSES: int +ICC_PAGESCROLLER_CLASS: int +ICC_NATIVEFNTCTL_CLASS: int +ODT_HEADER: int +ODT_TAB: int +ODT_LISTVIEW: int +PY_0U: int +NM_FIRST: int +NM_LAST: Incomplete +LVN_FIRST: Incomplete +LVN_LAST: Incomplete +HDN_FIRST: Incomplete +HDN_LAST: Incomplete +TVN_FIRST: Incomplete +TVN_LAST: Incomplete +TTN_FIRST: Incomplete +TTN_LAST: Incomplete +TCN_FIRST: Incomplete +TCN_LAST: Incomplete +CDN_FIRST: Incomplete +CDN_LAST: Incomplete +TBN_FIRST: Incomplete +TBN_LAST: Incomplete +UDN_FIRST: Incomplete +UDN_LAST: Incomplete +MCN_FIRST: Incomplete +MCN_LAST: Incomplete +DTN_FIRST: Incomplete +DTN_LAST: Incomplete +CBEN_FIRST: Incomplete +CBEN_LAST: Incomplete +RBN_FIRST: Incomplete +RBN_LAST: Incomplete +IPN_FIRST: Incomplete +IPN_LAST: Incomplete +SBN_FIRST: Incomplete +SBN_LAST: Incomplete +PGN_FIRST: Incomplete +PGN_LAST: Incomplete +LVM_FIRST: int +TV_FIRST: int +HDM_FIRST: int +TCM_FIRST: int +PGM_FIRST: int +CCM_FIRST: int +CCM_SETBKCOLOR: Incomplete +CCM_SETCOLORSCHEME: Incomplete +CCM_GETCOLORSCHEME: Incomplete +CCM_GETDROPTARGET: Incomplete +CCM_SETUNICODEFORMAT: Incomplete +CCM_GETUNICODEFORMAT: Incomplete +INFOTIPSIZE: int +NM_OUTOFMEMORY: Incomplete +NM_CLICK: Incomplete +NM_DBLCLK: Incomplete +NM_RETURN: 
Incomplete +NM_RCLICK: Incomplete +NM_RDBLCLK: Incomplete +NM_SETFOCUS: Incomplete +NM_KILLFOCUS: Incomplete +NM_CUSTOMDRAW: Incomplete +NM_HOVER: Incomplete +NM_NCHITTEST: Incomplete +NM_KEYDOWN: Incomplete +NM_RELEASEDCAPTURE: Incomplete +NM_SETCURSOR: Incomplete +NM_CHAR: Incomplete +MSGF_COMMCTRL_BEGINDRAG: int +MSGF_COMMCTRL_SIZEHEADER: int +MSGF_COMMCTRL_DRAGSELECT: int +MSGF_COMMCTRL_TOOLBARCUST: int +CDRF_DODEFAULT: int +CDRF_NEWFONT: int +CDRF_SKIPDEFAULT: int +CDRF_NOTIFYPOSTPAINT: int +CDRF_NOTIFYITEMDRAW: int +CDRF_NOTIFYSUBITEMDRAW: int +CDRF_NOTIFYPOSTERASE: int +CDDS_PREPAINT: int +CDDS_POSTPAINT: int +CDDS_PREERASE: int +CDDS_POSTERASE: int +CDDS_ITEM: int +CDDS_ITEMPREPAINT: Incomplete +CDDS_ITEMPOSTPAINT: Incomplete +CDDS_ITEMPREERASE: Incomplete +CDDS_ITEMPOSTERASE: Incomplete +CDDS_SUBITEM: int +CDIS_SELECTED: int +CDIS_GRAYED: int +CDIS_DISABLED: int +CDIS_CHECKED: int +CDIS_FOCUS: int +CDIS_DEFAULT: int +CDIS_HOT: int +CDIS_MARKED: int +CDIS_INDETERMINATE: int +CLR_NONE: int +CLR_DEFAULT: int +ILC_MASK: int +ILC_COLOR: int +ILC_COLORDDB: int +ILC_COLOR4: int +ILC_COLOR8: int +ILC_COLOR16: int +ILC_COLOR24: int +ILC_COLOR32: int +ILC_PALETTE: int +ILD_NORMAL: int +ILD_TRANSPARENT: int +ILD_MASK: int +ILD_IMAGE: int +ILD_ROP: int +ILD_BLEND25: int +ILD_BLEND50: int +ILD_OVERLAYMASK: int +ILD_SELECTED: int +ILD_FOCUS: int +ILD_BLEND: int +CLR_HILIGHT: int +ILCF_MOVE: int +ILCF_SWAP: int +WC_HEADERA: str +WC_HEADER: str +HDS_HORZ: int +HDS_BUTTONS: int +HDS_HOTTRACK: int +HDS_HIDDEN: int +HDS_DRAGDROP: int +HDS_FULLDRAG: int +HDI_WIDTH: int +HDI_HEIGHT: int +HDI_TEXT: int +HDI_FORMAT: int +HDI_LPARAM: int +HDI_BITMAP: int +HDI_IMAGE: int +HDI_DI_SETITEM: int +HDI_ORDER: int +HDF_LEFT: int +HDF_RIGHT: int +HDF_CENTER: int +HDF_JUSTIFYMASK: int +HDF_RTLREADING: int +HDF_OWNERDRAW: int +HDF_STRING: int +HDF_BITMAP: int +HDF_BITMAP_ON_RIGHT: int +HDF_IMAGE: int +HDM_GETITEMCOUNT: Incomplete +HDM_INSERTITEMA: Incomplete +HDM_INSERTITEMW: Incomplete +HDM_INSERTITEM: Incomplete +HDM_DELETEITEM: Incomplete +HDM_GETITEMA: Incomplete +HDM_GETITEMW: Incomplete +HDM_GETITEM: Incomplete +HDM_SETITEMA: Incomplete +HDM_SETITEMW: Incomplete +HDM_SETITEM: Incomplete +HDM_LAYOUT: Incomplete +HHT_NOWHERE: int +HHT_ONHEADER: int +HHT_ONDIVIDER: int +HHT_ONDIVOPEN: int +HHT_ABOVE: int +HHT_BELOW: int +HHT_TORIGHT: int +HHT_TOLEFT: int +HDM_HITTEST: Incomplete +HDM_GETITEMRECT: Incomplete +HDM_SETIMAGELIST: Incomplete +HDM_GETIMAGELIST: Incomplete +HDM_ORDERTOINDEX: Incomplete +HDM_CREATEDRAGIMAGE: Incomplete +HDM_GETORDERARRAY: Incomplete +HDM_SETORDERARRAY: Incomplete +HDM_SETHOTDIVIDER: Incomplete +HDM_SETUNICODEFORMAT: Incomplete +HDM_GETUNICODEFORMAT: Incomplete +HDN_ITEMCHANGINGA: Incomplete +HDN_ITEMCHANGINGW: Incomplete +HDN_ITEMCHANGEDA: Incomplete +HDN_ITEMCHANGEDW: Incomplete +HDN_ITEMCLICKA: Incomplete +HDN_ITEMCLICKW: Incomplete +HDN_ITEMDBLCLICKA: Incomplete +HDN_ITEMDBLCLICKW: Incomplete +HDN_DIVIDERDBLCLICKA: Incomplete +HDN_DIVIDERDBLCLICKW: Incomplete +HDN_BEGINTRACKA: Incomplete +HDN_BEGINTRACKW: Incomplete +HDN_ENDTRACKA: Incomplete +HDN_ENDTRACKW: Incomplete +HDN_TRACKA: Incomplete +HDN_TRACKW: Incomplete +HDN_GETDISPINFOA: Incomplete +HDN_GETDISPINFOW: Incomplete +HDN_BEGINDRAG: Incomplete +HDN_ENDDRAG: Incomplete +HDN_ITEMCHANGING: Incomplete +HDN_ITEMCHANGED: Incomplete +HDN_ITEMCLICK: Incomplete +HDN_ITEMDBLCLICK: Incomplete +HDN_DIVIDERDBLCLICK: Incomplete +HDN_BEGINTRACK: Incomplete +HDN_ENDTRACK: Incomplete +HDN_TRACK: Incomplete +HDN_GETDISPINFO: Incomplete 
+TOOLBARCLASSNAMEA: str +TOOLBARCLASSNAME: str +CMB_MASKED: int +TBSTATE_CHECKED: int +TBSTATE_PRESSED: int +TBSTATE_ENABLED: int +TBSTATE_HIDDEN: int +TBSTATE_INDETERMINATE: int +TBSTATE_WRAP: int +TBSTATE_ELLIPSES: int +TBSTATE_MARKED: int +TBSTYLE_BUTTON: int +TBSTYLE_SEP: int +TBSTYLE_CHECK: int +TBSTYLE_GROUP: int +TBSTYLE_CHECKGROUP: Incomplete +TBSTYLE_DROPDOWN: int +TBSTYLE_AUTOSIZE: int +TBSTYLE_NOPREFIX: int +TBSTYLE_TOOLTIPS: int +TBSTYLE_WRAPABLE: int +TBSTYLE_ALTDRAG: int +TBSTYLE_FLAT: int +TBSTYLE_LIST: int +TBSTYLE_CUSTOMERASE: int +TBSTYLE_REGISTERDROP: int +TBSTYLE_TRANSPARENT: int +TBSTYLE_EX_DRAWDDARROWS: int +BTNS_BUTTON: int +BTNS_SEP: int +BTNS_CHECK: int +BTNS_GROUP: int +BTNS_CHECKGROUP: Incomplete +BTNS_DROPDOWN: int +BTNS_AUTOSIZE: int +BTNS_NOPREFIX: int +BTNS_SHOWTEXT: int +BTNS_WHOLEDROPDOWN: int +TBCDRF_NOEDGES: int +TBCDRF_HILITEHOTTRACK: int +TBCDRF_NOOFFSET: int +TBCDRF_NOMARK: int +TBCDRF_NOETCHEDEFFECT: int +TB_ENABLEBUTTON: Incomplete +TB_CHECKBUTTON: Incomplete +TB_PRESSBUTTON: Incomplete +TB_HIDEBUTTON: Incomplete +TB_INDETERMINATE: Incomplete +TB_MARKBUTTON: Incomplete +TB_ISBUTTONENABLED: Incomplete +TB_ISBUTTONCHECKED: Incomplete +TB_ISBUTTONPRESSED: Incomplete +TB_ISBUTTONHIDDEN: Incomplete +TB_ISBUTTONINDETERMINATE: Incomplete +TB_ISBUTTONHIGHLIGHTED: Incomplete +TB_SETSTATE: Incomplete +TB_GETSTATE: Incomplete +TB_ADDBITMAP: Incomplete +HINST_COMMCTRL: int +IDB_STD_SMALL_COLOR: int +IDB_STD_LARGE_COLOR: int +IDB_VIEW_SMALL_COLOR: int +IDB_VIEW_LARGE_COLOR: int +IDB_HIST_SMALL_COLOR: int +IDB_HIST_LARGE_COLOR: int +STD_CUT: int +STD_COPY: int +STD_PASTE: int +STD_UNDO: int +STD_REDOW: int +STD_DELETE: int +STD_FILENEW: int +STD_FILEOPEN: int +STD_FILESAVE: int +STD_PRINTPRE: int +STD_PROPERTIES: int +STD_HELP: int +STD_FIND: int +STD_REPLACE: int +STD_PRINT: int +VIEW_LARGEICONS: int +VIEW_SMALLICONS: int +VIEW_LIST: int +VIEW_DETAILS: int +VIEW_SORTNAME: int +VIEW_SORTSIZE: int +VIEW_SORTDATE: int +VIEW_SORTTYPE: int +VIEW_PARENTFOLDER: int +VIEW_NETCONNECT: int +VIEW_NETDISCONNECT: int +VIEW_NEWFOLDER: int +VIEW_VIEWMENU: int +HIST_BACK: int +HIST_FORWARD: int +HIST_FAVORITES: int +HIST_ADDTOFAVORITES: int +HIST_VIEWTREE: int +TB_ADDBUTTONSA: Incomplete +TB_INSERTBUTTONA: Incomplete +TB_ADDBUTTONS: Incomplete +TB_INSERTBUTTON: Incomplete +TB_DELETEBUTTON: Incomplete +TB_GETBUTTON: Incomplete +TB_BUTTONCOUNT: Incomplete +TB_COMMANDTOINDEX: Incomplete +TB_SAVERESTOREA: Incomplete +TB_SAVERESTOREW: Incomplete +TB_CUSTOMIZE: Incomplete +TB_ADDSTRINGA: Incomplete +TB_ADDSTRINGW: Incomplete +TB_GETITEMRECT: Incomplete +TB_BUTTONSTRUCTSIZE: Incomplete +TB_SETBUTTONSIZE: Incomplete +TB_SETBITMAPSIZE: Incomplete +TB_AUTOSIZE: Incomplete +TB_GETTOOLTIPS: Incomplete +TB_SETTOOLTIPS: Incomplete +TB_SETPARENT: Incomplete +TB_SETROWS: Incomplete +TB_GETROWS: Incomplete +TB_SETCMDID: Incomplete +TB_CHANGEBITMAP: Incomplete +TB_GETBITMAP: Incomplete +TB_GETBUTTONTEXTA: Incomplete +TB_GETBUTTONTEXTW: Incomplete +TB_REPLACEBITMAP: Incomplete +TB_SETINDENT: Incomplete +TB_SETIMAGELIST: Incomplete +TB_GETIMAGELIST: Incomplete +TB_LOADIMAGES: Incomplete +TB_GETRECT: Incomplete +TB_SETHOTIMAGELIST: Incomplete +TB_GETHOTIMAGELIST: Incomplete +TB_SETDISABLEDIMAGELIST: Incomplete +TB_GETDISABLEDIMAGELIST: Incomplete +TB_SETSTYLE: Incomplete +TB_GETSTYLE: Incomplete +TB_GETBUTTONSIZE: Incomplete +TB_SETBUTTONWIDTH: Incomplete +TB_SETMAXTEXTROWS: Incomplete +TB_GETTEXTROWS: Incomplete +TB_GETBUTTONTEXT: Incomplete +TB_SAVERESTORE: Incomplete +TB_ADDSTRING: Incomplete 
+TB_GETOBJECT: Incomplete +TB_GETHOTITEM: Incomplete +TB_SETHOTITEM: Incomplete +TB_SETANCHORHIGHLIGHT: Incomplete +TB_GETANCHORHIGHLIGHT: Incomplete +TB_MAPACCELERATORA: Incomplete +TBIMHT_AFTER: int +TBIMHT_BACKGROUND: int +TB_GETINSERTMARK: Incomplete +TB_SETINSERTMARK: Incomplete +TB_INSERTMARKHITTEST: Incomplete +TB_MOVEBUTTON: Incomplete +TB_GETMAXSIZE: Incomplete +TB_SETEXTENDEDSTYLE: Incomplete +TB_GETEXTENDEDSTYLE: Incomplete +TB_GETPADDING: Incomplete +TB_SETPADDING: Incomplete +TB_SETINSERTMARKCOLOR: Incomplete +TB_GETINSERTMARKCOLOR: Incomplete +TB_SETCOLORSCHEME: Incomplete +TB_GETCOLORSCHEME: Incomplete +TB_SETUNICODEFORMAT: Incomplete +TB_GETUNICODEFORMAT: Incomplete +TB_MAPACCELERATORW: Incomplete +TB_MAPACCELERATOR: Incomplete +TBBF_LARGE: int +TB_GETBITMAPFLAGS: Incomplete +TBIF_IMAGE: int +TBIF_TEXT: int +TBIF_STATE: int +TBIF_STYLE: int +TBIF_LPARAM: int +TBIF_COMMAND: int +TBIF_SIZE: int +TB_GETBUTTONINFOW: Incomplete +TB_SETBUTTONINFOW: Incomplete +TB_GETBUTTONINFOA: Incomplete +TB_SETBUTTONINFOA: Incomplete +TB_INSERTBUTTONW: Incomplete +TB_ADDBUTTONSW: Incomplete +TB_HITTEST: Incomplete +TB_SETDRAWTEXTFLAGS: Incomplete +TBN_GETBUTTONINFOA: Incomplete +TBN_GETBUTTONINFOW: Incomplete +TBN_BEGINDRAG: Incomplete +TBN_ENDDRAG: Incomplete +TBN_BEGINADJUST: Incomplete +TBN_ENDADJUST: Incomplete +TBN_RESET: Incomplete +TBN_QUERYINSERT: Incomplete +TBN_QUERYDELETE: Incomplete +TBN_TOOLBARCHANGE: Incomplete +TBN_CUSTHELP: Incomplete +TBN_DROPDOWN: Incomplete +TBN_GETOBJECT: Incomplete +HICF_OTHER: int +HICF_MOUSE: int +HICF_ARROWKEYS: int +HICF_ACCELERATOR: int +HICF_DUPACCEL: int +HICF_ENTERING: int +HICF_LEAVING: int +HICF_RESELECT: int +TBN_HOTITEMCHANGE: Incomplete +TBN_DRAGOUT: Incomplete +TBN_DELETINGBUTTON: Incomplete +TBN_GETDISPINFOA: Incomplete +TBN_GETDISPINFOW: Incomplete +TBN_GETINFOTIPA: Incomplete +TBN_GETINFOTIPW: Incomplete +TBN_GETINFOTIP: Incomplete +TBNF_IMAGE: int +TBNF_TEXT: int +TBNF_DI_SETITEM: int +TBN_GETDISPINFO: Incomplete +TBDDRET_DEFAULT: int +TBDDRET_NODEFAULT: int +TBDDRET_TREATPRESSED: int +TBN_GETBUTTONINFO: Incomplete +REBARCLASSNAMEA: str +REBARCLASSNAME: str +RBIM_IMAGELIST: int +RBS_TOOLTIPS: int +RBS_VARHEIGHT: int +RBS_BANDBORDERS: int +RBS_FIXEDORDER: int +RBS_REGISTERDROP: int +RBS_AUTOSIZE: int +RBS_VERTICALGRIPPER: int +RBS_DBLCLKTOGGLE: int +RBBS_BREAK: int +RBBS_FIXEDSIZE: int +RBBS_CHILDEDGE: int +RBBS_HIDDEN: int +RBBS_NOVERT: int +RBBS_FIXEDBMP: int +RBBS_VARIABLEHEIGHT: int +RBBS_GRIPPERALWAYS: int +RBBS_NOGRIPPER: int +RBBIM_STYLE: int +RBBIM_COLORS: int +RBBIM_TEXT: int +RBBIM_IMAGE: int +RBBIM_CHILD: int +RBBIM_CHILDSIZE: int +RBBIM_SIZE: int +RBBIM_BACKGROUND: int +RBBIM_ID: int +RBBIM_IDEALSIZE: int +RBBIM_LPARAM: int +RB_INSERTBANDA: Incomplete +RB_DELETEBAND: Incomplete +RB_GETBARINFO: Incomplete +RB_SETBARINFO: Incomplete +RB_SETBANDINFOA: Incomplete +RB_SETPARENT: Incomplete +RB_HITTEST: Incomplete +RB_GETRECT: Incomplete +RB_INSERTBANDW: Incomplete +RB_SETBANDINFOW: Incomplete +RB_GETBANDCOUNT: Incomplete +RB_GETROWCOUNT: Incomplete +RB_GETROWHEIGHT: Incomplete +RB_IDTOINDEX: Incomplete +RB_GETTOOLTIPS: Incomplete +RB_SETTOOLTIPS: Incomplete +RB_SETBKCOLOR: Incomplete +RB_GETBKCOLOR: Incomplete +RB_SETTEXTCOLOR: Incomplete +RB_GETTEXTCOLOR: Incomplete +RB_SIZETORECT: Incomplete +RB_SETCOLORSCHEME: Incomplete +RB_GETCOLORSCHEME: Incomplete +RB_INSERTBAND: Incomplete +RB_SETBANDINFO: Incomplete +RB_BEGINDRAG: Incomplete +RB_ENDDRAG: Incomplete +RB_DRAGMOVE: Incomplete +RB_GETBARHEIGHT: Incomplete +RB_GETBANDINFOW: 
Incomplete +RB_GETBANDINFOA: Incomplete +RB_GETBANDINFO: Incomplete +RB_MINIMIZEBAND: Incomplete +RB_MAXIMIZEBAND: Incomplete +RB_GETDROPTARGET: Incomplete +RB_GETBANDBORDERS: Incomplete +RB_SHOWBAND: Incomplete +RB_SETPALETTE: Incomplete +RB_GETPALETTE: Incomplete +RB_MOVEBAND: Incomplete +RB_SETUNICODEFORMAT: Incomplete +RB_GETUNICODEFORMAT: Incomplete +RBN_HEIGHTCHANGE: Incomplete +RBN_GETOBJECT: Incomplete +RBN_LAYOUTCHANGED: Incomplete +RBN_AUTOSIZE: Incomplete +RBN_BEGINDRAG: Incomplete +RBN_ENDDRAG: Incomplete +RBN_DELETINGBAND: Incomplete +RBN_DELETEDBAND: Incomplete +RBN_CHILDSIZE: Incomplete +RBNM_ID: int +RBNM_STYLE: int +RBNM_LPARAM: int +RBHT_NOWHERE: int +RBHT_CAPTION: int +RBHT_CLIENT: int +RBHT_GRABBER: int +TOOLTIPS_CLASSA: str +TOOLTIPS_CLASS: str +TTS_ALWAYSTIP: int +TTS_NOPREFIX: int +TTF_IDISHWND: int +TTF_CENTERTIP: int +TTF_RTLREADING: int +TTF_SUBCLASS: int +TTF_TRACK: int +TTF_ABSOLUTE: int +TTF_TRANSPARENT: int +TTF_DI_SETITEM: int +TTDT_AUTOMATIC: int +TTDT_RESHOW: int +TTDT_AUTOPOP: int +TTDT_INITIAL: int +TTM_ACTIVATE: Incomplete +TTM_SETDELAYTIME: Incomplete +TTM_ADDTOOLA: Incomplete +TTM_ADDTOOLW: Incomplete +TTM_DELTOOLA: Incomplete +TTM_DELTOOLW: Incomplete +TTM_NEWTOOLRECTA: Incomplete +TTM_NEWTOOLRECTW: Incomplete +TTM_RELAYEVENT: Incomplete +TTM_GETTOOLINFOA: Incomplete +TTM_GETTOOLINFOW: Incomplete +TTM_SETTOOLINFOA: Incomplete +TTM_SETTOOLINFOW: Incomplete +TTM_HITTESTA: Incomplete +TTM_HITTESTW: Incomplete +TTM_GETTEXTA: Incomplete +TTM_GETTEXTW: Incomplete +TTM_UPDATETIPTEXTA: Incomplete +TTM_UPDATETIPTEXTW: Incomplete +TTM_GETTOOLCOUNT: Incomplete +TTM_ENUMTOOLSA: Incomplete +TTM_ENUMTOOLSW: Incomplete +TTM_GETCURRENTTOOLA: Incomplete +TTM_GETCURRENTTOOLW: Incomplete +TTM_WINDOWFROMPOINT: Incomplete +TTM_TRACKACTIVATE: Incomplete +TTM_TRACKPOSITION: Incomplete +TTM_SETTIPBKCOLOR: Incomplete +TTM_SETTIPTEXTCOLOR: Incomplete +TTM_GETDELAYTIME: Incomplete +TTM_GETTIPBKCOLOR: Incomplete +TTM_GETTIPTEXTCOLOR: Incomplete +TTM_SETMAXTIPWIDTH: Incomplete +TTM_GETMAXTIPWIDTH: Incomplete +TTM_SETMARGIN: Incomplete +TTM_GETMARGIN: Incomplete +TTM_POP: Incomplete +TTM_UPDATE: Incomplete +TTM_ADDTOOL: Incomplete +TTM_DELTOOL: Incomplete +TTM_NEWTOOLRECT: Incomplete +TTM_GETTOOLINFO: Incomplete +TTM_SETTOOLINFO: Incomplete +TTM_HITTEST: Incomplete +TTM_GETTEXT: Incomplete +TTM_UPDATETIPTEXT: Incomplete +TTM_ENUMTOOLS: Incomplete +TTM_GETCURRENTTOOL: Incomplete +TTN_GETDISPINFOA: Incomplete +TTN_GETDISPINFOW: Incomplete +TTN_SHOW: Incomplete +TTN_POP: Incomplete +TTN_GETDISPINFO: Incomplete +TTN_NEEDTEXT: Incomplete +TTN_NEEDTEXTA: Incomplete +TTN_NEEDTEXTW: Incomplete +SBARS_SIZEGRIP: int +SBARS_TOOLTIPS: int +STATUSCLASSNAMEA: str +STATUSCLASSNAME: str +SB_SETTEXTA: Incomplete +SB_SETTEXTW: Incomplete +SB_GETTEXTA: Incomplete +SB_GETTEXTW: Incomplete +SB_GETTEXTLENGTHA: Incomplete +SB_GETTEXTLENGTHW: Incomplete +SB_GETTEXT: Incomplete +SB_SETTEXT: Incomplete +SB_GETTEXTLENGTH: Incomplete +SB_SETPARTS: Incomplete +SB_GETPARTS: Incomplete +SB_GETBORDERS: Incomplete +SB_SETMINHEIGHT: Incomplete +SB_SIMPLE: Incomplete +SB_GETRECT: Incomplete +SB_ISSIMPLE: Incomplete +SB_SETICON: Incomplete +SB_SETTIPTEXTA: Incomplete +SB_SETTIPTEXTW: Incomplete +SB_GETTIPTEXTA: Incomplete +SB_GETTIPTEXTW: Incomplete +SB_GETICON: Incomplete +SB_SETTIPTEXT: Incomplete +SB_GETTIPTEXT: Incomplete +SB_SETUNICODEFORMAT: Incomplete +SB_GETUNICODEFORMAT: Incomplete +SBT_OWNERDRAW: int +SBT_NOBORDERS: int +SBT_POPOUT: int +SBT_RTLREADING: int +SBT_NOTABPARSING: int +SBT_TOOLTIPS: int 
+SB_SETBKCOLOR: Incomplete +SBN_SIMPLEMODECHANGE: Incomplete +TRACKBAR_CLASSA: str +TRACKBAR_CLASS: str +TBS_AUTOTICKS: int +TBS_VERT: int +TBS_HORZ: int +TBS_TOP: int +TBS_BOTTOM: int +TBS_LEFT: int +TBS_RIGHT: int +TBS_BOTH: int +TBS_NOTICKS: int +TBS_ENABLESELRANGE: int +TBS_FIXEDLENGTH: int +TBS_NOTHUMB: int +TBS_TOOLTIPS: int +TBM_GETPOS: int +TBM_GETRANGEMIN: Incomplete +TBM_GETRANGEMAX: Incomplete +TBM_GETTIC: Incomplete +TBM_SETTIC: Incomplete +TBM_SETPOS: Incomplete +TBM_SETRANGE: Incomplete +TBM_SETRANGEMIN: Incomplete +TBM_SETRANGEMAX: Incomplete +TBM_CLEARTICS: Incomplete +TBM_SETSEL: Incomplete +TBM_SETSELSTART: Incomplete +TBM_SETSELEND: Incomplete +TBM_GETPTICS: Incomplete +TBM_GETTICPOS: Incomplete +TBM_GETNUMTICS: Incomplete +TBM_GETSELSTART: Incomplete +TBM_GETSELEND: Incomplete +TBM_CLEARSEL: Incomplete +TBM_SETTICFREQ: Incomplete +TBM_SETPAGESIZE: Incomplete +TBM_GETPAGESIZE: Incomplete +TBM_SETLINESIZE: Incomplete +TBM_GETLINESIZE: Incomplete +TBM_GETTHUMBRECT: Incomplete +TBM_GETCHANNELRECT: Incomplete +TBM_SETTHUMBLENGTH: Incomplete +TBM_GETTHUMBLENGTH: Incomplete +TBM_SETTOOLTIPS: Incomplete +TBM_GETTOOLTIPS: Incomplete +TBM_SETTIPSIDE: Incomplete +TBTS_TOP: int +TBTS_LEFT: int +TBTS_BOTTOM: int +TBTS_RIGHT: int +TBM_SETBUDDY: Incomplete +TBM_GETBUDDY: Incomplete +TBM_SETUNICODEFORMAT: Incomplete +TBM_GETUNICODEFORMAT: Incomplete +TB_LINEUP: int +TB_LINEDOWN: int +TB_PAGEUP: int +TB_PAGEDOWN: int +TB_THUMBPOSITION: int +TB_THUMBTRACK: int +TB_TOP: int +TB_BOTTOM: int +TB_ENDTRACK: int +TBCD_TICS: int +TBCD_THUMB: int +TBCD_CHANNEL: int +DL_BEGINDRAG: Incomplete +DL_DRAGGING: Incomplete +DL_DROPPED: Incomplete +DL_CANCELDRAG: Incomplete +DL_CURSORSET: int +DL_STOPCURSOR: int +DL_COPYCURSOR: int +DL_MOVECURSOR: int +DRAGLISTMSGSTRING: str +UPDOWN_CLASSA: str +UPDOWN_CLASS: str +UD_MAXVAL: int +UD_MINVAL: Incomplete +UDS_WRAP: int +UDS_SETBUDDYINT: int +UDS_ALIGNRIGHT: int +UDS_ALIGNLEFT: int +UDS_AUTOBUDDY: int +UDS_ARROWKEYS: int +UDS_HORZ: int +UDS_NOTHOUSANDS: int +UDS_HOTTRACK: int +UDM_SETRANGE: Incomplete +UDM_GETRANGE: Incomplete +UDM_SETPOS: Incomplete +UDM_GETPOS: Incomplete +UDM_SETBUDDY: Incomplete +UDM_GETBUDDY: Incomplete +UDM_SETACCEL: Incomplete +UDM_GETACCEL: Incomplete +UDM_SETBASE: Incomplete +UDM_GETBASE: Incomplete +UDM_SETRANGE32: Incomplete +UDM_GETRANGE32: Incomplete +UDM_SETUNICODEFORMAT: Incomplete +UDM_GETUNICODEFORMAT: Incomplete +UDN_DELTAPOS: Incomplete +PROGRESS_CLASSA: str +PROGRESS_CLASS: str +PBS_SMOOTH: int +PBS_VERTICAL: int +PBM_SETRANGE: Incomplete +PBM_SETPOS: Incomplete +PBM_DELTAPOS: Incomplete +PBM_SETSTEP: Incomplete +PBM_STEPIT: Incomplete +PBM_SETRANGE32: Incomplete +PBM_GETRANGE: Incomplete +PBM_GETPOS: Incomplete +PBM_SETBARCOLOR: Incomplete +PBM_SETBKCOLOR: Incomplete +HOTKEYF_SHIFT: int +HOTKEYF_CONTROL: int +HOTKEYF_ALT: int +HOTKEYF_EXT: int +HKCOMB_NONE: int +HKCOMB_S: int +HKCOMB_C: int +HKCOMB_A: int +HKCOMB_SC: int +HKCOMB_SA: int +HKCOMB_CA: int +HKCOMB_SCA: int +HKM_SETHOTKEY: Incomplete +HKM_GETHOTKEY: Incomplete +HKM_SETRULES: Incomplete +HOTKEY_CLASSA: str +HOTKEY_CLASS: str +CCS_TOP: int +CCS_NOMOVEY: int +CCS_BOTTOM: int +CCS_NORESIZE: int +CCS_NOPARENTALIGN: int +CCS_ADJUSTABLE: int +CCS_NODIVIDER: int +CCS_VERT: int +CCS_LEFT: Incomplete +CCS_RIGHT: Incomplete +CCS_NOMOVEX: Incomplete +WC_LISTVIEWA: str +WC_LISTVIEW: str +LVS_ICON: int +LVS_REPORT: int +LVS_SMALLICON: int +LVS_LIST: int +LVS_TYPEMASK: int +LVS_SINGLESEL: int +LVS_SHOWSELALWAYS: int +LVS_SORTASCENDING: int +LVS_SORTDESCENDING: int 
+LVS_SHAREIMAGELISTS: int +LVS_NOLABELWRAP: int +LVS_AUTOARRANGE: int +LVS_EDITLABELS: int +LVS_OWNERDATA: int +LVS_NOSCROLL: int +LVS_TYPESTYLEMASK: int +LVS_ALIGNTOP: int +LVS_ALIGNLEFT: int +LVS_ALIGNMASK: int +LVS_OWNERDRAWFIXED: int +LVS_NOCOLUMNHEADER: int +LVS_NOSORTHEADER: int +LVM_SETUNICODEFORMAT: Incomplete +LVM_GETUNICODEFORMAT: Incomplete +LVM_GETBKCOLOR: Incomplete +LVM_SETBKCOLOR: Incomplete +LVM_GETIMAGELIST: Incomplete +LVSIL_NORMAL: int +LVSIL_SMALL: int +LVSIL_STATE: int +LVM_SETIMAGELIST: Incomplete +LVM_GETITEMCOUNT: Incomplete +LVIF_TEXT: int +LVIF_IMAGE: int +LVIF_PARAM: int +LVIF_STATE: int +LVIF_INDENT: int +LVIF_NORECOMPUTE: int +LVIS_FOCUSED: int +LVIS_SELECTED: int +LVIS_CUT: int +LVIS_DROPHILITED: int +LVIS_ACTIVATING: int +LVIS_OVERLAYMASK: int +LVIS_STATEIMAGEMASK: int +I_INDENTCALLBACK: int +LPSTR_TEXTCALLBACKA: int +LPSTR_TEXTCALLBACK: int +I_IMAGECALLBACK: int +LVM_GETITEMA: Incomplete +LVM_GETITEMW: Incomplete +LVM_GETITEM: Incomplete +LVM_SETITEMA: Incomplete +LVM_SETITEMW: Incomplete +LVM_SETITEM: Incomplete +LVM_INSERTITEMA: Incomplete +LVM_INSERTITEMW: Incomplete +LVM_INSERTITEM: Incomplete +LVM_DELETEITEM: Incomplete +LVM_DELETEALLITEMS: Incomplete +LVM_GETCALLBACKMASK: Incomplete +LVM_SETCALLBACKMASK: Incomplete +LVNI_ALL: int +LVNI_FOCUSED: int +LVNI_SELECTED: int +LVNI_CUT: int +LVNI_DROPHILITED: int +LVNI_ABOVE: int +LVNI_BELOW: int +LVNI_TOLEFT: int +LVNI_TORIGHT: int +LVM_GETNEXTITEM: Incomplete +LVFI_PARAM: int +LVFI_STRING: int +LVFI_PARTIAL: int +LVFI_WRAP: int +LVFI_NEARESTXY: int +LVM_FINDITEMA: Incomplete +LVM_FINDITEMW: Incomplete +LVM_FINDITEM: Incomplete +LVIR_BOUNDS: int +LVIR_ICON: int +LVIR_LABEL: int +LVIR_SELECTBOUNDS: int +LVM_GETITEMRECT: Incomplete +LVM_SETITEMPOSITION: Incomplete +LVM_GETITEMPOSITION: Incomplete +LVM_GETSTRINGWIDTHA: Incomplete +LVM_GETSTRINGWIDTHW: Incomplete +LVM_GETSTRINGWIDTH: Incomplete +LVHT_NOWHERE: int +LVHT_ONITEMICON: int +LVHT_ONITEMLABEL: int +LVHT_ONITEMSTATEICON: int +LVHT_ONITEM: Incomplete +LVHT_ABOVE: int +LVHT_BELOW: int +LVHT_TORIGHT: int +LVHT_TOLEFT: int +LVM_HITTEST: Incomplete +LVM_ENSUREVISIBLE: Incomplete +LVM_SCROLL: Incomplete +LVM_REDRAWITEMS: Incomplete +LVA_DEFAULT: int +LVA_ALIGNLEFT: int +LVA_ALIGNTOP: int +LVA_SNAPTOGRID: int +LVM_ARRANGE: Incomplete +LVM_EDITLABELA: Incomplete +LVM_EDITLABELW: Incomplete +LVM_EDITLABEL: Incomplete +LVM_GETEDITCONTROL: Incomplete +LVCF_FMT: int +LVCF_WIDTH: int +LVCF_TEXT: int +LVCF_SUBITEM: int +LVCF_IMAGE: int +LVCF_ORDER: int +LVCFMT_LEFT: int +LVCFMT_RIGHT: int +LVCFMT_CENTER: int +LVCFMT_JUSTIFYMASK: int +LVCFMT_IMAGE: int +LVCFMT_BITMAP_ON_RIGHT: int +LVCFMT_COL_HAS_IMAGES: int +LVM_GETCOLUMNA: Incomplete +LVM_GETCOLUMNW: Incomplete +LVM_GETCOLUMN: Incomplete +LVM_SETCOLUMNA: Incomplete +LVM_SETCOLUMNW: Incomplete +LVM_SETCOLUMN: Incomplete +LVM_INSERTCOLUMNA: Incomplete +LVM_INSERTCOLUMNW: Incomplete +LVM_INSERTCOLUMN: Incomplete +LVM_DELETECOLUMN: Incomplete +LVM_GETCOLUMNWIDTH: Incomplete +LVSCW_AUTOSIZE: int +LVSCW_AUTOSIZE_USEHEADER: int +LVM_SETCOLUMNWIDTH: Incomplete +LVM_GETHEADER: Incomplete +LVM_CREATEDRAGIMAGE: Incomplete +LVM_GETVIEWRECT: Incomplete +LVM_GETTEXTCOLOR: Incomplete +LVM_SETTEXTCOLOR: Incomplete +LVM_GETTEXTBKCOLOR: Incomplete +LVM_SETTEXTBKCOLOR: Incomplete +LVM_GETTOPINDEX: Incomplete +LVM_GETCOUNTPERPAGE: Incomplete +LVM_GETORIGIN: Incomplete +LVM_UPDATE: Incomplete +LVM_SETITEMSTATE: Incomplete +LVM_GETITEMSTATE: Incomplete +LVM_GETITEMTEXTA: Incomplete +LVM_GETITEMTEXTW: Incomplete +LVM_GETITEMTEXT: 
Incomplete +LVM_SETITEMTEXTA: Incomplete +LVM_SETITEMTEXTW: Incomplete +LVM_SETITEMTEXT: Incomplete +LVSICF_NOINVALIDATEALL: int +LVSICF_NOSCROLL: int +LVM_SETITEMCOUNT: Incomplete +LVM_SORTITEMS: Incomplete +LVM_SETITEMPOSITION32: Incomplete +LVM_GETSELECTEDCOUNT: Incomplete +LVM_GETITEMSPACING: Incomplete +LVM_GETISEARCHSTRINGA: Incomplete +LVM_GETISEARCHSTRINGW: Incomplete +LVM_GETISEARCHSTRING: Incomplete +LVM_SETICONSPACING: Incomplete +LVM_SETEXTENDEDLISTVIEWSTYLE: Incomplete +LVM_GETEXTENDEDLISTVIEWSTYLE: Incomplete +LVS_EX_GRIDLINES: int +LVS_EX_SUBITEMIMAGES: int +LVS_EX_CHECKBOXES: int +LVS_EX_TRACKSELECT: int +LVS_EX_HEADERDRAGDROP: int +LVS_EX_FULLROWSELECT: int +LVS_EX_ONECLICKACTIVATE: int +LVS_EX_TWOCLICKACTIVATE: int +LVS_EX_FLATSB: int +LVS_EX_REGIONAL: int +LVS_EX_INFOTIP: int +LVS_EX_UNDERLINEHOT: int +LVS_EX_UNDERLINECOLD: int +LVS_EX_MULTIWORKAREAS: int +LVM_GETSUBITEMRECT: Incomplete +LVM_SUBITEMHITTEST: Incomplete +LVM_SETCOLUMNORDERARRAY: Incomplete +LVM_GETCOLUMNORDERARRAY: Incomplete +LVM_SETHOTITEM: Incomplete +LVM_GETHOTITEM: Incomplete +LVM_SETHOTCURSOR: Incomplete +LVM_GETHOTCURSOR: Incomplete +LVM_APPROXIMATEVIEWRECT: Incomplete +LV_MAX_WORKAREAS: int +LVM_SETWORKAREAS: Incomplete +LVM_GETWORKAREAS: Incomplete +LVM_GETNUMBEROFWORKAREAS: Incomplete +LVM_GETSELECTIONMARK: Incomplete +LVM_SETSELECTIONMARK: Incomplete +LVM_SETHOVERTIME: Incomplete +LVM_GETHOVERTIME: Incomplete +LVM_SETTOOLTIPS: Incomplete +LVM_GETTOOLTIPS: Incomplete +LVBKIF_SOURCE_NONE: int +LVBKIF_SOURCE_HBITMAP: int +LVBKIF_SOURCE_URL: int +LVBKIF_SOURCE_MASK: int +LVBKIF_STYLE_NORMAL: int +LVBKIF_STYLE_TILE: int +LVBKIF_STYLE_MASK: int +LVM_SETBKIMAGEA: Incomplete +LVM_SETBKIMAGEW: Incomplete +LVM_GETBKIMAGEA: Incomplete +LVM_GETBKIMAGEW: Incomplete +LVKF_ALT: int +LVKF_CONTROL: int +LVKF_SHIFT: int +LVN_ITEMCHANGING: Incomplete +LVN_ITEMCHANGED: Incomplete +LVN_INSERTITEM: Incomplete +LVN_DELETEITEM: Incomplete +LVN_DELETEALLITEMS: Incomplete +LVN_BEGINLABELEDITA: Incomplete +LVN_BEGINLABELEDITW: Incomplete +LVN_ENDLABELEDITA: Incomplete +LVN_ENDLABELEDITW: Incomplete +LVN_COLUMNCLICK: Incomplete +LVN_BEGINDRAG: Incomplete +LVN_BEGINRDRAG: Incomplete +LVN_ODCACHEHINT: Incomplete +LVN_ODFINDITEMA: Incomplete +LVN_ODFINDITEMW: Incomplete +LVN_ITEMACTIVATE: Incomplete +LVN_ODSTATECHANGED: Incomplete +LVN_ODFINDITEM: Incomplete +LVN_HOTTRACK: Incomplete +LVN_GETDISPINFOA: Incomplete +LVN_GETDISPINFOW: Incomplete +LVN_SETDISPINFOA: Incomplete +LVN_SETDISPINFOW: Incomplete +LVN_BEGINLABELEDIT: Incomplete +LVN_ENDLABELEDIT: Incomplete +LVN_GETDISPINFO: Incomplete +LVN_SETDISPINFO: Incomplete +LVIF_DI_SETITEM: int +LVN_KEYDOWN: Incomplete +LVN_MARQUEEBEGIN: Incomplete +LVGIT_UNFOLDED: int +LVN_GETINFOTIPA: Incomplete +LVN_GETINFOTIPW: Incomplete +LVN_GETINFOTIP: Incomplete +WC_TREEVIEWA: str +WC_TREEVIEW: str +TVS_HASBUTTONS: int +TVS_HASLINES: int +TVS_LINESATROOT: int +TVS_EDITLABELS: int +TVS_DISABLEDRAGDROP: int +TVS_SHOWSELALWAYS: int +TVS_RTLREADING: int +TVS_NOTOOLTIPS: int +TVS_CHECKBOXES: int +TVS_TRACKSELECT: int +TVS_SINGLEEXPAND: int +TVS_INFOTIP: int +TVS_FULLROWSELECT: int +TVS_NOSCROLL: int +TVS_NONEVENHEIGHT: int +TVIF_TEXT: int +TVIF_IMAGE: int +TVIF_PARAM: int +TVIF_STATE: int +TVIF_HANDLE: int +TVIF_SELECTEDIMAGE: int +TVIF_CHILDREN: int +TVIF_INTEGRAL: int +TVIS_SELECTED: int +TVIS_CUT: int +TVIS_DROPHILITED: int +TVIS_BOLD: int +TVIS_EXPANDED: int +TVIS_EXPANDEDONCE: int +TVIS_EXPANDPARTIAL: int +TVIS_OVERLAYMASK: int +TVIS_STATEIMAGEMASK: int +TVIS_USERMASK: int 
+I_CHILDRENCALLBACK: int +TVI_ROOT: int +TVI_FIRST: int +TVI_LAST: int +TVI_SORT: int +TVM_INSERTITEMA: Incomplete +TVM_INSERTITEMW: Incomplete +TVM_INSERTITEM: Incomplete +TVM_DELETEITEM: Incomplete +TVM_EXPAND: Incomplete +TVE_COLLAPSE: int +TVE_EXPAND: int +TVE_TOGGLE: int +TVE_EXPANDPARTIAL: int +TVE_COLLAPSERESET: int +TVM_GETITEMRECT: Incomplete +TVM_GETCOUNT: Incomplete +TVM_GETINDENT: Incomplete +TVM_SETINDENT: Incomplete +TVM_GETIMAGELIST: Incomplete +TVSIL_NORMAL: int +TVSIL_STATE: int +TVM_SETIMAGELIST: Incomplete +TVM_GETNEXTITEM: Incomplete +TVGN_ROOT: int +TVGN_NEXT: int +TVGN_PREVIOUS: int +TVGN_PARENT: int +TVGN_CHILD: int +TVGN_FIRSTVISIBLE: int +TVGN_NEXTVISIBLE: int +TVGN_PREVIOUSVISIBLE: int +TVGN_DROPHILITE: int +TVGN_CARET: int +TVGN_LASTVISIBLE: int +TVM_SELECTITEM: Incomplete +TVM_GETITEMA: Incomplete +TVM_GETITEMW: Incomplete +TVM_GETITEM: Incomplete +TVM_SETITEMA: Incomplete +TVM_SETITEMW: Incomplete +TVM_SETITEM: Incomplete +TVM_EDITLABELA: Incomplete +TVM_EDITLABELW: Incomplete +TVM_EDITLABEL: Incomplete +TVM_GETEDITCONTROL: Incomplete +TVM_GETVISIBLECOUNT: Incomplete +TVM_HITTEST: Incomplete +TVHT_NOWHERE: int +TVHT_ONITEMICON: int +TVHT_ONITEMLABEL: int +TVHT_ONITEMINDENT: int +TVHT_ONITEMBUTTON: int +TVHT_ONITEMRIGHT: int +TVHT_ONITEMSTATEICON: int +TVHT_ABOVE: int +TVHT_BELOW: int +TVHT_TORIGHT: int +TVHT_TOLEFT: int +TVHT_ONITEM: Incomplete +TVM_CREATEDRAGIMAGE: Incomplete +TVM_SORTCHILDREN: Incomplete +TVM_ENSUREVISIBLE: Incomplete +TVM_SORTCHILDRENCB: Incomplete +TVM_ENDEDITLABELNOW: Incomplete +TVM_GETISEARCHSTRINGA: Incomplete +TVM_GETISEARCHSTRINGW: Incomplete +TVM_GETISEARCHSTRING: Incomplete +TVM_SETTOOLTIPS: Incomplete +TVM_GETTOOLTIPS: Incomplete +TVM_SETINSERTMARK: Incomplete +TVM_SETUNICODEFORMAT: Incomplete +TVM_GETUNICODEFORMAT: Incomplete +TVM_SETITEMHEIGHT: Incomplete +TVM_GETITEMHEIGHT: Incomplete +TVM_SETBKCOLOR: Incomplete +TVM_SETTEXTCOLOR: Incomplete +TVM_GETBKCOLOR: Incomplete +TVM_GETTEXTCOLOR: Incomplete +TVM_SETSCROLLTIME: Incomplete +TVM_GETSCROLLTIME: Incomplete +TVM_SETINSERTMARKCOLOR: Incomplete +TVM_GETINSERTMARKCOLOR: Incomplete +TVN_SELCHANGINGA: Incomplete +TVN_SELCHANGINGW: Incomplete +TVN_SELCHANGEDA: Incomplete +TVN_SELCHANGEDW: Incomplete +TVC_UNKNOWN: int +TVC_BYMOUSE: int +TVC_BYKEYBOARD: int +TVN_GETDISPINFOA: Incomplete +TVN_GETDISPINFOW: Incomplete +TVN_SETDISPINFOA: Incomplete +TVN_SETDISPINFOW: Incomplete +TVIF_DI_SETITEM: int +TVN_ITEMEXPANDINGA: Incomplete +TVN_ITEMEXPANDINGW: Incomplete +TVN_ITEMEXPANDEDA: Incomplete +TVN_ITEMEXPANDEDW: Incomplete +TVN_BEGINDRAGA: Incomplete +TVN_BEGINDRAGW: Incomplete +TVN_BEGINRDRAGA: Incomplete +TVN_BEGINRDRAGW: Incomplete +TVN_DELETEITEMA: Incomplete +TVN_DELETEITEMW: Incomplete +TVN_BEGINLABELEDITA: Incomplete +TVN_BEGINLABELEDITW: Incomplete +TVN_ENDLABELEDITA: Incomplete +TVN_ENDLABELEDITW: Incomplete +TVN_KEYDOWN: Incomplete +TVN_GETINFOTIPA: Incomplete +TVN_GETINFOTIPW: Incomplete +TVN_SINGLEEXPAND: Incomplete +TVN_SELCHANGING: Incomplete +TVN_SELCHANGED: Incomplete +TVN_GETDISPINFO: Incomplete +TVN_SETDISPINFO: Incomplete +TVN_ITEMEXPANDING: Incomplete +TVN_ITEMEXPANDED: Incomplete +TVN_BEGINDRAG: Incomplete +TVN_BEGINRDRAG: Incomplete +TVN_DELETEITEM: Incomplete +TVN_BEGINLABELEDIT: Incomplete +TVN_ENDLABELEDIT: Incomplete +TVN_GETINFOTIP: Incomplete +TVCDRF_NOIMAGES: int +WC_COMBOBOXEXA: str +WC_COMBOBOXEX: str +CBEIF_TEXT: int +CBEIF_IMAGE: int +CBEIF_SELECTEDIMAGE: int +CBEIF_OVERLAY: int +CBEIF_INDENT: int +CBEIF_LPARAM: int +CBEIF_DI_SETITEM: int 
+CBEM_INSERTITEMA: Incomplete +CBEM_SETIMAGELIST: Incomplete +CBEM_GETIMAGELIST: Incomplete +CBEM_GETITEMA: Incomplete +CBEM_SETITEMA: Incomplete +CBEM_GETCOMBOCONTROL: Incomplete +CBEM_GETEDITCONTROL: Incomplete +CBEM_SETEXSTYLE: Incomplete +CBEM_SETEXTENDEDSTYLE: Incomplete +CBEM_GETEXSTYLE: Incomplete +CBEM_GETEXTENDEDSTYLE: Incomplete +CBEM_SETUNICODEFORMAT: Incomplete +CBEM_GETUNICODEFORMAT: Incomplete +CBEM_HASEDITCHANGED: Incomplete +CBEM_INSERTITEMW: Incomplete +CBEM_SETITEMW: Incomplete +CBEM_GETITEMW: Incomplete +CBEM_INSERTITEM: Incomplete +CBEM_SETITEM: Incomplete +CBEM_GETITEM: Incomplete +CBES_EX_NOEDITIMAGE: int +CBES_EX_NOEDITIMAGEINDENT: int +CBES_EX_PATHWORDBREAKPROC: int +CBES_EX_NOSIZELIMIT: int +CBES_EX_CASESENSITIVE: int +CBEN_GETDISPINFO: Incomplete +CBEN_GETDISPINFOA: Incomplete +CBEN_INSERTITEM: Incomplete +CBEN_DELETEITEM: Incomplete +CBEN_BEGINEDIT: Incomplete +CBEN_ENDEDITA: Incomplete +CBEN_ENDEDITW: Incomplete +CBEN_GETDISPINFOW: Incomplete +CBEN_DRAGBEGINA: Incomplete +CBEN_DRAGBEGINW: Incomplete +CBEN_DRAGBEGIN: Incomplete +CBEN_ENDEDIT: Incomplete +CBENF_KILLFOCUS: int +CBENF_RETURN: int +CBENF_ESCAPE: int +CBENF_DROPDOWN: int +CBEMAXSTRLEN: int +WC_TABCONTROLA: str +WC_TABCONTROL: str +TCS_SCROLLOPPOSITE: int +TCS_BOTTOM: int +TCS_RIGHT: int +TCS_MULTISELECT: int +TCS_FLATBUTTONS: int +TCS_FORCEICONLEFT: int +TCS_FORCELABELLEFT: int +TCS_HOTTRACK: int +TCS_VERTICAL: int +TCS_TABS: int +TCS_BUTTONS: int +TCS_SINGLELINE: int +TCS_MULTILINE: int +TCS_RIGHTJUSTIFY: int +TCS_FIXEDWIDTH: int +TCS_RAGGEDRIGHT: int +TCS_FOCUSONBUTTONDOWN: int +TCS_OWNERDRAWFIXED: int +TCS_TOOLTIPS: int +TCS_FOCUSNEVER: int +TCS_EX_FLATSEPARATORS: int +TCS_EX_REGISTERDROP: int +TCM_GETIMAGELIST: Incomplete +TCM_SETIMAGELIST: Incomplete +TCM_GETITEMCOUNT: Incomplete +TCIF_TEXT: int +TCIF_IMAGE: int +TCIF_RTLREADING: int +TCIF_PARAM: int +TCIF_STATE: int +TCIS_BUTTONPRESSED: int +TCIS_HIGHLIGHTED: int +TCM_GETITEMA: Incomplete +TCM_GETITEMW: Incomplete +TCM_GETITEM: Incomplete +TCM_SETITEMA: Incomplete +TCM_SETITEMW: Incomplete +TCM_SETITEM: Incomplete +TCM_INSERTITEMA: Incomplete +TCM_INSERTITEMW: Incomplete +TCM_INSERTITEM: Incomplete +TCM_DELETEITEM: Incomplete +TCM_DELETEALLITEMS: Incomplete +TCM_GETITEMRECT: Incomplete +TCM_GETCURSEL: Incomplete +TCM_SETCURSEL: Incomplete +TCHT_NOWHERE: int +TCHT_ONITEMICON: int +TCHT_ONITEMLABEL: int +TCHT_ONITEM: Incomplete +TCM_HITTEST: Incomplete +TCM_SETITEMEXTRA: Incomplete +TCM_ADJUSTRECT: Incomplete +TCM_SETITEMSIZE: Incomplete +TCM_REMOVEIMAGE: Incomplete +TCM_SETPADDING: Incomplete +TCM_GETROWCOUNT: Incomplete +TCM_GETTOOLTIPS: Incomplete +TCM_SETTOOLTIPS: Incomplete +TCM_GETCURFOCUS: Incomplete +TCM_SETCURFOCUS: Incomplete +TCM_SETMINTABWIDTH: Incomplete +TCM_DESELECTALL: Incomplete +TCM_HIGHLIGHTITEM: Incomplete +TCM_SETEXTENDEDSTYLE: Incomplete +TCM_GETEXTENDEDSTYLE: Incomplete +TCM_SETUNICODEFORMAT: Incomplete +TCM_GETUNICODEFORMAT: Incomplete +TCN_KEYDOWN: Incomplete +ANIMATE_CLASSA: str +ANIMATE_CLASS: str +ACS_CENTER: int +ACS_TRANSPARENT: int +ACS_AUTOPLAY: int +ACS_TIMER: int +ACM_OPENA: Incomplete +ACM_OPENW: Incomplete +ACM_OPEN: Incomplete +ACM_PLAY: Incomplete +ACM_STOP: Incomplete +ACN_START: int +ACN_STOP: int +MONTHCAL_CLASSA: str +MONTHCAL_CLASS: str +MCM_FIRST: int +MCM_GETCURSEL: Incomplete +MCM_SETCURSEL: Incomplete +MCM_GETMAXSELCOUNT: Incomplete +MCM_SETMAXSELCOUNT: Incomplete +MCM_GETSELRANGE: Incomplete +MCM_SETSELRANGE: Incomplete +MCM_GETMONTHRANGE: Incomplete +MCM_SETDAYSTATE: Incomplete +MCM_GETMINREQRECT: 
Incomplete +MCM_SETCOLOR: Incomplete +MCM_GETCOLOR: Incomplete +MCSC_BACKGROUND: int +MCSC_TEXT: int +MCSC_TITLEBK: int +MCSC_TITLETEXT: int +MCSC_MONTHBK: int +MCSC_TRAILINGTEXT: int +MCM_SETTODAY: Incomplete +MCM_GETTODAY: Incomplete +MCM_HITTEST: Incomplete +MCHT_TITLE: int +MCHT_CALENDAR: int +MCHT_TODAYLINK: int +MCHT_NEXT: int +MCHT_PREV: int +MCHT_NOWHERE: int +MCHT_TITLEBK: int +MCHT_TITLEMONTH: Incomplete +MCHT_TITLEYEAR: Incomplete +MCHT_TITLEBTNNEXT: Incomplete +MCHT_TITLEBTNPREV: Incomplete +MCHT_CALENDARBK: int +MCHT_CALENDARDATE: Incomplete +MCHT_CALENDARDATENEXT: Incomplete +MCHT_CALENDARDATEPREV: Incomplete +MCHT_CALENDARDAY: Incomplete +MCHT_CALENDARWEEKNUM: Incomplete +MCM_SETFIRSTDAYOFWEEK: Incomplete +MCM_GETFIRSTDAYOFWEEK: Incomplete +MCM_GETRANGE: Incomplete +MCM_SETRANGE: Incomplete +MCM_GETMONTHDELTA: Incomplete +MCM_SETMONTHDELTA: Incomplete +MCM_GETMAXTODAYWIDTH: Incomplete +MCM_SETUNICODEFORMAT: Incomplete +MCM_GETUNICODEFORMAT: Incomplete +MCN_SELCHANGE: Incomplete +MCN_GETDAYSTATE: Incomplete +MCN_SELECT: Incomplete +MCS_DAYSTATE: int +MCS_MULTISELECT: int +MCS_WEEKNUMBERS: int +MCS_NOTODAYCIRCLE: int +MCS_NOTODAY: int +GMR_VISIBLE: int +GMR_DAYSTATE: int +DATETIMEPICK_CLASSA: str +DATETIMEPICK_CLASS: str +DTM_FIRST: int +DTM_GETSYSTEMTIME: Incomplete +DTM_SETSYSTEMTIME: Incomplete +DTM_GETRANGE: Incomplete +DTM_SETRANGE: Incomplete +DTM_SETFORMATA: Incomplete +DTM_SETFORMATW: Incomplete +DTM_SETFORMAT: Incomplete +DTM_SETMCCOLOR: Incomplete +DTM_GETMCCOLOR: Incomplete +DTM_GETMONTHCAL: Incomplete +DTM_SETMCFONT: Incomplete +DTM_GETMCFONT: Incomplete +DTS_UPDOWN: int +DTS_SHOWNONE: int +DTS_SHORTDATEFORMAT: int +DTS_LONGDATEFORMAT: int +DTS_TIMEFORMAT: int +DTS_APPCANPARSE: int +DTS_RIGHTALIGN: int +DTN_DATETIMECHANGE: Incomplete +DTN_USERSTRINGA: Incomplete +DTN_USERSTRINGW: Incomplete +DTN_USERSTRING: Incomplete +DTN_WMKEYDOWNA: Incomplete +DTN_WMKEYDOWNW: Incomplete +DTN_WMKEYDOWN: Incomplete +DTN_FORMATA: Incomplete +DTN_FORMATW: Incomplete +DTN_FORMAT: Incomplete +DTN_FORMATQUERYA: Incomplete +DTN_FORMATQUERYW: Incomplete +DTN_FORMATQUERY: Incomplete +DTN_DROPDOWN: Incomplete +DTN_CLOSEUP: Incomplete +GDTR_MIN: int +GDTR_MAX: int +GDT_ERROR: int +GDT_VALID: int +GDT_NONE: int +IPM_CLEARADDRESS: Incomplete +IPM_SETADDRESS: Incomplete +IPM_GETADDRESS: Incomplete +IPM_SETRANGE: Incomplete +IPM_SETFOCUS: Incomplete +IPM_ISBLANK: Incomplete +WC_IPADDRESSA: str +WC_IPADDRESS: str +IPN_FIELDCHANGED: Incomplete +WC_PAGESCROLLERA: str +WC_PAGESCROLLER: str +PGS_VERT: int +PGS_HORZ: int +PGS_AUTOSCROLL: int +PGS_DRAGNDROP: int +PGF_INVISIBLE: int +PGF_NORMAL: int +PGF_GRAYED: int +PGF_DEPRESSED: int +PGF_HOT: int +PGB_TOPORLEFT: int +PGB_BOTTOMORRIGHT: int +PGM_SETCHILD: Incomplete +PGM_RECALCSIZE: Incomplete +PGM_FORWARDMOUSE: Incomplete +PGM_SETBKCOLOR: Incomplete +PGM_GETBKCOLOR: Incomplete +PGM_SETBORDER: Incomplete +PGM_GETBORDER: Incomplete +PGM_SETPOS: Incomplete +PGM_GETPOS: Incomplete +PGM_SETBUTTONSIZE: Incomplete +PGM_GETBUTTONSIZE: Incomplete +PGM_GETBUTTONSTATE: Incomplete +PGM_GETDROPTARGET: Incomplete +PGN_SCROLL: Incomplete +PGF_SCROLLUP: int +PGF_SCROLLDOWN: int +PGF_SCROLLLEFT: int +PGF_SCROLLRIGHT: int +PGK_SHIFT: int +PGK_CONTROL: int +PGK_MENU: int +PGN_CALCSIZE: Incomplete +PGF_CALCWIDTH: int +PGF_CALCHEIGHT: int +WC_NATIVEFONTCTLA: str +WC_NATIVEFONTCTL: str +NFS_EDIT: int +NFS_STATIC: int +NFS_LISTCOMBO: int +NFS_BUTTON: int +NFS_ALL: int +WM_MOUSEHOVER: int +WM_MOUSELEAVE: int +TME_HOVER: int +TME_LEAVE: int +TME_QUERY: int +TME_CANCEL: int 
+HOVER_DEFAULT: int +WSB_PROP_CYVSCROLL: int +WSB_PROP_CXHSCROLL: int +WSB_PROP_CYHSCROLL: int +WSB_PROP_CXVSCROLL: int +WSB_PROP_CXHTHUMB: int +WSB_PROP_CYVTHUMB: int +WSB_PROP_VBKGCOLOR: int +WSB_PROP_HBKGCOLOR: int +WSB_PROP_VSTYLE: int +WSB_PROP_HSTYLE: int +WSB_PROP_WINSTYLE: int +WSB_PROP_PALETTE: int +WSB_PROP_MASK: int +FSB_FLAT_MODE: int +FSB_ENCARTA_MODE: int +FSB_REGULAR_MODE: int + +def INDEXTOOVERLAYMASK(i): ... +def INDEXTOSTATEIMAGEMASK(i): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/mmsystem.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/mmsystem.pyi new file mode 100644 index 00000000..66704755 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/mmsystem.pyi @@ -0,0 +1,858 @@ +from _typeshed import Incomplete + +MAXPNAMELEN: int +MAXERRORLENGTH: int +MAX_JOYSTICKOEMVXDNAME: int +MM_MICROSOFT: int +MM_MIDI_MAPPER: int +MM_WAVE_MAPPER: int +MM_SNDBLST_MIDIOUT: int +MM_SNDBLST_MIDIIN: int +MM_SNDBLST_SYNTH: int +MM_SNDBLST_WAVEOUT: int +MM_SNDBLST_WAVEIN: int +MM_ADLIB: int +MM_MPU401_MIDIOUT: int +MM_MPU401_MIDIIN: int +MM_PC_JOYSTICK: int +TIME_MS: int +TIME_SAMPLES: int +TIME_BYTES: int +TIME_SMPTE: int +TIME_MIDI: int +TIME_TICKS: int +MM_JOY1MOVE: int +MM_JOY2MOVE: int +MM_JOY1ZMOVE: int +MM_JOY2ZMOVE: int +MM_JOY1BUTTONDOWN: int +MM_JOY2BUTTONDOWN: int +MM_JOY1BUTTONUP: int +MM_JOY2BUTTONUP: int +MM_MCINOTIFY: int +MM_WOM_OPEN: int +MM_WOM_CLOSE: int +MM_WOM_DONE: int +MM_WIM_OPEN: int +MM_WIM_CLOSE: int +MM_WIM_DATA: int +MM_MIM_OPEN: int +MM_MIM_CLOSE: int +MM_MIM_DATA: int +MM_MIM_LONGDATA: int +MM_MIM_ERROR: int +MM_MIM_LONGERROR: int +MM_MOM_OPEN: int +MM_MOM_CLOSE: int +MM_MOM_DONE: int +MM_STREAM_OPEN: int +MM_STREAM_CLOSE: int +MM_STREAM_DONE: int +MM_STREAM_ERROR: int +MM_MOM_POSITIONCB: int +MM_MIM_MOREDATA: int +MM_MIXM_LINE_CHANGE: int +MM_MIXM_CONTROL_CHANGE: int +MMSYSERR_BASE: int +WAVERR_BASE: int +MIDIERR_BASE: int +TIMERR_BASE: int +JOYERR_BASE: int +MCIERR_BASE: int +MIXERR_BASE: int +MCI_STRING_OFFSET: int +MCI_VD_OFFSET: int +MCI_CD_OFFSET: int +MCI_WAVE_OFFSET: int +MCI_SEQ_OFFSET: int +MMSYSERR_NOERROR: int +MMSYSERR_ERROR: Incomplete +MMSYSERR_BADDEVICEID: Incomplete +MMSYSERR_NOTENABLED: Incomplete +MMSYSERR_ALLOCATED: Incomplete +MMSYSERR_INVALHANDLE: Incomplete +MMSYSERR_NODRIVER: Incomplete +MMSYSERR_NOMEM: Incomplete +MMSYSERR_NOTSUPPORTED: Incomplete +MMSYSERR_BADERRNUM: Incomplete +MMSYSERR_INVALFLAG: Incomplete +MMSYSERR_INVALPARAM: Incomplete +MMSYSERR_HANDLEBUSY: Incomplete +MMSYSERR_INVALIDALIAS: Incomplete +MMSYSERR_BADDB: Incomplete +MMSYSERR_KEYNOTFOUND: Incomplete +MMSYSERR_READERROR: Incomplete +MMSYSERR_WRITEERROR: Incomplete +MMSYSERR_DELETEERROR: Incomplete +MMSYSERR_VALNOTFOUND: Incomplete +MMSYSERR_NODRIVERCB: Incomplete +MMSYSERR_LASTERROR: Incomplete +DRV_LOAD: int +DRV_ENABLE: int +DRV_OPEN: int +DRV_CLOSE: int +DRV_DISABLE: int +DRV_FREE: int +DRV_CONFIGURE: int +DRV_QUERYCONFIGURE: int +DRV_INSTALL: int +DRV_REMOVE: int +DRV_EXITSESSION: int +DRV_POWER: int +DRV_RESERVED: int +DRV_USER: int +DRVCNF_CANCEL: int +DRVCNF_OK: int +DRVCNF_RESTART: int +DRV_CANCEL: int +DRV_OK: int +DRV_RESTART: int +DRV_MCI_FIRST: int +DRV_MCI_LAST: Incomplete +CALLBACK_TYPEMASK: int +CALLBACK_NULL: int +CALLBACK_WINDOW: int +CALLBACK_TASK: int +CALLBACK_FUNCTION: int +CALLBACK_THREAD: int +CALLBACK_EVENT: int +SND_SYNC: int +SND_ASYNC: int +SND_NODEFAULT: int 
+SND_MEMORY: int +SND_LOOP: int +SND_NOSTOP: int +SND_NOWAIT: int +SND_ALIAS: int +SND_ALIAS_ID: int +SND_FILENAME: int +SND_RESOURCE: int +SND_PURGE: int +SND_APPLICATION: int +SND_ALIAS_START: int +WAVERR_BADFORMAT: Incomplete +WAVERR_STILLPLAYING: Incomplete +WAVERR_UNPREPARED: Incomplete +WAVERR_SYNC: Incomplete +WAVERR_LASTERROR: Incomplete +WOM_OPEN: int +WOM_CLOSE: int +WOM_DONE: int +WIM_OPEN: int +WIM_CLOSE: int +WIM_DATA: int +WAVE_MAPPER: int +WAVE_FORMAT_QUERY: int +WAVE_ALLOWSYNC: int +WAVE_MAPPED: int +WAVE_FORMAT_DIRECT: int +WAVE_FORMAT_DIRECT_QUERY: Incomplete +WHDR_DONE: int +WHDR_PREPARED: int +WHDR_BEGINLOOP: int +WHDR_ENDLOOP: int +WHDR_INQUEUE: int +WAVECAPS_PITCH: int +WAVECAPS_PLAYBACKRATE: int +WAVECAPS_VOLUME: int +WAVECAPS_LRVOLUME: int +WAVECAPS_SYNC: int +WAVECAPS_SAMPLEACCURATE: int +WAVECAPS_DIRECTSOUND: int +WAVE_INVALIDFORMAT: int +WAVE_FORMAT_1M08: int +WAVE_FORMAT_1S08: int +WAVE_FORMAT_1M16: int +WAVE_FORMAT_1S16: int +WAVE_FORMAT_2M08: int +WAVE_FORMAT_2S08: int +WAVE_FORMAT_2M16: int +WAVE_FORMAT_2S16: int +WAVE_FORMAT_4M08: int +WAVE_FORMAT_4S08: int +WAVE_FORMAT_4M16: int +WAVE_FORMAT_4S16: int +WAVE_FORMAT_PCM: int +WAVE_FORMAT_IEEE_FLOAT: int +MIDIERR_UNPREPARED: Incomplete +MIDIERR_STILLPLAYING: Incomplete +MIDIERR_NOMAP: Incomplete +MIDIERR_NOTREADY: Incomplete +MIDIERR_NODEVICE: Incomplete +MIDIERR_INVALIDSETUP: Incomplete +MIDIERR_BADOPENMODE: Incomplete +MIDIERR_DONT_CONTINUE: Incomplete +MIDIERR_LASTERROR: Incomplete +MIDIPATCHSIZE: int +MIM_OPEN: int +MIM_CLOSE: int +MIM_DATA: int +MIM_LONGDATA: int +MIM_ERROR: int +MIM_LONGERROR: int +MOM_OPEN: int +MOM_CLOSE: int +MOM_DONE: int +MIM_MOREDATA: int +MOM_POSITIONCB: int +MIDI_IO_STATUS: int +MIDI_CACHE_ALL: int +MIDI_CACHE_BESTFIT: int +MIDI_CACHE_QUERY: int +MIDI_UNCACHE: int +MOD_MIDIPORT: int +MOD_SYNTH: int +MOD_SQSYNTH: int +MOD_FMSYNTH: int +MOD_MAPPER: int +MIDICAPS_VOLUME: int +MIDICAPS_LRVOLUME: int +MIDICAPS_CACHE: int +MIDICAPS_STREAM: int +MHDR_DONE: int +MHDR_PREPARED: int +MHDR_INQUEUE: int +MHDR_ISSTRM: int +MEVT_F_SHORT: int +MEVT_F_LONG: int +MEVT_F_CALLBACK: int + +def MEVT_EVENTTYPE(x): ... +def MEVT_EVENTPARM(x): ... 
+ +MIDISTRM_ERROR: int +MIDIPROP_SET: int +MIDIPROP_GET: int +MIDIPROP_TIMEDIV: int +MIDIPROP_TEMPO: int +AUXCAPS_CDAUDIO: int +AUXCAPS_AUXIN: int +AUXCAPS_VOLUME: int +AUXCAPS_LRVOLUME: int +MIXER_SHORT_NAME_CHARS: int +MIXER_LONG_NAME_CHARS: int +MIXERR_INVALLINE: Incomplete +MIXERR_INVALCONTROL: Incomplete +MIXERR_INVALVALUE: Incomplete +MIXERR_LASTERROR: Incomplete +MIXER_OBJECTF_HANDLE: int +MIXER_OBJECTF_MIXER: int +MIXER_OBJECTF_HMIXER: Incomplete +MIXER_OBJECTF_WAVEOUT: int +MIXER_OBJECTF_HWAVEOUT: Incomplete +MIXER_OBJECTF_WAVEIN: int +MIXER_OBJECTF_HWAVEIN: Incomplete +MIXER_OBJECTF_MIDIOUT: int +MIXER_OBJECTF_HMIDIOUT: Incomplete +MIXER_OBJECTF_MIDIIN: int +MIXER_OBJECTF_HMIDIIN: Incomplete +MIXER_OBJECTF_AUX: int +MIXERLINE_LINEF_ACTIVE: int +MIXERLINE_LINEF_DISCONNECTED: int +MIXERLINE_LINEF_SOURCE: int +MIXERLINE_COMPONENTTYPE_DST_FIRST: int +MIXERLINE_COMPONENTTYPE_DST_UNDEFINED: Incomplete +MIXERLINE_COMPONENTTYPE_DST_DIGITAL: Incomplete +MIXERLINE_COMPONENTTYPE_DST_LINE: Incomplete +MIXERLINE_COMPONENTTYPE_DST_MONITOR: Incomplete +MIXERLINE_COMPONENTTYPE_DST_SPEAKERS: Incomplete +MIXERLINE_COMPONENTTYPE_DST_HEADPHONES: Incomplete +MIXERLINE_COMPONENTTYPE_DST_TELEPHONE: Incomplete +MIXERLINE_COMPONENTTYPE_DST_WAVEIN: Incomplete +MIXERLINE_COMPONENTTYPE_DST_VOICEIN: Incomplete +MIXERLINE_COMPONENTTYPE_DST_LAST: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_FIRST: int +MIXERLINE_COMPONENTTYPE_SRC_UNDEFINED: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_DIGITAL: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_LINE: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_SYNTHESIZER: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_COMPACTDISC: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_TELEPHONE: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_PCSPEAKER: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_WAVEOUT: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_AUXILIARY: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_ANALOG: Incomplete +MIXERLINE_COMPONENTTYPE_SRC_LAST: Incomplete +MIXERLINE_TARGETTYPE_UNDEFINED: int +MIXERLINE_TARGETTYPE_WAVEOUT: int +MIXERLINE_TARGETTYPE_WAVEIN: int +MIXERLINE_TARGETTYPE_MIDIOUT: int +MIXERLINE_TARGETTYPE_MIDIIN: int +MIXERLINE_TARGETTYPE_AUX: int +MIXER_GETLINEINFOF_DESTINATION: int +MIXER_GETLINEINFOF_SOURCE: int +MIXER_GETLINEINFOF_LINEID: int +MIXER_GETLINEINFOF_COMPONENTTYPE: int +MIXER_GETLINEINFOF_TARGETTYPE: int +MIXER_GETLINEINFOF_QUERYMASK: int +MIXERCONTROL_CONTROLF_UNIFORM: int +MIXERCONTROL_CONTROLF_MULTIPLE: int +MIXERCONTROL_CONTROLF_DISABLED: int +MIXERCONTROL_CT_CLASS_MASK: int +MIXERCONTROL_CT_CLASS_CUSTOM: int +MIXERCONTROL_CT_CLASS_METER: int +MIXERCONTROL_CT_CLASS_SWITCH: int +MIXERCONTROL_CT_CLASS_NUMBER: int +MIXERCONTROL_CT_CLASS_SLIDER: int +MIXERCONTROL_CT_CLASS_FADER: int +MIXERCONTROL_CT_CLASS_TIME: int +MIXERCONTROL_CT_CLASS_LIST: int +MIXERCONTROL_CT_SUBCLASS_MASK: int +MIXERCONTROL_CT_SC_SWITCH_BOOLEAN: int +MIXERCONTROL_CT_SC_SWITCH_BUTTON: int +MIXERCONTROL_CT_SC_METER_POLLED: int +MIXERCONTROL_CT_SC_TIME_MICROSECS: int +MIXERCONTROL_CT_SC_TIME_MILLISECS: int +MIXERCONTROL_CT_SC_LIST_SINGLE: int +MIXERCONTROL_CT_SC_LIST_MULTIPLE: int +MIXERCONTROL_CT_UNITS_MASK: int +MIXERCONTROL_CT_UNITS_CUSTOM: int +MIXERCONTROL_CT_UNITS_BOOLEAN: int +MIXERCONTROL_CT_UNITS_SIGNED: int +MIXERCONTROL_CT_UNITS_UNSIGNED: int +MIXERCONTROL_CT_UNITS_DECIBELS: int +MIXERCONTROL_CT_UNITS_PERCENT: int +MIXERCONTROL_CONTROLTYPE_CUSTOM: Incomplete +MIXERCONTROL_CONTROLTYPE_BOOLEANMETER: Incomplete +MIXERCONTROL_CONTROLTYPE_SIGNEDMETER: Incomplete 
+MIXERCONTROL_CONTROLTYPE_PEAKMETER: Incomplete +MIXERCONTROL_CONTROLTYPE_UNSIGNEDMETER: Incomplete +MIXERCONTROL_CONTROLTYPE_BOOLEAN: Incomplete +MIXERCONTROL_CONTROLTYPE_ONOFF: Incomplete +MIXERCONTROL_CONTROLTYPE_MUTE: Incomplete +MIXERCONTROL_CONTROLTYPE_MONO: Incomplete +MIXERCONTROL_CONTROLTYPE_LOUDNESS: Incomplete +MIXERCONTROL_CONTROLTYPE_STEREOENH: Incomplete +MIXERCONTROL_CONTROLTYPE_BUTTON: Incomplete +MIXERCONTROL_CONTROLTYPE_DECIBELS: Incomplete +MIXERCONTROL_CONTROLTYPE_SIGNED: Incomplete +MIXERCONTROL_CONTROLTYPE_UNSIGNED: Incomplete +MIXERCONTROL_CONTROLTYPE_PERCENT: Incomplete +MIXERCONTROL_CONTROLTYPE_SLIDER: Incomplete +MIXERCONTROL_CONTROLTYPE_PAN: Incomplete +MIXERCONTROL_CONTROLTYPE_QSOUNDPAN: Incomplete +MIXERCONTROL_CONTROLTYPE_FADER: Incomplete +MIXERCONTROL_CONTROLTYPE_VOLUME: Incomplete +MIXERCONTROL_CONTROLTYPE_BASS: Incomplete +MIXERCONTROL_CONTROLTYPE_TREBLE: Incomplete +MIXERCONTROL_CONTROLTYPE_EQUALIZER: Incomplete +MIXERCONTROL_CONTROLTYPE_SINGLESELECT: Incomplete +MIXERCONTROL_CONTROLTYPE_MUX: Incomplete +MIXERCONTROL_CONTROLTYPE_MULTIPLESELECT: Incomplete +MIXERCONTROL_CONTROLTYPE_MIXER: Incomplete +MIXERCONTROL_CONTROLTYPE_MICROTIME: Incomplete +MIXERCONTROL_CONTROLTYPE_MILLITIME: Incomplete +MIXER_GETLINECONTROLSF_ALL: int +MIXER_GETLINECONTROLSF_ONEBYID: int +MIXER_GETLINECONTROLSF_ONEBYTYPE: int +MIXER_GETLINECONTROLSF_QUERYMASK: int +MIXER_GETCONTROLDETAILSF_VALUE: int +MIXER_GETCONTROLDETAILSF_LISTTEXT: int +MIXER_GETCONTROLDETAILSF_QUERYMASK: int +MIXER_SETCONTROLDETAILSF_VALUE: int +MIXER_SETCONTROLDETAILSF_CUSTOM: int +MIXER_SETCONTROLDETAILSF_QUERYMASK: int +TIMERR_NOERROR: int +TIMERR_NOCANDO: Incomplete +TIMERR_STRUCT: Incomplete +TIME_ONESHOT: int +TIME_PERIODIC: int +TIME_CALLBACK_FUNCTION: int +TIME_CALLBACK_EVENT_SET: int +TIME_CALLBACK_EVENT_PULSE: int +JOYERR_NOERROR: int +JOYERR_PARMS: Incomplete +JOYERR_NOCANDO: Incomplete +JOYERR_UNPLUGGED: Incomplete +JOY_BUTTON1: int +JOY_BUTTON2: int +JOY_BUTTON3: int +JOY_BUTTON4: int +JOY_BUTTON1CHG: int +JOY_BUTTON2CHG: int +JOY_BUTTON3CHG: int +JOY_BUTTON4CHG: int +JOY_BUTTON5: int +JOY_BUTTON6: int +JOY_BUTTON7: int +JOY_BUTTON8: int +JOY_BUTTON9: int +JOY_BUTTON10: int +JOY_BUTTON11: int +JOY_BUTTON12: int +JOY_BUTTON13: int +JOY_BUTTON14: int +JOY_BUTTON15: int +JOY_BUTTON16: int +JOY_BUTTON17: int +JOY_BUTTON18: int +JOY_BUTTON19: int +JOY_BUTTON20: int +JOY_BUTTON21: int +JOY_BUTTON22: int +JOY_BUTTON23: int +JOY_BUTTON24: int +JOY_BUTTON25: int +JOY_BUTTON26: int +JOY_BUTTON27: int +JOY_BUTTON28: int +JOY_BUTTON29: int +JOY_BUTTON30: int +JOY_BUTTON31: int +JOY_BUTTON32: int +JOY_POVFORWARD: int +JOY_POVRIGHT: int +JOY_POVBACKWARD: int +JOY_POVLEFT: int +JOY_RETURNX: int +JOY_RETURNY: int +JOY_RETURNZ: int +JOY_RETURNR: int +JOY_RETURNU: int +JOY_RETURNV: int +JOY_RETURNPOV: int +JOY_RETURNBUTTONS: int +JOY_RETURNRAWDATA: int +JOY_RETURNPOVCTS: int +JOY_RETURNCENTERED: int +JOY_USEDEADZONE: int +JOY_RETURNALL: Incomplete +JOY_CAL_READALWAYS: int +JOY_CAL_READXYONLY: int +JOY_CAL_READ3: int +JOY_CAL_READ4: int +JOY_CAL_READXONLY: int +JOY_CAL_READYONLY: int +JOY_CAL_READ5: int +JOY_CAL_READ6: int +JOY_CAL_READZONLY: int +JOY_CAL_READRONLY: int +JOY_CAL_READUONLY: int +JOY_CAL_READVONLY: int +JOYSTICKID1: int +JOYSTICKID2: int +JOYCAPS_HASZ: int +JOYCAPS_HASR: int +JOYCAPS_HASU: int +JOYCAPS_HASV: int +JOYCAPS_HASPOV: int +JOYCAPS_POV4DIR: int +JOYCAPS_POVCTS: int +MMIOERR_BASE: int +MMIOERR_FILENOTFOUND: Incomplete +MMIOERR_OUTOFMEMORY: Incomplete +MMIOERR_CANNOTOPEN: Incomplete 
+MMIOERR_CANNOTCLOSE: Incomplete +MMIOERR_CANNOTREAD: Incomplete +MMIOERR_CANNOTWRITE: Incomplete +MMIOERR_CANNOTSEEK: Incomplete +MMIOERR_CANNOTEXPAND: Incomplete +MMIOERR_CHUNKNOTFOUND: Incomplete +MMIOERR_UNBUFFERED: Incomplete +MMIOERR_PATHNOTFOUND: Incomplete +MMIOERR_ACCESSDENIED: Incomplete +MMIOERR_SHARINGVIOLATION: Incomplete +MMIOERR_NETWORKERROR: Incomplete +MMIOERR_TOOMANYOPENFILES: Incomplete +MMIOERR_INVALIDFILE: Incomplete +CFSEPCHAR: Incomplete +MMIO_RWMODE: int +MMIO_SHAREMODE: int +MMIO_CREATE: int +MMIO_PARSE: int +MMIO_DELETE: int +MMIO_EXIST: int +MMIO_ALLOCBUF: int +MMIO_GETTEMP: int +MMIO_DIRTY: int +MMIO_READ: int +MMIO_WRITE: int +MMIO_READWRITE: int +MMIO_COMPAT: int +MMIO_EXCLUSIVE: int +MMIO_DENYWRITE: int +MMIO_DENYREAD: int +MMIO_DENYNONE: int +MMIO_FHOPEN: int +MMIO_EMPTYBUF: int +MMIO_TOUPPER: int +MMIO_INSTALLPROC: int +MMIO_GLOBALPROC: int +MMIO_REMOVEPROC: int +MMIO_UNICODEPROC: int +MMIO_FINDPROC: int +MMIO_FINDCHUNK: int +MMIO_FINDRIFF: int +MMIO_FINDLIST: int +MMIO_CREATERIFF: int +MMIO_CREATELIST: int +MMIOM_READ: int +MMIOM_WRITE: int +MMIOM_SEEK: int +MMIOM_OPEN: int +MMIOM_CLOSE: int +MMIOM_WRITEFLUSH: int +MMIOM_RENAME: int +MMIOM_USER: int +SEEK_SET: int +SEEK_CUR: int +SEEK_END: int +MMIO_DEFAULTBUFFER: int +MCIERR_INVALID_DEVICE_ID: Incomplete +MCIERR_UNRECOGNIZED_KEYWORD: Incomplete +MCIERR_UNRECOGNIZED_COMMAND: Incomplete +MCIERR_HARDWARE: Incomplete +MCIERR_INVALID_DEVICE_NAME: Incomplete +MCIERR_OUT_OF_MEMORY: Incomplete +MCIERR_DEVICE_OPEN: Incomplete +MCIERR_CANNOT_LOAD_DRIVER: Incomplete +MCIERR_MISSING_COMMAND_STRING: Incomplete +MCIERR_PARAM_OVERFLOW: Incomplete +MCIERR_MISSING_STRING_ARGUMENT: Incomplete +MCIERR_BAD_INTEGER: Incomplete +MCIERR_PARSER_INTERNAL: Incomplete +MCIERR_DRIVER_INTERNAL: Incomplete +MCIERR_MISSING_PARAMETER: Incomplete +MCIERR_UNSUPPORTED_FUNCTION: Incomplete +MCIERR_FILE_NOT_FOUND: Incomplete +MCIERR_DEVICE_NOT_READY: Incomplete +MCIERR_INTERNAL: Incomplete +MCIERR_DRIVER: Incomplete +MCIERR_CANNOT_USE_ALL: Incomplete +MCIERR_MULTIPLE: Incomplete +MCIERR_EXTENSION_NOT_FOUND: Incomplete +MCIERR_OUTOFRANGE: Incomplete +MCIERR_FLAGS_NOT_COMPATIBLE: Incomplete +MCIERR_FILE_NOT_SAVED: Incomplete +MCIERR_DEVICE_TYPE_REQUIRED: Incomplete +MCIERR_DEVICE_LOCKED: Incomplete +MCIERR_DUPLICATE_ALIAS: Incomplete +MCIERR_BAD_CONSTANT: Incomplete +MCIERR_MUST_USE_SHAREABLE: Incomplete +MCIERR_MISSING_DEVICE_NAME: Incomplete +MCIERR_BAD_TIME_FORMAT: Incomplete +MCIERR_NO_CLOSING_QUOTE: Incomplete +MCIERR_DUPLICATE_FLAGS: Incomplete +MCIERR_INVALID_FILE: Incomplete +MCIERR_NULL_PARAMETER_BLOCK: Incomplete +MCIERR_UNNAMED_RESOURCE: Incomplete +MCIERR_NEW_REQUIRES_ALIAS: Incomplete +MCIERR_NOTIFY_ON_AUTO_OPEN: Incomplete +MCIERR_NO_ELEMENT_ALLOWED: Incomplete +MCIERR_NONAPPLICABLE_FUNCTION: Incomplete +MCIERR_ILLEGAL_FOR_AUTO_OPEN: Incomplete +MCIERR_FILENAME_REQUIRED: Incomplete +MCIERR_EXTRA_CHARACTERS: Incomplete +MCIERR_DEVICE_NOT_INSTALLED: Incomplete +MCIERR_GET_CD: Incomplete +MCIERR_SET_CD: Incomplete +MCIERR_SET_DRIVE: Incomplete +MCIERR_DEVICE_LENGTH: Incomplete +MCIERR_DEVICE_ORD_LENGTH: Incomplete +MCIERR_NO_INTEGER: Incomplete +MCIERR_WAVE_OUTPUTSINUSE: Incomplete +MCIERR_WAVE_SETOUTPUTINUSE: Incomplete +MCIERR_WAVE_INPUTSINUSE: Incomplete +MCIERR_WAVE_SETINPUTINUSE: Incomplete +MCIERR_WAVE_OUTPUTUNSPECIFIED: Incomplete +MCIERR_WAVE_INPUTUNSPECIFIED: Incomplete +MCIERR_WAVE_OUTPUTSUNSUITABLE: Incomplete +MCIERR_WAVE_SETOUTPUTUNSUITABLE: Incomplete +MCIERR_WAVE_INPUTSUNSUITABLE: Incomplete 
+MCIERR_WAVE_SETINPUTUNSUITABLE: Incomplete +MCIERR_SEQ_DIV_INCOMPATIBLE: Incomplete +MCIERR_SEQ_PORT_INUSE: Incomplete +MCIERR_SEQ_PORT_NONEXISTENT: Incomplete +MCIERR_SEQ_PORT_MAPNODEVICE: Incomplete +MCIERR_SEQ_PORT_MISCERROR: Incomplete +MCIERR_SEQ_TIMER: Incomplete +MCIERR_SEQ_PORTUNSPECIFIED: Incomplete +MCIERR_SEQ_NOMIDIPRESENT: Incomplete +MCIERR_NO_WINDOW: Incomplete +MCIERR_CREATEWINDOW: Incomplete +MCIERR_FILE_READ: Incomplete +MCIERR_FILE_WRITE: Incomplete +MCIERR_NO_IDENTITY: Incomplete +MCIERR_CUSTOM_DRIVER_BASE: Incomplete +MCI_FIRST: int +MCI_OPEN: int +MCI_CLOSE: int +MCI_ESCAPE: int +MCI_PLAY: int +MCI_SEEK: int +MCI_STOP: int +MCI_PAUSE: int +MCI_INFO: int +MCI_GETDEVCAPS: int +MCI_SPIN: int +MCI_SET: int +MCI_STEP: int +MCI_RECORD: int +MCI_SYSINFO: int +MCI_BREAK: int +MCI_SAVE: int +MCI_STATUS: int +MCI_CUE: int +MCI_REALIZE: int +MCI_WINDOW: int +MCI_PUT: int +MCI_WHERE: int +MCI_FREEZE: int +MCI_UNFREEZE: int +MCI_LOAD: int +MCI_CUT: int +MCI_COPY: int +MCI_PASTE: int +MCI_UPDATE: int +MCI_RESUME: int +MCI_DELETE: int +MCI_USER_MESSAGES: Incomplete +MCI_LAST: int +MCI_DEVTYPE_VCR: int +MCI_DEVTYPE_VIDEODISC: int +MCI_DEVTYPE_OVERLAY: int +MCI_DEVTYPE_CD_AUDIO: int +MCI_DEVTYPE_DAT: int +MCI_DEVTYPE_SCANNER: int +MCI_DEVTYPE_ANIMATION: int +MCI_DEVTYPE_DIGITAL_VIDEO: int +MCI_DEVTYPE_OTHER: int +MCI_DEVTYPE_WAVEFORM_AUDIO: int +MCI_DEVTYPE_SEQUENCER: int +MCI_DEVTYPE_FIRST: int +MCI_DEVTYPE_LAST: int +MCI_DEVTYPE_FIRST_USER: int +MCI_MODE_NOT_READY: Incomplete +MCI_MODE_STOP: Incomplete +MCI_MODE_PLAY: Incomplete +MCI_MODE_RECORD: Incomplete +MCI_MODE_SEEK: Incomplete +MCI_MODE_PAUSE: Incomplete +MCI_MODE_OPEN: Incomplete +MCI_FORMAT_MILLISECONDS: int +MCI_FORMAT_HMS: int +MCI_FORMAT_MSF: int +MCI_FORMAT_FRAMES: int +MCI_FORMAT_SMPTE_24: int +MCI_FORMAT_SMPTE_25: int +MCI_FORMAT_SMPTE_30: int +MCI_FORMAT_SMPTE_30DROP: int +MCI_FORMAT_BYTES: int +MCI_FORMAT_SAMPLES: int +MCI_FORMAT_TMSF: int + +def MCI_MSF_MINUTE(msf): ... +def MCI_MSF_SECOND(msf): ... +def MCI_MSF_FRAME(msf): ... +def MCI_TMSF_TRACK(tmsf): ... +def MCI_TMSF_MINUTE(tmsf): ... +def MCI_TMSF_SECOND(tmsf): ... +def MCI_TMSF_FRAME(tmsf): ... +def MCI_HMS_HOUR(hms): ... +def MCI_HMS_MINUTE(hms): ... +def MCI_HMS_SECOND(hms): ... 
+ +MCI_NOTIFY_SUCCESSFUL: int +MCI_NOTIFY_SUPERSEDED: int +MCI_NOTIFY_ABORTED: int +MCI_NOTIFY_FAILURE: int +MCI_NOTIFY: int +MCI_WAIT: int +MCI_FROM: int +MCI_TO: int +MCI_TRACK: int +MCI_OPEN_SHAREABLE: int +MCI_OPEN_ELEMENT: int +MCI_OPEN_ALIAS: int +MCI_OPEN_ELEMENT_ID: int +MCI_OPEN_TYPE_ID: int +MCI_OPEN_TYPE: int +MCI_SEEK_TO_START: int +MCI_SEEK_TO_END: int +MCI_STATUS_ITEM: int +MCI_STATUS_START: int +MCI_STATUS_LENGTH: int +MCI_STATUS_POSITION: int +MCI_STATUS_NUMBER_OF_TRACKS: int +MCI_STATUS_MODE: int +MCI_STATUS_MEDIA_PRESENT: int +MCI_STATUS_TIME_FORMAT: int +MCI_STATUS_READY: int +MCI_STATUS_CURRENT_TRACK: int +MCI_INFO_PRODUCT: int +MCI_INFO_FILE: int +MCI_INFO_MEDIA_UPC: int +MCI_INFO_MEDIA_IDENTITY: int +MCI_INFO_NAME: int +MCI_INFO_COPYRIGHT: int +MCI_GETDEVCAPS_ITEM: int +MCI_GETDEVCAPS_CAN_RECORD: int +MCI_GETDEVCAPS_HAS_AUDIO: int +MCI_GETDEVCAPS_HAS_VIDEO: int +MCI_GETDEVCAPS_DEVICE_TYPE: int +MCI_GETDEVCAPS_USES_FILES: int +MCI_GETDEVCAPS_COMPOUND_DEVICE: int +MCI_GETDEVCAPS_CAN_EJECT: int +MCI_GETDEVCAPS_CAN_PLAY: int +MCI_GETDEVCAPS_CAN_SAVE: int +MCI_SYSINFO_QUANTITY: int +MCI_SYSINFO_OPEN: int +MCI_SYSINFO_NAME: int +MCI_SYSINFO_INSTALLNAME: int +MCI_SET_DOOR_OPEN: int +MCI_SET_DOOR_CLOSED: int +MCI_SET_TIME_FORMAT: int +MCI_SET_AUDIO: int +MCI_SET_VIDEO: int +MCI_SET_ON: int +MCI_SET_OFF: int +MCI_SET_AUDIO_ALL: int +MCI_SET_AUDIO_LEFT: int +MCI_SET_AUDIO_RIGHT: int +MCI_BREAK_KEY: int +MCI_BREAK_HWND: int +MCI_BREAK_OFF: int +MCI_RECORD_INSERT: int +MCI_RECORD_OVERWRITE: int +MCI_SAVE_FILE: int +MCI_LOAD_FILE: int +MCI_VD_MODE_PARK: Incomplete +MCI_VD_MEDIA_CLV: Incomplete +MCI_VD_MEDIA_CAV: Incomplete +MCI_VD_MEDIA_OTHER: Incomplete +MCI_VD_FORMAT_TRACK: int +MCI_VD_PLAY_REVERSE: int +MCI_VD_PLAY_FAST: int +MCI_VD_PLAY_SPEED: int +MCI_VD_PLAY_SCAN: int +MCI_VD_PLAY_SLOW: int +MCI_VD_SEEK_REVERSE: int +MCI_VD_STATUS_SPEED: int +MCI_VD_STATUS_FORWARD: int +MCI_VD_STATUS_MEDIA_TYPE: int +MCI_VD_STATUS_SIDE: int +MCI_VD_STATUS_DISC_SIZE: int +MCI_VD_GETDEVCAPS_CLV: int +MCI_VD_GETDEVCAPS_CAV: int +MCI_VD_SPIN_UP: int +MCI_VD_SPIN_DOWN: int +MCI_VD_GETDEVCAPS_CAN_REVERSE: int +MCI_VD_GETDEVCAPS_FAST_RATE: int +MCI_VD_GETDEVCAPS_SLOW_RATE: int +MCI_VD_GETDEVCAPS_NORMAL_RATE: int +MCI_VD_STEP_FRAMES: int +MCI_VD_STEP_REVERSE: int +MCI_VD_ESCAPE_STRING: int +MCI_CDA_STATUS_TYPE_TRACK: int +MCI_CDA_TRACK_AUDIO: Incomplete +MCI_CDA_TRACK_OTHER: Incomplete +MCI_WAVE_PCM: Incomplete +MCI_WAVE_MAPPER: Incomplete +MCI_WAVE_OPEN_BUFFER: int +MCI_WAVE_SET_FORMATTAG: int +MCI_WAVE_SET_CHANNELS: int +MCI_WAVE_SET_SAMPLESPERSEC: int +MCI_WAVE_SET_AVGBYTESPERSEC: int +MCI_WAVE_SET_BLOCKALIGN: int +MCI_WAVE_SET_BITSPERSAMPLE: int +MCI_WAVE_INPUT: int +MCI_WAVE_OUTPUT: int +MCI_WAVE_STATUS_FORMATTAG: int +MCI_WAVE_STATUS_CHANNELS: int +MCI_WAVE_STATUS_SAMPLESPERSEC: int +MCI_WAVE_STATUS_AVGBYTESPERSEC: int +MCI_WAVE_STATUS_BLOCKALIGN: int +MCI_WAVE_STATUS_BITSPERSAMPLE: int +MCI_WAVE_STATUS_LEVEL: int +MCI_WAVE_SET_ANYINPUT: int +MCI_WAVE_SET_ANYOUTPUT: int +MCI_WAVE_GETDEVCAPS_INPUTS: int +MCI_WAVE_GETDEVCAPS_OUTPUTS: int +MCI_SEQ_DIV_PPQN: Incomplete +MCI_SEQ_DIV_SMPTE_24: Incomplete +MCI_SEQ_DIV_SMPTE_25: Incomplete +MCI_SEQ_DIV_SMPTE_30DROP: Incomplete +MCI_SEQ_DIV_SMPTE_30: Incomplete +MCI_SEQ_FORMAT_SONGPTR: int +MCI_SEQ_FILE: int +MCI_SEQ_MIDI: int +MCI_SEQ_SMPTE: int +MCI_SEQ_NONE: int +MCI_SEQ_MAPPER: int +MCI_SEQ_STATUS_TEMPO: int +MCI_SEQ_STATUS_PORT: int +MCI_SEQ_STATUS_SLAVE: int +MCI_SEQ_STATUS_MASTER: int +MCI_SEQ_STATUS_OFFSET: int +MCI_SEQ_STATUS_DIVTYPE: int 
+MCI_SEQ_STATUS_NAME: int +MCI_SEQ_STATUS_COPYRIGHT: int +MCI_SEQ_SET_TEMPO: int +MCI_SEQ_SET_PORT: int +MCI_SEQ_SET_SLAVE: int +MCI_SEQ_SET_MASTER: int +MCI_SEQ_SET_OFFSET: int +MCI_ANIM_OPEN_WS: int +MCI_ANIM_OPEN_PARENT: int +MCI_ANIM_OPEN_NOSTATIC: int +MCI_ANIM_PLAY_SPEED: int +MCI_ANIM_PLAY_REVERSE: int +MCI_ANIM_PLAY_FAST: int +MCI_ANIM_PLAY_SLOW: int +MCI_ANIM_PLAY_SCAN: int +MCI_ANIM_STEP_REVERSE: int +MCI_ANIM_STEP_FRAMES: int +MCI_ANIM_STATUS_SPEED: int +MCI_ANIM_STATUS_FORWARD: int +MCI_ANIM_STATUS_HWND: int +MCI_ANIM_STATUS_HPAL: int +MCI_ANIM_STATUS_STRETCH: int +MCI_ANIM_INFO_TEXT: int +MCI_ANIM_GETDEVCAPS_CAN_REVERSE: int +MCI_ANIM_GETDEVCAPS_FAST_RATE: int +MCI_ANIM_GETDEVCAPS_SLOW_RATE: int +MCI_ANIM_GETDEVCAPS_NORMAL_RATE: int +MCI_ANIM_GETDEVCAPS_PALETTES: int +MCI_ANIM_GETDEVCAPS_CAN_STRETCH: int +MCI_ANIM_GETDEVCAPS_MAX_WINDOWS: int +MCI_ANIM_REALIZE_NORM: int +MCI_ANIM_REALIZE_BKGD: int +MCI_ANIM_WINDOW_HWND: int +MCI_ANIM_WINDOW_STATE: int +MCI_ANIM_WINDOW_TEXT: int +MCI_ANIM_WINDOW_ENABLE_STRETCH: int +MCI_ANIM_WINDOW_DISABLE_STRETCH: int +MCI_ANIM_WINDOW_DEFAULT: int +MCI_ANIM_RECT: int +MCI_ANIM_PUT_SOURCE: int +MCI_ANIM_PUT_DESTINATION: int +MCI_ANIM_WHERE_SOURCE: int +MCI_ANIM_WHERE_DESTINATION: int +MCI_ANIM_UPDATE_HDC: int +MCI_OVLY_OPEN_WS: int +MCI_OVLY_OPEN_PARENT: int +MCI_OVLY_STATUS_HWND: int +MCI_OVLY_STATUS_STRETCH: int +MCI_OVLY_INFO_TEXT: int +MCI_OVLY_GETDEVCAPS_CAN_STRETCH: int +MCI_OVLY_GETDEVCAPS_CAN_FREEZE: int +MCI_OVLY_GETDEVCAPS_MAX_WINDOWS: int +MCI_OVLY_WINDOW_HWND: int +MCI_OVLY_WINDOW_STATE: int +MCI_OVLY_WINDOW_TEXT: int +MCI_OVLY_WINDOW_ENABLE_STRETCH: int +MCI_OVLY_WINDOW_DISABLE_STRETCH: int +MCI_OVLY_WINDOW_DEFAULT: int +MCI_OVLY_RECT: int +MCI_OVLY_PUT_SOURCE: int +MCI_OVLY_PUT_DESTINATION: int +MCI_OVLY_PUT_FRAME: int +MCI_OVLY_PUT_VIDEO: int +MCI_OVLY_WHERE_SOURCE: int +MCI_OVLY_WHERE_DESTINATION: int +MCI_OVLY_WHERE_FRAME: int +MCI_OVLY_WHERE_VIDEO: int +SELECTDIB: int + +def DIBINDEX(n): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/ntsecuritycon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/ntsecuritycon.pyi new file mode 100644 index 00000000..19bb7a9d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/ntsecuritycon.pyi @@ -0,0 +1,554 @@ +from typing_extensions import TypeAlias + +_SixIntTuple: TypeAlias = tuple[int, int, int, int, int, int] + +DELETE: int +READ_CONTROL: int +WRITE_DAC: int +WRITE_OWNER: int +SYNCHRONIZE: int +STANDARD_RIGHTS_REQUIRED: int +STANDARD_RIGHTS_READ: int +STANDARD_RIGHTS_WRITE: int +STANDARD_RIGHTS_EXECUTE: int +STANDARD_RIGHTS_ALL: int +SPECIFIC_RIGHTS_ALL: int +ACCESS_SYSTEM_SECURITY: int +MAXIMUM_ALLOWED: int +GENERIC_READ: int +GENERIC_WRITE: int +GENERIC_EXECUTE: int +GENERIC_ALL: int +FILE_READ_DATA: int +FILE_WRITE_DATA: int +FILE_ADD_FILE: int +FILE_APPEND_DATA: int +FILE_ADD_SUBDIRECTORY: int +FILE_CREATE_PIPE_INSTANCE: int +FILE_READ_EA: int +FILE_WRITE_EA: int +FILE_EXECUTE: int +FILE_TRAVERSE: int +FILE_DELETE_CHILD: int +FILE_READ_ATTRIBUTES: int +FILE_WRITE_ATTRIBUTES: int +FILE_ALL_ACCESS: int +FILE_GENERIC_READ: int +FILE_GENERIC_WRITE: int +FILE_GENERIC_EXECUTE: int +SECURITY_NULL_SID_AUTHORITY: _SixIntTuple +SECURITY_WORLD_SID_AUTHORITY: _SixIntTuple +SECURITY_LOCAL_SID_AUTHORITY: _SixIntTuple +SECURITY_CREATOR_SID_AUTHORITY: _SixIntTuple +SECURITY_NON_UNIQUE_AUTHORITY: _SixIntTuple +SECURITY_RESOURCE_MANAGER_AUTHORITY: _SixIntTuple +SECURITY_NULL_RID: int +SECURITY_WORLD_RID: int +SECURITY_LOCAL_RID: int +SECURITY_CREATOR_OWNER_RID: int +SECURITY_CREATOR_GROUP_RID: int +SECURITY_CREATOR_OWNER_SERVER_RID: int +SECURITY_CREATOR_GROUP_SERVER_RID: int +SECURITY_CREATOR_OWNER_RIGHTS_RID: int +SECURITY_NT_AUTHORITY: _SixIntTuple +SECURITY_DIALUP_RID: int +SECURITY_NETWORK_RID: int +SECURITY_BATCH_RID: int +SECURITY_INTERACTIVE_RID: int +SECURITY_SERVICE_RID: int +SECURITY_ANONYMOUS_LOGON_RID: int +SECURITY_PROXY_RID: int +SECURITY_SERVER_LOGON_RID: int +SECURITY_LOGON_IDS_RID: int +SECURITY_LOGON_IDS_RID_COUNT: int +SECURITY_LOCAL_SYSTEM_RID: int +SECURITY_NT_NON_UNIQUE: int +SECURITY_BUILTIN_DOMAIN_RID: int +DOMAIN_USER_RID_ADMIN: int +DOMAIN_USER_RID_GUEST: int +DOMAIN_USER_RID_KRBTGT: int +DOMAIN_USER_RID_MAX: int +DOMAIN_GROUP_RID_ADMINS: int +DOMAIN_GROUP_RID_USERS: int +DOMAIN_GROUP_RID_GUESTS: int +DOMAIN_GROUP_RID_COMPUTERS: int +DOMAIN_GROUP_RID_CONTROLLERS: int +DOMAIN_GROUP_RID_CERT_ADMINS: int +DOMAIN_GROUP_RID_SCHEMA_ADMINS: int +DOMAIN_GROUP_RID_ENTERPRISE_ADMINS: int +DOMAIN_GROUP_RID_POLICY_ADMINS: int +DOMAIN_GROUP_RID_READONLY_CONTROLLERS: int +DOMAIN_ALIAS_RID_ADMINS: int +DOMAIN_ALIAS_RID_USERS: int +DOMAIN_ALIAS_RID_GUESTS: int +DOMAIN_ALIAS_RID_POWER_USERS: int +DOMAIN_ALIAS_RID_ACCOUNT_OPS: int +DOMAIN_ALIAS_RID_SYSTEM_OPS: int +DOMAIN_ALIAS_RID_PRINT_OPS: int +DOMAIN_ALIAS_RID_BACKUP_OPS: int +DOMAIN_ALIAS_RID_REPLICATOR: int +DOMAIN_ALIAS_RID_RAS_SERVERS: int +DOMAIN_ALIAS_RID_PREW2KCOMPACCESS: int +DOMAIN_ALIAS_RID_REMOTE_DESKTOP_USERS: int +DOMAIN_ALIAS_RID_NETWORK_CONFIGURATION_OPS: int +DOMAIN_ALIAS_RID_INCOMING_FOREST_TRUST_BUILDERS: int +DOMAIN_ALIAS_RID_MONITORING_USERS: int +DOMAIN_ALIAS_RID_LOGGING_USERS: int +DOMAIN_ALIAS_RID_AUTHORIZATIONACCESS: int +DOMAIN_ALIAS_RID_TS_LICENSE_SERVERS: int +DOMAIN_ALIAS_RID_DCOM_USERS: int +DOMAIN_ALIAS_RID_IUSERS: int +DOMAIN_ALIAS_RID_CRYPTO_OPERATORS: int 
+DOMAIN_ALIAS_RID_CACHEABLE_PRINCIPALS_GROUP: int +DOMAIN_ALIAS_RID_NON_CACHEABLE_PRINCIPALS_GROUP: int +DOMAIN_ALIAS_RID_EVENT_LOG_READERS_GROUP: int +SECURITY_MANDATORY_LABEL_AUTHORITY: _SixIntTuple +SECURITY_MANDATORY_UNTRUSTED_RID: int +SECURITY_MANDATORY_LOW_RID: int +SECURITY_MANDATORY_MEDIUM_RID: int +SECURITY_MANDATORY_HIGH_RID: int +SECURITY_MANDATORY_SYSTEM_RID: int +SECURITY_MANDATORY_PROTECTED_PROCESS_RID: int +SECURITY_MANDATORY_MAXIMUM_USER_RID: int +SYSTEM_LUID: tuple[int, int] +ANONYMOUS_LOGON_LUID: tuple[int, int] +LOCALSERVICE_LUID: tuple[int, int] +NETWORKSERVICE_LUID: tuple[int, int] +IUSER_LUID: tuple[int, int] +SE_GROUP_MANDATORY: int +SE_GROUP_ENABLED_BY_DEFAULT: int +SE_GROUP_ENABLED: int +SE_GROUP_OWNER: int +SE_GROUP_USE_FOR_DENY_ONLY: int +SE_GROUP_INTEGRITY: int +SE_GROUP_INTEGRITY_ENABLED: int +SE_GROUP_RESOURCE: int +SE_GROUP_LOGON_ID: int +ACCESS_MIN_MS_ACE_TYPE: int +ACCESS_ALLOWED_ACE_TYPE: int +ACCESS_DENIED_ACE_TYPE: int +SYSTEM_AUDIT_ACE_TYPE: int +SYSTEM_ALARM_ACE_TYPE: int +ACCESS_MAX_MS_V2_ACE_TYPE: int +ACCESS_ALLOWED_COMPOUND_ACE_TYPE: int +ACCESS_MAX_MS_V3_ACE_TYPE: int +ACCESS_MIN_MS_OBJECT_ACE_TYPE: int +ACCESS_ALLOWED_OBJECT_ACE_TYPE: int +ACCESS_DENIED_OBJECT_ACE_TYPE: int +SYSTEM_AUDIT_OBJECT_ACE_TYPE: int +SYSTEM_ALARM_OBJECT_ACE_TYPE: int +ACCESS_MAX_MS_OBJECT_ACE_TYPE: int +ACCESS_MAX_MS_V4_ACE_TYPE: int +ACCESS_MAX_MS_ACE_TYPE: int +ACCESS_ALLOWED_CALLBACK_ACE_TYPE: int +ACCESS_DENIED_CALLBACK_ACE_TYPE: int +ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE: int +ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE: int +SYSTEM_AUDIT_CALLBACK_ACE_TYPE: int +SYSTEM_ALARM_CALLBACK_ACE_TYPE: int +SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE: int +SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE: int +SYSTEM_MANDATORY_LABEL_ACE_TYPE: int +ACCESS_MAX_MS_V5_ACE_TYPE: int +OBJECT_INHERIT_ACE: int +CONTAINER_INHERIT_ACE: int +NO_PROPAGATE_INHERIT_ACE: int +INHERIT_ONLY_ACE: int +VALID_INHERIT_FLAGS: int +SUCCESSFUL_ACCESS_ACE_FLAG: int +FAILED_ACCESS_ACE_FLAG: int +SE_OWNER_DEFAULTED: int +SE_GROUP_DEFAULTED: int +SE_DACL_PRESENT: int +SE_DACL_DEFAULTED: int +SE_SACL_PRESENT: int +SE_SACL_DEFAULTED: int +SE_SELF_RELATIVE: int +SE_PRIVILEGE_ENABLED_BY_DEFAULT: int +SE_PRIVILEGE_ENABLED: int +SE_PRIVILEGE_USED_FOR_ACCESS: int +PRIVILEGE_SET_ALL_NECESSARY: int +SE_CREATE_TOKEN_NAME: str +SE_ASSIGNPRIMARYTOKEN_NAME: str +SE_LOCK_MEMORY_NAME: str +SE_INCREASE_QUOTA_NAME: str +SE_UNSOLICITED_INPUT_NAME: str +SE_MACHINE_ACCOUNT_NAME: str +SE_TCB_NAME: str +SE_SECURITY_NAME: str +SE_TAKE_OWNERSHIP_NAME: str +SE_LOAD_DRIVER_NAME: str +SE_SYSTEM_PROFILE_NAME: str +SE_SYSTEMTIME_NAME: str +SE_PROF_SINGLE_PROCESS_NAME: str +SE_INC_BASE_PRIORITY_NAME: str +SE_CREATE_PAGEFILE_NAME: str +SE_CREATE_PERMANENT_NAME: str +SE_BACKUP_NAME: str +SE_RESTORE_NAME: str +SE_SHUTDOWN_NAME: str +SE_DEBUG_NAME: str +SE_AUDIT_NAME: str +SE_SYSTEM_ENVIRONMENT_NAME: str +SE_CHANGE_NOTIFY_NAME: str +SE_REMOTE_SHUTDOWN_NAME: str +SecurityAnonymous: int +SecurityIdentification: int +SecurityImpersonation: int +SecurityDelegation: int +SECURITY_MAX_IMPERSONATION_LEVEL: int +DEFAULT_IMPERSONATION_LEVEL: int +TOKEN_ASSIGN_PRIMARY: int +TOKEN_DUPLICATE: int +TOKEN_IMPERSONATE: int +TOKEN_QUERY: int +TOKEN_QUERY_SOURCE: int +TOKEN_ADJUST_PRIVILEGES: int +TOKEN_ADJUST_GROUPS: int +TOKEN_ADJUST_DEFAULT: int +TOKEN_ALL_ACCESS: int +TOKEN_READ: int +TOKEN_WRITE: int +TOKEN_EXECUTE: int +SidTypeUser: int +SidTypeGroup: int +SidTypeDomain: int +SidTypeAlias: int +SidTypeWellKnownGroup: int +SidTypeDeletedAccount: int +SidTypeInvalid: 
int +SidTypeUnknown: int +SidTypeComputer: int +SidTypeLabel: int +TokenPrimary: int +TokenImpersonation: int +TokenUser: int +TokenGroups: int +TokenPrivileges: int +TokenOwner: int +TokenPrimaryGroup: int +TokenDefaultDacl: int +TokenSource: int +TokenType: int +TokenImpersonationLevel: int +TokenStatistics: int +TokenRestrictedSids: int +TokenSessionId: int +TokenGroupsAndPrivileges: int +TokenSessionReference: int +TokenSandBoxInert: int +TokenAuditPolicy: int +TokenOrigin: int +TokenElevationType: int +TokenLinkedToken: int +TokenElevation: int +TokenHasRestrictions: int +TokenAccessInformation: int +TokenVirtualizationAllowed: int +TokenVirtualizationEnabled: int +TokenIntegrityLevel: int +TokenUIAccess: int +TokenMandatoryPolicy: int +TokenLogonSid: int +DS_BEHAVIOR_WIN2000: int +DS_BEHAVIOR_WIN2003_WITH_MIXED_DOMAINS: int +DS_BEHAVIOR_WIN2003: int +DS_SYNCED_EVENT_NAME: str +ACTRL_DS_OPEN: int +ACTRL_DS_CREATE_CHILD: int +ACTRL_DS_DELETE_CHILD: int +ACTRL_DS_SELF: int +ACTRL_DS_READ_PROP: int +ACTRL_DS_WRITE_PROP: int +ACTRL_DS_DELETE_TREE: int +ACTRL_DS_CONTROL_ACCESS: int +NTDSAPI_BIND_ALLOW_DELEGATION: int +DS_REPSYNC_ASYNCHRONOUS_OPERATION: int +DS_REPSYNC_WRITEABLE: int +DS_REPSYNC_PERIODIC: int +DS_REPSYNC_INTERSITE_MESSAGING: int +DS_REPSYNC_ALL_SOURCES: int +DS_REPSYNC_FULL: int +DS_REPSYNC_URGENT: int +DS_REPSYNC_NO_DISCARD: int +DS_REPSYNC_FORCE: int +DS_REPSYNC_ADD_REFERENCE: int +DS_REPSYNC_NEVER_COMPLETED: int +DS_REPSYNC_TWO_WAY: int +DS_REPSYNC_NEVER_NOTIFY: int +DS_REPSYNC_INITIAL: int +DS_REPSYNC_USE_COMPRESSION: int +DS_REPSYNC_ABANDONED: int +DS_REPSYNC_INITIAL_IN_PROGRESS: int +DS_REPSYNC_PARTIAL_ATTRIBUTE_SET: int +DS_REPSYNC_REQUEUE: int +DS_REPSYNC_NOTIFICATION: int +DS_REPSYNC_ASYNCHRONOUS_REPLICA: int +DS_REPSYNC_CRITICAL: int +DS_REPSYNC_FULL_IN_PROGRESS: int +DS_REPSYNC_PREEMPTED: int +DS_REPADD_ASYNCHRONOUS_OPERATION: int +DS_REPADD_WRITEABLE: int +DS_REPADD_INITIAL: int +DS_REPADD_PERIODIC: int +DS_REPADD_INTERSITE_MESSAGING: int +DS_REPADD_ASYNCHRONOUS_REPLICA: int +DS_REPADD_DISABLE_NOTIFICATION: int +DS_REPADD_DISABLE_PERIODIC: int +DS_REPADD_USE_COMPRESSION: int +DS_REPADD_NEVER_NOTIFY: int +DS_REPADD_TWO_WAY: int +DS_REPADD_CRITICAL: int +DS_REPDEL_ASYNCHRONOUS_OPERATION: int +DS_REPDEL_WRITEABLE: int +DS_REPDEL_INTERSITE_MESSAGING: int +DS_REPDEL_IGNORE_ERRORS: int +DS_REPDEL_LOCAL_ONLY: int +DS_REPDEL_NO_SOURCE: int +DS_REPDEL_REF_OK: int +DS_REPMOD_ASYNCHRONOUS_OPERATION: int +DS_REPMOD_WRITEABLE: int +DS_REPMOD_UPDATE_FLAGS: int +DS_REPMOD_UPDATE_ADDRESS: int +DS_REPMOD_UPDATE_SCHEDULE: int +DS_REPMOD_UPDATE_RESULT: int +DS_REPMOD_UPDATE_TRANSPORT: int +DS_REPUPD_ASYNCHRONOUS_OPERATION: int +DS_REPUPD_WRITEABLE: int +DS_REPUPD_ADD_REFERENCE: int +DS_REPUPD_DELETE_REFERENCE: int +DS_INSTANCETYPE_IS_NC_HEAD: int +DS_INSTANCETYPE_NC_IS_WRITEABLE: int +DS_INSTANCETYPE_NC_COMING: int +DS_INSTANCETYPE_NC_GOING: int +NTDSDSA_OPT_IS_GC: int +NTDSDSA_OPT_DISABLE_INBOUND_REPL: int +NTDSDSA_OPT_DISABLE_OUTBOUND_REPL: int +NTDSDSA_OPT_DISABLE_NTDSCONN_XLATE: int +NTDSCONN_OPT_IS_GENERATED: int +NTDSCONN_OPT_TWOWAY_SYNC: int +NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT: int +NTDSCONN_OPT_USE_NOTIFY: int +NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION: int +NTDSCONN_OPT_USER_OWNED_SCHEDULE: int +NTDSCONN_KCC_NO_REASON: int +NTDSCONN_KCC_GC_TOPOLOGY: int +NTDSCONN_KCC_RING_TOPOLOGY: int +NTDSCONN_KCC_MINIMIZE_HOPS_TOPOLOGY: int +NTDSCONN_KCC_STALE_SERVERS_TOPOLOGY: int +NTDSCONN_KCC_OSCILLATING_CONNECTION_TOPOLOGY: int +NTDSCONN_KCC_INTERSITE_GC_TOPOLOGY: int 
+NTDSCONN_KCC_INTERSITE_TOPOLOGY: int +NTDSCONN_KCC_SERVER_FAILOVER_TOPOLOGY: int +NTDSCONN_KCC_SITE_FAILOVER_TOPOLOGY: int +NTDSCONN_KCC_REDUNDANT_SERVER_TOPOLOGY: int +FRSCONN_PRIORITY_MASK: int +FRSCONN_MAX_PRIORITY: int +NTDSCONN_OPT_IGNORE_SCHEDULE_MASK: int +NTDSSETTINGS_OPT_IS_AUTO_TOPOLOGY_DISABLED: int +NTDSSETTINGS_OPT_IS_TOPL_CLEANUP_DISABLED: int +NTDSSETTINGS_OPT_IS_TOPL_MIN_HOPS_DISABLED: int +NTDSSETTINGS_OPT_IS_TOPL_DETECT_STALE_DISABLED: int +NTDSSETTINGS_OPT_IS_INTER_SITE_AUTO_TOPOLOGY_DISABLED: int +NTDSSETTINGS_OPT_IS_GROUP_CACHING_ENABLED: int +NTDSSETTINGS_OPT_FORCE_KCC_WHISTLER_BEHAVIOR: int +NTDSSETTINGS_OPT_FORCE_KCC_W2K_ELECTION: int +NTDSSETTINGS_OPT_IS_RAND_BH_SELECTION_DISABLED: int +NTDSSETTINGS_OPT_IS_SCHEDULE_HASHING_ENABLED: int +NTDSSETTINGS_OPT_IS_REDUNDANT_SERVER_TOPOLOGY_ENABLED: int +NTDSSETTINGS_DEFAULT_SERVER_REDUNDANCY: int +NTDSTRANSPORT_OPT_IGNORE_SCHEDULES: int +NTDSTRANSPORT_OPT_BRIDGES_REQUIRED: int +NTDSSITECONN_OPT_USE_NOTIFY: int +NTDSSITECONN_OPT_TWOWAY_SYNC: int +NTDSSITECONN_OPT_DISABLE_COMPRESSION: int +NTDSSITELINK_OPT_USE_NOTIFY: int +NTDSSITELINK_OPT_TWOWAY_SYNC: int +NTDSSITELINK_OPT_DISABLE_COMPRESSION: int +GUID_USERS_CONTAINER_A: str +GUID_COMPUTRS_CONTAINER_A: str +GUID_SYSTEMS_CONTAINER_A: str +GUID_DOMAIN_CONTROLLERS_CONTAINER_A: str +GUID_INFRASTRUCTURE_CONTAINER_A: str +GUID_DELETED_OBJECTS_CONTAINER_A: str +GUID_LOSTANDFOUND_CONTAINER_A: str +GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_A: str +GUID_PROGRAM_DATA_CONTAINER_A: str +GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_A: str +GUID_NTDS_QUOTAS_CONTAINER_A: str +GUID_USERS_CONTAINER_BYTE: str +GUID_COMPUTRS_CONTAINER_BYTE: str +GUID_SYSTEMS_CONTAINER_BYTE: str +GUID_DOMAIN_CONTROLLERS_CONTAINER_BYTE: str +GUID_INFRASTRUCTURE_CONTAINER_BYTE: str +GUID_DELETED_OBJECTS_CONTAINER_BYTE: str +GUID_LOSTANDFOUND_CONTAINER_BYTE: str +GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_BYTE: str +GUID_PROGRAM_DATA_CONTAINER_BYTE: str +GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_BYTE: str +GUID_NTDS_QUOTAS_CONTAINER_BYTE: str +DS_REPSYNCALL_NO_OPTIONS: int +DS_REPSYNCALL_ABORT_IF_SERVER_UNAVAILABLE: int +DS_REPSYNCALL_SYNC_ADJACENT_SERVERS_ONLY: int +DS_REPSYNCALL_ID_SERVERS_BY_DN: int +DS_REPSYNCALL_DO_NOT_SYNC: int +DS_REPSYNCALL_SKIP_INITIAL_CHECK: int +DS_REPSYNCALL_PUSH_CHANGES_OUTWARD: int +DS_REPSYNCALL_CROSS_SITE_BOUNDARIES: int +DS_ROLE_SCHEMA_OWNER: int +DS_ROLE_DOMAIN_OWNER: int +DS_ROLE_PDC_OWNER: int +DS_ROLE_RID_OWNER: int +DS_ROLE_INFRASTRUCTURE_OWNER: int +DS_SCHEMA_GUID_NOT_FOUND: int +DS_SCHEMA_GUID_ATTR: int +DS_SCHEMA_GUID_ATTR_SET: int +DS_SCHEMA_GUID_CLASS: int +DS_SCHEMA_GUID_CONTROL_RIGHT: int +DS_KCC_FLAG_ASYNC_OP: int +DS_KCC_FLAG_DAMPED: int +DS_EXIST_ADVISORY_MODE: int +DS_REPL_INFO_FLAG_IMPROVE_LINKED_ATTRS: int +DS_REPL_NBR_WRITEABLE: int +DS_REPL_NBR_SYNC_ON_STARTUP: int +DS_REPL_NBR_DO_SCHEDULED_SYNCS: int +DS_REPL_NBR_USE_ASYNC_INTERSITE_TRANSPORT: int +DS_REPL_NBR_TWO_WAY_SYNC: int +DS_REPL_NBR_RETURN_OBJECT_PARENTS: int +DS_REPL_NBR_FULL_SYNC_IN_PROGRESS: int +DS_REPL_NBR_FULL_SYNC_NEXT_PACKET: int +DS_REPL_NBR_NEVER_SYNCED: int +DS_REPL_NBR_PREEMPTED: int +DS_REPL_NBR_IGNORE_CHANGE_NOTIFICATIONS: int +DS_REPL_NBR_DISABLE_SCHEDULED_SYNC: int +DS_REPL_NBR_COMPRESS_CHANGES: int +DS_REPL_NBR_NO_CHANGE_NOTIFICATIONS: int +DS_REPL_NBR_PARTIAL_ATTRIBUTE_SET: int +DS_REPL_NBR_MODIFIABLE_MASK: int +DS_UNKNOWN_NAME: int +DS_FQDN_1779_NAME: int +DS_NT4_ACCOUNT_NAME: int +DS_DISPLAY_NAME: int +DS_UNIQUE_ID_NAME: int +DS_CANONICAL_NAME: int +DS_USER_PRINCIPAL_NAME: int 
+DS_CANONICAL_NAME_EX: int +DS_SERVICE_PRINCIPAL_NAME: int +DS_SID_OR_SID_HISTORY_NAME: int +DS_DNS_DOMAIN_NAME: int +DS_DOMAIN_SIMPLE_NAME: int +DS_ENTERPRISE_SIMPLE_NAME: int +DS_NAME_NO_FLAGS: int +DS_NAME_FLAG_SYNTACTICAL_ONLY: int +DS_NAME_FLAG_EVAL_AT_DC: int +DS_NAME_FLAG_GCVERIFY: int +DS_NAME_FLAG_TRUST_REFERRAL: int +DS_NAME_NO_ERROR: int +DS_NAME_ERROR_RESOLVING: int +DS_NAME_ERROR_NOT_FOUND: int +DS_NAME_ERROR_NOT_UNIQUE: int +DS_NAME_ERROR_NO_MAPPING: int +DS_NAME_ERROR_DOMAIN_ONLY: int +DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING: int +DS_NAME_ERROR_TRUST_REFERRAL: int +DS_SPN_DNS_HOST: int +DS_SPN_DN_HOST: int +DS_SPN_NB_HOST: int +DS_SPN_DOMAIN: int +DS_SPN_NB_DOMAIN: int +DS_SPN_SERVICE: int +DS_SPN_ADD_SPN_OP: int +DS_SPN_REPLACE_SPN_OP: int +DS_SPN_DELETE_SPN_OP: int +DS_FORCE_REDISCOVERY: int +DS_DIRECTORY_SERVICE_REQUIRED: int +DS_DIRECTORY_SERVICE_PREFERRED: int +DS_GC_SERVER_REQUIRED: int +DS_PDC_REQUIRED: int +DS_BACKGROUND_ONLY: int +DS_IP_REQUIRED: int +DS_KDC_REQUIRED: int +DS_TIMESERV_REQUIRED: int +DS_WRITABLE_REQUIRED: int +DS_GOOD_TIMESERV_PREFERRED: int +DS_AVOID_SELF: int +DS_ONLY_LDAP_NEEDED: int +DS_IS_FLAT_NAME: int +DS_IS_DNS_NAME: int +DS_RETURN_DNS_NAME: int +DS_RETURN_FLAT_NAME: int +DSGETDC_VALID_FLAGS: int +DS_INET_ADDRESS: int +DS_NETBIOS_ADDRESS: int +DS_PDC_FLAG: int +DS_GC_FLAG: int +DS_LDAP_FLAG: int +DS_DS_FLAG: int +DS_KDC_FLAG: int +DS_TIMESERV_FLAG: int +DS_CLOSEST_FLAG: int +DS_WRITABLE_FLAG: int +DS_GOOD_TIMESERV_FLAG: int +DS_NDNC_FLAG: int +DS_PING_FLAGS: int +DS_DNS_CONTROLLER_FLAG: int +DS_DNS_DOMAIN_FLAG: int +DS_DNS_FOREST_FLAG: int +DS_DOMAIN_IN_FOREST: int +DS_DOMAIN_DIRECT_OUTBOUND: int +DS_DOMAIN_TREE_ROOT: int +DS_DOMAIN_PRIMARY: int +DS_DOMAIN_NATIVE_MODE: int +DS_DOMAIN_DIRECT_INBOUND: int +DS_DOMAIN_VALID_FLAGS: int +DS_GFTI_UPDATE_TDO: int +DS_GFTI_VALID_FLAGS: int +DS_ONLY_DO_SITE_NAME: int +DS_NOTIFY_AFTER_SITE_RECORDS: int +DS_OPEN_VALID_OPTION_FLAGS: int +DS_OPEN_VALID_FLAGS: int +SI_EDIT_PERMS: int +SI_EDIT_OWNER: int +SI_EDIT_AUDITS: int +SI_CONTAINER: int +SI_READONLY: int +SI_ADVANCED: int +SI_RESET: int +SI_OWNER_READONLY: int +SI_EDIT_PROPERTIES: int +SI_OWNER_RECURSE: int +SI_NO_ACL_PROTECT: int +SI_NO_TREE_APPLY: int +SI_PAGE_TITLE: int +SI_SERVER_IS_DC: int +SI_RESET_DACL_TREE: int +SI_RESET_SACL_TREE: int +SI_OBJECT_GUID: int +SI_EDIT_EFFECTIVE: int +SI_RESET_DACL: int +SI_RESET_SACL: int +SI_RESET_OWNER: int +SI_NO_ADDITIONAL_PERMISSION: int +SI_MAY_WRITE: int +SI_EDIT_ALL: int +SI_AUDITS_ELEVATION_REQUIRED: int +SI_VIEW_ONLY: int +SI_OWNER_ELEVATION_REQUIRED: int +SI_PERMS_ELEVATION_REQUIRED: int +SI_ACCESS_SPECIFIC: int +SI_ACCESS_GENERAL: int +SI_ACCESS_CONTAINER: int +SI_ACCESS_PROPERTY: int +SI_PAGE_PERM: int +SI_PAGE_ADVPERM: int +SI_PAGE_AUDIT: int +SI_PAGE_OWNER: int +SI_PAGE_EFFECTIVE: int +PSPCB_SI_INITDIALOG: int +ACTRL_DS_LIST: int +ACTRL_DS_LIST_OBJECT: int +CFSTR_ACLUI_SID_INFO_LIST: str +DS_LIST_ACCOUNT_OBJECT_FOR_SERVER: int +DS_LIST_DNS_HOST_NAME_FOR_SERVER: int +DS_LIST_DSA_OBJECT_FOR_SERVER: int +FILE_LIST_DIRECTORY: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/pywintypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/pywintypes.pyi new file mode 100644 index 00000000..c0f99560 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/pywintypes.pyi @@ -0,0 +1,42 @@ +# Can't generate with stubgen because: +# "KeyError: 
'pywintypes'" +from _typeshed import Incomplete +from datetime import datetime +from typing_extensions import Literal + +import _win32typing + +class error(Exception): + winerror: int + funcname: str + strerror: str + def __init__(self, winerror: int, funcname: str, strerror: str): ... + +class com_error(Exception): ... +class UnicodeType(str): ... + +class TimeType(datetime): + Format = datetime.strftime + +IIDType = _win32typing.PyIID + +def DosDateTimeToTime() -> _win32typing.PyTime: ... +def Unicode() -> str: ... +def UnicodeFromRaw(_str: str) -> str: ... +def IsTextUnicode(_str: str, flags) -> tuple[Incomplete, Incomplete]: ... +def OVERLAPPED() -> _win32typing.PyOVERLAPPED: ... +def IID(iidString: str, is_bytes: bool = ...) -> _win32typing.PyIID: ... +def Time(timeRepr) -> _win32typing.PyTime: ... +def CreateGuid() -> _win32typing.PyIID: ... +def ACL(__bufSize: int = ...) -> _win32typing.PyACL: ... +def SID(buffer, idAuthority, subAuthorities, bufSize=...) -> _win32typing.PySID: ... +def SECURITY_ATTRIBUTES() -> _win32typing.PySECURITY_ATTRIBUTES: ... +def SECURITY_DESCRIPTOR() -> _win32typing.PySECURITY_DESCRIPTOR: ... +def HANDLE() -> int: ... +def HKEY() -> _win32typing.PyHKEY: ... +def WAVEFORMATEX() -> _win32typing.PyWAVEFORMATEX: ... +def TimeStamp(*args, **kwargs): ... # incomplete + +FALSE: Literal[False] +TRUE: Literal[True] +WAVE_FORMAT_PCM: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/regutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/regutil.pyi new file mode 100644 index 00000000..73aeee49 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/regutil.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +error: str +CLSIDPyFile: str +RegistryIDPyFile: str +RegistryIDPycFile: str + +def BuildDefaultPythonKey(): ... +def GetRootKey(): ... +def GetRegistryDefaultValue(subkey, rootkey: Incomplete | None = ...): ... +def SetRegistryDefaultValue(subKey, value, rootkey: Incomplete | None = ...) -> None: ... +def GetAppPathsKey(): ... +def RegisterPythonExe(exeFullPath, exeAlias: Incomplete | None = ..., exeAppPath: Incomplete | None = ...) -> None: ... +def GetRegisteredExe(exeAlias): ... +def UnregisterPythonExe(exeAlias) -> None: ... +def RegisterNamedPath(name, path) -> None: ... +def UnregisterNamedPath(name) -> None: ... +def GetRegisteredNamedPath(name): ... +def RegisterModule(modName, modPath) -> None: ... +def UnregisterModule(modName) -> None: ... +def GetRegisteredHelpFile(helpDesc): ... +def RegisterHelpFile(helpFile, helpPath, helpDesc: Incomplete | None = ..., bCheckFile: int = ...) -> None: ... +def UnregisterHelpFile(helpFile, helpDesc: Incomplete | None = ...) -> None: ... +def RegisterCoreDLL(coredllName: Incomplete | None = ...) -> None: ... +def RegisterFileExtensions(defPyIcon, defPycIcon, runCommand) -> None: ... +def RegisterShellCommand(shellCommand, exeCommand, shellUserCommand: Incomplete | None = ...) -> None: ... +def RegisterDDECommand(shellCommand, ddeApp, ddeTopic, ddeCommand) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/sspicon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/sspicon.pyi new file mode 100644 index 00000000..978a4370 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/sspicon.pyi @@ -0,0 +1,457 @@ +ISSP_LEVEL: int +ISSP_MODE: int + +def SEC_SUCCESS(Status: int) -> bool: ... + +SECPKG_FLAG_INTEGRITY: int +SECPKG_FLAG_PRIVACY: int +SECPKG_FLAG_TOKEN_ONLY: int +SECPKG_FLAG_DATAGRAM: int +SECPKG_FLAG_CONNECTION: int +SECPKG_FLAG_MULTI_REQUIRED: int +SECPKG_FLAG_CLIENT_ONLY: int +SECPKG_FLAG_EXTENDED_ERROR: int +SECPKG_FLAG_IMPERSONATION: int +SECPKG_FLAG_ACCEPT_WIN32_NAME: int +SECPKG_FLAG_STREAM: int +SECPKG_FLAG_NEGOTIABLE: int +SECPKG_FLAG_GSS_COMPATIBLE: int +SECPKG_FLAG_LOGON: int +SECPKG_FLAG_ASCII_BUFFERS: int +SECPKG_FLAG_FRAGMENT: int +SECPKG_FLAG_MUTUAL_AUTH: int +SECPKG_FLAG_DELEGATION: int +SECPKG_FLAG_READONLY_WITH_CHECKSUM: int +SECPKG_ID_NONE: int +SECBUFFER_VERSION: int +SECBUFFER_EMPTY: int +SECBUFFER_DATA: int +SECBUFFER_TOKEN: int +SECBUFFER_PKG_PARAMS: int +SECBUFFER_MISSING: int +SECBUFFER_EXTRA: int +SECBUFFER_STREAM_TRAILER: int +SECBUFFER_STREAM_HEADER: int +SECBUFFER_NEGOTIATION_INFO: int +SECBUFFER_PADDING: int +SECBUFFER_STREAM: int +SECBUFFER_TARGET: int +SECBUFFER_CHANNEL_BINDINGS: int +SECBUFFER_ATTRMASK: int +SECBUFFER_READONLY: int +SECBUFFER_READONLY_WITH_CHECKSUM: int +SECBUFFER_RESERVED: int +SECURITY_NATIVE_DREP: int +SECURITY_NETWORK_DREP: int +SECPKG_CRED_INBOUND: int +SECPKG_CRED_OUTBOUND: int +SECPKG_CRED_BOTH: int +SECPKG_CRED_DEFAULT: int +SECPKG_CRED_RESERVED: int +ISC_REQ_DELEGATE: int +ISC_REQ_MUTUAL_AUTH: int +ISC_REQ_REPLAY_DETECT: int +ISC_REQ_SEQUENCE_DETECT: int +ISC_REQ_CONFIDENTIALITY: int +ISC_REQ_USE_SESSION_KEY: int +ISC_REQ_PROMPT_FOR_CREDS: int +ISC_REQ_USE_SUPPLIED_CREDS: int +ISC_REQ_ALLOCATE_MEMORY: int +ISC_REQ_USE_DCE_STYLE: int +ISC_REQ_DATAGRAM: int +ISC_REQ_CONNECTION: int +ISC_REQ_CALL_LEVEL: int +ISC_REQ_FRAGMENT_SUPPLIED: int +ISC_REQ_EXTENDED_ERROR: int +ISC_REQ_STREAM: int +ISC_REQ_INTEGRITY: int +ISC_REQ_IDENTIFY: int +ISC_REQ_NULL_SESSION: int +ISC_REQ_MANUAL_CRED_VALIDATION: int +ISC_REQ_RESERVED1: int +ISC_REQ_FRAGMENT_TO_FIT: int +ISC_REQ_HTTP: int +ISC_RET_DELEGATE: int +ISC_RET_MUTUAL_AUTH: int +ISC_RET_REPLAY_DETECT: int +ISC_RET_SEQUENCE_DETECT: int +ISC_RET_CONFIDENTIALITY: int +ISC_RET_USE_SESSION_KEY: int +ISC_RET_USED_COLLECTED_CREDS: int +ISC_RET_USED_SUPPLIED_CREDS: int +ISC_RET_ALLOCATED_MEMORY: int +ISC_RET_USED_DCE_STYLE: int +ISC_RET_DATAGRAM: int +ISC_RET_CONNECTION: int +ISC_RET_INTERMEDIATE_RETURN: int +ISC_RET_CALL_LEVEL: int +ISC_RET_EXTENDED_ERROR: int +ISC_RET_STREAM: int +ISC_RET_INTEGRITY: int +ISC_RET_IDENTIFY: int +ISC_RET_NULL_SESSION: int +ISC_RET_MANUAL_CRED_VALIDATION: int +ISC_RET_RESERVED1: int +ISC_RET_FRAGMENT_ONLY: int +ASC_REQ_DELEGATE: int +ASC_REQ_MUTUAL_AUTH: int +ASC_REQ_REPLAY_DETECT: int +ASC_REQ_SEQUENCE_DETECT: int +ASC_REQ_CONFIDENTIALITY: int +ASC_REQ_USE_SESSION_KEY: int +ASC_REQ_ALLOCATE_MEMORY: int +ASC_REQ_USE_DCE_STYLE: int +ASC_REQ_DATAGRAM: int +ASC_REQ_CONNECTION: int +ASC_REQ_CALL_LEVEL: int +ASC_REQ_EXTENDED_ERROR: int +ASC_REQ_STREAM: int +ASC_REQ_INTEGRITY: int +ASC_REQ_LICENSING: int +ASC_REQ_IDENTIFY: int +ASC_REQ_ALLOW_NULL_SESSION: int +ASC_REQ_ALLOW_NON_USER_LOGONS: int +ASC_REQ_ALLOW_CONTEXT_REPLAY: int +ASC_REQ_FRAGMENT_TO_FIT: int 
+ASC_REQ_FRAGMENT_SUPPLIED: int +ASC_REQ_NO_TOKEN: int +ASC_RET_DELEGATE: int +ASC_RET_MUTUAL_AUTH: int +ASC_RET_REPLAY_DETECT: int +ASC_RET_SEQUENCE_DETECT: int +ASC_RET_CONFIDENTIALITY: int +ASC_RET_USE_SESSION_KEY: int +ASC_RET_ALLOCATED_MEMORY: int +ASC_RET_USED_DCE_STYLE: int +ASC_RET_DATAGRAM: int +ASC_RET_CONNECTION: int +ASC_RET_CALL_LEVEL: int +ASC_RET_THIRD_LEG_FAILED: int +ASC_RET_EXTENDED_ERROR: int +ASC_RET_STREAM: int +ASC_RET_INTEGRITY: int +ASC_RET_LICENSING: int +ASC_RET_IDENTIFY: int +ASC_RET_NULL_SESSION: int +ASC_RET_ALLOW_NON_USER_LOGONS: int +ASC_RET_ALLOW_CONTEXT_REPLAY: int +ASC_RET_FRAGMENT_ONLY: int +SECPKG_CRED_ATTR_NAMES: int +SECPKG_ATTR_SIZES: int +SECPKG_ATTR_NAMES: int +SECPKG_ATTR_LIFESPAN: int +SECPKG_ATTR_DCE_INFO: int +SECPKG_ATTR_STREAM_SIZES: int +SECPKG_ATTR_KEY_INFO: int +SECPKG_ATTR_AUTHORITY: int +SECPKG_ATTR_PROTO_INFO: int +SECPKG_ATTR_PASSWORD_EXPIRY: int +SECPKG_ATTR_SESSION_KEY: int +SECPKG_ATTR_PACKAGE_INFO: int +SECPKG_ATTR_USER_FLAGS: int +SECPKG_ATTR_NEGOTIATION_INFO: int +SECPKG_ATTR_NATIVE_NAMES: int +SECPKG_ATTR_FLAGS: int +SECPKG_ATTR_USE_VALIDATED: int +SECPKG_ATTR_CREDENTIAL_NAME: int +SECPKG_ATTR_TARGET_INFORMATION: int +SECPKG_ATTR_ACCESS_TOKEN: int +SECPKG_ATTR_TARGET: int +SECPKG_ATTR_AUTHENTICATION_ID: int +SECPKG_ATTR_REMOTE_CERT_CONTEXT: int +SECPKG_ATTR_LOCAL_CERT_CONTEXT: int +SECPKG_ATTR_ROOT_STORE: int +SECPKG_ATTR_SUPPORTED_ALGS: int +SECPKG_ATTR_CIPHER_STRENGTHS: int +SECPKG_ATTR_SUPPORTED_PROTOCOLS: int +SECPKG_ATTR_CONNECTION_INFO: int +SECPKG_ATTR_EAP_KEY_BLOCK: int +SECPKG_ATTR_MAPPED_CRED_ATTR: int +SECPKG_ATTR_SESSION_INFO: int +SECPKG_ATTR_APP_DATA: int +SECPKG_NEGOTIATION_COMPLETE: int +SECPKG_NEGOTIATION_OPTIMISTIC: int +SECPKG_NEGOTIATION_IN_PROGRESS: int +SECPKG_NEGOTIATION_DIRECT: int +SECPKG_NEGOTIATION_TRY_MULTICRED: int +SECPKG_CONTEXT_EXPORT_RESET_NEW: int +SECPKG_CONTEXT_EXPORT_DELETE_OLD: int +SECQOP_WRAP_NO_ENCRYPT: int +SECURITY_ENTRYPOINT_ANSIW: str +SECURITY_ENTRYPOINT_ANSIA: str +SECURITY_ENTRYPOINT16: str +SECURITY_ENTRYPOINT: str +SECURITY_ENTRYPOINT_ANSI: str +SECURITY_SUPPORT_PROVIDER_INTERFACE_VERSION: int +SECURITY_SUPPORT_PROVIDER_INTERFACE_VERSION_2: int +SASL_OPTION_SEND_SIZE: int +SASL_OPTION_RECV_SIZE: int +SASL_OPTION_AUTHZ_STRING: int +SASL_OPTION_AUTHZ_PROCESSING: int +SEC_WINNT_AUTH_IDENTITY_ANSI: int +SEC_WINNT_AUTH_IDENTITY_UNICODE: int +SEC_WINNT_AUTH_IDENTITY_VERSION: int +SEC_WINNT_AUTH_IDENTITY_MARSHALLED: int +SEC_WINNT_AUTH_IDENTITY_ONLY: int +SECPKG_OPTIONS_TYPE_UNKNOWN: int +SECPKG_OPTIONS_TYPE_LSA: int +SECPKG_OPTIONS_TYPE_SSPI: int +SECPKG_OPTIONS_PERMANENT: int +SEC_E_INSUFFICIENT_MEMORY: int +SEC_E_INVALID_HANDLE: int +SEC_E_UNSUPPORTED_FUNCTION: int +SEC_E_TARGET_UNKNOWN: int +SEC_E_INTERNAL_ERROR: int +SEC_E_SECPKG_NOT_FOUND: int +SEC_E_NOT_OWNER: int +SEC_E_CANNOT_INSTALL: int +SEC_E_INVALID_TOKEN: int +SEC_E_CANNOT_PACK: int +SEC_E_QOP_NOT_SUPPORTED: int +SEC_E_NO_IMPERSONATION: int +SEC_E_LOGON_DENIED: int +SEC_E_UNKNOWN_CREDENTIALS: int +SEC_E_NO_CREDENTIALS: int +SEC_E_MESSAGE_ALTERED: int +SEC_E_OUT_OF_SEQUENCE: int +SEC_E_NO_AUTHENTICATING_AUTHORITY: int +SEC_I_CONTINUE_NEEDED: int +SEC_I_COMPLETE_NEEDED: int +SEC_I_COMPLETE_AND_CONTINUE: int +SEC_I_LOCAL_LOGON: int +SEC_E_BAD_PKGID: int +SEC_E_CONTEXT_EXPIRED: int +SEC_I_CONTEXT_EXPIRED: int +SEC_E_BUFFER_TOO_SMALL: int +SEC_I_RENEGOTIATE: int +SEC_E_WRONG_PRINCIPAL: int +SEC_I_NO_LSA_CONTEXT: int +SEC_E_TIME_SKEW: int +SEC_E_UNTRUSTED_ROOT: int +SEC_E_ILLEGAL_MESSAGE: int +SEC_E_CERT_UNKNOWN: int 
+SEC_E_CERT_EXPIRED: int +SEC_E_ENCRYPT_FAILURE: int +SEC_E_DECRYPT_FAILURE: int +SEC_E_ALGORITHM_MISMATCH: int +SEC_E_SECURITY_QOS_FAILED: int +SEC_E_UNFINISHED_CONTEXT_DELETED: int +SEC_E_NO_TGT_REPLY: int +SEC_E_NO_IP_ADDRESSES: int +SEC_E_WRONG_CREDENTIAL_HANDLE: int +SEC_E_CRYPTO_SYSTEM_INVALID: int +SEC_E_MAX_REFERRALS_EXCEEDED: int +SEC_E_MUST_BE_KDC: int +SEC_E_STRONG_CRYPTO_NOT_SUPPORTED: int +SEC_E_TOO_MANY_PRINCIPALS: int +SEC_E_NO_PA_DATA: int +SEC_E_PKINIT_NAME_MISMATCH: int +SEC_E_SMARTCARD_LOGON_REQUIRED: int +SEC_E_SHUTDOWN_IN_PROGRESS: int +SEC_E_KDC_INVALID_REQUEST: int +SEC_E_KDC_UNABLE_TO_REFER: int +SEC_E_KDC_UNKNOWN_ETYPE: int +SEC_E_UNSUPPORTED_PREAUTH: int +SEC_E_DELEGATION_REQUIRED: int +SEC_E_BAD_BINDINGS: int +SEC_E_MULTIPLE_ACCOUNTS: int +SEC_E_NO_KERB_KEY: int +ERROR_IPSEC_QM_POLICY_EXISTS: int +ERROR_IPSEC_QM_POLICY_NOT_FOUND: int +ERROR_IPSEC_QM_POLICY_IN_USE: int +ERROR_IPSEC_MM_POLICY_EXISTS: int +ERROR_IPSEC_MM_POLICY_NOT_FOUND: int +ERROR_IPSEC_MM_POLICY_IN_USE: int +ERROR_IPSEC_MM_FILTER_EXISTS: int +ERROR_IPSEC_MM_FILTER_NOT_FOUND: int +ERROR_IPSEC_TRANSPORT_FILTER_EXISTS: int +ERROR_IPSEC_TRANSPORT_FILTER_NOT_FOUND: int +ERROR_IPSEC_MM_AUTH_EXISTS: int +ERROR_IPSEC_MM_AUTH_NOT_FOUND: int +ERROR_IPSEC_MM_AUTH_IN_USE: int +ERROR_IPSEC_DEFAULT_MM_POLICY_NOT_FOUND: int +ERROR_IPSEC_DEFAULT_MM_AUTH_NOT_FOUND: int +ERROR_IPSEC_DEFAULT_QM_POLICY_NOT_FOUND: int +ERROR_IPSEC_TUNNEL_FILTER_EXISTS: int +ERROR_IPSEC_TUNNEL_FILTER_NOT_FOUND: int +ERROR_IPSEC_MM_FILTER_PENDING_DELETION: int +ERROR_IPSEC_TRANSPORT_FILTER_PENDING_DELETION: int +ERROR_IPSEC_TUNNEL_FILTER_PENDING_DELETION: int +ERROR_IPSEC_MM_POLICY_PENDING_DELETION: int +ERROR_IPSEC_MM_AUTH_PENDING_DELETION: int +ERROR_IPSEC_QM_POLICY_PENDING_DELETION: int +WARNING_IPSEC_MM_POLICY_PRUNED: int +WARNING_IPSEC_QM_POLICY_PRUNED: int +ERROR_IPSEC_IKE_NEG_STATUS_BEGIN: int +ERROR_IPSEC_IKE_AUTH_FAIL: int +ERROR_IPSEC_IKE_ATTRIB_FAIL: int +ERROR_IPSEC_IKE_NEGOTIATION_PENDING: int +ERROR_IPSEC_IKE_GENERAL_PROCESSING_ERROR: int +ERROR_IPSEC_IKE_TIMED_OUT: int +ERROR_IPSEC_IKE_NO_CERT: int +ERROR_IPSEC_IKE_SA_DELETED: int +ERROR_IPSEC_IKE_SA_REAPED: int +ERROR_IPSEC_IKE_MM_ACQUIRE_DROP: int +ERROR_IPSEC_IKE_QM_ACQUIRE_DROP: int +ERROR_IPSEC_IKE_QUEUE_DROP_MM: int +ERROR_IPSEC_IKE_QUEUE_DROP_NO_MM: int +ERROR_IPSEC_IKE_DROP_NO_RESPONSE: int +ERROR_IPSEC_IKE_MM_DELAY_DROP: int +ERROR_IPSEC_IKE_QM_DELAY_DROP: int +ERROR_IPSEC_IKE_ERROR: int +ERROR_IPSEC_IKE_CRL_FAILED: int +ERROR_IPSEC_IKE_INVALID_KEY_USAGE: int +ERROR_IPSEC_IKE_INVALID_CERT_TYPE: int +ERROR_IPSEC_IKE_NO_PRIVATE_KEY: int +ERROR_IPSEC_IKE_DH_FAIL: int +ERROR_IPSEC_IKE_INVALID_HEADER: int +ERROR_IPSEC_IKE_NO_POLICY: int +ERROR_IPSEC_IKE_INVALID_SIGNATURE: int +ERROR_IPSEC_IKE_KERBEROS_ERROR: int +ERROR_IPSEC_IKE_NO_PUBLIC_KEY: int +ERROR_IPSEC_IKE_PROCESS_ERR: int +ERROR_IPSEC_IKE_PROCESS_ERR_SA: int +ERROR_IPSEC_IKE_PROCESS_ERR_PROP: int +ERROR_IPSEC_IKE_PROCESS_ERR_TRANS: int +ERROR_IPSEC_IKE_PROCESS_ERR_KE: int +ERROR_IPSEC_IKE_PROCESS_ERR_ID: int +ERROR_IPSEC_IKE_PROCESS_ERR_CERT: int +ERROR_IPSEC_IKE_PROCESS_ERR_CERT_REQ: int +ERROR_IPSEC_IKE_PROCESS_ERR_HASH: int +ERROR_IPSEC_IKE_PROCESS_ERR_SIG: int +ERROR_IPSEC_IKE_PROCESS_ERR_NONCE: int +ERROR_IPSEC_IKE_PROCESS_ERR_NOTIFY: int +ERROR_IPSEC_IKE_PROCESS_ERR_DELETE: int +ERROR_IPSEC_IKE_PROCESS_ERR_VENDOR: int +ERROR_IPSEC_IKE_INVALID_PAYLOAD: int +ERROR_IPSEC_IKE_LOAD_SOFT_SA: int +ERROR_IPSEC_IKE_SOFT_SA_TORN_DOWN: int +ERROR_IPSEC_IKE_INVALID_COOKIE: int +ERROR_IPSEC_IKE_NO_PEER_CERT: int 
+ERROR_IPSEC_IKE_PEER_CRL_FAILED: int +ERROR_IPSEC_IKE_POLICY_CHANGE: int +ERROR_IPSEC_IKE_NO_MM_POLICY: int +ERROR_IPSEC_IKE_NOTCBPRIV: int +ERROR_IPSEC_IKE_SECLOADFAIL: int +ERROR_IPSEC_IKE_FAILSSPINIT: int +ERROR_IPSEC_IKE_FAILQUERYSSP: int +ERROR_IPSEC_IKE_SRVACQFAIL: int +ERROR_IPSEC_IKE_SRVQUERYCRED: int +ERROR_IPSEC_IKE_GETSPIFAIL: int +ERROR_IPSEC_IKE_INVALID_FILTER: int +ERROR_IPSEC_IKE_OUT_OF_MEMORY: int +ERROR_IPSEC_IKE_ADD_UPDATE_KEY_FAILED: int +ERROR_IPSEC_IKE_INVALID_POLICY: int +ERROR_IPSEC_IKE_UNKNOWN_DOI: int +ERROR_IPSEC_IKE_INVALID_SITUATION: int +ERROR_IPSEC_IKE_DH_FAILURE: int +ERROR_IPSEC_IKE_INVALID_GROUP: int +ERROR_IPSEC_IKE_ENCRYPT: int +ERROR_IPSEC_IKE_DECRYPT: int +ERROR_IPSEC_IKE_POLICY_MATCH: int +ERROR_IPSEC_IKE_UNSUPPORTED_ID: int +ERROR_IPSEC_IKE_INVALID_HASH: int +ERROR_IPSEC_IKE_INVALID_HASH_ALG: int +ERROR_IPSEC_IKE_INVALID_HASH_SIZE: int +ERROR_IPSEC_IKE_INVALID_ENCRYPT_ALG: int +ERROR_IPSEC_IKE_INVALID_AUTH_ALG: int +ERROR_IPSEC_IKE_INVALID_SIG: int +ERROR_IPSEC_IKE_LOAD_FAILED: int +ERROR_IPSEC_IKE_RPC_DELETE: int +ERROR_IPSEC_IKE_BENIGN_REINIT: int +ERROR_IPSEC_IKE_INVALID_RESPONDER_LIFETIME_NOTIFY: int +ERROR_IPSEC_IKE_INVALID_CERT_KEYLEN: int +ERROR_IPSEC_IKE_MM_LIMIT: int +ERROR_IPSEC_IKE_NEGOTIATION_DISABLED: int +ERROR_IPSEC_IKE_NEG_STATUS_END: int +CRYPT_E_MSG_ERROR: int +CRYPT_E_UNKNOWN_ALGO: int +CRYPT_E_OID_FORMAT: int +CRYPT_E_INVALID_MSG_TYPE: int +CRYPT_E_UNEXPECTED_ENCODING: int +CRYPT_E_AUTH_ATTR_MISSING: int +CRYPT_E_HASH_VALUE: int +CRYPT_E_INVALID_INDEX: int +CRYPT_E_ALREADY_DECRYPTED: int +CRYPT_E_NOT_DECRYPTED: int +CRYPT_E_RECIPIENT_NOT_FOUND: int +CRYPT_E_CONTROL_TYPE: int +CRYPT_E_ISSUER_SERIALNUMBER: int +CRYPT_E_SIGNER_NOT_FOUND: int +CRYPT_E_ATTRIBUTES_MISSING: int +CRYPT_E_STREAM_MSG_NOT_READY: int +CRYPT_E_STREAM_INSUFFICIENT_DATA: int +CRYPT_I_NEW_PROTECTION_REQUIRED: int +CRYPT_E_BAD_LEN: int +CRYPT_E_BAD_ENCODE: int +CRYPT_E_FILE_ERROR: int +CRYPT_E_NOT_FOUND: int +CRYPT_E_EXISTS: int +CRYPT_E_NO_PROVIDER: int +CRYPT_E_SELF_SIGNED: int +CRYPT_E_DELETED_PREV: int +CRYPT_E_NO_MATCH: int +CRYPT_E_UNEXPECTED_MSG_TYPE: int +CRYPT_E_NO_KEY_PROPERTY: int +CRYPT_E_NO_DECRYPT_CERT: int +CRYPT_E_BAD_MSG: int +CRYPT_E_NO_SIGNER: int +CRYPT_E_PENDING_CLOSE: int +CRYPT_E_REVOKED: int +CRYPT_E_NO_REVOCATION_DLL: int +CRYPT_E_NO_REVOCATION_CHECK: int +CRYPT_E_REVOCATION_OFFLINE: int +CRYPT_E_NOT_IN_REVOCATION_DATABASE: int +CRYPT_E_INVALID_NUMERIC_STRING: int +CRYPT_E_INVALID_PRINTABLE_STRING: int +CRYPT_E_INVALID_IA5_STRING: int +CRYPT_E_INVALID_X500_STRING: int +CRYPT_E_NOT_CHAR_STRING: int +CRYPT_E_FILERESIZED: int +CRYPT_E_SECURITY_SETTINGS: int +CRYPT_E_NO_VERIFY_USAGE_DLL: int +CRYPT_E_NO_VERIFY_USAGE_CHECK: int +CRYPT_E_VERIFY_USAGE_OFFLINE: int +CRYPT_E_NOT_IN_CTL: int +CRYPT_E_NO_TRUSTED_SIGNER: int +CRYPT_E_MISSING_PUBKEY_PARA: int +CRYPT_E_OSS_ERROR: int +KerbDebugRequestMessage: int +KerbQueryTicketCacheMessage: int +KerbChangeMachinePasswordMessage: int +KerbVerifyPacMessage: int +KerbRetrieveTicketMessage: int +KerbUpdateAddressesMessage: int +KerbPurgeTicketCacheMessage: int +KerbChangePasswordMessage: int +KerbRetrieveEncodedTicketMessage: int +KerbDecryptDataMessage: int +KerbAddBindingCacheEntryMessage: int +KerbSetPasswordMessage: int +KerbSetPasswordExMessage: int +KerbVerifyCredentialsMessage: int +KerbQueryTicketCacheExMessage: int +KerbPurgeTicketCacheExMessage: int +KerbRefreshSmartcardCredentialsMessage: int +KerbAddExtraCredentialsMessage: int +KerbQuerySupplementalCredentialsMessage: int 
+MsV1_0Lm20ChallengeRequest: int +MsV1_0Lm20GetChallengeResponse: int +MsV1_0EnumerateUsers: int +MsV1_0GetUserInfo: int +MsV1_0ReLogonUsers: int +MsV1_0ChangePassword: int +MsV1_0ChangeCachedPassword: int +MsV1_0GenericPassthrough: int +MsV1_0CacheLogon: int +MsV1_0SubAuth: int +MsV1_0DeriveCredential: int +MsV1_0CacheLookup: int +MsV1_0SetProcessOption: int +SEC_E_OK: int +SECBUFFER_MECHLIST: int +SECBUFFER_MECHLIST_SIGNATURE: int +SECPKG_ATTR_ISSUER_LIST_EX: int +SEC_E_INCOMPLETE_CREDENTIALS: int +SEC_E_INCOMPLETE_MESSAGE: int +SEC_I_INCOMPLETE_CREDENTIALS: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win2kras.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win2kras.pyi new file mode 100644 index 00000000..b766e47a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win2kras.pyi @@ -0,0 +1,34 @@ +RASEAPF_Logon: int +RASEAPF_NonInteractive: int +RASEAPF_Preview: int + +def GetEapUserIdentity(*args, **kwargs): ... # incomplete + +RASCS_AllDevicesConnected: int +RASCS_AuthAck: int +RASCS_AuthCallback: int +RASCS_AuthChangePassword: int +RASCS_AuthLinkSpeed: int +RASCS_AuthNotify: int +RASCS_AuthProject: int +RASCS_AuthRetry: int +RASCS_Authenticate: int +RASCS_Authenticated: int +RASCS_CallbackComplete: int +RASCS_CallbackSetByCaller: int +RASCS_ConnectDevice: int +RASCS_Connected: int +RASCS_DeviceConnected: int +RASCS_Disconnected: int +RASCS_Interactive: int +RASCS_LogonNetwork: int +RASCS_OpenPort: int +RASCS_PasswordExpired: int +RASCS_PortOpened: int +RASCS_PrepareForCallback: int +RASCS_Projected: int +RASCS_ReAuthenticate: int +RASCS_RetryAuthentication: int +RASCS_StartAuthentication: int +RASCS_WaitForCallback: int +RASCS_WaitForModemReset: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32con.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32con.pyi new file mode 100644 index 00000000..b5f256e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32con.pyi @@ -0,0 +1,4778 @@ +WINVER: int +WM_USER: int +PY_0U: int +OFN_READONLY: int +OFN_OVERWRITEPROMPT: int +OFN_HIDEREADONLY: int +OFN_NOCHANGEDIR: int +OFN_SHOWHELP: int +OFN_ENABLEHOOK: int +OFN_ENABLETEMPLATE: int +OFN_ENABLETEMPLATEHANDLE: int +OFN_NOVALIDATE: int +OFN_ALLOWMULTISELECT: int +OFN_EXTENSIONDIFFERENT: int +OFN_PATHMUSTEXIST: int +OFN_FILEMUSTEXIST: int +OFN_CREATEPROMPT: int +OFN_SHAREAWARE: int +OFN_NOREADONLYRETURN: int +OFN_NOTESTFILECREATE: int +OFN_NONETWORKBUTTON: int +OFN_NOLONGNAMES: int +OFN_EXPLORER: int +OFN_NODEREFERENCELINKS: int +OFN_LONGNAMES: int +OFN_ENABLEINCLUDENOTIFY: int +OFN_ENABLESIZING: int +OFN_DONTADDTORECENT: int +OFN_FORCESHOWHIDDEN: int +OFN_EX_NOPLACESBAR: int +OFN_SHAREFALLTHROUGH: int +OFN_SHARENOWARN: int +OFN_SHAREWARN: int +CDN_FIRST: int +CDN_LAST: int +CDN_INITDONE: int +CDN_SELCHANGE: int +CDN_FOLDERCHANGE: int +CDN_SHAREVIOLATION: int +CDN_HELP: int +CDN_FILEOK: int +CDN_TYPECHANGE: int +CDN_INCLUDEITEM: int +CDM_FIRST: int +CDM_LAST: int +CDM_GETSPEC: int +CDM_GETFILEPATH: int +CDM_GETFOLDERPATH: int +CDM_SETCONTROLTEXT: int +CDM_HIDECONTROL: int +CDM_SETDEFEXT: int +CC_RGBINIT: int +CC_FULLOPEN: int +CC_PREVENTFULLOPEN: int +CC_SHOWHELP: int +CC_ENABLEHOOK: int +CC_ENABLETEMPLATE: int +CC_ENABLETEMPLATEHANDLE: int 
+CC_SOLIDCOLOR: int +CC_ANYCOLOR: int +FR_DOWN: int +FR_WHOLEWORD: int +FR_MATCHCASE: int +FR_FINDNEXT: int +FR_REPLACE: int +FR_REPLACEALL: int +FR_DIALOGTERM: int +FR_SHOWHELP: int +FR_ENABLEHOOK: int +FR_ENABLETEMPLATE: int +FR_NOUPDOWN: int +FR_NOMATCHCASE: int +FR_NOWHOLEWORD: int +FR_ENABLETEMPLATEHANDLE: int +FR_HIDEUPDOWN: int +FR_HIDEMATCHCASE: int +FR_HIDEWHOLEWORD: int +CF_SCREENFONTS: int +CF_PRINTERFONTS: int +CF_BOTH: int +CF_SHOWHELP: int +CF_ENABLEHOOK: int +CF_ENABLETEMPLATE: int +CF_ENABLETEMPLATEHANDLE: int +CF_INITTOLOGFONTSTRUCT: int +CF_USESTYLE: int +CF_EFFECTS: int +CF_APPLY: int +CF_ANSIONLY: int +CF_SCRIPTSONLY: int +CF_NOVECTORFONTS: int +CF_NOOEMFONTS: int +CF_NOSIMULATIONS: int +CF_LIMITSIZE: int +CF_FIXEDPITCHONLY: int +CF_WYSIWYG: int +CF_FORCEFONTEXIST: int +CF_SCALABLEONLY: int +CF_TTONLY: int +CF_NOFACESEL: int +CF_NOSTYLESEL: int +CF_NOSIZESEL: int +CF_SELECTSCRIPT: int +CF_NOSCRIPTSEL: int +CF_NOVERTFONTS: int +SIMULATED_FONTTYPE: int +PRINTER_FONTTYPE: int +SCREEN_FONTTYPE: int +BOLD_FONTTYPE: int +ITALIC_FONTTYPE: int +REGULAR_FONTTYPE: int +OPENTYPE_FONTTYPE: int +TYPE1_FONTTYPE: int +DSIG_FONTTYPE: int +WM_CHOOSEFONT_GETLOGFONT: int +WM_CHOOSEFONT_SETLOGFONT: int +WM_CHOOSEFONT_SETFLAGS: int +LBSELCHSTRINGA: str +SHAREVISTRINGA: str +FILEOKSTRINGA: str +COLOROKSTRINGA: str +SETRGBSTRINGA: str +HELPMSGSTRINGA: str +FINDMSGSTRINGA: str +LBSELCHSTRING: str +SHAREVISTRING: str +FILEOKSTRING: str +COLOROKSTRING: str +SETRGBSTRING: str +HELPMSGSTRING: str +FINDMSGSTRING: str +CD_LBSELNOITEMS: int +CD_LBSELCHANGE: int +CD_LBSELSUB: int +CD_LBSELADD: int +PD_ALLPAGES: int +PD_SELECTION: int +PD_PAGENUMS: int +PD_NOSELECTION: int +PD_NOPAGENUMS: int +PD_COLLATE: int +PD_PRINTTOFILE: int +PD_PRINTSETUP: int +PD_NOWARNING: int +PD_RETURNDC: int +PD_RETURNIC: int +PD_RETURNDEFAULT: int +PD_SHOWHELP: int +PD_ENABLEPRINTHOOK: int +PD_ENABLESETUPHOOK: int +PD_ENABLEPRINTTEMPLATE: int +PD_ENABLESETUPTEMPLATE: int +PD_ENABLEPRINTTEMPLATEHANDLE: int +PD_ENABLESETUPTEMPLATEHANDLE: int +PD_USEDEVMODECOPIES: int +PD_DISABLEPRINTTOFILE: int +PD_HIDEPRINTTOFILE: int +PD_NONETWORKBUTTON: int +DN_DEFAULTPRN: int +WM_PSD_PAGESETUPDLG: int +WM_PSD_FULLPAGERECT: int +WM_PSD_MINMARGINRECT: int +WM_PSD_MARGINRECT: int +WM_PSD_GREEKTEXTRECT: int +WM_PSD_ENVSTAMPRECT: int +WM_PSD_YAFULLPAGERECT: int +PSD_DEFAULTMINMARGINS: int +PSD_INWININIINTLMEASURE: int +PSD_MINMARGINS: int +PSD_MARGINS: int +PSD_INTHOUSANDTHSOFINCHES: int +PSD_INHUNDREDTHSOFMILLIMETERS: int +PSD_DISABLEMARGINS: int +PSD_DISABLEPRINTER: int +PSD_NOWARNING: int +PSD_DISABLEORIENTATION: int +PSD_RETURNDEFAULT: int +PSD_DISABLEPAPER: int +PSD_SHOWHELP: int +PSD_ENABLEPAGESETUPHOOK: int +PSD_ENABLEPAGESETUPTEMPLATE: int +PSD_ENABLEPAGESETUPTEMPLATEHANDLE: int +PSD_ENABLEPAGEPAINTHOOK: int +PSD_DISABLEPAGEPAINTING: int +PSD_NONETWORKBUTTON: int +HKEY_CLASSES_ROOT: int +HKEY_CURRENT_USER: int +HKEY_LOCAL_MACHINE: int +HKEY_USERS: int +HKEY_PERFORMANCE_DATA: int +HKEY_CURRENT_CONFIG: int +HKEY_DYN_DATA: int +HKEY_PERFORMANCE_TEXT: int +HKEY_PERFORMANCE_NLSTEXT: int +HWND_BROADCAST: int +HWND_DESKTOP: int +HWND_TOP: int +HWND_BOTTOM: int +HWND_TOPMOST: int +HWND_NOTOPMOST: int +HWND_MESSAGE: int +SM_CXSCREEN: int +SM_CYSCREEN: int +SM_CXVSCROLL: int +SM_CYHSCROLL: int +SM_CYCAPTION: int +SM_CXBORDER: int +SM_CYBORDER: int +SM_CXDLGFRAME: int +SM_CYDLGFRAME: int +SM_CYVTHUMB: int +SM_CXHTHUMB: int +SM_CXICON: int +SM_CYICON: int +SM_CXCURSOR: int +SM_CYCURSOR: int +SM_CYMENU: int +SM_CXFULLSCREEN: int +SM_CYFULLSCREEN: 
int +SM_CYKANJIWINDOW: int +SM_MOUSEPRESENT: int +SM_CYVSCROLL: int +SM_CXHSCROLL: int +SM_DEBUG: int +SM_SWAPBUTTON: int +SM_RESERVED1: int +SM_RESERVED2: int +SM_RESERVED3: int +SM_RESERVED4: int +SM_CXMIN: int +SM_CYMIN: int +SM_CXSIZE: int +SM_CYSIZE: int +SM_CXFRAME: int +SM_CYFRAME: int +SM_CXMINTRACK: int +SM_CYMINTRACK: int +SM_CXDOUBLECLK: int +SM_CYDOUBLECLK: int +SM_CXICONSPACING: int +SM_CYICONSPACING: int +SM_MENUDROPALIGNMENT: int +SM_PENWINDOWS: int +SM_DBCSENABLED: int +SM_CMOUSEBUTTONS: int +SM_CXFIXEDFRAME: int +SM_CYFIXEDFRAME: int +SM_CXSIZEFRAME: int +SM_CYSIZEFRAME: int +SM_SECURE: int +SM_CXEDGE: int +SM_CYEDGE: int +SM_CXMINSPACING: int +SM_CYMINSPACING: int +SM_CXSMICON: int +SM_CYSMICON: int +SM_CYSMCAPTION: int +SM_CXSMSIZE: int +SM_CYSMSIZE: int +SM_CXMENUSIZE: int +SM_CYMENUSIZE: int +SM_ARRANGE: int +SM_CXMINIMIZED: int +SM_CYMINIMIZED: int +SM_CXMAXTRACK: int +SM_CYMAXTRACK: int +SM_CXMAXIMIZED: int +SM_CYMAXIMIZED: int +SM_NETWORK: int +SM_CLEANBOOT: int +SM_CXDRAG: int +SM_CYDRAG: int +SM_SHOWSOUNDS: int +SM_CXMENUCHECK: int +SM_CYMENUCHECK: int +SM_SLOWMACHINE: int +SM_MIDEASTENABLED: int +SM_MOUSEWHEELPRESENT: int +SM_XVIRTUALSCREEN: int +SM_YVIRTUALSCREEN: int +SM_CXVIRTUALSCREEN: int +SM_CYVIRTUALSCREEN: int +SM_CMONITORS: int +SM_SAMEDISPLAYFORMAT: int +SM_CMETRICS: int +MNC_IGNORE: int +MNC_CLOSE: int +MNC_EXECUTE: int +MNC_SELECT: int +MNS_NOCHECK: int +MNS_MODELESS: int +MNS_DRAGDROP: int +MNS_AUTODISMISS: int +MNS_NOTIFYBYPOS: int +MNS_CHECKORBMP: int +MIM_MAXHEIGHT: int +MIM_BACKGROUND: int +MIM_HELPID: int +MIM_MENUDATA: int +MIM_STYLE: int +MIM_APPLYTOSUBMENUS: int +MND_CONTINUE: int +MND_ENDMENU: int +MNGOF_GAP: int +MNGO_NOINTERFACE: int +MNGO_NOERROR: int +MIIM_STATE: int +MIIM_ID: int +MIIM_SUBMENU: int +MIIM_CHECKMARKS: int +MIIM_TYPE: int +MIIM_DATA: int +MIIM_STRING: int +MIIM_BITMAP: int +MIIM_FTYPE: int +HBMMENU_CALLBACK: int +HBMMENU_SYSTEM: int +HBMMENU_MBAR_RESTORE: int +HBMMENU_MBAR_MINIMIZE: int +HBMMENU_MBAR_CLOSE: int +HBMMENU_MBAR_CLOSE_D: int +HBMMENU_MBAR_MINIMIZE_D: int +HBMMENU_POPUP_CLOSE: int +HBMMENU_POPUP_RESTORE: int +HBMMENU_POPUP_MAXIMIZE: int +HBMMENU_POPUP_MINIMIZE: int +GMDI_USEDISABLED: int +GMDI_GOINTOPOPUPS: int +TPM_LEFTBUTTON: int +TPM_RIGHTBUTTON: int +TPM_LEFTALIGN: int +TPM_CENTERALIGN: int +TPM_RIGHTALIGN: int +TPM_TOPALIGN: int +TPM_VCENTERALIGN: int +TPM_BOTTOMALIGN: int +TPM_HORIZONTAL: int +TPM_VERTICAL: int +TPM_NONOTIFY: int +TPM_RETURNCMD: int +TPM_RECURSE: int +DOF_EXECUTABLE: int +DOF_DOCUMENT: int +DOF_DIRECTORY: int +DOF_MULTIPLE: int +DOF_PROGMAN: int +DOF_SHELLDATA: int +DO_DROPFILE: int +DO_PRINTFILE: int +DT_TOP: int +DT_LEFT: int +DT_CENTER: int +DT_RIGHT: int +DT_VCENTER: int +DT_BOTTOM: int +DT_WORDBREAK: int +DT_SINGLELINE: int +DT_EXPANDTABS: int +DT_TABSTOP: int +DT_NOCLIP: int +DT_EXTERNALLEADING: int +DT_CALCRECT: int +DT_NOPREFIX: int +DT_INTERNAL: int +DT_EDITCONTROL: int +DT_PATH_ELLIPSIS: int +DT_END_ELLIPSIS: int +DT_MODIFYSTRING: int +DT_RTLREADING: int +DT_WORD_ELLIPSIS: int +DST_COMPLEX: int +DST_TEXT: int +DST_PREFIXTEXT: int +DST_ICON: int +DST_BITMAP: int +DSS_NORMAL: int +DSS_UNION: int +DSS_DISABLED: int +DSS_MONO: int +DSS_RIGHT: int +DCX_WINDOW: int +DCX_CACHE: int +DCX_NORESETATTRS: int +DCX_CLIPCHILDREN: int +DCX_CLIPSIBLINGS: int +DCX_PARENTCLIP: int +DCX_EXCLUDERGN: int +DCX_INTERSECTRGN: int +DCX_EXCLUDEUPDATE: int +DCX_INTERSECTUPDATE: int +DCX_LOCKWINDOWUPDATE: int +DCX_VALIDATE: int +CUDR_NORMAL: int +CUDR_NOSNAPTOGRID: int +CUDR_NORESOLVEPOSITIONS: int 
+CUDR_NOCLOSEGAPS: int +CUDR_NEGATIVECOORDS: int +CUDR_NOPRIMARY: int +RDW_INVALIDATE: int +RDW_INTERNALPAINT: int +RDW_ERASE: int +RDW_VALIDATE: int +RDW_NOINTERNALPAINT: int +RDW_NOERASE: int +RDW_NOCHILDREN: int +RDW_ALLCHILDREN: int +RDW_UPDATENOW: int +RDW_ERASENOW: int +RDW_FRAME: int +RDW_NOFRAME: int +SW_SCROLLCHILDREN: int +SW_INVALIDATE: int +SW_ERASE: int +SW_SMOOTHSCROLL: int +ESB_ENABLE_BOTH: int +ESB_DISABLE_BOTH: int +ESB_DISABLE_LEFT: int +ESB_DISABLE_RIGHT: int +ESB_DISABLE_UP: int +ESB_DISABLE_DOWN: int +ESB_DISABLE_LTUP: int +ESB_DISABLE_RTDN: int +HELPINFO_WINDOW: int +HELPINFO_MENUITEM: int +MB_OK: int +MB_OKCANCEL: int +MB_ABORTRETRYIGNORE: int +MB_YESNOCANCEL: int +MB_YESNO: int +MB_RETRYCANCEL: int +MB_ICONHAND: int +MB_ICONQUESTION: int +MB_ICONEXCLAMATION: int +MB_ICONASTERISK: int +MB_ICONWARNING: int +MB_ICONERROR: int +MB_ICONINFORMATION: int +MB_ICONSTOP: int +MB_DEFBUTTON1: int +MB_DEFBUTTON2: int +MB_DEFBUTTON3: int +MB_DEFBUTTON4: int +MB_APPLMODAL: int +MB_SYSTEMMODAL: int +MB_TASKMODAL: int +MB_HELP: int +MB_NOFOCUS: int +MB_SETFOREGROUND: int +MB_DEFAULT_DESKTOP_ONLY: int +MB_TOPMOST: int +MB_RIGHT: int +MB_RTLREADING: int +MB_SERVICE_NOTIFICATION: int +MB_TYPEMASK: int +MB_USERICON: int +MB_ICONMASK: int +MB_DEFMASK: int +MB_MODEMASK: int +MB_MISCMASK: int +CWP_ALL: int +CWP_SKIPINVISIBLE: int +CWP_SKIPDISABLED: int +CWP_SKIPTRANSPARENT: int +CTLCOLOR_MSGBOX: int +CTLCOLOR_EDIT: int +CTLCOLOR_BTN: int +CTLCOLOR_DLG: int +CTLCOLOR_SCROLLBAR: int +CTLCOLOR_STATIC: int +CTLCOLOR_MAX: int +COLOR_SCROLLBAR: int +COLOR_BACKGROUND: int +COLOR_ACTIVECAPTION: int +COLOR_INACTIVECAPTION: int +COLOR_MENU: int +COLOR_WINDOW: int +COLOR_WINDOWFRAME: int +COLOR_MENUTEXT: int +COLOR_WINDOWTEXT: int +COLOR_CAPTIONTEXT: int +COLOR_ACTIVEBORDER: int +COLOR_INACTIVEBORDER: int +COLOR_APPWORKSPACE: int +COLOR_HIGHLIGHT: int +COLOR_HIGHLIGHTTEXT: int +COLOR_BTNFACE: int +COLOR_BTNSHADOW: int +COLOR_GRAYTEXT: int +COLOR_BTNTEXT: int +COLOR_INACTIVECAPTIONTEXT: int +COLOR_BTNHIGHLIGHT: int +COLOR_3DDKSHADOW: int +COLOR_3DLIGHT: int +COLOR_INFOTEXT: int +COLOR_INFOBK: int +COLOR_HOTLIGHT: int +COLOR_GRADIENTACTIVECAPTION: int +COLOR_GRADIENTINACTIVECAPTION: int +COLOR_DESKTOP: int +COLOR_3DFACE: int +COLOR_3DSHADOW: int +COLOR_3DHIGHLIGHT: int +COLOR_3DHILIGHT: int +COLOR_BTNHILIGHT: int +GW_HWNDFIRST: int +GW_HWNDLAST: int +GW_HWNDNEXT: int +GW_HWNDPREV: int +GW_OWNER: int +GW_CHILD: int +GW_ENABLEDPOPUP: int +GW_MAX: int +MF_INSERT: int +MF_CHANGE: int +MF_APPEND: int +MF_DELETE: int +MF_REMOVE: int +MF_BYCOMMAND: int +MF_BYPOSITION: int +MF_SEPARATOR: int +MF_ENABLED: int +MF_GRAYED: int +MF_DISABLED: int +MF_UNCHECKED: int +MF_CHECKED: int +MF_USECHECKBITMAPS: int +MF_STRING: int +MF_BITMAP: int +MF_OWNERDRAW: int +MF_POPUP: int +MF_MENUBARBREAK: int +MF_MENUBREAK: int +MF_UNHILITE: int +MF_HILITE: int +MF_DEFAULT: int +MF_SYSMENU: int +MF_HELP: int +MF_RIGHTJUSTIFY: int +MF_MOUSESELECT: int +MF_END: int +MFT_STRING: int +MFT_BITMAP: int +MFT_MENUBARBREAK: int +MFT_MENUBREAK: int +MFT_OWNERDRAW: int +MFT_RADIOCHECK: int +MFT_SEPARATOR: int +MFT_RIGHTORDER: int +MFT_RIGHTJUSTIFY: int +MFS_GRAYED: int +MFS_DISABLED: int +MFS_CHECKED: int +MFS_HILITE: int +MFS_ENABLED: int +MFS_UNCHECKED: int +MFS_UNHILITE: int +MFS_DEFAULT: int +MFS_MASK: int +MFS_HOTTRACKDRAWN: int +MFS_CACHEDBMP: int +MFS_BOTTOMGAPDROP: int +MFS_TOPGAPDROP: int +MFS_GAPDROP: int +SC_SIZE: int +SC_MOVE: int +SC_MINIMIZE: int +SC_MAXIMIZE: int +SC_NEXTWINDOW: int +SC_PREVWINDOW: int +SC_CLOSE: int 
+SC_VSCROLL: int +SC_HSCROLL: int +SC_MOUSEMENU: int +SC_KEYMENU: int +SC_ARRANGE: int +SC_RESTORE: int +SC_SCREENSAVE: int +SC_HOTKEY: int +SC_DEFAULT: int +SC_MONITORPOWER: int +SC_CONTEXTHELP: int +SC_SEPARATOR: int +SC_ICON: int +SC_ZOOM: int +IDC_ARROW: int +IDC_IBEAM: int +IDC_WAIT: int +IDC_CROSS: int +IDC_UPARROW: int +IDC_SIZE: int +IDC_ICON: int +IDC_SIZENWSE: int +IDC_SIZENESW: int +IDC_SIZEWE: int +IDC_SIZENS: int +IDC_SIZEALL: int +IDC_NO: int +IDC_HAND: int +IDC_APPSTARTING: int +IDC_HELP: int +IMAGE_BITMAP: int +IMAGE_ICON: int +IMAGE_CURSOR: int +IMAGE_ENHMETAFILE: int +LR_DEFAULTCOLOR: int +LR_MONOCHROME: int +LR_COLOR: int +LR_COPYRETURNORG: int +LR_COPYDELETEORG: int +LR_LOADFROMFILE: int +LR_LOADTRANSPARENT: int +LR_DEFAULTSIZE: int +LR_LOADREALSIZE: int +LR_LOADMAP3DCOLORS: int +LR_CREATEDIBSECTION: int +LR_COPYFROMRESOURCE: int +LR_SHARED: int +DI_MASK: int +DI_IMAGE: int +DI_NORMAL: int +DI_COMPAT: int +DI_DEFAULTSIZE: int +RES_ICON: int +RES_CURSOR: int +OBM_CLOSE: int +OBM_UPARROW: int +OBM_DNARROW: int +OBM_RGARROW: int +OBM_LFARROW: int +OBM_REDUCE: int +OBM_ZOOM: int +OBM_RESTORE: int +OBM_REDUCED: int +OBM_ZOOMD: int +OBM_RESTORED: int +OBM_UPARROWD: int +OBM_DNARROWD: int +OBM_RGARROWD: int +OBM_LFARROWD: int +OBM_MNARROW: int +OBM_COMBO: int +OBM_UPARROWI: int +OBM_DNARROWI: int +OBM_RGARROWI: int +OBM_LFARROWI: int +OBM_OLD_CLOSE: int +OBM_SIZE: int +OBM_OLD_UPARROW: int +OBM_OLD_DNARROW: int +OBM_OLD_RGARROW: int +OBM_OLD_LFARROW: int +OBM_BTSIZE: int +OBM_CHECK: int +OBM_CHECKBOXES: int +OBM_BTNCORNERS: int +OBM_OLD_REDUCE: int +OBM_OLD_ZOOM: int +OBM_OLD_RESTORE: int +OCR_NORMAL: int +OCR_IBEAM: int +OCR_WAIT: int +OCR_CROSS: int +OCR_UP: int +OCR_SIZE: int +OCR_ICON: int +OCR_SIZENWSE: int +OCR_SIZENESW: int +OCR_SIZEWE: int +OCR_SIZENS: int +OCR_SIZEALL: int +OCR_ICOCUR: int +OCR_NO: int +OCR_HAND: int +OCR_APPSTARTING: int +OIC_SAMPLE: int +OIC_HAND: int +OIC_QUES: int +OIC_BANG: int +OIC_NOTE: int +OIC_WINLOGO: int +OIC_WARNING: int +OIC_ERROR: int +OIC_INFORMATION: int +ORD_LANGDRIVER: int +IDI_APPLICATION: int +IDI_HAND: int +IDI_QUESTION: int +IDI_EXCLAMATION: int +IDI_ASTERISK: int +IDI_WINLOGO: int +IDI_WARNING: int +IDI_ERROR: int +IDI_INFORMATION: int +IDOK: int +IDCANCEL: int +IDABORT: int +IDRETRY: int +IDIGNORE: int +IDYES: int +IDNO: int +IDCLOSE: int +IDHELP: int +ES_LEFT: int +ES_CENTER: int +ES_RIGHT: int +ES_MULTILINE: int +ES_UPPERCASE: int +ES_LOWERCASE: int +ES_PASSWORD: int +ES_AUTOVSCROLL: int +ES_AUTOHSCROLL: int +ES_NOHIDESEL: int +ES_OEMCONVERT: int +ES_READONLY: int +ES_WANTRETURN: int +ES_NUMBER: int +EN_SETFOCUS: int +EN_KILLFOCUS: int +EN_CHANGE: int +EN_UPDATE: int +EN_ERRSPACE: int +EN_MAXTEXT: int +EN_HSCROLL: int +EN_VSCROLL: int +EC_LEFTMARGIN: int +EC_RIGHTMARGIN: int +EC_USEFONTINFO: int +EMSIS_COMPOSITIONSTRING: int +EIMES_GETCOMPSTRATONCE: int +EIMES_CANCELCOMPSTRINFOCUS: int +EIMES_COMPLETECOMPSTRKILLFOCUS: int +EM_GETSEL: int +EM_SETSEL: int +EM_GETRECT: int +EM_SETRECT: int +EM_SETRECTNP: int +EM_SCROLL: int +EM_LINESCROLL: int +EM_SCROLLCARET: int +EM_GETMODIFY: int +EM_SETMODIFY: int +EM_GETLINECOUNT: int +EM_LINEINDEX: int +EM_SETHANDLE: int +EM_GETHANDLE: int +EM_GETTHUMB: int +EM_LINELENGTH: int +EM_REPLACESEL: int +EM_GETLINE: int +EM_LIMITTEXT: int +EM_CANUNDO: int +EM_UNDO: int +EM_FMTLINES: int +EM_LINEFROMCHAR: int +EM_SETTABSTOPS: int +EM_SETPASSWORDCHAR: int +EM_EMPTYUNDOBUFFER: int +EM_GETFIRSTVISIBLELINE: int +EM_SETREADONLY: int +EM_SETWORDBREAKPROC: int +EM_GETWORDBREAKPROC: int 
+EM_GETPASSWORDCHAR: int +EM_SETMARGINS: int +EM_GETMARGINS: int +EM_SETLIMITTEXT: int +EM_GETLIMITTEXT: int +EM_POSFROMCHAR: int +EM_CHARFROMPOS: int +EM_SETIMESTATUS: int +EM_GETIMESTATUS: int +WB_LEFT: int +WB_RIGHT: int +WB_ISDELIMITER: int +BS_PUSHBUTTON: int +BS_DEFPUSHBUTTON: int +BS_CHECKBOX: int +BS_AUTOCHECKBOX: int +BS_RADIOBUTTON: int +BS_3STATE: int +BS_AUTO3STATE: int +BS_GROUPBOX: int +BS_USERBUTTON: int +BS_AUTORADIOBUTTON: int +BS_OWNERDRAW: int +BS_LEFTTEXT: int +BS_TEXT: int +BS_ICON: int +BS_BITMAP: int +BS_LEFT: int +BS_RIGHT: int +BS_CENTER: int +BS_TOP: int +BS_BOTTOM: int +BS_VCENTER: int +BS_PUSHLIKE: int +BS_MULTILINE: int +BS_NOTIFY: int +BS_FLAT: int +BS_RIGHTBUTTON: int +BN_CLICKED: int +BN_PAINT: int +BN_HILITE: int +BN_UNHILITE: int +BN_DISABLE: int +BN_DOUBLECLICKED: int +BN_PUSHED: int +BN_UNPUSHED: int +BN_DBLCLK: int +BN_SETFOCUS: int +BN_KILLFOCUS: int +BM_GETCHECK: int +BM_SETCHECK: int +BM_GETSTATE: int +BM_SETSTATE: int +BM_SETSTYLE: int +BM_CLICK: int +BM_GETIMAGE: int +BM_SETIMAGE: int +BST_UNCHECKED: int +BST_CHECKED: int +BST_INDETERMINATE: int +BST_PUSHED: int +BST_FOCUS: int +SS_LEFT: int +SS_CENTER: int +SS_RIGHT: int +SS_ICON: int +SS_BLACKRECT: int +SS_GRAYRECT: int +SS_WHITERECT: int +SS_BLACKFRAME: int +SS_GRAYFRAME: int +SS_WHITEFRAME: int +SS_USERITEM: int +SS_SIMPLE: int +SS_LEFTNOWORDWRAP: int +SS_BITMAP: int +SS_OWNERDRAW: int +SS_ENHMETAFILE: int +SS_ETCHEDHORZ: int +SS_ETCHEDVERT: int +SS_ETCHEDFRAME: int +SS_TYPEMASK: int +SS_NOPREFIX: int +SS_NOTIFY: int +SS_CENTERIMAGE: int +SS_RIGHTJUST: int +SS_REALSIZEIMAGE: int +SS_SUNKEN: int +SS_ENDELLIPSIS: int +SS_PATHELLIPSIS: int +SS_WORDELLIPSIS: int +SS_ELLIPSISMASK: int +STM_SETICON: int +STM_GETICON: int +STM_SETIMAGE: int +STM_GETIMAGE: int +STN_CLICKED: int +STN_DBLCLK: int +STN_ENABLE: int +STN_DISABLE: int +STM_MSGMAX: int +DWL_MSGRESULT: int +DWL_DLGPROC: int +DWL_USER: int +DDL_READWRITE: int +DDL_READONLY: int +DDL_HIDDEN: int +DDL_SYSTEM: int +DDL_DIRECTORY: int +DDL_ARCHIVE: int +DDL_POSTMSGS: int +DDL_DRIVES: int +DDL_EXCLUSIVE: int +RT_CURSOR: int +RT_BITMAP: int +RT_ICON: int +RT_MENU: int +RT_DIALOG: int +RT_STRING: int +RT_FONTDIR: int +RT_FONT: int +RT_ACCELERATOR: int +RT_RCDATA: int +RT_MESSAGETABLE: int +DIFFERENCE: int +RT_GROUP_CURSOR: int +RT_GROUP_ICON: int +RT_VERSION: int +RT_DLGINCLUDE: int +RT_PLUGPLAY: int +RT_VXD: int +RT_ANICURSOR: int +RT_ANIICON: int +RT_HTML: int +SB_HORZ: int +SB_VERT: int +SB_CTL: int +SB_BOTH: int +SB_LINEUP: int +SB_LINELEFT: int +SB_LINEDOWN: int +SB_LINERIGHT: int +SB_PAGEUP: int +SB_PAGELEFT: int +SB_PAGEDOWN: int +SB_PAGERIGHT: int +SB_THUMBPOSITION: int +SB_THUMBTRACK: int +SB_TOP: int +SB_LEFT: int +SB_BOTTOM: int +SB_RIGHT: int +SB_ENDSCROLL: int +SW_HIDE: int +SW_SHOWNORMAL: int +SW_NORMAL: int +SW_SHOWMINIMIZED: int +SW_SHOWMAXIMIZED: int +SW_MAXIMIZE: int +SW_SHOWNOACTIVATE: int +SW_SHOW: int +SW_MINIMIZE: int +SW_SHOWMINNOACTIVE: int +SW_SHOWNA: int +SW_RESTORE: int +SW_SHOWDEFAULT: int +SW_FORCEMINIMIZE: int +SW_MAX: int +HIDE_WINDOW: int +SHOW_OPENWINDOW: int +SHOW_ICONWINDOW: int +SHOW_FULLSCREEN: int +SHOW_OPENNOACTIVATE: int +SW_PARENTCLOSING: int +SW_OTHERZOOM: int +SW_PARENTOPENING: int +SW_OTHERUNZOOM: int +AW_HOR_POSITIVE: int +AW_HOR_NEGATIVE: int +AW_VER_POSITIVE: int +AW_VER_NEGATIVE: int +AW_CENTER: int +AW_HIDE: int +AW_ACTIVATE: int +AW_SLIDE: int +AW_BLEND: int +KF_EXTENDED: int +KF_DLGMODE: int +KF_MENUMODE: int +KF_ALTDOWN: int +KF_REPEAT: int +KF_UP: int +VK_LBUTTON: int +VK_RBUTTON: int +VK_CANCEL: 
int +VK_MBUTTON: int +VK_BACK: int +VK_TAB: int +VK_CLEAR: int +VK_RETURN: int +VK_SHIFT: int +VK_CONTROL: int +VK_MENU: int +VK_PAUSE: int +VK_CAPITAL: int +VK_KANA: int +VK_HANGEUL: int +VK_HANGUL: int +VK_JUNJA: int +VK_FINAL: int +VK_HANJA: int +VK_KANJI: int +VK_ESCAPE: int +VK_CONVERT: int +VK_NONCONVERT: int +VK_ACCEPT: int +VK_MODECHANGE: int +VK_SPACE: int +VK_PRIOR: int +VK_NEXT: int +VK_END: int +VK_HOME: int +VK_LEFT: int +VK_UP: int +VK_RIGHT: int +VK_DOWN: int +VK_SELECT: int +VK_PRINT: int +VK_EXECUTE: int +VK_SNAPSHOT: int +VK_INSERT: int +VK_DELETE: int +VK_HELP: int +VK_LWIN: int +VK_RWIN: int +VK_APPS: int +VK_NUMPAD0: int +VK_NUMPAD1: int +VK_NUMPAD2: int +VK_NUMPAD3: int +VK_NUMPAD4: int +VK_NUMPAD5: int +VK_NUMPAD6: int +VK_NUMPAD7: int +VK_NUMPAD8: int +VK_NUMPAD9: int +VK_MULTIPLY: int +VK_ADD: int +VK_SEPARATOR: int +VK_SUBTRACT: int +VK_DECIMAL: int +VK_DIVIDE: int +VK_F1: int +VK_F2: int +VK_F3: int +VK_F4: int +VK_F5: int +VK_F6: int +VK_F7: int +VK_F8: int +VK_F9: int +VK_F10: int +VK_F11: int +VK_F12: int +VK_F13: int +VK_F14: int +VK_F15: int +VK_F16: int +VK_F17: int +VK_F18: int +VK_F19: int +VK_F20: int +VK_F21: int +VK_F22: int +VK_F23: int +VK_F24: int +VK_NUMLOCK: int +VK_SCROLL: int +VK_LSHIFT: int +VK_RSHIFT: int +VK_LCONTROL: int +VK_RCONTROL: int +VK_LMENU: int +VK_RMENU: int +VK_PROCESSKEY: int +VK_ATTN: int +VK_CRSEL: int +VK_EXSEL: int +VK_EREOF: int +VK_PLAY: int +VK_ZOOM: int +VK_NONAME: int +VK_PA1: int +VK_OEM_CLEAR: int +MOUSEEVENTF_XDOWN: int +MOUSEEVENTF_XUP: int +MOUSEEVENTF_WHEEL: int +VK_XBUTTON1: int +VK_XBUTTON2: int +VK_VOLUME_MUTE: int +VK_VOLUME_DOWN: int +VK_VOLUME_UP: int +VK_MEDIA_NEXT_TRACK: int +VK_MEDIA_PREV_TRACK: int +VK_MEDIA_PLAY_PAUSE: int +VK_BROWSER_BACK: int +VK_BROWSER_FORWARD: int +WH_MIN: int +WH_MSGFILTER: int +WH_JOURNALRECORD: int +WH_JOURNALPLAYBACK: int +WH_KEYBOARD: int +WH_GETMESSAGE: int +WH_CALLWNDPROC: int +WH_CBT: int +WH_SYSMSGFILTER: int +WH_MOUSE: int +WH_HARDWARE: int +WH_DEBUG: int +WH_SHELL: int +WH_FOREGROUNDIDLE: int +WH_CALLWNDPROCRET: int +WH_KEYBOARD_LL: int +WH_MOUSE_LL: int +WH_MAX: int +WH_MINHOOK: int +WH_MAXHOOK: int +HC_ACTION: int +HC_GETNEXT: int +HC_SKIP: int +HC_NOREMOVE: int +HC_NOREM: int +HC_SYSMODALON: int +HC_SYSMODALOFF: int +HCBT_MOVESIZE: int +HCBT_MINMAX: int +HCBT_QS: int +HCBT_CREATEWND: int +HCBT_DESTROYWND: int +HCBT_ACTIVATE: int +HCBT_CLICKSKIPPED: int +HCBT_KEYSKIPPED: int +HCBT_SYSCOMMAND: int +HCBT_SETFOCUS: int +MSGF_DIALOGBOX: int +MSGF_MESSAGEBOX: int +MSGF_MENU: int +MSGF_SCROLLBAR: int +MSGF_NEXTWINDOW: int +MSGF_MAX: int +MSGF_USER: int +HSHELL_WINDOWCREATED: int +HSHELL_WINDOWDESTROYED: int +HSHELL_ACTIVATESHELLWINDOW: int +HSHELL_WINDOWACTIVATED: int +HSHELL_GETMINRECT: int +HSHELL_REDRAW: int +HSHELL_TASKMAN: int +HSHELL_LANGUAGE: int +HSHELL_ACCESSIBILITYSTATE: int +ACCESS_STICKYKEYS: int +ACCESS_FILTERKEYS: int +ACCESS_MOUSEKEYS: int +LLKHF_EXTENDED: int +LLKHF_INJECTED: int +LLKHF_ALTDOWN: int +LLKHF_UP: int +LLKHF_LOWER_IL_INJECTED: int +LLMHF_INJECTED: int +LLMHF_LOWER_IL_INJECTED: int +HKL_PREV: int +HKL_NEXT: int +KLF_ACTIVATE: int +KLF_SUBSTITUTE_OK: int +KLF_UNLOADPREVIOUS: int +KLF_REORDER: int +KLF_REPLACELANG: int +KLF_NOTELLSHELL: int +KLF_SETFORPROCESS: int +KL_NAMELENGTH: int +DESKTOP_READOBJECTS: int +DESKTOP_CREATEWINDOW: int +DESKTOP_CREATEMENU: int +DESKTOP_HOOKCONTROL: int +DESKTOP_JOURNALRECORD: int +DESKTOP_JOURNALPLAYBACK: int +DESKTOP_ENUMERATE: int +DESKTOP_WRITEOBJECTS: int +DESKTOP_SWITCHDESKTOP: int +DF_ALLOWOTHERACCOUNTHOOK: int 
+WINSTA_ENUMDESKTOPS: int +WINSTA_READATTRIBUTES: int +WINSTA_ACCESSCLIPBOARD: int +WINSTA_CREATEDESKTOP: int +WINSTA_WRITEATTRIBUTES: int +WINSTA_ACCESSGLOBALATOMS: int +WINSTA_EXITWINDOWS: int +WINSTA_ENUMERATE: int +WINSTA_READSCREEN: int +WSF_VISIBLE: int +UOI_FLAGS: int +UOI_NAME: int +UOI_TYPE: int +UOI_USER_SID: int +GWL_WNDPROC: int +GWL_HINSTANCE: int +GWL_HWNDPARENT: int +GWL_STYLE: int +GWL_EXSTYLE: int +GWL_USERDATA: int +GWL_ID: int +GCL_MENUNAME: int +GCL_HBRBACKGROUND: int +GCL_HCURSOR: int +GCL_HICON: int +GCL_HMODULE: int +GCL_CBWNDEXTRA: int +GCL_CBCLSEXTRA: int +GCL_WNDPROC: int +GCL_STYLE: int +GCW_ATOM: int +GCL_HICONSM: int +WM_NULL: int +WM_CREATE: int +WM_DESTROY: int +WM_MOVE: int +WM_SIZE: int +WM_ACTIVATE: int +WA_INACTIVE: int +WA_ACTIVE: int +WA_CLICKACTIVE: int +WM_SETFOCUS: int +WM_KILLFOCUS: int +WM_ENABLE: int +WM_SETREDRAW: int +WM_SETTEXT: int +WM_GETTEXT: int +WM_GETTEXTLENGTH: int +WM_PAINT: int +WM_CLOSE: int +WM_QUERYENDSESSION: int +WM_QUIT: int +WM_QUERYOPEN: int +WM_ERASEBKGND: int +WM_SYSCOLORCHANGE: int +WM_ENDSESSION: int +WM_SHOWWINDOW: int +WM_WININICHANGE: int +WM_SETTINGCHANGE: int +WM_DEVMODECHANGE: int +WM_ACTIVATEAPP: int +WM_FONTCHANGE: int +WM_TIMECHANGE: int +WM_CANCELMODE: int +WM_SETCURSOR: int +WM_MOUSEACTIVATE: int +WM_CHILDACTIVATE: int +WM_QUEUESYNC: int +WM_GETMINMAXINFO: int +WM_PAINTICON: int +WM_ICONERASEBKGND: int +WM_NEXTDLGCTL: int +WM_SPOOLERSTATUS: int +WM_DRAWITEM: int +WM_MEASUREITEM: int +WM_DELETEITEM: int +WM_VKEYTOITEM: int +WM_CHARTOITEM: int +WM_SETFONT: int +WM_GETFONT: int +WM_SETHOTKEY: int +WM_GETHOTKEY: int +WM_QUERYDRAGICON: int +WM_COMPAREITEM: int +WM_GETOBJECT: int +WM_COMPACTING: int +WM_COMMNOTIFY: int +WM_WINDOWPOSCHANGING: int +WM_WINDOWPOSCHANGED: int +WM_POWER: int +PWR_OK: int +PWR_FAIL: int +PWR_SUSPENDREQUEST: int +PWR_SUSPENDRESUME: int +PWR_CRITICALRESUME: int +WM_COPYDATA: int +WM_CANCELJOURNAL: int +WM_NOTIFY: int +WM_INPUTLANGCHANGEREQUEST: int +WM_INPUTLANGCHANGE: int +WM_TCARD: int +WM_HELP: int +WM_USERCHANGED: int +WM_NOTIFYFORMAT: int +NFR_ANSI: int +NFR_UNICODE: int +NF_QUERY: int +NF_REQUERY: int +WM_CONTEXTMENU: int +WM_STYLECHANGING: int +WM_STYLECHANGED: int +WM_DISPLAYCHANGE: int +WM_GETICON: int +WM_SETICON: int +WM_NCCREATE: int +WM_NCDESTROY: int +WM_NCCALCSIZE: int +WM_NCHITTEST: int +WM_NCPAINT: int +WM_NCACTIVATE: int +WM_GETDLGCODE: int +WM_SYNCPAINT: int +WM_NCMOUSEMOVE: int +WM_NCLBUTTONDOWN: int +WM_NCLBUTTONUP: int +WM_NCLBUTTONDBLCLK: int +WM_NCRBUTTONDOWN: int +WM_NCRBUTTONUP: int +WM_NCRBUTTONDBLCLK: int +WM_NCMBUTTONDOWN: int +WM_NCMBUTTONUP: int +WM_NCMBUTTONDBLCLK: int +WM_KEYFIRST: int +WM_KEYDOWN: int +WM_KEYUP: int +WM_CHAR: int +WM_DEADCHAR: int +WM_SYSKEYDOWN: int +WM_SYSKEYUP: int +WM_SYSCHAR: int +WM_SYSDEADCHAR: int +WM_KEYLAST: int +WM_IME_STARTCOMPOSITION: int +WM_IME_ENDCOMPOSITION: int +WM_IME_COMPOSITION: int +WM_IME_KEYLAST: int +WM_INITDIALOG: int +WM_COMMAND: int +WM_SYSCOMMAND: int +WM_TIMER: int +WM_HSCROLL: int +WM_VSCROLL: int +WM_INITMENU: int +WM_INITMENUPOPUP: int +WM_MENUSELECT: int +WM_MENUCHAR: int +WM_ENTERIDLE: int +WM_MENURBUTTONUP: int +WM_MENUDRAG: int +WM_MENUGETOBJECT: int +WM_UNINITMENUPOPUP: int +WM_MENUCOMMAND: int +WM_CTLCOLORMSGBOX: int +WM_CTLCOLOREDIT: int +WM_CTLCOLORBTN: int +WM_CTLCOLORDLG: int +WM_CTLCOLORSCROLLBAR: int +WM_CTLCOLORSTATIC: int +WM_MOUSEFIRST: int +WM_MOUSEMOVE: int +WM_LBUTTONDOWN: int +WM_LBUTTONUP: int +WM_LBUTTONDBLCLK: int +WM_RBUTTONDOWN: int +WM_RBUTTONUP: int +WM_RBUTTONDBLCLK: int 
+WM_MBUTTONDOWN: int +WM_MBUTTONUP: int +WM_MBUTTONDBLCLK: int +WM_MOUSEWHEEL: int +WM_MOUSELAST: int +WHEEL_DELTA: int +WHEEL_PAGESCROLL: int +WM_PARENTNOTIFY: int +MENULOOP_WINDOW: int +MENULOOP_POPUP: int +WM_ENTERMENULOOP: int +WM_EXITMENULOOP: int +WM_NEXTMENU: int +WM_SIZING: int +WM_CAPTURECHANGED: int +WM_MOVING: int +WM_POWERBROADCAST: int +PBT_APMQUERYSUSPEND: int +PBT_APMQUERYSTANDBY: int +PBT_APMQUERYSUSPENDFAILED: int +PBT_APMQUERYSTANDBYFAILED: int +PBT_APMSUSPEND: int +PBT_APMSTANDBY: int +PBT_APMRESUMECRITICAL: int +PBT_APMRESUMESUSPEND: int +PBT_APMRESUMESTANDBY: int +PBTF_APMRESUMEFROMFAILURE: int +PBT_APMBATTERYLOW: int +PBT_APMPOWERSTATUSCHANGE: int +PBT_APMOEMEVENT: int +PBT_APMRESUMEAUTOMATIC: int +WM_DEVICECHANGE: int +WM_MDICREATE: int +WM_MDIDESTROY: int +WM_MDIACTIVATE: int +WM_MDIRESTORE: int +WM_MDINEXT: int +WM_MDIMAXIMIZE: int +WM_MDITILE: int +WM_MDICASCADE: int +WM_MDIICONARRANGE: int +WM_MDIGETACTIVE: int +WM_MDISETMENU: int +WM_ENTERSIZEMOVE: int +WM_EXITSIZEMOVE: int +WM_DROPFILES: int +WM_MDIREFRESHMENU: int +WM_IME_SETCONTEXT: int +WM_IME_NOTIFY: int +WM_IME_CONTROL: int +WM_IME_COMPOSITIONFULL: int +WM_IME_SELECT: int +WM_IME_CHAR: int +WM_IME_REQUEST: int +WM_IME_KEYDOWN: int +WM_IME_KEYUP: int +WM_MOUSEHOVER: int +WM_MOUSELEAVE: int +WM_CUT: int +WM_COPY: int +WM_PASTE: int +WM_CLEAR: int +WM_UNDO: int +WM_RENDERFORMAT: int +WM_RENDERALLFORMATS: int +WM_DESTROYCLIPBOARD: int +WM_DRAWCLIPBOARD: int +WM_PAINTCLIPBOARD: int +WM_VSCROLLCLIPBOARD: int +WM_SIZECLIPBOARD: int +WM_ASKCBFORMATNAME: int +WM_CHANGECBCHAIN: int +WM_HSCROLLCLIPBOARD: int +WM_QUERYNEWPALETTE: int +WM_PALETTEISCHANGING: int +WM_PALETTECHANGED: int +WM_HOTKEY: int +WM_PRINT: int +WM_PRINTCLIENT: int +WM_HANDHELDFIRST: int +WM_HANDHELDLAST: int +WM_AFXFIRST: int +WM_AFXLAST: int +WM_PENWINFIRST: int +WM_PENWINLAST: int +WM_APP: int +WMSZ_LEFT: int +WMSZ_RIGHT: int +WMSZ_TOP: int +WMSZ_TOPLEFT: int +WMSZ_TOPRIGHT: int +WMSZ_BOTTOM: int +WMSZ_BOTTOMLEFT: int +WMSZ_BOTTOMRIGHT: int +HTERROR: int +HTTRANSPARENT: int +HTNOWHERE: int +HTCLIENT: int +HTCAPTION: int +HTSYSMENU: int +HTGROWBOX: int +HTSIZE: int +HTMENU: int +HTHSCROLL: int +HTVSCROLL: int +HTMINBUTTON: int +HTMAXBUTTON: int +HTLEFT: int +HTRIGHT: int +HTTOP: int +HTTOPLEFT: int +HTTOPRIGHT: int +HTBOTTOM: int +HTBOTTOMLEFT: int +HTBOTTOMRIGHT: int +HTBORDER: int +HTREDUCE: int +HTZOOM: int +HTSIZEFIRST: int +HTSIZELAST: int +HTOBJECT: int +HTCLOSE: int +HTHELP: int +SMTO_NORMAL: int +SMTO_BLOCK: int +SMTO_ABORTIFHUNG: int +SMTO_NOTIMEOUTIFNOTHUNG: int +MA_ACTIVATE: int +MA_ACTIVATEANDEAT: int +MA_NOACTIVATE: int +MA_NOACTIVATEANDEAT: int +ICON_SMALL: int +ICON_BIG: int +SIZE_RESTORED: int +SIZE_MINIMIZED: int +SIZE_MAXIMIZED: int +SIZE_MAXSHOW: int +SIZE_MAXHIDE: int +SIZENORMAL: int +SIZEICONIC: int +SIZEFULLSCREEN: int +SIZEZOOMSHOW: int +SIZEZOOMHIDE: int +WVR_ALIGNTOP: int +WVR_ALIGNLEFT: int +WVR_ALIGNBOTTOM: int +WVR_ALIGNRIGHT: int +WVR_HREDRAW: int +WVR_VREDRAW: int +WVR_REDRAW: int +WVR_VALIDRECTS: int +MK_LBUTTON: int +MK_RBUTTON: int +MK_SHIFT: int +MK_CONTROL: int +MK_MBUTTON: int +TME_HOVER: int +TME_LEAVE: int +TME_QUERY: int +TME_CANCEL: int +HOVER_DEFAULT: int +WS_OVERLAPPED: int +WS_POPUP: int +WS_CHILD: int +WS_MINIMIZE: int +WS_VISIBLE: int +WS_DISABLED: int +WS_CLIPSIBLINGS: int +WS_CLIPCHILDREN: int +WS_MAXIMIZE: int +WS_CAPTION: int +WS_BORDER: int +WS_DLGFRAME: int +WS_VSCROLL: int +WS_HSCROLL: int +WS_SYSMENU: int +WS_THICKFRAME: int +WS_GROUP: int +WS_TABSTOP: int +WS_MINIMIZEBOX: int +WS_MAXIMIZEBOX: 
int +WS_TILED: int +WS_ICONIC: int +WS_SIZEBOX: int +WS_OVERLAPPEDWINDOW: int +WS_POPUPWINDOW: int +WS_CHILDWINDOW: int +WS_TILEDWINDOW: int +WS_EX_DLGMODALFRAME: int +WS_EX_NOPARENTNOTIFY: int +WS_EX_TOPMOST: int +WS_EX_ACCEPTFILES: int +WS_EX_TRANSPARENT: int +WS_EX_MDICHILD: int +WS_EX_TOOLWINDOW: int +WS_EX_WINDOWEDGE: int +WS_EX_CLIENTEDGE: int +WS_EX_CONTEXTHELP: int +WS_EX_RIGHT: int +WS_EX_LEFT: int +WS_EX_RTLREADING: int +WS_EX_LTRREADING: int +WS_EX_LEFTSCROLLBAR: int +WS_EX_RIGHTSCROLLBAR: int +WS_EX_CONTROLPARENT: int +WS_EX_STATICEDGE: int +WS_EX_APPWINDOW: int +WS_EX_OVERLAPPEDWINDOW: int +WS_EX_PALETTEWINDOW: int +WS_EX_LAYERED: int +WS_EX_NOINHERITLAYOUT: int +WS_EX_LAYOUTRTL: int +WS_EX_COMPOSITED: int +WS_EX_NOACTIVATE: int +CS_VREDRAW: int +CS_HREDRAW: int +CS_DBLCLKS: int +CS_OWNDC: int +CS_CLASSDC: int +CS_PARENTDC: int +CS_NOCLOSE: int +CS_SAVEBITS: int +CS_BYTEALIGNCLIENT: int +CS_BYTEALIGNWINDOW: int +CS_GLOBALCLASS: int +CS_IME: int +PRF_CHECKVISIBLE: int +PRF_NONCLIENT: int +PRF_CLIENT: int +PRF_ERASEBKGND: int +PRF_CHILDREN: int +PRF_OWNED: int +BDR_RAISEDOUTER: int +BDR_SUNKENOUTER: int +BDR_RAISEDINNER: int +BDR_SUNKENINNER: int +BDR_OUTER: int +BDR_INNER: int +EDGE_RAISED: int +EDGE_SUNKEN: int +EDGE_ETCHED: int +EDGE_BUMP: int +ISMEX_NOSEND: int +ISMEX_SEND: int +ISMEX_NOTIFY: int +ISMEX_CALLBACK: int +ISMEX_REPLIED: int +CW_USEDEFAULT: int +FLASHW_STOP: int +FLASHW_CAPTION: int +FLASHW_TRAY: int +FLASHW_ALL: int +FLASHW_TIMER: int +FLASHW_TIMERNOFG: int +DS_ABSALIGN: int +DS_SYSMODAL: int +DS_LOCALEDIT: int +DS_SETFONT: int +DS_MODALFRAME: int +DS_NOIDLEMSG: int +DS_SETFOREGROUND: int +DS_3DLOOK: int +DS_FIXEDSYS: int +DS_NOFAILCREATE: int +DS_CONTROL: int +DS_CENTER: int +DS_CENTERMOUSE: int +DS_CONTEXTHELP: int +DM_GETDEFID: int +DM_SETDEFID: int +DM_REPOSITION: int +DC_HASDEFID: int +DLGC_WANTARROWS: int +DLGC_WANTTAB: int +DLGC_WANTALLKEYS: int +DLGC_WANTMESSAGE: int +DLGC_HASSETSEL: int +DLGC_DEFPUSHBUTTON: int +DLGC_UNDEFPUSHBUTTON: int +DLGC_RADIOBUTTON: int +DLGC_WANTCHARS: int +DLGC_STATIC: int +DLGC_BUTTON: int +LB_CTLCODE: int +LB_OKAY: int +LB_ERR: int +LB_ERRSPACE: int +LBN_ERRSPACE: int +LBN_SELCHANGE: int +LBN_DBLCLK: int +LBN_SELCANCEL: int +LBN_SETFOCUS: int +LBN_KILLFOCUS: int +LB_ADDSTRING: int +LB_INSERTSTRING: int +LB_DELETESTRING: int +LB_SELITEMRANGEEX: int +LB_RESETCONTENT: int +LB_SETSEL: int +LB_SETCURSEL: int +LB_GETSEL: int +LB_GETCURSEL: int +LB_GETTEXT: int +LB_GETTEXTLEN: int +LB_GETCOUNT: int +LB_SELECTSTRING: int +LB_DIR: int +LB_GETTOPINDEX: int +LB_FINDSTRING: int +LB_GETSELCOUNT: int +LB_GETSELITEMS: int +LB_SETTABSTOPS: int +LB_GETHORIZONTALEXTENT: int +LB_SETHORIZONTALEXTENT: int +LB_SETCOLUMNWIDTH: int +LB_ADDFILE: int +LB_SETTOPINDEX: int +LB_GETITEMRECT: int +LB_GETITEMDATA: int +LB_SETITEMDATA: int +LB_SELITEMRANGE: int +LB_SETANCHORINDEX: int +LB_GETANCHORINDEX: int +LB_SETCARETINDEX: int +LB_GETCARETINDEX: int +LB_SETITEMHEIGHT: int +LB_GETITEMHEIGHT: int +LB_FINDSTRINGEXACT: int +LB_SETLOCALE: int +LB_GETLOCALE: int +LB_SETCOUNT: int +LB_INITSTORAGE: int +LB_ITEMFROMPOINT: int +LB_MSGMAX: int +LBS_NOTIFY: int +LBS_SORT: int +LBS_NOREDRAW: int +LBS_MULTIPLESEL: int +LBS_OWNERDRAWFIXED: int +LBS_OWNERDRAWVARIABLE: int +LBS_HASSTRINGS: int +LBS_USETABSTOPS: int +LBS_NOINTEGRALHEIGHT: int +LBS_MULTICOLUMN: int +LBS_WANTKEYBOARDINPUT: int +LBS_EXTENDEDSEL: int +LBS_DISABLENOSCROLL: int +LBS_NODATA: int +LBS_NOSEL: int +LBS_STANDARD: int +CB_OKAY: int +CB_ERR: int +CB_ERRSPACE: int +CBN_ERRSPACE: int +CBN_SELCHANGE: int 
+CBN_DBLCLK: int +CBN_SETFOCUS: int +CBN_KILLFOCUS: int +CBN_EDITCHANGE: int +CBN_EDITUPDATE: int +CBN_DROPDOWN: int +CBN_CLOSEUP: int +CBN_SELENDOK: int +CBN_SELENDCANCEL: int +CBS_SIMPLE: int +CBS_DROPDOWN: int +CBS_OWNERDRAWFIXED: int +CBS_OWNERDRAWVARIABLE: int +CBS_AUTOHSCROLL: int +CBS_OEMCONVERT: int +CBS_SORT: int +CBS_HASSTRINGS: int +CBS_NOINTEGRALHEIGHT: int +CBS_DISABLENOSCROLL: int +CBS_UPPERCASE: int +CBS_LOWERCASE: int +CB_GETEDITSEL: int +CB_LIMITTEXT: int +CB_SETEDITSEL: int +CB_ADDSTRING: int +CB_DELETESTRING: int +CB_DIR: int +CB_GETCOUNT: int +CB_GETCURSEL: int +CB_GETLBTEXT: int +CB_GETLBTEXTLEN: int +CB_INSERTSTRING: int +CB_RESETCONTENT: int +CB_FINDSTRING: int +CB_SELECTSTRING: int +CB_SETCURSEL: int +CB_SHOWDROPDOWN: int +CB_GETITEMDATA: int +CB_SETITEMDATA: int +CB_GETDROPPEDCONTROLRECT: int +CB_SETITEMHEIGHT: int +CB_GETITEMHEIGHT: int +CB_SETEXTENDEDUI: int +CB_GETEXTENDEDUI: int +CB_GETDROPPEDSTATE: int +CB_FINDSTRINGEXACT: int +CB_SETLOCALE: int +CB_GETLOCALE: int +CB_GETTOPINDEX: int +CB_SETTOPINDEX: int +CB_GETHORIZONTALEXTENT: int +CB_SETHORIZONTALEXTENT: int +CB_GETDROPPEDWIDTH: int +CB_SETDROPPEDWIDTH: int +CB_INITSTORAGE: int +CB_MSGMAX: int +SBS_HORZ: int +SBS_VERT: int +SBS_TOPALIGN: int +SBS_LEFTALIGN: int +SBS_BOTTOMALIGN: int +SBS_RIGHTALIGN: int +SBS_SIZEBOXTOPLEFTALIGN: int +SBS_SIZEBOXBOTTOMRIGHTALIGN: int +SBS_SIZEBOX: int +SBS_SIZEGRIP: int +SBM_SETPOS: int +SBM_GETPOS: int +SBM_SETRANGE: int +SBM_SETRANGEREDRAW: int +SBM_GETRANGE: int +SBM_ENABLE_ARROWS: int +SBM_SETSCROLLINFO: int +SBM_GETSCROLLINFO: int +SIF_RANGE: int +SIF_PAGE: int +SIF_POS: int +SIF_DISABLENOSCROLL: int +SIF_TRACKPOS: int +SIF_ALL: int +MDIS_ALLCHILDSTYLES: int +MDITILE_VERTICAL: int +MDITILE_HORIZONTAL: int +MDITILE_SKIPDISABLED: int +IMC_GETCANDIDATEPOS: int +IMC_SETCANDIDATEPOS: int +IMC_GETCOMPOSITIONFONT: int +IMC_SETCOMPOSITIONFONT: int +IMC_GETCOMPOSITIONWINDOW: int +IMC_SETCOMPOSITIONWINDOW: int +IMC_GETSTATUSWINDOWPOS: int +IMC_SETSTATUSWINDOWPOS: int +IMC_CLOSESTATUSWINDOW: int +IMC_OPENSTATUSWINDOW: int +DELETE: int +READ_CONTROL: int +WRITE_DAC: int +WRITE_OWNER: int +SYNCHRONIZE: int +STANDARD_RIGHTS_REQUIRED: int +STANDARD_RIGHTS_READ: int +STANDARD_RIGHTS_WRITE: int +STANDARD_RIGHTS_EXECUTE: int +STANDARD_RIGHTS_ALL: int +SPECIFIC_RIGHTS_ALL: int +ACCESS_SYSTEM_SECURITY: int +MAXIMUM_ALLOWED: int +GENERIC_READ: int +GENERIC_WRITE: int +GENERIC_EXECUTE: int +GENERIC_ALL: int +SERVICE_KERNEL_DRIVER: int +SERVICE_FILE_SYSTEM_DRIVER: int +SERVICE_ADAPTER: int +SERVICE_RECOGNIZER_DRIVER: int +SERVICE_DRIVER: int +SERVICE_WIN32_OWN_PROCESS: int +SERVICE_WIN32_SHARE_PROCESS: int +SERVICE_WIN32: int +SERVICE_INTERACTIVE_PROCESS: int +SERVICE_TYPE_ALL: int +SERVICE_BOOT_START: int +SERVICE_SYSTEM_START: int +SERVICE_AUTO_START: int +SERVICE_DEMAND_START: int +SERVICE_DISABLED: int +SERVICE_ERROR_IGNORE: int +SERVICE_ERROR_NORMAL: int +SERVICE_ERROR_SEVERE: int +SERVICE_ERROR_CRITICAL: int +TAPE_ERASE_SHORT: int +TAPE_ERASE_LONG: int +TAPE_LOAD: int +TAPE_UNLOAD: int +TAPE_TENSION: int +TAPE_LOCK: int +TAPE_UNLOCK: int +TAPE_FORMAT: int +TAPE_SETMARKS: int +TAPE_FILEMARKS: int +TAPE_SHORT_FILEMARKS: int +TAPE_LONG_FILEMARKS: int +TAPE_ABSOLUTE_POSITION: int +TAPE_LOGICAL_POSITION: int +TAPE_PSEUDO_LOGICAL_POSITION: int +TAPE_REWIND: int +TAPE_ABSOLUTE_BLOCK: int +TAPE_LOGICAL_BLOCK: int +TAPE_PSEUDO_LOGICAL_BLOCK: int +TAPE_SPACE_END_OF_DATA: int +TAPE_SPACE_RELATIVE_BLOCKS: int +TAPE_SPACE_FILEMARKS: int +TAPE_SPACE_SEQUENTIAL_FMKS: int +TAPE_SPACE_SETMARKS: int 
+TAPE_SPACE_SEQUENTIAL_SMKS: int +TAPE_DRIVE_FIXED: int +TAPE_DRIVE_SELECT: int +TAPE_DRIVE_INITIATOR: int +TAPE_DRIVE_ERASE_SHORT: int +TAPE_DRIVE_ERASE_LONG: int +TAPE_DRIVE_ERASE_BOP_ONLY: int +TAPE_DRIVE_ERASE_IMMEDIATE: int +TAPE_DRIVE_TAPE_CAPACITY: int +TAPE_DRIVE_TAPE_REMAINING: int +TAPE_DRIVE_FIXED_BLOCK: int +TAPE_DRIVE_VARIABLE_BLOCK: int +TAPE_DRIVE_WRITE_PROTECT: int +TAPE_DRIVE_EOT_WZ_SIZE: int +TAPE_DRIVE_ECC: int +TAPE_DRIVE_COMPRESSION: int +TAPE_DRIVE_PADDING: int +TAPE_DRIVE_REPORT_SMKS: int +TAPE_DRIVE_GET_ABSOLUTE_BLK: int +TAPE_DRIVE_GET_LOGICAL_BLK: int +TAPE_DRIVE_SET_EOT_WZ_SIZE: int +TAPE_DRIVE_LOAD_UNLOAD: int +TAPE_DRIVE_TENSION: int +TAPE_DRIVE_LOCK_UNLOCK: int +TAPE_DRIVE_REWIND_IMMEDIATE: int +TAPE_DRIVE_SET_BLOCK_SIZE: int +TAPE_DRIVE_LOAD_UNLD_IMMED: int +TAPE_DRIVE_TENSION_IMMED: int +TAPE_DRIVE_LOCK_UNLK_IMMED: int +TAPE_DRIVE_SET_ECC: int +TAPE_DRIVE_SET_COMPRESSION: int +TAPE_DRIVE_SET_PADDING: int +TAPE_DRIVE_SET_REPORT_SMKS: int +TAPE_DRIVE_ABSOLUTE_BLK: int +TAPE_DRIVE_ABS_BLK_IMMED: int +TAPE_DRIVE_LOGICAL_BLK: int +TAPE_DRIVE_LOG_BLK_IMMED: int +TAPE_DRIVE_END_OF_DATA: int +TAPE_DRIVE_RELATIVE_BLKS: int +TAPE_DRIVE_FILEMARKS: int +TAPE_DRIVE_SEQUENTIAL_FMKS: int +TAPE_DRIVE_SETMARKS: int +TAPE_DRIVE_SEQUENTIAL_SMKS: int +TAPE_DRIVE_REVERSE_POSITION: int +TAPE_DRIVE_SPACE_IMMEDIATE: int +TAPE_DRIVE_WRITE_SETMARKS: int +TAPE_DRIVE_WRITE_FILEMARKS: int +TAPE_DRIVE_WRITE_SHORT_FMKS: int +TAPE_DRIVE_WRITE_LONG_FMKS: int +TAPE_DRIVE_WRITE_MARK_IMMED: int +TAPE_DRIVE_FORMAT: int +TAPE_DRIVE_FORMAT_IMMEDIATE: int +TAPE_FIXED_PARTITIONS: int +TAPE_SELECT_PARTITIONS: int +TAPE_INITIATOR_PARTITIONS: int +APPLICATION_ERROR_MASK: int +ERROR_SEVERITY_SUCCESS: int +ERROR_SEVERITY_INFORMATIONAL: int +ERROR_SEVERITY_WARNING: int +ERROR_SEVERITY_ERROR: int +MINCHAR: int +MAXCHAR: int +MINSHORT: int +MAXSHORT: int +MINLONG: int +MAXLONG: int +MAXBYTE: int +MAXWORD: int +MAXDWORD: int +LANG_NEUTRAL: int +LANG_BULGARIAN: int +LANG_CHINESE: int +LANG_CROATIAN: int +LANG_CZECH: int +LANG_DANISH: int +LANG_DUTCH: int +LANG_ENGLISH: int +LANG_FINNISH: int +LANG_FRENCH: int +LANG_GERMAN: int +LANG_GREEK: int +LANG_HUNGARIAN: int +LANG_ICELANDIC: int +LANG_ITALIAN: int +LANG_JAPANESE: int +LANG_KOREAN: int +LANG_NORWEGIAN: int +LANG_POLISH: int +LANG_PORTUGUESE: int +LANG_ROMANIAN: int +LANG_RUSSIAN: int +LANG_SLOVAK: int +LANG_SLOVENIAN: int +LANG_SPANISH: int +LANG_SWEDISH: int +LANG_TURKISH: int +SUBLANG_NEUTRAL: int +SUBLANG_DEFAULT: int +SUBLANG_SYS_DEFAULT: int +SUBLANG_CHINESE_TRADITIONAL: int +SUBLANG_CHINESE_SIMPLIFIED: int +SUBLANG_CHINESE_HONGKONG: int +SUBLANG_CHINESE_SINGAPORE: int +SUBLANG_DUTCH: int +SUBLANG_DUTCH_BELGIAN: int +SUBLANG_ENGLISH_US: int +SUBLANG_ENGLISH_UK: int +SUBLANG_ENGLISH_AUS: int +SUBLANG_ENGLISH_CAN: int +SUBLANG_ENGLISH_NZ: int +SUBLANG_ENGLISH_EIRE: int +SUBLANG_FRENCH: int +SUBLANG_FRENCH_BELGIAN: int +SUBLANG_FRENCH_CANADIAN: int +SUBLANG_FRENCH_SWISS: int +SUBLANG_GERMAN: int +SUBLANG_GERMAN_SWISS: int +SUBLANG_GERMAN_AUSTRIAN: int +SUBLANG_ITALIAN: int +SUBLANG_ITALIAN_SWISS: int +SUBLANG_NORWEGIAN_BOKMAL: int +SUBLANG_NORWEGIAN_NYNORSK: int +SUBLANG_PORTUGUESE: int +SUBLANG_PORTUGUESE_BRAZILIAN: int +SUBLANG_SPANISH: int +SUBLANG_SPANISH_MEXICAN: int +SUBLANG_SPANISH_MODERN: int +SORT_DEFAULT: int +SORT_JAPANESE_XJIS: int +SORT_JAPANESE_UNICODE: int +SORT_CHINESE_BIG5: int +SORT_CHINESE_UNICODE: int +SORT_KOREAN_KSC: int +SORT_KOREAN_UNICODE: int + +def PRIMARYLANGID(lgid: int) -> int: ... +def SUBLANGID(lgid: int) -> int: ... 
+ +NLS_VALID_LOCALE_MASK: int +CONTEXT_PORTABLE_32BIT: int +CONTEXT_ALPHA: int +SIZE_OF_80387_REGISTERS: int +CONTEXT_CONTROL: int +CONTEXT_FLOATING_POINT: int +CONTEXT_INTEGER: int +CONTEXT_FULL: int +PROCESS_TERMINATE: int +PROCESS_CREATE_THREAD: int +PROCESS_VM_OPERATION: int +PROCESS_VM_READ: int +PROCESS_VM_WRITE: int +PROCESS_DUP_HANDLE: int +PROCESS_CREATE_PROCESS: int +PROCESS_SET_QUOTA: int +PROCESS_SET_INFORMATION: int +PROCESS_QUERY_INFORMATION: int +PROCESS_SUSPEND_RESUME: int +PROCESS_QUERY_LIMITED_INFORMATION: int +PROCESS_SET_LIMITED_INFORMATION: int +PROCESS_ALL_ACCESS: int +THREAD_TERMINATE: int +THREAD_SUSPEND_RESUME: int +THREAD_GET_CONTEXT: int +THREAD_SET_CONTEXT: int +THREAD_SET_INFORMATION: int +THREAD_QUERY_INFORMATION: int +THREAD_SET_THREAD_TOKEN: int +THREAD_IMPERSONATE: int +THREAD_DIRECT_IMPERSONATION: int +THREAD_SET_LIMITED_INFORMATION: int +THREAD_QUERY_LIMITED_INFORMATION: int +THREAD_RESUME: int +TLS_MINIMUM_AVAILABLE: int +EVENT_MODIFY_STATE: int +MUTANT_QUERY_STATE: int +SEMAPHORE_MODIFY_STATE: int +TIME_ZONE_ID_UNKNOWN: int +TIME_ZONE_ID_STANDARD: int +TIME_ZONE_ID_DAYLIGHT: int +PROCESSOR_INTEL_386: int +PROCESSOR_INTEL_486: int +PROCESSOR_INTEL_PENTIUM: int +PROCESSOR_INTEL_860: int +PROCESSOR_MIPS_R2000: int +PROCESSOR_MIPS_R3000: int +PROCESSOR_MIPS_R4000: int +PROCESSOR_ALPHA_21064: int +PROCESSOR_PPC_601: int +PROCESSOR_PPC_603: int +PROCESSOR_PPC_604: int +PROCESSOR_PPC_620: int +SECTION_QUERY: int +SECTION_MAP_WRITE: int +SECTION_MAP_READ: int +SECTION_MAP_EXECUTE: int +SECTION_EXTEND_SIZE: int +PAGE_NOACCESS: int +PAGE_READONLY: int +PAGE_READWRITE: int +PAGE_WRITECOPY: int +PAGE_EXECUTE: int +PAGE_EXECUTE_READ: int +PAGE_EXECUTE_READWRITE: int +PAGE_EXECUTE_WRITECOPY: int +PAGE_GUARD: int +PAGE_NOCACHE: int +MEM_COMMIT: int +MEM_RESERVE: int +MEM_DECOMMIT: int +MEM_RELEASE: int +MEM_FREE: int +MEM_PRIVATE: int +MEM_MAPPED: int +MEM_TOP_DOWN: int +SEC_FILE: int +SEC_IMAGE: int +SEC_RESERVE: int +SEC_COMMIT: int +SEC_NOCACHE: int +MEM_IMAGE: int +FILE_SHARE_READ: int +FILE_SHARE_WRITE: int +FILE_SHARE_DELETE: int +FILE_ATTRIBUTE_READONLY: int +FILE_ATTRIBUTE_HIDDEN: int +FILE_ATTRIBUTE_SYSTEM: int +FILE_ATTRIBUTE_DIRECTORY: int +FILE_ATTRIBUTE_ARCHIVE: int +FILE_ATTRIBUTE_DEVICE: int +FILE_ATTRIBUTE_NORMAL: int +FILE_ATTRIBUTE_TEMPORARY: int +FILE_ATTRIBUTE_SPARSE_FILE: int +FILE_ATTRIBUTE_REPARSE_POINT: int +FILE_ATTRIBUTE_COMPRESSED: int +FILE_ATTRIBUTE_OFFLINE: int +FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: int +FILE_ATTRIBUTE_ENCRYPTED: int +FILE_ATTRIBUTE_VIRTUAL: int +FILE_ATTRIBUTE_ATOMIC_WRITE: int +FILE_ATTRIBUTE_XACTION_WRITE: int +FILE_NOTIFY_CHANGE_FILE_NAME: int +FILE_NOTIFY_CHANGE_DIR_NAME: int +FILE_NOTIFY_CHANGE_ATTRIBUTES: int +FILE_NOTIFY_CHANGE_SIZE: int +FILE_NOTIFY_CHANGE_LAST_WRITE: int +FILE_NOTIFY_CHANGE_SECURITY: int +FILE_CASE_SENSITIVE_SEARCH: int +FILE_CASE_PRESERVED_NAMES: int +FILE_UNICODE_ON_DISK: int +FILE_PERSISTENT_ACLS: int +FILE_FILE_COMPRESSION: int +FILE_VOLUME_IS_COMPRESSED: int +IO_COMPLETION_MODIFY_STATE: int +DUPLICATE_CLOSE_SOURCE: int +DUPLICATE_SAME_ACCESS: int +SID_MAX_SUB_AUTHORITIES: int +SECURITY_NULL_RID: int +SECURITY_WORLD_RID: int +SECURITY_LOCAL_RID: int +SECURITY_CREATOR_OWNER_RID: int +SECURITY_CREATOR_GROUP_RID: int +SECURITY_DIALUP_RID: int +SECURITY_NETWORK_RID: int +SECURITY_BATCH_RID: int +SECURITY_INTERACTIVE_RID: int +SECURITY_SERVICE_RID: int +SECURITY_ANONYMOUS_LOGON_RID: int +SECURITY_LOGON_IDS_RID: int +SECURITY_LOGON_IDS_RID_COUNT: int +SECURITY_LOCAL_SYSTEM_RID: int 
+SECURITY_NT_NON_UNIQUE: int +SECURITY_BUILTIN_DOMAIN_RID: int +DOMAIN_USER_RID_ADMIN: int +DOMAIN_USER_RID_GUEST: int +DOMAIN_GROUP_RID_ADMINS: int +DOMAIN_GROUP_RID_USERS: int +DOMAIN_GROUP_RID_GUESTS: int +DOMAIN_ALIAS_RID_ADMINS: int +DOMAIN_ALIAS_RID_USERS: int +DOMAIN_ALIAS_RID_GUESTS: int +DOMAIN_ALIAS_RID_POWER_USERS: int +DOMAIN_ALIAS_RID_ACCOUNT_OPS: int +DOMAIN_ALIAS_RID_SYSTEM_OPS: int +DOMAIN_ALIAS_RID_PRINT_OPS: int +DOMAIN_ALIAS_RID_BACKUP_OPS: int +DOMAIN_ALIAS_RID_REPLICATOR: int +SE_GROUP_MANDATORY: int +SE_GROUP_ENABLED_BY_DEFAULT: int +SE_GROUP_ENABLED: int +SE_GROUP_OWNER: int +SE_GROUP_LOGON_ID: int +ACL_REVISION: int +ACL_REVISION1: int +ACL_REVISION2: int +ACCESS_ALLOWED_ACE_TYPE: int +ACCESS_DENIED_ACE_TYPE: int +SYSTEM_AUDIT_ACE_TYPE: int +SYSTEM_ALARM_ACE_TYPE: int +OBJECT_INHERIT_ACE: int +CONTAINER_INHERIT_ACE: int +NO_PROPAGATE_INHERIT_ACE: int +INHERIT_ONLY_ACE: int +VALID_INHERIT_FLAGS: int +SUCCESSFUL_ACCESS_ACE_FLAG: int +FAILED_ACCESS_ACE_FLAG: int +SECURITY_DESCRIPTOR_REVISION: int +SECURITY_DESCRIPTOR_REVISION1: int +SECURITY_DESCRIPTOR_MIN_LENGTH: int +SE_OWNER_DEFAULTED: int +SE_GROUP_DEFAULTED: int +SE_DACL_PRESENT: int +SE_DACL_DEFAULTED: int +SE_SACL_PRESENT: int +SE_SACL_DEFAULTED: int +SE_SELF_RELATIVE: int +SE_PRIVILEGE_ENABLED_BY_DEFAULT: int +SE_PRIVILEGE_ENABLED: int +SE_PRIVILEGE_USED_FOR_ACCESS: int +PRIVILEGE_SET_ALL_NECESSARY: int +SE_CREATE_TOKEN_NAME: str +SE_ASSIGNPRIMARYTOKEN_NAME: str +SE_LOCK_MEMORY_NAME: str +SE_INCREASE_QUOTA_NAME: str +SE_UNSOLICITED_INPUT_NAME: str +SE_MACHINE_ACCOUNT_NAME: str +SE_TCB_NAME: str +SE_SECURITY_NAME: str +SE_TAKE_OWNERSHIP_NAME: str +SE_LOAD_DRIVER_NAME: str +SE_SYSTEM_PROFILE_NAME: str +SE_SYSTEMTIME_NAME: str +SE_PROF_SINGLE_PROCESS_NAME: str +SE_INC_BASE_PRIORITY_NAME: str +SE_CREATE_PAGEFILE_NAME: str +SE_CREATE_PERMANENT_NAME: str +SE_BACKUP_NAME: str +SE_RESTORE_NAME: str +SE_SHUTDOWN_NAME: str +SE_DEBUG_NAME: str +SE_AUDIT_NAME: str +SE_SYSTEM_ENVIRONMENT_NAME: str +SE_CHANGE_NOTIFY_NAME: str +SE_REMOTE_SHUTDOWN_NAME: str +TOKEN_ASSIGN_PRIMARY: int +TOKEN_DUPLICATE: int +TOKEN_IMPERSONATE: int +TOKEN_QUERY: int +TOKEN_QUERY_SOURCE: int +TOKEN_ADJUST_PRIVILEGES: int +TOKEN_ADJUST_GROUPS: int +TOKEN_ADJUST_DEFAULT: int +TOKEN_ALL_ACCESS: int +TOKEN_READ: int +TOKEN_WRITE: int +TOKEN_EXECUTE: int +TOKEN_SOURCE_LENGTH: int +KEY_QUERY_VALUE: int +KEY_SET_VALUE: int +KEY_CREATE_SUB_KEY: int +KEY_ENUMERATE_SUB_KEYS: int +KEY_NOTIFY: int +KEY_CREATE_LINK: int +KEY_WOW64_32KEY: int +KEY_WOW64_64KEY: int +KEY_WOW64_RES: int +KEY_READ: int +KEY_WRITE: int +KEY_EXECUTE: int +KEY_ALL_ACCESS: int +REG_NOTIFY_CHANGE_ATTRIBUTES: int +REG_NOTIFY_CHANGE_SECURITY: int +REG_NONE: int +REG_SZ: int +REG_EXPAND_SZ: int +REG_BINARY: int +REG_DWORD: int +REG_DWORD_LITTLE_ENDIAN: int +REG_DWORD_BIG_ENDIAN: int +REG_LINK: int +REG_MULTI_SZ: int +REG_FULL_RESOURCE_DESCRIPTOR: int +REG_QWORD: int +REG_QWORD_LITTLE_ENDIAN: int +NULL: int +HEAP_NO_SERIALIZE: int +HEAP_GROWABLE: int +HEAP_GENERATE_EXCEPTIONS: int +HEAP_ZERO_MEMORY: int +HEAP_REALLOC_IN_PLACE_ONLY: int +HEAP_TAIL_CHECKING_ENABLED: int +HEAP_FREE_CHECKING_ENABLED: int +HEAP_DISABLE_COALESCE_ON_FREE: int +IS_TEXT_UNICODE_ASCII16: int +IS_TEXT_UNICODE_REVERSE_ASCII16: int +IS_TEXT_UNICODE_STATISTICS: int +IS_TEXT_UNICODE_REVERSE_STATISTICS: int +IS_TEXT_UNICODE_CONTROLS: int +IS_TEXT_UNICODE_REVERSE_CONTROLS: int +IS_TEXT_UNICODE_SIGNATURE: int +IS_TEXT_UNICODE_REVERSE_SIGNATURE: int +IS_TEXT_UNICODE_ILLEGAL_CHARS: int +IS_TEXT_UNICODE_ODD_LENGTH: int 
+IS_TEXT_UNICODE_DBCS_LEADBYTE: int +IS_TEXT_UNICODE_NULL_BYTES: int +IS_TEXT_UNICODE_UNICODE_MASK: int +IS_TEXT_UNICODE_REVERSE_MASK: int +IS_TEXT_UNICODE_NOT_UNICODE_MASK: int +IS_TEXT_UNICODE_NOT_ASCII_MASK: int +COMPRESSION_FORMAT_NONE: int +COMPRESSION_FORMAT_DEFAULT: int +COMPRESSION_FORMAT_LZNT1: int +COMPRESSION_ENGINE_STANDARD: int +COMPRESSION_ENGINE_MAXIMUM: int +MESSAGE_RESOURCE_UNICODE: int +RTL_CRITSECT_TYPE: int +RTL_RESOURCE_TYPE: int +DLL_PROCESS_ATTACH: int +DLL_THREAD_ATTACH: int +DLL_THREAD_DETACH: int +DLL_PROCESS_DETACH: int +EVENTLOG_SEQUENTIAL_READ: int +EVENTLOG_SEEK_READ: int +EVENTLOG_FORWARDS_READ: int +EVENTLOG_BACKWARDS_READ: int +EVENTLOG_SUCCESS: int +EVENTLOG_ERROR_TYPE: int +EVENTLOG_WARNING_TYPE: int +EVENTLOG_INFORMATION_TYPE: int +EVENTLOG_AUDIT_SUCCESS: int +EVENTLOG_AUDIT_FAILURE: int +EVENTLOG_START_PAIRED_EVENT: int +EVENTLOG_END_PAIRED_EVENT: int +EVENTLOG_END_ALL_PAIRED_EVENTS: int +EVENTLOG_PAIRED_EVENT_ACTIVE: int +EVENTLOG_PAIRED_EVENT_INACTIVE: int +OWNER_SECURITY_INFORMATION: int +GROUP_SECURITY_INFORMATION: int +DACL_SECURITY_INFORMATION: int +SACL_SECURITY_INFORMATION: int +IMAGE_SIZEOF_FILE_HEADER: int +IMAGE_FILE_MACHINE_UNKNOWN: int +IMAGE_NUMBEROF_DIRECTORY_ENTRIES: int +IMAGE_SIZEOF_ROM_OPTIONAL_HEADER: int +IMAGE_SIZEOF_STD_OPTIONAL_HEADER: int +IMAGE_SIZEOF_NT_OPTIONAL_HEADER: int +IMAGE_NT_OPTIONAL_HDR_MAGIC: int +IMAGE_ROM_OPTIONAL_HDR_MAGIC: int +IMAGE_SIZEOF_SHORT_NAME: int +IMAGE_SIZEOF_SECTION_HEADER: int +IMAGE_SIZEOF_SYMBOL: int +IMAGE_SYM_CLASS_NULL: int +IMAGE_SYM_CLASS_AUTOMATIC: int +IMAGE_SYM_CLASS_EXTERNAL: int +IMAGE_SYM_CLASS_STATIC: int +IMAGE_SYM_CLASS_REGISTER: int +IMAGE_SYM_CLASS_EXTERNAL_DEF: int +IMAGE_SYM_CLASS_LABEL: int +IMAGE_SYM_CLASS_UNDEFINED_LABEL: int +IMAGE_SYM_CLASS_MEMBER_OF_STRUCT: int +IMAGE_SYM_CLASS_ARGUMENT: int +IMAGE_SYM_CLASS_STRUCT_TAG: int +IMAGE_SYM_CLASS_MEMBER_OF_UNION: int +IMAGE_SYM_CLASS_UNION_TAG: int +IMAGE_SYM_CLASS_TYPE_DEFINITION: int +IMAGE_SYM_CLASS_UNDEFINED_STATIC: int +IMAGE_SYM_CLASS_ENUM_TAG: int +IMAGE_SYM_CLASS_MEMBER_OF_ENUM: int +IMAGE_SYM_CLASS_REGISTER_PARAM: int +IMAGE_SYM_CLASS_BIT_FIELD: int +IMAGE_SYM_CLASS_BLOCK: int +IMAGE_SYM_CLASS_FUNCTION: int +IMAGE_SYM_CLASS_END_OF_STRUCT: int +IMAGE_SYM_CLASS_FILE: int +IMAGE_SYM_CLASS_SECTION: int +IMAGE_SYM_CLASS_WEAK_EXTERNAL: int +N_BTMASK: int +N_TMASK: int +N_TMASK1: int +N_TMASK2: int +N_BTSHFT: int +N_TSHIFT: int +IMAGE_SIZEOF_AUX_SYMBOL: int +IMAGE_COMDAT_SELECT_NODUPLICATES: int +IMAGE_COMDAT_SELECT_ANY: int +IMAGE_COMDAT_SELECT_SAME_SIZE: int +IMAGE_COMDAT_SELECT_EXACT_MATCH: int +IMAGE_COMDAT_SELECT_ASSOCIATIVE: int +IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY: int +IMAGE_WEAK_EXTERN_SEARCH_LIBRARY: int +IMAGE_WEAK_EXTERN_SEARCH_ALIAS: int +IMAGE_SIZEOF_RELOCATION: int +IMAGE_REL_I386_SECTION: int +IMAGE_REL_I386_SECREL: int +IMAGE_REL_MIPS_REFHALF: int +IMAGE_REL_MIPS_REFWORD: int +IMAGE_REL_MIPS_JMPADDR: int +IMAGE_REL_MIPS_REFHI: int +IMAGE_REL_MIPS_REFLO: int +IMAGE_REL_MIPS_GPREL: int +IMAGE_REL_MIPS_LITERAL: int +IMAGE_REL_MIPS_SECTION: int +IMAGE_REL_MIPS_SECREL: int +IMAGE_REL_MIPS_REFWORDNB: int +IMAGE_REL_MIPS_PAIR: int +IMAGE_REL_ALPHA_ABSOLUTE: int +IMAGE_REL_ALPHA_REFLONG: int +IMAGE_REL_ALPHA_REFQUAD: int +IMAGE_REL_ALPHA_GPREL32: int +IMAGE_REL_ALPHA_LITERAL: int +IMAGE_REL_ALPHA_LITUSE: int +IMAGE_REL_ALPHA_GPDISP: int +IMAGE_REL_ALPHA_BRADDR: int +IMAGE_REL_ALPHA_HINT: int +IMAGE_REL_ALPHA_INLINE_REFLONG: int +IMAGE_REL_ALPHA_REFHI: int +IMAGE_REL_ALPHA_REFLO: int +IMAGE_REL_ALPHA_PAIR: int 
+IMAGE_REL_ALPHA_MATCH: int +IMAGE_REL_ALPHA_SECTION: int +IMAGE_REL_ALPHA_SECREL: int +IMAGE_REL_ALPHA_REFLONGNB: int +IMAGE_SIZEOF_BASE_RELOCATION: int +IMAGE_REL_BASED_ABSOLUTE: int +IMAGE_REL_BASED_HIGH: int +IMAGE_REL_BASED_LOW: int +IMAGE_REL_BASED_HIGHLOW: int +IMAGE_REL_BASED_HIGHADJ: int +IMAGE_REL_BASED_MIPS_JMPADDR: int +IMAGE_SIZEOF_LINENUMBER: int +IMAGE_ARCHIVE_START_SIZE: int +IMAGE_ARCHIVE_START: str +IMAGE_ARCHIVE_END: str +IMAGE_ARCHIVE_PAD: str +IMAGE_ARCHIVE_LINKER_MEMBER: str +IMAGE_ARCHIVE_LONGNAMES_MEMBER: str +IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR: int +IMAGE_ORDINAL_FLAG: int + +def IMAGE_SNAP_BY_ORDINAL(Ordinal: int) -> bool: ... +def IMAGE_ORDINAL(Ordinal: int) -> int: ... + +IMAGE_RESOURCE_NAME_IS_STRING: int +IMAGE_RESOURCE_DATA_IS_DIRECTORY: int +IMAGE_DEBUG_TYPE_UNKNOWN: int +IMAGE_DEBUG_TYPE_COFF: int +IMAGE_DEBUG_TYPE_CODEVIEW: int +IMAGE_DEBUG_TYPE_FPO: int +IMAGE_DEBUG_TYPE_MISC: int +IMAGE_DEBUG_TYPE_EXCEPTION: int +IMAGE_DEBUG_TYPE_FIXUP: int +IMAGE_DEBUG_TYPE_OMAP_TO_SRC: int +IMAGE_DEBUG_TYPE_OMAP_FROM_SRC: int +FRAME_FPO: int +FRAME_TRAP: int +FRAME_TSS: int +SIZEOF_RFPO_DATA: int +IMAGE_DEBUG_MISC_EXENAME: int +IMAGE_SEPARATE_DEBUG_SIGNATURE: int +NEWFRAME: int +ABORTDOC: int +NEXTBAND: int +SETCOLORTABLE: int +GETCOLORTABLE: int +FLUSHOUTPUT: int +DRAFTMODE: int +QUERYESCSUPPORT: int +SETABORTPROC: int +STARTDOC: int +ENDDOC: int +GETPHYSPAGESIZE: int +GETPRINTINGOFFSET: int +GETSCALINGFACTOR: int +MFCOMMENT: int +GETPENWIDTH: int +SETCOPYCOUNT: int +SELECTPAPERSOURCE: int +DEVICEDATA: int +PASSTHROUGH: int +GETTECHNOLGY: int +GETTECHNOLOGY: int +SETLINECAP: int +SETLINEJOIN: int +SETMITERLIMIT: int +BANDINFO: int +DRAWPATTERNRECT: int +GETVECTORPENSIZE: int +GETVECTORBRUSHSIZE: int +ENABLEDUPLEX: int +GETSETPAPERBINS: int +GETSETPRINTORIENT: int +ENUMPAPERBINS: int +SETDIBSCALING: int +EPSPRINTING: int +ENUMPAPERMETRICS: int +GETSETPAPERMETRICS: int +POSTSCRIPT_DATA: int +POSTSCRIPT_IGNORE: int +MOUSETRAILS: int +GETDEVICEUNITS: int +GETEXTENDEDTEXTMETRICS: int +GETEXTENTTABLE: int +GETPAIRKERNTABLE: int +GETTRACKKERNTABLE: int +EXTTEXTOUT: int +GETFACENAME: int +DOWNLOADFACE: int +ENABLERELATIVEWIDTHS: int +ENABLEPAIRKERNING: int +SETKERNTRACK: int +SETALLJUSTVALUES: int +SETCHARSET: int +STRETCHBLT: int +GETSETSCREENPARAMS: int +BEGIN_PATH: int +CLIP_TO_PATH: int +END_PATH: int +EXT_DEVICE_CAPS: int +RESTORE_CTM: int +SAVE_CTM: int +SET_ARC_DIRECTION: int +SET_BACKGROUND_COLOR: int +SET_POLY_MODE: int +SET_SCREEN_ANGLE: int +SET_SPREAD: int +TRANSFORM_CTM: int +SET_CLIP_BOX: int +SET_BOUNDS: int +SET_MIRROR_MODE: int +OPENCHANNEL: int +DOWNLOADHEADER: int +CLOSECHANNEL: int +POSTSCRIPT_PASSTHROUGH: int +ENCAPSULATED_POSTSCRIPT: int +SP_NOTREPORTED: int +SP_ERROR: int +SP_APPABORT: int +SP_USERABORT: int +SP_OUTOFDISK: int +SP_OUTOFMEMORY: int +PR_JOBSTATUS: int +OBJ_PEN: int +OBJ_BRUSH: int +OBJ_DC: int +OBJ_METADC: int +OBJ_PAL: int +OBJ_FONT: int +OBJ_BITMAP: int +OBJ_REGION: int +OBJ_METAFILE: int +OBJ_MEMDC: int +OBJ_EXTPEN: int +OBJ_ENHMETADC: int +OBJ_ENHMETAFILE: int +OBJ_COLORSPACE: int +MWT_IDENTITY: int +MWT_LEFTMULTIPLY: int +MWT_RIGHTMULTIPLY: int +MWT_MIN: int +MWT_MAX: int +BI_RGB: int +BI_RLE8: int +BI_RLE4: int +BI_BITFIELDS: int +TMPF_FIXED_PITCH: int +TMPF_VECTOR: int +TMPF_DEVICE: int +TMPF_TRUETYPE: int +NTM_REGULAR: int +NTM_BOLD: int +NTM_ITALIC: int +LF_FACESIZE: int +LF_FULLFACESIZE: int +OUT_DEFAULT_PRECIS: int +OUT_STRING_PRECIS: int +OUT_CHARACTER_PRECIS: int +OUT_STROKE_PRECIS: int +OUT_TT_PRECIS: int +OUT_DEVICE_PRECIS: 
int +OUT_RASTER_PRECIS: int +OUT_TT_ONLY_PRECIS: int +OUT_OUTLINE_PRECIS: int +CLIP_DEFAULT_PRECIS: int +CLIP_CHARACTER_PRECIS: int +CLIP_STROKE_PRECIS: int +CLIP_MASK: int +CLIP_LH_ANGLES: int +CLIP_TT_ALWAYS: int +CLIP_EMBEDDED: int +DEFAULT_QUALITY: int +DRAFT_QUALITY: int +PROOF_QUALITY: int +NONANTIALIASED_QUALITY: int +ANTIALIASED_QUALITY: int +CLEARTYPE_QUALITY: int +CLEARTYPE_NATURAL_QUALITY: int +DEFAULT_PITCH: int +FIXED_PITCH: int +VARIABLE_PITCH: int +ANSI_CHARSET: int +DEFAULT_CHARSET: int +SYMBOL_CHARSET: int +SHIFTJIS_CHARSET: int +HANGEUL_CHARSET: int +CHINESEBIG5_CHARSET: int +OEM_CHARSET: int +JOHAB_CHARSET: int +HEBREW_CHARSET: int +ARABIC_CHARSET: int +GREEK_CHARSET: int +TURKISH_CHARSET: int +VIETNAMESE_CHARSET: int +THAI_CHARSET: int +EASTEUROPE_CHARSET: int +RUSSIAN_CHARSET: int +MAC_CHARSET: int +BALTIC_CHARSET: int +FF_DONTCARE: int +FF_ROMAN: int +FF_SWISS: int +FF_MODERN: int +FF_SCRIPT: int +FF_DECORATIVE: int +FW_DONTCARE: int +FW_THIN: int +FW_EXTRALIGHT: int +FW_LIGHT: int +FW_NORMAL: int +FW_MEDIUM: int +FW_SEMIBOLD: int +FW_BOLD: int +FW_EXTRABOLD: int +FW_HEAVY: int +FW_ULTRALIGHT: int +FW_REGULAR: int +FW_DEMIBOLD: int +FW_ULTRABOLD: int +FW_BLACK: int +BS_SOLID: int +BS_NULL: int +BS_HOLLOW: int +BS_HATCHED: int +BS_PATTERN: int +BS_INDEXED: int +BS_DIBPATTERN: int +BS_DIBPATTERNPT: int +BS_PATTERN8X8: int +BS_DIBPATTERN8X8: int +HS_HORIZONTAL: int +HS_VERTICAL: int +HS_FDIAGONAL: int +HS_BDIAGONAL: int +HS_CROSS: int +HS_DIAGCROSS: int +HS_FDIAGONAL1: int +HS_BDIAGONAL1: int +HS_SOLID: int +HS_DENSE1: int +HS_DENSE2: int +HS_DENSE3: int +HS_DENSE4: int +HS_DENSE5: int +HS_DENSE6: int +HS_DENSE7: int +HS_DENSE8: int +HS_NOSHADE: int +HS_HALFTONE: int +HS_SOLIDCLR: int +HS_DITHEREDCLR: int +HS_SOLIDTEXTCLR: int +HS_DITHEREDTEXTCLR: int +HS_SOLIDBKCLR: int +HS_DITHEREDBKCLR: int +HS_API_MAX: int +PS_SOLID: int +PS_DASH: int +PS_DOT: int +PS_DASHDOT: int +PS_DASHDOTDOT: int +PS_NULL: int +PS_INSIDEFRAME: int +PS_USERSTYLE: int +PS_ALTERNATE: int +PS_STYLE_MASK: int +PS_ENDCAP_ROUND: int +PS_ENDCAP_SQUARE: int +PS_ENDCAP_FLAT: int +PS_ENDCAP_MASK: int +PS_JOIN_ROUND: int +PS_JOIN_BEVEL: int +PS_JOIN_MITER: int +PS_JOIN_MASK: int +PS_COSMETIC: int +PS_GEOMETRIC: int +PS_TYPE_MASK: int +AD_COUNTERCLOCKWISE: int +AD_CLOCKWISE: int +DRIVERVERSION: int +TECHNOLOGY: int +HORZSIZE: int +VERTSIZE: int +HORZRES: int +VERTRES: int +BITSPIXEL: int +PLANES: int +NUMBRUSHES: int +NUMPENS: int +NUMMARKERS: int +NUMFONTS: int +NUMCOLORS: int +PDEVICESIZE: int +CURVECAPS: int +LINECAPS: int +POLYGONALCAPS: int +TEXTCAPS: int +CLIPCAPS: int +RASTERCAPS: int +ASPECTX: int +ASPECTY: int +ASPECTXY: int +LOGPIXELSX: int +LOGPIXELSY: int +SIZEPALETTE: int +NUMRESERVED: int +COLORRES: int +PHYSICALWIDTH: int +PHYSICALHEIGHT: int +PHYSICALOFFSETX: int +PHYSICALOFFSETY: int +SCALINGFACTORX: int +SCALINGFACTORY: int +VREFRESH: int +DESKTOPVERTRES: int +DESKTOPHORZRES: int +BLTALIGNMENT: int +SHADEBLENDCAPS: int +COLORMGMTCAPS: int +DT_PLOTTER: int +DT_RASDISPLAY: int +DT_RASPRINTER: int +DT_RASCAMERA: int +DT_CHARSTREAM: int +DT_METAFILE: int +DT_DISPFILE: int +CC_NONE: int +CC_CIRCLES: int +CC_PIE: int +CC_CHORD: int +CC_ELLIPSES: int +CC_WIDE: int +CC_STYLED: int +CC_WIDESTYLED: int +CC_INTERIORS: int +CC_ROUNDRECT: int +LC_NONE: int +LC_POLYLINE: int +LC_MARKER: int +LC_POLYMARKER: int +LC_WIDE: int +LC_STYLED: int +LC_WIDESTYLED: int +LC_INTERIORS: int +PC_NONE: int +PC_POLYGON: int +PC_RECTANGLE: int +PC_WINDPOLYGON: int +PC_TRAPEZOID: int +PC_SCANLINE: int +PC_WIDE: int 
+PC_STYLED: int +PC_WIDESTYLED: int +PC_INTERIORS: int +CP_NONE: int +CP_RECTANGLE: int +CP_REGION: int +TC_OP_CHARACTER: int +TC_OP_STROKE: int +TC_CP_STROKE: int +TC_CR_90: int +TC_CR_ANY: int +TC_SF_X_YINDEP: int +TC_SA_DOUBLE: int +TC_SA_INTEGER: int +TC_SA_CONTIN: int +TC_EA_DOUBLE: int +TC_IA_ABLE: int +TC_UA_ABLE: int +TC_SO_ABLE: int +TC_RA_ABLE: int +TC_VA_ABLE: int +TC_RESERVED: int +TC_SCROLLBLT: int +RC_BITBLT: int +RC_BANDING: int +RC_SCALING: int +RC_BITMAP64: int +RC_GDI20_OUTPUT: int +RC_GDI20_STATE: int +RC_SAVEBITMAP: int +RC_DI_BITMAP: int +RC_PALETTE: int +RC_DIBTODEV: int +RC_BIGFONT: int +RC_STRETCHBLT: int +RC_FLOODFILL: int +RC_STRETCHDIB: int +RC_OP_DX_OUTPUT: int +RC_DEVBITS: int +DIB_RGB_COLORS: int +DIB_PAL_COLORS: int +DIB_PAL_INDICES: int +DIB_PAL_PHYSINDICES: int +DIB_PAL_LOGINDICES: int +SYSPAL_ERROR: int +SYSPAL_STATIC: int +SYSPAL_NOSTATIC: int +CBM_CREATEDIB: int +CBM_INIT: int +FLOODFILLBORDER: int +FLOODFILLSURFACE: int +CCHDEVICENAME: int +CCHFORMNAME: int +DM_SPECVERSION: int +DM_ORIENTATION: int +DM_PAPERSIZE: int +DM_PAPERLENGTH: int +DM_PAPERWIDTH: int +DM_SCALE: int +DM_POSITION: int +DM_NUP: int +DM_DISPLAYORIENTATION: int +DM_COPIES: int +DM_DEFAULTSOURCE: int +DM_PRINTQUALITY: int +DM_COLOR: int +DM_DUPLEX: int +DM_YRESOLUTION: int +DM_TTOPTION: int +DM_COLLATE: int +DM_FORMNAME: int +DM_LOGPIXELS: int +DM_BITSPERPEL: int +DM_PELSWIDTH: int +DM_PELSHEIGHT: int +DM_DISPLAYFLAGS: int +DM_DISPLAYFREQUENCY: int +DM_ICMMETHOD: int +DM_ICMINTENT: int +DM_MEDIATYPE: int +DM_DITHERTYPE: int +DM_PANNINGWIDTH: int +DM_PANNINGHEIGHT: int +DM_DISPLAYFIXEDOUTPUT: int +DMORIENT_PORTRAIT: int +DMORIENT_LANDSCAPE: int +DMDO_DEFAULT: int +DMDO_90: int +DMDO_180: int +DMDO_270: int +DMDFO_DEFAULT: int +DMDFO_STRETCH: int +DMDFO_CENTER: int +DMPAPER_LETTER: int +DMPAPER_LETTERSMALL: int +DMPAPER_TABLOID: int +DMPAPER_LEDGER: int +DMPAPER_LEGAL: int +DMPAPER_STATEMENT: int +DMPAPER_EXECUTIVE: int +DMPAPER_A3: int +DMPAPER_A4: int +DMPAPER_A4SMALL: int +DMPAPER_A5: int +DMPAPER_B4: int +DMPAPER_B5: int +DMPAPER_FOLIO: int +DMPAPER_QUARTO: int +DMPAPER_10X14: int +DMPAPER_11X17: int +DMPAPER_NOTE: int +DMPAPER_ENV_9: int +DMPAPER_ENV_10: int +DMPAPER_ENV_11: int +DMPAPER_ENV_12: int +DMPAPER_ENV_14: int +DMPAPER_CSHEET: int +DMPAPER_DSHEET: int +DMPAPER_ESHEET: int +DMPAPER_ENV_DL: int +DMPAPER_ENV_C5: int +DMPAPER_ENV_C3: int +DMPAPER_ENV_C4: int +DMPAPER_ENV_C6: int +DMPAPER_ENV_C65: int +DMPAPER_ENV_B4: int +DMPAPER_ENV_B5: int +DMPAPER_ENV_B6: int +DMPAPER_ENV_ITALY: int +DMPAPER_ENV_MONARCH: int +DMPAPER_ENV_PERSONAL: int +DMPAPER_FANFOLD_US: int +DMPAPER_FANFOLD_STD_GERMAN: int +DMPAPER_FANFOLD_LGL_GERMAN: int +DMPAPER_ISO_B4: int +DMPAPER_JAPANESE_POSTCARD: int +DMPAPER_9X11: int +DMPAPER_10X11: int +DMPAPER_15X11: int +DMPAPER_ENV_INVITE: int +DMPAPER_RESERVED_48: int +DMPAPER_RESERVED_49: int +DMPAPER_LETTER_EXTRA: int +DMPAPER_LEGAL_EXTRA: int +DMPAPER_TABLOID_EXTRA: int +DMPAPER_A4_EXTRA: int +DMPAPER_LETTER_TRANSVERSE: int +DMPAPER_A4_TRANSVERSE: int +DMPAPER_LETTER_EXTRA_TRANSVERSE: int +DMPAPER_A_PLUS: int +DMPAPER_B_PLUS: int +DMPAPER_LETTER_PLUS: int +DMPAPER_A4_PLUS: int +DMPAPER_A5_TRANSVERSE: int +DMPAPER_B5_TRANSVERSE: int +DMPAPER_A3_EXTRA: int +DMPAPER_A5_EXTRA: int +DMPAPER_B5_EXTRA: int +DMPAPER_A2: int +DMPAPER_A3_TRANSVERSE: int +DMPAPER_A3_EXTRA_TRANSVERSE: int +DMPAPER_DBL_JAPANESE_POSTCARD: int +DMPAPER_A6: int +DMPAPER_JENV_KAKU2: int +DMPAPER_JENV_KAKU3: int +DMPAPER_JENV_CHOU3: int +DMPAPER_JENV_CHOU4: int +DMPAPER_LETTER_ROTATED: int 
+DMPAPER_A3_ROTATED: int +DMPAPER_A4_ROTATED: int +DMPAPER_A5_ROTATED: int +DMPAPER_B4_JIS_ROTATED: int +DMPAPER_B5_JIS_ROTATED: int +DMPAPER_JAPANESE_POSTCARD_ROTATED: int +DMPAPER_DBL_JAPANESE_POSTCARD_ROTATED: int +DMPAPER_A6_ROTATED: int +DMPAPER_JENV_KAKU2_ROTATED: int +DMPAPER_JENV_KAKU3_ROTATED: int +DMPAPER_JENV_CHOU3_ROTATED: int +DMPAPER_JENV_CHOU4_ROTATED: int +DMPAPER_B6_JIS: int +DMPAPER_B6_JIS_ROTATED: int +DMPAPER_12X11: int +DMPAPER_JENV_YOU4: int +DMPAPER_JENV_YOU4_ROTATED: int +DMPAPER_P16K: int +DMPAPER_P32K: int +DMPAPER_P32KBIG: int +DMPAPER_PENV_1: int +DMPAPER_PENV_2: int +DMPAPER_PENV_3: int +DMPAPER_PENV_4: int +DMPAPER_PENV_5: int +DMPAPER_PENV_6: int +DMPAPER_PENV_7: int +DMPAPER_PENV_8: int +DMPAPER_PENV_9: int +DMPAPER_PENV_10: int +DMPAPER_P16K_ROTATED: int +DMPAPER_P32K_ROTATED: int +DMPAPER_P32KBIG_ROTATED: int +DMPAPER_PENV_1_ROTATED: int +DMPAPER_PENV_2_ROTATED: int +DMPAPER_PENV_3_ROTATED: int +DMPAPER_PENV_4_ROTATED: int +DMPAPER_PENV_5_ROTATED: int +DMPAPER_PENV_6_ROTATED: int +DMPAPER_PENV_7_ROTATED: int +DMPAPER_PENV_8_ROTATED: int +DMPAPER_PENV_9_ROTATED: int +DMPAPER_PENV_10_ROTATED: int +DMPAPER_LAST: int +DMPAPER_USER: int +DMBIN_UPPER: int +DMBIN_ONLYONE: int +DMBIN_LOWER: int +DMBIN_MIDDLE: int +DMBIN_MANUAL: int +DMBIN_ENVELOPE: int +DMBIN_ENVMANUAL: int +DMBIN_AUTO: int +DMBIN_TRACTOR: int +DMBIN_SMALLFMT: int +DMBIN_LARGEFMT: int +DMBIN_LARGECAPACITY: int +DMBIN_CASSETTE: int +DMBIN_FORMSOURCE: int +DMBIN_LAST: int +DMBIN_USER: int +DMRES_DRAFT: int +DMRES_LOW: int +DMRES_MEDIUM: int +DMRES_HIGH: int +DMCOLOR_MONOCHROME: int +DMCOLOR_COLOR: int +DMDUP_SIMPLEX: int +DMDUP_VERTICAL: int +DMDUP_HORIZONTAL: int +DMTT_BITMAP: int +DMTT_DOWNLOAD: int +DMTT_SUBDEV: int +DMTT_DOWNLOAD_OUTLINE: int +DMCOLLATE_FALSE: int +DMCOLLATE_TRUE: int +DM_GRAYSCALE: int +DM_INTERLACED: int +DMICMMETHOD_NONE: int +DMICMMETHOD_SYSTEM: int +DMICMMETHOD_DRIVER: int +DMICMMETHOD_DEVICE: int +DMICMMETHOD_USER: int +DMICM_SATURATE: int +DMICM_CONTRAST: int +DMICM_COLORIMETRIC: int +DMICM_ABS_COLORIMETRIC: int +DMICM_USER: int +DMMEDIA_STANDARD: int +DMMEDIA_TRANSPARENCY: int +DMMEDIA_GLOSSY: int +DMMEDIA_USER: int +DMDITHER_NONE: int +DMDITHER_COARSE: int +DMDITHER_FINE: int +DMDITHER_LINEART: int +DMDITHER_ERRORDIFFUSION: int +DMDITHER_RESERVED6: int +DMDITHER_RESERVED7: int +DMDITHER_RESERVED8: int +DMDITHER_RESERVED9: int +DMDITHER_GRAYSCALE: int +DMDITHER_USER: int +DMNUP_SYSTEM: int +DMNUP_ONEUP: int +FEATURESETTING_NUP: int +FEATURESETTING_OUTPUT: int +FEATURESETTING_PSLEVEL: int +FEATURESETTING_CUSTPAPER: int +FEATURESETTING_MIRROR: int +FEATURESETTING_NEGATIVE: int +FEATURESETTING_PROTOCOL: int +FEATURESETTING_PRIVATE_BEGIN: int +FEATURESETTING_PRIVATE_END: int +RDH_RECTANGLES: int +GGO_METRICS: int +GGO_BITMAP: int +GGO_NATIVE: int +TT_POLYGON_TYPE: int +TT_PRIM_LINE: int +TT_PRIM_QSPLINE: int +TT_AVAILABLE: int +TT_ENABLED: int +DM_UPDATE: int +DM_COPY: int +DM_PROMPT: int +DM_MODIFY: int +DM_IN_BUFFER: int +DM_IN_PROMPT: int +DM_OUT_BUFFER: int +DM_OUT_DEFAULT: int +DISPLAY_DEVICE_ATTACHED_TO_DESKTOP: int +DISPLAY_DEVICE_MULTI_DRIVER: int +DISPLAY_DEVICE_PRIMARY_DEVICE: int +DISPLAY_DEVICE_MIRRORING_DRIVER: int +DISPLAY_DEVICE_VGA_COMPATIBLE: int +DISPLAY_DEVICE_REMOVABLE: int +DISPLAY_DEVICE_MODESPRUNED: int +DISPLAY_DEVICE_REMOTE: int +DISPLAY_DEVICE_DISCONNECT: int +DC_FIELDS: int +DC_PAPERS: int +DC_PAPERSIZE: int +DC_MINEXTENT: int +DC_MAXEXTENT: int +DC_BINS: int +DC_DUPLEX: int +DC_SIZE: int +DC_EXTRA: int +DC_VERSION: int +DC_DRIVER: int 
+DC_BINNAMES: int +DC_ENUMRESOLUTIONS: int +DC_FILEDEPENDENCIES: int +DC_TRUETYPE: int +DC_PAPERNAMES: int +DC_ORIENTATION: int +DC_COPIES: int +DC_BINADJUST: int +DC_EMF_COMPLIANT: int +DC_DATATYPE_PRODUCED: int +DC_COLLATE: int +DC_MANUFACTURER: int +DC_MODEL: int +DC_PERSONALITY: int +DC_PRINTRATE: int +DC_PRINTRATEUNIT: int +DC_PRINTERMEM: int +DC_MEDIAREADY: int +DC_STAPLE: int +DC_PRINTRATEPPM: int +DC_COLORDEVICE: int +DC_NUP: int +DC_MEDIATYPENAMES: int +DC_MEDIATYPES: int +PRINTRATEUNIT_PPM: int +PRINTRATEUNIT_CPS: int +PRINTRATEUNIT_LPM: int +PRINTRATEUNIT_IPM: int +DCTT_BITMAP: int +DCTT_DOWNLOAD: int +DCTT_SUBDEV: int +DCTT_DOWNLOAD_OUTLINE: int +DCBA_FACEUPNONE: int +DCBA_FACEUPCENTER: int +DCBA_FACEUPLEFT: int +DCBA_FACEUPRIGHT: int +DCBA_FACEDOWNNONE: int +DCBA_FACEDOWNCENTER: int +DCBA_FACEDOWNLEFT: int +DCBA_FACEDOWNRIGHT: int +CA_NEGATIVE: int +CA_LOG_FILTER: int +ILLUMINANT_DEVICE_DEFAULT: int +ILLUMINANT_A: int +ILLUMINANT_B: int +ILLUMINANT_C: int +ILLUMINANT_D50: int +ILLUMINANT_D55: int +ILLUMINANT_D65: int +ILLUMINANT_D75: int +ILLUMINANT_F2: int +ILLUMINANT_MAX_INDEX: int +ILLUMINANT_TUNGSTEN: int +ILLUMINANT_DAYLIGHT: int +ILLUMINANT_FLUORESCENT: int +ILLUMINANT_NTSC: int +FONTMAPPER_MAX: int +ENHMETA_SIGNATURE: int +ENHMETA_STOCK_OBJECT: int +EMR_HEADER: int +EMR_POLYBEZIER: int +EMR_POLYGON: int +EMR_POLYLINE: int +EMR_POLYBEZIERTO: int +EMR_POLYLINETO: int +EMR_POLYPOLYLINE: int +EMR_POLYPOLYGON: int +EMR_SETWINDOWEXTEX: int +EMR_SETWINDOWORGEX: int +EMR_SETVIEWPORTEXTEX: int +EMR_SETVIEWPORTORGEX: int +EMR_SETBRUSHORGEX: int +EMR_EOF: int +EMR_SETPIXELV: int +EMR_SETMAPPERFLAGS: int +EMR_SETMAPMODE: int +EMR_SETBKMODE: int +EMR_SETPOLYFILLMODE: int +EMR_SETROP2: int +EMR_SETSTRETCHBLTMODE: int +EMR_SETTEXTALIGN: int +EMR_SETCOLORADJUSTMENT: int +EMR_SETTEXTCOLOR: int +EMR_SETBKCOLOR: int +EMR_OFFSETCLIPRGN: int +EMR_MOVETOEX: int +EMR_SETMETARGN: int +EMR_EXCLUDECLIPRECT: int +EMR_INTERSECTCLIPRECT: int +EMR_SCALEVIEWPORTEXTEX: int +EMR_SCALEWINDOWEXTEX: int +EMR_SAVEDC: int +EMR_RESTOREDC: int +EMR_SETWORLDTRANSFORM: int +EMR_MODIFYWORLDTRANSFORM: int +EMR_SELECTOBJECT: int +EMR_CREATEPEN: int +EMR_CREATEBRUSHINDIRECT: int +EMR_DELETEOBJECT: int +EMR_ANGLEARC: int +EMR_ELLIPSE: int +EMR_RECTANGLE: int +EMR_ROUNDRECT: int +EMR_ARC: int +EMR_CHORD: int +EMR_PIE: int +EMR_SELECTPALETTE: int +EMR_CREATEPALETTE: int +EMR_SETPALETTEENTRIES: int +EMR_RESIZEPALETTE: int +EMR_REALIZEPALETTE: int +EMR_EXTFLOODFILL: int +EMR_LINETO: int +EMR_ARCTO: int +EMR_POLYDRAW: int +EMR_SETARCDIRECTION: int +EMR_SETMITERLIMIT: int +EMR_BEGINPATH: int +EMR_ENDPATH: int +EMR_CLOSEFIGURE: int +EMR_FILLPATH: int +EMR_STROKEANDFILLPATH: int +EMR_STROKEPATH: int +EMR_FLATTENPATH: int +EMR_WIDENPATH: int +EMR_SELECTCLIPPATH: int +EMR_ABORTPATH: int +EMR_GDICOMMENT: int +EMR_FILLRGN: int +EMR_FRAMERGN: int +EMR_INVERTRGN: int +EMR_PAINTRGN: int +EMR_EXTSELECTCLIPRGN: int +EMR_BITBLT: int +EMR_STRETCHBLT: int +EMR_MASKBLT: int +EMR_PLGBLT: int +EMR_SETDIBITSTODEVICE: int +EMR_STRETCHDIBITS: int +EMR_EXTCREATEFONTINDIRECTW: int +EMR_EXTTEXTOUTA: int +EMR_EXTTEXTOUTW: int +EMR_POLYBEZIER16: int +EMR_POLYGON16: int +EMR_POLYLINE16: int +EMR_POLYBEZIERTO16: int +EMR_POLYLINETO16: int +EMR_POLYPOLYLINE16: int +EMR_POLYPOLYGON16: int +EMR_POLYDRAW16: int +EMR_CREATEMONOBRUSH: int +EMR_CREATEDIBPATTERNBRUSHPT: int +EMR_EXTCREATEPEN: int +EMR_POLYTEXTOUTA: int +EMR_POLYTEXTOUTW: int +EMR_MIN: int +EMR_MAX: int +PANOSE_COUNT: int +PAN_FAMILYTYPE_INDEX: int +PAN_SERIFSTYLE_INDEX: int 
+PAN_WEIGHT_INDEX: int +PAN_PROPORTION_INDEX: int +PAN_CONTRAST_INDEX: int +PAN_STROKEVARIATION_INDEX: int +PAN_ARMSTYLE_INDEX: int +PAN_LETTERFORM_INDEX: int +PAN_MIDLINE_INDEX: int +PAN_XHEIGHT_INDEX: int +PAN_CULTURE_LATIN: int +PAN_ANY: int +PAN_NO_FIT: int +PAN_FAMILY_TEXT_DISPLAY: int +PAN_FAMILY_SCRIPT: int +PAN_FAMILY_DECORATIVE: int +PAN_FAMILY_PICTORIAL: int +PAN_SERIF_COVE: int +PAN_SERIF_OBTUSE_COVE: int +PAN_SERIF_SQUARE_COVE: int +PAN_SERIF_OBTUSE_SQUARE_COVE: int +PAN_SERIF_SQUARE: int +PAN_SERIF_THIN: int +PAN_SERIF_BONE: int +PAN_SERIF_EXAGGERATED: int +PAN_SERIF_TRIANGLE: int +PAN_SERIF_NORMAL_SANS: int +PAN_SERIF_OBTUSE_SANS: int +PAN_SERIF_PERP_SANS: int +PAN_SERIF_FLARED: int +PAN_SERIF_ROUNDED: int +PAN_WEIGHT_VERY_LIGHT: int +PAN_WEIGHT_LIGHT: int +PAN_WEIGHT_THIN: int +PAN_WEIGHT_BOOK: int +PAN_WEIGHT_MEDIUM: int +PAN_WEIGHT_DEMI: int +PAN_WEIGHT_BOLD: int +PAN_WEIGHT_HEAVY: int +PAN_WEIGHT_BLACK: int +PAN_WEIGHT_NORD: int +PAN_PROP_OLD_STYLE: int +PAN_PROP_MODERN: int +PAN_PROP_EVEN_WIDTH: int +PAN_PROP_EXPANDED: int +PAN_PROP_CONDENSED: int +PAN_PROP_VERY_EXPANDED: int +PAN_PROP_VERY_CONDENSED: int +PAN_PROP_MONOSPACED: int +PAN_CONTRAST_NONE: int +PAN_CONTRAST_VERY_LOW: int +PAN_CONTRAST_LOW: int +PAN_CONTRAST_MEDIUM_LOW: int +PAN_CONTRAST_MEDIUM: int +PAN_CONTRAST_MEDIUM_HIGH: int +PAN_CONTRAST_HIGH: int +PAN_CONTRAST_VERY_HIGH: int +PAN_STROKE_GRADUAL_DIAG: int +PAN_STROKE_GRADUAL_TRAN: int +PAN_STROKE_GRADUAL_VERT: int +PAN_STROKE_GRADUAL_HORZ: int +PAN_STROKE_RAPID_VERT: int +PAN_STROKE_RAPID_HORZ: int +PAN_STROKE_INSTANT_VERT: int +PAN_STRAIGHT_ARMS_HORZ: int +PAN_STRAIGHT_ARMS_WEDGE: int +PAN_STRAIGHT_ARMS_VERT: int +PAN_STRAIGHT_ARMS_SINGLE_SERIF: int +PAN_STRAIGHT_ARMS_DOUBLE_SERIF: int +PAN_BENT_ARMS_HORZ: int +PAN_BENT_ARMS_WEDGE: int +PAN_BENT_ARMS_VERT: int +PAN_BENT_ARMS_SINGLE_SERIF: int +PAN_BENT_ARMS_DOUBLE_SERIF: int +PAN_LETT_NORMAL_CONTACT: int +PAN_LETT_NORMAL_WEIGHTED: int +PAN_LETT_NORMAL_BOXED: int +PAN_LETT_NORMAL_FLATTENED: int +PAN_LETT_NORMAL_ROUNDED: int +PAN_LETT_NORMAL_OFF_CENTER: int +PAN_LETT_NORMAL_SQUARE: int +PAN_LETT_OBLIQUE_CONTACT: int +PAN_LETT_OBLIQUE_WEIGHTED: int +PAN_LETT_OBLIQUE_BOXED: int +PAN_LETT_OBLIQUE_FLATTENED: int +PAN_LETT_OBLIQUE_ROUNDED: int +PAN_LETT_OBLIQUE_OFF_CENTER: int +PAN_LETT_OBLIQUE_SQUARE: int +PAN_MIDLINE_STANDARD_TRIMMED: int +PAN_MIDLINE_STANDARD_POINTED: int +PAN_MIDLINE_STANDARD_SERIFED: int +PAN_MIDLINE_HIGH_TRIMMED: int +PAN_MIDLINE_HIGH_POINTED: int +PAN_MIDLINE_HIGH_SERIFED: int +PAN_MIDLINE_CONSTANT_TRIMMED: int +PAN_MIDLINE_CONSTANT_POINTED: int +PAN_MIDLINE_CONSTANT_SERIFED: int +PAN_MIDLINE_LOW_TRIMMED: int +PAN_MIDLINE_LOW_POINTED: int +PAN_MIDLINE_LOW_SERIFED: int +PAN_XHEIGHT_CONSTANT_SMALL: int +PAN_XHEIGHT_CONSTANT_STD: int +PAN_XHEIGHT_CONSTANT_LARGE: int +PAN_XHEIGHT_DUCKING_SMALL: int +PAN_XHEIGHT_DUCKING_STD: int +PAN_XHEIGHT_DUCKING_LARGE: int +ELF_VENDOR_SIZE: int +ELF_VERSION: int +ELF_CULTURE_LATIN: int +RASTER_FONTTYPE: int +DEVICE_FONTTYPE: int +TRUETYPE_FONTTYPE: int + +def PALETTEINDEX(i: int) -> int: ... + +PC_RESERVED: int +PC_EXPLICIT: int +PC_NOCOLLAPSE: int + +def GetRValue(rgb: int) -> int: ... +def GetGValue(rgb: int) -> int: ... +def GetBValue(rgb: int) -> int: ... 
+ +TRANSPARENT: int +OPAQUE: int +BKMODE_LAST: int +GM_COMPATIBLE: int +GM_ADVANCED: int +GM_LAST: int +PT_CLOSEFIGURE: int +PT_LINETO: int +PT_BEZIERTO: int +PT_MOVETO: int +MM_TEXT: int +MM_LOMETRIC: int +MM_HIMETRIC: int +MM_LOENGLISH: int +MM_HIENGLISH: int +MM_TWIPS: int +MM_ISOTROPIC: int +MM_ANISOTROPIC: int +MM_MIN: int +MM_MAX: int +MM_MAX_FIXEDSCALE: int +ABSOLUTE: int +RELATIVE: int +WHITE_BRUSH: int +LTGRAY_BRUSH: int +GRAY_BRUSH: int +DKGRAY_BRUSH: int +BLACK_BRUSH: int +NULL_BRUSH: int +HOLLOW_BRUSH: int +WHITE_PEN: int +BLACK_PEN: int +NULL_PEN: int +OEM_FIXED_FONT: int +ANSI_FIXED_FONT: int +ANSI_VAR_FONT: int +SYSTEM_FONT: int +DEVICE_DEFAULT_FONT: int +DEFAULT_PALETTE: int +SYSTEM_FIXED_FONT: int +STOCK_LAST: int +CLR_INVALID: int +DC_BRUSH: int +DC_PEN: int +STATUS_WAIT_0: int +STATUS_ABANDONED_WAIT_0: int +STATUS_USER_APC: int +STATUS_TIMEOUT: int +STATUS_PENDING: int +STATUS_SEGMENT_NOTIFICATION: int +STATUS_GUARD_PAGE_VIOLATION: int +STATUS_DATATYPE_MISALIGNMENT: int +STATUS_BREAKPOINT: int +STATUS_SINGLE_STEP: int +STATUS_ACCESS_VIOLATION: int +STATUS_IN_PAGE_ERROR: int +STATUS_INVALID_HANDLE: int +STATUS_NO_MEMORY: int +STATUS_ILLEGAL_INSTRUCTION: int +STATUS_NONCONTINUABLE_EXCEPTION: int +STATUS_INVALID_DISPOSITION: int +STATUS_ARRAY_BOUNDS_EXCEEDED: int +STATUS_FLOAT_DENORMAL_OPERAND: int +STATUS_FLOAT_DIVIDE_BY_ZERO: int +STATUS_FLOAT_INEXACT_RESULT: int +STATUS_FLOAT_INVALID_OPERATION: int +STATUS_FLOAT_OVERFLOW: int +STATUS_FLOAT_STACK_CHECK: int +STATUS_FLOAT_UNDERFLOW: int +STATUS_INTEGER_DIVIDE_BY_ZERO: int +STATUS_INTEGER_OVERFLOW: int +STATUS_PRIVILEGED_INSTRUCTION: int +STATUS_STACK_OVERFLOW: int +STATUS_CONTROL_C_EXIT: int +WAIT_FAILED: int +WAIT_OBJECT_0: int +WAIT_ABANDONED: int +WAIT_ABANDONED_0: int +WAIT_TIMEOUT: int +WAIT_IO_COMPLETION: int +STILL_ACTIVE: int +EXCEPTION_ACCESS_VIOLATION: int +EXCEPTION_DATATYPE_MISALIGNMENT: int +EXCEPTION_BREAKPOINT: int +EXCEPTION_SINGLE_STEP: int +EXCEPTION_ARRAY_BOUNDS_EXCEEDED: int +EXCEPTION_FLT_DENORMAL_OPERAND: int +EXCEPTION_FLT_DIVIDE_BY_ZERO: int +EXCEPTION_FLT_INEXACT_RESULT: int +EXCEPTION_FLT_INVALID_OPERATION: int +EXCEPTION_FLT_OVERFLOW: int +EXCEPTION_FLT_STACK_CHECK: int +EXCEPTION_FLT_UNDERFLOW: int +EXCEPTION_INT_DIVIDE_BY_ZERO: int +EXCEPTION_INT_OVERFLOW: int +EXCEPTION_PRIV_INSTRUCTION: int +EXCEPTION_IN_PAGE_ERROR: int +EXCEPTION_ILLEGAL_INSTRUCTION: int +EXCEPTION_NONCONTINUABLE_EXCEPTION: int +EXCEPTION_STACK_OVERFLOW: int +EXCEPTION_INVALID_DISPOSITION: int +EXCEPTION_GUARD_PAGE: int +EXCEPTION_INVALID_HANDLE: int +CONTROL_C_EXIT: int +SPI_GETBEEP: int +SPI_SETBEEP: int +SPI_GETMOUSE: int +SPI_SETMOUSE: int +SPI_GETBORDER: int +SPI_SETBORDER: int +SPI_GETKEYBOARDSPEED: int +SPI_SETKEYBOARDSPEED: int +SPI_LANGDRIVER: int +SPI_ICONHORIZONTALSPACING: int +SPI_GETSCREENSAVETIMEOUT: int +SPI_SETSCREENSAVETIMEOUT: int +SPI_GETSCREENSAVEACTIVE: int +SPI_SETSCREENSAVEACTIVE: int +SPI_GETGRIDGRANULARITY: int +SPI_SETGRIDGRANULARITY: int +SPI_SETDESKWALLPAPER: int +SPI_SETDESKPATTERN: int +SPI_GETKEYBOARDDELAY: int +SPI_SETKEYBOARDDELAY: int +SPI_ICONVERTICALSPACING: int +SPI_GETICONTITLEWRAP: int +SPI_SETICONTITLEWRAP: int +SPI_GETMENUDROPALIGNMENT: int +SPI_SETMENUDROPALIGNMENT: int +SPI_SETDOUBLECLKWIDTH: int +SPI_SETDOUBLECLKHEIGHT: int +SPI_GETICONTITLELOGFONT: int +SPI_SETDOUBLECLICKTIME: int +SPI_SETMOUSEBUTTONSWAP: int +SPI_SETICONTITLELOGFONT: int +SPI_GETFASTTASKSWITCH: int +SPI_SETFASTTASKSWITCH: int +SPI_SETDRAGFULLWINDOWS: int +SPI_GETDRAGFULLWINDOWS: int +SPI_GETNONCLIENTMETRICS: int 
+SPI_SETNONCLIENTMETRICS: int +SPI_GETMINIMIZEDMETRICS: int +SPI_SETMINIMIZEDMETRICS: int +SPI_GETICONMETRICS: int +SPI_SETICONMETRICS: int +SPI_SETWORKAREA: int +SPI_GETWORKAREA: int +SPI_SETPENWINDOWS: int +SPI_GETFILTERKEYS: int +SPI_SETFILTERKEYS: int +SPI_GETTOGGLEKEYS: int +SPI_SETTOGGLEKEYS: int +SPI_GETMOUSEKEYS: int +SPI_SETMOUSEKEYS: int +SPI_GETSHOWSOUNDS: int +SPI_SETSHOWSOUNDS: int +SPI_GETSTICKYKEYS: int +SPI_SETSTICKYKEYS: int +SPI_GETACCESSTIMEOUT: int +SPI_SETACCESSTIMEOUT: int +SPI_GETSERIALKEYS: int +SPI_SETSERIALKEYS: int +SPI_GETSOUNDSENTRY: int +SPI_SETSOUNDSENTRY: int +SPI_GETHIGHCONTRAST: int +SPI_SETHIGHCONTRAST: int +SPI_GETKEYBOARDPREF: int +SPI_SETKEYBOARDPREF: int +SPI_GETSCREENREADER: int +SPI_SETSCREENREADER: int +SPI_GETANIMATION: int +SPI_SETANIMATION: int +SPI_GETFONTSMOOTHING: int +SPI_SETFONTSMOOTHING: int +SPI_SETDRAGWIDTH: int +SPI_SETDRAGHEIGHT: int +SPI_SETHANDHELD: int +SPI_GETLOWPOWERTIMEOUT: int +SPI_GETPOWEROFFTIMEOUT: int +SPI_SETLOWPOWERTIMEOUT: int +SPI_SETPOWEROFFTIMEOUT: int +SPI_GETLOWPOWERACTIVE: int +SPI_GETPOWEROFFACTIVE: int +SPI_SETLOWPOWERACTIVE: int +SPI_SETPOWEROFFACTIVE: int +SPI_SETCURSORS: int +SPI_SETICONS: int +SPI_GETDEFAULTINPUTLANG: int +SPI_SETDEFAULTINPUTLANG: int +SPI_SETLANGTOGGLE: int +SPI_GETWINDOWSEXTENSION: int +SPI_SETMOUSETRAILS: int +SPI_GETMOUSETRAILS: int +SPI_GETSNAPTODEFBUTTON: int +SPI_SETSNAPTODEFBUTTON: int +SPI_SETSCREENSAVERRUNNING: int +SPI_SCREENSAVERRUNNING: int +SPI_GETMOUSEHOVERWIDTH: int +SPI_SETMOUSEHOVERWIDTH: int +SPI_GETMOUSEHOVERHEIGHT: int +SPI_SETMOUSEHOVERHEIGHT: int +SPI_GETMOUSEHOVERTIME: int +SPI_SETMOUSEHOVERTIME: int +SPI_GETWHEELSCROLLLINES: int +SPI_SETWHEELSCROLLLINES: int +SPI_GETMENUSHOWDELAY: int +SPI_SETMENUSHOWDELAY: int +SPI_GETSHOWIMEUI: int +SPI_SETSHOWIMEUI: int +SPI_GETMOUSESPEED: int +SPI_SETMOUSESPEED: int +SPI_GETSCREENSAVERRUNNING: int +SPI_GETDESKWALLPAPER: int +SPI_GETACTIVEWINDOWTRACKING: int +SPI_SETACTIVEWINDOWTRACKING: int +SPI_GETMENUANIMATION: int +SPI_SETMENUANIMATION: int +SPI_GETCOMBOBOXANIMATION: int +SPI_SETCOMBOBOXANIMATION: int +SPI_GETGRADIENTCAPTIONS: int +SPI_SETGRADIENTCAPTIONS: int +SPI_GETKEYBOARDCUES: int +SPI_SETKEYBOARDCUES: int +SPI_GETMENUUNDERLINES: int +SPI_SETMENUUNDERLINES: int +SPI_GETACTIVEWNDTRKZORDER: int +SPI_SETACTIVEWNDTRKZORDER: int +SPI_GETHOTTRACKING: int +SPI_SETHOTTRACKING: int +SPI_GETMENUFADE: int +SPI_SETMENUFADE: int +SPI_GETSELECTIONFADE: int +SPI_SETSELECTIONFADE: int +SPI_GETTOOLTIPANIMATION: int +SPI_SETTOOLTIPANIMATION: int +SPI_GETTOOLTIPFADE: int +SPI_SETTOOLTIPFADE: int +SPI_GETCURSORSHADOW: int +SPI_SETCURSORSHADOW: int +SPI_GETMOUSESONAR: int +SPI_SETMOUSESONAR: int +SPI_GETMOUSECLICKLOCK: int +SPI_SETMOUSECLICKLOCK: int +SPI_GETMOUSEVANISH: int +SPI_SETMOUSEVANISH: int +SPI_GETFLATMENU: int +SPI_SETFLATMENU: int +SPI_GETDROPSHADOW: int +SPI_SETDROPSHADOW: int +SPI_GETBLOCKSENDINPUTRESETS: int +SPI_SETBLOCKSENDINPUTRESETS: int +SPI_GETUIEFFECTS: int +SPI_SETUIEFFECTS: int +SPI_GETFOREGROUNDLOCKTIMEOUT: int +SPI_SETFOREGROUNDLOCKTIMEOUT: int +SPI_GETACTIVEWNDTRKTIMEOUT: int +SPI_SETACTIVEWNDTRKTIMEOUT: int +SPI_GETFOREGROUNDFLASHCOUNT: int +SPI_SETFOREGROUNDFLASHCOUNT: int +SPI_GETCARETWIDTH: int +SPI_SETCARETWIDTH: int +SPI_GETMOUSECLICKLOCKTIME: int +SPI_SETMOUSECLICKLOCKTIME: int +SPI_GETFONTSMOOTHINGTYPE: int +SPI_SETFONTSMOOTHINGTYPE: int +SPI_GETFONTSMOOTHINGCONTRAST: int +SPI_SETFONTSMOOTHINGCONTRAST: int +SPI_GETFOCUSBORDERWIDTH: int +SPI_SETFOCUSBORDERWIDTH: int +SPI_GETFOCUSBORDERHEIGHT: int 
+SPI_SETFOCUSBORDERHEIGHT: int +SPI_GETFONTSMOOTHINGORIENTATION: int +SPI_SETFONTSMOOTHINGORIENTATION: int +SPIF_UPDATEINIFILE: int +SPIF_SENDWININICHANGE: int +SPIF_SENDCHANGE: int +FE_FONTSMOOTHINGSTANDARD: int +FE_FONTSMOOTHINGCLEARTYPE: int +FE_FONTSMOOTHINGDOCKING: int +METRICS_USEDEFAULT: int +ARW_BOTTOMLEFT: int +ARW_BOTTOMRIGHT: int +ARW_TOPLEFT: int +ARW_TOPRIGHT: int +ARW_STARTMASK: int +ARW_STARTRIGHT: int +ARW_STARTTOP: int +ARW_LEFT: int +ARW_RIGHT: int +ARW_UP: int +ARW_DOWN: int +ARW_HIDE: int +SERKF_SERIALKEYSON: int +SERKF_AVAILABLE: int +SERKF_INDICATOR: int +HCF_HIGHCONTRASTON: int +HCF_AVAILABLE: int +HCF_HOTKEYACTIVE: int +HCF_CONFIRMHOTKEY: int +HCF_HOTKEYSOUND: int +HCF_INDICATOR: int +HCF_HOTKEYAVAILABLE: int +CDS_UPDATEREGISTRY: int +CDS_TEST: int +CDS_FULLSCREEN: int +CDS_GLOBAL: int +CDS_SET_PRIMARY: int +CDS_RESET: int +CDS_SETRECT: int +CDS_NORESET: int +DISP_CHANGE_SUCCESSFUL: int +DISP_CHANGE_RESTART: int +DISP_CHANGE_FAILED: int +DISP_CHANGE_BADMODE: int +DISP_CHANGE_NOTUPDATED: int +DISP_CHANGE_BADFLAGS: int +DISP_CHANGE_BADPARAM: int +DISP_CHANGE_BADDUALVIEW: int +ENUM_CURRENT_SETTINGS: int +ENUM_REGISTRY_SETTINGS: int +FKF_FILTERKEYSON: int +FKF_AVAILABLE: int +FKF_HOTKEYACTIVE: int +FKF_CONFIRMHOTKEY: int +FKF_HOTKEYSOUND: int +FKF_INDICATOR: int +FKF_CLICKON: int +SKF_STICKYKEYSON: int +SKF_AVAILABLE: int +SKF_HOTKEYACTIVE: int +SKF_CONFIRMHOTKEY: int +SKF_HOTKEYSOUND: int +SKF_INDICATOR: int +SKF_AUDIBLEFEEDBACK: int +SKF_TRISTATE: int +SKF_TWOKEYSOFF: int +SKF_LALTLATCHED: int +SKF_LCTLLATCHED: int +SKF_LSHIFTLATCHED: int +SKF_RALTLATCHED: int +SKF_RCTLLATCHED: int +SKF_RSHIFTLATCHED: int +SKF_LWINLATCHED: int +SKF_RWINLATCHED: int +SKF_LALTLOCKED: int +SKF_LCTLLOCKED: int +SKF_LSHIFTLOCKED: int +SKF_RALTLOCKED: int +SKF_RCTLLOCKED: int +SKF_RSHIFTLOCKED: int +SKF_LWINLOCKED: int +SKF_RWINLOCKED: int +MKF_MOUSEKEYSON: int +MKF_AVAILABLE: int +MKF_HOTKEYACTIVE: int +MKF_CONFIRMHOTKEY: int +MKF_HOTKEYSOUND: int +MKF_INDICATOR: int +MKF_MODIFIERS: int +MKF_REPLACENUMBERS: int +MKF_LEFTBUTTONSEL: int +MKF_RIGHTBUTTONSEL: int +MKF_LEFTBUTTONDOWN: int +MKF_RIGHTBUTTONDOWN: int +MKF_MOUSEMODE: int +ATF_TIMEOUTON: int +ATF_ONOFFFEEDBACK: int +SSGF_NONE: int +SSGF_DISPLAY: int +SSTF_NONE: int +SSTF_CHARS: int +SSTF_BORDER: int +SSTF_DISPLAY: int +SSWF_NONE: int +SSWF_TITLE: int +SSWF_WINDOW: int +SSWF_DISPLAY: int +SSWF_CUSTOM: int +SSF_SOUNDSENTRYON: int +SSF_AVAILABLE: int +SSF_INDICATOR: int +TKF_TOGGLEKEYSON: int +TKF_AVAILABLE: int +TKF_HOTKEYACTIVE: int +TKF_CONFIRMHOTKEY: int +TKF_HOTKEYSOUND: int +TKF_INDICATOR: int +SLE_ERROR: int +SLE_MINORERROR: int +SLE_WARNING: int +MONITOR_DEFAULTTONULL: int +MONITOR_DEFAULTTOPRIMARY: int +MONITOR_DEFAULTTONEAREST: int +MONITORINFOF_PRIMARY: int +CHILDID_SELF: int +INDEXID_OBJECT: int +INDEXID_CONTAINER: int +OBJID_WINDOW: int +OBJID_SYSMENU: int +OBJID_TITLEBAR: int +OBJID_MENU: int +OBJID_CLIENT: int +OBJID_VSCROLL: int +OBJID_HSCROLL: int +OBJID_SIZEGRIP: int +OBJID_CARET: int +OBJID_CURSOR: int +OBJID_ALERT: int +OBJID_SOUND: int +EVENT_MIN: int +EVENT_MAX: int +EVENT_SYSTEM_SOUND: int +EVENT_SYSTEM_ALERT: int +EVENT_SYSTEM_FOREGROUND: int +EVENT_SYSTEM_MENUSTART: int +EVENT_SYSTEM_MENUEND: int +EVENT_SYSTEM_MENUPOPUPSTART: int +EVENT_SYSTEM_MENUPOPUPEND: int +EVENT_SYSTEM_CAPTURESTART: int +EVENT_SYSTEM_CAPTUREEND: int +EVENT_SYSTEM_MOVESIZESTART: int +EVENT_SYSTEM_MOVESIZEEND: int +EVENT_SYSTEM_CONTEXTHELPSTART: int +EVENT_SYSTEM_CONTEXTHELPEND: int +EVENT_SYSTEM_DRAGDROPSTART: int 
+EVENT_SYSTEM_DRAGDROPEND: int +EVENT_SYSTEM_DIALOGSTART: int +EVENT_SYSTEM_DIALOGEND: int +EVENT_SYSTEM_SCROLLINGSTART: int +EVENT_SYSTEM_SCROLLINGEND: int +EVENT_SYSTEM_SWITCHSTART: int +EVENT_SYSTEM_SWITCHEND: int +EVENT_SYSTEM_MINIMIZESTART: int +EVENT_SYSTEM_MINIMIZEEND: int +EVENT_OBJECT_CREATE: int +EVENT_OBJECT_DESTROY: int +EVENT_OBJECT_SHOW: int +EVENT_OBJECT_HIDE: int +EVENT_OBJECT_REORDER: int +EVENT_OBJECT_FOCUS: int +EVENT_OBJECT_SELECTION: int +EVENT_OBJECT_SELECTIONADD: int +EVENT_OBJECT_SELECTIONREMOVE: int +EVENT_OBJECT_SELECTIONWITHIN: int +EVENT_OBJECT_STATECHANGE: int +EVENT_OBJECT_LOCATIONCHANGE: int +EVENT_OBJECT_NAMECHANGE: int +EVENT_OBJECT_DESCRIPTIONCHANGE: int +EVENT_OBJECT_VALUECHANGE: int +EVENT_OBJECT_PARENTCHANGE: int +EVENT_OBJECT_HELPCHANGE: int +EVENT_OBJECT_DEFACTIONCHANGE: int +EVENT_OBJECT_ACCELERATORCHANGE: int +SOUND_SYSTEM_STARTUP: int +SOUND_SYSTEM_SHUTDOWN: int +SOUND_SYSTEM_BEEP: int +SOUND_SYSTEM_ERROR: int +SOUND_SYSTEM_QUESTION: int +SOUND_SYSTEM_WARNING: int +SOUND_SYSTEM_INFORMATION: int +SOUND_SYSTEM_MAXIMIZE: int +SOUND_SYSTEM_MINIMIZE: int +SOUND_SYSTEM_RESTOREUP: int +SOUND_SYSTEM_RESTOREDOWN: int +SOUND_SYSTEM_APPSTART: int +SOUND_SYSTEM_FAULT: int +SOUND_SYSTEM_APPEND: int +SOUND_SYSTEM_MENUCOMMAND: int +SOUND_SYSTEM_MENUPOPUP: int +CSOUND_SYSTEM: int +ALERT_SYSTEM_INFORMATIONAL: int +ALERT_SYSTEM_WARNING: int +ALERT_SYSTEM_ERROR: int +ALERT_SYSTEM_QUERY: int +ALERT_SYSTEM_CRITICAL: int +CALERT_SYSTEM: int +WINEVENT_OUTOFCONTEXT: int +WINEVENT_SKIPOWNTHREAD: int +WINEVENT_SKIPOWNPROCESS: int +WINEVENT_INCONTEXT: int +GUI_CARETBLINKING: int +GUI_INMOVESIZE: int +GUI_INMENUMODE: int +GUI_SYSTEMMENUMODE: int +GUI_POPUPMENUMODE: int +STATE_SYSTEM_UNAVAILABLE: int +STATE_SYSTEM_SELECTED: int +STATE_SYSTEM_FOCUSED: int +STATE_SYSTEM_PRESSED: int +STATE_SYSTEM_CHECKED: int +STATE_SYSTEM_MIXED: int +STATE_SYSTEM_READONLY: int +STATE_SYSTEM_HOTTRACKED: int +STATE_SYSTEM_DEFAULT: int +STATE_SYSTEM_EXPANDED: int +STATE_SYSTEM_COLLAPSED: int +STATE_SYSTEM_BUSY: int +STATE_SYSTEM_FLOATING: int +STATE_SYSTEM_MARQUEED: int +STATE_SYSTEM_ANIMATED: int +STATE_SYSTEM_INVISIBLE: int +STATE_SYSTEM_OFFSCREEN: int +STATE_SYSTEM_SIZEABLE: int +STATE_SYSTEM_MOVEABLE: int +STATE_SYSTEM_SELFVOICING: int +STATE_SYSTEM_FOCUSABLE: int +STATE_SYSTEM_SELECTABLE: int +STATE_SYSTEM_LINKED: int +STATE_SYSTEM_TRAVERSED: int +STATE_SYSTEM_MULTISELECTABLE: int +STATE_SYSTEM_EXTSELECTABLE: int +STATE_SYSTEM_ALERT_LOW: int +STATE_SYSTEM_ALERT_MEDIUM: int +STATE_SYSTEM_ALERT_HIGH: int +STATE_SYSTEM_VALID: int +CCHILDREN_TITLEBAR: int +CCHILDREN_SCROLLBAR: int +CURSOR_SHOWING: int +WS_ACTIVECAPTION: int +GA_MIC: int +GA_PARENT: int +GA_ROOT: int +GA_ROOTOWNER: int +GA_MAC: int +BF_LEFT: int +BF_TOP: int +BF_RIGHT: int +BF_BOTTOM: int +BF_TOPLEFT: int +BF_TOPRIGHT: int +BF_BOTTOMLEFT: int +BF_BOTTOMRIGHT: int +BF_RECT: int +BF_DIAGONAL: int +BF_DIAGONAL_ENDTOPRIGHT: int +BF_DIAGONAL_ENDTOPLEFT: int +BF_DIAGONAL_ENDBOTTOMLEFT: int +BF_DIAGONAL_ENDBOTTOMRIGHT: int +BF_MIDDLE: int +BF_SOFT: int +BF_ADJUST: int +BF_FLAT: int +BF_MONO: int +DFC_CAPTION: int +DFC_MENU: int +DFC_SCROLL: int +DFC_BUTTON: int +DFC_POPUPMENU: int +DFCS_CAPTIONCLOSE: int +DFCS_CAPTIONMIN: int +DFCS_CAPTIONMAX: int +DFCS_CAPTIONRESTORE: int +DFCS_CAPTIONHELP: int +DFCS_MENUARROW: int +DFCS_MENUCHECK: int +DFCS_MENUBULLET: int +DFCS_MENUARROWRIGHT: int +DFCS_SCROLLUP: int +DFCS_SCROLLDOWN: int +DFCS_SCROLLLEFT: int +DFCS_SCROLLRIGHT: int +DFCS_SCROLLCOMBOBOX: int +DFCS_SCROLLSIZEGRIP: int 
+DFCS_SCROLLSIZEGRIPRIGHT: int +DFCS_BUTTONCHECK: int +DFCS_BUTTONRADIOIMAGE: int +DFCS_BUTTONRADIOMASK: int +DFCS_BUTTONRADIO: int +DFCS_BUTTON3STATE: int +DFCS_BUTTONPUSH: int +DFCS_INACTIVE: int +DFCS_PUSHED: int +DFCS_CHECKED: int +DFCS_TRANSPARENT: int +DFCS_HOT: int +DFCS_ADJUSTRECT: int +DFCS_FLAT: int +DFCS_MONO: int +DC_ACTIVE: int +DC_SMALLCAP: int +DC_ICON: int +DC_TEXT: int +DC_INBUTTON: int +DC_GRADIENT: int +IDANI_OPEN: int +IDANI_CLOSE: int +IDANI_CAPTION: int +CF_TEXT: int +CF_BITMAP: int +CF_METAFILEPICT: int +CF_SYLK: int +CF_DIF: int +CF_TIFF: int +CF_OEMTEXT: int +CF_DIB: int +CF_PALETTE: int +CF_PENDATA: int +CF_RIFF: int +CF_WAVE: int +CF_UNICODETEXT: int +CF_ENHMETAFILE: int +CF_HDROP: int +CF_LOCALE: int +CF_DIBV5: int +CF_MAX: int +CF_OWNERDISPLAY: int +CF_DSPTEXT: int +CF_DSPBITMAP: int +CF_DSPMETAFILEPICT: int +CF_DSPENHMETAFILE: int +CF_PRIVATEFIRST: int +CF_PRIVATELAST: int +CF_GDIOBJFIRST: int +CF_GDIOBJLAST: int +FVIRTKEY: int +FNOINVERT: int +FSHIFT: int +FCONTROL: int +FALT: int +WPF_SETMINPOSITION: int +WPF_RESTORETOMAXIMIZED: int +ODT_MENU: int +ODT_COMBOBOX: int +ODT_BUTTON: int +ODT_STATIC: int +ODA_DRAWENTIRE: int +ODA_SELECT: int +ODA_FOCUS: int +ODS_SELECTED: int +ODS_GRAYED: int +ODS_DISABLED: int +ODS_CHECKED: int +ODS_FOCUS: int +ODS_DEFAULT: int +ODS_COMBOBOXEDIT: int +ODS_HOTLIGHT: int +ODS_INACTIVE: int +PM_NOREMOVE: int +PM_REMOVE: int +PM_NOYIELD: int +MOD_ALT: int +MOD_CONTROL: int +MOD_SHIFT: int +MOD_WIN: int +IDHOT_SNAPWINDOW: int +IDHOT_SNAPDESKTOP: int +ENDSESSION_LOGOFF: int +EWX_LOGOFF: int +EWX_SHUTDOWN: int +EWX_REBOOT: int +EWX_FORCE: int +EWX_POWEROFF: int +EWX_FORCEIFHUNG: int +BSM_ALLCOMPONENTS: int +BSM_VXDS: int +BSM_NETDRIVER: int +BSM_INSTALLABLEDRIVERS: int +BSM_APPLICATIONS: int +BSM_ALLDESKTOPS: int +BSF_QUERY: int +BSF_IGNORECURRENTTASK: int +BSF_FLUSHDISK: int +BSF_NOHANG: int +BSF_POSTMESSAGE: int +BSF_FORCEIFHUNG: int +BSF_NOTIMEOUTIFNOTHUNG: int +BROADCAST_QUERY_DENY: int +DBWF_LPARAMPOINTER: int +SWP_NOSIZE: int +SWP_NOMOVE: int +SWP_NOZORDER: int +SWP_NOREDRAW: int +SWP_NOACTIVATE: int +SWP_FRAMECHANGED: int +SWP_SHOWWINDOW: int +SWP_HIDEWINDOW: int +SWP_NOCOPYBITS: int +SWP_NOOWNERZORDER: int +SWP_NOSENDCHANGING: int +SWP_DRAWFRAME: int +SWP_NOREPOSITION: int +SWP_DEFERERASE: int +SWP_ASYNCWINDOWPOS: int +DLGWINDOWEXTRA: int +KEYEVENTF_EXTENDEDKEY: int +KEYEVENTF_KEYUP: int +MOUSEEVENTF_MOVE: int +MOUSEEVENTF_LEFTDOWN: int +MOUSEEVENTF_LEFTUP: int +MOUSEEVENTF_RIGHTDOWN: int +MOUSEEVENTF_RIGHTUP: int +MOUSEEVENTF_MIDDLEDOWN: int +MOUSEEVENTF_MIDDLEUP: int +MOUSEEVENTF_ABSOLUTE: int +INPUT_MOUSE: int +INPUT_KEYBOARD: int +INPUT_HARDWARE: int +MWMO_WAITALL: int +MWMO_ALERTABLE: int +MWMO_INPUTAVAILABLE: int +QS_KEY: int +QS_MOUSEMOVE: int +QS_MOUSEBUTTON: int +QS_POSTMESSAGE: int +QS_TIMER: int +QS_PAINT: int +QS_SENDMESSAGE: int +QS_HOTKEY: int +QS_MOUSE: int +QS_INPUT: int +QS_ALLEVENTS: int +QS_ALLINPUT: int +IMN_CLOSESTATUSWINDOW: int +IMN_OPENSTATUSWINDOW: int +IMN_CHANGECANDIDATE: int +IMN_CLOSECANDIDATE: int +IMN_OPENCANDIDATE: int +IMN_SETCONVERSIONMODE: int +IMN_SETSENTENCEMODE: int +IMN_SETOPENSTATUS: int +IMN_SETCANDIDATEPOS: int +IMN_SETCOMPOSITIONFONT: int +IMN_SETCOMPOSITIONWINDOW: int +IMN_SETSTATUSWINDOWPOS: int +IMN_GUIDELINE: int +IMN_PRIVATE: int +HELP_CONTEXT: int +HELP_QUIT: int +HELP_INDEX: int +HELP_CONTENTS: int +HELP_HELPONHELP: int +HELP_SETINDEX: int +HELP_SETCONTENTS: int +HELP_CONTEXTPOPUP: int +HELP_FORCEFILE: int +HELP_KEY: int +HELP_COMMAND: int +HELP_PARTIALKEY: int +HELP_MULTIKEY: 
int +HELP_SETWINPOS: int +HELP_CONTEXTMENU: int +HELP_FINDER: int +HELP_WM_HELP: int +HELP_SETPOPUP_POS: int +HELP_TCARD: int +HELP_TCARD_DATA: int +HELP_TCARD_OTHER_CALLER: int +IDH_NO_HELP: int +IDH_MISSING_CONTEXT: int +IDH_GENERIC_HELP_BUTTON: int +IDH_OK: int +IDH_CANCEL: int +IDH_HELP: int +GR_GDIOBJECTS: int +GR_USEROBJECTS: int +SRCCOPY: int +SRCPAINT: int +SRCAND: int +SRCINVERT: int +SRCERASE: int +NOTSRCCOPY: int +NOTSRCERASE: int +MERGECOPY: int +MERGEPAINT: int +PATCOPY: int +PATPAINT: int +PATINVERT: int +DSTINVERT: int +BLACKNESS: int +WHITENESS: int +R2_BLACK: int +R2_NOTMERGEPEN: int +R2_MASKNOTPEN: int +R2_NOTCOPYPEN: int +R2_MASKPENNOT: int +R2_NOT: int +R2_XORPEN: int +R2_NOTMASKPEN: int +R2_MASKPEN: int +R2_NOTXORPEN: int +R2_NOP: int +R2_MERGENOTPEN: int +R2_COPYPEN: int +R2_MERGEPENNOT: int +R2_MERGEPEN: int +R2_WHITE: int +R2_LAST: int +GDI_ERROR: int +ERROR: int +NULLREGION: int +SIMPLEREGION: int +COMPLEXREGION: int +RGN_ERROR: int +RGN_AND: int +RGN_OR: int +RGN_XOR: int +RGN_DIFF: int +RGN_COPY: int +RGN_MIN: int +RGN_MAX: int +BLACKONWHITE: int +WHITEONBLACK: int +COLORONCOLOR: int +HALFTONE: int +MAXSTRETCHBLTMODE: int +STRETCH_ANDSCANS: int +STRETCH_ORSCANS: int +STRETCH_DELETESCANS: int +STRETCH_HALFTONE: int +ALTERNATE: int +WINDING: int +POLYFILL_LAST: int +LAYOUT_RTL: int +LAYOUT_BTT: int +LAYOUT_VBH: int +LAYOUT_ORIENTATIONMASK: int +LAYOUT_BITMAPORIENTATIONPRESERVED: int +TA_NOUPDATECP: int +TA_UPDATECP: int +TA_LEFT: int +TA_RIGHT: int +TA_CENTER: int +TA_TOP: int +TA_BOTTOM: int +TA_BASELINE: int +TA_MASK: int +VTA_BASELINE: int +VTA_LEFT: int +VTA_RIGHT: int +VTA_CENTER: int +VTA_BOTTOM: int +VTA_TOP: int +ETO_GRAYED: int +ETO_OPAQUE: int +ETO_CLIPPED: int +ASPECT_FILTERING: int +DCB_RESET: int +DCB_ACCUMULATE: int +DCB_DIRTY: int +DCB_SET: int +DCB_ENABLE: int +DCB_DISABLE: int +META_SETBKCOLOR: int +META_SETBKMODE: int +META_SETMAPMODE: int +META_SETROP2: int +META_SETRELABS: int +META_SETPOLYFILLMODE: int +META_SETSTRETCHBLTMODE: int +META_SETTEXTCHAREXTRA: int +META_SETTEXTCOLOR: int +META_SETTEXTJUSTIFICATION: int +META_SETWINDOWORG: int +META_SETWINDOWEXT: int +META_SETVIEWPORTORG: int +META_SETVIEWPORTEXT: int +META_OFFSETWINDOWORG: int +META_SCALEWINDOWEXT: int +META_OFFSETVIEWPORTORG: int +META_SCALEVIEWPORTEXT: int +META_LINETO: int +META_MOVETO: int +META_EXCLUDECLIPRECT: int +META_INTERSECTCLIPRECT: int +META_ARC: int +META_ELLIPSE: int +META_FLOODFILL: int +META_PIE: int +META_RECTANGLE: int +META_ROUNDRECT: int +META_PATBLT: int +META_SAVEDC: int +META_SETPIXEL: int +META_OFFSETCLIPRGN: int +META_TEXTOUT: int +META_BITBLT: int +META_STRETCHBLT: int +META_POLYGON: int +META_POLYLINE: int +META_ESCAPE: int +META_RESTOREDC: int +META_FILLREGION: int +META_FRAMEREGION: int +META_INVERTREGION: int +META_PAINTREGION: int +META_SELECTCLIPREGION: int +META_SELECTOBJECT: int +META_SETTEXTALIGN: int +META_CHORD: int +META_SETMAPPERFLAGS: int +META_EXTTEXTOUT: int +META_SETDIBTODEV: int +META_SELECTPALETTE: int +META_REALIZEPALETTE: int +META_ANIMATEPALETTE: int +META_SETPALENTRIES: int +META_POLYPOLYGON: int +META_RESIZEPALETTE: int +META_DIBBITBLT: int +META_DIBSTRETCHBLT: int +META_DIBCREATEPATTERNBRUSH: int +META_STRETCHDIB: int +META_EXTFLOODFILL: int +META_DELETEOBJECT: int +META_CREATEPALETTE: int +META_CREATEPATTERNBRUSH: int +META_CREATEPENINDIRECT: int +META_CREATEFONTINDIRECT: int +META_CREATEBRUSHINDIRECT: int +META_CREATEREGION: int +FILE_BEGIN: int +FILE_CURRENT: int +FILE_END: int +FILE_FLAG_WRITE_THROUGH: int +FILE_FLAG_OVERLAPPED: 
int +FILE_FLAG_NO_BUFFERING: int +FILE_FLAG_RANDOM_ACCESS: int +FILE_FLAG_SEQUENTIAL_SCAN: int +FILE_FLAG_DELETE_ON_CLOSE: int +FILE_FLAG_BACKUP_SEMANTICS: int +FILE_FLAG_POSIX_SEMANTICS: int +CREATE_NEW: int +CREATE_ALWAYS: int +OPEN_EXISTING: int +OPEN_ALWAYS: int +TRUNCATE_EXISTING: int +PIPE_ACCESS_INBOUND: int +PIPE_ACCESS_OUTBOUND: int +PIPE_ACCESS_DUPLEX: int +PIPE_CLIENT_END: int +PIPE_SERVER_END: int +PIPE_WAIT: int +PIPE_NOWAIT: int +PIPE_READMODE_BYTE: int +PIPE_READMODE_MESSAGE: int +PIPE_TYPE_BYTE: int +PIPE_TYPE_MESSAGE: int +PIPE_UNLIMITED_INSTANCES: int +SECURITY_CONTEXT_TRACKING: int +SECURITY_EFFECTIVE_ONLY: int +SECURITY_SQOS_PRESENT: int +SECURITY_VALID_SQOS_FLAGS: int +DTR_CONTROL_DISABLE: int +DTR_CONTROL_ENABLE: int +DTR_CONTROL_HANDSHAKE: int +RTS_CONTROL_DISABLE: int +RTS_CONTROL_ENABLE: int +RTS_CONTROL_HANDSHAKE: int +RTS_CONTROL_TOGGLE: int +GMEM_FIXED: int +GMEM_MOVEABLE: int +GMEM_NOCOMPACT: int +GMEM_NODISCARD: int +GMEM_ZEROINIT: int +GMEM_MODIFY: int +GMEM_DISCARDABLE: int +GMEM_NOT_BANKED: int +GMEM_SHARE: int +GMEM_DDESHARE: int +GMEM_NOTIFY: int +GMEM_LOWER: int +GMEM_VALID_FLAGS: int +GMEM_INVALID_HANDLE: int +GHND: int +GPTR: int +GMEM_DISCARDED: int +GMEM_LOCKCOUNT: int +LMEM_FIXED: int +LMEM_MOVEABLE: int +LMEM_NOCOMPACT: int +LMEM_NODISCARD: int +LMEM_ZEROINIT: int +LMEM_MODIFY: int +LMEM_DISCARDABLE: int +LMEM_VALID_FLAGS: int +LMEM_INVALID_HANDLE: int +LHND: int +LPTR: int +NONZEROLHND: int +NONZEROLPTR: int +LMEM_DISCARDED: int +LMEM_LOCKCOUNT: int +DEBUG_PROCESS: int +DEBUG_ONLY_THIS_PROCESS: int +CREATE_SUSPENDED: int +DETACHED_PROCESS: int +CREATE_NEW_CONSOLE: int +NORMAL_PRIORITY_CLASS: int +IDLE_PRIORITY_CLASS: int +HIGH_PRIORITY_CLASS: int +REALTIME_PRIORITY_CLASS: int +CREATE_NEW_PROCESS_GROUP: int +CREATE_UNICODE_ENVIRONMENT: int +CREATE_SEPARATE_WOW_VDM: int +CREATE_SHARED_WOW_VDM: int +CREATE_DEFAULT_ERROR_MODE: int +CREATE_NO_WINDOW: int +PROFILE_USER: int +PROFILE_KERNEL: int +PROFILE_SERVER: int +THREAD_BASE_PRIORITY_LOWRT: int +THREAD_BASE_PRIORITY_MAX: int +THREAD_BASE_PRIORITY_MIN: int +THREAD_BASE_PRIORITY_IDLE: int +THREAD_PRIORITY_LOWEST: int +THREAD_PRIORITY_BELOW_NORMAL: int +THREAD_PRIORITY_HIGHEST: int +THREAD_PRIORITY_ABOVE_NORMAL: int +THREAD_PRIORITY_ERROR_RETURN: int +THREAD_PRIORITY_TIME_CRITICAL: int +THREAD_PRIORITY_IDLE: int +THREAD_PRIORITY_NORMAL: int +THREAD_MODE_BACKGROUND_BEGIN: int +THREAD_MODE_BACKGROUND_END: int +EXCEPTION_DEBUG_EVENT: int +CREATE_THREAD_DEBUG_EVENT: int +CREATE_PROCESS_DEBUG_EVENT: int +EXIT_THREAD_DEBUG_EVENT: int +EXIT_PROCESS_DEBUG_EVENT: int +LOAD_DLL_DEBUG_EVENT: int +UNLOAD_DLL_DEBUG_EVENT: int +OUTPUT_DEBUG_STRING_EVENT: int +RIP_EVENT: int +DRIVE_UNKNOWN: int +DRIVE_NO_ROOT_DIR: int +DRIVE_REMOVABLE: int +DRIVE_FIXED: int +DRIVE_REMOTE: int +DRIVE_CDROM: int +DRIVE_RAMDISK: int +FILE_TYPE_UNKNOWN: int +FILE_TYPE_DISK: int +FILE_TYPE_CHAR: int +FILE_TYPE_PIPE: int +FILE_TYPE_REMOTE: int +NOPARITY: int +ODDPARITY: int +EVENPARITY: int +MARKPARITY: int +SPACEPARITY: int +ONESTOPBIT: int +ONE5STOPBITS: int +TWOSTOPBITS: int +CBR_110: int +CBR_300: int +CBR_600: int +CBR_1200: int +CBR_2400: int +CBR_4800: int +CBR_9600: int +CBR_14400: int +CBR_19200: int +CBR_38400: int +CBR_56000: int +CBR_57600: int +CBR_115200: int +CBR_128000: int +CBR_256000: int +S_QUEUEEMPTY: int +S_THRESHOLD: int +S_ALLTHRESHOLD: int +S_NORMAL: int +S_LEGATO: int +S_STACCATO: int +NMPWAIT_WAIT_FOREVER: int +NMPWAIT_NOWAIT: int +NMPWAIT_USE_DEFAULT_WAIT: int +OF_READ: int +OF_WRITE: int +OF_READWRITE: int 
+OF_SHARE_COMPAT: int +OF_SHARE_EXCLUSIVE: int +OF_SHARE_DENY_WRITE: int +OF_SHARE_DENY_READ: int +OF_SHARE_DENY_NONE: int +OF_PARSE: int +OF_DELETE: int +OF_VERIFY: int +OF_CANCEL: int +OF_CREATE: int +OF_PROMPT: int +OF_EXIST: int +OF_REOPEN: int +OFS_MAXPATHNAME: int +MAXINTATOM: int +PROCESS_HEAP_REGION: int +PROCESS_HEAP_UNCOMMITTED_RANGE: int +PROCESS_HEAP_ENTRY_BUSY: int +PROCESS_HEAP_ENTRY_MOVEABLE: int +PROCESS_HEAP_ENTRY_DDESHARE: int +SCS_32BIT_BINARY: int +SCS_DOS_BINARY: int +SCS_WOW_BINARY: int +SCS_PIF_BINARY: int +SCS_POSIX_BINARY: int +SCS_OS216_BINARY: int +SEM_FAILCRITICALERRORS: int +SEM_NOGPFAULTERRORBOX: int +SEM_NOALIGNMENTFAULTEXCEPT: int +SEM_NOOPENFILEERRORBOX: int +LOCKFILE_FAIL_IMMEDIATELY: int +LOCKFILE_EXCLUSIVE_LOCK: int +HANDLE_FLAG_INHERIT: int +HANDLE_FLAG_PROTECT_FROM_CLOSE: int +HINSTANCE_ERROR: int +GET_TAPE_MEDIA_INFORMATION: int +GET_TAPE_DRIVE_INFORMATION: int +SET_TAPE_MEDIA_INFORMATION: int +SET_TAPE_DRIVE_INFORMATION: int +FORMAT_MESSAGE_ALLOCATE_BUFFER: int +FORMAT_MESSAGE_IGNORE_INSERTS: int +FORMAT_MESSAGE_FROM_STRING: int +FORMAT_MESSAGE_FROM_HMODULE: int +FORMAT_MESSAGE_FROM_SYSTEM: int +FORMAT_MESSAGE_ARGUMENT_ARRAY: int +FORMAT_MESSAGE_MAX_WIDTH_MASK: int +BACKUP_INVALID: int +BACKUP_DATA: int +BACKUP_EA_DATA: int +BACKUP_SECURITY_DATA: int +BACKUP_ALTERNATE_DATA: int +BACKUP_LINK: int +BACKUP_PROPERTY_DATA: int +BACKUP_OBJECT_ID: int +BACKUP_REPARSE_DATA: int +BACKUP_SPARSE_BLOCK: int +STREAM_NORMAL_ATTRIBUTE: int +STREAM_MODIFIED_WHEN_READ: int +STREAM_CONTAINS_SECURITY: int +STREAM_CONTAINS_PROPERTIES: int +STARTF_USESHOWWINDOW: int +STARTF_USESIZE: int +STARTF_USEPOSITION: int +STARTF_USECOUNTCHARS: int +STARTF_USEFILLATTRIBUTE: int +STARTF_FORCEONFEEDBACK: int +STARTF_FORCEOFFFEEDBACK: int +STARTF_USESTDHANDLES: int +STARTF_USEHOTKEY: int +SHUTDOWN_NORETRY: int +DONT_RESOLVE_DLL_REFERENCES: int +LOAD_LIBRARY_AS_DATAFILE: int +LOAD_WITH_ALTERED_SEARCH_PATH: int +DDD_RAW_TARGET_PATH: int +DDD_REMOVE_DEFINITION: int +DDD_EXACT_MATCH_ON_REMOVE: int +MOVEFILE_REPLACE_EXISTING: int +MOVEFILE_COPY_ALLOWED: int +MOVEFILE_DELAY_UNTIL_REBOOT: int +MAX_COMPUTERNAME_LENGTH: int +LOGON32_LOGON_INTERACTIVE: int +LOGON32_LOGON_NETWORK: int +LOGON32_LOGON_BATCH: int +LOGON32_LOGON_SERVICE: int +LOGON32_LOGON_UNLOCK: int +LOGON32_LOGON_NETWORK_CLEARTEXT: int +LOGON32_LOGON_NEW_CREDENTIALS: int +LOGON32_PROVIDER_DEFAULT: int +LOGON32_PROVIDER_WINNT35: int +LOGON32_PROVIDER_WINNT40: int +LOGON32_PROVIDER_WINNT50: int +VER_PLATFORM_WIN32s: int +VER_PLATFORM_WIN32_WINDOWS: int +VER_PLATFORM_WIN32_NT: int +TC_NORMAL: int +TC_HARDERR: int +TC_GP_TRAP: int +TC_SIGNAL: int +AC_LINE_OFFLINE: int +AC_LINE_ONLINE: int +AC_LINE_BACKUP_POWER: int +AC_LINE_UNKNOWN: int +BATTERY_FLAG_HIGH: int +BATTERY_FLAG_LOW: int +BATTERY_FLAG_CRITICAL: int +BATTERY_FLAG_CHARGING: int +BATTERY_FLAG_NO_BATTERY: int +BATTERY_FLAG_UNKNOWN: int +BATTERY_PERCENTAGE_UNKNOWN: int +BATTERY_LIFE_UNKNOWN: int +cchTextLimitDefault: int +EN_MSGFILTER: int +EN_REQUESTRESIZE: int +EN_SELCHANGE: int +EN_DROPFILES: int +EN_PROTECTED: int +EN_CORRECTTEXT: int +EN_STOPNOUNDO: int +EN_IMECHANGE: int +EN_SAVECLIPBOARD: int +EN_OLEOPFAILED: int +ENM_NONE: int +ENM_CHANGE: int +ENM_UPDATE: int +ENM_SCROLL: int +ENM_KEYEVENTS: int +ENM_MOUSEEVENTS: int +ENM_REQUESTRESIZE: int +ENM_SELCHANGE: int +ENM_DROPFILES: int +ENM_PROTECTED: int +ENM_CORRECTTEXT: int +ENM_IMECHANGE: int +ES_SAVESEL: int +ES_SUNKEN: int +ES_DISABLENOSCROLL: int +ES_SELECTIONBAR: int +ES_EX_NOCALLOLEINIT: int +ES_VERTICAL: int 
+ES_NOIME: int +ES_SELFIME: int +ECO_AUTOWORDSELECTION: int +ECO_AUTOVSCROLL: int +ECO_AUTOHSCROLL: int +ECO_NOHIDESEL: int +ECO_READONLY: int +ECO_WANTRETURN: int +ECO_SAVESEL: int +ECO_SELECTIONBAR: int +ECO_VERTICAL: int +ECOOP_SET: int +ECOOP_OR: int +ECOOP_AND: int +ECOOP_XOR: int +WB_CLASSIFY: int +WB_MOVEWORDLEFT: int +WB_MOVEWORDRIGHT: int +WB_LEFTBREAK: int +WB_RIGHTBREAK: int +WB_MOVEWORDPREV: int +WB_MOVEWORDNEXT: int +WB_PREVBREAK: int +WB_NEXTBREAK: int +PC_FOLLOWING: int +PC_LEADING: int +PC_OVERFLOW: int +PC_DELIMITER: int +WBF_WORDWRAP: int +WBF_WORDBREAK: int +WBF_OVERFLOW: int +WBF_LEVEL1: int +WBF_LEVEL2: int +WBF_CUSTOM: int +CFM_BOLD: int +CFM_ITALIC: int +CFM_UNDERLINE: int +CFM_STRIKEOUT: int +CFM_PROTECTED: int +CFM_SIZE: int +CFM_COLOR: int +CFM_FACE: int +CFM_OFFSET: int +CFM_CHARSET: int +CFE_BOLD: int +CFE_ITALIC: int +CFE_UNDERLINE: int +CFE_STRIKEOUT: int +CFE_PROTECTED: int +CFE_AUTOCOLOR: int +yHeightCharPtsMost: int +SCF_SELECTION: int +SCF_WORD: int +SF_TEXT: int +SF_RTF: int +SF_RTFNOOBJS: int +SF_TEXTIZED: int +SFF_SELECTION: int +SFF_PLAINRTF: int +MAX_TAB_STOPS: int +lDefaultTab: int +PFM_STARTINDENT: int +PFM_RIGHTINDENT: int +PFM_OFFSET: int +PFM_ALIGNMENT: int +PFM_TABSTOPS: int +PFM_NUMBERING: int +PFM_OFFSETINDENT: int +PFN_BULLET: int +PFA_LEFT: int +PFA_RIGHT: int +PFA_CENTER: int +SEL_EMPTY: int +SEL_TEXT: int +SEL_OBJECT: int +SEL_MULTICHAR: int +SEL_MULTIOBJECT: int +OLEOP_DOVERB: int +CF_RTF: str +CF_RTFNOOBJS: str +CF_RETEXTOBJ: str +RIGHT_ALT_PRESSED: int +LEFT_ALT_PRESSED: int +RIGHT_CTRL_PRESSED: int +LEFT_CTRL_PRESSED: int +SHIFT_PRESSED: int +NUMLOCK_ON: int +SCROLLLOCK_ON: int +CAPSLOCK_ON: int +ENHANCED_KEY: int +NLS_DBCSCHAR: int +NLS_ALPHANUMERIC: int +NLS_KATAKANA: int +NLS_HIRAGANA: int +NLS_ROMAN: int +NLS_IME_CONVERSION: int +NLS_IME_DISABLE: int +FROM_LEFT_1ST_BUTTON_PRESSED: int +RIGHTMOST_BUTTON_PRESSED: int +FROM_LEFT_2ND_BUTTON_PRESSED: int +FROM_LEFT_3RD_BUTTON_PRESSED: int +FROM_LEFT_4TH_BUTTON_PRESSED: int +CTRL_C_EVENT: int +CTRL_BREAK_EVENT: int +CTRL_CLOSE_EVENT: int +CTRL_LOGOFF_EVENT: int +CTRL_SHUTDOWN_EVENT: int +MOUSE_MOVED: int +DOUBLE_CLICK: int +MOUSE_WHEELED: int +PSM_SETCURSEL: int +PSM_REMOVEPAGE: int +PSM_ADDPAGE: int +PSM_CHANGED: int +PSM_RESTARTWINDOWS: int +PSM_REBOOTSYSTEM: int +PSM_CANCELTOCLOSE: int +PSM_QUERYSIBLINGS: int +PSM_UNCHANGED: int +PSM_APPLY: int +PSM_SETTITLEA: int +PSM_SETTITLEW: int +PSM_SETWIZBUTTONS: int +PSM_PRESSBUTTON: int +PSM_SETCURSELID: int +PSM_SETFINISHTEXTA: int +PSM_SETFINISHTEXTW: int +PSM_GETTABCONTROL: int +PSM_ISDIALOGMESSAGE: int +PSM_GETCURRENTPAGEHWND: int +PSM_INSERTPAGE: int +PSM_SETHEADERTITLEA: int +PSM_SETHEADERTITLEW: int +PSM_SETHEADERSUBTITLEA: int +PSM_SETHEADERSUBTITLEW: int +PSM_HWNDTOINDEX: int +PSM_INDEXTOHWND: int +PSM_PAGETOINDEX: int +PSM_INDEXTOPAGE: int +PSM_IDTOINDEX: int +PSM_INDEXTOID: int +PSM_GETRESULT: int +PSM_RECALCPAGESIZES: int +NameUnknown: int +NameFullyQualifiedDN: int +NameSamCompatible: int +NameDisplay: int +NameUniqueId: int +NameCanonical: int +NameUserPrincipal: int +NameCanonicalEx: int +NameServicePrincipal: int +NameDnsDomain: int +ComputerNameNetBIOS: int +ComputerNameDnsHostname: int +ComputerNameDnsDomain: int +ComputerNameDnsFullyQualified: int +ComputerNamePhysicalNetBIOS: int +ComputerNamePhysicalDnsHostname: int +ComputerNamePhysicalDnsDomain: int +ComputerNamePhysicalDnsFullyQualified: int +LWA_COLORKEY: int +LWA_ALPHA: int +ULW_COLORKEY: int +ULW_ALPHA: int +ULW_OPAQUE: int +TRUE: int +FALSE: int +MAX_PATH: int 
+AC_SRC_OVER: int +AC_SRC_ALPHA: int +GRADIENT_FILL_RECT_H: int +GRADIENT_FILL_RECT_V: int +GRADIENT_FILL_TRIANGLE: int +GRADIENT_FILL_OP_FLAG: int +MM_WORKING_SET_MAX_HARD_ENABLE: int +MM_WORKING_SET_MAX_HARD_DISABLE: int +MM_WORKING_SET_MIN_HARD_ENABLE: int +MM_WORKING_SET_MIN_HARD_DISABLE: int +VOLUME_NAME_DOS: int +VOLUME_NAME_GUID: int +VOLUME_NAME_NT: int +VOLUME_NAME_NONE: int +FILE_NAME_NORMALIZED: int +FILE_NAME_OPENED: int +DEVICE_NOTIFY_WINDOW_HANDLE: int +DEVICE_NOTIFY_SERVICE_HANDLE: int +BSF_MSGSRV32ISOK: int +BSF_MSGSRV32ISOK_BIT: int +DBT_APPYEND: int +DBT_DEVNODES_CHANGED: int +DBT_QUERYCHANGECONFIG: int +DBT_CONFIGCHANGED: int +DBT_CONFIGCHANGECANCELED: int +DBT_MONITORCHANGE: int +DBT_SHELLLOGGEDON: int +DBT_CONFIGMGAPI32: int +DBT_VXDINITCOMPLETE: int +DBT_VOLLOCKQUERYLOCK: int +DBT_VOLLOCKLOCKTAKEN: int +DBT_VOLLOCKLOCKFAILED: int +DBT_VOLLOCKQUERYUNLOCK: int +DBT_VOLLOCKLOCKRELEASED: int +DBT_VOLLOCKUNLOCKFAILED: int +LOCKP_ALLOW_WRITES: int +LOCKP_FAIL_WRITES: int +LOCKP_FAIL_MEM_MAPPING: int +LOCKP_ALLOW_MEM_MAPPING: int +LOCKP_USER_MASK: int +LOCKP_LOCK_FOR_FORMAT: int +LOCKF_LOGICAL_LOCK: int +LOCKF_PHYSICAL_LOCK: int +DBT_NO_DISK_SPACE: int +DBT_LOW_DISK_SPACE: int +DBT_CONFIGMGPRIVATE: int +DBT_DEVICEARRIVAL: int +DBT_DEVICEQUERYREMOVE: int +DBT_DEVICEQUERYREMOVEFAILED: int +DBT_DEVICEREMOVEPENDING: int +DBT_DEVICEREMOVECOMPLETE: int +DBT_DEVICETYPESPECIFIC: int +DBT_CUSTOMEVENT: int +DBT_DEVTYP_OEM: int +DBT_DEVTYP_DEVNODE: int +DBT_DEVTYP_VOLUME: int +DBT_DEVTYP_PORT: int +DBT_DEVTYP_NET: int +DBT_DEVTYP_DEVICEINTERFACE: int +DBT_DEVTYP_HANDLE: int +DBTF_MEDIA: int +DBTF_NET: int +DBTF_RESOURCE: int +DBTF_XPORT: int +DBTF_SLOWNET: int +DBT_VPOWERDAPI: int +DBT_USERDEFINED: int +CBS_DROPDOWNLIST: int +CDM_GETFOLDERIDLIST: int +CTLCOLOR_LISTBOX: int +DBT_APPYBEGIN: int +FILE_NAMED_STREAMS: int +FILE_READ_ONLY_VOLUME: int +FILE_SEQUENTIAL_WRITE_ONCE: int +FILE_SUPPORTS_ENCRYPTION: int +FILE_SUPPORTS_EXTENDED_ATTRIBUTES: int +FILE_SUPPORTS_HARD_LINKS: int +FILE_SUPPORTS_OBJECT_IDS: int +FILE_SUPPORTS_OPEN_BY_FILE_ID: int +FILE_SUPPORTS_REPARSE_POINTS: int +FILE_SUPPORTS_SPARSE_FILES: int +FILE_SUPPORTS_TRANSACTIONS: int +FILE_SUPPORTS_USN_JOURNAL: int +FILE_VOLUME_QUOTAS: int +KEYEVENTF_SCANCODE: int +KEYEVENTF_UNICODE: int +MOUSEEVENTF_HWHEEL: int +MOUSEEVENTF_MOVE_NOCOALESCE: int +MOUSEEVENTF_VIRTUALDESK: int +ODT_LISTBOX: int +REG_RESOURCE_LIST: int +REG_RESOURCE_REQUIREMENTS_LIST: int +SC_TASKLIST: int +SPI_GETLISTBOXSMOOTHSCROLLING: int +SPI_SETLISTBOXSMOOTHSCROLLING: int +WM_CTLCOLORLISTBOX: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32cryptcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32cryptcon.pyi new file mode 100644 index 00000000..c3865abf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32cryptcon.pyi @@ -0,0 +1,1790 @@ +def GET_ALG_CLASS(x: int) -> int: ... +def GET_ALG_TYPE(x: int) -> int: ... +def GET_ALG_SID(x: int) -> int: ... 
+ +ALG_CLASS_ANY: int +ALG_CLASS_SIGNATURE: int +ALG_CLASS_MSG_ENCRYPT: int +ALG_CLASS_DATA_ENCRYPT: int +ALG_CLASS_HASH: int +ALG_CLASS_KEY_EXCHANGE: int +ALG_CLASS_ALL: int +ALG_TYPE_ANY: int +ALG_TYPE_DSS: int +ALG_TYPE_RSA: int +ALG_TYPE_BLOCK: int +ALG_TYPE_STREAM: int +ALG_TYPE_DH: int +ALG_TYPE_SECURECHANNEL: int +ALG_SID_ANY: int +ALG_SID_RSA_ANY: int +ALG_SID_RSA_PKCS: int +ALG_SID_RSA_MSATWORK: int +ALG_SID_RSA_ENTRUST: int +ALG_SID_RSA_PGP: int +ALG_SID_DSS_ANY: int +ALG_SID_DSS_PKCS: int +ALG_SID_DSS_DMS: int +ALG_SID_DES: int +ALG_SID_3DES: int +ALG_SID_DESX: int +ALG_SID_IDEA: int +ALG_SID_CAST: int +ALG_SID_SAFERSK64: int +ALG_SID_SAFERSK128: int +ALG_SID_3DES_112: int +ALG_SID_CYLINK_MEK: int +ALG_SID_RC5: int +ALG_SID_AES_128: int +ALG_SID_AES_192: int +ALG_SID_AES_256: int +ALG_SID_AES: int +ALG_SID_SKIPJACK: int +ALG_SID_TEK: int +CRYPT_MODE_CBCI: int +CRYPT_MODE_CFBP: int +CRYPT_MODE_OFBP: int +CRYPT_MODE_CBCOFM: int +CRYPT_MODE_CBCOFMI: int +ALG_SID_RC2: int +ALG_SID_RC4: int +ALG_SID_SEAL: int +ALG_SID_DH_SANDF: int +ALG_SID_DH_EPHEM: int +ALG_SID_AGREED_KEY_ANY: int +ALG_SID_KEA: int +ALG_SID_MD2: int +ALG_SID_MD4: int +ALG_SID_MD5: int +ALG_SID_SHA: int +ALG_SID_SHA1: int +ALG_SID_MAC: int +ALG_SID_RIPEMD: int +ALG_SID_RIPEMD160: int +ALG_SID_SSL3SHAMD5: int +ALG_SID_HMAC: int +ALG_SID_TLS1PRF: int +ALG_SID_HASH_REPLACE_OWF: int +ALG_SID_SHA_256: int +ALG_SID_SHA_384: int +ALG_SID_SHA_512: int +ALG_SID_SSL3_MASTER: int +ALG_SID_SCHANNEL_MASTER_HASH: int +ALG_SID_SCHANNEL_MAC_KEY: int +ALG_SID_PCT1_MASTER: int +ALG_SID_SSL2_MASTER: int +ALG_SID_TLS1_MASTER: int +ALG_SID_SCHANNEL_ENC_KEY: int +ALG_SID_EXAMPLE: int +CALG_MD2: int +CALG_MD4: int +CALG_MD5: int +CALG_SHA: int +CALG_SHA1: int +CALG_MAC: int +CALG_RSA_SIGN: int +CALG_DSS_SIGN: int +CALG_NO_SIGN: int +CALG_RSA_KEYX: int +CALG_DES: int +CALG_3DES_112: int +CALG_3DES: int +CALG_DESX: int +CALG_RC2: int +CALG_RC4: int +CALG_SEAL: int +CALG_DH_SF: int +CALG_DH_EPHEM: int +CALG_AGREEDKEY_ANY: int +CALG_KEA_KEYX: int +CALG_HUGHES_MD5: int +CALG_SKIPJACK: int +CALG_TEK: int +CALG_CYLINK_MEK: int +CALG_SSL3_SHAMD5: int +CALG_SSL3_MASTER: int +CALG_SCHANNEL_MASTER_HASH: int +CALG_SCHANNEL_MAC_KEY: int +CALG_SCHANNEL_ENC_KEY: int +CALG_PCT1_MASTER: int +CALG_SSL2_MASTER: int +CALG_TLS1_MASTER: int +CALG_RC5: int +CALG_HMAC: int +CALG_TLS1PRF: int +CALG_HASH_REPLACE_OWF: int +CALG_AES_128: int +CALG_AES_192: int +CALG_AES_256: int +CALG_AES: int +CALG_SHA_256: int +CALG_SHA_384: int +CALG_SHA_512: int +CRYPT_VERIFYCONTEXT: int +CRYPT_NEWKEYSET: int +CRYPT_DELETEKEYSET: int +CRYPT_MACHINE_KEYSET: int +CRYPT_SILENT: int +CRYPT_EXPORTABLE: int +CRYPT_USER_PROTECTED: int +CRYPT_CREATE_SALT: int +CRYPT_UPDATE_KEY: int +CRYPT_NO_SALT: int +CRYPT_PREGEN: int +CRYPT_RECIPIENT: int +CRYPT_INITIATOR: int +CRYPT_ONLINE: int +CRYPT_SF: int +CRYPT_CREATE_IV: int +CRYPT_KEK: int +CRYPT_DATA_KEY: int +CRYPT_VOLATILE: int +CRYPT_SGCKEY: int +CRYPT_ARCHIVABLE: int +RSA1024BIT_KEY: int +CRYPT_SERVER: int +KEY_LENGTH_MASK: int +CRYPT_Y_ONLY: int +CRYPT_SSL2_FALLBACK: int +CRYPT_DESTROYKEY: int +CRYPT_OAEP: int +CRYPT_BLOB_VER3: int +CRYPT_IPSEC_HMAC_KEY: int +CRYPT_DECRYPT_RSA_NO_PADDING_CHECK: int +CRYPT_SECRETDIGEST: int +CRYPT_OWF_REPL_LM_HASH: int +CRYPT_LITTLE_ENDIAN: int +CRYPT_NOHASHOID: int +CRYPT_TYPE2_FORMAT: int +CRYPT_X931_FORMAT: int +CRYPT_MACHINE_DEFAULT: int +CRYPT_USER_DEFAULT: int +CRYPT_DELETE_DEFAULT: int +SIMPLEBLOB: int +PUBLICKEYBLOB: int +PRIVATEKEYBLOB: int +PLAINTEXTKEYBLOB: int +OPAQUEKEYBLOB: int 
+PUBLICKEYBLOBEX: int +SYMMETRICWRAPKEYBLOB: int +AT_KEYEXCHANGE: int +AT_SIGNATURE: int +CRYPT_USERDATA: int +KP_IV: int +KP_SALT: int +KP_PADDING: int +KP_MODE: int +KP_MODE_BITS: int +KP_PERMISSIONS: int +KP_ALGID: int +KP_BLOCKLEN: int +KP_KEYLEN: int +KP_SALT_EX: int +KP_P: int +KP_G: int +KP_Q: int +KP_X: int +KP_Y: int +KP_RA: int +KP_RB: int +KP_INFO: int +KP_EFFECTIVE_KEYLEN: int +KP_SCHANNEL_ALG: int +KP_CLIENT_RANDOM: int +KP_SERVER_RANDOM: int +KP_RP: int +KP_PRECOMP_MD5: int +KP_PRECOMP_SHA: int +KP_CERTIFICATE: int +KP_CLEAR_KEY: int +KP_PUB_EX_LEN: int +KP_PUB_EX_VAL: int +KP_KEYVAL: int +KP_ADMIN_PIN: int +KP_KEYEXCHANGE_PIN: int +KP_SIGNATURE_PIN: int +KP_PREHASH: int +KP_ROUNDS: int +KP_OAEP_PARAMS: int +KP_CMS_KEY_INFO: int +KP_CMS_DH_KEY_INFO: int +KP_PUB_PARAMS: int +KP_VERIFY_PARAMS: int +KP_HIGHEST_VERSION: int +KP_GET_USE_COUNT: int +PKCS5_PADDING: int +RANDOM_PADDING: int +ZERO_PADDING: int +CRYPT_MODE_CBC: int +CRYPT_MODE_ECB: int +CRYPT_MODE_OFB: int +CRYPT_MODE_CFB: int +CRYPT_MODE_CTS: int +CRYPT_ENCRYPT: int +CRYPT_DECRYPT: int +CRYPT_EXPORT: int +CRYPT_READ: int +CRYPT_WRITE: int +CRYPT_MAC: int +CRYPT_EXPORT_KEY: int +CRYPT_IMPORT_KEY: int +CRYPT_ARCHIVE: int +HP_ALGID: int +HP_HASHVAL: int +HP_HASHSIZE: int +HP_HMAC_INFO: int +HP_TLS1PRF_LABEL: int +HP_TLS1PRF_SEED: int +CRYPT_FAILED: int +CRYPT_SUCCEED: int + +def RCRYPT_SUCCEEDED(rt: int) -> bool: ... +def RCRYPT_FAILED(rt: int) -> bool: ... + +PP_ENUMALGS: int +PP_ENUMCONTAINERS: int +PP_IMPTYPE: int +PP_NAME: int +PP_VERSION: int +PP_CONTAINER: int +PP_CHANGE_PASSWORD: int +PP_KEYSET_SEC_DESCR: int +PP_CERTCHAIN: int +PP_KEY_TYPE_SUBTYPE: int +PP_PROVTYPE: int +PP_KEYSTORAGE: int +PP_APPLI_CERT: int +PP_SYM_KEYSIZE: int +PP_SESSION_KEYSIZE: int +PP_UI_PROMPT: int +PP_ENUMALGS_EX: int +PP_ENUMMANDROOTS: int +PP_ENUMELECTROOTS: int +PP_KEYSET_TYPE: int +PP_ADMIN_PIN: int +PP_KEYEXCHANGE_PIN: int +PP_SIGNATURE_PIN: int +PP_SIG_KEYSIZE_INC: int +PP_KEYX_KEYSIZE_INC: int +PP_UNIQUE_CONTAINER: int +PP_SGC_INFO: int +PP_USE_HARDWARE_RNG: int +PP_KEYSPEC: int +PP_ENUMEX_SIGNING_PROT: int +PP_CRYPT_COUNT_KEY_USE: int +CRYPT_FIRST: int +CRYPT_NEXT: int +CRYPT_SGC_ENUM: int +CRYPT_IMPL_HARDWARE: int +CRYPT_IMPL_SOFTWARE: int +CRYPT_IMPL_MIXED: int +CRYPT_IMPL_UNKNOWN: int +CRYPT_IMPL_REMOVABLE: int +CRYPT_SEC_DESCR: int +CRYPT_PSTORE: int +CRYPT_UI_PROMPT: int +CRYPT_FLAG_PCT1: int +CRYPT_FLAG_SSL2: int +CRYPT_FLAG_SSL3: int +CRYPT_FLAG_TLS1: int +CRYPT_FLAG_IPSEC: int +CRYPT_FLAG_SIGNING: int +CRYPT_SGC: int +CRYPT_FASTSGC: int +PP_CLIENT_HWND: int +PP_CONTEXT_INFO: int +PP_KEYEXCHANGE_KEYSIZE: int +PP_SIGNATURE_KEYSIZE: int +PP_KEYEXCHANGE_ALG: int +PP_SIGNATURE_ALG: int +PP_DELETEKEY: int +PROV_RSA_FULL: int +PROV_RSA_SIG: int +PROV_DSS: int +PROV_FORTEZZA: int +PROV_MS_EXCHANGE: int +PROV_SSL: int +PROV_RSA_SCHANNEL: int +PROV_DSS_DH: int +PROV_EC_ECDSA_SIG: int +PROV_EC_ECNRA_SIG: int +PROV_EC_ECDSA_FULL: int +PROV_EC_ECNRA_FULL: int +PROV_DH_SCHANNEL: int +PROV_SPYRUS_LYNKS: int +PROV_RNG: int +PROV_INTEL_SEC: int +PROV_REPLACE_OWF: int +PROV_RSA_AES: int +MS_DEF_PROV_A: str +MS_DEF_PROV: str +MS_ENHANCED_PROV_A: str +MS_ENHANCED_PROV: str +MS_STRONG_PROV_A: str +MS_STRONG_PROV: str +MS_DEF_RSA_SIG_PROV_A: str +MS_DEF_RSA_SIG_PROV: str +MS_DEF_RSA_SCHANNEL_PROV_A: str +MS_DEF_RSA_SCHANNEL_PROV: str +MS_DEF_DSS_PROV_A: str +MS_DEF_DSS_PROV: str +MS_DEF_DSS_DH_PROV_A: str +MS_DEF_DSS_DH_PROV: str +MS_ENH_DSS_DH_PROV_A: str +MS_ENH_DSS_DH_PROV: str +MS_DEF_DH_SCHANNEL_PROV_A: str +MS_DEF_DH_SCHANNEL_PROV: str 
+MS_SCARD_PROV_A: str +MS_SCARD_PROV: str +MS_ENH_RSA_AES_PROV_A: str +MS_ENH_RSA_AES_PROV: str +MAXUIDLEN: int +EXPO_OFFLOAD_REG_VALUE: str +EXPO_OFFLOAD_FUNC_NAME: str +szKEY_CRYPTOAPI_PRIVATE_KEY_OPTIONS: str +szFORCE_KEY_PROTECTION: str +dwFORCE_KEY_PROTECTION_DISABLED: int +dwFORCE_KEY_PROTECTION_USER_SELECT: int +dwFORCE_KEY_PROTECTION_HIGH: int +szKEY_CACHE_ENABLED: str +szKEY_CACHE_SECONDS: str +CUR_BLOB_VERSION: int +SCHANNEL_MAC_KEY: int +SCHANNEL_ENC_KEY: int +INTERNATIONAL_USAGE: int +szOID_RSA: str +szOID_PKCS: str +szOID_RSA_HASH: str +szOID_RSA_ENCRYPT: str +szOID_PKCS_1: str +szOID_PKCS_2: str +szOID_PKCS_3: str +szOID_PKCS_4: str +szOID_PKCS_5: str +szOID_PKCS_6: str +szOID_PKCS_7: str +szOID_PKCS_8: str +szOID_PKCS_9: str +szOID_PKCS_10: str +szOID_PKCS_12: str +szOID_RSA_RSA: str +szOID_RSA_MD2RSA: str +szOID_RSA_MD4RSA: str +szOID_RSA_MD5RSA: str +szOID_RSA_SHA1RSA: str +szOID_RSA_SETOAEP_RSA: str +szOID_RSA_DH: str +szOID_RSA_data: str +szOID_RSA_signedData: str +szOID_RSA_envelopedData: str +szOID_RSA_signEnvData: str +szOID_RSA_digestedData: str +szOID_RSA_hashedData: str +szOID_RSA_encryptedData: str +szOID_RSA_emailAddr: str +szOID_RSA_unstructName: str +szOID_RSA_contentType: str +szOID_RSA_messageDigest: str +szOID_RSA_signingTime: str +szOID_RSA_counterSign: str +szOID_RSA_challengePwd: str +szOID_RSA_unstructAddr: str +szOID_RSA_extCertAttrs: str +szOID_RSA_certExtensions: str +szOID_RSA_SMIMECapabilities: str +szOID_RSA_preferSignedData: str +szOID_RSA_SMIMEalg: str +szOID_RSA_SMIMEalgESDH: str +szOID_RSA_SMIMEalgCMS3DESwrap: str +szOID_RSA_SMIMEalgCMSRC2wrap: str +szOID_RSA_MD2: str +szOID_RSA_MD4: str +szOID_RSA_MD5: str +szOID_RSA_RC2CBC: str +szOID_RSA_RC4: str +szOID_RSA_DES_EDE3_CBC: str +szOID_RSA_RC5_CBCPad: str +szOID_ANSI_X942: str +szOID_ANSI_X942_DH: str +szOID_X957: str +szOID_X957_DSA: str +szOID_X957_SHA1DSA: str +szOID_DS: str +szOID_DSALG: str +szOID_DSALG_CRPT: str +szOID_DSALG_HASH: str +szOID_DSALG_SIGN: str +szOID_DSALG_RSA: str +szOID_OIW: str +szOID_OIWSEC: str +szOID_OIWSEC_md4RSA: str +szOID_OIWSEC_md5RSA: str +szOID_OIWSEC_md4RSA2: str +szOID_OIWSEC_desECB: str +szOID_OIWSEC_desCBC: str +szOID_OIWSEC_desOFB: str +szOID_OIWSEC_desCFB: str +szOID_OIWSEC_desMAC: str +szOID_OIWSEC_rsaSign: str +szOID_OIWSEC_dsa: str +szOID_OIWSEC_shaDSA: str +szOID_OIWSEC_mdc2RSA: str +szOID_OIWSEC_shaRSA: str +szOID_OIWSEC_dhCommMod: str +szOID_OIWSEC_desEDE: str +szOID_OIWSEC_sha: str +szOID_OIWSEC_mdc2: str +szOID_OIWSEC_dsaComm: str +szOID_OIWSEC_dsaCommSHA: str +szOID_OIWSEC_rsaXchg: str +szOID_OIWSEC_keyHashSeal: str +szOID_OIWSEC_md2RSASign: str +szOID_OIWSEC_md5RSASign: str +szOID_OIWSEC_sha1: str +szOID_OIWSEC_dsaSHA1: str +szOID_OIWSEC_dsaCommSHA1: str +szOID_OIWSEC_sha1RSASign: str +szOID_OIWDIR: str +szOID_OIWDIR_CRPT: str +szOID_OIWDIR_HASH: str +szOID_OIWDIR_SIGN: str +szOID_OIWDIR_md2: str +szOID_OIWDIR_md2RSA: str +szOID_INFOSEC: str +szOID_INFOSEC_sdnsSignature: str +szOID_INFOSEC_mosaicSignature: str +szOID_INFOSEC_sdnsConfidentiality: str +szOID_INFOSEC_mosaicConfidentiality: str +szOID_INFOSEC_sdnsIntegrity: str +szOID_INFOSEC_mosaicIntegrity: str +szOID_INFOSEC_sdnsTokenProtection: str +szOID_INFOSEC_mosaicTokenProtection: str +szOID_INFOSEC_sdnsKeyManagement: str +szOID_INFOSEC_mosaicKeyManagement: str +szOID_INFOSEC_sdnsKMandSig: str +szOID_INFOSEC_mosaicKMandSig: str +szOID_INFOSEC_SuiteASignature: str +szOID_INFOSEC_SuiteAConfidentiality: str +szOID_INFOSEC_SuiteAIntegrity: str +szOID_INFOSEC_SuiteATokenProtection: str 
+szOID_INFOSEC_SuiteAKeyManagement: str +szOID_INFOSEC_SuiteAKMandSig: str +szOID_INFOSEC_mosaicUpdatedSig: str +szOID_INFOSEC_mosaicKMandUpdSig: str +szOID_INFOSEC_mosaicUpdatedInteg: str +szOID_COMMON_NAME: str +szOID_SUR_NAME: str +szOID_DEVICE_SERIAL_NUMBER: str +szOID_COUNTRY_NAME: str +szOID_LOCALITY_NAME: str +szOID_STATE_OR_PROVINCE_NAME: str +szOID_STREET_ADDRESS: str +szOID_ORGANIZATION_NAME: str +szOID_ORGANIZATIONAL_UNIT_NAME: str +szOID_TITLE: str +szOID_DESCRIPTION: str +szOID_SEARCH_GUIDE: str +szOID_BUSINESS_CATEGORY: str +szOID_POSTAL_ADDRESS: str +szOID_POSTAL_CODE: str +szOID_POST_OFFICE_BOX: str +szOID_PHYSICAL_DELIVERY_OFFICE_NAME: str +szOID_TELEPHONE_NUMBER: str +szOID_TELEX_NUMBER: str +szOID_TELETEXT_TERMINAL_IDENTIFIER: str +szOID_FACSIMILE_TELEPHONE_NUMBER: str +szOID_X21_ADDRESS: str +szOID_INTERNATIONAL_ISDN_NUMBER: str +szOID_REGISTERED_ADDRESS: str +szOID_DESTINATION_INDICATOR: str +szOID_PREFERRED_DELIVERY_METHOD: str +szOID_PRESENTATION_ADDRESS: str +szOID_SUPPORTED_APPLICATION_CONTEXT: str +szOID_MEMBER: str +szOID_OWNER: str +szOID_ROLE_OCCUPANT: str +szOID_SEE_ALSO: str +szOID_USER_PASSWORD: str +szOID_USER_CERTIFICATE: str +szOID_CA_CERTIFICATE: str +szOID_CROSS_CERTIFICATE_PAIR: str +szOID_GIVEN_NAME: str +szOID_INITIALS: str +szOID_DN_QUALIFIER: str +szOID_DOMAIN_COMPONENT: str +szOID_PKCS_12_FRIENDLY_NAME_ATTR: str +szOID_PKCS_12_LOCAL_KEY_ID: str +szOID_PKCS_12_KEY_PROVIDER_NAME_ATTR: str +szOID_LOCAL_MACHINE_KEYSET: str +szOID_KEYID_RDN: str +CERT_RDN_ANY_TYPE: int +CERT_RDN_ENCODED_BLOB: int +CERT_RDN_OCTET_STRING: int +CERT_RDN_NUMERIC_STRING: int +CERT_RDN_PRINTABLE_STRING: int +CERT_RDN_TELETEX_STRING: int +CERT_RDN_T61_STRING: int +CERT_RDN_VIDEOTEX_STRING: int +CERT_RDN_IA5_STRING: int +CERT_RDN_GRAPHIC_STRING: int +CERT_RDN_VISIBLE_STRING: int +CERT_RDN_ISO646_STRING: int +CERT_RDN_GENERAL_STRING: int +CERT_RDN_UNIVERSAL_STRING: int +CERT_RDN_INT4_STRING: int +CERT_RDN_BMP_STRING: int +CERT_RDN_UNICODE_STRING: int +CERT_RDN_UTF8_STRING: int +CERT_RDN_TYPE_MASK: int +CERT_RDN_FLAGS_MASK: int +CERT_RDN_ENABLE_T61_UNICODE_FLAG: int +CERT_RDN_ENABLE_UTF8_UNICODE_FLAG: int +CERT_RDN_DISABLE_CHECK_TYPE_FLAG: int +CERT_RDN_DISABLE_IE4_UTF8_FLAG: int +CERT_RSA_PUBLIC_KEY_OBJID: str +CERT_DEFAULT_OID_PUBLIC_KEY_SIGN: str +CERT_DEFAULT_OID_PUBLIC_KEY_XCHG: str +CERT_V1: int +CERT_V2: int +CERT_V3: int +CERT_INFO_VERSION_FLAG: int +CERT_INFO_SERIAL_NUMBER_FLAG: int +CERT_INFO_SIGNATURE_ALGORITHM_FLAG: int +CERT_INFO_ISSUER_FLAG: int +CERT_INFO_NOT_BEFORE_FLAG: int +CERT_INFO_NOT_AFTER_FLAG: int +CERT_INFO_SUBJECT_FLAG: int +CERT_INFO_SUBJECT_PUBLIC_KEY_INFO_FLAG: int +CERT_INFO_ISSUER_UNIQUE_ID_FLAG: int +CERT_INFO_SUBJECT_UNIQUE_ID_FLAG: int +CERT_INFO_EXTENSION_FLAG: int +CRL_V1: int +CRL_V2: int +CERT_REQUEST_V1: int +CERT_KEYGEN_REQUEST_V1: int +CTL_V1: int +CERT_ENCODING_TYPE_MASK: int +CMSG_ENCODING_TYPE_MASK: int + +def GET_CERT_ENCODING_TYPE(X: int) -> int: ... +def GET_CMSG_ENCODING_TYPE(X: int) -> int: ... 
+ +CRYPT_ASN_ENCODING: int +CRYPT_NDR_ENCODING: int +X509_ASN_ENCODING: int +X509_NDR_ENCODING: int +PKCS_7_ASN_ENCODING: int +PKCS_7_NDR_ENCODING: int +CRYPT_FORMAT_STR_MULTI_LINE: int +CRYPT_FORMAT_STR_NO_HEX: int +CRYPT_FORMAT_SIMPLE: int +CRYPT_FORMAT_X509: int +CRYPT_FORMAT_OID: int +CRYPT_FORMAT_RDN_SEMICOLON: int +CRYPT_FORMAT_RDN_CRLF: int +CRYPT_FORMAT_RDN_UNQUOTE: int +CRYPT_FORMAT_RDN_REVERSE: int +CRYPT_FORMAT_COMMA: int +CRYPT_FORMAT_SEMICOLON: int +CRYPT_FORMAT_CRLF: int +CRYPT_ENCODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG: int +CRYPT_ENCODE_ALLOC_FLAG: int +CRYPT_UNICODE_NAME_ENCODE_ENABLE_T61_UNICODE_FLAG: int +CRYPT_UNICODE_NAME_ENCODE_ENABLE_UTF8_UNICODE_FLAG: int +CRYPT_UNICODE_NAME_ENCODE_DISABLE_CHECK_TYPE_FLAG: int +CRYPT_SORTED_CTL_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG: int +CRYPT_DECODE_NOCOPY_FLAG: int +CRYPT_DECODE_TO_BE_SIGNED_FLAG: int +CRYPT_DECODE_SHARE_OID_STRING_FLAG: int +CRYPT_DECODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG: int +CRYPT_DECODE_ALLOC_FLAG: int +CRYPT_UNICODE_NAME_DECODE_DISABLE_IE4_UTF8_FLAG: int +CRYPT_ENCODE_DECODE_NONE: int +X509_CERT: int +X509_CERT_TO_BE_SIGNED: int +X509_CERT_CRL_TO_BE_SIGNED: int +X509_CERT_REQUEST_TO_BE_SIGNED: int +X509_EXTENSIONS: int +X509_NAME_VALUE: int +X509_NAME: int +X509_PUBLIC_KEY_INFO: int +X509_AUTHORITY_KEY_ID: int +X509_KEY_ATTRIBUTES: int +X509_KEY_USAGE_RESTRICTION: int +X509_ALTERNATE_NAME: int +X509_BASIC_CONSTRAINTS: int +X509_KEY_USAGE: int +X509_BASIC_CONSTRAINTS2: int +X509_CERT_POLICIES: int +PKCS_UTC_TIME: int +PKCS_TIME_REQUEST: int +RSA_CSP_PUBLICKEYBLOB: int +X509_UNICODE_NAME: int +X509_KEYGEN_REQUEST_TO_BE_SIGNED: int +PKCS_ATTRIBUTE: int +PKCS_CONTENT_INFO_SEQUENCE_OF_ANY: int +X509_UNICODE_NAME_VALUE: int +X509_ANY_STRING: int +X509_UNICODE_ANY_STRING: int +X509_OCTET_STRING: int +X509_BITS: int +X509_INTEGER: int +X509_MULTI_BYTE_INTEGER: int +X509_ENUMERATED: int +X509_CHOICE_OF_TIME: int +X509_AUTHORITY_KEY_ID2: int +X509_AUTHORITY_INFO_ACCESS: int +X509_SUBJECT_INFO_ACCESS: int +X509_CRL_REASON_CODE: int +PKCS_CONTENT_INFO: int +X509_SEQUENCE_OF_ANY: int +X509_CRL_DIST_POINTS: int +X509_ENHANCED_KEY_USAGE: int +PKCS_CTL: int +X509_MULTI_BYTE_UINT: int +X509_DSS_PUBLICKEY: int +X509_DSS_PARAMETERS: int +X509_DSS_SIGNATURE: int +PKCS_RC2_CBC_PARAMETERS: int +PKCS_SMIME_CAPABILITIES: int +X509_QC_STATEMENTS_EXT: int +PKCS_RSA_PRIVATE_KEY: int +PKCS_PRIVATE_KEY_INFO: int +PKCS_ENCRYPTED_PRIVATE_KEY_INFO: int +X509_PKIX_POLICY_QUALIFIER_USERNOTICE: int +X509_DH_PUBLICKEY: int +X509_DH_PARAMETERS: int +PKCS_ATTRIBUTES: int +PKCS_SORTED_CTL: int +X509_ECC_SIGNATURE: int +X942_DH_PARAMETERS: int +X509_BITS_WITHOUT_TRAILING_ZEROES: int +X942_OTHER_INFO: int +X509_CERT_PAIR: int +X509_ISSUING_DIST_POINT: int +X509_NAME_CONSTRAINTS: int +X509_POLICY_MAPPINGS: int +X509_POLICY_CONSTRAINTS: int +X509_CROSS_CERT_DIST_POINTS: int +CMC_DATA: int +CMC_RESPONSE: int +CMC_STATUS: int +CMC_ADD_EXTENSIONS: int +CMC_ADD_ATTRIBUTES: int +X509_CERTIFICATE_TEMPLATE: int +OCSP_SIGNED_REQUEST: int +OCSP_REQUEST: int +OCSP_RESPONSE: int +OCSP_BASIC_SIGNED_RESPONSE: int +OCSP_BASIC_RESPONSE: int +X509_LOGOTYPE_EXT: int +X509_BIOMETRIC_EXT: int +CNG_RSA_PUBLIC_KEY_BLOB: int +X509_OBJECT_IDENTIFIER: int +X509_ALGORITHM_IDENTIFIER: int +PKCS_RSA_SSA_PSS_PARAMETERS: int +PKCS_RSAES_OAEP_PARAMETERS: int +ECC_CMS_SHARED_INFO: int +TIMESTAMP_REQUEST: int +TIMESTAMP_RESPONSE: int +TIMESTAMP_INFO: int +X509_CERT_BUNDLE: int +PKCS7_SIGNER_INFO: int +CMS_SIGNER_INFO: int +szOID_AUTHORITY_KEY_IDENTIFIER: str +szOID_KEY_ATTRIBUTES: str 
+szOID_CERT_POLICIES_95: str +szOID_KEY_USAGE_RESTRICTION: str +szOID_SUBJECT_ALT_NAME: str +szOID_ISSUER_ALT_NAME: str +szOID_BASIC_CONSTRAINTS: str +szOID_KEY_USAGE: str +szOID_PRIVATEKEY_USAGE_PERIOD: str +szOID_BASIC_CONSTRAINTS2: str +szOID_CERT_POLICIES: str +szOID_ANY_CERT_POLICY: str +szOID_AUTHORITY_KEY_IDENTIFIER2: str +szOID_SUBJECT_KEY_IDENTIFIER: str +szOID_SUBJECT_ALT_NAME2: str +szOID_ISSUER_ALT_NAME2: str +szOID_CRL_REASON_CODE: str +szOID_REASON_CODE_HOLD: str +szOID_CRL_DIST_POINTS: str +szOID_ENHANCED_KEY_USAGE: str +szOID_CRL_NUMBER: str +szOID_DELTA_CRL_INDICATOR: str +szOID_ISSUING_DIST_POINT: str +szOID_FRESHEST_CRL: str +szOID_NAME_CONSTRAINTS: str +szOID_POLICY_MAPPINGS: str +szOID_LEGACY_POLICY_MAPPINGS: str +szOID_POLICY_CONSTRAINTS: str +szOID_RENEWAL_CERTIFICATE: str +szOID_ENROLLMENT_NAME_VALUE_PAIR: str +szOID_ENROLLMENT_CSP_PROVIDER: str +szOID_OS_VERSION: str +szOID_ENROLLMENT_AGENT: str +szOID_PKIX: str +szOID_PKIX_PE: str +szOID_AUTHORITY_INFO_ACCESS: str +szOID_CERT_EXTENSIONS: str +szOID_NEXT_UPDATE_LOCATION: str +szOID_REMOVE_CERTIFICATE: str +szOID_CROSS_CERT_DIST_POINTS: str +szOID_CTL: str +szOID_SORTED_CTL: str +szOID_SERIALIZED: str +szOID_NT_PRINCIPAL_NAME: str +szOID_PRODUCT_UPDATE: str +szOID_ANY_APPLICATION_POLICY: str +szOID_AUTO_ENROLL_CTL_USAGE: str +szOID_ENROLL_CERTTYPE_EXTENSION: str +szOID_CERT_MANIFOLD: str +szOID_CERTSRV_CA_VERSION: str +szOID_CERTSRV_PREVIOUS_CERT_HASH: str +szOID_CRL_VIRTUAL_BASE: str +szOID_CRL_NEXT_PUBLISH: str +szOID_KP_CA_EXCHANGE: str +szOID_KP_KEY_RECOVERY_AGENT: str +szOID_CERTIFICATE_TEMPLATE: str +szOID_ENTERPRISE_OID_ROOT: str +szOID_RDN_DUMMY_SIGNER: str +szOID_APPLICATION_CERT_POLICIES: str +szOID_APPLICATION_POLICY_MAPPINGS: str +szOID_APPLICATION_POLICY_CONSTRAINTS: str +szOID_ARCHIVED_KEY_ATTR: str +szOID_CRL_SELF_CDP: str +szOID_REQUIRE_CERT_CHAIN_POLICY: str +szOID_ARCHIVED_KEY_CERT_HASH: str +szOID_ISSUED_CERT_HASH: str +szOID_DS_EMAIL_REPLICATION: str +szOID_REQUEST_CLIENT_INFO: str +szOID_ENCRYPTED_KEY_HASH: str +szOID_CERTSRV_CROSSCA_VERSION: str +szOID_NTDS_REPLICATION: str +szOID_SUBJECT_DIR_ATTRS: str +szOID_PKIX_KP: str +szOID_PKIX_KP_SERVER_AUTH: str +szOID_PKIX_KP_CLIENT_AUTH: str +szOID_PKIX_KP_CODE_SIGNING: str +szOID_PKIX_KP_EMAIL_PROTECTION: str +szOID_PKIX_KP_IPSEC_END_SYSTEM: str +szOID_PKIX_KP_IPSEC_TUNNEL: str +szOID_PKIX_KP_IPSEC_USER: str +szOID_PKIX_KP_TIMESTAMP_SIGNING: str +szOID_IPSEC_KP_IKE_INTERMEDIATE: str +szOID_KP_CTL_USAGE_SIGNING: str +szOID_KP_TIME_STAMP_SIGNING: str +szOID_SERVER_GATED_CRYPTO: str +szOID_SGC_NETSCAPE: str +szOID_KP_EFS: str +szOID_EFS_RECOVERY: str +szOID_WHQL_CRYPTO: str +szOID_NT5_CRYPTO: str +szOID_OEM_WHQL_CRYPTO: str +szOID_EMBEDDED_NT_CRYPTO: str +szOID_KP_QUALIFIED_SUBORDINATION: str +szOID_KP_KEY_RECOVERY: str +szOID_KP_DOCUMENT_SIGNING: str +szOID_KP_LIFETIME_SIGNING: str +szOID_KP_MOBILE_DEVICE_SOFTWARE: str +szOID_DRM: str +szOID_DRM_INDIVIDUALIZATION: str +szOID_LICENSES: str +szOID_LICENSE_SERVER: str +szOID_KP_SMARTCARD_LOGON: str +szOID_YESNO_TRUST_ATTR: str +szOID_PKIX_POLICY_QUALIFIER_CPS: str +szOID_PKIX_POLICY_QUALIFIER_USERNOTICE: str +szOID_CERT_POLICIES_95_QUALIFIER1: str +CERT_UNICODE_RDN_ERR_INDEX_MASK: int +CERT_UNICODE_RDN_ERR_INDEX_SHIFT: int +CERT_UNICODE_ATTR_ERR_INDEX_MASK: int +CERT_UNICODE_ATTR_ERR_INDEX_SHIFT: int +CERT_UNICODE_VALUE_ERR_INDEX_MASK: int +CERT_UNICODE_VALUE_ERR_INDEX_SHIFT: int +CERT_DIGITAL_SIGNATURE_KEY_USAGE: int +CERT_NON_REPUDIATION_KEY_USAGE: int +CERT_KEY_ENCIPHERMENT_KEY_USAGE: int 
+CERT_DATA_ENCIPHERMENT_KEY_USAGE: int +CERT_KEY_AGREEMENT_KEY_USAGE: int +CERT_KEY_CERT_SIGN_KEY_USAGE: int +CERT_OFFLINE_CRL_SIGN_KEY_USAGE: int +CERT_CRL_SIGN_KEY_USAGE: int +CERT_ENCIPHER_ONLY_KEY_USAGE: int +CERT_DECIPHER_ONLY_KEY_USAGE: int +CERT_ALT_NAME_OTHER_NAME: int +CERT_ALT_NAME_RFC822_NAME: int +CERT_ALT_NAME_DNS_NAME: int +CERT_ALT_NAME_X400_ADDRESS: int +CERT_ALT_NAME_DIRECTORY_NAME: int +CERT_ALT_NAME_EDI_PARTY_NAME: int +CERT_ALT_NAME_URL: int +CERT_ALT_NAME_IP_ADDRESS: int +CERT_ALT_NAME_REGISTERED_ID: int +CERT_ALT_NAME_ENTRY_ERR_INDEX_MASK: int +CERT_ALT_NAME_ENTRY_ERR_INDEX_SHIFT: int +CERT_ALT_NAME_VALUE_ERR_INDEX_MASK: int +CERT_ALT_NAME_VALUE_ERR_INDEX_SHIFT: int +CERT_CA_SUBJECT_FLAG: int +CERT_END_ENTITY_SUBJECT_FLAG: int +szOID_PKIX_ACC_DESCR: str +szOID_PKIX_OCSP: str +szOID_PKIX_CA_ISSUERS: str +CRL_REASON_UNSPECIFIED: int +CRL_REASON_KEY_COMPROMISE: int +CRL_REASON_CA_COMPROMISE: int +CRL_REASON_AFFILIATION_CHANGED: int +CRL_REASON_SUPERSEDED: int +CRL_REASON_CESSATION_OF_OPERATION: int +CRL_REASON_CERTIFICATE_HOLD: int +CRL_REASON_REMOVE_FROM_CRL: int +CRL_DIST_POINT_NO_NAME: int +CRL_DIST_POINT_FULL_NAME: int +CRL_DIST_POINT_ISSUER_RDN_NAME: int +CRL_REASON_UNUSED_FLAG: int +CRL_REASON_KEY_COMPROMISE_FLAG: int +CRL_REASON_CA_COMPROMISE_FLAG: int +CRL_REASON_AFFILIATION_CHANGED_FLAG: int +CRL_REASON_SUPERSEDED_FLAG: int +CRL_REASON_CESSATION_OF_OPERATION_FLAG: int +CRL_REASON_CERTIFICATE_HOLD_FLAG: int +CRL_DIST_POINT_ERR_INDEX_MASK: int +CRL_DIST_POINT_ERR_INDEX_SHIFT: int +CRL_DIST_POINT_ERR_CRL_ISSUER_BIT: int +CROSS_CERT_DIST_POINT_ERR_INDEX_MASK: int +CROSS_CERT_DIST_POINT_ERR_INDEX_SHIFT: int +CERT_EXCLUDED_SUBTREE_BIT: int +SORTED_CTL_EXT_FLAGS_OFFSET: int +SORTED_CTL_EXT_COUNT_OFFSET: int +SORTED_CTL_EXT_MAX_COLLISION_OFFSET: int +SORTED_CTL_EXT_HASH_BUCKET_OFFSET: int +SORTED_CTL_EXT_HASHED_SUBJECT_IDENTIFIER_FLAG: int +CERT_DSS_R_LEN: int +CERT_DSS_S_LEN: int +CERT_DSS_SIGNATURE_LEN: int +CERT_MAX_ASN_ENCODED_DSS_SIGNATURE_LEN: int +CRYPT_X942_COUNTER_BYTE_LENGTH: int +CRYPT_X942_KEY_LENGTH_BYTE_LENGTH: int +CRYPT_X942_PUB_INFO_BYTE_LENGTH: float +CRYPT_RC2_40BIT_VERSION: int +CRYPT_RC2_56BIT_VERSION: int +CRYPT_RC2_64BIT_VERSION: int +CRYPT_RC2_128BIT_VERSION: int +szOID_VERISIGN_PRIVATE_6_9: str +szOID_VERISIGN_ONSITE_JURISDICTION_HASH: str +szOID_VERISIGN_BITSTRING_6_13: str +szOID_VERISIGN_ISS_STRONG_CRYPTO: str +szOID_NETSCAPE: str +szOID_NETSCAPE_CERT_EXTENSION: str +szOID_NETSCAPE_CERT_TYPE: str +szOID_NETSCAPE_BASE_URL: str +szOID_NETSCAPE_REVOCATION_URL: str +szOID_NETSCAPE_CA_REVOCATION_URL: str +szOID_NETSCAPE_CERT_RENEWAL_URL: str +szOID_NETSCAPE_CA_POLICY_URL: str +szOID_NETSCAPE_SSL_SERVER_NAME: str +szOID_NETSCAPE_COMMENT: str +szOID_NETSCAPE_DATA_TYPE: str +szOID_NETSCAPE_CERT_SEQUENCE: str +NETSCAPE_SSL_CLIENT_AUTH_CERT_TYPE: int +NETSCAPE_SSL_SERVER_AUTH_CERT_TYPE: int +NETSCAPE_SMIME_CERT_TYPE: int +NETSCAPE_SIGN_CERT_TYPE: int +NETSCAPE_SSL_CA_CERT_TYPE: int +NETSCAPE_SMIME_CA_CERT_TYPE: int +NETSCAPE_SIGN_CA_CERT_TYPE: int +szOID_CT_PKI_DATA: str +szOID_CT_PKI_RESPONSE: str +szOID_PKIX_NO_SIGNATURE: str +szOID_CMC: str +szOID_CMC_STATUS_INFO: str +szOID_CMC_IDENTIFICATION: str +szOID_CMC_IDENTITY_PROOF: str +szOID_CMC_DATA_RETURN: str +szOID_CMC_TRANSACTION_ID: str +szOID_CMC_SENDER_NONCE: str +szOID_CMC_RECIPIENT_NONCE: str +szOID_CMC_ADD_EXTENSIONS: str +szOID_CMC_ENCRYPTED_POP: str +szOID_CMC_DECRYPTED_POP: str +szOID_CMC_LRA_POP_WITNESS: str +szOID_CMC_GET_CERT: str +szOID_CMC_GET_CRL: str +szOID_CMC_REVOKE_REQUEST: str 
+szOID_CMC_REG_INFO: str +szOID_CMC_RESPONSE_INFO: str +szOID_CMC_QUERY_PENDING: str +szOID_CMC_ID_POP_LINK_RANDOM: str +szOID_CMC_ID_POP_LINK_WITNESS: str +szOID_CMC_ID_CONFIRM_CERT_ACCEPTANCE: str +szOID_CMC_ADD_ATTRIBUTES: str +CMC_TAGGED_CERT_REQUEST_CHOICE: int +CMC_OTHER_INFO_NO_CHOICE: int +CMC_OTHER_INFO_FAIL_CHOICE: int +CMC_OTHER_INFO_PEND_CHOICE: int +CMC_STATUS_SUCCESS: int +CMC_STATUS_FAILED: int +CMC_STATUS_PENDING: int +CMC_STATUS_NO_SUPPORT: int +CMC_STATUS_CONFIRM_REQUIRED: int +CMC_FAIL_BAD_ALG: int +CMC_FAIL_BAD_MESSAGE_CHECK: int +CMC_FAIL_BAD_REQUEST: int +CMC_FAIL_BAD_TIME: int +CMC_FAIL_BAD_CERT_ID: int +CMC_FAIL_UNSUPORTED_EXT: int +CMC_FAIL_MUST_ARCHIVE_KEYS: int +CMC_FAIL_BAD_IDENTITY: int +CMC_FAIL_POP_REQUIRED: int +CMC_FAIL_POP_FAILED: int +CMC_FAIL_NO_KEY_REUSE: int +CMC_FAIL_INTERNAL_CA_ERROR: int +CMC_FAIL_TRY_LATER: int +CRYPT_OID_ENCODE_OBJECT_FUNC: str +CRYPT_OID_DECODE_OBJECT_FUNC: str +CRYPT_OID_ENCODE_OBJECT_EX_FUNC: str +CRYPT_OID_DECODE_OBJECT_EX_FUNC: str +CRYPT_OID_CREATE_COM_OBJECT_FUNC: str +CRYPT_OID_VERIFY_REVOCATION_FUNC: str +CRYPT_OID_VERIFY_CTL_USAGE_FUNC: str +CRYPT_OID_FORMAT_OBJECT_FUNC: str +CRYPT_OID_FIND_OID_INFO_FUNC: str +CRYPT_OID_FIND_LOCALIZED_NAME_FUNC: str +CRYPT_OID_REGPATH: str +CRYPT_OID_REG_ENCODING_TYPE_PREFIX: str +CRYPT_OID_REG_DLL_VALUE_NAME: str +CRYPT_OID_REG_FUNC_NAME_VALUE_NAME: str +CRYPT_OID_REG_FUNC_NAME_VALUE_NAME_A: str +CRYPT_OID_REG_FLAGS_VALUE_NAME: str +CRYPT_DEFAULT_OID: str +CRYPT_INSTALL_OID_FUNC_BEFORE_FLAG: int +CRYPT_GET_INSTALLED_OID_FUNC_FLAG: int +CRYPT_REGISTER_FIRST_INDEX: int +CRYPT_REGISTER_LAST_INDEX: int +CRYPT_MATCH_ANY_ENCODING_TYPE: int +CRYPT_HASH_ALG_OID_GROUP_ID: int +CRYPT_ENCRYPT_ALG_OID_GROUP_ID: int +CRYPT_PUBKEY_ALG_OID_GROUP_ID: int +CRYPT_SIGN_ALG_OID_GROUP_ID: int +CRYPT_RDN_ATTR_OID_GROUP_ID: int +CRYPT_EXT_OR_ATTR_OID_GROUP_ID: int +CRYPT_ENHKEY_USAGE_OID_GROUP_ID: int +CRYPT_POLICY_OID_GROUP_ID: int +CRYPT_TEMPLATE_OID_GROUP_ID: int +CRYPT_LAST_OID_GROUP_ID: int +CRYPT_FIRST_ALG_OID_GROUP_ID: int +CRYPT_LAST_ALG_OID_GROUP_ID: int +CRYPT_OID_INHIBIT_SIGNATURE_FORMAT_FLAG: int +CRYPT_OID_USE_PUBKEY_PARA_FOR_PKCS7_FLAG: int +CRYPT_OID_NO_NULL_ALGORITHM_PARA_FLAG: int +CRYPT_OID_INFO_OID_KEY: int +CRYPT_OID_INFO_NAME_KEY: int +CRYPT_OID_INFO_ALGID_KEY: int +CRYPT_OID_INFO_SIGN_KEY: int +CRYPT_INSTALL_OID_INFO_BEFORE_FLAG: int +CRYPT_LOCALIZED_NAME_ENCODING_TYPE: int +CRYPT_LOCALIZED_NAME_OID: str +szOID_PKCS_7_DATA: str +szOID_PKCS_7_SIGNED: str +szOID_PKCS_7_ENVELOPED: str +szOID_PKCS_7_SIGNEDANDENVELOPED: str +szOID_PKCS_7_DIGESTED: str +szOID_PKCS_7_ENCRYPTED: str +szOID_PKCS_9_CONTENT_TYPE: str +szOID_PKCS_9_MESSAGE_DIGEST: str +CMSG_DATA: int +CMSG_SIGNED: int +CMSG_ENVELOPED: int +CMSG_SIGNED_AND_ENVELOPED: int +CMSG_HASHED: int +CMSG_ENCRYPTED: int +CMSG_ALL_FLAGS: int +CMSG_DATA_FLAG: int +CMSG_SIGNED_FLAG: int +CMSG_ENVELOPED_FLAG: int +CMSG_SIGNED_AND_ENVELOPED_FLAG: int +CMSG_HASHED_FLAG: int +CMSG_ENCRYPTED_FLAG: int +CERT_ID_ISSUER_SERIAL_NUMBER: int +CERT_ID_KEY_IDENTIFIER: int +CERT_ID_SHA1_HASH: int +CMSG_KEY_AGREE_EPHEMERAL_KEY_CHOICE: int +CMSG_KEY_AGREE_STATIC_KEY_CHOICE: int +CMSG_KEY_TRANS_RECIPIENT: int +CMSG_KEY_AGREE_RECIPIENT: int +CMSG_SP3_COMPATIBLE_ENCRYPT_FLAG: int +CMSG_RC4_NO_SALT_FLAG: int +CMSG_INDEFINITE_LENGTH: int +CMSG_BARE_CONTENT_FLAG: int +CMSG_LENGTH_ONLY_FLAG: int +CMSG_DETACHED_FLAG: int +CMSG_AUTHENTICATED_ATTRIBUTES_FLAG: int +CMSG_CONTENTS_OCTETS_FLAG: int +CMSG_MAX_LENGTH_FLAG: int +CMSG_CMS_ENCAPSULATED_CONTENT_FLAG: int 
+CMSG_CRYPT_RELEASE_CONTEXT_FLAG: int +CMSG_TYPE_PARAM: int +CMSG_CONTENT_PARAM: int +CMSG_BARE_CONTENT_PARAM: int +CMSG_INNER_CONTENT_TYPE_PARAM: int +CMSG_SIGNER_COUNT_PARAM: int +CMSG_SIGNER_INFO_PARAM: int +CMSG_SIGNER_CERT_INFO_PARAM: int +CMSG_SIGNER_HASH_ALGORITHM_PARAM: int +CMSG_SIGNER_AUTH_ATTR_PARAM: int +CMSG_SIGNER_UNAUTH_ATTR_PARAM: int +CMSG_CERT_COUNT_PARAM: int +CMSG_CERT_PARAM: int +CMSG_CRL_COUNT_PARAM: int +CMSG_CRL_PARAM: int +CMSG_ENVELOPE_ALGORITHM_PARAM: int +CMSG_RECIPIENT_COUNT_PARAM: int +CMSG_RECIPIENT_INDEX_PARAM: int +CMSG_RECIPIENT_INFO_PARAM: int +CMSG_HASH_ALGORITHM_PARAM: int +CMSG_HASH_DATA_PARAM: int +CMSG_COMPUTED_HASH_PARAM: int +CMSG_ENCRYPT_PARAM: int +CMSG_ENCRYPTED_DIGEST: int +CMSG_ENCODED_SIGNER: int +CMSG_ENCODED_MESSAGE: int +CMSG_VERSION_PARAM: int +CMSG_ATTR_CERT_COUNT_PARAM: int +CMSG_ATTR_CERT_PARAM: int +CMSG_CMS_RECIPIENT_COUNT_PARAM: int +CMSG_CMS_RECIPIENT_INDEX_PARAM: int +CMSG_CMS_RECIPIENT_ENCRYPTED_KEY_INDEX_PARAM: int +CMSG_CMS_RECIPIENT_INFO_PARAM: int +CMSG_UNPROTECTED_ATTR_PARAM: int +CMSG_SIGNER_CERT_ID_PARAM: int +CMSG_CMS_SIGNER_INFO_PARAM: int +CMSG_SIGNED_DATA_V1: int +CMSG_SIGNED_DATA_V3: int +CMSG_SIGNED_DATA_PKCS_1_5_VERSION: int +CMSG_SIGNED_DATA_CMS_VERSION: int +CMSG_SIGNER_INFO_V1: int +CMSG_SIGNER_INFO_V3: int +CMSG_SIGNER_INFO_PKCS_1_5_VERSION: int +CMSG_SIGNER_INFO_CMS_VERSION: int +CMSG_HASHED_DATA_V0: int +CMSG_HASHED_DATA_V2: int +CMSG_HASHED_DATA_PKCS_1_5_VERSION: int +CMSG_HASHED_DATA_CMS_VERSION: int +CMSG_ENVELOPED_DATA_V0: int +CMSG_ENVELOPED_DATA_V2: int +CMSG_ENVELOPED_DATA_PKCS_1_5_VERSION: int +CMSG_ENVELOPED_DATA_CMS_VERSION: int +CMSG_KEY_AGREE_ORIGINATOR_CERT: int +CMSG_KEY_AGREE_ORIGINATOR_PUBLIC_KEY: int +CMSG_ENVELOPED_RECIPIENT_V0: int +CMSG_ENVELOPED_RECIPIENT_V2: int +CMSG_ENVELOPED_RECIPIENT_V3: int +CMSG_ENVELOPED_RECIPIENT_V4: int +CMSG_KEY_TRANS_PKCS_1_5_VERSION: int +CMSG_KEY_TRANS_CMS_VERSION: int +CMSG_KEY_AGREE_VERSION: int +CMSG_CTRL_VERIFY_SIGNATURE: int +CMSG_CTRL_DECRYPT: int +CMSG_CTRL_VERIFY_HASH: int +CMSG_CTRL_ADD_SIGNER: int +CMSG_CTRL_DEL_SIGNER: int +CMSG_CTRL_ADD_SIGNER_UNAUTH_ATTR: int +CMSG_CTRL_DEL_SIGNER_UNAUTH_ATTR: int +CMSG_CTRL_ADD_CERT: int +CMSG_CTRL_DEL_CERT: int +CMSG_CTRL_ADD_CRL: int +CMSG_CTRL_DEL_CRL: int +CMSG_CTRL_ADD_ATTR_CERT: int +CMSG_CTRL_DEL_ATTR_CERT: int +CMSG_CTRL_KEY_TRANS_DECRYPT: int +CMSG_CTRL_KEY_AGREE_DECRYPT: int +CMSG_CTRL_VERIFY_SIGNATURE_EX: int +CMSG_CTRL_ADD_CMS_SIGNER_INFO: int +CMSG_VERIFY_SIGNER_PUBKEY: int +CMSG_VERIFY_SIGNER_CERT: int +CMSG_VERIFY_SIGNER_CHAIN: int +CMSG_VERIFY_SIGNER_NULL: int +CMSG_OID_GEN_ENCRYPT_KEY_FUNC: str +CMSG_OID_EXPORT_ENCRYPT_KEY_FUNC: str +CMSG_OID_IMPORT_ENCRYPT_KEY_FUNC: str +CMSG_CONTENT_ENCRYPT_PAD_ENCODED_LEN_FLAG: int +CMSG_DEFAULT_INSTALLABLE_FUNC_OID: int +CMSG_CONTENT_ENCRYPT_FREE_PARA_FLAG: int +CMSG_CONTENT_ENCRYPT_RELEASE_CONTEXT_FLAG: int +CMSG_OID_GEN_CONTENT_ENCRYPT_KEY_FUNC: str +CMSG_KEY_TRANS_ENCRYPT_FREE_PARA_FLAG: int +CMSG_OID_EXPORT_KEY_TRANS_FUNC: str +CMSG_KEY_AGREE_ENCRYPT_FREE_PARA_FLAG: int +CMSG_KEY_AGREE_ENCRYPT_FREE_MATERIAL_FLAG: int +CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_ALG_FLAG: int +CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_PARA_FLAG: int +CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_BITS_FLAG: int +CMSG_OID_EXPORT_KEY_AGREE_FUNC: str +CMSG_OID_IMPORT_KEY_TRANS_FUNC: str +CMSG_OID_IMPORT_KEY_AGREE_FUNC: str +CERT_KEY_PROV_HANDLE_PROP_ID: int +CERT_KEY_PROV_INFO_PROP_ID: int +CERT_SHA1_HASH_PROP_ID: int +CERT_MD5_HASH_PROP_ID: int +CERT_HASH_PROP_ID: int +CERT_KEY_CONTEXT_PROP_ID: int 
+CERT_KEY_SPEC_PROP_ID: int +CERT_IE30_RESERVED_PROP_ID: int +CERT_PUBKEY_HASH_RESERVED_PROP_ID: int +CERT_ENHKEY_USAGE_PROP_ID: int +CERT_CTL_USAGE_PROP_ID: int +CERT_NEXT_UPDATE_LOCATION_PROP_ID: int +CERT_FRIENDLY_NAME_PROP_ID: int +CERT_PVK_FILE_PROP_ID: int +CERT_DESCRIPTION_PROP_ID: int +CERT_ACCESS_STATE_PROP_ID: int +CERT_SIGNATURE_HASH_PROP_ID: int +CERT_SMART_CARD_DATA_PROP_ID: int +CERT_EFS_PROP_ID: int +CERT_FORTEZZA_DATA_PROP_ID: int +CERT_ARCHIVED_PROP_ID: int +CERT_KEY_IDENTIFIER_PROP_ID: int +CERT_AUTO_ENROLL_PROP_ID: int +CERT_PUBKEY_ALG_PARA_PROP_ID: int +CERT_CROSS_CERT_DIST_POINTS_PROP_ID: int +CERT_ISSUER_PUBLIC_KEY_MD5_HASH_PROP_ID: int +CERT_SUBJECT_PUBLIC_KEY_MD5_HASH_PROP_ID: int +CERT_ENROLLMENT_PROP_ID: int +CERT_DATE_STAMP_PROP_ID: int +CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID: int +CERT_SUBJECT_NAME_MD5_HASH_PROP_ID: int +CERT_EXTENDED_ERROR_INFO_PROP_ID: int +CERT_RENEWAL_PROP_ID: int +CERT_ARCHIVED_KEY_HASH_PROP_ID: int +CERT_AUTO_ENROLL_RETRY_PROP_ID: int +CERT_AIA_URL_RETRIEVED_PROP_ID: int +CERT_AUTHORITY_INFO_ACCESS_PROP_ID: int +CERT_BACKED_UP_PROP_ID: int +CERT_OCSP_RESPONSE_PROP_ID: int +CERT_REQUEST_ORIGINATOR_PROP_ID: int +CERT_SOURCE_LOCATION_PROP_ID: int +CERT_SOURCE_URL_PROP_ID: int +CERT_NEW_KEY_PROP_ID: int +CERT_OCSP_CACHE_PREFIX_PROP_ID: int +CERT_SMART_CARD_ROOT_INFO_PROP_ID: int +CERT_NO_AUTO_EXPIRE_CHECK_PROP_ID: int +CERT_NCRYPT_KEY_HANDLE_PROP_ID: int +CERT_HCRYPTPROV_OR_NCRYPT_KEY_HANDLE_PROP_ID: int +CERT_SUBJECT_INFO_ACCESS_PROP_ID: int +CERT_CA_OCSP_AUTHORITY_INFO_ACCESS_PROP_ID: int +CERT_CA_DISABLE_CRL_PROP_ID: int +CERT_ROOT_PROGRAM_CERT_POLICIES_PROP_ID: int +CERT_ROOT_PROGRAM_NAME_CONSTRAINTS_PROP_ID: int +CERT_SUBJECT_OCSP_AUTHORITY_INFO_ACCESS_PROP_ID: int +CERT_SUBJECT_DISABLE_CRL_PROP_ID: int +CERT_CEP_PROP_ID: int +CERT_SIGN_HASH_CNG_ALG_PROP_ID: int +CERT_SCARD_PIN_ID_PROP_ID: int +CERT_SCARD_PIN_INFO_PROP_ID: int +CERT_FIRST_RESERVED_PROP_ID: int +CERT_LAST_RESERVED_PROP_ID: int +CERT_FIRST_USER_PROP_ID: int +CERT_LAST_USER_PROP_ID: int +szOID_CERT_PROP_ID_PREFIX: str +szOID_CERT_KEY_IDENTIFIER_PROP_ID: str +szOID_CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID: str +szOID_CERT_SUBJECT_NAME_MD5_HASH_PROP_ID: str +CERT_ACCESS_STATE_WRITE_PERSIST_FLAG: int +CERT_ACCESS_STATE_SYSTEM_STORE_FLAG: int +CERT_ACCESS_STATE_LM_SYSTEM_STORE_FLAG: int +CERT_SET_KEY_PROV_HANDLE_PROP_ID: int +CERT_SET_KEY_CONTEXT_PROP_ID: int +sz_CERT_STORE_PROV_MEMORY: str +sz_CERT_STORE_PROV_FILENAME_W: str +sz_CERT_STORE_PROV_FILENAME: str +sz_CERT_STORE_PROV_SYSTEM_W: str +sz_CERT_STORE_PROV_SYSTEM: str +sz_CERT_STORE_PROV_PKCS7: str +sz_CERT_STORE_PROV_SERIALIZED: str +sz_CERT_STORE_PROV_COLLECTION: str +sz_CERT_STORE_PROV_SYSTEM_REGISTRY_W: str +sz_CERT_STORE_PROV_SYSTEM_REGISTRY: str +sz_CERT_STORE_PROV_PHYSICAL_W: str +sz_CERT_STORE_PROV_PHYSICAL: str +sz_CERT_STORE_PROV_SMART_CARD_W: str +sz_CERT_STORE_PROV_SMART_CARD: str +sz_CERT_STORE_PROV_LDAP_W: str +sz_CERT_STORE_PROV_LDAP: str +CERT_STORE_SIGNATURE_FLAG: int +CERT_STORE_TIME_VALIDITY_FLAG: int +CERT_STORE_REVOCATION_FLAG: int +CERT_STORE_NO_CRL_FLAG: int +CERT_STORE_NO_ISSUER_FLAG: int +CERT_STORE_BASE_CRL_FLAG: int +CERT_STORE_DELTA_CRL_FLAG: int +CERT_STORE_NO_CRYPT_RELEASE_FLAG: int +CERT_STORE_SET_LOCALIZED_NAME_FLAG: int +CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG: int +CERT_STORE_DELETE_FLAG: int +CERT_STORE_UNSAFE_PHYSICAL_FLAG: int +CERT_STORE_SHARE_STORE_FLAG: int +CERT_STORE_SHARE_CONTEXT_FLAG: int +CERT_STORE_MANIFOLD_FLAG: int +CERT_STORE_ENUM_ARCHIVED_FLAG: int 
+CERT_STORE_UPDATE_KEYID_FLAG: int +CERT_STORE_BACKUP_RESTORE_FLAG: int +CERT_STORE_READONLY_FLAG: int +CERT_STORE_OPEN_EXISTING_FLAG: int +CERT_STORE_CREATE_NEW_FLAG: int +CERT_STORE_MAXIMUM_ALLOWED_FLAG: int +CERT_SYSTEM_STORE_MASK: int +CERT_SYSTEM_STORE_RELOCATE_FLAG: int +CERT_SYSTEM_STORE_UNPROTECTED_FLAG: int +CERT_SYSTEM_STORE_LOCATION_MASK: int +CERT_SYSTEM_STORE_LOCATION_SHIFT: int +CERT_SYSTEM_STORE_CURRENT_USER_ID: int +CERT_SYSTEM_STORE_LOCAL_MACHINE_ID: int +CERT_SYSTEM_STORE_CURRENT_SERVICE_ID: int +CERT_SYSTEM_STORE_SERVICES_ID: int +CERT_SYSTEM_STORE_USERS_ID: int +CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID: int +CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID: int +CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID: int +CERT_SYSTEM_STORE_CURRENT_USER: int +CERT_SYSTEM_STORE_LOCAL_MACHINE: int +CERT_SYSTEM_STORE_CURRENT_SERVICE: int +CERT_SYSTEM_STORE_SERVICES: int +CERT_SYSTEM_STORE_USERS: int +CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY: int +CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY: int +CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE: int +CERT_PROT_ROOT_DISABLE_CURRENT_USER_FLAG: int +CERT_PROT_ROOT_INHIBIT_ADD_AT_INIT_FLAG: int +CERT_PROT_ROOT_INHIBIT_PURGE_LM_FLAG: int +CERT_PROT_ROOT_DISABLE_LM_AUTH_FLAG: int +CERT_PROT_ROOT_ONLY_LM_GPT_FLAG: int +CERT_PROT_ROOT_DISABLE_NT_AUTH_REQUIRED_FLAG: int +CERT_PROT_ROOT_DISABLE_NOT_DEFINED_NAME_CONSTRAINT_FLAG: int +CERT_TRUST_PUB_ALLOW_TRUST_MASK: int +CERT_TRUST_PUB_ALLOW_END_USER_TRUST: int +CERT_TRUST_PUB_ALLOW_MACHINE_ADMIN_TRUST: int +CERT_TRUST_PUB_ALLOW_ENTERPRISE_ADMIN_TRUST: int +CERT_TRUST_PUB_CHECK_PUBLISHER_REV_FLAG: int +CERT_TRUST_PUB_CHECK_TIMESTAMP_REV_FLAG: int +CERT_AUTH_ROOT_AUTO_UPDATE_LOCAL_MACHINE_REGPATH: str +CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_UNTRUSTED_ROOT_LOGGING_FLAG: int +CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_PARTIAL_CHAIN_LOGGING_FLAG: int +CERT_AUTH_ROOT_AUTO_UPDATE_ROOT_DIR_URL_VALUE_NAME: str +CERT_AUTH_ROOT_AUTO_UPDATE_SYNC_DELTA_TIME_VALUE_NAME: str +CERT_AUTH_ROOT_AUTO_UPDATE_FLAGS_VALUE_NAME: str +CERT_AUTH_ROOT_CTL_FILENAME: str +CERT_AUTH_ROOT_CTL_FILENAME_A: str +CERT_AUTH_ROOT_CAB_FILENAME: str +CERT_AUTH_ROOT_SEQ_FILENAME: str +CERT_AUTH_ROOT_CERT_EXT: str +CERT_GROUP_POLICY_SYSTEM_STORE_REGPATH: str +CERT_EFSBLOB_REGPATH: str +CERT_EFSBLOB_VALUE_NAME: str +CERT_PROT_ROOT_FLAGS_REGPATH: str +CERT_PROT_ROOT_FLAGS_VALUE_NAME: str +CERT_TRUST_PUB_SAFER_GROUP_POLICY_REGPATH: str +CERT_LOCAL_MACHINE_SYSTEM_STORE_REGPATH: str +CERT_TRUST_PUB_SAFER_LOCAL_MACHINE_REGPATH: str +CERT_TRUST_PUB_AUTHENTICODE_FLAGS_VALUE_NAME: str +CERT_OCM_SUBCOMPONENTS_LOCAL_MACHINE_REGPATH: str +CERT_OCM_SUBCOMPONENTS_ROOT_AUTO_UPDATE_VALUE_NAME: str +CERT_DISABLE_ROOT_AUTO_UPDATE_REGPATH: str +CERT_DISABLE_ROOT_AUTO_UPDATE_VALUE_NAME: str +CERT_REGISTRY_STORE_REMOTE_FLAG: int +CERT_REGISTRY_STORE_SERIALIZED_FLAG: int +CERT_REGISTRY_STORE_CLIENT_GPT_FLAG: int +CERT_REGISTRY_STORE_LM_GPT_FLAG: int +CERT_REGISTRY_STORE_ROAMING_FLAG: int +CERT_REGISTRY_STORE_MY_IE_DIRTY_FLAG: int +CERT_IE_DIRTY_FLAGS_REGPATH: str +CERT_FILE_STORE_COMMIT_ENABLE_FLAG: int +CERT_LDAP_STORE_SIGN_FLAG: int +CERT_LDAP_STORE_AREC_EXCLUSIVE_FLAG: int +CERT_LDAP_STORE_OPENED_FLAG: int +CERT_LDAP_STORE_UNBIND_FLAG: int +CRYPT_OID_OPEN_STORE_PROV_FUNC: str +CERT_STORE_PROV_EXTERNAL_FLAG: int +CERT_STORE_PROV_DELETED_FLAG: int +CERT_STORE_PROV_NO_PERSIST_FLAG: int +CERT_STORE_PROV_SYSTEM_STORE_FLAG: int +CERT_STORE_PROV_LM_SYSTEM_STORE_FLAG: int +CERT_STORE_PROV_CLOSE_FUNC: int +CERT_STORE_PROV_READ_CERT_FUNC: int 
+CERT_STORE_PROV_WRITE_CERT_FUNC: int +CERT_STORE_PROV_DELETE_CERT_FUNC: int +CERT_STORE_PROV_SET_CERT_PROPERTY_FUNC: int +CERT_STORE_PROV_READ_CRL_FUNC: int +CERT_STORE_PROV_WRITE_CRL_FUNC: int +CERT_STORE_PROV_DELETE_CRL_FUNC: int +CERT_STORE_PROV_SET_CRL_PROPERTY_FUNC: int +CERT_STORE_PROV_READ_CTL_FUNC: int +CERT_STORE_PROV_WRITE_CTL_FUNC: int +CERT_STORE_PROV_DELETE_CTL_FUNC: int +CERT_STORE_PROV_SET_CTL_PROPERTY_FUNC: int +CERT_STORE_PROV_CONTROL_FUNC: int +CERT_STORE_PROV_FIND_CERT_FUNC: int +CERT_STORE_PROV_FREE_FIND_CERT_FUNC: int +CERT_STORE_PROV_GET_CERT_PROPERTY_FUNC: int +CERT_STORE_PROV_FIND_CRL_FUNC: int +CERT_STORE_PROV_FREE_FIND_CRL_FUNC: int +CERT_STORE_PROV_GET_CRL_PROPERTY_FUNC: int +CERT_STORE_PROV_FIND_CTL_FUNC: int +CERT_STORE_PROV_FREE_FIND_CTL_FUNC: int +CERT_STORE_PROV_GET_CTL_PROPERTY_FUNC: int +CERT_STORE_PROV_WRITE_ADD_FLAG: int +CERT_STORE_SAVE_AS_STORE: int +CERT_STORE_SAVE_AS_PKCS7: int +CERT_STORE_SAVE_TO_FILE: int +CERT_STORE_SAVE_TO_MEMORY: int +CERT_STORE_SAVE_TO_FILENAME_A: int +CERT_STORE_SAVE_TO_FILENAME_W: int +CERT_STORE_SAVE_TO_FILENAME: int +CERT_CLOSE_STORE_FORCE_FLAG: int +CERT_CLOSE_STORE_CHECK_FLAG: int +CERT_COMPARE_MASK: int +CERT_COMPARE_SHIFT: int +CERT_COMPARE_ANY: int +CERT_COMPARE_SHA1_HASH: int +CERT_COMPARE_NAME: int +CERT_COMPARE_ATTR: int +CERT_COMPARE_MD5_HASH: int +CERT_COMPARE_PROPERTY: int +CERT_COMPARE_PUBLIC_KEY: int +CERT_COMPARE_HASH: int +CERT_COMPARE_NAME_STR_A: int +CERT_COMPARE_NAME_STR_W: int +CERT_COMPARE_KEY_SPEC: int +CERT_COMPARE_ENHKEY_USAGE: int +CERT_COMPARE_CTL_USAGE: int +CERT_COMPARE_SUBJECT_CERT: int +CERT_COMPARE_ISSUER_OF: int +CERT_COMPARE_EXISTING: int +CERT_COMPARE_SIGNATURE_HASH: int +CERT_COMPARE_KEY_IDENTIFIER: int +CERT_COMPARE_CERT_ID: int +CERT_COMPARE_CROSS_CERT_DIST_POINTS: int +CERT_COMPARE_PUBKEY_MD5_HASH: int +CERT_FIND_ANY: int +CERT_FIND_SHA1_HASH: int +CERT_FIND_MD5_HASH: int +CERT_FIND_SIGNATURE_HASH: int +CERT_FIND_KEY_IDENTIFIER: int +CERT_FIND_HASH: int +CERT_FIND_PROPERTY: int +CERT_FIND_PUBLIC_KEY: int +CERT_FIND_SUBJECT_NAME: int +CERT_FIND_SUBJECT_ATTR: int +CERT_FIND_ISSUER_NAME: int +CERT_FIND_ISSUER_ATTR: int +CERT_FIND_SUBJECT_STR_A: int +CERT_FIND_SUBJECT_STR_W: int +CERT_FIND_SUBJECT_STR: int +CERT_FIND_ISSUER_STR_A: int +CERT_FIND_ISSUER_STR_W: int +CERT_FIND_ISSUER_STR: int +CERT_FIND_KEY_SPEC: int +CERT_FIND_ENHKEY_USAGE: int +CERT_FIND_CTL_USAGE: int +CERT_FIND_SUBJECT_CERT: int +CERT_FIND_ISSUER_OF: int +CERT_FIND_EXISTING: int +CERT_FIND_CERT_ID: int +CERT_FIND_CROSS_CERT_DIST_POINTS: int +CERT_FIND_PUBKEY_MD5_HASH: int +CERT_FIND_OPTIONAL_ENHKEY_USAGE_FLAG: int +CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG: int +CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG: int +CERT_FIND_NO_ENHKEY_USAGE_FLAG: int +CERT_FIND_OR_ENHKEY_USAGE_FLAG: int +CERT_FIND_VALID_ENHKEY_USAGE_FLAG: int +CERT_FIND_OPTIONAL_CTL_USAGE_FLAG: int +CERT_FIND_EXT_ONLY_CTL_USAGE_FLAG: int +CERT_FIND_PROP_ONLY_CTL_USAGE_FLAG: int +CERT_FIND_NO_CTL_USAGE_FLAG: int +CERT_FIND_OR_CTL_USAGE_FLAG: int +CERT_FIND_VALID_CTL_USAGE_FLAG: int +CERT_SET_PROPERTY_IGNORE_PERSIST_ERROR_FLAG: int +CERT_SET_PROPERTY_INHIBIT_PERSIST_FLAG: int +CTL_ENTRY_FROM_PROP_CHAIN_FLAG: int +CRL_FIND_ANY: int +CRL_FIND_ISSUED_BY: int +CRL_FIND_EXISTING: int +CRL_FIND_ISSUED_FOR: int +CRL_FIND_ISSUED_BY_AKI_FLAG: int +CRL_FIND_ISSUED_BY_SIGNATURE_FLAG: int +CRL_FIND_ISSUED_BY_DELTA_FLAG: int +CRL_FIND_ISSUED_BY_BASE_FLAG: int +CERT_STORE_ADD_NEW: int +CERT_STORE_ADD_USE_EXISTING: int +CERT_STORE_ADD_REPLACE_EXISTING: int +CERT_STORE_ADD_ALWAYS: int 
+CERT_STORE_ADD_REPLACE_EXISTING_INHERIT_PROPERTIES: int +CERT_STORE_ADD_NEWER: int +CERT_STORE_ADD_NEWER_INHERIT_PROPERTIES: int +CERT_STORE_CERTIFICATE_CONTEXT: int +CERT_STORE_CRL_CONTEXT: int +CERT_STORE_CTL_CONTEXT: int +CERT_STORE_ALL_CONTEXT_FLAG: int +CERT_STORE_CERTIFICATE_CONTEXT_FLAG: int +CERT_STORE_CRL_CONTEXT_FLAG: int +CERT_STORE_CTL_CONTEXT_FLAG: int +CTL_ANY_SUBJECT_TYPE: int +CTL_CERT_SUBJECT_TYPE: int +CTL_FIND_ANY: int +CTL_FIND_SHA1_HASH: int +CTL_FIND_MD5_HASH: int +CTL_FIND_USAGE: int +CTL_FIND_SUBJECT: int +CTL_FIND_EXISTING: int +CTL_FIND_SAME_USAGE_FLAG: int +CERT_STORE_CTRL_RESYNC: int +CERT_STORE_CTRL_NOTIFY_CHANGE: int +CERT_STORE_CTRL_COMMIT: int +CERT_STORE_CTRL_AUTO_RESYNC: int +CERT_STORE_CTRL_CANCEL_NOTIFY: int +CERT_STORE_CTRL_INHIBIT_DUPLICATE_HANDLE_FLAG: int +CERT_STORE_CTRL_COMMIT_FORCE_FLAG: int +CERT_STORE_CTRL_COMMIT_CLEAR_FLAG: int +CERT_STORE_LOCALIZED_NAME_PROP_ID: int +CERT_CREATE_CONTEXT_NOCOPY_FLAG: int +CERT_CREATE_CONTEXT_SORTED_FLAG: int +CERT_CREATE_CONTEXT_NO_HCRYPTMSG_FLAG: int +CERT_CREATE_CONTEXT_NO_ENTRY_FLAG: int +CERT_PHYSICAL_STORE_ADD_ENABLE_FLAG: int +CERT_PHYSICAL_STORE_OPEN_DISABLE_FLAG: int +CERT_PHYSICAL_STORE_REMOTE_OPEN_DISABLE_FLAG: int +CERT_PHYSICAL_STORE_INSERT_COMPUTER_NAME_ENABLE_FLAG: int +CERT_PHYSICAL_STORE_PREDEFINED_ENUM_FLAG: int +CERT_PHYSICAL_STORE_DEFAULT_NAME: str +CERT_PHYSICAL_STORE_GROUP_POLICY_NAME: str +CERT_PHYSICAL_STORE_LOCAL_MACHINE_NAME: str +CERT_PHYSICAL_STORE_DS_USER_CERTIFICATE_NAME: str +CERT_PHYSICAL_STORE_LOCAL_MACHINE_GROUP_POLICY_NAME: str +CERT_PHYSICAL_STORE_ENTERPRISE_NAME: str +CERT_PHYSICAL_STORE_AUTH_ROOT_NAME: str +CERT_PHYSICAL_STORE_SMART_CARD_NAME: str +CRYPT_OID_OPEN_SYSTEM_STORE_PROV_FUNC: str +CRYPT_OID_REGISTER_SYSTEM_STORE_FUNC: str +CRYPT_OID_UNREGISTER_SYSTEM_STORE_FUNC: str +CRYPT_OID_ENUM_SYSTEM_STORE_FUNC: str +CRYPT_OID_REGISTER_PHYSICAL_STORE_FUNC: str +CRYPT_OID_UNREGISTER_PHYSICAL_STORE_FUNC: str +CRYPT_OID_ENUM_PHYSICAL_STORE_FUNC: str +CRYPT_OID_SYSTEM_STORE_LOCATION_VALUE_NAME: str +CMSG_TRUSTED_SIGNER_FLAG: int +CMSG_SIGNER_ONLY_FLAG: int +CMSG_USE_SIGNER_INDEX_FLAG: int +CMSG_CMS_ENCAPSULATED_CTL_FLAG: int +CMSG_ENCODE_SORTED_CTL_FLAG: int +CMSG_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG: int +CERT_VERIFY_INHIBIT_CTL_UPDATE_FLAG: int +CERT_VERIFY_TRUSTED_SIGNERS_FLAG: int +CERT_VERIFY_NO_TIME_CHECK_FLAG: int +CERT_VERIFY_ALLOW_MORE_USAGE_FLAG: int +CERT_VERIFY_UPDATED_CTL_FLAG: int +CERT_CONTEXT_REVOCATION_TYPE: int +CERT_VERIFY_REV_CHAIN_FLAG: int +CERT_VERIFY_CACHE_ONLY_BASED_REVOCATION: int +CERT_VERIFY_REV_ACCUMULATIVE_TIMEOUT_FLAG: int +CERT_UNICODE_IS_RDN_ATTRS_FLAG: int +CERT_CASE_INSENSITIVE_IS_RDN_ATTRS_FLAG: int +CRYPT_VERIFY_CERT_SIGN_SUBJECT_BLOB: int +CRYPT_VERIFY_CERT_SIGN_SUBJECT_CERT: int +CRYPT_VERIFY_CERT_SIGN_SUBJECT_CRL: int +CRYPT_VERIFY_CERT_SIGN_ISSUER_PUBKEY: int +CRYPT_VERIFY_CERT_SIGN_ISSUER_CERT: int +CRYPT_VERIFY_CERT_SIGN_ISSUER_CHAIN: int +CRYPT_VERIFY_CERT_SIGN_ISSUER_NULL: int +CRYPT_DEFAULT_CONTEXT_AUTO_RELEASE_FLAG: int +CRYPT_DEFAULT_CONTEXT_PROCESS_FLAG: int +CRYPT_DEFAULT_CONTEXT_CERT_SIGN_OID: int +CRYPT_DEFAULT_CONTEXT_MULTI_CERT_SIGN_OID: int +CRYPT_OID_EXPORT_PUBLIC_KEY_INFO_FUNC: str +CRYPT_OID_IMPORT_PUBLIC_KEY_INFO_FUNC: str +CRYPT_ACQUIRE_CACHE_FLAG: int +CRYPT_ACQUIRE_USE_PROV_INFO_FLAG: int +CRYPT_ACQUIRE_COMPARE_KEY_FLAG: int +CRYPT_ACQUIRE_SILENT_FLAG: int +CRYPT_FIND_USER_KEYSET_FLAG: int +CRYPT_FIND_MACHINE_KEYSET_FLAG: int +CRYPT_FIND_SILENT_KEYSET_FLAG: int +CRYPT_OID_IMPORT_PRIVATE_KEY_INFO_FUNC: str 
+CRYPT_OID_EXPORT_PRIVATE_KEY_INFO_FUNC: str +CRYPT_DELETE_KEYSET: int +CERT_SIMPLE_NAME_STR: int +CERT_OID_NAME_STR: int +CERT_X500_NAME_STR: int +CERT_NAME_STR_SEMICOLON_FLAG: int +CERT_NAME_STR_NO_PLUS_FLAG: int +CERT_NAME_STR_NO_QUOTING_FLAG: int +CERT_NAME_STR_CRLF_FLAG: int +CERT_NAME_STR_COMMA_FLAG: int +CERT_NAME_STR_REVERSE_FLAG: int +CERT_NAME_STR_DISABLE_IE4_UTF8_FLAG: int +CERT_NAME_STR_ENABLE_T61_UNICODE_FLAG: int +CERT_NAME_STR_ENABLE_UTF8_UNICODE_FLAG: int +CERT_NAME_EMAIL_TYPE: int +CERT_NAME_RDN_TYPE: int +CERT_NAME_ATTR_TYPE: int +CERT_NAME_SIMPLE_DISPLAY_TYPE: int +CERT_NAME_FRIENDLY_DISPLAY_TYPE: int +CERT_NAME_DNS_TYPE: int +CERT_NAME_URL_TYPE: int +CERT_NAME_UPN_TYPE: int +CERT_NAME_ISSUER_FLAG: int +CERT_NAME_DISABLE_IE4_UTF8_FLAG: int +CRYPT_MESSAGE_BARE_CONTENT_OUT_FLAG: int +CRYPT_MESSAGE_ENCAPSULATED_CONTENT_OUT_FLAG: int +CRYPT_MESSAGE_KEYID_SIGNER_FLAG: int +CRYPT_MESSAGE_SILENT_KEYSET_FLAG: int +CRYPT_MESSAGE_KEYID_RECIPIENT_FLAG: int +CERT_QUERY_OBJECT_FILE: int +CERT_QUERY_OBJECT_BLOB: int +CERT_QUERY_CONTENT_CERT: int +CERT_QUERY_CONTENT_CTL: int +CERT_QUERY_CONTENT_CRL: int +CERT_QUERY_CONTENT_SERIALIZED_STORE: int +CERT_QUERY_CONTENT_SERIALIZED_CERT: int +CERT_QUERY_CONTENT_SERIALIZED_CTL: int +CERT_QUERY_CONTENT_SERIALIZED_CRL: int +CERT_QUERY_CONTENT_PKCS7_SIGNED: int +CERT_QUERY_CONTENT_PKCS7_UNSIGNED: int +CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED: int +CERT_QUERY_CONTENT_PKCS10: int +CERT_QUERY_CONTENT_PFX: int +CERT_QUERY_CONTENT_CERT_PAIR: int +CERT_QUERY_CONTENT_FLAG_CERT: int +CERT_QUERY_CONTENT_FLAG_CTL: int +CERT_QUERY_CONTENT_FLAG_CRL: int +CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE: int +CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT: int +CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL: int +CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL: int +CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED: int +CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED: int +CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED: int +CERT_QUERY_CONTENT_FLAG_PKCS10: int +CERT_QUERY_CONTENT_FLAG_PFX: int +CERT_QUERY_CONTENT_FLAG_CERT_PAIR: int +CERT_QUERY_CONTENT_FLAG_ALL: int +CERT_QUERY_FORMAT_BINARY: int +CERT_QUERY_FORMAT_BASE64_ENCODED: int +CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED: int +CERT_QUERY_FORMAT_FLAG_BINARY: int +CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED: int +CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED: int +CERT_QUERY_FORMAT_FLAG_ALL: int +CREDENTIAL_OID_PASSWORD_CREDENTIALS_A: int +CREDENTIAL_OID_PASSWORD_CREDENTIALS_W: int +CREDENTIAL_OID_PASSWORD_CREDENTIALS: int +SCHEME_OID_RETRIEVE_ENCODED_OBJECT_FUNC: str +SCHEME_OID_RETRIEVE_ENCODED_OBJECTW_FUNC: str +CONTEXT_OID_CREATE_OBJECT_CONTEXT_FUNC: str +CONTEXT_OID_CERTIFICATE: int +CONTEXT_OID_CRL: int +CONTEXT_OID_CTL: int +CONTEXT_OID_PKCS7: int +CONTEXT_OID_CAPI2_ANY: int +CONTEXT_OID_OCSP_RESP: int +CRYPT_RETRIEVE_MULTIPLE_OBJECTS: int +CRYPT_CACHE_ONLY_RETRIEVAL: int +CRYPT_WIRE_ONLY_RETRIEVAL: int +CRYPT_DONT_CACHE_RESULT: int +CRYPT_ASYNC_RETRIEVAL: int +CRYPT_STICKY_CACHE_RETRIEVAL: int +CRYPT_LDAP_SCOPE_BASE_ONLY_RETRIEVAL: int +CRYPT_OFFLINE_CHECK_RETRIEVAL: int +CRYPT_LDAP_INSERT_ENTRY_ATTRIBUTE: int +CRYPT_LDAP_SIGN_RETRIEVAL: int +CRYPT_NO_AUTH_RETRIEVAL: int +CRYPT_LDAP_AREC_EXCLUSIVE_RETRIEVAL: int +CRYPT_AIA_RETRIEVAL: int +CRYPT_VERIFY_CONTEXT_SIGNATURE: int +CRYPT_VERIFY_DATA_HASH: int +CRYPT_KEEP_TIME_VALID: int +CRYPT_DONT_VERIFY_SIGNATURE: int +CRYPT_DONT_CHECK_TIME_VALIDITY: int +CRYPT_CHECK_FRESHNESS_TIME_VALIDITY: int +CRYPT_ACCUMULATIVE_TIMEOUT: int +CRYPT_PARAM_ASYNC_RETRIEVAL_COMPLETION: int +CRYPT_PARAM_CANCEL_ASYNC_RETRIEVAL: int 
+CRYPT_GET_URL_FROM_PROPERTY: int +CRYPT_GET_URL_FROM_EXTENSION: int +CRYPT_GET_URL_FROM_UNAUTH_ATTRIBUTE: int +CRYPT_GET_URL_FROM_AUTH_ATTRIBUTE: int +URL_OID_GET_OBJECT_URL_FUNC: str +TIME_VALID_OID_GET_OBJECT_FUNC: str +TIME_VALID_OID_FLUSH_OBJECT_FUNC: str +TIME_VALID_OID_GET_CTL: int +TIME_VALID_OID_GET_CRL: int +TIME_VALID_OID_GET_CRL_FROM_CERT: int +TIME_VALID_OID_GET_FRESHEST_CRL_FROM_CERT: int +TIME_VALID_OID_GET_FRESHEST_CRL_FROM_CRL: int +TIME_VALID_OID_FLUSH_CTL: int +TIME_VALID_OID_FLUSH_CRL: int +TIME_VALID_OID_FLUSH_CRL_FROM_CERT: int +TIME_VALID_OID_FLUSH_FRESHEST_CRL_FROM_CERT: int +TIME_VALID_OID_FLUSH_FRESHEST_CRL_FROM_CRL: int +CRYPTPROTECT_PROMPT_ON_UNPROTECT: int +CRYPTPROTECT_PROMPT_ON_PROTECT: int +CRYPTPROTECT_PROMPT_RESERVED: int +CRYPTPROTECT_PROMPT_STRONG: int +CRYPTPROTECT_PROMPT_REQUIRE_STRONG: int +CRYPTPROTECT_UI_FORBIDDEN: int +CRYPTPROTECT_LOCAL_MACHINE: int +CRYPTPROTECT_CRED_SYNC: int +CRYPTPROTECT_AUDIT: int +CRYPTPROTECT_NO_RECOVERY: int +CRYPTPROTECT_VERIFY_PROTECTION: int +CRYPTPROTECT_CRED_REGENERATE: int +CRYPTPROTECT_FIRST_RESERVED_FLAGVAL: int +CRYPTPROTECT_LAST_RESERVED_FLAGVAL: int +CRYPTPROTECTMEMORY_BLOCK_SIZE: int +CRYPTPROTECTMEMORY_SAME_PROCESS: int +CRYPTPROTECTMEMORY_CROSS_PROCESS: int +CRYPTPROTECTMEMORY_SAME_LOGON: int +CERT_CREATE_SELFSIGN_NO_SIGN: int +CERT_CREATE_SELFSIGN_NO_KEY_INFO: int +CRYPT_KEYID_MACHINE_FLAG: int +CRYPT_KEYID_ALLOC_FLAG: int +CRYPT_KEYID_DELETE_FLAG: int +CRYPT_KEYID_SET_NEW_FLAG: int +CERT_CHAIN_MAX_AIA_URL_COUNT_IN_CERT_DEFAULT: int +CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_COUNT_PER_CHAIN_DEFAULT: int +CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_BYTE_COUNT_DEFAULT: int +CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_CERT_COUNT_DEFAULT: int +CERT_CHAIN_CACHE_END_CERT: int +CERT_CHAIN_THREAD_STORE_SYNC: int +CERT_CHAIN_CACHE_ONLY_URL_RETRIEVAL: int +CERT_CHAIN_USE_LOCAL_MACHINE_STORE: int +CERT_CHAIN_ENABLE_CACHE_AUTO_UPDATE: int +CERT_CHAIN_ENABLE_SHARE_STORE: int +CERT_TRUST_NO_ERROR: int +CERT_TRUST_IS_NOT_TIME_VALID: int +CERT_TRUST_IS_NOT_TIME_NESTED: int +CERT_TRUST_IS_REVOKED: int +CERT_TRUST_IS_NOT_SIGNATURE_VALID: int +CERT_TRUST_IS_NOT_VALID_FOR_USAGE: int +CERT_TRUST_IS_UNTRUSTED_ROOT: int +CERT_TRUST_REVOCATION_STATUS_UNKNOWN: int +CERT_TRUST_IS_CYCLIC: int +CERT_TRUST_INVALID_EXTENSION: int +CERT_TRUST_INVALID_POLICY_CONSTRAINTS: int +CERT_TRUST_INVALID_BASIC_CONSTRAINTS: int +CERT_TRUST_INVALID_NAME_CONSTRAINTS: int +CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT: int +CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT: int +CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT: int +CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT: int +CERT_TRUST_IS_OFFLINE_REVOCATION: int +CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY: int +CERT_TRUST_IS_PARTIAL_CHAIN: int +CERT_TRUST_CTL_IS_NOT_TIME_VALID: int +CERT_TRUST_CTL_IS_NOT_SIGNATURE_VALID: int +CERT_TRUST_CTL_IS_NOT_VALID_FOR_USAGE: int +CERT_TRUST_HAS_EXACT_MATCH_ISSUER: int +CERT_TRUST_HAS_KEY_MATCH_ISSUER: int +CERT_TRUST_HAS_NAME_MATCH_ISSUER: int +CERT_TRUST_IS_SELF_SIGNED: int +CERT_TRUST_HAS_PREFERRED_ISSUER: int +CERT_TRUST_HAS_ISSUANCE_CHAIN_POLICY: int +CERT_TRUST_HAS_VALID_NAME_CONSTRAINTS: int +CERT_TRUST_IS_COMPLEX_CHAIN: int +USAGE_MATCH_TYPE_AND: int +USAGE_MATCH_TYPE_OR: int +CERT_CHAIN_REVOCATION_CHECK_END_CERT: int +CERT_CHAIN_REVOCATION_CHECK_CHAIN: int +CERT_CHAIN_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT: int +CERT_CHAIN_REVOCATION_CHECK_CACHE_ONLY: int +CERT_CHAIN_REVOCATION_ACCUMULATIVE_TIMEOUT: int +CERT_CHAIN_DISABLE_PASS1_QUALITY_FILTERING: int +CERT_CHAIN_RETURN_LOWER_QUALITY_CONTEXTS: int 
+CERT_CHAIN_DISABLE_AUTH_ROOT_AUTO_UPDATE: int +CERT_CHAIN_TIMESTAMP_TIME: int +REVOCATION_OID_CRL_REVOCATION: int +CERT_CHAIN_FIND_BY_ISSUER: int +CERT_CHAIN_FIND_BY_ISSUER_COMPARE_KEY_FLAG: int +CERT_CHAIN_FIND_BY_ISSUER_COMPLEX_CHAIN_FLAG: int +CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_URL_FLAG: int +CERT_CHAIN_FIND_BY_ISSUER_LOCAL_MACHINE_FLAG: int +CERT_CHAIN_FIND_BY_ISSUER_NO_KEY_FLAG: int +CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_FLAG: int +CERT_CHAIN_POLICY_IGNORE_NOT_TIME_VALID_FLAG: int +CERT_CHAIN_POLICY_IGNORE_CTL_NOT_TIME_VALID_FLAG: int +CERT_CHAIN_POLICY_IGNORE_NOT_TIME_NESTED_FLAG: int +CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG: int +CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS: int +CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG: int +CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG: int +CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG: int +CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG: int +CERT_CHAIN_POLICY_IGNORE_END_REV_UNKNOWN_FLAG: int +CERT_CHAIN_POLICY_IGNORE_CTL_SIGNER_REV_UNKNOWN_FLAG: int +CERT_CHAIN_POLICY_IGNORE_CA_REV_UNKNOWN_FLAG: int +CERT_CHAIN_POLICY_IGNORE_ROOT_REV_UNKNOWN_FLAG: int +CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS: int +CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG: int +CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG: int +CRYPT_OID_VERIFY_CERTIFICATE_CHAIN_POLICY_FUNC: str +AUTHTYPE_CLIENT: int +AUTHTYPE_SERVER: int +BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_CA_FLAG: int +BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_END_ENTITY_FLAG: int +MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG: int +CRYPT_STRING_BASE64HEADER: int +CRYPT_STRING_BASE64: int +CRYPT_STRING_BINARY: int +CRYPT_STRING_BASE64REQUESTHEADER: int +CRYPT_STRING_HEX: int +CRYPT_STRING_HEXASCII: int +CRYPT_STRING_BASE64_ANY: int +CRYPT_STRING_ANY: int +CRYPT_STRING_HEX_ANY: int +CRYPT_STRING_BASE64X509CRLHEADER: int +CRYPT_STRING_HEXADDR: int +CRYPT_STRING_HEXASCIIADDR: int +CRYPT_STRING_NOCR: int +CRYPT_USER_KEYSET: int +PKCS12_IMPORT_RESERVED_MASK: int +REPORT_NO_PRIVATE_KEY: int +REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY: int +EXPORT_PRIVATE_KEYS: int +PKCS12_EXPORT_RESERVED_MASK: int +CERT_STORE_PROV_MSG: int +CERT_STORE_PROV_MEMORY: int +CERT_STORE_PROV_FILE: int +CERT_STORE_PROV_REG: int +CERT_STORE_PROV_PKCS7: int +CERT_STORE_PROV_SERIALIZED: int +CERT_STORE_PROV_FILENAME: int +CERT_STORE_PROV_SYSTEM: int +CERT_STORE_PROV_COLLECTION: int +CERT_STORE_PROV_SYSTEM_REGISTRY: int +CERT_STORE_PROV_PHYSICAL: int +CERT_STORE_PROV_SMART_CARD: int +CERT_STORE_PROV_LDAP: int +URL_OID_CERTIFICATE_ISSUER: int +URL_OID_CERTIFICATE_CRL_DIST_POINT: int +URL_OID_CTL_ISSUER: int +URL_OID_CTL_NEXT_UPDATE: int +URL_OID_CRL_ISSUER: int +URL_OID_CERTIFICATE_FRESHEST_CRL: int +URL_OID_CRL_FRESHEST_CRL: int +URL_OID_CROSS_CERT_DIST_POINT: int +URL_OID_CERTIFICATE_OCSP: int +URL_OID_CERTIFICATE_OCSP_AND_CRL_DIST_POINT: int +URL_OID_CERTIFICATE_CRL_DIST_POINT_AND_OCSP: int +URL_OID_CROSS_CERT_SUBJECT_INFO_ACCESS: int +URL_OID_CERTIFICATE_ONLY_OCSP: int +CMSG_CTRL_MAIL_LIST_DECRYPT: int +CMSG_MAIL_LIST_ENCRYPT_FREE_PARA_FLAG: int +CMSG_MAIL_LIST_HANDLE_KEY_CHOICE: int +CMSG_MAIL_LIST_RECIPIENT: int +CMSG_MAIL_LIST_VERSION: int +CMSG_OID_EXPORT_MAIL_LIST_FUNC: str +CMSG_OID_IMPORT_MAIL_LIST_FUNC: str +CTL_FIND_NO_LIST_ID_CBDATA: int +szOID_AUTHORITY_REVOCATION_LIST: str +szOID_CERTIFICATE_REVOCATION_LIST: str +szOID_ROOT_LIST_SIGNER: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32evtlogutil.pyi 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32evtlogutil.pyi new file mode 100644 index 00000000..626cab5b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32evtlogutil.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + +import _win32typing + +error: Incomplete +langid: Incomplete + +def AddSourceToRegistry( + appName, msgDLL: Incomplete | None = ..., eventLogType: str = ..., eventLogFlags: Incomplete | None = ... +) -> None: ... +def RemoveSourceFromRegistry(appName, eventLogType: str = ...) -> None: ... +def ReportEvent( + appName: str, + eventID: int, + eventCategory: int = ..., + eventType: int = ..., + strings: Iterable[str] | None = ..., + data: bytes | None = ..., + sid: _win32typing.PySID | None = ..., +) -> None: ... +def FormatMessage(eventLogRecord: _win32typing.PyEventLogRecord, logType: str = ...): ... +def SafeFormatMessage(eventLogRecord, logType: Incomplete | None = ...): ... +def FeedEventLogRecords(feeder, machineName: Incomplete | None = ..., logName: str = ..., readFlags: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32gui_struct.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32gui_struct.pyi new file mode 100644 index 00000000..e7f2c24f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32gui_struct.pyi @@ -0,0 +1,198 @@ +from _typeshed import Incomplete, ReadableBuffer +from array import array +from typing import NamedTuple + +is64bit: bool + +class _WMNOTIFY(NamedTuple): + hwndFrom: Incomplete + idFrom: Incomplete + code: Incomplete + +def UnpackWMNOTIFY(lparam: int) -> _WMNOTIFY: ... + +class _NMITEMACTIVATE(NamedTuple): + hwndFrom: Incomplete + idFrom: Incomplete + code: Incomplete + iItem: Incomplete + iSubItem: Incomplete + uNewState: Incomplete + uOldState: Incomplete + uChanged: Incomplete + actionx: Incomplete + actiony: Incomplete + lParam: Incomplete + +def UnpackNMITEMACTIVATE(lparam) -> _NMITEMACTIVATE: ... +def PackMENUITEMINFO( + fType: Incomplete | None = ..., + fState: Incomplete | None = ..., + wID: Incomplete | None = ..., + hSubMenu: Incomplete | None = ..., + hbmpChecked: Incomplete | None = ..., + hbmpUnchecked: Incomplete | None = ..., + dwItemData: Incomplete | None = ..., + text: Incomplete | None = ..., + hbmpItem: Incomplete | None = ..., + dwTypeData: Incomplete | None = ..., +) -> tuple[array[int], list[Incomplete]]: ... + +class _MENUITEMINFO(NamedTuple): + fType: int | None + fState: int | None + wID: int | None + hSubMenu: int | None + hbmpChecked: int | None + hbmpUnchecked: int | None + dwItemData: int | None + text: str | None + hbmpItem: int | None + +def UnpackMENUITEMINFO(s: ReadableBuffer) -> _MENUITEMINFO: ... +def EmptyMENUITEMINFO(mask: Incomplete | None = ..., text_buf_size: int = ...) -> tuple[array[int], list[array[int]]]: ... +def PackMENUINFO( + dwStyle: Incomplete | None = ..., + cyMax: Incomplete | None = ..., + hbrBack: Incomplete | None = ..., + dwContextHelpID: Incomplete | None = ..., + dwMenuData: Incomplete | None = ..., + fMask: int = ..., +) -> array[int]: ... 
+ +class _MENUINFO(NamedTuple): + dwStyle: Incomplete | None + cyMax: Incomplete | None + hbrBack: Incomplete | None + dwContextHelpID: Incomplete | None + dwMenuData: Incomplete | None + +def UnpackMENUINFO(s: ReadableBuffer) -> _MENUINFO: ... +def EmptyMENUINFO(mask: Incomplete | None = ...) -> array[int]: ... +def PackTVINSERTSTRUCT(parent, insertAfter, tvitem) -> tuple[bytes, list[Incomplete]]: ... +def PackTVITEM(hitem, state, stateMask, text, image, selimage, citems, param) -> tuple[array[int], list[Incomplete]]: ... +def EmptyTVITEM(hitem, mask: Incomplete | None = ..., text_buf_size: int = ...) -> tuple[array[int], list[Incomplete]]: ... + +class _TVITEM(NamedTuple): + item_hItem: Incomplete + item_state: Incomplete | None + item_stateMask: Incomplete | None + text: Incomplete | None + item_image: Incomplete | None + item_selimage: Incomplete | None + item_cChildren: Incomplete | None + item_param: Incomplete | None + +def UnpackTVITEM(buffer: ReadableBuffer) -> _TVITEM: ... + +class _TVNOTIFY(NamedTuple): + hwndFrom: Incomplete + id: Incomplete + code: Incomplete + action: Incomplete + item_old: _TVITEM + item_new: _TVITEM + +def UnpackTVNOTIFY(lparam: int) -> _TVNOTIFY: ... + +class _TVDISPINFO(NamedTuple): + hwndFrom: Incomplete + id: Incomplete + code: Incomplete + item: _TVITEM + +def UnpackTVDISPINFO(lparam: int) -> _TVDISPINFO: ... +def PackLVITEM( + item: Incomplete | None = ..., + subItem: Incomplete | None = ..., + state: Incomplete | None = ..., + stateMask: Incomplete | None = ..., + text: Incomplete | None = ..., + image: Incomplete | None = ..., + param: Incomplete | None = ..., + indent: Incomplete | None = ..., +) -> tuple[array[int], list[Incomplete]]: ... + +class _LVITEM(NamedTuple): + item_item: Incomplete + item_subItem: Incomplete + item_state: Incomplete | None + item_stateMask: Incomplete | None + text: Incomplete | None + item_image: Incomplete | None + item_param: Incomplete | None + item_indent: Incomplete | None + +def UnpackLVITEM(buffer: ReadableBuffer) -> _LVITEM: ... + +class _LVDISPINFO(NamedTuple): + hwndFrom: Incomplete + id: Incomplete + code: Incomplete + item: _LVITEM + +def UnpackLVDISPINFO(lparam: int) -> _LVDISPINFO: ... + +class _UnpackLVNOTIFY(NamedTuple): + hwndFrom: Incomplete + id: Incomplete + code: Incomplete + item: Incomplete + subitem: Incomplete + newstate: Incomplete + oldstate: Incomplete + changed: Incomplete + pt: tuple[Incomplete, Incomplete] + lparam: Incomplete + +def UnpackLVNOTIFY(lparam: int) -> _UnpackLVNOTIFY: ... +def EmptyLVITEM( + item, subitem, mask: Incomplete | None = ..., text_buf_size: int = ... +) -> tuple[array[int], list[Incomplete]]: ... +def PackLVCOLUMN( + fmt: Incomplete | None = ..., + cx: Incomplete | None = ..., + text: Incomplete | None = ..., + subItem: Incomplete | None = ..., + image: Incomplete | None = ..., + order: Incomplete | None = ..., +) -> tuple[array[int], list[Incomplete]]: ... + +class _LVCOLUMN(NamedTuple): + fmt: Incomplete | None + cx: Incomplete | None + text: Incomplete | None + subItem: Incomplete | None + image: Incomplete | None + order: Incomplete | None + +def UnpackLVCOLUMN(lparam: ReadableBuffer) -> _LVCOLUMN: ... +def EmptyLVCOLUMN(mask: Incomplete | None = ..., text_buf_size: int = ...) -> tuple[array[int], list[Incomplete]]: ... +def PackLVHITTEST(pt) -> tuple[array[int], None]: ... 
+ +class _LVHITTEST(NamedTuple): + pt: tuple[Incomplete, Incomplete] + flags: Incomplete + item: Incomplete + subitem: Incomplete + +def UnpackLVHITTEST(buf: ReadableBuffer) -> tuple[tuple[Incomplete, Incomplete], Incomplete, Incomplete, Incomplete]: ... +def PackHDITEM( + cxy: Incomplete | None = ..., + text: Incomplete | None = ..., + hbm: Incomplete | None = ..., + fmt: Incomplete | None = ..., + param: Incomplete | None = ..., + image: Incomplete | None = ..., + order: Incomplete | None = ..., +) -> tuple[array[int], list[Incomplete]]: ... +def PackDEV_BROADCAST(devicetype, rest_fmt, rest_data, extra_data=...) -> bytes: ... +def PackDEV_BROADCAST_HANDLE(handle, hdevnotify: int = ..., guid=..., name_offset: int = ..., data=...) -> bytes: ... +def PackDEV_BROADCAST_VOLUME(unitmask, flags) -> bytes: ... +def PackDEV_BROADCAST_DEVICEINTERFACE(classguid, name: str = ...) -> bytes: ... + +class DEV_BROADCAST_INFO: + devicetype: Incomplete + def __init__(self, devicetype, **kw) -> None: ... + +def UnpackDEV_BROADCAST(lparam: int) -> DEV_BROADCAST_INFO | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32inetcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32inetcon.pyi new file mode 100644 index 00000000..10e562a2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32inetcon.pyi @@ -0,0 +1,989 @@ +INTERNET_INVALID_PORT_NUMBER: int +INTERNET_DEFAULT_FTP_PORT: int +INTERNET_DEFAULT_GOPHER_PORT: int +INTERNET_DEFAULT_HTTP_PORT: int +INTERNET_DEFAULT_HTTPS_PORT: int +INTERNET_DEFAULT_SOCKS_PORT: int +INTERNET_MAX_HOST_NAME_LENGTH: int +INTERNET_MAX_USER_NAME_LENGTH: int +INTERNET_MAX_PASSWORD_LENGTH: int +INTERNET_MAX_PORT_NUMBER_LENGTH: int +INTERNET_MAX_PORT_NUMBER_VALUE: int +INTERNET_MAX_PATH_LENGTH: int +INTERNET_MAX_SCHEME_LENGTH: int +INTERNET_KEEP_ALIVE_ENABLED: int +INTERNET_KEEP_ALIVE_DISABLED: int +INTERNET_REQFLAG_FROM_CACHE: int +INTERNET_REQFLAG_ASYNC: int +INTERNET_REQFLAG_VIA_PROXY: int +INTERNET_REQFLAG_NO_HEADERS: int +INTERNET_REQFLAG_PASSIVE: int +INTERNET_REQFLAG_CACHE_WRITE_DISABLED: int +INTERNET_REQFLAG_NET_TIMEOUT: int +INTERNET_FLAG_RELOAD: int +INTERNET_FLAG_RAW_DATA: int +INTERNET_FLAG_EXISTING_CONNECT: int +INTERNET_FLAG_ASYNC: int +INTERNET_FLAG_PASSIVE: int +INTERNET_FLAG_NO_CACHE_WRITE: int +INTERNET_FLAG_DONT_CACHE: int +INTERNET_FLAG_MAKE_PERSISTENT: int +INTERNET_FLAG_FROM_CACHE: int +INTERNET_FLAG_OFFLINE: int +INTERNET_FLAG_SECURE: int +INTERNET_FLAG_KEEP_CONNECTION: int +INTERNET_FLAG_NO_AUTO_REDIRECT: int +INTERNET_FLAG_READ_PREFETCH: int +INTERNET_FLAG_NO_COOKIES: int +INTERNET_FLAG_NO_AUTH: int +INTERNET_FLAG_RESTRICTED_ZONE: int +INTERNET_FLAG_CACHE_IF_NET_FAIL: int +INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP: int +INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS: int +INTERNET_FLAG_IGNORE_CERT_DATE_INVALID: int +INTERNET_FLAG_IGNORE_CERT_CN_INVALID: int +INTERNET_FLAG_RESYNCHRONIZE: int +INTERNET_FLAG_HYPERLINK: int +INTERNET_FLAG_NO_UI: int +INTERNET_FLAG_PRAGMA_NOCACHE: int +INTERNET_FLAG_CACHE_ASYNC: int +INTERNET_FLAG_FORMS_SUBMIT: int +INTERNET_FLAG_FWD_BACK: int +INTERNET_FLAG_NEED_FILE: int +INTERNET_FLAG_MUST_CACHE_REQUEST: int +SECURITY_INTERNET_MASK: int +INTERNET_ERROR_MASK_INSERT_CDROM: int +INTERNET_ERROR_MASK_COMBINED_SEC_CERT: int +INTERNET_ERROR_MASK_NEED_MSN_SSPI_PKG: int +INTERNET_ERROR_MASK_LOGIN_FAILURE_DISPLAY_ENTITY_BODY: int +WININET_API_FLAG_ASYNC: int 
+WININET_API_FLAG_SYNC: int +WININET_API_FLAG_USE_CONTEXT: int +INTERNET_NO_CALLBACK: int +IDSI_FLAG_KEEP_ALIVE: int +IDSI_FLAG_SECURE: int +IDSI_FLAG_PROXY: int +IDSI_FLAG_TUNNEL: int +INTERNET_PER_CONN_FLAGS: int +INTERNET_PER_CONN_PROXY_SERVER: int +INTERNET_PER_CONN_PROXY_BYPASS: int +INTERNET_PER_CONN_AUTOCONFIG_URL: int +INTERNET_PER_CONN_AUTODISCOVERY_FLAGS: int +INTERNET_PER_CONN_AUTOCONFIG_SECONDARY_URL: int +INTERNET_PER_CONN_AUTOCONFIG_RELOAD_DELAY_MINS: int +INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_TIME: int +INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_URL: int +PROXY_TYPE_DIRECT: int +PROXY_TYPE_PROXY: int +PROXY_TYPE_AUTO_PROXY_URL: int +PROXY_TYPE_AUTO_DETECT: int +AUTO_PROXY_FLAG_USER_SET: int +AUTO_PROXY_FLAG_ALWAYS_DETECT: int +AUTO_PROXY_FLAG_DETECTION_RUN: int +AUTO_PROXY_FLAG_MIGRATED: int +AUTO_PROXY_FLAG_DONT_CACHE_PROXY_RESULT: int +AUTO_PROXY_FLAG_CACHE_INIT_RUN: int +AUTO_PROXY_FLAG_DETECTION_SUSPECT: int +ISO_FORCE_DISCONNECTED: int +INTERNET_RFC1123_FORMAT: int +INTERNET_RFC1123_BUFSIZE: int +ICU_ESCAPE: int +ICU_USERNAME: int +ICU_NO_ENCODE: int +ICU_DECODE: int +ICU_NO_META: int +ICU_ENCODE_SPACES_ONLY: int +ICU_BROWSER_MODE: int +ICU_ENCODE_PERCENT: int +INTERNET_OPEN_TYPE_PRECONFIG: int +INTERNET_OPEN_TYPE_DIRECT: int +INTERNET_OPEN_TYPE_PROXY: int +INTERNET_OPEN_TYPE_PRECONFIG_WITH_NO_AUTOPROXY: int +PRE_CONFIG_INTERNET_ACCESS: int +LOCAL_INTERNET_ACCESS: int +CERN_PROXY_INTERNET_ACCESS: int +INTERNET_SERVICE_FTP: int +INTERNET_SERVICE_GOPHER: int +INTERNET_SERVICE_HTTP: int +IRF_ASYNC: int +IRF_SYNC: int +IRF_USE_CONTEXT: int +IRF_NO_WAIT: int +ISO_GLOBAL: int +ISO_REGISTRY: int +ISO_VALID_FLAGS: int +INTERNET_OPTION_CALLBACK: int +INTERNET_OPTION_CONNECT_TIMEOUT: int +INTERNET_OPTION_CONNECT_RETRIES: int +INTERNET_OPTION_CONNECT_BACKOFF: int +INTERNET_OPTION_SEND_TIMEOUT: int +INTERNET_OPTION_CONTROL_SEND_TIMEOUT: int +INTERNET_OPTION_RECEIVE_TIMEOUT: int +INTERNET_OPTION_CONTROL_RECEIVE_TIMEOUT: int +INTERNET_OPTION_DATA_SEND_TIMEOUT: int +INTERNET_OPTION_DATA_RECEIVE_TIMEOUT: int +INTERNET_OPTION_HANDLE_TYPE: int +INTERNET_OPTION_READ_BUFFER_SIZE: int +INTERNET_OPTION_WRITE_BUFFER_SIZE: int +INTERNET_OPTION_ASYNC_ID: int +INTERNET_OPTION_ASYNC_PRIORITY: int +INTERNET_OPTION_PARENT_HANDLE: int +INTERNET_OPTION_KEEP_CONNECTION: int +INTERNET_OPTION_REQUEST_FLAGS: int +INTERNET_OPTION_EXTENDED_ERROR: int +INTERNET_OPTION_OFFLINE_MODE: int +INTERNET_OPTION_CACHE_STREAM_HANDLE: int +INTERNET_OPTION_USERNAME: int +INTERNET_OPTION_PASSWORD: int +INTERNET_OPTION_ASYNC: int +INTERNET_OPTION_SECURITY_FLAGS: int +INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT: int +INTERNET_OPTION_DATAFILE_NAME: int +INTERNET_OPTION_URL: int +INTERNET_OPTION_SECURITY_CERTIFICATE: int +INTERNET_OPTION_SECURITY_KEY_BITNESS: int +INTERNET_OPTION_REFRESH: int +INTERNET_OPTION_PROXY: int +INTERNET_OPTION_SETTINGS_CHANGED: int +INTERNET_OPTION_VERSION: int +INTERNET_OPTION_USER_AGENT: int +INTERNET_OPTION_END_BROWSER_SESSION: int +INTERNET_OPTION_PROXY_USERNAME: int +INTERNET_OPTION_PROXY_PASSWORD: int +INTERNET_OPTION_CONTEXT_VALUE: int +INTERNET_OPTION_CONNECT_LIMIT: int +INTERNET_OPTION_SECURITY_SELECT_CLIENT_CERT: int +INTERNET_OPTION_POLICY: int +INTERNET_OPTION_DISCONNECTED_TIMEOUT: int +INTERNET_OPTION_CONNECTED_STATE: int +INTERNET_OPTION_IDLE_STATE: int +INTERNET_OPTION_OFFLINE_SEMANTICS: int +INTERNET_OPTION_SECONDARY_CACHE_KEY: int +INTERNET_OPTION_CALLBACK_FILTER: int +INTERNET_OPTION_CONNECT_TIME: int +INTERNET_OPTION_SEND_THROUGHPUT: int +INTERNET_OPTION_RECEIVE_THROUGHPUT: int 
+INTERNET_OPTION_REQUEST_PRIORITY: int +INTERNET_OPTION_HTTP_VERSION: int +INTERNET_OPTION_RESET_URLCACHE_SESSION: int +INTERNET_OPTION_ERROR_MASK: int +INTERNET_OPTION_FROM_CACHE_TIMEOUT: int +INTERNET_OPTION_BYPASS_EDITED_ENTRY: int +INTERNET_OPTION_DIAGNOSTIC_SOCKET_INFO: int +INTERNET_OPTION_CODEPAGE: int +INTERNET_OPTION_CACHE_TIMESTAMPS: int +INTERNET_OPTION_DISABLE_AUTODIAL: int +INTERNET_OPTION_MAX_CONNS_PER_SERVER: int +INTERNET_OPTION_MAX_CONNS_PER_1_0_SERVER: int +INTERNET_OPTION_PER_CONNECTION_OPTION: int +INTERNET_OPTION_DIGEST_AUTH_UNLOAD: int +INTERNET_OPTION_IGNORE_OFFLINE: int +INTERNET_OPTION_IDENTITY: int +INTERNET_OPTION_REMOVE_IDENTITY: int +INTERNET_OPTION_ALTER_IDENTITY: int +INTERNET_OPTION_SUPPRESS_BEHAVIOR: int +INTERNET_OPTION_AUTODIAL_MODE: int +INTERNET_OPTION_AUTODIAL_CONNECTION: int +INTERNET_OPTION_CLIENT_CERT_CONTEXT: int +INTERNET_OPTION_AUTH_FLAGS: int +INTERNET_OPTION_COOKIES_3RD_PARTY: int +INTERNET_OPTION_DISABLE_PASSPORT_AUTH: int +INTERNET_OPTION_SEND_UTF8_SERVERNAME_TO_PROXY: int +INTERNET_OPTION_EXEMPT_CONNECTION_LIMIT: int +INTERNET_OPTION_ENABLE_PASSPORT_AUTH: int +INTERNET_OPTION_HIBERNATE_INACTIVE_WORKER_THREADS: int +INTERNET_OPTION_ACTIVATE_WORKER_THREADS: int +INTERNET_OPTION_RESTORE_WORKER_THREAD_DEFAULTS: int +INTERNET_OPTION_SOCKET_SEND_BUFFER_LENGTH: int +INTERNET_OPTION_PROXY_SETTINGS_CHANGED: int +INTERNET_FIRST_OPTION: int +INTERNET_LAST_OPTION: int +INTERNET_PRIORITY_FOREGROUND: int +INTERNET_HANDLE_TYPE_INTERNET: int +INTERNET_HANDLE_TYPE_CONNECT_FTP: int +INTERNET_HANDLE_TYPE_CONNECT_GOPHER: int +INTERNET_HANDLE_TYPE_CONNECT_HTTP: int +INTERNET_HANDLE_TYPE_FTP_FIND: int +INTERNET_HANDLE_TYPE_FTP_FIND_HTML: int +INTERNET_HANDLE_TYPE_FTP_FILE: int +INTERNET_HANDLE_TYPE_FTP_FILE_HTML: int +INTERNET_HANDLE_TYPE_GOPHER_FIND: int +INTERNET_HANDLE_TYPE_GOPHER_FIND_HTML: int +INTERNET_HANDLE_TYPE_GOPHER_FILE: int +INTERNET_HANDLE_TYPE_GOPHER_FILE_HTML: int +INTERNET_HANDLE_TYPE_HTTP_REQUEST: int +INTERNET_HANDLE_TYPE_FILE_REQUEST: int +AUTH_FLAG_DISABLE_NEGOTIATE: int +AUTH_FLAG_ENABLE_NEGOTIATE: int +SECURITY_FLAG_SECURE: int +SECURITY_FLAG_STRENGTH_WEAK: int +SECURITY_FLAG_STRENGTH_MEDIUM: int +SECURITY_FLAG_STRENGTH_STRONG: int +SECURITY_FLAG_UNKNOWNBIT: int +SECURITY_FLAG_FORTEZZA: int +SECURITY_FLAG_NORMALBITNESS: int +SECURITY_FLAG_SSL: int +SECURITY_FLAG_SSL3: int +SECURITY_FLAG_PCT: int +SECURITY_FLAG_PCT4: int +SECURITY_FLAG_IETFSSL4: int +SECURITY_FLAG_40BIT: int +SECURITY_FLAG_128BIT: int +SECURITY_FLAG_56BIT: int +SECURITY_FLAG_IGNORE_REVOCATION: int +SECURITY_FLAG_IGNORE_UNKNOWN_CA: int +SECURITY_FLAG_IGNORE_WRONG_USAGE: int +SECURITY_FLAG_IGNORE_CERT_CN_INVALID: int +SECURITY_FLAG_IGNORE_CERT_DATE_INVALID: int +SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTPS: int +SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTP: int +SECURITY_SET_MASK: int +AUTODIAL_MODE_NEVER: int +AUTODIAL_MODE_ALWAYS: int +AUTODIAL_MODE_NO_NETWORK_PRESENT: int +INTERNET_STATUS_RESOLVING_NAME: int +INTERNET_STATUS_NAME_RESOLVED: int +INTERNET_STATUS_CONNECTING_TO_SERVER: int +INTERNET_STATUS_CONNECTED_TO_SERVER: int +INTERNET_STATUS_SENDING_REQUEST: int +INTERNET_STATUS_REQUEST_SENT: int +INTERNET_STATUS_RECEIVING_RESPONSE: int +INTERNET_STATUS_RESPONSE_RECEIVED: int +INTERNET_STATUS_CTL_RESPONSE_RECEIVED: int +INTERNET_STATUS_PREFETCH: int +INTERNET_STATUS_CLOSING_CONNECTION: int +INTERNET_STATUS_CONNECTION_CLOSED: int +INTERNET_STATUS_HANDLE_CREATED: int +INTERNET_STATUS_HANDLE_CLOSING: int +INTERNET_STATUS_DETECTING_PROXY: int +INTERNET_STATUS_REQUEST_COMPLETE: int 
+INTERNET_STATUS_REDIRECT: int +INTERNET_STATUS_INTERMEDIATE_RESPONSE: int +INTERNET_STATUS_USER_INPUT_REQUIRED: int +INTERNET_STATUS_STATE_CHANGE: int +INTERNET_STATUS_COOKIE_SENT: int +INTERNET_STATUS_COOKIE_RECEIVED: int +INTERNET_STATUS_PRIVACY_IMPACTED: int +INTERNET_STATUS_P3P_HEADER: int +INTERNET_STATUS_P3P_POLICYREF: int +INTERNET_STATUS_COOKIE_HISTORY: int +INTERNET_STATE_CONNECTED: int +INTERNET_STATE_DISCONNECTED: int +INTERNET_STATE_DISCONNECTED_BY_USER: int +INTERNET_STATE_IDLE: int +INTERNET_STATE_BUSY: int +FTP_TRANSFER_TYPE_UNKNOWN: int +FTP_TRANSFER_TYPE_ASCII: int +FTP_TRANSFER_TYPE_BINARY: int +FTP_TRANSFER_TYPE_MASK: int +MAX_GOPHER_DISPLAY_TEXT: int +MAX_GOPHER_SELECTOR_TEXT: int +MAX_GOPHER_HOST_NAME: int +MAX_GOPHER_LOCATOR_LENGTH: int +GOPHER_TYPE_TEXT_FILE: int +GOPHER_TYPE_DIRECTORY: int +GOPHER_TYPE_CSO: int +GOPHER_TYPE_ERROR: int +GOPHER_TYPE_MAC_BINHEX: int +GOPHER_TYPE_DOS_ARCHIVE: int +GOPHER_TYPE_UNIX_UUENCODED: int +GOPHER_TYPE_INDEX_SERVER: int +GOPHER_TYPE_TELNET: int +GOPHER_TYPE_BINARY: int +GOPHER_TYPE_REDUNDANT: int +GOPHER_TYPE_TN3270: int +GOPHER_TYPE_GIF: int +GOPHER_TYPE_IMAGE: int +GOPHER_TYPE_BITMAP: int +GOPHER_TYPE_MOVIE: int +GOPHER_TYPE_SOUND: int +GOPHER_TYPE_HTML: int +GOPHER_TYPE_PDF: int +GOPHER_TYPE_CALENDAR: int +GOPHER_TYPE_INLINE: int +GOPHER_TYPE_UNKNOWN: int +GOPHER_TYPE_ASK: int +GOPHER_TYPE_GOPHER_PLUS: int +GOPHER_TYPE_FILE_MASK: int +MAX_GOPHER_CATEGORY_NAME: int +MAX_GOPHER_ATTRIBUTE_NAME: int +MIN_GOPHER_ATTRIBUTE_LENGTH: int +GOPHER_ATTRIBUTE_ID_BASE: int +GOPHER_CATEGORY_ID_ALL: int +GOPHER_CATEGORY_ID_INFO: int +GOPHER_CATEGORY_ID_ADMIN: int +GOPHER_CATEGORY_ID_VIEWS: int +GOPHER_CATEGORY_ID_ABSTRACT: int +GOPHER_CATEGORY_ID_VERONICA: int +GOPHER_CATEGORY_ID_ASK: int +GOPHER_CATEGORY_ID_UNKNOWN: int +GOPHER_ATTRIBUTE_ID_ALL: int +GOPHER_ATTRIBUTE_ID_ADMIN: int +GOPHER_ATTRIBUTE_ID_MOD_DATE: int +GOPHER_ATTRIBUTE_ID_TTL: int +GOPHER_ATTRIBUTE_ID_SCORE: int +GOPHER_ATTRIBUTE_ID_RANGE: int +GOPHER_ATTRIBUTE_ID_SITE: int +GOPHER_ATTRIBUTE_ID_ORG: int +GOPHER_ATTRIBUTE_ID_LOCATION: int +GOPHER_ATTRIBUTE_ID_GEOG: int +GOPHER_ATTRIBUTE_ID_TIMEZONE: int +GOPHER_ATTRIBUTE_ID_PROVIDER: int +GOPHER_ATTRIBUTE_ID_VERSION: int +GOPHER_ATTRIBUTE_ID_ABSTRACT: int +GOPHER_ATTRIBUTE_ID_VIEW: int +GOPHER_ATTRIBUTE_ID_TREEWALK: int +GOPHER_ATTRIBUTE_ID_UNKNOWN: int +HTTP_MAJOR_VERSION: int +HTTP_MINOR_VERSION: int +HTTP_VERSIONA: str +HTTP_VERSION: str +HTTP_QUERY_MIME_VERSION: int +HTTP_QUERY_CONTENT_TYPE: int +HTTP_QUERY_CONTENT_TRANSFER_ENCODING: int +HTTP_QUERY_CONTENT_ID: int +HTTP_QUERY_CONTENT_DESCRIPTION: int +HTTP_QUERY_CONTENT_LENGTH: int +HTTP_QUERY_CONTENT_LANGUAGE: int +HTTP_QUERY_ALLOW: int +HTTP_QUERY_PUBLIC: int +HTTP_QUERY_DATE: int +HTTP_QUERY_EXPIRES: int +HTTP_QUERY_LAST_MODIFIED: int +HTTP_QUERY_MESSAGE_ID: int +HTTP_QUERY_URI: int +HTTP_QUERY_DERIVED_FROM: int +HTTP_QUERY_COST: int +HTTP_QUERY_LINK: int +HTTP_QUERY_PRAGMA: int +HTTP_QUERY_VERSION: int +HTTP_QUERY_STATUS_CODE: int +HTTP_QUERY_STATUS_TEXT: int +HTTP_QUERY_RAW_HEADERS: int +HTTP_QUERY_RAW_HEADERS_CRLF: int +HTTP_QUERY_CONNECTION: int +HTTP_QUERY_ACCEPT: int +HTTP_QUERY_ACCEPT_CHARSET: int +HTTP_QUERY_ACCEPT_ENCODING: int +HTTP_QUERY_ACCEPT_LANGUAGE: int +HTTP_QUERY_AUTHORIZATION: int +HTTP_QUERY_CONTENT_ENCODING: int +HTTP_QUERY_FORWARDED: int +HTTP_QUERY_FROM: int +HTTP_QUERY_IF_MODIFIED_SINCE: int +HTTP_QUERY_LOCATION: int +HTTP_QUERY_ORIG_URI: int +HTTP_QUERY_REFERER: int +HTTP_QUERY_RETRY_AFTER: int +HTTP_QUERY_SERVER: int +HTTP_QUERY_TITLE: int 
+HTTP_QUERY_USER_AGENT: int +HTTP_QUERY_WWW_AUTHENTICATE: int +HTTP_QUERY_PROXY_AUTHENTICATE: int +HTTP_QUERY_ACCEPT_RANGES: int +HTTP_QUERY_SET_COOKIE: int +HTTP_QUERY_COOKIE: int +HTTP_QUERY_REQUEST_METHOD: int +HTTP_QUERY_REFRESH: int +HTTP_QUERY_CONTENT_DISPOSITION: int +HTTP_QUERY_AGE: int +HTTP_QUERY_CACHE_CONTROL: int +HTTP_QUERY_CONTENT_BASE: int +HTTP_QUERY_CONTENT_LOCATION: int +HTTP_QUERY_CONTENT_MD5: int +HTTP_QUERY_CONTENT_RANGE: int +HTTP_QUERY_ETAG: int +HTTP_QUERY_HOST: int +HTTP_QUERY_IF_MATCH: int +HTTP_QUERY_IF_NONE_MATCH: int +HTTP_QUERY_IF_RANGE: int +HTTP_QUERY_IF_UNMODIFIED_SINCE: int +HTTP_QUERY_MAX_FORWARDS: int +HTTP_QUERY_PROXY_AUTHORIZATION: int +HTTP_QUERY_RANGE: int +HTTP_QUERY_TRANSFER_ENCODING: int +HTTP_QUERY_UPGRADE: int +HTTP_QUERY_VARY: int +HTTP_QUERY_VIA: int +HTTP_QUERY_WARNING: int +HTTP_QUERY_EXPECT: int +HTTP_QUERY_PROXY_CONNECTION: int +HTTP_QUERY_UNLESS_MODIFIED_SINCE: int +HTTP_QUERY_ECHO_REQUEST: int +HTTP_QUERY_ECHO_REPLY: int +HTTP_QUERY_ECHO_HEADERS: int +HTTP_QUERY_ECHO_HEADERS_CRLF: int +HTTP_QUERY_PROXY_SUPPORT: int +HTTP_QUERY_AUTHENTICATION_INFO: int +HTTP_QUERY_PASSPORT_URLS: int +HTTP_QUERY_PASSPORT_CONFIG: int +HTTP_QUERY_MAX: int +HTTP_QUERY_CUSTOM: int +HTTP_QUERY_FLAG_REQUEST_HEADERS: int +HTTP_QUERY_FLAG_SYSTEMTIME: int +HTTP_QUERY_FLAG_NUMBER: int +HTTP_QUERY_FLAG_COALESCE: int +HTTP_QUERY_MODIFIER_FLAGS_MASK: int +HTTP_QUERY_HEADER_MASK: int +HTTP_STATUS_CONTINUE: int +HTTP_STATUS_SWITCH_PROTOCOLS: int +HTTP_STATUS_OK: int +HTTP_STATUS_CREATED: int +HTTP_STATUS_ACCEPTED: int +HTTP_STATUS_PARTIAL: int +HTTP_STATUS_NO_CONTENT: int +HTTP_STATUS_RESET_CONTENT: int +HTTP_STATUS_PARTIAL_CONTENT: int +HTTP_STATUS_AMBIGUOUS: int +HTTP_STATUS_MOVED: int +HTTP_STATUS_REDIRECT: int +HTTP_STATUS_REDIRECT_METHOD: int +HTTP_STATUS_NOT_MODIFIED: int +HTTP_STATUS_USE_PROXY: int +HTTP_STATUS_REDIRECT_KEEP_VERB: int +HTTP_STATUS_BAD_REQUEST: int +HTTP_STATUS_DENIED: int +HTTP_STATUS_PAYMENT_REQ: int +HTTP_STATUS_FORBIDDEN: int +HTTP_STATUS_NOT_FOUND: int +HTTP_STATUS_BAD_METHOD: int +HTTP_STATUS_NONE_ACCEPTABLE: int +HTTP_STATUS_PROXY_AUTH_REQ: int +HTTP_STATUS_REQUEST_TIMEOUT: int +HTTP_STATUS_CONFLICT: int +HTTP_STATUS_GONE: int +HTTP_STATUS_LENGTH_REQUIRED: int +HTTP_STATUS_PRECOND_FAILED: int +HTTP_STATUS_REQUEST_TOO_LARGE: int +HTTP_STATUS_URI_TOO_LONG: int +HTTP_STATUS_UNSUPPORTED_MEDIA: int +HTTP_STATUS_RETRY_WITH: int +HTTP_STATUS_SERVER_ERROR: int +HTTP_STATUS_NOT_SUPPORTED: int +HTTP_STATUS_BAD_GATEWAY: int +HTTP_STATUS_SERVICE_UNAVAIL: int +HTTP_STATUS_GATEWAY_TIMEOUT: int +HTTP_STATUS_VERSION_NOT_SUP: int +HTTP_STATUS_FIRST: int +HTTP_STATUS_LAST: int +HTTP_ADDREQ_INDEX_MASK: int +HTTP_ADDREQ_FLAGS_MASK: int +HTTP_ADDREQ_FLAG_ADD_IF_NEW: int +HTTP_ADDREQ_FLAG_ADD: int +HTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA: int +HTTP_ADDREQ_FLAG_COALESCE_WITH_SEMICOLON: int +HTTP_ADDREQ_FLAG_COALESCE: int +HTTP_ADDREQ_FLAG_REPLACE: int +HSR_ASYNC: int +HSR_SYNC: int +HSR_USE_CONTEXT: int +HSR_INITIATE: int +HSR_DOWNLOAD: int +HSR_CHUNKED: int +INTERNET_COOKIE_IS_SECURE: int +INTERNET_COOKIE_IS_SESSION: int +INTERNET_COOKIE_THIRD_PARTY: int +INTERNET_COOKIE_PROMPT_REQUIRED: int +INTERNET_COOKIE_EVALUATE_P3P: int +INTERNET_COOKIE_APPLY_P3P: int +INTERNET_COOKIE_P3P_ENABLED: int +INTERNET_COOKIE_IS_RESTRICTED: int +INTERNET_COOKIE_IE6: int +INTERNET_COOKIE_IS_LEGACY: int +FLAG_ICC_FORCE_CONNECTION: int +FLAGS_ERROR_UI_FILTER_FOR_ERRORS: int +FLAGS_ERROR_UI_FLAGS_CHANGE_OPTIONS: int +FLAGS_ERROR_UI_FLAGS_GENERATE_DATA: int +FLAGS_ERROR_UI_FLAGS_NO_UI: int 
+FLAGS_ERROR_UI_SERIALIZE_DIALOGS: int +INTERNET_ERROR_BASE: int +ERROR_INTERNET_OUT_OF_HANDLES: int +ERROR_INTERNET_TIMEOUT: int +ERROR_INTERNET_EXTENDED_ERROR: int +ERROR_INTERNET_INTERNAL_ERROR: int +ERROR_INTERNET_INVALID_URL: int +ERROR_INTERNET_UNRECOGNIZED_SCHEME: int +ERROR_INTERNET_NAME_NOT_RESOLVED: int +ERROR_INTERNET_PROTOCOL_NOT_FOUND: int +ERROR_INTERNET_INVALID_OPTION: int +ERROR_INTERNET_BAD_OPTION_LENGTH: int +ERROR_INTERNET_OPTION_NOT_SETTABLE: int +ERROR_INTERNET_SHUTDOWN: int +ERROR_INTERNET_INCORRECT_USER_NAME: int +ERROR_INTERNET_INCORRECT_PASSWORD: int +ERROR_INTERNET_LOGIN_FAILURE: int +ERROR_INTERNET_INVALID_OPERATION: int +ERROR_INTERNET_OPERATION_CANCELLED: int +ERROR_INTERNET_INCORRECT_HANDLE_TYPE: int +ERROR_INTERNET_INCORRECT_HANDLE_STATE: int +ERROR_INTERNET_NOT_PROXY_REQUEST: int +ERROR_INTERNET_REGISTRY_VALUE_NOT_FOUND: int +ERROR_INTERNET_BAD_REGISTRY_PARAMETER: int +ERROR_INTERNET_NO_DIRECT_ACCESS: int +ERROR_INTERNET_NO_CONTEXT: int +ERROR_INTERNET_NO_CALLBACK: int +ERROR_INTERNET_REQUEST_PENDING: int +ERROR_INTERNET_INCORRECT_FORMAT: int +ERROR_INTERNET_ITEM_NOT_FOUND: int +ERROR_INTERNET_CANNOT_CONNECT: int +ERROR_INTERNET_CONNECTION_ABORTED: int +ERROR_INTERNET_CONNECTION_RESET: int +ERROR_INTERNET_FORCE_RETRY: int +ERROR_INTERNET_INVALID_PROXY_REQUEST: int +ERROR_INTERNET_NEED_UI: int +ERROR_INTERNET_HANDLE_EXISTS: int +ERROR_INTERNET_SEC_CERT_DATE_INVALID: int +ERROR_INTERNET_SEC_CERT_CN_INVALID: int +ERROR_INTERNET_HTTP_TO_HTTPS_ON_REDIR: int +ERROR_INTERNET_HTTPS_TO_HTTP_ON_REDIR: int +ERROR_INTERNET_MIXED_SECURITY: int +ERROR_INTERNET_CHG_POST_IS_NON_SECURE: int +ERROR_INTERNET_POST_IS_NON_SECURE: int +ERROR_INTERNET_CLIENT_AUTH_CERT_NEEDED: int +ERROR_INTERNET_INVALID_CA: int +ERROR_INTERNET_CLIENT_AUTH_NOT_SETUP: int +ERROR_INTERNET_ASYNC_THREAD_FAILED: int +ERROR_INTERNET_REDIRECT_SCHEME_CHANGE: int +ERROR_INTERNET_DIALOG_PENDING: int +ERROR_INTERNET_RETRY_DIALOG: int +ERROR_INTERNET_HTTPS_HTTP_SUBMIT_REDIR: int +ERROR_INTERNET_INSERT_CDROM: int +ERROR_INTERNET_FORTEZZA_LOGIN_NEEDED: int +ERROR_INTERNET_SEC_CERT_ERRORS: int +ERROR_INTERNET_SEC_CERT_NO_REV: int +ERROR_INTERNET_SEC_CERT_REV_FAILED: int +ERROR_FTP_TRANSFER_IN_PROGRESS: int +ERROR_FTP_DROPPED: int +ERROR_FTP_NO_PASSIVE_MODE: int +ERROR_GOPHER_PROTOCOL_ERROR: int +ERROR_GOPHER_NOT_FILE: int +ERROR_GOPHER_DATA_ERROR: int +ERROR_GOPHER_END_OF_DATA: int +ERROR_GOPHER_INVALID_LOCATOR: int +ERROR_GOPHER_INCORRECT_LOCATOR_TYPE: int +ERROR_GOPHER_NOT_GOPHER_PLUS: int +ERROR_GOPHER_ATTRIBUTE_NOT_FOUND: int +ERROR_GOPHER_UNKNOWN_LOCATOR: int +ERROR_HTTP_HEADER_NOT_FOUND: int +ERROR_HTTP_DOWNLEVEL_SERVER: int +ERROR_HTTP_INVALID_SERVER_RESPONSE: int +ERROR_HTTP_INVALID_HEADER: int +ERROR_HTTP_INVALID_QUERY_REQUEST: int +ERROR_HTTP_HEADER_ALREADY_EXISTS: int +ERROR_HTTP_REDIRECT_FAILED: int +ERROR_HTTP_NOT_REDIRECTED: int +ERROR_HTTP_COOKIE_NEEDS_CONFIRMATION: int +ERROR_HTTP_COOKIE_DECLINED: int +ERROR_HTTP_REDIRECT_NEEDS_CONFIRMATION: int +ERROR_INTERNET_SECURITY_CHANNEL_ERROR: int +ERROR_INTERNET_UNABLE_TO_CACHE_FILE: int +ERROR_INTERNET_TCPIP_NOT_INSTALLED: int +ERROR_INTERNET_DISCONNECTED: int +ERROR_INTERNET_SERVER_UNREACHABLE: int +ERROR_INTERNET_PROXY_SERVER_UNREACHABLE: int +ERROR_INTERNET_BAD_AUTO_PROXY_SCRIPT: int +ERROR_INTERNET_UNABLE_TO_DOWNLOAD_SCRIPT: int +ERROR_INTERNET_SEC_INVALID_CERT: int +ERROR_INTERNET_SEC_CERT_REVOKED: int +ERROR_INTERNET_FAILED_DUETOSECURITYCHECK: int +ERROR_INTERNET_NOT_INITIALIZED: int +ERROR_INTERNET_NEED_MSN_SSPI_PKG: int 
+ERROR_INTERNET_LOGIN_FAILURE_DISPLAY_ENTITY_BODY: int +INTERNET_ERROR_LAST: int +NORMAL_CACHE_ENTRY: int +STICKY_CACHE_ENTRY: int +EDITED_CACHE_ENTRY: int +TRACK_OFFLINE_CACHE_ENTRY: int +TRACK_ONLINE_CACHE_ENTRY: int +SPARSE_CACHE_ENTRY: int +COOKIE_CACHE_ENTRY: int +URLHISTORY_CACHE_ENTRY: int +URLCACHE_FIND_DEFAULT_FILTER: int +CACHEGROUP_ATTRIBUTE_GET_ALL: int +CACHEGROUP_ATTRIBUTE_BASIC: int +CACHEGROUP_ATTRIBUTE_FLAG: int +CACHEGROUP_ATTRIBUTE_TYPE: int +CACHEGROUP_ATTRIBUTE_QUOTA: int +CACHEGROUP_ATTRIBUTE_GROUPNAME: int +CACHEGROUP_ATTRIBUTE_STORAGE: int +CACHEGROUP_FLAG_NONPURGEABLE: int +CACHEGROUP_FLAG_GIDONLY: int +CACHEGROUP_FLAG_FLUSHURL_ONDELETE: int +CACHEGROUP_SEARCH_ALL: int +CACHEGROUP_SEARCH_BYURL: int +CACHEGROUP_TYPE_INVALID: int +CACHEGROUP_READWRITE_MASK: int +GROUPNAME_MAX_LENGTH: int +GROUP_OWNER_STORAGE_SIZE: int +CACHE_ENTRY_ATTRIBUTE_FC: int +CACHE_ENTRY_HITRATE_FC: int +CACHE_ENTRY_MODTIME_FC: int +CACHE_ENTRY_EXPTIME_FC: int +CACHE_ENTRY_ACCTIME_FC: int +CACHE_ENTRY_SYNCTIME_FC: int +CACHE_ENTRY_HEADERINFO_FC: int +CACHE_ENTRY_EXEMPT_DELTA_FC: int +INTERNET_CACHE_GROUP_ADD: int +INTERNET_CACHE_GROUP_REMOVE: int +INTERNET_DIAL_FORCE_PROMPT: int +INTERNET_DIAL_SHOW_OFFLINE: int +INTERNET_DIAL_UNATTENDED: int +INTERENT_GOONLINE_REFRESH: int +INTERENT_GOONLINE_MASK: int +INTERNET_AUTODIAL_FORCE_ONLINE: int +INTERNET_AUTODIAL_FORCE_UNATTENDED: int +INTERNET_AUTODIAL_FAILIFSECURITYCHECK: int +INTERNET_AUTODIAL_OVERRIDE_NET_PRESENT: int +INTERNET_AUTODIAL_FLAGS_MASK: int +PROXY_AUTO_DETECT_TYPE_DHCP: int +PROXY_AUTO_DETECT_TYPE_DNS_A: int +INTERNET_CONNECTION_MODEM: int +INTERNET_CONNECTION_LAN: int +INTERNET_CONNECTION_PROXY: int +INTERNET_CONNECTION_MODEM_BUSY: int +INTERNET_RAS_INSTALLED: int +INTERNET_CONNECTION_OFFLINE: int +INTERNET_CONNECTION_CONFIGURED: int +INTERNET_CUSTOMDIAL_CONNECT: int +INTERNET_CUSTOMDIAL_UNATTENDED: int +INTERNET_CUSTOMDIAL_DISCONNECT: int +INTERNET_CUSTOMDIAL_SHOWOFFLINE: int +INTERNET_CUSTOMDIAL_SAFE_FOR_UNATTENDED: int +INTERNET_CUSTOMDIAL_WILL_SUPPLY_STATE: int +INTERNET_CUSTOMDIAL_CAN_HANGUP: int +INTERNET_DIALSTATE_DISCONNECTED: int +INTERNET_IDENTITY_FLAG_PRIVATE_CACHE: int +INTERNET_IDENTITY_FLAG_SHARED_CACHE: int +INTERNET_IDENTITY_FLAG_CLEAR_DATA: int +INTERNET_IDENTITY_FLAG_CLEAR_COOKIES: int +INTERNET_IDENTITY_FLAG_CLEAR_HISTORY: int +INTERNET_IDENTITY_FLAG_CLEAR_CONTENT: int +INTERNET_SUPPRESS_RESET_ALL: int +INTERNET_SUPPRESS_COOKIE_POLICY: int +INTERNET_SUPPRESS_COOKIE_POLICY_RESET: int +PRIVACY_TEMPLATE_NO_COOKIES: int +PRIVACY_TEMPLATE_HIGH: int +PRIVACY_TEMPLATE_MEDIUM_HIGH: int +PRIVACY_TEMPLATE_MEDIUM: int +PRIVACY_TEMPLATE_MEDIUM_LOW: int +PRIVACY_TEMPLATE_LOW: int +PRIVACY_TEMPLATE_CUSTOM: int +PRIVACY_TEMPLATE_ADVANCED: int +PRIVACY_TEMPLATE_MAX: int +PRIVACY_TYPE_FIRST_PARTY: int +PRIVACY_TYPE_THIRD_PARTY: int +INTERNET_DEFAULT_PORT: int +WINHTTP_FLAG_ASYNC: int +WINHTTP_FLAG_SECURE: int +WINHTTP_FLAG_ESCAPE_PERCENT: int +WINHTTP_FLAG_NULL_CODEPAGE: int +WINHTTP_FLAG_BYPASS_PROXY_CACHE: int +WINHTTP_FLAG_REFRESH: int +WINHTTP_FLAG_ESCAPE_DISABLE: int +WINHTTP_FLAG_ESCAPE_DISABLE_QUERY: int +SECURITY_FLAG_IGNORE_CERT_WRONG_USAGE: int +INTERNET_SCHEME_HTTP: int +INTERNET_SCHEME_HTTPS: int +WINHTTP_AUTOPROXY_AUTO_DETECT: int +WINHTTP_AUTOPROXY_CONFIG_URL: int +WINHTTP_AUTOPROXY_RUN_INPROCESS: int +WINHTTP_AUTOPROXY_RUN_OUTPROCESS_ONLY: int +WINHTTP_AUTO_DETECT_TYPE_DHCP: int +WINHTTP_AUTO_DETECT_TYPE_DNS_A: int +WINHTTP_TIME_FORMAT_BUFSIZE: int +ICU_ESCAPE_AUTHORITY: int +ICU_REJECT_USERPWD: int 
+WINHTTP_ACCESS_TYPE_DEFAULT_PROXY: int +WINHTTP_ACCESS_TYPE_NO_PROXY: int +WINHTTP_ACCESS_TYPE_NAMED_PROXY: int +WINHTTP_OPTION_CALLBACK: int +WINHTTP_OPTION_RESOLVE_TIMEOUT: int +WINHTTP_OPTION_CONNECT_TIMEOUT: int +WINHTTP_OPTION_CONNECT_RETRIES: int +WINHTTP_OPTION_SEND_TIMEOUT: int +WINHTTP_OPTION_RECEIVE_TIMEOUT: int +WINHTTP_OPTION_RECEIVE_RESPONSE_TIMEOUT: int +WINHTTP_OPTION_HANDLE_TYPE: int +WINHTTP_OPTION_READ_BUFFER_SIZE: int +WINHTTP_OPTION_WRITE_BUFFER_SIZE: int +WINHTTP_OPTION_PARENT_HANDLE: int +WINHTTP_OPTION_EXTENDED_ERROR: int +WINHTTP_OPTION_SECURITY_FLAGS: int +WINHTTP_OPTION_SECURITY_CERTIFICATE_STRUCT: int +WINHTTP_OPTION_URL: int +WINHTTP_OPTION_SECURITY_KEY_BITNESS: int +WINHTTP_OPTION_PROXY: int +WINHTTP_OPTION_USER_AGENT: int +WINHTTP_OPTION_CONTEXT_VALUE: int +WINHTTP_OPTION_CLIENT_CERT_CONTEXT: int +WINHTTP_OPTION_REQUEST_PRIORITY: int +WINHTTP_OPTION_HTTP_VERSION: int +WINHTTP_OPTION_DISABLE_FEATURE: int +WINHTTP_OPTION_CODEPAGE: int +WINHTTP_OPTION_MAX_CONNS_PER_SERVER: int +WINHTTP_OPTION_MAX_CONNS_PER_1_0_SERVER: int +WINHTTP_OPTION_AUTOLOGON_POLICY: int +WINHTTP_OPTION_SERVER_CERT_CONTEXT: int +WINHTTP_OPTION_ENABLE_FEATURE: int +WINHTTP_OPTION_WORKER_THREAD_COUNT: int +WINHTTP_OPTION_PASSPORT_COBRANDING_TEXT: int +WINHTTP_OPTION_PASSPORT_COBRANDING_URL: int +WINHTTP_OPTION_CONFIGURE_PASSPORT_AUTH: int +WINHTTP_OPTION_SECURE_PROTOCOLS: int +WINHTTP_OPTION_ENABLETRACING: int +WINHTTP_OPTION_PASSPORT_SIGN_OUT: int +WINHTTP_OPTION_PASSPORT_RETURN_URL: int +WINHTTP_OPTION_REDIRECT_POLICY: int +WINHTTP_OPTION_MAX_HTTP_AUTOMATIC_REDIRECTS: int +WINHTTP_OPTION_MAX_HTTP_STATUS_CONTINUE: int +WINHTTP_OPTION_MAX_RESPONSE_HEADER_SIZE: int +WINHTTP_OPTION_MAX_RESPONSE_DRAIN_SIZE: int +WINHTTP_OPTION_CONNECTION_INFO: int +WINHTTP_OPTION_SPN: int +WINHTTP_OPTION_GLOBAL_PROXY_CREDS: int +WINHTTP_OPTION_GLOBAL_SERVER_CREDS: int +WINHTTP_OPTION_UNLOAD_NOTIFY_EVENT: int +WINHTTP_OPTION_REJECT_USERPWD_IN_URL: int +WINHTTP_OPTION_USE_GLOBAL_SERVER_CREDENTIALS: int +WINHTTP_LAST_OPTION: int +WINHTTP_OPTION_USERNAME: int +WINHTTP_OPTION_PASSWORD: int +WINHTTP_OPTION_PROXY_USERNAME: int +WINHTTP_OPTION_PROXY_PASSWORD: int +WINHTTP_CONNS_PER_SERVER_UNLIMITED: int +WINHTTP_AUTOLOGON_SECURITY_LEVEL_MEDIUM: int +WINHTTP_AUTOLOGON_SECURITY_LEVEL_LOW: int +WINHTTP_AUTOLOGON_SECURITY_LEVEL_HIGH: int +WINHTTP_AUTOLOGON_SECURITY_LEVEL_DEFAULT: int +WINHTTP_OPTION_REDIRECT_POLICY_NEVER: int +WINHTTP_OPTION_REDIRECT_POLICY_DISALLOW_HTTPS_TO_HTTP: int +WINHTTP_OPTION_REDIRECT_POLICY_ALWAYS: int +WINHTTP_OPTION_REDIRECT_POLICY_LAST: int +WINHTTP_OPTION_REDIRECT_POLICY_DEFAULT: int +WINHTTP_DISABLE_PASSPORT_AUTH: int +WINHTTP_ENABLE_PASSPORT_AUTH: int +WINHTTP_DISABLE_PASSPORT_KEYRING: int +WINHTTP_ENABLE_PASSPORT_KEYRING: int +WINHTTP_DISABLE_COOKIES: int +WINHTTP_DISABLE_REDIRECTS: int +WINHTTP_DISABLE_AUTHENTICATION: int +WINHTTP_DISABLE_KEEP_ALIVE: int +WINHTTP_ENABLE_SSL_REVOCATION: int +WINHTTP_ENABLE_SSL_REVERT_IMPERSONATION: int +WINHTTP_DISABLE_SPN_SERVER_PORT: int +WINHTTP_ENABLE_SPN_SERVER_PORT: int +WINHTTP_OPTION_SPN_MASK: int +WINHTTP_HANDLE_TYPE_SESSION: int +WINHTTP_HANDLE_TYPE_CONNECT: int +WINHTTP_HANDLE_TYPE_REQUEST: int +WINHTTP_AUTH_SCHEME_BASIC: int +WINHTTP_AUTH_SCHEME_NTLM: int +WINHTTP_AUTH_SCHEME_PASSPORT: int +WINHTTP_AUTH_SCHEME_DIGEST: int +WINHTTP_AUTH_SCHEME_NEGOTIATE: int +WINHTTP_AUTH_TARGET_SERVER: int +WINHTTP_AUTH_TARGET_PROXY: int +WINHTTP_CALLBACK_STATUS_FLAG_CERT_REV_FAILED: int +WINHTTP_CALLBACK_STATUS_FLAG_INVALID_CERT: int 
+WINHTTP_CALLBACK_STATUS_FLAG_CERT_REVOKED: int +WINHTTP_CALLBACK_STATUS_FLAG_INVALID_CA: int +WINHTTP_CALLBACK_STATUS_FLAG_CERT_CN_INVALID: int +WINHTTP_CALLBACK_STATUS_FLAG_CERT_DATE_INVALID: int +WINHTTP_CALLBACK_STATUS_FLAG_CERT_WRONG_USAGE: int +WINHTTP_CALLBACK_STATUS_FLAG_SECURITY_CHANNEL_ERROR: int +WINHTTP_FLAG_SECURE_PROTOCOL_SSL2: int +WINHTTP_FLAG_SECURE_PROTOCOL_SSL3: int +WINHTTP_FLAG_SECURE_PROTOCOL_TLS1: int +WINHTTP_FLAG_SECURE_PROTOCOL_ALL: int +WINHTTP_CALLBACK_STATUS_RESOLVING_NAME: int +WINHTTP_CALLBACK_STATUS_NAME_RESOLVED: int +WINHTTP_CALLBACK_STATUS_CONNECTING_TO_SERVER: int +WINHTTP_CALLBACK_STATUS_CONNECTED_TO_SERVER: int +WINHTTP_CALLBACK_STATUS_SENDING_REQUEST: int +WINHTTP_CALLBACK_STATUS_REQUEST_SENT: int +WINHTTP_CALLBACK_STATUS_RECEIVING_RESPONSE: int +WINHTTP_CALLBACK_STATUS_RESPONSE_RECEIVED: int +WINHTTP_CALLBACK_STATUS_CLOSING_CONNECTION: int +WINHTTP_CALLBACK_STATUS_CONNECTION_CLOSED: int +WINHTTP_CALLBACK_STATUS_HANDLE_CREATED: int +WINHTTP_CALLBACK_STATUS_HANDLE_CLOSING: int +WINHTTP_CALLBACK_STATUS_DETECTING_PROXY: int +WINHTTP_CALLBACK_STATUS_REDIRECT: int +WINHTTP_CALLBACK_STATUS_INTERMEDIATE_RESPONSE: int +WINHTTP_CALLBACK_STATUS_SECURE_FAILURE: int +WINHTTP_CALLBACK_STATUS_HEADERS_AVAILABLE: int +WINHTTP_CALLBACK_STATUS_DATA_AVAILABLE: int +WINHTTP_CALLBACK_STATUS_READ_COMPLETE: int +WINHTTP_CALLBACK_STATUS_WRITE_COMPLETE: int +WINHTTP_CALLBACK_STATUS_REQUEST_ERROR: int +WINHTTP_CALLBACK_STATUS_SENDREQUEST_COMPLETE: int +API_RECEIVE_RESPONSE: int +API_QUERY_DATA_AVAILABLE: int +API_READ_DATA: int +API_WRITE_DATA: int +API_SEND_REQUEST: int +WINHTTP_CALLBACK_FLAG_RESOLVE_NAME: int +WINHTTP_CALLBACK_FLAG_CONNECT_TO_SERVER: int +WINHTTP_CALLBACK_FLAG_SEND_REQUEST: int +WINHTTP_CALLBACK_FLAG_RECEIVE_RESPONSE: int +WINHTTP_CALLBACK_FLAG_CLOSE_CONNECTION: int +WINHTTP_CALLBACK_FLAG_HANDLES: int +WINHTTP_CALLBACK_FLAG_DETECTING_PROXY: int +WINHTTP_CALLBACK_FLAG_REDIRECT: int +WINHTTP_CALLBACK_FLAG_INTERMEDIATE_RESPONSE: int +WINHTTP_CALLBACK_FLAG_SECURE_FAILURE: int +WINHTTP_CALLBACK_FLAG_SENDREQUEST_COMPLETE: int +WINHTTP_CALLBACK_FLAG_HEADERS_AVAILABLE: int +WINHTTP_CALLBACK_FLAG_DATA_AVAILABLE: int +WINHTTP_CALLBACK_FLAG_READ_COMPLETE: int +WINHTTP_CALLBACK_FLAG_WRITE_COMPLETE: int +WINHTTP_CALLBACK_FLAG_REQUEST_ERROR: int +WINHTTP_CALLBACK_FLAG_ALL_COMPLETIONS: int +WINHTTP_CALLBACK_FLAG_ALL_NOTIFICATIONS: int +WINHTTP_QUERY_MIME_VERSION: int +WINHTTP_QUERY_CONTENT_TYPE: int +WINHTTP_QUERY_CONTENT_TRANSFER_ENCODING: int +WINHTTP_QUERY_CONTENT_ID: int +WINHTTP_QUERY_CONTENT_DESCRIPTION: int +WINHTTP_QUERY_CONTENT_LENGTH: int +WINHTTP_QUERY_CONTENT_LANGUAGE: int +WINHTTP_QUERY_ALLOW: int +WINHTTP_QUERY_PUBLIC: int +WINHTTP_QUERY_DATE: int +WINHTTP_QUERY_EXPIRES: int +WINHTTP_QUERY_LAST_MODIFIED: int +WINHTTP_QUERY_MESSAGE_ID: int +WINHTTP_QUERY_URI: int +WINHTTP_QUERY_DERIVED_FROM: int +WINHTTP_QUERY_COST: int +WINHTTP_QUERY_LINK: int +WINHTTP_QUERY_PRAGMA: int +WINHTTP_QUERY_VERSION: int +WINHTTP_QUERY_STATUS_CODE: int +WINHTTP_QUERY_STATUS_TEXT: int +WINHTTP_QUERY_RAW_HEADERS: int +WINHTTP_QUERY_RAW_HEADERS_CRLF: int +WINHTTP_QUERY_CONNECTION: int +WINHTTP_QUERY_ACCEPT: int +WINHTTP_QUERY_ACCEPT_CHARSET: int +WINHTTP_QUERY_ACCEPT_ENCODING: int +WINHTTP_QUERY_ACCEPT_LANGUAGE: int +WINHTTP_QUERY_AUTHORIZATION: int +WINHTTP_QUERY_CONTENT_ENCODING: int +WINHTTP_QUERY_FORWARDED: int +WINHTTP_QUERY_FROM: int +WINHTTP_QUERY_IF_MODIFIED_SINCE: int +WINHTTP_QUERY_LOCATION: int +WINHTTP_QUERY_ORIG_URI: int +WINHTTP_QUERY_REFERER: int 
+WINHTTP_QUERY_RETRY_AFTER: int +WINHTTP_QUERY_SERVER: int +WINHTTP_QUERY_TITLE: int +WINHTTP_QUERY_USER_AGENT: int +WINHTTP_QUERY_WWW_AUTHENTICATE: int +WINHTTP_QUERY_PROXY_AUTHENTICATE: int +WINHTTP_QUERY_ACCEPT_RANGES: int +WINHTTP_QUERY_SET_COOKIE: int +WINHTTP_QUERY_COOKIE: int +WINHTTP_QUERY_REQUEST_METHOD: int +WINHTTP_QUERY_REFRESH: int +WINHTTP_QUERY_CONTENT_DISPOSITION: int +WINHTTP_QUERY_AGE: int +WINHTTP_QUERY_CACHE_CONTROL: int +WINHTTP_QUERY_CONTENT_BASE: int +WINHTTP_QUERY_CONTENT_LOCATION: int +WINHTTP_QUERY_CONTENT_MD5: int +WINHTTP_QUERY_CONTENT_RANGE: int +WINHTTP_QUERY_ETAG: int +WINHTTP_QUERY_HOST: int +WINHTTP_QUERY_IF_MATCH: int +WINHTTP_QUERY_IF_NONE_MATCH: int +WINHTTP_QUERY_IF_RANGE: int +WINHTTP_QUERY_IF_UNMODIFIED_SINCE: int +WINHTTP_QUERY_MAX_FORWARDS: int +WINHTTP_QUERY_PROXY_AUTHORIZATION: int +WINHTTP_QUERY_RANGE: int +WINHTTP_QUERY_TRANSFER_ENCODING: int +WINHTTP_QUERY_UPGRADE: int +WINHTTP_QUERY_VARY: int +WINHTTP_QUERY_VIA: int +WINHTTP_QUERY_WARNING: int +WINHTTP_QUERY_EXPECT: int +WINHTTP_QUERY_PROXY_CONNECTION: int +WINHTTP_QUERY_UNLESS_MODIFIED_SINCE: int +WINHTTP_QUERY_PROXY_SUPPORT: int +WINHTTP_QUERY_AUTHENTICATION_INFO: int +WINHTTP_QUERY_PASSPORT_URLS: int +WINHTTP_QUERY_PASSPORT_CONFIG: int +WINHTTP_QUERY_MAX: int +WINHTTP_QUERY_CUSTOM: int +WINHTTP_QUERY_FLAG_REQUEST_HEADERS: int +WINHTTP_QUERY_FLAG_SYSTEMTIME: int +WINHTTP_QUERY_FLAG_NUMBER: int +HTTP_STATUS_WEBDAV_MULTI_STATUS: int +WINHTTP_ADDREQ_INDEX_MASK: int +WINHTTP_ADDREQ_FLAGS_MASK: int +WINHTTP_ADDREQ_FLAG_ADD_IF_NEW: int +WINHTTP_ADDREQ_FLAG_ADD: int +WINHTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA: int +WINHTTP_ADDREQ_FLAG_COALESCE_WITH_SEMICOLON: int +WINHTTP_ADDREQ_FLAG_COALESCE: int +WINHTTP_ADDREQ_FLAG_REPLACE: int +WINHTTP_IGNORE_REQUEST_TOTAL_LENGTH: int +WINHTTP_ERROR_BASE: int +ERROR_WINHTTP_OUT_OF_HANDLES: int +ERROR_WINHTTP_TIMEOUT: int +ERROR_WINHTTP_INTERNAL_ERROR: int +ERROR_WINHTTP_INVALID_URL: int +ERROR_WINHTTP_UNRECOGNIZED_SCHEME: int +ERROR_WINHTTP_NAME_NOT_RESOLVED: int +ERROR_WINHTTP_INVALID_OPTION: int +ERROR_WINHTTP_OPTION_NOT_SETTABLE: int +ERROR_WINHTTP_SHUTDOWN: int +ERROR_WINHTTP_LOGIN_FAILURE: int +ERROR_WINHTTP_OPERATION_CANCELLED: int +ERROR_WINHTTP_INCORRECT_HANDLE_TYPE: int +ERROR_WINHTTP_INCORRECT_HANDLE_STATE: int +ERROR_WINHTTP_CANNOT_CONNECT: int +ERROR_WINHTTP_CONNECTION_ERROR: int +ERROR_WINHTTP_RESEND_REQUEST: int +ERROR_WINHTTP_CLIENT_AUTH_CERT_NEEDED: int +ERROR_WINHTTP_CANNOT_CALL_BEFORE_OPEN: int +ERROR_WINHTTP_CANNOT_CALL_BEFORE_SEND: int +ERROR_WINHTTP_CANNOT_CALL_AFTER_SEND: int +ERROR_WINHTTP_CANNOT_CALL_AFTER_OPEN: int +ERROR_WINHTTP_HEADER_NOT_FOUND: int +ERROR_WINHTTP_INVALID_SERVER_RESPONSE: int +ERROR_WINHTTP_INVALID_HEADER: int +ERROR_WINHTTP_INVALID_QUERY_REQUEST: int +ERROR_WINHTTP_HEADER_ALREADY_EXISTS: int +ERROR_WINHTTP_REDIRECT_FAILED: int +ERROR_WINHTTP_AUTO_PROXY_SERVICE_ERROR: int +ERROR_WINHTTP_BAD_AUTO_PROXY_SCRIPT: int +ERROR_WINHTTP_UNABLE_TO_DOWNLOAD_SCRIPT: int +ERROR_WINHTTP_NOT_INITIALIZED: int +ERROR_WINHTTP_SECURE_FAILURE: int +ERROR_WINHTTP_SECURE_CERT_DATE_INVALID: int +ERROR_WINHTTP_SECURE_CERT_CN_INVALID: int +ERROR_WINHTTP_SECURE_INVALID_CA: int +ERROR_WINHTTP_SECURE_CERT_REV_FAILED: int +ERROR_WINHTTP_SECURE_CHANNEL_ERROR: int +ERROR_WINHTTP_SECURE_INVALID_CERT: int +ERROR_WINHTTP_SECURE_CERT_REVOKED: int +ERROR_WINHTTP_SECURE_CERT_WRONG_USAGE: int +ERROR_WINHTTP_AUTODETECTION_FAILED: int +ERROR_WINHTTP_HEADER_COUNT_EXCEEDED: int +ERROR_WINHTTP_HEADER_SIZE_OVERFLOW: int 
+ERROR_WINHTTP_CHUNKED_ENCODING_HEADER_SIZE_OVERFLOW: int +ERROR_WINHTTP_RESPONSE_DRAIN_OVERFLOW: int +ERROR_WINHTTP_CLIENT_CERT_NO_PRIVATE_KEY: int +ERROR_WINHTTP_CLIENT_CERT_NO_ACCESS_PRIVATE_KEY: int +WINHTTP_ERROR_LAST: int +WINHTTP_NO_PROXY_NAME: None +WINHTTP_NO_PROXY_BYPASS: None +WINHTTP_NO_REFERER: None +WINHTTP_DEFAULT_ACCEPT_TYPES: None +WINHTTP_NO_ADDITIONAL_HEADERS: None +WINHTTP_NO_REQUEST_DATA: None +INTERNET_OPTION_LISTEN_TIMEOUT: int +WINHTTP_OPTION_CLIENT_CERT_ISSUER_LIST: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32netcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32netcon.pyi new file mode 100644 index 00000000..0538f443 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32netcon.pyi @@ -0,0 +1,571 @@ +CNLEN: int +LM20_CNLEN: int +DNLEN: int +LM20_DNLEN: int +UNCLEN: int +LM20_UNCLEN: int +NNLEN: int +LM20_NNLEN: int +RMLEN: int +LM20_RMLEN: int +SNLEN: int +LM20_SNLEN: int +STXTLEN: int +LM20_STXTLEN: int +PATHLEN: int +LM20_PATHLEN: int +DEVLEN: int +LM20_DEVLEN: int +EVLEN: int +UNLEN: int +LM20_UNLEN: int +GNLEN: int +LM20_GNLEN: int +PWLEN: int +LM20_PWLEN: int +SHPWLEN: int +CLTYPE_LEN: int +MAXCOMMENTSZ: int +LM20_MAXCOMMENTSZ: int +QNLEN: int +LM20_QNLEN: int +ALERTSZ: int +NETBIOS_NAME_LEN: int +CRYPT_KEY_LEN: int +CRYPT_TXT_LEN: int +ENCRYPTED_PWLEN: int +SESSION_PWLEN: int +SESSION_CRYPT_KLEN: int +PARMNUM_ALL: int +PARM_ERROR_NONE: int +PARMNUM_BASE_INFOLEVEL: int +NULL: int +PLATFORM_ID_DOS: int +PLATFORM_ID_OS2: int +PLATFORM_ID_NT: int +PLATFORM_ID_OSF: int +PLATFORM_ID_VMS: int +MAX_LANMAN_MESSAGE_ID: int +UF_SCRIPT: int +UF_ACCOUNTDISABLE: int +UF_HOMEDIR_REQUIRED: int +UF_LOCKOUT: int +UF_PASSWD_NOTREQD: int +UF_PASSWD_CANT_CHANGE: int +UF_TEMP_DUPLICATE_ACCOUNT: int +UF_NORMAL_ACCOUNT: int +UF_INTERDOMAIN_TRUST_ACCOUNT: int +UF_WORKSTATION_TRUST_ACCOUNT: int +UF_SERVER_TRUST_ACCOUNT: int +UF_MACHINE_ACCOUNT_MASK: int +UF_ACCOUNT_TYPE_MASK: int +UF_DONT_EXPIRE_PASSWD: int +UF_MNS_LOGON_ACCOUNT: int +UF_SETTABLE_BITS: int +FILTER_TEMP_DUPLICATE_ACCOUNT: int +FILTER_NORMAL_ACCOUNT: int +FILTER_INTERDOMAIN_TRUST_ACCOUNT: int +FILTER_WORKSTATION_TRUST_ACCOUNT: int +FILTER_SERVER_TRUST_ACCOUNT: int +LG_INCLUDE_INDIRECT: int +AF_OP_PRINT: int +AF_OP_COMM: int +AF_OP_SERVER: int +AF_OP_ACCOUNTS: int +AF_SETTABLE_BITS: int +UAS_ROLE_STANDALONE: int +UAS_ROLE_MEMBER: int +UAS_ROLE_BACKUP: int +UAS_ROLE_PRIMARY: int +USER_NAME_PARMNUM: int +USER_PASSWORD_PARMNUM: int +USER_PASSWORD_AGE_PARMNUM: int +USER_PRIV_PARMNUM: int +USER_HOME_DIR_PARMNUM: int +USER_COMMENT_PARMNUM: int +USER_FLAGS_PARMNUM: int +USER_SCRIPT_PATH_PARMNUM: int +USER_AUTH_FLAGS_PARMNUM: int +USER_FULL_NAME_PARMNUM: int +USER_USR_COMMENT_PARMNUM: int +USER_PARMS_PARMNUM: int +USER_WORKSTATIONS_PARMNUM: int +USER_LAST_LOGON_PARMNUM: int +USER_LAST_LOGOFF_PARMNUM: int +USER_ACCT_EXPIRES_PARMNUM: int +USER_MAX_STORAGE_PARMNUM: int +USER_UNITS_PER_WEEK_PARMNUM: int +USER_LOGON_HOURS_PARMNUM: int +USER_PAD_PW_COUNT_PARMNUM: int +USER_NUM_LOGONS_PARMNUM: int +USER_LOGON_SERVER_PARMNUM: int +USER_COUNTRY_CODE_PARMNUM: int +USER_CODE_PAGE_PARMNUM: int +USER_PRIMARY_GROUP_PARMNUM: int +USER_PROFILE: int +USER_PROFILE_PARMNUM: int +USER_HOME_DIR_DRIVE_PARMNUM: int +USER_NAME_INFOLEVEL: int +USER_PASSWORD_INFOLEVEL: int +USER_PASSWORD_AGE_INFOLEVEL: int +USER_PRIV_INFOLEVEL: int +USER_HOME_DIR_INFOLEVEL: int 
+USER_COMMENT_INFOLEVEL: int +USER_FLAGS_INFOLEVEL: int +USER_SCRIPT_PATH_INFOLEVEL: int +USER_AUTH_FLAGS_INFOLEVEL: int +USER_FULL_NAME_INFOLEVEL: int +USER_USR_COMMENT_INFOLEVEL: int +USER_PARMS_INFOLEVEL: int +USER_WORKSTATIONS_INFOLEVEL: int +USER_LAST_LOGON_INFOLEVEL: int +USER_LAST_LOGOFF_INFOLEVEL: int +USER_ACCT_EXPIRES_INFOLEVEL: int +USER_MAX_STORAGE_INFOLEVEL: int +USER_UNITS_PER_WEEK_INFOLEVEL: int +USER_LOGON_HOURS_INFOLEVEL: int +USER_PAD_PW_COUNT_INFOLEVEL: int +USER_NUM_LOGONS_INFOLEVEL: int +USER_LOGON_SERVER_INFOLEVEL: int +USER_COUNTRY_CODE_INFOLEVEL: int +USER_CODE_PAGE_INFOLEVEL: int +USER_PRIMARY_GROUP_INFOLEVEL: int +USER_HOME_DIR_DRIVE_INFOLEVEL: int +NULL_USERSETINFO_PASSWD: str +UNITS_PER_DAY: int +UNITS_PER_WEEK: int +USER_PRIV_MASK: int +USER_PRIV_GUEST: int +USER_PRIV_USER: int +USER_PRIV_ADMIN: int +MAX_PASSWD_LEN: int +DEF_MIN_PWLEN: int +DEF_PWUNIQUENESS: int +DEF_MAX_PWHIST: int +DEF_MAX_BADPW: int +VALIDATED_LOGON: int +PASSWORD_EXPIRED: int +NON_VALIDATED_LOGON: int +VALID_LOGOFF: int +MODALS_MIN_PASSWD_LEN_PARMNUM: int +MODALS_MAX_PASSWD_AGE_PARMNUM: int +MODALS_MIN_PASSWD_AGE_PARMNUM: int +MODALS_FORCE_LOGOFF_PARMNUM: int +MODALS_PASSWD_HIST_LEN_PARMNUM: int +MODALS_ROLE_PARMNUM: int +MODALS_PRIMARY_PARMNUM: int +MODALS_DOMAIN_NAME_PARMNUM: int +MODALS_DOMAIN_ID_PARMNUM: int +MODALS_LOCKOUT_DURATION_PARMNUM: int +MODALS_LOCKOUT_OBSERVATION_WINDOW_PARMNUM: int +MODALS_LOCKOUT_THRESHOLD_PARMNUM: int +MODALS_MIN_PASSWD_LEN_INFOLEVEL: int +MODALS_MAX_PASSWD_AGE_INFOLEVEL: int +MODALS_MIN_PASSWD_AGE_INFOLEVEL: int +MODALS_FORCE_LOGOFF_INFOLEVEL: int +MODALS_PASSWD_HIST_LEN_INFOLEVEL: int +MODALS_ROLE_INFOLEVEL: int +MODALS_PRIMARY_INFOLEVEL: int +MODALS_DOMAIN_NAME_INFOLEVEL: int +MODALS_DOMAIN_ID_INFOLEVEL: int +GROUPIDMASK: int +GROUP_ALL_PARMNUM: int +GROUP_NAME_PARMNUM: int +GROUP_COMMENT_PARMNUM: int +GROUP_ATTRIBUTES_PARMNUM: int +GROUP_ALL_INFOLEVEL: int +GROUP_NAME_INFOLEVEL: int +GROUP_COMMENT_INFOLEVEL: int +GROUP_ATTRIBUTES_INFOLEVEL: int +LOCALGROUP_NAME_PARMNUM: int +LOCALGROUP_COMMENT_PARMNUM: int +MAXPERMENTRIES: int +ACCESS_NONE: int +ACCESS_READ: int +ACCESS_WRITE: int +ACCESS_CREATE: int +ACCESS_EXEC: int +ACCESS_DELETE: int +ACCESS_ATRIB: int +ACCESS_PERM: int +ACCESS_GROUP: int +ACCESS_AUDIT: int +ACCESS_SUCCESS_OPEN: int +ACCESS_SUCCESS_WRITE: int +ACCESS_SUCCESS_DELETE: int +ACCESS_SUCCESS_ACL: int +ACCESS_SUCCESS_MASK: int +ACCESS_FAIL_OPEN: int +ACCESS_FAIL_WRITE: int +ACCESS_FAIL_DELETE: int +ACCESS_FAIL_ACL: int +ACCESS_FAIL_MASK: int +ACCESS_FAIL_SHIFT: int +ACCESS_RESOURCE_NAME_PARMNUM: int +ACCESS_ATTR_PARMNUM: int +ACCESS_COUNT_PARMNUM: int +ACCESS_RESOURCE_NAME_INFOLEVEL: int +ACCESS_ATTR_INFOLEVEL: int +ACCESS_COUNT_INFOLEVEL: int +ACCESS_LETTERS: str +NETLOGON_CONTROL_QUERY: int +NETLOGON_CONTROL_REPLICATE: int +NETLOGON_CONTROL_SYNCHRONIZE: int +NETLOGON_CONTROL_PDC_REPLICATE: int +NETLOGON_CONTROL_REDISCOVER: int +NETLOGON_CONTROL_TC_QUERY: int +NETLOGON_CONTROL_TRANSPORT_NOTIFY: int +NETLOGON_CONTROL_FIND_USER: int +NETLOGON_CONTROL_UNLOAD_NETLOGON_DLL: int +NETLOGON_CONTROL_BACKUP_CHANGE_LOG: int +NETLOGON_CONTROL_TRUNCATE_LOG: int +NETLOGON_CONTROL_SET_DBFLAG: int +NETLOGON_CONTROL_BREAKPOINT: int +NETLOGON_REPLICATION_NEEDED: int +NETLOGON_REPLICATION_IN_PROGRESS: int +NETLOGON_FULL_SYNC_REPLICATION: int +NETLOGON_REDO_NEEDED: int + +def TEXT(x: str) -> str: ... 
+ +MAX_PREFERRED_LENGTH: int +PARM_ERROR_UNKNOWN: int +MESSAGE_FILENAME: str +OS2MSG_FILENAME: str +HELP_MSG_FILENAME: str +BACKUP_MSG_FILENAME: str +TIMEQ_FOREVER: int +USER_MAXSTORAGE_UNLIMITED: int +USER_NO_LOGOFF: int +DEF_MAX_PWAGE: int +DEF_MIN_PWAGE: int +DEF_FORCE_LOGOFF: int +ONE_DAY: int +GROUP_SPECIALGRP_USERS: str +GROUP_SPECIALGRP_ADMINS: str +GROUP_SPECIALGRP_GUESTS: str +GROUP_SPECIALGRP_LOCAL: str +ACCESS_ALL: int +SV_PLATFORM_ID_OS2: int +SV_PLATFORM_ID_NT: int +MAJOR_VERSION_MASK: int +SV_TYPE_WORKSTATION: int +SV_TYPE_SERVER: int +SV_TYPE_SQLSERVER: int +SV_TYPE_DOMAIN_CTRL: int +SV_TYPE_DOMAIN_BAKCTRL: int +SV_TYPE_TIME_SOURCE: int +SV_TYPE_AFP: int +SV_TYPE_NOVELL: int +SV_TYPE_DOMAIN_MEMBER: int +SV_TYPE_PRINTQ_SERVER: int +SV_TYPE_DIALIN_SERVER: int +SV_TYPE_XENIX_SERVER: int +SV_TYPE_SERVER_UNIX: int +SV_TYPE_NT: int +SV_TYPE_WFW: int +SV_TYPE_SERVER_MFPN: int +SV_TYPE_SERVER_NT: int +SV_TYPE_POTENTIAL_BROWSER: int +SV_TYPE_BACKUP_BROWSER: int +SV_TYPE_MASTER_BROWSER: int +SV_TYPE_DOMAIN_MASTER: int +SV_TYPE_SERVER_OSF: int +SV_TYPE_SERVER_VMS: int +SV_TYPE_WINDOWS: int +SV_TYPE_DFS: int +SV_TYPE_CLUSTER_NT: int +SV_TYPE_DCE: int +SV_TYPE_ALTERNATE_XPORT: int +SV_TYPE_DOMAIN_ENUM: int +SV_TYPE_ALL: int +SV_NODISC: int +SV_USERSECURITY: int +SV_SHARESECURITY: int +SV_HIDDEN: int +SV_VISIBLE: int +SV_PLATFORM_ID_PARMNUM: int +SV_NAME_PARMNUM: int +SV_VERSION_MAJOR_PARMNUM: int +SV_VERSION_MINOR_PARMNUM: int +SV_TYPE_PARMNUM: int +SV_COMMENT_PARMNUM: int +SV_USERS_PARMNUM: int +SV_DISC_PARMNUM: int +SV_HIDDEN_PARMNUM: int +SV_ANNOUNCE_PARMNUM: int +SV_ANNDELTA_PARMNUM: int +SV_USERPATH_PARMNUM: int +SV_ALERTS_PARMNUM: int +SV_SECURITY_PARMNUM: int +SV_NUMADMIN_PARMNUM: int +SV_LANMASK_PARMNUM: int +SV_GUESTACC_PARMNUM: int +SV_CHDEVQ_PARMNUM: int +SV_CHDEVJOBS_PARMNUM: int +SV_CONNECTIONS_PARMNUM: int +SV_SHARES_PARMNUM: int +SV_OPENFILES_PARMNUM: int +SV_SESSREQS_PARMNUM: int +SV_ACTIVELOCKS_PARMNUM: int +SV_NUMREQBUF_PARMNUM: int +SV_NUMBIGBUF_PARMNUM: int +SV_NUMFILETASKS_PARMNUM: int +SV_ALERTSCHED_PARMNUM: int +SV_ERRORALERT_PARMNUM: int +SV_LOGONALERT_PARMNUM: int +SV_ACCESSALERT_PARMNUM: int +SV_DISKALERT_PARMNUM: int +SV_NETIOALERT_PARMNUM: int +SV_MAXAUDITSZ_PARMNUM: int +SV_SRVHEURISTICS_PARMNUM: int +SV_SESSOPENS_PARMNUM: int +SV_SESSVCS_PARMNUM: int +SV_OPENSEARCH_PARMNUM: int +SV_SIZREQBUF_PARMNUM: int +SV_INITWORKITEMS_PARMNUM: int +SV_MAXWORKITEMS_PARMNUM: int +SV_RAWWORKITEMS_PARMNUM: int +SV_IRPSTACKSIZE_PARMNUM: int +SV_MAXRAWBUFLEN_PARMNUM: int +SV_SESSUSERS_PARMNUM: int +SV_SESSCONNS_PARMNUM: int +SV_MAXNONPAGEDMEMORYUSAGE_PARMNUM: int +SV_MAXPAGEDMEMORYUSAGE_PARMNUM: int +SV_ENABLESOFTCOMPAT_PARMNUM: int +SV_ENABLEFORCEDLOGOFF_PARMNUM: int +SV_TIMESOURCE_PARMNUM: int +SV_ACCEPTDOWNLEVELAPIS_PARMNUM: int +SV_LMANNOUNCE_PARMNUM: int +SV_DOMAIN_PARMNUM: int +SV_MAXCOPYREADLEN_PARMNUM: int +SV_MAXCOPYWRITELEN_PARMNUM: int +SV_MINKEEPSEARCH_PARMNUM: int +SV_MAXKEEPSEARCH_PARMNUM: int +SV_MINKEEPCOMPLSEARCH_PARMNUM: int +SV_MAXKEEPCOMPLSEARCH_PARMNUM: int +SV_THREADCOUNTADD_PARMNUM: int +SV_NUMBLOCKTHREADS_PARMNUM: int +SV_SCAVTIMEOUT_PARMNUM: int +SV_MINRCVQUEUE_PARMNUM: int +SV_MINFREEWORKITEMS_PARMNUM: int +SV_XACTMEMSIZE_PARMNUM: int +SV_THREADPRIORITY_PARMNUM: int +SV_MAXMPXCT_PARMNUM: int +SV_OPLOCKBREAKWAIT_PARMNUM: int +SV_OPLOCKBREAKRESPONSEWAIT_PARMNUM: int +SV_ENABLEOPLOCKS_PARMNUM: int +SV_ENABLEOPLOCKFORCECLOSE_PARMNUM: int +SV_ENABLEFCBOPENS_PARMNUM: int +SV_ENABLERAW_PARMNUM: int +SV_ENABLESHAREDNETDRIVES_PARMNUM: int 
+SV_MINFREECONNECTIONS_PARMNUM: int +SV_MAXFREECONNECTIONS_PARMNUM: int +SV_INITSESSTABLE_PARMNUM: int +SV_INITCONNTABLE_PARMNUM: int +SV_INITFILETABLE_PARMNUM: int +SV_INITSEARCHTABLE_PARMNUM: int +SV_ALERTSCHEDULE_PARMNUM: int +SV_ERRORTHRESHOLD_PARMNUM: int +SV_NETWORKERRORTHRESHOLD_PARMNUM: int +SV_DISKSPACETHRESHOLD_PARMNUM: int +SV_MAXLINKDELAY_PARMNUM: int +SV_MINLINKTHROUGHPUT_PARMNUM: int +SV_LINKINFOVALIDTIME_PARMNUM: int +SV_SCAVQOSINFOUPDATETIME_PARMNUM: int +SV_MAXWORKITEMIDLETIME_PARMNUM: int +SV_MAXRAWWORKITEMS_PARMNUM: int +SV_PRODUCTTYPE_PARMNUM: int +SV_SERVERSIZE_PARMNUM: int +SV_CONNECTIONLESSAUTODISC_PARMNUM: int +SV_SHARINGVIOLATIONRETRIES_PARMNUM: int +SV_SHARINGVIOLATIONDELAY_PARMNUM: int +SV_MAXGLOBALOPENSEARCH_PARMNUM: int +SV_REMOVEDUPLICATESEARCHES_PARMNUM: int +SV_LOCKVIOLATIONRETRIES_PARMNUM: int +SV_LOCKVIOLATIONOFFSET_PARMNUM: int +SV_LOCKVIOLATIONDELAY_PARMNUM: int +SV_MDLREADSWITCHOVER_PARMNUM: int +SV_CACHEDOPENLIMIT_PARMNUM: int +SV_CRITICALTHREADS_PARMNUM: int +SV_RESTRICTNULLSESSACCESS_PARMNUM: int +SV_ENABLEWFW311DIRECTIPX_PARMNUM: int +SV_OTHERQUEUEAFFINITY_PARMNUM: int +SV_QUEUESAMPLESECS_PARMNUM: int +SV_BALANCECOUNT_PARMNUM: int +SV_PREFERREDAFFINITY_PARMNUM: int +SV_MAXFREERFCBS_PARMNUM: int +SV_MAXFREEMFCBS_PARMNUM: int +SV_MAXFREELFCBS_PARMNUM: int +SV_MAXFREEPAGEDPOOLCHUNKS_PARMNUM: int +SV_MINPAGEDPOOLCHUNKSIZE_PARMNUM: int +SV_MAXPAGEDPOOLCHUNKSIZE_PARMNUM: int +SV_SENDSFROMPREFERREDPROCESSOR_PARMNUM: int +SV_MAXTHREADSPERQUEUE_PARMNUM: int +SV_CACHEDDIRECTORYLIMIT_PARMNUM: int +SV_MAXCOPYLENGTH_PARMNUM: int +SV_ENABLEBULKTRANSFER_PARMNUM: int +SV_ENABLECOMPRESSION_PARMNUM: int +SV_AUTOSHAREWKS_PARMNUM: int +SV_AUTOSHARESERVER_PARMNUM: int +SV_ENABLESECURITYSIGNATURE_PARMNUM: int +SV_REQUIRESECURITYSIGNATURE_PARMNUM: int +SV_MINCLIENTBUFFERSIZE_PARMNUM: int +SV_CONNECTIONNOSESSIONSTIMEOUT_PARMNUM: int +SVI1_NUM_ELEMENTS: int +SVI2_NUM_ELEMENTS: int +SVI3_NUM_ELEMENTS: int +SW_AUTOPROF_LOAD_MASK: int +SW_AUTOPROF_SAVE_MASK: int +SV_MAX_SRV_HEUR_LEN: int +SV_USERS_PER_LICENSE: int +SVTI2_REMAP_PIPE_NAMES: int +SHARE_NETNAME_PARMNUM: int +SHARE_TYPE_PARMNUM: int +SHARE_REMARK_PARMNUM: int +SHARE_PERMISSIONS_PARMNUM: int +SHARE_MAX_USES_PARMNUM: int +SHARE_CURRENT_USES_PARMNUM: int +SHARE_PATH_PARMNUM: int +SHARE_PASSWD_PARMNUM: int +SHARE_FILE_SD_PARMNUM: int +SHI1_NUM_ELEMENTS: int +SHI2_NUM_ELEMENTS: int +STYPE_DISKTREE: int +STYPE_PRINTQ: int +STYPE_DEVICE: int +STYPE_IPC: int +STYPE_SPECIAL: int +SHI1005_FLAGS_DFS: int +SHI1005_FLAGS_DFS_ROOT: int +COW_PERMACHINE: int +COW_PERUSER: int +CSC_CACHEABLE: int +CSC_NOFLOWOPS: int +CSC_AUTO_INWARD: int +CSC_AUTO_OUTWARD: int +SHI1005_VALID_FLAGS_SET: int +SHI1007_VALID_FLAGS_SET: int +SESS_GUEST: int +SESS_NOENCRYPTION: int +SESI1_NUM_ELEMENTS: int +SESI2_NUM_ELEMENTS: int +PERM_FILE_READ: int +PERM_FILE_WRITE: int +PERM_FILE_CREATE: int +WNNC_NET_MSNET: int +WNNC_NET_LANMAN: int +WNNC_NET_NETWARE: int +WNNC_NET_VINES: int +WNNC_NET_10NET: int +WNNC_NET_LOCUS: int +WNNC_NET_SUN_PC_NFS: int +WNNC_NET_LANSTEP: int +WNNC_NET_9TILES: int +WNNC_NET_LANTASTIC: int +WNNC_NET_AS400: int +WNNC_NET_FTP_NFS: int +WNNC_NET_PATHWORKS: int +WNNC_NET_LIFENET: int +WNNC_NET_POWERLAN: int +WNNC_NET_BWNFS: int +WNNC_NET_COGENT: int +WNNC_NET_FARALLON: int +WNNC_NET_APPLETALK: int +WNNC_NET_INTERGRAPH: int +WNNC_NET_SYMFONET: int +WNNC_NET_CLEARCASE: int +WNNC_NET_FRONTIER: int +WNNC_NET_BMC: int +WNNC_NET_DCE: int +WNNC_NET_DECORB: int +WNNC_NET_PROTSTOR: int +WNNC_NET_FJ_REDIR: int +WNNC_NET_DISTINCT: int 
+WNNC_NET_TWINS: int +WNNC_NET_RDR2SAMPLE: int +RESOURCE_CONNECTED: int +RESOURCE_GLOBALNET: int +RESOURCE_REMEMBERED: int +RESOURCE_RECENT: int +RESOURCE_CONTEXT: int +RESOURCETYPE_ANY: int +RESOURCETYPE_DISK: int +RESOURCETYPE_PRINT: int +RESOURCETYPE_RESERVED: int +RESOURCETYPE_UNKNOWN: int +RESOURCEUSAGE_CONNECTABLE: int +RESOURCEUSAGE_CONTAINER: int +RESOURCEUSAGE_NOLOCALDEVICE: int +RESOURCEUSAGE_SIBLING: int +RESOURCEUSAGE_ATTACHED: int +RESOURCEUSAGE_ALL: int +RESOURCEUSAGE_RESERVED: int +RESOURCEDISPLAYTYPE_GENERIC: int +RESOURCEDISPLAYTYPE_DOMAIN: int +RESOURCEDISPLAYTYPE_SERVER: int +RESOURCEDISPLAYTYPE_SHARE: int +RESOURCEDISPLAYTYPE_FILE: int +RESOURCEDISPLAYTYPE_GROUP: int +RESOURCEDISPLAYTYPE_NETWORK: int +RESOURCEDISPLAYTYPE_ROOT: int +RESOURCEDISPLAYTYPE_SHAREADMIN: int +RESOURCEDISPLAYTYPE_DIRECTORY: int +RESOURCEDISPLAYTYPE_TREE: int +RESOURCEDISPLAYTYPE_NDSCONTAINER: int +NETPROPERTY_PERSISTENT: int +CONNECT_UPDATE_PROFILE: int +CONNECT_UPDATE_RECENT: int +CONNECT_TEMPORARY: int +CONNECT_INTERACTIVE: int +CONNECT_PROMPT: int +CONNECT_NEED_DRIVE: int +CONNECT_REFCOUNT: int +CONNECT_REDIRECT: int +CONNECT_LOCALDRIVE: int +CONNECT_CURRENT_MEDIA: int +CONNECT_DEFERRED: int +CONNECT_RESERVED: int +CONNDLG_RO_PATH: int +CONNDLG_CONN_POINT: int +CONNDLG_USE_MRU: int +CONNDLG_HIDE_BOX: int +CONNDLG_PERSIST: int +CONNDLG_NOT_PERSIST: int +DISC_UPDATE_PROFILE: int +DISC_NO_FORCE: int +UNIVERSAL_NAME_INFO_LEVEL: int +REMOTE_NAME_INFO_LEVEL: int +WNFMT_MULTILINE: int +WNFMT_ABBREVIATED: int +WNFMT_INENUM: int +WNFMT_CONNECTION: int +NETINFO_DLL16: int +NETINFO_DISKRED: int +NETINFO_PRINTERRED: int +RP_LOGON: int +RP_INIFILE: int +PP_DISPLAYERRORS: int +WNCON_FORNETCARD: int +WNCON_NOTROUTED: int +WNCON_SLOWLINK: int +WNCON_DYNAMIC: int +NetSetupUnknown: int +NetSetupMachine: int +NetSetupWorkgroup: int +NetSetupDomain: int +NetSetupNonExistentDomain: int +NetSetupDnsMachine: int +NetSetupUnknownStatus: int +NetSetupUnjoined: int +NetSetupWorkgroupName: int +NetSetupDomainName: int +NetValidateAuthentication: int +NetValidatePasswordChange: int +NetValidatePasswordReset: int +ACCESS_ACCESS_LIST_INFOLEVEL: int +ACCESS_ACCESS_LIST_PARMNUM: int +SV_ALIST_MTIME_PARMNUM: int +SV_GLIST_MTIME_PARMNUM: int +SV_TYPE_LOCAL_LIST_ONLY: int +SV_ULIST_MTIME_PARMNUM: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32pdhquery.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32pdhquery.pyi new file mode 100644 index 00000000..5130ceae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32pdhquery.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete + +class BaseQuery: + counters: Incomplete + paths: Incomplete + active: int + curpaths: Incomplete + def __init__(self, paths: Incomplete | None = ...) -> None: ... + def addcounterbybrowsing(self, flags=..., windowtitle: str = ...) -> None: ... + def rawaddcounter( + self, object, counter, instance: Incomplete | None = ..., inum: int = ..., machine: Incomplete | None = ... + ) -> None: ... + def addcounter( + self, object, counter, instance: Incomplete | None = ..., inum: int = ..., machine: Incomplete | None = ... + ): ... + def open(self): ... + def killbase(self, base: Incomplete | None = ...) -> None: ... + def close(self) -> None: ... + __del__: Incomplete + def collectdata(self, format=...): ... + def collectdataslave(self, format=...): ... 
+ def __getinitargs__(self): ... + +class Query(BaseQuery): + volatilecounters: Incomplete + def __init__(self, *args, **namedargs) -> None: ... + def addinstcounter( + self, object, counter, machine: Incomplete | None = ..., objtype: str = ..., volatile: int = ..., format=... + ) -> None: ... + def getinstpaths(self, object, counter, machine: Incomplete | None = ..., objtype: str = ..., format=...): ... + def open(self, *args, **namedargs) -> None: ... + curresults: Incomplete + def collectdatafor(self, totalperiod, period: int = ...) -> None: ... + collectdatawhile_active: int + def collectdatawhile(self, period: int = ...) -> None: ... + def collectdatawhile_stop(self) -> None: ... + def collectdatawhile_slave(self, period) -> None: ... + def __getinitargs__(self): ... + +class QueryError(Exception): + query: Incomplete + def __init__(self, query) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32serviceutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32serviceutil.pyi new file mode 100644 index 00000000..d3147125 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32serviceutil.pyi @@ -0,0 +1,81 @@ +from _typeshed import Incomplete +from collections.abc import Iterable, Sequence + +error = RuntimeError + +def LocatePythonServiceExe(exe: Incomplete | None = ...): ... +def SmartOpenService(hscm, name, access): ... +def LocateSpecificServiceExe(serviceName): ... +def InstallPerfmonForService(serviceName, iniName, dllName: Incomplete | None = ...) -> None: ... +def InstallService( + pythonClassString, + serviceName, + displayName, + startType: Incomplete | None = ..., + errorControl: Incomplete | None = ..., + bRunInteractive: int = ..., + serviceDeps: Incomplete | None = ..., + userName: Incomplete | None = ..., + password: Incomplete | None = ..., + exeName: Incomplete | None = ..., + perfMonIni: Incomplete | None = ..., + perfMonDll: Incomplete | None = ..., + exeArgs: Incomplete | None = ..., + description: Incomplete | None = ..., + delayedstart: Incomplete | None = ..., +) -> None: ... +def ChangeServiceConfig( + pythonClassString, + serviceName, + startType: Incomplete | None = ..., + errorControl: Incomplete | None = ..., + bRunInteractive: int = ..., + serviceDeps: Incomplete | None = ..., + userName: Incomplete | None = ..., + password: Incomplete | None = ..., + exeName: Incomplete | None = ..., + displayName: Incomplete | None = ..., + perfMonIni: Incomplete | None = ..., + perfMonDll: Incomplete | None = ..., + exeArgs: Incomplete | None = ..., + description: Incomplete | None = ..., + delayedstart: Incomplete | None = ..., +) -> None: ... +def InstallPythonClassString(pythonClassString, serviceName) -> None: ... +def SetServiceCustomOption(serviceName, option, value) -> None: ... +def GetServiceCustomOption(serviceName, option, defaultValue: Incomplete | None = ...): ... +def RemoveService(serviceName) -> None: ... +def ControlService(serviceName, code, machine: Incomplete | None = ...): ... +def WaitForServiceStatus(serviceName, status, waitSecs, machine: Incomplete | None = ...) -> None: ... +def StopServiceWithDeps(serviceName, machine: Incomplete | None = ..., waitSecs: int = ...) -> None: ... +def StopService(serviceName, machine: Incomplete | None = ...): ... +def StartService(serviceName, args: Incomplete | None = ..., machine: Incomplete | None = ...) -> None: ... 
+def RestartService( + serviceName, args: Incomplete | None = ..., waitSeconds: int = ..., machine: Incomplete | None = ... +) -> None: ... +def DebugService(cls, argv=...) -> None: ... +def GetServiceClassString(cls, argv: Incomplete | None = ...): ... +def QueryServiceStatus(serviceName, machine: Incomplete | None = ...): ... +def usage() -> None: ... +def HandleCommandLine( + cls: type[ServiceFramework], + serviceClassString: Incomplete | None = ..., + argv: Sequence[str] | None = ..., + customInstallOptions: str = ..., + customOptionHandler: Incomplete | None = ..., +): ... + +class ServiceFramework: + ssh: Incomplete + checkPoint: int + def __init__(self, args: Iterable[str]) -> None: ... + def GetAcceptedControls(self): ... + def ReportServiceStatus( + self, serviceStatus, waitHint: int = ..., win32ExitCode: int = ..., svcExitCode: int = ... + ) -> None: ... + def SvcInterrogate(self) -> None: ... + def SvcOther(self, control) -> None: ... + def ServiceCtrlHandler(self, control): ... + def SvcOtherEx(self, control, event_type, data): ... + def ServiceCtrlHandlerEx(self, control, event_type, data): ... + def SvcRun(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32timezone.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32timezone.pyi new file mode 100644 index 00000000..763eee6b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/win32timezone.pyi @@ -0,0 +1,73 @@ +import datetime +from _typeshed import Incomplete + +log: Incomplete + +class _SimpleStruct: + def __init__(self, *args, **kw) -> None: ... + def field_names(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + +class SYSTEMTIME(_SimpleStruct): ... +class TIME_ZONE_INFORMATION(_SimpleStruct): ... +class DYNAMIC_TIME_ZONE_INFORMATION(_SimpleStruct): ... + +class TimeZoneDefinition(DYNAMIC_TIME_ZONE_INFORMATION): + def __init__(self, *args, **kwargs) -> None: ... + def __getattribute__(self, attr: str): ... + @classmethod + def current(cls): ... + def set(self) -> None: ... + def copy(self): ... + def locate_daylight_start(self, year): ... + def locate_standard_start(self, year): ... + +class TimeZoneInfo(datetime.tzinfo): + tzRegKey: str + timeZoneName: Incomplete + fixedStandardTime: Incomplete + def __init__(self, param: Incomplete | None = ..., fix_standard_time: bool = ...) -> None: ... + def tzname(self, dt): ... + def getWinInfo(self, targetYear): ... + def utcoffset(self, dt): ... + def dst(self, dt): ... + def GetDSTStartTime(self, year): ... + def GetDSTEndTime(self, year): ... + def __cmp__(self, other): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + @classmethod + def local(cls): ... + @classmethod + def utc(cls): ... + @staticmethod + def get_sorted_time_zone_names(): ... + @staticmethod + def get_all_time_zones(): ... + @staticmethod + def get_sorted_time_zones(key: Incomplete | None = ...): ... + +def utcnow(): ... +def now(): ... +def GetTZCapabilities(): ... + +class DLLHandleCache: + def __getitem__(self, filename): ... + +DLLCache: Incomplete + +def resolveMUITimeZone(spec): ... + +class RangeMap(dict[int, str]): + sort_params: Incomplete + match: Incomplete + def __init__(self, source, sort_params=..., key_match_comparator=...) -> None: ... + def __getitem__(self, item): ... + def get(self, key, default: Incomplete | None = ...): ... + def bounds(self): ... 
+ undefined_value: Incomplete + + class Item(int): ... + first_item: Incomplete + last_item: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winerror.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winerror.pyi new file mode 100644 index 00000000..b586d0f7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winerror.pyi @@ -0,0 +1,2586 @@ +TRUST_E_PROVIDER_UNKNOWN: int +TRUST_E_ACTION_UNKNOWN: int +TRUST_E_SUBJECT_FORM_UNKNOWN: int +TRUST_E_SUBJECT_NOT_TRUSTED: int +FACILITY_WINRM: int +FACILITY_WINDOWSUPDATE: int +FACILITY_WINDOWS_DEFENDER: int +FACILITY_WINDOWS_CE: int +FACILITY_WINDOWS: int +FACILITY_URT: int +FACILITY_UMI: int +FACILITY_TPM_SOFTWARE: int +FACILITY_TPM_SERVICES: int +FACILITY_SXS: int +FACILITY_STORAGE: int +FACILITY_STATE_MANAGEMENT: int +FACILITY_SSPI: int +FACILITY_SCARD: int +FACILITY_SHELL: int +FACILITY_SETUPAPI: int +FACILITY_SECURITY: int +FACILITY_RPC: int +FACILITY_PLA: int +FACILITY_WIN32: int +FACILITY_CONTROL: int +FACILITY_NULL: int +FACILITY_NDIS: int +FACILITY_METADIRECTORY: int +FACILITY_MSMQ: int +FACILITY_MEDIASERVER: int +FACILITY_INTERNET: int +FACILITY_ITF: int +FACILITY_USERMODE_HYPERVISOR: int +FACILITY_HTTP: int +FACILITY_GRAPHICS: int +FACILITY_FWP: int +FACILITY_FVE: int +FACILITY_USERMODE_FILTER_MANAGER: int +FACILITY_DPLAY: int +FACILITY_DISPATCH: int +FACILITY_DIRECTORYSERVICE: int +FACILITY_CONFIGURATION: int +FACILITY_COMPLUS: int +FACILITY_USERMODE_COMMONLOG: int +FACILITY_CMI: int +FACILITY_CERT: int +FACILITY_BACKGROUNDCOPY: int +FACILITY_ACS: int +FACILITY_AAF: int +ERROR_SUCCESS: int +NO_ERROR: int +S_OK: int +S_FALSE: int +ERROR_INVALID_FUNCTION: int +ERROR_FILE_NOT_FOUND: int +ERROR_PATH_NOT_FOUND: int +ERROR_TOO_MANY_OPEN_FILES: int +ERROR_ACCESS_DENIED: int +ERROR_INVALID_HANDLE: int +ERROR_ARENA_TRASHED: int +ERROR_NOT_ENOUGH_MEMORY: int +ERROR_INVALID_BLOCK: int +ERROR_BAD_ENVIRONMENT: int +ERROR_BAD_FORMAT: int +ERROR_INVALID_ACCESS: int +ERROR_INVALID_DATA: int +ERROR_OUTOFMEMORY: int +ERROR_INVALID_DRIVE: int +ERROR_CURRENT_DIRECTORY: int +ERROR_NOT_SAME_DEVICE: int +ERROR_NO_MORE_FILES: int +ERROR_WRITE_PROTECT: int +ERROR_BAD_UNIT: int +ERROR_NOT_READY: int +ERROR_BAD_COMMAND: int +ERROR_CRC: int +ERROR_BAD_LENGTH: int +ERROR_SEEK: int +ERROR_NOT_DOS_DISK: int +ERROR_SECTOR_NOT_FOUND: int +ERROR_OUT_OF_PAPER: int +ERROR_WRITE_FAULT: int +ERROR_READ_FAULT: int +ERROR_GEN_FAILURE: int +ERROR_SHARING_VIOLATION: int +ERROR_LOCK_VIOLATION: int +ERROR_WRONG_DISK: int +ERROR_SHARING_BUFFER_EXCEEDED: int +ERROR_HANDLE_EOF: int +ERROR_HANDLE_DISK_FULL: int +ERROR_NOT_SUPPORTED: int +ERROR_REM_NOT_LIST: int +ERROR_DUP_NAME: int +ERROR_BAD_NETPATH: int +ERROR_NETWORK_BUSY: int +ERROR_DEV_NOT_EXIST: int +ERROR_TOO_MANY_CMDS: int +ERROR_ADAP_HDW_ERR: int +ERROR_BAD_NET_RESP: int +ERROR_UNEXP_NET_ERR: int +ERROR_BAD_REM_ADAP: int +ERROR_PRINTQ_FULL: int +ERROR_NO_SPOOL_SPACE: int +ERROR_PRINT_CANCELLED: int +ERROR_NETNAME_DELETED: int +ERROR_NETWORK_ACCESS_DENIED: int +ERROR_BAD_DEV_TYPE: int +ERROR_BAD_NET_NAME: int +ERROR_TOO_MANY_NAMES: int +ERROR_TOO_MANY_SESS: int +ERROR_SHARING_PAUSED: int +ERROR_REQ_NOT_ACCEP: int +ERROR_REDIR_PAUSED: int +ERROR_FILE_EXISTS: int +ERROR_CANNOT_MAKE: int +ERROR_FAIL_I24: int +ERROR_OUT_OF_STRUCTURES: int +ERROR_ALREADY_ASSIGNED: int +ERROR_INVALID_PASSWORD: int +ERROR_INVALID_PARAMETER: int +ERROR_NET_WRITE_FAULT: int 
+ERROR_NO_PROC_SLOTS: int +ERROR_TOO_MANY_SEMAPHORES: int +ERROR_EXCL_SEM_ALREADY_OWNED: int +ERROR_SEM_IS_SET: int +ERROR_TOO_MANY_SEM_REQUESTS: int +ERROR_INVALID_AT_INTERRUPT_TIME: int +ERROR_SEM_OWNER_DIED: int +ERROR_SEM_USER_LIMIT: int +ERROR_DISK_CHANGE: int +ERROR_DRIVE_LOCKED: int +ERROR_BROKEN_PIPE: int +ERROR_OPEN_FAILED: int +ERROR_BUFFER_OVERFLOW: int +ERROR_DISK_FULL: int +ERROR_NO_MORE_SEARCH_HANDLES: int +ERROR_INVALID_TARGET_HANDLE: int +ERROR_INVALID_CATEGORY: int +ERROR_INVALID_VERIFY_SWITCH: int +ERROR_BAD_DRIVER_LEVEL: int +ERROR_CALL_NOT_IMPLEMENTED: int +ERROR_SEM_TIMEOUT: int +ERROR_INSUFFICIENT_BUFFER: int +ERROR_INVALID_NAME: int +ERROR_INVALID_LEVEL: int +ERROR_NO_VOLUME_LABEL: int +ERROR_MOD_NOT_FOUND: int +ERROR_PROC_NOT_FOUND: int +ERROR_WAIT_NO_CHILDREN: int +ERROR_CHILD_NOT_COMPLETE: int +ERROR_DIRECT_ACCESS_HANDLE: int +ERROR_NEGATIVE_SEEK: int +ERROR_SEEK_ON_DEVICE: int +ERROR_IS_JOIN_TARGET: int +ERROR_IS_JOINED: int +ERROR_IS_SUBSTED: int +ERROR_NOT_JOINED: int +ERROR_NOT_SUBSTED: int +ERROR_JOIN_TO_JOIN: int +ERROR_SUBST_TO_SUBST: int +ERROR_JOIN_TO_SUBST: int +ERROR_SUBST_TO_JOIN: int +ERROR_BUSY_DRIVE: int +ERROR_SAME_DRIVE: int +ERROR_DIR_NOT_ROOT: int +ERROR_DIR_NOT_EMPTY: int +ERROR_IS_SUBST_PATH: int +ERROR_IS_JOIN_PATH: int +ERROR_PATH_BUSY: int +ERROR_IS_SUBST_TARGET: int +ERROR_SYSTEM_TRACE: int +ERROR_INVALID_EVENT_COUNT: int +ERROR_TOO_MANY_MUXWAITERS: int +ERROR_INVALID_LIST_FORMAT: int +ERROR_LABEL_TOO_LONG: int +ERROR_TOO_MANY_TCBS: int +ERROR_SIGNAL_REFUSED: int +ERROR_DISCARDED: int +ERROR_NOT_LOCKED: int +ERROR_BAD_THREADID_ADDR: int +ERROR_BAD_ARGUMENTS: int +ERROR_BAD_PATHNAME: int +ERROR_SIGNAL_PENDING: int +ERROR_MAX_THRDS_REACHED: int +ERROR_LOCK_FAILED: int +ERROR_BUSY: int +ERROR_CANCEL_VIOLATION: int +ERROR_ATOMIC_LOCKS_NOT_SUPPORTED: int +ERROR_INVALID_SEGMENT_NUMBER: int +ERROR_INVALID_ORDINAL: int +ERROR_ALREADY_EXISTS: int +ERROR_INVALID_FLAG_NUMBER: int +ERROR_SEM_NOT_FOUND: int +ERROR_INVALID_STARTING_CODESEG: int +ERROR_INVALID_STACKSEG: int +ERROR_INVALID_MODULETYPE: int +ERROR_INVALID_EXE_SIGNATURE: int +ERROR_EXE_MARKED_INVALID: int +ERROR_BAD_EXE_FORMAT: int +ERROR_ITERATED_DATA_EXCEEDS_64k: int +ERROR_INVALID_MINALLOCSIZE: int +ERROR_DYNLINK_FROM_INVALID_RING: int +ERROR_IOPL_NOT_ENABLED: int +ERROR_INVALID_SEGDPL: int +ERROR_AUTODATASEG_EXCEEDS_64k: int +ERROR_RING2SEG_MUST_BE_MOVABLE: int +ERROR_RELOC_CHAIN_XEEDS_SEGLIM: int +ERROR_INFLOOP_IN_RELOC_CHAIN: int +ERROR_ENVVAR_NOT_FOUND: int +ERROR_NO_SIGNAL_SENT: int +ERROR_FILENAME_EXCED_RANGE: int +ERROR_RING2_STACK_IN_USE: int +ERROR_META_EXPANSION_TOO_LONG: int +ERROR_INVALID_SIGNAL_NUMBER: int +ERROR_THREAD_1_INACTIVE: int +ERROR_LOCKED: int +ERROR_TOO_MANY_MODULES: int +ERROR_NESTING_NOT_ALLOWED: int +ERROR_EXE_MACHINE_TYPE_MISMATCH: int +ERROR_EXE_CANNOT_MODIFY_SIGNED_BINARY: int +ERROR_EXE_CANNOT_MODIFY_STRONG_SIGNED_BINARY: int +ERROR_FILE_CHECKED_OUT: int +ERROR_CHECKOUT_REQUIRED: int +ERROR_BAD_FILE_TYPE: int +ERROR_FILE_TOO_LARGE: int +ERROR_FORMS_AUTH_REQUIRED: int +ERROR_VIRUS_INFECTED: int +ERROR_VIRUS_DELETED: int +ERROR_PIPE_LOCAL: int +ERROR_BAD_PIPE: int +ERROR_PIPE_BUSY: int +ERROR_NO_DATA: int +ERROR_PIPE_NOT_CONNECTED: int +ERROR_MORE_DATA: int +ERROR_VC_DISCONNECTED: int +ERROR_INVALID_EA_NAME: int +ERROR_EA_LIST_INCONSISTENT: int +WAIT_TIMEOUT: int +ERROR_NO_MORE_ITEMS: int +ERROR_CANNOT_COPY: int +ERROR_DIRECTORY: int +ERROR_EAS_DIDNT_FIT: int +ERROR_EA_FILE_CORRUPT: int +ERROR_EA_TABLE_FULL: int +ERROR_INVALID_EA_HANDLE: int 
+ERROR_EAS_NOT_SUPPORTED: int +ERROR_NOT_OWNER: int +ERROR_TOO_MANY_POSTS: int +ERROR_PARTIAL_COPY: int +ERROR_OPLOCK_NOT_GRANTED: int +ERROR_INVALID_OPLOCK_PROTOCOL: int +ERROR_DISK_TOO_FRAGMENTED: int +ERROR_DELETE_PENDING: int +ERROR_MR_MID_NOT_FOUND: int +ERROR_SCOPE_NOT_FOUND: int +ERROR_FAIL_NOACTION_REBOOT: int +ERROR_FAIL_SHUTDOWN: int +ERROR_FAIL_RESTART: int +ERROR_MAX_SESSIONS_REACHED: int +ERROR_THREAD_MODE_ALREADY_BACKGROUND: int +ERROR_THREAD_MODE_NOT_BACKGROUND: int +ERROR_PROCESS_MODE_ALREADY_BACKGROUND: int +ERROR_PROCESS_MODE_NOT_BACKGROUND: int +ERROR_INVALID_ADDRESS: int +ERROR_USER_PROFILE_LOAD: int +ERROR_ARITHMETIC_OVERFLOW: int +ERROR_PIPE_CONNECTED: int +ERROR_PIPE_LISTENING: int +ERROR_VERIFIER_STOP: int +ERROR_ABIOS_ERROR: int +ERROR_WX86_WARNING: int +ERROR_WX86_ERROR: int +ERROR_TIMER_NOT_CANCELED: int +ERROR_UNWIND: int +ERROR_BAD_STACK: int +ERROR_INVALID_UNWIND_TARGET: int +ERROR_INVALID_PORT_ATTRIBUTES: int +ERROR_PORT_MESSAGE_TOO_LONG: int +ERROR_INVALID_QUOTA_LOWER: int +ERROR_DEVICE_ALREADY_ATTACHED: int +ERROR_INSTRUCTION_MISALIGNMENT: int +ERROR_PROFILING_NOT_STARTED: int +ERROR_PROFILING_NOT_STOPPED: int +ERROR_COULD_NOT_INTERPRET: int +ERROR_PROFILING_AT_LIMIT: int +ERROR_CANT_WAIT: int +ERROR_CANT_TERMINATE_SELF: int +ERROR_UNEXPECTED_MM_CREATE_ERR: int +ERROR_UNEXPECTED_MM_MAP_ERROR: int +ERROR_UNEXPECTED_MM_EXTEND_ERR: int +ERROR_BAD_FUNCTION_TABLE: int +ERROR_NO_GUID_TRANSLATION: int +ERROR_INVALID_LDT_SIZE: int +ERROR_INVALID_LDT_OFFSET: int +ERROR_INVALID_LDT_DESCRIPTOR: int +ERROR_TOO_MANY_THREADS: int +ERROR_THREAD_NOT_IN_PROCESS: int +ERROR_PAGEFILE_QUOTA_EXCEEDED: int +ERROR_LOGON_SERVER_CONFLICT: int +ERROR_SYNCHRONIZATION_REQUIRED: int +ERROR_NET_OPEN_FAILED: int +ERROR_IO_PRIVILEGE_FAILED: int +ERROR_CONTROL_C_EXIT: int +ERROR_MISSING_SYSTEMFILE: int +ERROR_UNHANDLED_EXCEPTION: int +ERROR_APP_INIT_FAILURE: int +ERROR_PAGEFILE_CREATE_FAILED: int +ERROR_INVALID_IMAGE_HASH: int +ERROR_NO_PAGEFILE: int +ERROR_ILLEGAL_FLOAT_CONTEXT: int +ERROR_NO_EVENT_PAIR: int +ERROR_DOMAIN_CTRLR_CONFIG_ERROR: int +ERROR_ILLEGAL_CHARACTER: int +ERROR_UNDEFINED_CHARACTER: int +ERROR_FLOPPY_VOLUME: int +ERROR_BIOS_FAILED_TO_CONNECT_INTERRUPT: int +ERROR_BACKUP_CONTROLLER: int +ERROR_MUTANT_LIMIT_EXCEEDED: int +ERROR_FS_DRIVER_REQUIRED: int +ERROR_CANNOT_LOAD_REGISTRY_FILE: int +ERROR_DEBUG_ATTACH_FAILED: int +ERROR_SYSTEM_PROCESS_TERMINATED: int +ERROR_DATA_NOT_ACCEPTED: int +ERROR_VDM_HARD_ERROR: int +ERROR_DRIVER_CANCEL_TIMEOUT: int +ERROR_REPLY_MESSAGE_MISMATCH: int +ERROR_LOST_WRITEBEHIND_DATA: int +ERROR_CLIENT_SERVER_PARAMETERS_INVALID: int +ERROR_NOT_TINY_STREAM: int +ERROR_STACK_OVERFLOW_READ: int +ERROR_CONVERT_TO_LARGE: int +ERROR_FOUND_OUT_OF_SCOPE: int +ERROR_ALLOCATE_BUCKET: int +ERROR_MARSHALL_OVERFLOW: int +ERROR_INVALID_VARIANT: int +ERROR_BAD_COMPRESSION_BUFFER: int +ERROR_AUDIT_FAILED: int +ERROR_TIMER_RESOLUTION_NOT_SET: int +ERROR_INSUFFICIENT_LOGON_INFO: int +ERROR_BAD_DLL_ENTRYPOINT: int +ERROR_BAD_SERVICE_ENTRYPOINT: int +ERROR_IP_ADDRESS_CONFLICT1: int +ERROR_IP_ADDRESS_CONFLICT2: int +ERROR_REGISTRY_QUOTA_LIMIT: int +ERROR_NO_CALLBACK_ACTIVE: int +ERROR_PWD_TOO_SHORT: int +ERROR_PWD_TOO_RECENT: int +ERROR_PWD_HISTORY_CONFLICT: int +ERROR_UNSUPPORTED_COMPRESSION: int +ERROR_INVALID_HW_PROFILE: int +ERROR_INVALID_PLUGPLAY_DEVICE_PATH: int +ERROR_QUOTA_LIST_INCONSISTENT: int +ERROR_EVALUATION_EXPIRATION: int +ERROR_ILLEGAL_DLL_RELOCATION: int +ERROR_DLL_INIT_FAILED_LOGOFF: int +ERROR_VALIDATE_CONTINUE: int +ERROR_NO_MORE_MATCHES: int 
+ERROR_RANGE_LIST_CONFLICT: int +ERROR_SERVER_SID_MISMATCH: int +ERROR_CANT_ENABLE_DENY_ONLY: int +ERROR_FLOAT_MULTIPLE_FAULTS: int +ERROR_FLOAT_MULTIPLE_TRAPS: int +ERROR_NOINTERFACE: int +ERROR_DRIVER_FAILED_SLEEP: int +ERROR_CORRUPT_SYSTEM_FILE: int +ERROR_COMMITMENT_MINIMUM: int +ERROR_PNP_RESTART_ENUMERATION: int +ERROR_SYSTEM_IMAGE_BAD_SIGNATURE: int +ERROR_PNP_REBOOT_REQUIRED: int +ERROR_INSUFFICIENT_POWER: int +ERROR_MULTIPLE_FAULT_VIOLATION: int +ERROR_SYSTEM_SHUTDOWN: int +ERROR_PORT_NOT_SET: int +ERROR_DS_VERSION_CHECK_FAILURE: int +ERROR_RANGE_NOT_FOUND: int +ERROR_NOT_SAFE_MODE_DRIVER: int +ERROR_FAILED_DRIVER_ENTRY: int +ERROR_DEVICE_ENUMERATION_ERROR: int +ERROR_MOUNT_POINT_NOT_RESOLVED: int +ERROR_INVALID_DEVICE_OBJECT_PARAMETER: int +ERROR_MCA_OCCURED: int +ERROR_DRIVER_DATABASE_ERROR: int +ERROR_SYSTEM_HIVE_TOO_LARGE: int +ERROR_DRIVER_FAILED_PRIOR_UNLOAD: int +ERROR_VOLSNAP_PREPARE_HIBERNATE: int +ERROR_HIBERNATION_FAILURE: int +ERROR_FILE_SYSTEM_LIMITATION: int +ERROR_ASSERTION_FAILURE: int +ERROR_ACPI_ERROR: int +ERROR_WOW_ASSERTION: int +ERROR_PNP_BAD_MPS_TABLE: int +ERROR_PNP_TRANSLATION_FAILED: int +ERROR_PNP_IRQ_TRANSLATION_FAILED: int +ERROR_PNP_INVALID_ID: int +ERROR_WAKE_SYSTEM_DEBUGGER: int +ERROR_HANDLES_CLOSED: int +ERROR_EXTRANEOUS_INFORMATION: int +ERROR_RXACT_COMMIT_NECESSARY: int +ERROR_MEDIA_CHECK: int +ERROR_GUID_SUBSTITUTION_MADE: int +ERROR_STOPPED_ON_SYMLINK: int +ERROR_LONGJUMP: int +ERROR_PLUGPLAY_QUERY_VETOED: int +ERROR_UNWIND_CONSOLIDATE: int +ERROR_REGISTRY_HIVE_RECOVERED: int +ERROR_DLL_MIGHT_BE_INSECURE: int +ERROR_DLL_MIGHT_BE_INCOMPATIBLE: int +ERROR_DBG_EXCEPTION_NOT_HANDLED: int +ERROR_DBG_REPLY_LATER: int +ERROR_DBG_UNABLE_TO_PROVIDE_HANDLE: int +ERROR_DBG_TERMINATE_THREAD: int +ERROR_DBG_TERMINATE_PROCESS: int +ERROR_DBG_CONTROL_C: int +ERROR_DBG_PRINTEXCEPTION_C: int +ERROR_DBG_RIPEXCEPTION: int +ERROR_DBG_CONTROL_BREAK: int +ERROR_DBG_COMMAND_EXCEPTION: int +ERROR_OBJECT_NAME_EXISTS: int +ERROR_THREAD_WAS_SUSPENDED: int +ERROR_IMAGE_NOT_AT_BASE: int +ERROR_RXACT_STATE_CREATED: int +ERROR_SEGMENT_NOTIFICATION: int +ERROR_BAD_CURRENT_DIRECTORY: int +ERROR_FT_READ_RECOVERY_FROM_BACKUP: int +ERROR_FT_WRITE_RECOVERY: int +ERROR_IMAGE_MACHINE_TYPE_MISMATCH: int +ERROR_RECEIVE_PARTIAL: int +ERROR_RECEIVE_EXPEDITED: int +ERROR_RECEIVE_PARTIAL_EXPEDITED: int +ERROR_EVENT_DONE: int +ERROR_EVENT_PENDING: int +ERROR_CHECKING_FILE_SYSTEM: int +ERROR_FATAL_APP_EXIT: int +ERROR_PREDEFINED_HANDLE: int +ERROR_WAS_UNLOCKED: int +ERROR_SERVICE_NOTIFICATION: int +ERROR_WAS_LOCKED: int +ERROR_LOG_HARD_ERROR: int +ERROR_ALREADY_WIN32: int +ERROR_IMAGE_MACHINE_TYPE_MISMATCH_EXE: int +ERROR_NO_YIELD_PERFORMED: int +ERROR_TIMER_RESUME_IGNORED: int +ERROR_ARBITRATION_UNHANDLED: int +ERROR_CARDBUS_NOT_SUPPORTED: int +ERROR_MP_PROCESSOR_MISMATCH: int +ERROR_HIBERNATED: int +ERROR_RESUME_HIBERNATION: int +ERROR_FIRMWARE_UPDATED: int +ERROR_DRIVERS_LEAKING_LOCKED_PAGES: int +ERROR_WAKE_SYSTEM: int +ERROR_WAIT_1: int +ERROR_WAIT_2: int +ERROR_WAIT_3: int +ERROR_WAIT_63: int +ERROR_ABANDONED_WAIT_0: int +ERROR_ABANDONED_WAIT_63: int +ERROR_USER_APC: int +ERROR_KERNEL_APC: int +ERROR_ALERTED: int +ERROR_ELEVATION_REQUIRED: int +ERROR_REPARSE: int +ERROR_OPLOCK_BREAK_IN_PROGRESS: int +ERROR_VOLUME_MOUNTED: int +ERROR_RXACT_COMMITTED: int +ERROR_NOTIFY_CLEANUP: int +ERROR_PRIMARY_TRANSPORT_CONNECT_FAILED: int +ERROR_PAGE_FAULT_TRANSITION: int +ERROR_PAGE_FAULT_DEMAND_ZERO: int +ERROR_PAGE_FAULT_COPY_ON_WRITE: int +ERROR_PAGE_FAULT_GUARD_PAGE: int 
+ERROR_PAGE_FAULT_PAGING_FILE: int +ERROR_CACHE_PAGE_LOCKED: int +ERROR_CRASH_DUMP: int +ERROR_BUFFER_ALL_ZEROS: int +ERROR_REPARSE_OBJECT: int +ERROR_RESOURCE_REQUIREMENTS_CHANGED: int +ERROR_TRANSLATION_COMPLETE: int +ERROR_NOTHING_TO_TERMINATE: int +ERROR_PROCESS_NOT_IN_JOB: int +ERROR_PROCESS_IN_JOB: int +ERROR_VOLSNAP_HIBERNATE_READY: int +ERROR_FSFILTER_OP_COMPLETED_SUCCESSFULLY: int +ERROR_INTERRUPT_VECTOR_ALREADY_CONNECTED: int +ERROR_INTERRUPT_STILL_CONNECTED: int +ERROR_WAIT_FOR_OPLOCK: int +ERROR_DBG_EXCEPTION_HANDLED: int +ERROR_DBG_CONTINUE: int +ERROR_CALLBACK_POP_STACK: int +ERROR_COMPRESSION_DISABLED: int +ERROR_CANTFETCHBACKWARDS: int +ERROR_CANTSCROLLBACKWARDS: int +ERROR_ROWSNOTRELEASED: int +ERROR_BAD_ACCESSOR_FLAGS: int +ERROR_ERRORS_ENCOUNTERED: int +ERROR_NOT_CAPABLE: int +ERROR_REQUEST_OUT_OF_SEQUENCE: int +ERROR_VERSION_PARSE_ERROR: int +ERROR_BADSTARTPOSITION: int +ERROR_MEMORY_HARDWARE: int +ERROR_DISK_REPAIR_DISABLED: int +ERROR_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE: int +ERROR_SYSTEM_POWERSTATE_TRANSITION: int +ERROR_SYSTEM_POWERSTATE_COMPLEX_TRANSITION: int +ERROR_MCA_EXCEPTION: int +ERROR_ACCESS_AUDIT_BY_POLICY: int +ERROR_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY: int +ERROR_ABANDON_HIBERFILE: int +ERROR_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED: int +ERROR_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR: int +ERROR_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR: int +ERROR_BAD_MCFG_TABLE: int +ERROR_EA_ACCESS_DENIED: int +ERROR_OPERATION_ABORTED: int +ERROR_IO_INCOMPLETE: int +ERROR_IO_PENDING: int +ERROR_NOACCESS: int +ERROR_SWAPERROR: int +ERROR_STACK_OVERFLOW: int +ERROR_INVALID_MESSAGE: int +ERROR_CAN_NOT_COMPLETE: int +ERROR_INVALID_FLAGS: int +ERROR_UNRECOGNIZED_VOLUME: int +ERROR_FILE_INVALID: int +ERROR_FULLSCREEN_MODE: int +ERROR_NO_TOKEN: int +ERROR_BADDB: int +ERROR_BADKEY: int +ERROR_CANTOPEN: int +ERROR_CANTREAD: int +ERROR_CANTWRITE: int +ERROR_REGISTRY_RECOVERED: int +ERROR_REGISTRY_CORRUPT: int +ERROR_REGISTRY_IO_FAILED: int +ERROR_NOT_REGISTRY_FILE: int +ERROR_KEY_DELETED: int +ERROR_NO_LOG_SPACE: int +ERROR_KEY_HAS_CHILDREN: int +ERROR_CHILD_MUST_BE_VOLATILE: int +ERROR_NOTIFY_ENUM_DIR: int +ERROR_DEPENDENT_SERVICES_RUNNING: int +ERROR_INVALID_SERVICE_CONTROL: int +ERROR_SERVICE_REQUEST_TIMEOUT: int +ERROR_SERVICE_NO_THREAD: int +ERROR_SERVICE_DATABASE_LOCKED: int +ERROR_SERVICE_ALREADY_RUNNING: int +ERROR_INVALID_SERVICE_ACCOUNT: int +ERROR_SERVICE_DISABLED: int +ERROR_CIRCULAR_DEPENDENCY: int +ERROR_SERVICE_DOES_NOT_EXIST: int +ERROR_SERVICE_CANNOT_ACCEPT_CTRL: int +ERROR_SERVICE_NOT_ACTIVE: int +ERROR_FAILED_SERVICE_CONTROLLER_CONNECT: int +ERROR_EXCEPTION_IN_SERVICE: int +ERROR_DATABASE_DOES_NOT_EXIST: int +ERROR_SERVICE_SPECIFIC_ERROR: int +ERROR_PROCESS_ABORTED: int +ERROR_SERVICE_DEPENDENCY_FAIL: int +ERROR_SERVICE_LOGON_FAILED: int +ERROR_SERVICE_START_HANG: int +ERROR_INVALID_SERVICE_LOCK: int +ERROR_SERVICE_MARKED_FOR_DELETE: int +ERROR_SERVICE_EXISTS: int +ERROR_ALREADY_RUNNING_LKG: int +ERROR_SERVICE_DEPENDENCY_DELETED: int +ERROR_BOOT_ALREADY_ACCEPTED: int +ERROR_SERVICE_NEVER_STARTED: int +ERROR_DUPLICATE_SERVICE_NAME: int +ERROR_DIFFERENT_SERVICE_ACCOUNT: int +ERROR_CANNOT_DETECT_DRIVER_FAILURE: int +ERROR_CANNOT_DETECT_PROCESS_ABORT: int +ERROR_NO_RECOVERY_PROGRAM: int +ERROR_SERVICE_NOT_IN_EXE: int +ERROR_NOT_SAFEBOOT_SERVICE: int +ERROR_END_OF_MEDIA: int +ERROR_FILEMARK_DETECTED: int +ERROR_BEGINNING_OF_MEDIA: int +ERROR_SETMARK_DETECTED: int +ERROR_NO_DATA_DETECTED: int +ERROR_PARTITION_FAILURE: int 
+ERROR_INVALID_BLOCK_LENGTH: int +ERROR_DEVICE_NOT_PARTITIONED: int +ERROR_UNABLE_TO_LOCK_MEDIA: int +ERROR_UNABLE_TO_UNLOAD_MEDIA: int +ERROR_MEDIA_CHANGED: int +ERROR_BUS_RESET: int +ERROR_NO_MEDIA_IN_DRIVE: int +ERROR_NO_UNICODE_TRANSLATION: int +ERROR_DLL_INIT_FAILED: int +ERROR_SHUTDOWN_IN_PROGRESS: int +ERROR_NO_SHUTDOWN_IN_PROGRESS: int +ERROR_IO_DEVICE: int +ERROR_SERIAL_NO_DEVICE: int +ERROR_IRQ_BUSY: int +ERROR_MORE_WRITES: int +ERROR_COUNTER_TIMEOUT: int +ERROR_FLOPPY_ID_MARK_NOT_FOUND: int +ERROR_FLOPPY_WRONG_CYLINDER: int +ERROR_FLOPPY_UNKNOWN_ERROR: int +ERROR_FLOPPY_BAD_REGISTERS: int +ERROR_DISK_RECALIBRATE_FAILED: int +ERROR_DISK_OPERATION_FAILED: int +ERROR_DISK_RESET_FAILED: int +ERROR_EOM_OVERFLOW: int +ERROR_NOT_ENOUGH_SERVER_MEMORY: int +ERROR_POSSIBLE_DEADLOCK: int +ERROR_MAPPED_ALIGNMENT: int +ERROR_SET_POWER_STATE_VETOED: int +ERROR_SET_POWER_STATE_FAILED: int +ERROR_TOO_MANY_LINKS: int +ERROR_OLD_WIN_VERSION: int +ERROR_APP_WRONG_OS: int +ERROR_SINGLE_INSTANCE_APP: int +ERROR_RMODE_APP: int +ERROR_INVALID_DLL: int +ERROR_NO_ASSOCIATION: int +ERROR_DDE_FAIL: int +ERROR_DLL_NOT_FOUND: int +ERROR_NO_MORE_USER_HANDLES: int +ERROR_MESSAGE_SYNC_ONLY: int +ERROR_SOURCE_ELEMENT_EMPTY: int +ERROR_DESTINATION_ELEMENT_FULL: int +ERROR_ILLEGAL_ELEMENT_ADDRESS: int +ERROR_MAGAZINE_NOT_PRESENT: int +ERROR_DEVICE_REINITIALIZATION_NEEDED: int +ERROR_DEVICE_REQUIRES_CLEANING: int +ERROR_DEVICE_DOOR_OPEN: int +ERROR_DEVICE_NOT_CONNECTED: int +ERROR_NOT_FOUND: int +ERROR_NO_MATCH: int +ERROR_SET_NOT_FOUND: int +ERROR_POINT_NOT_FOUND: int +ERROR_NO_TRACKING_SERVICE: int +ERROR_NO_VOLUME_ID: int +ERROR_CONNECTED_OTHER_PASSWORD: int +ERROR_BAD_USERNAME: int +ERROR_NOT_CONNECTED: int +ERROR_OPEN_FILES: int +ERROR_ACTIVE_CONNECTIONS: int +ERROR_DEVICE_IN_USE: int +ERROR_BAD_DEVICE: int +ERROR_CONNECTION_UNAVAIL: int +ERROR_DEVICE_ALREADY_REMEMBERED: int +ERROR_NO_NET_OR_BAD_PATH: int +ERROR_BAD_PROVIDER: int +ERROR_CANNOT_OPEN_PROFILE: int +ERROR_BAD_PROFILE: int +ERROR_NOT_CONTAINER: int +ERROR_EXTENDED_ERROR: int +ERROR_INVALID_GROUPNAME: int +ERROR_INVALID_COMPUTERNAME: int +ERROR_INVALID_EVENTNAME: int +ERROR_INVALID_DOMAINNAME: int +ERROR_INVALID_SERVICENAME: int +ERROR_INVALID_NETNAME: int +ERROR_INVALID_SHARENAME: int +ERROR_INVALID_PASSWORDNAME: int +ERROR_INVALID_MESSAGENAME: int +ERROR_INVALID_MESSAGEDEST: int +ERROR_SESSION_CREDENTIAL_CONFLICT: int +ERROR_REMOTE_SESSION_LIMIT_EXCEEDED: int +ERROR_DUP_DOMAINNAME: int +ERROR_NO_NETWORK: int +ERROR_CANCELLED: int +ERROR_USER_MAPPED_FILE: int +ERROR_CONNECTION_REFUSED: int +ERROR_GRACEFUL_DISCONNECT: int +ERROR_ADDRESS_ALREADY_ASSOCIATED: int +ERROR_ADDRESS_NOT_ASSOCIATED: int +ERROR_CONNECTION_INVALID: int +ERROR_CONNECTION_ACTIVE: int +ERROR_NETWORK_UNREACHABLE: int +ERROR_HOST_UNREACHABLE: int +ERROR_PROTOCOL_UNREACHABLE: int +ERROR_PORT_UNREACHABLE: int +ERROR_REQUEST_ABORTED: int +ERROR_CONNECTION_ABORTED: int +ERROR_RETRY: int +ERROR_CONNECTION_COUNT_LIMIT: int +ERROR_LOGIN_TIME_RESTRICTION: int +ERROR_LOGIN_WKSTA_RESTRICTION: int +ERROR_INCORRECT_ADDRESS: int +ERROR_ALREADY_REGISTERED: int +ERROR_SERVICE_NOT_FOUND: int +ERROR_NOT_AUTHENTICATED: int +ERROR_NOT_LOGGED_ON: int +ERROR_CONTINUE: int +ERROR_ALREADY_INITIALIZED: int +ERROR_NO_MORE_DEVICES: int +ERROR_NO_SUCH_SITE: int +ERROR_DOMAIN_CONTROLLER_EXISTS: int +ERROR_DS_NOT_INSTALLED: int +ERROR_NOT_ALL_ASSIGNED: int +ERROR_SOME_NOT_MAPPED: int +ERROR_NO_QUOTAS_FOR_ACCOUNT: int +ERROR_LOCAL_USER_SESSION_KEY: int +ERROR_NULL_LM_PASSWORD: int +ERROR_UNKNOWN_REVISION: int 
+ERROR_REVISION_MISMATCH: int +ERROR_INVALID_OWNER: int +ERROR_INVALID_PRIMARY_GROUP: int +ERROR_NO_IMPERSONATION_TOKEN: int +ERROR_CANT_DISABLE_MANDATORY: int +ERROR_NO_LOGON_SERVERS: int +ERROR_NO_SUCH_LOGON_SESSION: int +ERROR_NO_SUCH_PRIVILEGE: int +ERROR_PRIVILEGE_NOT_HELD: int +ERROR_INVALID_ACCOUNT_NAME: int +ERROR_USER_EXISTS: int +ERROR_NO_SUCH_USER: int +ERROR_GROUP_EXISTS: int +ERROR_NO_SUCH_GROUP: int +ERROR_MEMBER_IN_GROUP: int +ERROR_MEMBER_NOT_IN_GROUP: int +ERROR_LAST_ADMIN: int +ERROR_WRONG_PASSWORD: int +ERROR_ILL_FORMED_PASSWORD: int +ERROR_PASSWORD_RESTRICTION: int +ERROR_LOGON_FAILURE: int +ERROR_ACCOUNT_RESTRICTION: int +ERROR_INVALID_LOGON_HOURS: int +ERROR_INVALID_WORKSTATION: int +ERROR_PASSWORD_EXPIRED: int +ERROR_ACCOUNT_DISABLED: int +ERROR_NONE_MAPPED: int +ERROR_TOO_MANY_LUIDS_REQUESTED: int +ERROR_LUIDS_EXHAUSTED: int +ERROR_INVALID_SUB_AUTHORITY: int +ERROR_INVALID_ACL: int +ERROR_INVALID_SID: int +ERROR_INVALID_SECURITY_DESCR: int +ERROR_BAD_INHERITANCE_ACL: int +ERROR_SERVER_DISABLED: int +ERROR_SERVER_NOT_DISABLED: int +ERROR_INVALID_ID_AUTHORITY: int +ERROR_ALLOTTED_SPACE_EXCEEDED: int +ERROR_INVALID_GROUP_ATTRIBUTES: int +ERROR_BAD_IMPERSONATION_LEVEL: int +ERROR_CANT_OPEN_ANONYMOUS: int +ERROR_BAD_VALIDATION_CLASS: int +ERROR_BAD_TOKEN_TYPE: int +ERROR_NO_SECURITY_ON_OBJECT: int +ERROR_CANT_ACCESS_DOMAIN_INFO: int +ERROR_INVALID_SERVER_STATE: int +ERROR_INVALID_DOMAIN_STATE: int +ERROR_INVALID_DOMAIN_ROLE: int +ERROR_NO_SUCH_DOMAIN: int +ERROR_DOMAIN_EXISTS: int +ERROR_DOMAIN_LIMIT_EXCEEDED: int +ERROR_INTERNAL_DB_CORRUPTION: int +ERROR_INTERNAL_ERROR: int +ERROR_GENERIC_NOT_MAPPED: int +ERROR_BAD_DESCRIPTOR_FORMAT: int +ERROR_NOT_LOGON_PROCESS: int +ERROR_LOGON_SESSION_EXISTS: int +ERROR_NO_SUCH_PACKAGE: int +ERROR_BAD_LOGON_SESSION_STATE: int +ERROR_LOGON_SESSION_COLLISION: int +ERROR_INVALID_LOGON_TYPE: int +ERROR_CANNOT_IMPERSONATE: int +ERROR_RXACT_INVALID_STATE: int +ERROR_RXACT_COMMIT_FAILURE: int +ERROR_SPECIAL_ACCOUNT: int +ERROR_SPECIAL_GROUP: int +ERROR_SPECIAL_USER: int +ERROR_MEMBERS_PRIMARY_GROUP: int +ERROR_TOKEN_ALREADY_IN_USE: int +ERROR_NO_SUCH_ALIAS: int +ERROR_MEMBER_NOT_IN_ALIAS: int +ERROR_MEMBER_IN_ALIAS: int +ERROR_ALIAS_EXISTS: int +ERROR_LOGON_NOT_GRANTED: int +ERROR_TOO_MANY_SECRETS: int +ERROR_SECRET_TOO_LONG: int +ERROR_INTERNAL_DB_ERROR: int +ERROR_TOO_MANY_CONTEXT_IDS: int +ERROR_LOGON_TYPE_NOT_GRANTED: int +ERROR_NT_CROSS_ENCRYPTION_REQUIRED: int +ERROR_NO_SUCH_MEMBER: int +ERROR_INVALID_MEMBER: int +ERROR_TOO_MANY_SIDS: int +ERROR_LM_CROSS_ENCRYPTION_REQUIRED: int +ERROR_NO_INHERITANCE: int +ERROR_FILE_CORRUPT: int +ERROR_DISK_CORRUPT: int +ERROR_NO_USER_SESSION_KEY: int +ERROR_LICENSE_QUOTA_EXCEEDED: int +ERROR_INVALID_WINDOW_HANDLE: int +ERROR_INVALID_MENU_HANDLE: int +ERROR_INVALID_CURSOR_HANDLE: int +ERROR_INVALID_ACCEL_HANDLE: int +ERROR_INVALID_HOOK_HANDLE: int +ERROR_INVALID_DWP_HANDLE: int +ERROR_TLW_WITH_WSCHILD: int +ERROR_CANNOT_FIND_WND_CLASS: int +ERROR_WINDOW_OF_OTHER_THREAD: int +ERROR_HOTKEY_ALREADY_REGISTERED: int +ERROR_CLASS_ALREADY_EXISTS: int +ERROR_CLASS_DOES_NOT_EXIST: int +ERROR_CLASS_HAS_WINDOWS: int +ERROR_INVALID_INDEX: int +ERROR_INVALID_ICON_HANDLE: int +ERROR_PRIVATE_DIALOG_INDEX: int +ERROR_LISTBOX_ID_NOT_FOUND: int +ERROR_NO_WILDCARD_CHARACTERS: int +ERROR_CLIPBOARD_NOT_OPEN: int +ERROR_HOTKEY_NOT_REGISTERED: int +ERROR_WINDOW_NOT_DIALOG: int +ERROR_CONTROL_ID_NOT_FOUND: int +ERROR_INVALID_COMBOBOX_MESSAGE: int +ERROR_WINDOW_NOT_COMBOBOX: int +ERROR_INVALID_EDIT_HEIGHT: int 
+ERROR_DC_NOT_FOUND: int +ERROR_INVALID_HOOK_FILTER: int +ERROR_INVALID_FILTER_PROC: int +ERROR_HOOK_NEEDS_HMOD: int +ERROR_GLOBAL_ONLY_HOOK: int +ERROR_JOURNAL_HOOK_SET: int +ERROR_HOOK_NOT_INSTALLED: int +ERROR_INVALID_LB_MESSAGE: int +ERROR_SETCOUNT_ON_BAD_LB: int +ERROR_LB_WITHOUT_TABSTOPS: int +ERROR_DESTROY_OBJECT_OF_OTHER_THREAD: int +ERROR_CHILD_WINDOW_MENU: int +ERROR_NO_SYSTEM_MENU: int +ERROR_INVALID_MSGBOX_STYLE: int +ERROR_INVALID_SPI_VALUE: int +ERROR_SCREEN_ALREADY_LOCKED: int +ERROR_HWNDS_HAVE_DIFF_PARENT: int +ERROR_NOT_CHILD_WINDOW: int +ERROR_INVALID_GW_COMMAND: int +ERROR_INVALID_THREAD_ID: int +ERROR_NON_MDICHILD_WINDOW: int +ERROR_POPUP_ALREADY_ACTIVE: int +ERROR_NO_SCROLLBARS: int +ERROR_INVALID_SCROLLBAR_RANGE: int +ERROR_INVALID_SHOWWIN_COMMAND: int +ERROR_NO_SYSTEM_RESOURCES: int +ERROR_NONPAGED_SYSTEM_RESOURCES: int +ERROR_PAGED_SYSTEM_RESOURCES: int +ERROR_WORKING_SET_QUOTA: int +ERROR_PAGEFILE_QUOTA: int +ERROR_COMMITMENT_LIMIT: int +ERROR_MENU_ITEM_NOT_FOUND: int +ERROR_INVALID_KEYBOARD_HANDLE: int +ERROR_HOOK_TYPE_NOT_ALLOWED: int +ERROR_REQUIRES_INTERACTIVE_WINDOWSTATION: int +ERROR_TIMEOUT: int +ERROR_INVALID_MONITOR_HANDLE: int +ERROR_INCORRECT_SIZE: int +ERROR_SYMLINK_CLASS_DISABLED: int +ERROR_SYMLINK_NOT_SUPPORTED: int +ERROR_XML_PARSE_ERROR: int +ERROR_XMLDSIG_ERROR: int +ERROR_RESTART_APPLICATION: int +ERROR_WRONG_COMPARTMENT: int +ERROR_AUTHIP_FAILURE: int +ERROR_EVENTLOG_FILE_CORRUPT: int +ERROR_EVENTLOG_CANT_START: int +ERROR_LOG_FILE_FULL: int +ERROR_EVENTLOG_FILE_CHANGED: int +ERROR_INSTALL_SERVICE: int +ERROR_INSTALL_USEREXIT: int +ERROR_INSTALL_FAILURE: int +ERROR_INSTALL_SUSPEND: int +ERROR_UNKNOWN_PRODUCT: int +ERROR_UNKNOWN_FEATURE: int +ERROR_UNKNOWN_COMPONENT: int +ERROR_UNKNOWN_PROPERTY: int +ERROR_INVALID_HANDLE_STATE: int +ERROR_BAD_CONFIGURATION: int +ERROR_INDEX_ABSENT: int +ERROR_INSTALL_SOURCE_ABSENT: int +ERROR_BAD_DATABASE_VERSION: int +ERROR_PRODUCT_UNINSTALLED: int +ERROR_BAD_QUERY_SYNTAX: int +ERROR_INVALID_FIELD: int +ERROR_DEVICE_REMOVED: int +ERROR_INSTALL_ALREADY_RUNNING: int +ERROR_INSTALL_PACKAGE_OPEN_FAILED: int +ERROR_INSTALL_PACKAGE_INVALID: int +ERROR_INSTALL_UI_FAILURE: int +ERROR_INSTALL_LOG_FAILURE: int +ERROR_INSTALL_LANGUAGE_UNSUPPORTED: int +ERROR_INSTALL_TRANSFORM_FAILURE: int +ERROR_INSTALL_PACKAGE_REJECTED: int +ERROR_FUNCTION_NOT_CALLED: int +ERROR_FUNCTION_FAILED: int +ERROR_INVALID_TABLE: int +ERROR_DATATYPE_MISMATCH: int +ERROR_UNSUPPORTED_TYPE: int +ERROR_CREATE_FAILED: int +ERROR_INSTALL_TEMP_UNWRITABLE: int +ERROR_INSTALL_PLATFORM_UNSUPPORTED: int +ERROR_INSTALL_NOTUSED: int +ERROR_PATCH_PACKAGE_OPEN_FAILED: int +ERROR_PATCH_PACKAGE_INVALID: int +ERROR_PATCH_PACKAGE_UNSUPPORTED: int +ERROR_PRODUCT_VERSION: int +ERROR_INVALID_COMMAND_LINE: int +ERROR_INSTALL_REMOTE_DISALLOWED: int +ERROR_SUCCESS_REBOOT_INITIATED: int +ERROR_PATCH_TARGET_NOT_FOUND: int +ERROR_PATCH_PACKAGE_REJECTED: int +ERROR_INSTALL_TRANSFORM_REJECTED: int +ERROR_INSTALL_REMOTE_PROHIBITED: int +ERROR_PATCH_REMOVAL_UNSUPPORTED: int +ERROR_UNKNOWN_PATCH: int +ERROR_PATCH_NO_SEQUENCE: int +ERROR_PATCH_REMOVAL_DISALLOWED: int +ERROR_INVALID_PATCH_XML: int +ERROR_PATCH_MANAGED_ADVERTISED_PRODUCT: int +ERROR_INSTALL_SERVICE_SAFEBOOT: int +RPC_S_INVALID_STRING_BINDING: int +RPC_S_WRONG_KIND_OF_BINDING: int +RPC_S_INVALID_BINDING: int +RPC_S_PROTSEQ_NOT_SUPPORTED: int +RPC_S_INVALID_RPC_PROTSEQ: int +RPC_S_INVALID_STRING_UUID: int +RPC_S_INVALID_ENDPOINT_FORMAT: int +RPC_S_INVALID_NET_ADDR: int +RPC_S_NO_ENDPOINT_FOUND: int 
+RPC_S_INVALID_TIMEOUT: int +RPC_S_OBJECT_NOT_FOUND: int +RPC_S_ALREADY_REGISTERED: int +RPC_S_TYPE_ALREADY_REGISTERED: int +RPC_S_ALREADY_LISTENING: int +RPC_S_NO_PROTSEQS_REGISTERED: int +RPC_S_NOT_LISTENING: int +RPC_S_UNKNOWN_MGR_TYPE: int +RPC_S_UNKNOWN_IF: int +RPC_S_NO_BINDINGS: int +RPC_S_NO_PROTSEQS: int +RPC_S_CANT_CREATE_ENDPOINT: int +RPC_S_OUT_OF_RESOURCES: int +RPC_S_SERVER_UNAVAILABLE: int +RPC_S_SERVER_TOO_BUSY: int +RPC_S_INVALID_NETWORK_OPTIONS: int +RPC_S_NO_CALL_ACTIVE: int +RPC_S_CALL_FAILED: int +RPC_S_CALL_FAILED_DNE: int +RPC_S_PROTOCOL_ERROR: int +RPC_S_PROXY_ACCESS_DENIED: int +RPC_S_UNSUPPORTED_TRANS_SYN: int +RPC_S_UNSUPPORTED_TYPE: int +RPC_S_INVALID_TAG: int +RPC_S_INVALID_BOUND: int +RPC_S_NO_ENTRY_NAME: int +RPC_S_INVALID_NAME_SYNTAX: int +RPC_S_UNSUPPORTED_NAME_SYNTAX: int +RPC_S_UUID_NO_ADDRESS: int +RPC_S_DUPLICATE_ENDPOINT: int +RPC_S_UNKNOWN_AUTHN_TYPE: int +RPC_S_MAX_CALLS_TOO_SMALL: int +RPC_S_STRING_TOO_LONG: int +RPC_S_PROTSEQ_NOT_FOUND: int +RPC_S_PROCNUM_OUT_OF_RANGE: int +RPC_S_BINDING_HAS_NO_AUTH: int +RPC_S_UNKNOWN_AUTHN_SERVICE: int +RPC_S_UNKNOWN_AUTHN_LEVEL: int +RPC_S_INVALID_AUTH_IDENTITY: int +RPC_S_UNKNOWN_AUTHZ_SERVICE: int +EPT_S_INVALID_ENTRY: int +EPT_S_CANT_PERFORM_OP: int +EPT_S_NOT_REGISTERED: int +RPC_S_NOTHING_TO_EXPORT: int +RPC_S_INCOMPLETE_NAME: int +RPC_S_INVALID_VERS_OPTION: int +RPC_S_NO_MORE_MEMBERS: int +RPC_S_NOT_ALL_OBJS_UNEXPORTED: int +RPC_S_INTERFACE_NOT_FOUND: int +RPC_S_ENTRY_ALREADY_EXISTS: int +RPC_S_ENTRY_NOT_FOUND: int +RPC_S_NAME_SERVICE_UNAVAILABLE: int +RPC_S_INVALID_NAF_ID: int +RPC_S_CANNOT_SUPPORT: int +RPC_S_NO_CONTEXT_AVAILABLE: int +RPC_S_INTERNAL_ERROR: int +RPC_S_ZERO_DIVIDE: int +RPC_S_ADDRESS_ERROR: int +RPC_S_FP_DIV_ZERO: int +RPC_S_FP_UNDERFLOW: int +RPC_S_FP_OVERFLOW: int +RPC_X_NO_MORE_ENTRIES: int +RPC_X_SS_CHAR_TRANS_OPEN_FAIL: int +RPC_X_SS_CHAR_TRANS_SHORT_FILE: int +RPC_X_SS_IN_NULL_CONTEXT: int +RPC_X_SS_CONTEXT_DAMAGED: int +RPC_X_SS_HANDLES_MISMATCH: int +RPC_X_SS_CANNOT_GET_CALL_HANDLE: int +RPC_X_NULL_REF_POINTER: int +RPC_X_ENUM_VALUE_OUT_OF_RANGE: int +RPC_X_BYTE_COUNT_TOO_SMALL: int +RPC_X_BAD_STUB_DATA: int +ERROR_INVALID_USER_BUFFER: int +ERROR_UNRECOGNIZED_MEDIA: int +ERROR_NO_TRUST_LSA_SECRET: int +ERROR_NO_TRUST_SAM_ACCOUNT: int +ERROR_TRUSTED_DOMAIN_FAILURE: int +ERROR_TRUSTED_RELATIONSHIP_FAILURE: int +ERROR_TRUST_FAILURE: int +RPC_S_CALL_IN_PROGRESS: int +ERROR_NETLOGON_NOT_STARTED: int +ERROR_ACCOUNT_EXPIRED: int +ERROR_REDIRECTOR_HAS_OPEN_HANDLES: int +ERROR_PRINTER_DRIVER_ALREADY_INSTALLED: int +ERROR_UNKNOWN_PORT: int +ERROR_UNKNOWN_PRINTER_DRIVER: int +ERROR_UNKNOWN_PRINTPROCESSOR: int +ERROR_INVALID_SEPARATOR_FILE: int +ERROR_INVALID_PRIORITY: int +ERROR_INVALID_PRINTER_NAME: int +ERROR_PRINTER_ALREADY_EXISTS: int +ERROR_INVALID_PRINTER_COMMAND: int +ERROR_INVALID_DATATYPE: int +ERROR_INVALID_ENVIRONMENT: int +RPC_S_NO_MORE_BINDINGS: int +ERROR_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT: int +ERROR_NOLOGON_WORKSTATION_TRUST_ACCOUNT: int +ERROR_NOLOGON_SERVER_TRUST_ACCOUNT: int +ERROR_DOMAIN_TRUST_INCONSISTENT: int +ERROR_SERVER_HAS_OPEN_HANDLES: int +ERROR_RESOURCE_DATA_NOT_FOUND: int +ERROR_RESOURCE_TYPE_NOT_FOUND: int +ERROR_RESOURCE_NAME_NOT_FOUND: int +ERROR_RESOURCE_LANG_NOT_FOUND: int +ERROR_NOT_ENOUGH_QUOTA: int +RPC_S_NO_INTERFACES: int +RPC_S_CALL_CANCELLED: int +RPC_S_BINDING_INCOMPLETE: int +RPC_S_COMM_FAILURE: int +RPC_S_UNSUPPORTED_AUTHN_LEVEL: int +RPC_S_NO_PRINC_NAME: int +RPC_S_NOT_RPC_ERROR: int +RPC_S_UUID_LOCAL_ONLY: int +RPC_S_SEC_PKG_ERROR: int 
+RPC_S_NOT_CANCELLED: int +RPC_X_INVALID_ES_ACTION: int +RPC_X_WRONG_ES_VERSION: int +RPC_X_WRONG_STUB_VERSION: int +RPC_X_INVALID_PIPE_OBJECT: int +RPC_X_WRONG_PIPE_ORDER: int +RPC_X_WRONG_PIPE_VERSION: int +RPC_S_GROUP_MEMBER_NOT_FOUND: int +EPT_S_CANT_CREATE: int +RPC_S_INVALID_OBJECT: int +ERROR_INVALID_TIME: int +ERROR_INVALID_FORM_NAME: int +ERROR_INVALID_FORM_SIZE: int +ERROR_ALREADY_WAITING: int +ERROR_PRINTER_DELETED: int +ERROR_INVALID_PRINTER_STATE: int +ERROR_PASSWORD_MUST_CHANGE: int +ERROR_DOMAIN_CONTROLLER_NOT_FOUND: int +ERROR_ACCOUNT_LOCKED_OUT: int +OR_INVALID_OXID: int +OR_INVALID_OID: int +OR_INVALID_SET: int +RPC_S_SEND_INCOMPLETE: int +RPC_S_INVALID_ASYNC_HANDLE: int +RPC_S_INVALID_ASYNC_CALL: int +RPC_X_PIPE_CLOSED: int +RPC_X_PIPE_DISCIPLINE_ERROR: int +RPC_X_PIPE_EMPTY: int +ERROR_NO_SITENAME: int +ERROR_CANT_ACCESS_FILE: int +ERROR_CANT_RESOLVE_FILENAME: int +RPC_S_ENTRY_TYPE_MISMATCH: int +RPC_S_NOT_ALL_OBJS_EXPORTED: int +RPC_S_INTERFACE_NOT_EXPORTED: int +RPC_S_PROFILE_NOT_ADDED: int +RPC_S_PRF_ELT_NOT_ADDED: int +RPC_S_PRF_ELT_NOT_REMOVED: int +RPC_S_GRP_ELT_NOT_ADDED: int +RPC_S_GRP_ELT_NOT_REMOVED: int +ERROR_KM_DRIVER_BLOCKED: int +ERROR_CONTEXT_EXPIRED: int +ERROR_PER_USER_TRUST_QUOTA_EXCEEDED: int +ERROR_ALL_USER_TRUST_QUOTA_EXCEEDED: int +ERROR_USER_DELETE_TRUST_QUOTA_EXCEEDED: int +ERROR_AUTHENTICATION_FIREWALL_FAILED: int +ERROR_REMOTE_PRINT_CONNECTIONS_BLOCKED: int +ERROR_NTLM_BLOCKED: int +ERROR_INVALID_PIXEL_FORMAT: int +ERROR_BAD_DRIVER: int +ERROR_INVALID_WINDOW_STYLE: int +ERROR_METAFILE_NOT_SUPPORTED: int +ERROR_TRANSFORM_NOT_SUPPORTED: int +ERROR_CLIPPING_NOT_SUPPORTED: int +ERROR_INVALID_CMM: int +ERROR_INVALID_PROFILE: int +ERROR_TAG_NOT_FOUND: int +ERROR_TAG_NOT_PRESENT: int +ERROR_DUPLICATE_TAG: int +ERROR_PROFILE_NOT_ASSOCIATED_WITH_DEVICE: int +ERROR_PROFILE_NOT_FOUND: int +ERROR_INVALID_COLORSPACE: int +ERROR_ICM_NOT_ENABLED: int +ERROR_DELETING_ICM_XFORM: int +ERROR_INVALID_TRANSFORM: int +ERROR_COLORSPACE_MISMATCH: int +ERROR_INVALID_COLORINDEX: int +ERROR_PROFILE_DOES_NOT_MATCH_DEVICE: int +ERROR_CONNECTED_OTHER_PASSWORD_DEFAULT: int +ERROR_UNKNOWN_PRINT_MONITOR: int +ERROR_PRINTER_DRIVER_IN_USE: int +ERROR_SPOOL_FILE_NOT_FOUND: int +ERROR_SPL_NO_STARTDOC: int +ERROR_SPL_NO_ADDJOB: int +ERROR_PRINT_PROCESSOR_ALREADY_INSTALLED: int +ERROR_PRINT_MONITOR_ALREADY_INSTALLED: int +ERROR_INVALID_PRINT_MONITOR: int +ERROR_PRINT_MONITOR_IN_USE: int +ERROR_PRINTER_HAS_JOBS_QUEUED: int +ERROR_SUCCESS_REBOOT_REQUIRED: int +ERROR_SUCCESS_RESTART_REQUIRED: int +ERROR_PRINTER_NOT_FOUND: int +ERROR_PRINTER_DRIVER_WARNED: int +ERROR_PRINTER_DRIVER_BLOCKED: int +ERROR_PRINTER_DRIVER_PACKAGE_IN_USE: int +ERROR_CORE_DRIVER_PACKAGE_NOT_FOUND: int +ERROR_FAIL_REBOOT_REQUIRED: int +ERROR_FAIL_REBOOT_INITIATED: int +ERROR_PRINTER_DRIVER_DOWNLOAD_NEEDED: int +ERROR_PRINT_JOB_RESTART_REQUIRED: int +ERROR_IO_REISSUE_AS_CACHED: int +ERROR_WINS_INTERNAL: int +ERROR_CAN_NOT_DEL_LOCAL_WINS: int +ERROR_STATIC_INIT: int +ERROR_INC_BACKUP: int +ERROR_FULL_BACKUP: int +ERROR_REC_NON_EXISTENT: int +ERROR_RPL_NOT_ALLOWED: int +ERROR_DHCP_ADDRESS_CONFLICT: int +ERROR_WMI_GUID_NOT_FOUND: int +ERROR_WMI_INSTANCE_NOT_FOUND: int +ERROR_WMI_ITEMID_NOT_FOUND: int +ERROR_WMI_TRY_AGAIN: int +ERROR_WMI_DP_NOT_FOUND: int +ERROR_WMI_UNRESOLVED_INSTANCE_REF: int +ERROR_WMI_ALREADY_ENABLED: int +ERROR_WMI_GUID_DISCONNECTED: int +ERROR_WMI_SERVER_UNAVAILABLE: int +ERROR_WMI_DP_FAILED: int +ERROR_WMI_INVALID_MOF: int +ERROR_WMI_INVALID_REGINFO: int +ERROR_WMI_ALREADY_DISABLED: int 
+ERROR_WMI_READ_ONLY: int +ERROR_WMI_SET_FAILURE: int +ERROR_INVALID_MEDIA: int +ERROR_INVALID_LIBRARY: int +ERROR_INVALID_MEDIA_POOL: int +ERROR_DRIVE_MEDIA_MISMATCH: int +ERROR_MEDIA_OFFLINE: int +ERROR_LIBRARY_OFFLINE: int +ERROR_EMPTY: int +ERROR_NOT_EMPTY: int +ERROR_MEDIA_UNAVAILABLE: int +ERROR_RESOURCE_DISABLED: int +ERROR_INVALID_CLEANER: int +ERROR_UNABLE_TO_CLEAN: int +ERROR_OBJECT_NOT_FOUND: int +ERROR_DATABASE_FAILURE: int +ERROR_DATABASE_FULL: int +ERROR_MEDIA_INCOMPATIBLE: int +ERROR_RESOURCE_NOT_PRESENT: int +ERROR_INVALID_OPERATION: int +ERROR_MEDIA_NOT_AVAILABLE: int +ERROR_DEVICE_NOT_AVAILABLE: int +ERROR_REQUEST_REFUSED: int +ERROR_INVALID_DRIVE_OBJECT: int +ERROR_LIBRARY_FULL: int +ERROR_MEDIUM_NOT_ACCESSIBLE: int +ERROR_UNABLE_TO_LOAD_MEDIUM: int +ERROR_UNABLE_TO_INVENTORY_DRIVE: int +ERROR_UNABLE_TO_INVENTORY_SLOT: int +ERROR_UNABLE_TO_INVENTORY_TRANSPORT: int +ERROR_TRANSPORT_FULL: int +ERROR_CONTROLLING_IEPORT: int +ERROR_UNABLE_TO_EJECT_MOUNTED_MEDIA: int +ERROR_CLEANER_SLOT_SET: int +ERROR_CLEANER_SLOT_NOT_SET: int +ERROR_CLEANER_CARTRIDGE_SPENT: int +ERROR_UNEXPECTED_OMID: int +ERROR_CANT_DELETE_LAST_ITEM: int +ERROR_MESSAGE_EXCEEDS_MAX_SIZE: int +ERROR_VOLUME_CONTAINS_SYS_FILES: int +ERROR_INDIGENOUS_TYPE: int +ERROR_NO_SUPPORTING_DRIVES: int +ERROR_CLEANER_CARTRIDGE_INSTALLED: int +ERROR_IEPORT_FULL: int +ERROR_FILE_OFFLINE: int +ERROR_REMOTE_STORAGE_NOT_ACTIVE: int +ERROR_REMOTE_STORAGE_MEDIA_ERROR: int +ERROR_NOT_A_REPARSE_POINT: int +ERROR_REPARSE_ATTRIBUTE_CONFLICT: int +ERROR_INVALID_REPARSE_DATA: int +ERROR_REPARSE_TAG_INVALID: int +ERROR_REPARSE_TAG_MISMATCH: int +ERROR_VOLUME_NOT_SIS_ENABLED: int +ERROR_DEPENDENT_RESOURCE_EXISTS: int +ERROR_DEPENDENCY_NOT_FOUND: int +ERROR_DEPENDENCY_ALREADY_EXISTS: int +ERROR_RESOURCE_NOT_ONLINE: int +ERROR_HOST_NODE_NOT_AVAILABLE: int +ERROR_RESOURCE_NOT_AVAILABLE: int +ERROR_RESOURCE_NOT_FOUND: int +ERROR_SHUTDOWN_CLUSTER: int +ERROR_CANT_EVICT_ACTIVE_NODE: int +ERROR_OBJECT_ALREADY_EXISTS: int +ERROR_OBJECT_IN_LIST: int +ERROR_GROUP_NOT_AVAILABLE: int +ERROR_GROUP_NOT_FOUND: int +ERROR_GROUP_NOT_ONLINE: int +ERROR_HOST_NODE_NOT_RESOURCE_OWNER: int +ERROR_HOST_NODE_NOT_GROUP_OWNER: int +ERROR_RESMON_CREATE_FAILED: int +ERROR_RESMON_ONLINE_FAILED: int +ERROR_RESOURCE_ONLINE: int +ERROR_QUORUM_RESOURCE: int +ERROR_NOT_QUORUM_CAPABLE: int +ERROR_CLUSTER_SHUTTING_DOWN: int +ERROR_INVALID_STATE: int +ERROR_RESOURCE_PROPERTIES_STORED: int +ERROR_NOT_QUORUM_CLASS: int +ERROR_CORE_RESOURCE: int +ERROR_QUORUM_RESOURCE_ONLINE_FAILED: int +ERROR_QUORUMLOG_OPEN_FAILED: int +ERROR_CLUSTERLOG_CORRUPT: int +ERROR_CLUSTERLOG_RECORD_EXCEEDS_MAXSIZE: int +ERROR_CLUSTERLOG_EXCEEDS_MAXSIZE: int +ERROR_CLUSTERLOG_CHKPOINT_NOT_FOUND: int +ERROR_CLUSTERLOG_NOT_ENOUGH_SPACE: int +ERROR_QUORUM_OWNER_ALIVE: int +ERROR_NETWORK_NOT_AVAILABLE: int +ERROR_NODE_NOT_AVAILABLE: int +ERROR_ALL_NODES_NOT_AVAILABLE: int +ERROR_RESOURCE_FAILED: int +ERROR_CLUSTER_INVALID_NODE: int +ERROR_CLUSTER_NODE_EXISTS: int +ERROR_CLUSTER_JOIN_IN_PROGRESS: int +ERROR_CLUSTER_NODE_NOT_FOUND: int +ERROR_CLUSTER_LOCAL_NODE_NOT_FOUND: int +ERROR_CLUSTER_NETWORK_EXISTS: int +ERROR_CLUSTER_NETWORK_NOT_FOUND: int +ERROR_CLUSTER_NETINTERFACE_EXISTS: int +ERROR_CLUSTER_NETINTERFACE_NOT_FOUND: int +ERROR_CLUSTER_INVALID_REQUEST: int +ERROR_CLUSTER_INVALID_NETWORK_PROVIDER: int +ERROR_CLUSTER_NODE_DOWN: int +ERROR_CLUSTER_NODE_UNREACHABLE: int +ERROR_CLUSTER_NODE_NOT_MEMBER: int +ERROR_CLUSTER_JOIN_NOT_IN_PROGRESS: int +ERROR_CLUSTER_INVALID_NETWORK: int 
+ERROR_CLUSTER_NODE_UP: int
+ERROR_CLUSTER_IPADDR_IN_USE: int
+ERROR_CLUSTER_NODE_NOT_PAUSED: int
+ERROR_CLUSTER_NO_SECURITY_CONTEXT: int
+ERROR_CLUSTER_NETWORK_NOT_INTERNAL: int
+ERROR_CLUSTER_NODE_ALREADY_UP: int
+ERROR_CLUSTER_NODE_ALREADY_DOWN: int
+ERROR_CLUSTER_NETWORK_ALREADY_ONLINE: int
+ERROR_CLUSTER_NETWORK_ALREADY_OFFLINE: int
+ERROR_CLUSTER_NODE_ALREADY_MEMBER: int
+ERROR_CLUSTER_LAST_INTERNAL_NETWORK: int
+ERROR_CLUSTER_NETWORK_HAS_DEPENDENTS: int
+ERROR_INVALID_OPERATION_ON_QUORUM: int
+ERROR_DEPENDENCY_NOT_ALLOWED: int
+ERROR_CLUSTER_NODE_PAUSED: int
+ERROR_NODE_CANT_HOST_RESOURCE: int
+ERROR_CLUSTER_NODE_NOT_READY: int
+ERROR_CLUSTER_NODE_SHUTTING_DOWN: int
+ERROR_CLUSTER_JOIN_ABORTED: int
+ERROR_CLUSTER_INCOMPATIBLE_VERSIONS: int
+ERROR_CLUSTER_MAXNUM_OF_RESOURCES_EXCEEDED: int
+ERROR_CLUSTER_SYSTEM_CONFIG_CHANGED: int
+ERROR_CLUSTER_RESOURCE_TYPE_NOT_FOUND: int
+ERROR_CLUSTER_RESTYPE_NOT_SUPPORTED: int
+ERROR_CLUSTER_RESNAME_NOT_FOUND: int
+ERROR_CLUSTER_NO_RPC_PACKAGES_REGISTERED: int
+ERROR_CLUSTER_OWNER_NOT_IN_PREFLIST: int
+ERROR_CLUSTER_DATABASE_SEQMISMATCH: int
+ERROR_RESMON_INVALID_STATE: int
+ERROR_CLUSTER_GUM_NOT_LOCKER: int
+ERROR_QUORUM_DISK_NOT_FOUND: int
+ERROR_DATABASE_BACKUP_CORRUPT: int
+ERROR_CLUSTER_NODE_ALREADY_HAS_DFS_ROOT: int
+ERROR_RESOURCE_PROPERTY_UNCHANGEABLE: int
+ERROR_CLUSTER_MEMBERSHIP_INVALID_STATE: int
+ERROR_CLUSTER_QUORUMLOG_NOT_FOUND: int
+ERROR_CLUSTER_MEMBERSHIP_HALT: int
+ERROR_CLUSTER_INSTANCE_ID_MISMATCH: int
+ERROR_CLUSTER_NETWORK_NOT_FOUND_FOR_IP: int
+ERROR_CLUSTER_PROPERTY_DATA_TYPE_MISMATCH: int
+ERROR_CLUSTER_EVICT_WITHOUT_CLEANUP: int
+ERROR_CLUSTER_PARAMETER_MISMATCH: int
+ERROR_NODE_CANNOT_BE_CLUSTERED: int
+ERROR_CLUSTER_WRONG_OS_VERSION: int
+ERROR_CLUSTER_CANT_CREATE_DUP_CLUSTER_NAME: int
+ERROR_CLUSCFG_ALREADY_COMMITTED: int
+ERROR_CLUSCFG_ROLLBACK_FAILED: int
+ERROR_CLUSCFG_SYSTEM_DISK_DRIVE_LETTER_CONFLICT: int
+ERROR_CLUSTER_OLD_VERSION: int
+ERROR_CLUSTER_MISMATCHED_COMPUTER_ACCT_NAME: int
+ERROR_CLUSTER_NO_NET_ADAPTERS: int
+ERROR_CLUSTER_POISONED: int
+ERROR_CLUSTER_GROUP_MOVING: int
+ERROR_CLUSTER_RESOURCE_TYPE_BUSY: int
+ERROR_RESOURCE_CALL_TIMED_OUT: int
+ERROR_INVALID_CLUSTER_IPV6_ADDRESS: int
+ERROR_CLUSTER_INTERNAL_INVALID_FUNCTION: int
+ERROR_CLUSTER_PARAMETER_OUT_OF_BOUNDS: int
+ERROR_CLUSTER_PARTIAL_SEND: int
+ERROR_CLUSTER_REGISTRY_INVALID_FUNCTION: int
+ERROR_CLUSTER_INVALID_STRING_TERMINATION: int
+ERROR_CLUSTER_INVALID_STRING_FORMAT: int
+ERROR_CLUSTER_DATABASE_TRANSACTION_IN_PROGRESS: int
+ERROR_CLUSTER_DATABASE_TRANSACTION_NOT_IN_PROGRESS: int
+ERROR_CLUSTER_NULL_DATA: int
+ERROR_CLUSTER_PARTIAL_READ: int
+ERROR_CLUSTER_PARTIAL_WRITE: int
+ERROR_CLUSTER_CANT_DESERIALIZE_DATA: int
+ERROR_DEPENDENT_RESOURCE_PROPERTY_CONFLICT: int
+ERROR_CLUSTER_NO_QUORUM: int
+ERROR_CLUSTER_INVALID_IPV6_NETWORK: int
+ERROR_CLUSTER_INVALID_IPV6_TUNNEL_NETWORK: int
+ERROR_QUORUM_NOT_ALLOWED_IN_THIS_GROUP: int
+ERROR_DEPENDENCY_TREE_TOO_COMPLEX: int
+ERROR_EXCEPTION_IN_RESOURCE_CALL: int
+ERROR_CLUSTER_RHS_FAILED_INITIALIZATION: int
+ERROR_CLUSTER_NOT_INSTALLED: int
+ERROR_CLUSTER_RESOURCES_MUST_BE_ONLINE_ON_THE_SAME_NODE: int
+ERROR_ENCRYPTION_FAILED: int
+ERROR_DECRYPTION_FAILED: int
+ERROR_FILE_ENCRYPTED: int
+ERROR_NO_RECOVERY_POLICY: int
+ERROR_NO_EFS: int
+ERROR_WRONG_EFS: int
+ERROR_NO_USER_KEYS: int
+ERROR_FILE_NOT_ENCRYPTED: int
+ERROR_NOT_EXPORT_FORMAT: int
+ERROR_FILE_READ_ONLY: int
+ERROR_DIR_EFS_DISALLOWED: int
+ERROR_EFS_SERVER_NOT_TRUSTED: int
+ERROR_BAD_RECOVERY_POLICY: int
+ERROR_EFS_ALG_BLOB_TOO_BIG: int
+ERROR_VOLUME_NOT_SUPPORT_EFS: int
+ERROR_EFS_DISABLED: int
+ERROR_EFS_VERSION_NOT_SUPPORT: int
+ERROR_CS_ENCRYPTION_INVALID_SERVER_RESPONSE: int
+ERROR_CS_ENCRYPTION_UNSUPPORTED_SERVER: int
+ERROR_CS_ENCRYPTION_EXISTING_ENCRYPTED_FILE: int
+ERROR_CS_ENCRYPTION_NEW_ENCRYPTED_FILE: int
+ERROR_CS_ENCRYPTION_FILE_NOT_CSE: int
+ERROR_NO_BROWSER_SERVERS_FOUND: int
+ERROR_LOG_SECTOR_INVALID: int
+ERROR_LOG_SECTOR_PARITY_INVALID: int
+ERROR_LOG_SECTOR_REMAPPED: int
+ERROR_LOG_BLOCK_INCOMPLETE: int
+ERROR_LOG_INVALID_RANGE: int
+ERROR_LOG_BLOCKS_EXHAUSTED: int
+ERROR_LOG_READ_CONTEXT_INVALID: int
+ERROR_LOG_RESTART_INVALID: int
+ERROR_LOG_BLOCK_VERSION: int
+ERROR_LOG_BLOCK_INVALID: int
+ERROR_LOG_READ_MODE_INVALID: int
+ERROR_LOG_NO_RESTART: int
+ERROR_LOG_METADATA_CORRUPT: int
+ERROR_LOG_METADATA_INVALID: int
+ERROR_LOG_METADATA_INCONSISTENT: int
+ERROR_LOG_RESERVATION_INVALID: int
+ERROR_LOG_CANT_DELETE: int
+ERROR_LOG_CONTAINER_LIMIT_EXCEEDED: int
+ERROR_LOG_START_OF_LOG: int
+ERROR_LOG_POLICY_ALREADY_INSTALLED: int
+ERROR_LOG_POLICY_NOT_INSTALLED: int
+ERROR_LOG_POLICY_INVALID: int
+ERROR_LOG_POLICY_CONFLICT: int
+ERROR_LOG_PINNED_ARCHIVE_TAIL: int
+ERROR_LOG_RECORD_NONEXISTENT: int
+ERROR_LOG_RECORDS_RESERVED_INVALID: int
+ERROR_LOG_SPACE_RESERVED_INVALID: int
+ERROR_LOG_TAIL_INVALID: int
+ERROR_LOG_FULL: int
+ERROR_COULD_NOT_RESIZE_LOG: int
+ERROR_LOG_MULTIPLEXED: int
+ERROR_LOG_DEDICATED: int
+ERROR_LOG_ARCHIVE_NOT_IN_PROGRESS: int
+ERROR_LOG_ARCHIVE_IN_PROGRESS: int
+ERROR_LOG_EPHEMERAL: int
+ERROR_LOG_NOT_ENOUGH_CONTAINERS: int
+ERROR_LOG_CLIENT_ALREADY_REGISTERED: int
+ERROR_LOG_CLIENT_NOT_REGISTERED: int
+ERROR_LOG_FULL_HANDLER_IN_PROGRESS: int
+ERROR_LOG_CONTAINER_READ_FAILED: int
+ERROR_LOG_CONTAINER_WRITE_FAILED: int
+ERROR_LOG_CONTAINER_OPEN_FAILED: int
+ERROR_LOG_CONTAINER_STATE_INVALID: int
+ERROR_LOG_STATE_INVALID: int
+ERROR_LOG_PINNED: int
+ERROR_LOG_METADATA_FLUSH_FAILED: int
+ERROR_LOG_INCONSISTENT_SECURITY: int
+ERROR_LOG_APPENDED_FLUSH_FAILED: int
+ERROR_LOG_PINNED_RESERVATION: int
+ERROR_INVALID_TRANSACTION: int
+ERROR_TRANSACTION_NOT_ACTIVE: int
+ERROR_TRANSACTION_REQUEST_NOT_VALID: int
+ERROR_TRANSACTION_NOT_REQUESTED: int
+ERROR_TRANSACTION_ALREADY_ABORTED: int
+ERROR_TRANSACTION_ALREADY_COMMITTED: int
+ERROR_TM_INITIALIZATION_FAILED: int
+ERROR_RESOURCEMANAGER_READ_ONLY: int
+ERROR_TRANSACTION_NOT_JOINED: int
+ERROR_TRANSACTION_SUPERIOR_EXISTS: int
+ERROR_CRM_PROTOCOL_ALREADY_EXISTS: int
+ERROR_TRANSACTION_PROPAGATION_FAILED: int
+ERROR_CRM_PROTOCOL_NOT_FOUND: int
+ERROR_TRANSACTION_INVALID_MARSHALL_BUFFER: int
+ERROR_CURRENT_TRANSACTION_NOT_VALID: int
+ERROR_TRANSACTION_NOT_FOUND: int
+ERROR_RESOURCEMANAGER_NOT_FOUND: int
+ERROR_ENLISTMENT_NOT_FOUND: int
+ERROR_TRANSACTIONMANAGER_NOT_FOUND: int
+ERROR_TRANSACTIONMANAGER_NOT_ONLINE: int
+ERROR_TRANSACTIONMANAGER_RECOVERY_NAME_COLLISION: int
+ERROR_TRANSACTION_NOT_ROOT: int
+ERROR_TRANSACTION_OBJECT_EXPIRED: int
+ERROR_TRANSACTION_RESPONSE_NOT_ENLISTED: int
+ERROR_TRANSACTION_RECORD_TOO_LONG: int
+ERROR_IMPLICIT_TRANSACTION_NOT_SUPPORTED: int
+ERROR_TRANSACTION_INTEGRITY_VIOLATED: int
+ERROR_TRANSACTIONAL_CONFLICT: int
+ERROR_RM_NOT_ACTIVE: int
+ERROR_RM_METADATA_CORRUPT: int
+ERROR_DIRECTORY_NOT_RM: int
+ERROR_TRANSACTIONS_UNSUPPORTED_REMOTE: int
+ERROR_LOG_RESIZE_INVALID_SIZE: int
+ERROR_OBJECT_NO_LONGER_EXISTS: int
+ERROR_STREAM_MINIVERSION_NOT_FOUND: int
+ERROR_STREAM_MINIVERSION_NOT_VALID: int
+ERROR_MINIVERSION_INACCESSIBLE_FROM_SPECIFIED_TRANSACTION: int
+ERROR_CANT_OPEN_MINIVERSION_WITH_MODIFY_INTENT: int
+ERROR_CANT_CREATE_MORE_STREAM_MINIVERSIONS: int +ERROR_REMOTE_FILE_VERSION_MISMATCH: int +ERROR_HANDLE_NO_LONGER_VALID: int +ERROR_NO_TXF_METADATA: int +ERROR_LOG_CORRUPTION_DETECTED: int +ERROR_CANT_RECOVER_WITH_HANDLE_OPEN: int +ERROR_RM_DISCONNECTED: int +ERROR_ENLISTMENT_NOT_SUPERIOR: int +ERROR_RECOVERY_NOT_NEEDED: int +ERROR_RM_ALREADY_STARTED: int +ERROR_FILE_IDENTITY_NOT_PERSISTENT: int +ERROR_CANT_BREAK_TRANSACTIONAL_DEPENDENCY: int +ERROR_CANT_CROSS_RM_BOUNDARY: int +ERROR_TXF_DIR_NOT_EMPTY: int +ERROR_INDOUBT_TRANSACTIONS_EXIST: int +ERROR_TM_VOLATILE: int +ERROR_ROLLBACK_TIMER_EXPIRED: int +ERROR_TXF_ATTRIBUTE_CORRUPT: int +ERROR_EFS_NOT_ALLOWED_IN_TRANSACTION: int +ERROR_TRANSACTIONAL_OPEN_NOT_ALLOWED: int +ERROR_LOG_GROWTH_FAILED: int +ERROR_TRANSACTED_MAPPING_UNSUPPORTED_REMOTE: int +ERROR_TXF_METADATA_ALREADY_PRESENT: int +ERROR_TRANSACTION_SCOPE_CALLBACKS_NOT_SET: int +ERROR_TRANSACTION_REQUIRED_PROMOTION: int +ERROR_CANNOT_EXECUTE_FILE_IN_TRANSACTION: int +ERROR_TRANSACTIONS_NOT_FROZEN: int +ERROR_TRANSACTION_FREEZE_IN_PROGRESS: int +ERROR_NOT_SNAPSHOT_VOLUME: int +ERROR_NO_SAVEPOINT_WITH_OPEN_FILES: int +ERROR_DATA_LOST_REPAIR: int +ERROR_SPARSE_NOT_ALLOWED_IN_TRANSACTION: int +ERROR_TM_IDENTITY_MISMATCH: int +ERROR_FLOATED_SECTION: int +ERROR_CANNOT_ACCEPT_TRANSACTED_WORK: int +ERROR_CANNOT_ABORT_TRANSACTIONS: int +ERROR_BAD_CLUSTERS: int +ERROR_COMPRESSION_NOT_ALLOWED_IN_TRANSACTION: int +ERROR_VOLUME_DIRTY: int +ERROR_NO_LINK_TRACKING_IN_TRANSACTION: int +ERROR_OPERATION_NOT_SUPPORTED_IN_TRANSACTION: int +ERROR_CTX_WINSTATION_NAME_INVALID: int +ERROR_CTX_INVALID_PD: int +ERROR_CTX_PD_NOT_FOUND: int +ERROR_CTX_WD_NOT_FOUND: int +ERROR_CTX_CANNOT_MAKE_EVENTLOG_ENTRY: int +ERROR_CTX_SERVICE_NAME_COLLISION: int +ERROR_CTX_CLOSE_PENDING: int +ERROR_CTX_NO_OUTBUF: int +ERROR_CTX_MODEM_INF_NOT_FOUND: int +ERROR_CTX_INVALID_MODEMNAME: int +ERROR_CTX_MODEM_RESPONSE_ERROR: int +ERROR_CTX_MODEM_RESPONSE_TIMEOUT: int +ERROR_CTX_MODEM_RESPONSE_NO_CARRIER: int +ERROR_CTX_MODEM_RESPONSE_NO_DIALTONE: int +ERROR_CTX_MODEM_RESPONSE_BUSY: int +ERROR_CTX_MODEM_RESPONSE_VOICE: int +ERROR_CTX_TD_ERROR: int +ERROR_CTX_WINSTATION_NOT_FOUND: int +ERROR_CTX_WINSTATION_ALREADY_EXISTS: int +ERROR_CTX_WINSTATION_BUSY: int +ERROR_CTX_BAD_VIDEO_MODE: int +ERROR_CTX_GRAPHICS_INVALID: int +ERROR_CTX_LOGON_DISABLED: int +ERROR_CTX_NOT_CONSOLE: int +ERROR_CTX_CLIENT_QUERY_TIMEOUT: int +ERROR_CTX_CONSOLE_DISCONNECT: int +ERROR_CTX_CONSOLE_CONNECT: int +ERROR_CTX_SHADOW_DENIED: int +ERROR_CTX_WINSTATION_ACCESS_DENIED: int +ERROR_CTX_INVALID_WD: int +ERROR_CTX_SHADOW_INVALID: int +ERROR_CTX_SHADOW_DISABLED: int +ERROR_CTX_CLIENT_LICENSE_IN_USE: int +ERROR_CTX_CLIENT_LICENSE_NOT_SET: int +ERROR_CTX_LICENSE_NOT_AVAILABLE: int +ERROR_CTX_LICENSE_CLIENT_INVALID: int +ERROR_CTX_LICENSE_EXPIRED: int +ERROR_CTX_SHADOW_NOT_RUNNING: int +ERROR_CTX_SHADOW_ENDED_BY_MODE_CHANGE: int +ERROR_ACTIVATION_COUNT_EXCEEDED: int +ERROR_CTX_WINSTATIONS_DISABLED: int +ERROR_CTX_ENCRYPTION_LEVEL_REQUIRED: int +ERROR_CTX_SESSION_IN_USE: int +ERROR_CTX_NO_FORCE_LOGOFF: int +ERROR_CTX_ACCOUNT_RESTRICTION: int +ERROR_RDP_PROTOCOL_ERROR: int +ERROR_CTX_CDM_CONNECT: int +ERROR_CTX_CDM_DISCONNECT: int +ERROR_CTX_SECURITY_LAYER_ERROR: int +ERROR_TS_INCOMPATIBLE_SESSIONS: int +FRS_ERR_INVALID_API_SEQUENCE: int +FRS_ERR_STARTING_SERVICE: int +FRS_ERR_STOPPING_SERVICE: int +FRS_ERR_INTERNAL_API: int +FRS_ERR_INTERNAL: int +FRS_ERR_SERVICE_COMM: int +FRS_ERR_INSUFFICIENT_PRIV: int +FRS_ERR_AUTHENTICATION: int 
+FRS_ERR_PARENT_INSUFFICIENT_PRIV: int +FRS_ERR_PARENT_AUTHENTICATION: int +FRS_ERR_CHILD_TO_PARENT_COMM: int +FRS_ERR_PARENT_TO_CHILD_COMM: int +FRS_ERR_SYSVOL_POPULATE: int +FRS_ERR_SYSVOL_POPULATE_TIMEOUT: int +FRS_ERR_SYSVOL_IS_BUSY: int +FRS_ERR_SYSVOL_DEMOTE: int +FRS_ERR_INVALID_SERVICE_PARAMETER: int +DS_S_SUCCESS: int +ERROR_DS_MEMBERSHIP_EVALUATED_LOCALLY: int +ERROR_DS_NO_ATTRIBUTE_OR_VALUE: int +ERROR_DS_INVALID_ATTRIBUTE_SYNTAX: int +ERROR_DS_ATTRIBUTE_TYPE_UNDEFINED: int +ERROR_DS_ATTRIBUTE_OR_VALUE_EXISTS: int +ERROR_DS_BUSY: int +ERROR_DS_UNAVAILABLE: int +ERROR_DS_NO_RIDS_ALLOCATED: int +ERROR_DS_NO_MORE_RIDS: int +ERROR_DS_INCORRECT_ROLE_OWNER: int +ERROR_DS_RIDMGR_INIT_ERROR: int +ERROR_DS_OBJ_CLASS_VIOLATION: int +ERROR_DS_CANT_ON_NON_LEAF: int +ERROR_DS_CANT_ON_RDN: int +ERROR_DS_CANT_MOD_OBJ_CLASS: int +ERROR_DS_CROSS_DOM_MOVE_ERROR: int +ERROR_DS_GC_NOT_AVAILABLE: int +ERROR_SHARED_POLICY: int +ERROR_POLICY_OBJECT_NOT_FOUND: int +ERROR_POLICY_ONLY_IN_DS: int +ERROR_PROMOTION_ACTIVE: int +ERROR_NO_PROMOTION_ACTIVE: int +ERROR_DS_OPERATIONS_ERROR: int +ERROR_DS_PROTOCOL_ERROR: int +ERROR_DS_TIMELIMIT_EXCEEDED: int +ERROR_DS_SIZELIMIT_EXCEEDED: int +ERROR_DS_ADMIN_LIMIT_EXCEEDED: int +ERROR_DS_COMPARE_FALSE: int +ERROR_DS_COMPARE_TRUE: int +ERROR_DS_AUTH_METHOD_NOT_SUPPORTED: int +ERROR_DS_STRONG_AUTH_REQUIRED: int +ERROR_DS_INAPPROPRIATE_AUTH: int +ERROR_DS_AUTH_UNKNOWN: int +ERROR_DS_REFERRAL: int +ERROR_DS_UNAVAILABLE_CRIT_EXTENSION: int +ERROR_DS_CONFIDENTIALITY_REQUIRED: int +ERROR_DS_INAPPROPRIATE_MATCHING: int +ERROR_DS_CONSTRAINT_VIOLATION: int +ERROR_DS_NO_SUCH_OBJECT: int +ERROR_DS_ALIAS_PROBLEM: int +ERROR_DS_INVALID_DN_SYNTAX: int +ERROR_DS_IS_LEAF: int +ERROR_DS_ALIAS_DEREF_PROBLEM: int +ERROR_DS_UNWILLING_TO_PERFORM: int +ERROR_DS_LOOP_DETECT: int +ERROR_DS_NAMING_VIOLATION: int +ERROR_DS_OBJECT_RESULTS_TOO_LARGE: int +ERROR_DS_AFFECTS_MULTIPLE_DSAS: int +ERROR_DS_SERVER_DOWN: int +ERROR_DS_LOCAL_ERROR: int +ERROR_DS_ENCODING_ERROR: int +ERROR_DS_DECODING_ERROR: int +ERROR_DS_FILTER_UNKNOWN: int +ERROR_DS_PARAM_ERROR: int +ERROR_DS_NOT_SUPPORTED: int +ERROR_DS_NO_RESULTS_RETURNED: int +ERROR_DS_CONTROL_NOT_FOUND: int +ERROR_DS_CLIENT_LOOP: int +ERROR_DS_REFERRAL_LIMIT_EXCEEDED: int +ERROR_DS_SORT_CONTROL_MISSING: int +ERROR_DS_OFFSET_RANGE_ERROR: int +ERROR_DS_ROOT_MUST_BE_NC: int +ERROR_DS_ADD_REPLICA_INHIBITED: int +ERROR_DS_ATT_NOT_DEF_IN_SCHEMA: int +ERROR_DS_MAX_OBJ_SIZE_EXCEEDED: int +ERROR_DS_OBJ_STRING_NAME_EXISTS: int +ERROR_DS_NO_RDN_DEFINED_IN_SCHEMA: int +ERROR_DS_RDN_DOESNT_MATCH_SCHEMA: int +ERROR_DS_NO_REQUESTED_ATTS_FOUND: int +ERROR_DS_USER_BUFFER_TO_SMALL: int +ERROR_DS_ATT_IS_NOT_ON_OBJ: int +ERROR_DS_ILLEGAL_MOD_OPERATION: int +ERROR_DS_OBJ_TOO_LARGE: int +ERROR_DS_BAD_INSTANCE_TYPE: int +ERROR_DS_MASTERDSA_REQUIRED: int +ERROR_DS_OBJECT_CLASS_REQUIRED: int +ERROR_DS_MISSING_REQUIRED_ATT: int +ERROR_DS_ATT_NOT_DEF_FOR_CLASS: int +ERROR_DS_ATT_ALREADY_EXISTS: int +ERROR_DS_CANT_ADD_ATT_VALUES: int +ERROR_DS_SINGLE_VALUE_CONSTRAINT: int +ERROR_DS_RANGE_CONSTRAINT: int +ERROR_DS_ATT_VAL_ALREADY_EXISTS: int +ERROR_DS_CANT_REM_MISSING_ATT: int +ERROR_DS_CANT_REM_MISSING_ATT_VAL: int +ERROR_DS_ROOT_CANT_BE_SUBREF: int +ERROR_DS_NO_CHAINING: int +ERROR_DS_NO_CHAINED_EVAL: int +ERROR_DS_NO_PARENT_OBJECT: int +ERROR_DS_PARENT_IS_AN_ALIAS: int +ERROR_DS_CANT_MIX_MASTER_AND_REPS: int +ERROR_DS_CHILDREN_EXIST: int +ERROR_DS_OBJ_NOT_FOUND: int +ERROR_DS_ALIASED_OBJ_MISSING: int +ERROR_DS_BAD_NAME_SYNTAX: int +ERROR_DS_ALIAS_POINTS_TO_ALIAS: int 
+ERROR_DS_CANT_DEREF_ALIAS: int +ERROR_DS_OUT_OF_SCOPE: int +ERROR_DS_OBJECT_BEING_REMOVED: int +ERROR_DS_CANT_DELETE_DSA_OBJ: int +ERROR_DS_GENERIC_ERROR: int +ERROR_DS_DSA_MUST_BE_INT_MASTER: int +ERROR_DS_CLASS_NOT_DSA: int +ERROR_DS_INSUFF_ACCESS_RIGHTS: int +ERROR_DS_ILLEGAL_SUPERIOR: int +ERROR_DS_ATTRIBUTE_OWNED_BY_SAM: int +ERROR_DS_NAME_TOO_MANY_PARTS: int +ERROR_DS_NAME_TOO_LONG: int +ERROR_DS_NAME_VALUE_TOO_LONG: int +ERROR_DS_NAME_UNPARSEABLE: int +ERROR_DS_NAME_TYPE_UNKNOWN: int +ERROR_DS_NOT_AN_OBJECT: int +ERROR_DS_SEC_DESC_TOO_SHORT: int +ERROR_DS_SEC_DESC_INVALID: int +ERROR_DS_NO_DELETED_NAME: int +ERROR_DS_SUBREF_MUST_HAVE_PARENT: int +ERROR_DS_NCNAME_MUST_BE_NC: int +ERROR_DS_CANT_ADD_SYSTEM_ONLY: int +ERROR_DS_CLASS_MUST_BE_CONCRETE: int +ERROR_DS_INVALID_DMD: int +ERROR_DS_OBJ_GUID_EXISTS: int +ERROR_DS_NOT_ON_BACKLINK: int +ERROR_DS_NO_CROSSREF_FOR_NC: int +ERROR_DS_SHUTTING_DOWN: int +ERROR_DS_UNKNOWN_OPERATION: int +ERROR_DS_INVALID_ROLE_OWNER: int +ERROR_DS_COULDNT_CONTACT_FSMO: int +ERROR_DS_CROSS_NC_DN_RENAME: int +ERROR_DS_CANT_MOD_SYSTEM_ONLY: int +ERROR_DS_REPLICATOR_ONLY: int +ERROR_DS_OBJ_CLASS_NOT_DEFINED: int +ERROR_DS_OBJ_CLASS_NOT_SUBCLASS: int +ERROR_DS_NAME_REFERENCE_INVALID: int +ERROR_DS_CROSS_REF_EXISTS: int +ERROR_DS_CANT_DEL_MASTER_CROSSREF: int +ERROR_DS_SUBTREE_NOTIFY_NOT_NC_HEAD: int +ERROR_DS_NOTIFY_FILTER_TOO_COMPLEX: int +ERROR_DS_DUP_RDN: int +ERROR_DS_DUP_OID: int +ERROR_DS_DUP_MAPI_ID: int +ERROR_DS_DUP_SCHEMA_ID_GUID: int +ERROR_DS_DUP_LDAP_DISPLAY_NAME: int +ERROR_DS_SEMANTIC_ATT_TEST: int +ERROR_DS_SYNTAX_MISMATCH: int +ERROR_DS_EXISTS_IN_MUST_HAVE: int +ERROR_DS_EXISTS_IN_MAY_HAVE: int +ERROR_DS_NONEXISTENT_MAY_HAVE: int +ERROR_DS_NONEXISTENT_MUST_HAVE: int +ERROR_DS_AUX_CLS_TEST_FAIL: int +ERROR_DS_NONEXISTENT_POSS_SUP: int +ERROR_DS_SUB_CLS_TEST_FAIL: int +ERROR_DS_BAD_RDN_ATT_ID_SYNTAX: int +ERROR_DS_EXISTS_IN_AUX_CLS: int +ERROR_DS_EXISTS_IN_SUB_CLS: int +ERROR_DS_EXISTS_IN_POSS_SUP: int +ERROR_DS_RECALCSCHEMA_FAILED: int +ERROR_DS_TREE_DELETE_NOT_FINISHED: int +ERROR_DS_CANT_DELETE: int +ERROR_DS_ATT_SCHEMA_REQ_ID: int +ERROR_DS_BAD_ATT_SCHEMA_SYNTAX: int +ERROR_DS_CANT_CACHE_ATT: int +ERROR_DS_CANT_CACHE_CLASS: int +ERROR_DS_CANT_REMOVE_ATT_CACHE: int +ERROR_DS_CANT_REMOVE_CLASS_CACHE: int +ERROR_DS_CANT_RETRIEVE_DN: int +ERROR_DS_MISSING_SUPREF: int +ERROR_DS_CANT_RETRIEVE_INSTANCE: int +ERROR_DS_CODE_INCONSISTENCY: int +ERROR_DS_DATABASE_ERROR: int +ERROR_DS_GOVERNSID_MISSING: int +ERROR_DS_MISSING_EXPECTED_ATT: int +ERROR_DS_NCNAME_MISSING_CR_REF: int +ERROR_DS_SECURITY_CHECKING_ERROR: int +ERROR_DS_SCHEMA_NOT_LOADED: int +ERROR_DS_SCHEMA_ALLOC_FAILED: int +ERROR_DS_ATT_SCHEMA_REQ_SYNTAX: int +ERROR_DS_GCVERIFY_ERROR: int +ERROR_DS_DRA_SCHEMA_MISMATCH: int +ERROR_DS_CANT_FIND_DSA_OBJ: int +ERROR_DS_CANT_FIND_EXPECTED_NC: int +ERROR_DS_CANT_FIND_NC_IN_CACHE: int +ERROR_DS_CANT_RETRIEVE_CHILD: int +ERROR_DS_SECURITY_ILLEGAL_MODIFY: int +ERROR_DS_CANT_REPLACE_HIDDEN_REC: int +ERROR_DS_BAD_HIERARCHY_FILE: int +ERROR_DS_BUILD_HIERARCHY_TABLE_FAILED: int +ERROR_DS_CONFIG_PARAM_MISSING: int +ERROR_DS_COUNTING_AB_INDICES_FAILED: int +ERROR_DS_HIERARCHY_TABLE_MALLOC_FAILED: int +ERROR_DS_INTERNAL_FAILURE: int +ERROR_DS_UNKNOWN_ERROR: int +ERROR_DS_ROOT_REQUIRES_CLASS_TOP: int +ERROR_DS_REFUSING_FSMO_ROLES: int +ERROR_DS_MISSING_FSMO_SETTINGS: int +ERROR_DS_UNABLE_TO_SURRENDER_ROLES: int +ERROR_DS_DRA_GENERIC: int +ERROR_DS_DRA_INVALID_PARAMETER: int +ERROR_DS_DRA_BUSY: int +ERROR_DS_DRA_BAD_DN: int +ERROR_DS_DRA_BAD_NC: int 
+ERROR_DS_DRA_DN_EXISTS: int +ERROR_DS_DRA_INTERNAL_ERROR: int +ERROR_DS_DRA_INCONSISTENT_DIT: int +ERROR_DS_DRA_CONNECTION_FAILED: int +ERROR_DS_DRA_BAD_INSTANCE_TYPE: int +ERROR_DS_DRA_OUT_OF_MEM: int +ERROR_DS_DRA_MAIL_PROBLEM: int +ERROR_DS_DRA_REF_ALREADY_EXISTS: int +ERROR_DS_DRA_REF_NOT_FOUND: int +ERROR_DS_DRA_OBJ_IS_REP_SOURCE: int +ERROR_DS_DRA_DB_ERROR: int +ERROR_DS_DRA_NO_REPLICA: int +ERROR_DS_DRA_ACCESS_DENIED: int +ERROR_DS_DRA_NOT_SUPPORTED: int +ERROR_DS_DRA_RPC_CANCELLED: int +ERROR_DS_DRA_SOURCE_DISABLED: int +ERROR_DS_DRA_SINK_DISABLED: int +ERROR_DS_DRA_NAME_COLLISION: int +ERROR_DS_DRA_SOURCE_REINSTALLED: int +ERROR_DS_DRA_MISSING_PARENT: int +ERROR_DS_DRA_PREEMPTED: int +ERROR_DS_DRA_ABANDON_SYNC: int +ERROR_DS_DRA_SHUTDOWN: int +ERROR_DS_DRA_INCOMPATIBLE_PARTIAL_SET: int +ERROR_DS_DRA_SOURCE_IS_PARTIAL_REPLICA: int +ERROR_DS_DRA_EXTN_CONNECTION_FAILED: int +ERROR_DS_INSTALL_SCHEMA_MISMATCH: int +ERROR_DS_DUP_LINK_ID: int +ERROR_DS_NAME_ERROR_RESOLVING: int +ERROR_DS_NAME_ERROR_NOT_FOUND: int +ERROR_DS_NAME_ERROR_NOT_UNIQUE: int +ERROR_DS_NAME_ERROR_NO_MAPPING: int +ERROR_DS_NAME_ERROR_DOMAIN_ONLY: int +ERROR_DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING: int +ERROR_DS_CONSTRUCTED_ATT_MOD: int +ERROR_DS_WRONG_OM_OBJ_CLASS: int +ERROR_DS_DRA_REPL_PENDING: int +ERROR_DS_DS_REQUIRED: int +ERROR_DS_INVALID_LDAP_DISPLAY_NAME: int +ERROR_DS_NON_BASE_SEARCH: int +ERROR_DS_CANT_RETRIEVE_ATTS: int +ERROR_DS_BACKLINK_WITHOUT_LINK: int +ERROR_DS_EPOCH_MISMATCH: int +ERROR_DS_SRC_NAME_MISMATCH: int +ERROR_DS_SRC_AND_DST_NC_IDENTICAL: int +ERROR_DS_DST_NC_MISMATCH: int +ERROR_DS_NOT_AUTHORITIVE_FOR_DST_NC: int +ERROR_DS_SRC_GUID_MISMATCH: int +ERROR_DS_CANT_MOVE_DELETED_OBJECT: int +ERROR_DS_PDC_OPERATION_IN_PROGRESS: int +ERROR_DS_CROSS_DOMAIN_CLEANUP_REQD: int +ERROR_DS_ILLEGAL_XDOM_MOVE_OPERATION: int +ERROR_DS_CANT_WITH_ACCT_GROUP_MEMBERSHPS: int +ERROR_DS_NC_MUST_HAVE_NC_PARENT: int +ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE: int +ERROR_DS_DST_DOMAIN_NOT_NATIVE: int +ERROR_DS_MISSING_INFRASTRUCTURE_CONTAINER: int +ERROR_DS_CANT_MOVE_ACCOUNT_GROUP: int +ERROR_DS_CANT_MOVE_RESOURCE_GROUP: int +ERROR_DS_INVALID_SEARCH_FLAG: int +ERROR_DS_NO_TREE_DELETE_ABOVE_NC: int +ERROR_DS_COULDNT_LOCK_TREE_FOR_DELETE: int +ERROR_DS_COULDNT_IDENTIFY_OBJECTS_FOR_TREE_DELETE: int +ERROR_DS_SAM_INIT_FAILURE: int +ERROR_DS_SENSITIVE_GROUP_VIOLATION: int +ERROR_DS_CANT_MOD_PRIMARYGROUPID: int +ERROR_DS_ILLEGAL_BASE_SCHEMA_MOD: int +ERROR_DS_NONSAFE_SCHEMA_CHANGE: int +ERROR_DS_SCHEMA_UPDATE_DISALLOWED: int +ERROR_DS_CANT_CREATE_UNDER_SCHEMA: int +ERROR_DS_INSTALL_NO_SRC_SCH_VERSION: int +ERROR_DS_INSTALL_NO_SCH_VERSION_IN_INIFILE: int +ERROR_DS_INVALID_GROUP_TYPE: int +ERROR_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN: int +ERROR_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN: int +ERROR_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER: int +ERROR_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER: int +ERROR_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER: int +ERROR_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER: int +ERROR_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER: int +ERROR_DS_HAVE_PRIMARY_MEMBERS: int +ERROR_DS_STRING_SD_CONVERSION_FAILED: int +ERROR_DS_NAMING_MASTER_GC: int +ERROR_DS_DNS_LOOKUP_FAILURE: int +ERROR_DS_COULDNT_UPDATE_SPNS: int +ERROR_DS_CANT_RETRIEVE_SD: int +ERROR_DS_KEY_NOT_UNIQUE: int +ERROR_DS_WRONG_LINKED_ATT_SYNTAX: int +ERROR_DS_SAM_NEED_BOOTKEY_PASSWORD: int +ERROR_DS_SAM_NEED_BOOTKEY_FLOPPY: int +ERROR_DS_CANT_START: int +ERROR_DS_INIT_FAILURE: int +ERROR_DS_NO_PKT_PRIVACY_ON_CONNECTION: int +ERROR_DS_SOURCE_DOMAIN_IN_FOREST: int 
+ERROR_DS_DESTINATION_DOMAIN_NOT_IN_FOREST: int +ERROR_DS_DESTINATION_AUDITING_NOT_ENABLED: int +ERROR_DS_CANT_FIND_DC_FOR_SRC_DOMAIN: int +ERROR_DS_SRC_OBJ_NOT_GROUP_OR_USER: int +ERROR_DS_SRC_SID_EXISTS_IN_FOREST: int +ERROR_DS_SRC_AND_DST_OBJECT_CLASS_MISMATCH: int +ERROR_SAM_INIT_FAILURE: int +ERROR_DS_DRA_SCHEMA_INFO_SHIP: int +ERROR_DS_DRA_SCHEMA_CONFLICT: int +ERROR_DS_DRA_EARLIER_SCHEMA_CONFLICT: int +ERROR_DS_DRA_OBJ_NC_MISMATCH: int +ERROR_DS_NC_STILL_HAS_DSAS: int +ERROR_DS_GC_REQUIRED: int +ERROR_DS_LOCAL_MEMBER_OF_LOCAL_ONLY: int +ERROR_DS_NO_FPO_IN_UNIVERSAL_GROUPS: int +ERROR_DS_CANT_ADD_TO_GC: int +ERROR_DS_NO_CHECKPOINT_WITH_PDC: int +ERROR_DS_SOURCE_AUDITING_NOT_ENABLED: int +ERROR_DS_CANT_CREATE_IN_NONDOMAIN_NC: int +ERROR_DS_INVALID_NAME_FOR_SPN: int +ERROR_DS_FILTER_USES_CONTRUCTED_ATTRS: int +ERROR_DS_UNICODEPWD_NOT_IN_QUOTES: int +ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED: int +ERROR_DS_MUST_BE_RUN_ON_DST_DC: int +ERROR_DS_SRC_DC_MUST_BE_SP4_OR_GREATER: int +ERROR_DS_CANT_TREE_DELETE_CRITICAL_OBJ: int +ERROR_DS_INIT_FAILURE_CONSOLE: int +ERROR_DS_SAM_INIT_FAILURE_CONSOLE: int +ERROR_DS_FOREST_VERSION_TOO_HIGH: int +ERROR_DS_DOMAIN_VERSION_TOO_HIGH: int +ERROR_DS_FOREST_VERSION_TOO_LOW: int +ERROR_DS_DOMAIN_VERSION_TOO_LOW: int +ERROR_DS_INCOMPATIBLE_VERSION: int +ERROR_DS_LOW_DSA_VERSION: int +ERROR_DS_NO_BEHAVIOR_VERSION_IN_MIXEDDOMAIN: int +ERROR_DS_NOT_SUPPORTED_SORT_ORDER: int +ERROR_DS_NAME_NOT_UNIQUE: int +ERROR_DS_MACHINE_ACCOUNT_CREATED_PRENT4: int +ERROR_DS_OUT_OF_VERSION_STORE: int +ERROR_DS_INCOMPATIBLE_CONTROLS_USED: int +ERROR_DS_NO_REF_DOMAIN: int +ERROR_DS_RESERVED_LINK_ID: int +ERROR_DS_LINK_ID_NOT_AVAILABLE: int +ERROR_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER: int +ERROR_DS_MODIFYDN_DISALLOWED_BY_INSTANCE_TYPE: int +ERROR_DS_NO_OBJECT_MOVE_IN_SCHEMA_NC: int +ERROR_DS_MODIFYDN_DISALLOWED_BY_FLAG: int +ERROR_DS_MODIFYDN_WRONG_GRANDPARENT: int +ERROR_DS_NAME_ERROR_TRUST_REFERRAL: int +ERROR_NOT_SUPPORTED_ON_STANDARD_SERVER: int +ERROR_DS_CANT_ACCESS_REMOTE_PART_OF_AD: int +ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE_V2: int +ERROR_DS_THREAD_LIMIT_EXCEEDED: int +ERROR_DS_NOT_CLOSEST: int +ERROR_DS_CANT_DERIVE_SPN_WITHOUT_SERVER_REF: int +ERROR_DS_SINGLE_USER_MODE_FAILED: int +ERROR_DS_NTDSCRIPT_SYNTAX_ERROR: int +ERROR_DS_NTDSCRIPT_PROCESS_ERROR: int +ERROR_DS_DIFFERENT_REPL_EPOCHS: int +ERROR_DS_DRS_EXTENSIONS_CHANGED: int +ERROR_DS_REPLICA_SET_CHANGE_NOT_ALLOWED_ON_DISABLED_CR: int +ERROR_DS_NO_MSDS_INTID: int +ERROR_DS_DUP_MSDS_INTID: int +ERROR_DS_EXISTS_IN_RDNATTID: int +ERROR_DS_AUTHORIZATION_FAILED: int +ERROR_DS_INVALID_SCRIPT: int +ERROR_DS_REMOTE_CROSSREF_OP_FAILED: int +ERROR_DS_CROSS_REF_BUSY: int +ERROR_DS_CANT_DERIVE_SPN_FOR_DELETED_DOMAIN: int +ERROR_DS_CANT_DEMOTE_WITH_WRITEABLE_NC: int +ERROR_DS_DUPLICATE_ID_FOUND: int +ERROR_DS_INSUFFICIENT_ATTR_TO_CREATE_OBJECT: int +ERROR_DS_GROUP_CONVERSION_ERROR: int +ERROR_DS_CANT_MOVE_APP_BASIC_GROUP: int +ERROR_DS_CANT_MOVE_APP_QUERY_GROUP: int +ERROR_DS_ROLE_NOT_VERIFIED: int +ERROR_DS_WKO_CONTAINER_CANNOT_BE_SPECIAL: int +ERROR_DS_DOMAIN_RENAME_IN_PROGRESS: int +ERROR_DS_EXISTING_AD_CHILD_NC: int +ERROR_DS_REPL_LIFETIME_EXCEEDED: int +ERROR_DS_DISALLOWED_IN_SYSTEM_CONTAINER: int +ERROR_DS_LDAP_SEND_QUEUE_FULL: int +ERROR_DS_DRA_OUT_SCHEDULE_WINDOW: int +ERROR_DS_POLICY_NOT_KNOWN: int +ERROR_NO_SITE_SETTINGS_OBJECT: int +ERROR_NO_SECRETS: int +ERROR_NO_WRITABLE_DC_FOUND: int +ERROR_DS_NO_SERVER_OBJECT: int +ERROR_DS_NO_NTDSA_OBJECT: int +ERROR_DS_NON_ASQ_SEARCH: int +ERROR_DS_AUDIT_FAILURE: int 
+ERROR_DS_INVALID_SEARCH_FLAG_SUBTREE: int +ERROR_DS_INVALID_SEARCH_FLAG_TUPLE: int +ERROR_DS_HIERARCHY_TABLE_TOO_DEEP: int +SEVERITY_SUCCESS: int +SEVERITY_ERROR: int + +def HRESULT_FROM_WIN32(scode): ... +def SUCCEEDED(Status): ... +def FAILED(Status): ... +def HRESULT_CODE(hr: int) -> int: ... +def SCODE_CODE(sc): ... +def HRESULT_FACILITY(hr): ... +def SCODE_FACILITY(sc): ... +def HRESULT_SEVERITY(hr): ... +def SCODE_SEVERITY(sc): ... + +FACILITY_NT_BIT: int + +def HRESULT_FROM_NT(x): ... +def GetScode(hr): ... +def ResultFromScode(sc): ... + +NOERROR: int +E_UNEXPECTED: int +E_NOTIMPL: int +E_OUTOFMEMORY: int +E_INVALIDARG: int +E_NOINTERFACE: int +E_POINTER: int +E_HANDLE: int +E_ABORT: int +E_FAIL: int +E_ACCESSDENIED: int +win16_E_NOTIMPL: int +win16_E_OUTOFMEMORY: int +win16_E_INVALIDARG: int +win16_E_NOINTERFACE: int +win16_E_POINTER: int +win16_E_HANDLE: int +win16_E_ABORT: int +win16_E_FAIL: int +win16_E_ACCESSDENIED: int +E_PENDING: int +CO_E_INIT_TLS: int +CO_E_INIT_SHARED_ALLOCATOR: int +CO_E_INIT_MEMORY_ALLOCATOR: int +CO_E_INIT_CLASS_CACHE: int +CO_E_INIT_RPC_CHANNEL: int +CO_E_INIT_TLS_SET_CHANNEL_CONTROL: int +CO_E_INIT_TLS_CHANNEL_CONTROL: int +CO_E_INIT_UNACCEPTED_USER_ALLOCATOR: int +CO_E_INIT_SCM_MUTEX_EXISTS: int +CO_E_INIT_SCM_FILE_MAPPING_EXISTS: int +CO_E_INIT_SCM_MAP_VIEW_OF_FILE: int +CO_E_INIT_SCM_EXEC_FAILURE: int +CO_E_INIT_ONLY_SINGLE_THREADED: int +CO_E_CANT_REMOTE: int +CO_E_BAD_SERVER_NAME: int +CO_E_WRONG_SERVER_IDENTITY: int +CO_E_OLE1DDE_DISABLED: int +CO_E_RUNAS_SYNTAX: int +CO_E_CREATEPROCESS_FAILURE: int +CO_E_RUNAS_CREATEPROCESS_FAILURE: int +CO_E_RUNAS_LOGON_FAILURE: int +CO_E_LAUNCH_PERMSSION_DENIED: int +CO_E_START_SERVICE_FAILURE: int +CO_E_REMOTE_COMMUNICATION_FAILURE: int +CO_E_SERVER_START_TIMEOUT: int +CO_E_CLSREG_INCONSISTENT: int +CO_E_IIDREG_INCONSISTENT: int +CO_E_NOT_SUPPORTED: int +CO_E_RELOAD_DLL: int +CO_E_MSI_ERROR: int +OLE_E_FIRST: int +OLE_E_LAST: int +OLE_S_FIRST: int +OLE_S_LAST: int +OLE_E_OLEVERB: int +OLE_E_ADVF: int +OLE_E_ENUM_NOMORE: int +OLE_E_ADVISENOTSUPPORTED: int +OLE_E_NOCONNECTION: int +OLE_E_NOTRUNNING: int +OLE_E_NOCACHE: int +OLE_E_BLANK: int +OLE_E_CLASSDIFF: int +OLE_E_CANT_GETMONIKER: int +OLE_E_CANT_BINDTOSOURCE: int +OLE_E_STATIC: int +OLE_E_PROMPTSAVECANCELLED: int +OLE_E_INVALIDRECT: int +OLE_E_WRONGCOMPOBJ: int +OLE_E_INVALIDHWND: int +OLE_E_NOT_INPLACEACTIVE: int +OLE_E_CANTCONVERT: int +OLE_E_NOSTORAGE: int +DV_E_FORMATETC: int +DV_E_DVTARGETDEVICE: int +DV_E_STGMEDIUM: int +DV_E_STATDATA: int +DV_E_LINDEX: int +DV_E_TYMED: int +DV_E_CLIPFORMAT: int +DV_E_DVASPECT: int +DV_E_DVTARGETDEVICE_SIZE: int +DV_E_NOIVIEWOBJECT: int +DRAGDROP_E_FIRST: int +DRAGDROP_E_LAST: int +DRAGDROP_S_FIRST: int +DRAGDROP_S_LAST: int +DRAGDROP_E_NOTREGISTERED: int +DRAGDROP_E_ALREADYREGISTERED: int +DRAGDROP_E_INVALIDHWND: int +CLASSFACTORY_E_FIRST: int +CLASSFACTORY_E_LAST: int +CLASSFACTORY_S_FIRST: int +CLASSFACTORY_S_LAST: int +CLASS_E_NOAGGREGATION: int +CLASS_E_CLASSNOTAVAILABLE: int +CLASS_E_NOTLICENSED: int +MARSHAL_E_FIRST: int +MARSHAL_E_LAST: int +MARSHAL_S_FIRST: int +MARSHAL_S_LAST: int +DATA_E_FIRST: int +DATA_E_LAST: int +DATA_S_FIRST: int +DATA_S_LAST: int +VIEW_E_FIRST: int +VIEW_E_LAST: int +VIEW_S_FIRST: int +VIEW_S_LAST: int +VIEW_E_DRAW: int +REGDB_E_FIRST: int +REGDB_E_LAST: int +REGDB_S_FIRST: int +REGDB_S_LAST: int +REGDB_E_READREGDB: int +REGDB_E_WRITEREGDB: int +REGDB_E_KEYMISSING: int +REGDB_E_INVALIDVALUE: int +REGDB_E_CLASSNOTREG: int +REGDB_E_IIDNOTREG: int +CAT_E_FIRST: int +CAT_E_LAST: int 
+CAT_E_CATIDNOEXIST: int +CAT_E_NODESCRIPTION: int +CS_E_FIRST: int +CS_E_LAST: int +CS_E_PACKAGE_NOTFOUND: int +CS_E_NOT_DELETABLE: int +CS_E_CLASS_NOTFOUND: int +CS_E_INVALID_VERSION: int +CS_E_NO_CLASSSTORE: int +CACHE_E_FIRST: int +CACHE_E_LAST: int +CACHE_S_FIRST: int +CACHE_S_LAST: int +CACHE_E_NOCACHE_UPDATED: int +OLEOBJ_E_FIRST: int +OLEOBJ_E_LAST: int +OLEOBJ_S_FIRST: int +OLEOBJ_S_LAST: int +OLEOBJ_E_NOVERBS: int +OLEOBJ_E_INVALIDVERB: int +CLIENTSITE_E_FIRST: int +CLIENTSITE_E_LAST: int +CLIENTSITE_S_FIRST: int +CLIENTSITE_S_LAST: int +INPLACE_E_NOTUNDOABLE: int +INPLACE_E_NOTOOLSPACE: int +INPLACE_E_FIRST: int +INPLACE_E_LAST: int +INPLACE_S_FIRST: int +INPLACE_S_LAST: int +ENUM_E_FIRST: int +ENUM_E_LAST: int +ENUM_S_FIRST: int +ENUM_S_LAST: int +CONVERT10_E_FIRST: int +CONVERT10_E_LAST: int +CONVERT10_S_FIRST: int +CONVERT10_S_LAST: int +CONVERT10_E_OLESTREAM_GET: int +CONVERT10_E_OLESTREAM_PUT: int +CONVERT10_E_OLESTREAM_FMT: int +CONVERT10_E_OLESTREAM_BITMAP_TO_DIB: int +CONVERT10_E_STG_FMT: int +CONVERT10_E_STG_NO_STD_STREAM: int +CONVERT10_E_STG_DIB_TO_BITMAP: int +CLIPBRD_E_FIRST: int +CLIPBRD_E_LAST: int +CLIPBRD_S_FIRST: int +CLIPBRD_S_LAST: int +CLIPBRD_E_CANT_OPEN: int +CLIPBRD_E_CANT_EMPTY: int +CLIPBRD_E_CANT_SET: int +CLIPBRD_E_BAD_DATA: int +CLIPBRD_E_CANT_CLOSE: int +MK_E_FIRST: int +MK_E_LAST: int +MK_S_FIRST: int +MK_S_LAST: int +MK_E_CONNECTMANUALLY: int +MK_E_EXCEEDEDDEADLINE: int +MK_E_NEEDGENERIC: int +MK_E_UNAVAILABLE: int +MK_E_SYNTAX: int +MK_E_NOOBJECT: int +MK_E_INVALIDEXTENSION: int +MK_E_INTERMEDIATEINTERFACENOTSUPPORTED: int +MK_E_NOTBINDABLE: int +MK_E_NOTBOUND: int +MK_E_CANTOPENFILE: int +MK_E_MUSTBOTHERUSER: int +MK_E_NOINVERSE: int +MK_E_NOSTORAGE: int +MK_E_NOPREFIX: int +MK_E_ENUMERATION_FAILED: int +CO_E_FIRST: int +CO_E_LAST: int +CO_S_FIRST: int +CO_S_LAST: int +CO_E_NOTINITIALIZED: int +CO_E_ALREADYINITIALIZED: int +CO_E_CANTDETERMINECLASS: int +CO_E_CLASSSTRING: int +CO_E_IIDSTRING: int +CO_E_APPNOTFOUND: int +CO_E_APPSINGLEUSE: int +CO_E_ERRORINAPP: int +CO_E_DLLNOTFOUND: int +CO_E_ERRORINDLL: int +CO_E_WRONGOSFORAPP: int +CO_E_OBJNOTREG: int +CO_E_OBJISREG: int +CO_E_OBJNOTCONNECTED: int +CO_E_APPDIDNTREG: int +CO_E_RELEASED: int +CO_E_FAILEDTOIMPERSONATE: int +CO_E_FAILEDTOGETSECCTX: int +CO_E_FAILEDTOOPENTHREADTOKEN: int +CO_E_FAILEDTOGETTOKENINFO: int +CO_E_TRUSTEEDOESNTMATCHCLIENT: int +CO_E_FAILEDTOQUERYCLIENTBLANKET: int +CO_E_FAILEDTOSETDACL: int +CO_E_ACCESSCHECKFAILED: int +CO_E_NETACCESSAPIFAILED: int +CO_E_WRONGTRUSTEENAMESYNTAX: int +CO_E_INVALIDSID: int +CO_E_CONVERSIONFAILED: int +CO_E_NOMATCHINGSIDFOUND: int +CO_E_LOOKUPACCSIDFAILED: int +CO_E_NOMATCHINGNAMEFOUND: int +CO_E_LOOKUPACCNAMEFAILED: int +CO_E_SETSERLHNDLFAILED: int +CO_E_FAILEDTOGETWINDIR: int +CO_E_PATHTOOLONG: int +CO_E_FAILEDTOGENUUID: int +CO_E_FAILEDTOCREATEFILE: int +CO_E_FAILEDTOCLOSEHANDLE: int +CO_E_EXCEEDSYSACLLIMIT: int +CO_E_ACESINWRONGORDER: int +CO_E_INCOMPATIBLESTREAMVERSION: int +CO_E_FAILEDTOOPENPROCESSTOKEN: int +CO_E_DECODEFAILED: int +CO_E_ACNOTINITIALIZED: int +OLE_S_USEREG: int +OLE_S_STATIC: int +OLE_S_MAC_CLIPFORMAT: int +DRAGDROP_S_DROP: int +DRAGDROP_S_CANCEL: int +DRAGDROP_S_USEDEFAULTCURSORS: int +DATA_S_SAMEFORMATETC: int +VIEW_S_ALREADY_FROZEN: int +CACHE_S_FORMATETC_NOTSUPPORTED: int +CACHE_S_SAMECACHE: int +CACHE_S_SOMECACHES_NOTUPDATED: int +OLEOBJ_S_INVALIDVERB: int +OLEOBJ_S_CANNOT_DOVERB_NOW: int +OLEOBJ_S_INVALIDHWND: int +INPLACE_S_TRUNCATED: int +CONVERT10_S_NO_PRESENTATION: int +MK_S_REDUCED_TO_SELF: int +MK_S_ME: int 
+MK_S_HIM: int +MK_S_US: int +MK_S_MONIKERALREADYREGISTERED: int +CO_E_CLASS_CREATE_FAILED: int +CO_E_SCM_ERROR: int +CO_E_SCM_RPC_FAILURE: int +CO_E_BAD_PATH: int +CO_E_SERVER_EXEC_FAILURE: int +CO_E_OBJSRV_RPC_FAILURE: int +MK_E_NO_NORMALIZED: int +CO_E_SERVER_STOPPING: int +MEM_E_INVALID_ROOT: int +MEM_E_INVALID_LINK: int +MEM_E_INVALID_SIZE: int +CO_S_NOTALLINTERFACES: int +DISP_E_UNKNOWNINTERFACE: int +DISP_E_MEMBERNOTFOUND: int +DISP_E_PARAMNOTFOUND: int +DISP_E_TYPEMISMATCH: int +DISP_E_UNKNOWNNAME: int +DISP_E_NONAMEDARGS: int +DISP_E_BADVARTYPE: int +DISP_E_EXCEPTION: int +DISP_E_OVERFLOW: int +DISP_E_BADINDEX: int +DISP_E_UNKNOWNLCID: int +DISP_E_ARRAYISLOCKED: int +DISP_E_BADPARAMCOUNT: int +DISP_E_PARAMNOTOPTIONAL: int +DISP_E_BADCALLEE: int +DISP_E_NOTACOLLECTION: int +DISP_E_DIVBYZERO: int +TYPE_E_BUFFERTOOSMALL: int +TYPE_E_FIELDNOTFOUND: int +TYPE_E_INVDATAREAD: int +TYPE_E_UNSUPFORMAT: int +TYPE_E_REGISTRYACCESS: int +TYPE_E_LIBNOTREGISTERED: int +TYPE_E_UNDEFINEDTYPE: int +TYPE_E_QUALIFIEDNAMEDISALLOWED: int +TYPE_E_INVALIDSTATE: int +TYPE_E_WRONGTYPEKIND: int +TYPE_E_ELEMENTNOTFOUND: int +TYPE_E_AMBIGUOUSNAME: int +TYPE_E_NAMECONFLICT: int +TYPE_E_UNKNOWNLCID: int +TYPE_E_DLLFUNCTIONNOTFOUND: int +TYPE_E_BADMODULEKIND: int +TYPE_E_SIZETOOBIG: int +TYPE_E_DUPLICATEID: int +TYPE_E_INVALIDID: int +TYPE_E_TYPEMISMATCH: int +TYPE_E_OUTOFBOUNDS: int +TYPE_E_IOERROR: int +TYPE_E_CANTCREATETMPFILE: int +TYPE_E_CANTLOADLIBRARY: int +TYPE_E_INCONSISTENTPROPFUNCS: int +TYPE_E_CIRCULARTYPE: int +STG_E_INVALIDFUNCTION: int +STG_E_FILENOTFOUND: int +STG_E_PATHNOTFOUND: int +STG_E_TOOMANYOPENFILES: int +STG_E_ACCESSDENIED: int +STG_E_INVALIDHANDLE: int +STG_E_INSUFFICIENTMEMORY: int +STG_E_INVALIDPOINTER: int +STG_E_NOMOREFILES: int +STG_E_DISKISWRITEPROTECTED: int +STG_E_SEEKERROR: int +STG_E_WRITEFAULT: int +STG_E_READFAULT: int +STG_E_SHAREVIOLATION: int +STG_E_LOCKVIOLATION: int +STG_E_FILEALREADYEXISTS: int +STG_E_INVALIDPARAMETER: int +STG_E_MEDIUMFULL: int +STG_E_PROPSETMISMATCHED: int +STG_E_ABNORMALAPIEXIT: int +STG_E_INVALIDHEADER: int +STG_E_INVALIDNAME: int +STG_E_UNKNOWN: int +STG_E_UNIMPLEMENTEDFUNCTION: int +STG_E_INVALIDFLAG: int +STG_E_INUSE: int +STG_E_NOTCURRENT: int +STG_E_REVERTED: int +STG_E_CANTSAVE: int +STG_E_OLDFORMAT: int +STG_E_OLDDLL: int +STG_E_SHAREREQUIRED: int +STG_E_NOTFILEBASEDSTORAGE: int +STG_E_EXTANTMARSHALLINGS: int +STG_E_DOCFILECORRUPT: int +STG_E_BADBASEADDRESS: int +STG_E_INCOMPLETE: int +STG_E_TERMINATED: int +STG_S_CONVERTED: int +STG_S_BLOCK: int +STG_S_RETRYNOW: int +STG_S_MONITORING: int +STG_S_MULTIPLEOPENS: int +STG_S_CONSOLIDATIONFAILED: int +STG_S_CANNOTCONSOLIDATE: int +RPC_E_CALL_REJECTED: int +RPC_E_CALL_CANCELED: int +RPC_E_CANTPOST_INSENDCALL: int +RPC_E_CANTCALLOUT_INASYNCCALL: int +RPC_E_CANTCALLOUT_INEXTERNALCALL: int +RPC_E_CONNECTION_TERMINATED: int +RPC_E_SERVER_DIED: int +RPC_E_CLIENT_DIED: int +RPC_E_INVALID_DATAPACKET: int +RPC_E_CANTTRANSMIT_CALL: int +RPC_E_CLIENT_CANTMARSHAL_DATA: int +RPC_E_CLIENT_CANTUNMARSHAL_DATA: int +RPC_E_SERVER_CANTMARSHAL_DATA: int +RPC_E_SERVER_CANTUNMARSHAL_DATA: int +RPC_E_INVALID_DATA: int +RPC_E_INVALID_PARAMETER: int +RPC_E_CANTCALLOUT_AGAIN: int +RPC_E_SERVER_DIED_DNE: int +RPC_E_SYS_CALL_FAILED: int +RPC_E_OUT_OF_RESOURCES: int +RPC_E_ATTEMPTED_MULTITHREAD: int +RPC_E_NOT_REGISTERED: int +RPC_E_FAULT: int +RPC_E_SERVERFAULT: int +RPC_E_CHANGED_MODE: int +RPC_E_INVALIDMETHOD: int +RPC_E_DISCONNECTED: int +RPC_E_RETRY: int +RPC_E_SERVERCALL_RETRYLATER: int +RPC_E_SERVERCALL_REJECTED: 
int +RPC_E_INVALID_CALLDATA: int +RPC_E_CANTCALLOUT_ININPUTSYNCCALL: int +RPC_E_WRONG_THREAD: int +RPC_E_THREAD_NOT_INIT: int +RPC_E_VERSION_MISMATCH: int +RPC_E_INVALID_HEADER: int +RPC_E_INVALID_EXTENSION: int +RPC_E_INVALID_IPID: int +RPC_E_INVALID_OBJECT: int +RPC_S_CALLPENDING: int +RPC_S_WAITONTIMER: int +RPC_E_CALL_COMPLETE: int +RPC_E_UNSECURE_CALL: int +RPC_E_TOO_LATE: int +RPC_E_NO_GOOD_SECURITY_PACKAGES: int +RPC_E_ACCESS_DENIED: int +RPC_E_REMOTE_DISABLED: int +RPC_E_INVALID_OBJREF: int +RPC_E_NO_CONTEXT: int +RPC_E_TIMEOUT: int +RPC_E_NO_SYNC: int +RPC_E_UNEXPECTED: int +NTE_BAD_UID: int +NTE_BAD_HASH: int +NTE_BAD_KEY: int +NTE_BAD_LEN: int +NTE_BAD_DATA: int +NTE_BAD_SIGNATURE: int +NTE_BAD_VER: int +NTE_BAD_ALGID: int +NTE_BAD_FLAGS: int +NTE_BAD_TYPE: int +NTE_BAD_KEY_STATE: int +NTE_BAD_HASH_STATE: int +NTE_NO_KEY: int +NTE_NO_MEMORY: int +NTE_EXISTS: int +NTE_PERM: int +NTE_NOT_FOUND: int +NTE_DOUBLE_ENCRYPT: int +NTE_BAD_PROVIDER: int +NTE_BAD_PROV_TYPE: int +NTE_BAD_PUBLIC_KEY: int +NTE_BAD_KEYSET: int +NTE_PROV_TYPE_NOT_DEF: int +NTE_PROV_TYPE_ENTRY_BAD: int +NTE_KEYSET_NOT_DEF: int +NTE_KEYSET_ENTRY_BAD: int +NTE_PROV_TYPE_NO_MATCH: int +NTE_SIGNATURE_FILE_BAD: int +NTE_PROVIDER_DLL_FAIL: int +NTE_PROV_DLL_NOT_FOUND: int +NTE_BAD_KEYSET_PARAM: int +NTE_FAIL: int +NTE_SYS_ERR: int +CRYPT_E_MSG_ERROR: int +CRYPT_E_UNKNOWN_ALGO: int +CRYPT_E_OID_FORMAT: int +CRYPT_E_INVALID_MSG_TYPE: int +CRYPT_E_UNEXPECTED_ENCODING: int +CRYPT_E_AUTH_ATTR_MISSING: int +CRYPT_E_HASH_VALUE: int +CRYPT_E_INVALID_INDEX: int +CRYPT_E_ALREADY_DECRYPTED: int +CRYPT_E_NOT_DECRYPTED: int +CRYPT_E_RECIPIENT_NOT_FOUND: int +CRYPT_E_CONTROL_TYPE: int +CRYPT_E_ISSUER_SERIALNUMBER: int +CRYPT_E_SIGNER_NOT_FOUND: int +CRYPT_E_ATTRIBUTES_MISSING: int +CRYPT_E_STREAM_MSG_NOT_READY: int +CRYPT_E_STREAM_INSUFFICIENT_DATA: int +CRYPT_E_BAD_LEN: int +CRYPT_E_BAD_ENCODE: int +CRYPT_E_FILE_ERROR: int +CRYPT_E_NOT_FOUND: int +CRYPT_E_EXISTS: int +CRYPT_E_NO_PROVIDER: int +CRYPT_E_SELF_SIGNED: int +CRYPT_E_DELETED_PREV: int +CRYPT_E_NO_MATCH: int +CRYPT_E_UNEXPECTED_MSG_TYPE: int +CRYPT_E_NO_KEY_PROPERTY: int +CRYPT_E_NO_DECRYPT_CERT: int +CRYPT_E_BAD_MSG: int +CRYPT_E_NO_SIGNER: int +CRYPT_E_PENDING_CLOSE: int +CRYPT_E_REVOKED: int +CRYPT_E_NO_REVOCATION_DLL: int +CRYPT_E_NO_REVOCATION_CHECK: int +CRYPT_E_REVOCATION_OFFLINE: int +CRYPT_E_NOT_IN_REVOCATION_DATABASE: int +CRYPT_E_INVALID_NUMERIC_STRING: int +CRYPT_E_INVALID_PRINTABLE_STRING: int +CRYPT_E_INVALID_IA5_STRING: int +CRYPT_E_INVALID_X500_STRING: int +CRYPT_E_NOT_CHAR_STRING: int +CRYPT_E_FILERESIZED: int +CRYPT_E_SECURITY_SETTINGS: int +CRYPT_E_NO_VERIFY_USAGE_DLL: int +CRYPT_E_NO_VERIFY_USAGE_CHECK: int +CRYPT_E_VERIFY_USAGE_OFFLINE: int +CRYPT_E_NOT_IN_CTL: int +CRYPT_E_NO_TRUSTED_SIGNER: int +CRYPT_E_OSS_ERROR: int +CERTSRV_E_BAD_REQUESTSUBJECT: int +CERTSRV_E_NO_REQUEST: int +CERTSRV_E_BAD_REQUESTSTATUS: int +CERTSRV_E_PROPERTY_EMPTY: int +CERTDB_E_JET_ERROR: int +TRUST_E_SYSTEM_ERROR: int +TRUST_E_NO_SIGNER_CERT: int +TRUST_E_COUNTER_SIGNER: int +TRUST_E_CERT_SIGNATURE: int +TRUST_E_TIME_STAMP: int +TRUST_E_BAD_DIGEST: int +TRUST_E_BASIC_CONSTRAINTS: int +TRUST_E_FINANCIAL_CRITERIA: int +NTE_OP_OK: int +DIGSIG_E_ENCODE: int +DIGSIG_E_DECODE: int +DIGSIG_E_EXTENSIBILITY: int +DIGSIG_E_CRYPTO: int +PERSIST_E_SIZEDEFINITE: int +PERSIST_E_SIZEINDEFINITE: int +PERSIST_E_NOTSELFSIZING: int +TRUST_E_NOSIGNATURE: int +CERT_E_EXPIRED: int +CERT_E_VALIDITYPERIODNESTING: int +CERT_E_ROLE: int +CERT_E_PATHLENCONST: int +CERT_E_CRITICAL: int 
+CERT_E_PURPOSE: int +CERT_E_ISSUERCHAINING: int +CERT_E_MALFORMED: int +CERT_E_UNTRUSTEDROOT: int +CERT_E_CHAINING: int +TRUST_E_FAIL: int +CERT_E_REVOKED: int +CERT_E_UNTRUSTEDTESTROOT: int +CERT_E_REVOCATION_FAILURE: int +CERT_E_CN_NO_MATCH: int +CERT_E_WRONG_USAGE: int +SPAPI_E_EXPECTED_SECTION_NAME: int +SPAPI_E_BAD_SECTION_NAME_LINE: int +SPAPI_E_SECTION_NAME_TOO_LONG: int +SPAPI_E_GENERAL_SYNTAX: int +SPAPI_E_WRONG_INF_STYLE: int +SPAPI_E_SECTION_NOT_FOUND: int +SPAPI_E_LINE_NOT_FOUND: int +SPAPI_E_NO_ASSOCIATED_CLASS: int +SPAPI_E_CLASS_MISMATCH: int +SPAPI_E_DUPLICATE_FOUND: int +SPAPI_E_NO_DRIVER_SELECTED: int +SPAPI_E_KEY_DOES_NOT_EXIST: int +SPAPI_E_INVALID_DEVINST_NAME: int +SPAPI_E_INVALID_CLASS: int +SPAPI_E_DEVINST_ALREADY_EXISTS: int +SPAPI_E_DEVINFO_NOT_REGISTERED: int +SPAPI_E_INVALID_REG_PROPERTY: int +SPAPI_E_NO_INF: int +SPAPI_E_NO_SUCH_DEVINST: int +SPAPI_E_CANT_LOAD_CLASS_ICON: int +SPAPI_E_INVALID_CLASS_INSTALLER: int +SPAPI_E_DI_DO_DEFAULT: int +SPAPI_E_DI_NOFILECOPY: int +SPAPI_E_INVALID_HWPROFILE: int +SPAPI_E_NO_DEVICE_SELECTED: int +SPAPI_E_DEVINFO_LIST_LOCKED: int +SPAPI_E_DEVINFO_DATA_LOCKED: int +SPAPI_E_DI_BAD_PATH: int +SPAPI_E_NO_CLASSINSTALL_PARAMS: int +SPAPI_E_FILEQUEUE_LOCKED: int +SPAPI_E_BAD_SERVICE_INSTALLSECT: int +SPAPI_E_NO_CLASS_DRIVER_LIST: int +SPAPI_E_NO_ASSOCIATED_SERVICE: int +SPAPI_E_NO_DEFAULT_DEVICE_INTERFACE: int +SPAPI_E_DEVICE_INTERFACE_ACTIVE: int +SPAPI_E_DEVICE_INTERFACE_REMOVED: int +SPAPI_E_BAD_INTERFACE_INSTALLSECT: int +SPAPI_E_NO_SUCH_INTERFACE_CLASS: int +SPAPI_E_INVALID_REFERENCE_STRING: int +SPAPI_E_INVALID_MACHINENAME: int +SPAPI_E_REMOTE_COMM_FAILURE: int +SPAPI_E_MACHINE_UNAVAILABLE: int +SPAPI_E_NO_CONFIGMGR_SERVICES: int +SPAPI_E_INVALID_PROPPAGE_PROVIDER: int +SPAPI_E_NO_SUCH_DEVICE_INTERFACE: int +SPAPI_E_DI_POSTPROCESSING_REQUIRED: int +SPAPI_E_INVALID_COINSTALLER: int +SPAPI_E_NO_COMPAT_DRIVERS: int +SPAPI_E_NO_DEVICE_ICON: int +SPAPI_E_INVALID_INF_LOGCONFIG: int +SPAPI_E_DI_DONT_INSTALL: int +SPAPI_E_INVALID_FILTER_DRIVER: int +SPAPI_E_ERROR_NOT_INSTALLED: int +CDERR_DIALOGFAILURE: int +CDERR_GENERALCODES: int +CDERR_STRUCTSIZE: int +CDERR_INITIALIZATION: int +CDERR_NOTEMPLATE: int +CDERR_NOHINSTANCE: int +CDERR_LOADSTRFAILURE: int +CDERR_FINDRESFAILURE: int +CDERR_LOADRESFAILURE: int +CDERR_LOCKRESFAILURE: int +CDERR_MEMALLOCFAILURE: int +CDERR_MEMLOCKFAILURE: int +CDERR_NOHOOK: int +CDERR_REGISTERMSGFAIL: int +PDERR_PRINTERCODES: int +PDERR_SETUPFAILURE: int +PDERR_PARSEFAILURE: int +PDERR_RETDEFFAILURE: int +PDERR_LOADDRVFAILURE: int +PDERR_GETDEVMODEFAIL: int +PDERR_INITFAILURE: int +PDERR_NODEVICES: int +PDERR_NODEFAULTPRN: int +PDERR_DNDMMISMATCH: int +PDERR_CREATEICFAILURE: int +PDERR_PRINTERNOTFOUND: int +PDERR_DEFAULTDIFFERENT: int +CFERR_CHOOSEFONTCODES: int +CFERR_NOFONTS: int +CFERR_MAXLESSTHANMIN: int +FNERR_FILENAMECODES: int +FNERR_SUBCLASSFAILURE: int +FNERR_INVALIDFILENAME: int +FNERR_BUFFERTOOSMALL: int +FRERR_FINDREPLACECODES: int +FRERR_BUFFERLENGTHZERO: int +CCERR_CHOOSECOLORCODES: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winioctlcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winioctlcon.pyi new file mode 100644 index 00000000..49028ceb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winioctlcon.pyi @@ -0,0 +1,661 @@ +import _win32typing + +def CTL_CODE(DeviceType: int, Function: int, Method: int, Access: 
int) -> int: ... +def DEVICE_TYPE_FROM_CTL_CODE(ctrlCode: int) -> int: ... + +FILE_DEVICE_BEEP: int +FILE_DEVICE_CD_ROM: int +FILE_DEVICE_CD_ROM_FILE_SYSTEM: int +FILE_DEVICE_CONTROLLER: int +FILE_DEVICE_DATALINK: int +FILE_DEVICE_DFS: int +FILE_DEVICE_DISK: int +FILE_DEVICE_DISK_FILE_SYSTEM: int +FILE_DEVICE_FILE_SYSTEM: int +FILE_DEVICE_INPORT_PORT: int +FILE_DEVICE_KEYBOARD: int +FILE_DEVICE_MAILSLOT: int +FILE_DEVICE_MIDI_IN: int +FILE_DEVICE_MIDI_OUT: int +FILE_DEVICE_MOUSE: int +FILE_DEVICE_MULTI_UNC_PROVIDER: int +FILE_DEVICE_NAMED_PIPE: int +FILE_DEVICE_NETWORK: int +FILE_DEVICE_NETWORK_BROWSER: int +FILE_DEVICE_NETWORK_FILE_SYSTEM: int +FILE_DEVICE_NULL: int +FILE_DEVICE_PARALLEL_PORT: int +FILE_DEVICE_PHYSICAL_NETCARD: int +FILE_DEVICE_PRINTER: int +FILE_DEVICE_SCANNER: int +FILE_DEVICE_SERIAL_MOUSE_PORT: int +FILE_DEVICE_SERIAL_PORT: int +FILE_DEVICE_SCREEN: int +FILE_DEVICE_SOUND: int +FILE_DEVICE_STREAMS: int +FILE_DEVICE_TAPE: int +FILE_DEVICE_TAPE_FILE_SYSTEM: int +FILE_DEVICE_TRANSPORT: int +FILE_DEVICE_UNKNOWN: int +FILE_DEVICE_VIDEO: int +FILE_DEVICE_VIRTUAL_DISK: int +FILE_DEVICE_WAVE_IN: int +FILE_DEVICE_WAVE_OUT: int +FILE_DEVICE_8042_PORT: int +FILE_DEVICE_NETWORK_REDIRECTOR: int +FILE_DEVICE_BATTERY: int +FILE_DEVICE_BUS_EXTENDER: int +FILE_DEVICE_MODEM: int +FILE_DEVICE_VDM: int +FILE_DEVICE_MASS_STORAGE: int +FILE_DEVICE_SMB: int +FILE_DEVICE_KS: int +FILE_DEVICE_CHANGER: int +FILE_DEVICE_SMARTCARD: int +FILE_DEVICE_ACPI: int +FILE_DEVICE_DVD: int +FILE_DEVICE_FULLSCREEN_VIDEO: int +FILE_DEVICE_DFS_FILE_SYSTEM: int +FILE_DEVICE_DFS_VOLUME: int +FILE_DEVICE_SERENUM: int +FILE_DEVICE_TERMSRV: int +FILE_DEVICE_KSEC: int +FILE_DEVICE_FIPS: int +FILE_DEVICE_INFINIBAND: int +METHOD_BUFFERED: int +METHOD_IN_DIRECT: int +METHOD_OUT_DIRECT: int +METHOD_NEITHER: int +METHOD_DIRECT_TO_HARDWARE: int +METHOD_DIRECT_FROM_HARDWARE: int +FILE_ANY_ACCESS: int +FILE_SPECIAL_ACCESS: int +FILE_READ_ACCESS: int +FILE_WRITE_ACCESS: int +IOCTL_STORAGE_BASE: int +RECOVERED_WRITES_VALID: int +UNRECOVERED_WRITES_VALID: int +RECOVERED_READS_VALID: int +UNRECOVERED_READS_VALID: int +WRITE_COMPRESSION_INFO_VALID: int +READ_COMPRESSION_INFO_VALID: int +TAPE_RETURN_STATISTICS: int +TAPE_RETURN_ENV_INFO: int +TAPE_RESET_STATISTICS: int +MEDIA_ERASEABLE: int +MEDIA_WRITE_ONCE: int +MEDIA_READ_ONLY: int +MEDIA_READ_WRITE: int +MEDIA_WRITE_PROTECTED: int +MEDIA_CURRENTLY_MOUNTED: int +IOCTL_DISK_BASE: int +PARTITION_ENTRY_UNUSED: int +PARTITION_FAT_12: int +PARTITION_XENIX_1: int +PARTITION_XENIX_2: int +PARTITION_FAT_16: int +PARTITION_EXTENDED: int +PARTITION_HUGE: int +PARTITION_IFS: int +PARTITION_OS2BOOTMGR: int +PARTITION_FAT32: int +PARTITION_FAT32_XINT13: int +PARTITION_XINT13: int +PARTITION_XINT13_EXTENDED: int +PARTITION_PREP: int +PARTITION_LDM: int +PARTITION_UNIX: int +VALID_NTFT: int +PARTITION_NTFT: int +GPT_ATTRIBUTE_PLATFORM_REQUIRED: int +GPT_BASIC_DATA_ATTRIBUTE_NO_DRIVE_LETTER: int +GPT_BASIC_DATA_ATTRIBUTE_HIDDEN: int +GPT_BASIC_DATA_ATTRIBUTE_SHADOW_COPY: int +GPT_BASIC_DATA_ATTRIBUTE_READ_ONLY: int +HIST_NO_OF_BUCKETS: int +DISK_LOGGING_START: int +DISK_LOGGING_STOP: int +DISK_LOGGING_DUMP: int +DISK_BINNING: int +CAP_ATA_ID_CMD: int +CAP_ATAPI_ID_CMD: int +CAP_SMART_CMD: int +ATAPI_ID_CMD: int +ID_CMD: int +SMART_CMD: int +SMART_CYL_LOW: int +SMART_CYL_HI: int +SMART_NO_ERROR: int +SMART_IDE_ERROR: int +SMART_INVALID_FLAG: int +SMART_INVALID_COMMAND: int +SMART_INVALID_BUFFER: int +SMART_INVALID_DRIVE: int +SMART_INVALID_IOCTL: int +SMART_ERROR_NO_MEM: int 
+SMART_INVALID_REGISTER: int +SMART_NOT_SUPPORTED: int +SMART_NO_IDE_DEVICE: int +SMART_OFFLINE_ROUTINE_OFFLINE: int +SMART_SHORT_SELFTEST_OFFLINE: int +SMART_EXTENDED_SELFTEST_OFFLINE: int +SMART_ABORT_OFFLINE_SELFTEST: int +SMART_SHORT_SELFTEST_CAPTIVE: int +SMART_EXTENDED_SELFTEST_CAPTIVE: int +READ_ATTRIBUTE_BUFFER_SIZE: int +IDENTIFY_BUFFER_SIZE: int +READ_THRESHOLD_BUFFER_SIZE: int +SMART_LOG_SECTOR_SIZE: int +READ_ATTRIBUTES: int +READ_THRESHOLDS: int +ENABLE_DISABLE_AUTOSAVE: int +SAVE_ATTRIBUTE_VALUES: int +EXECUTE_OFFLINE_DIAGS: int +SMART_READ_LOG: int +SMART_WRITE_LOG: int +ENABLE_SMART: int +DISABLE_SMART: int +RETURN_SMART_STATUS: int +ENABLE_DISABLE_AUTO_OFFLINE: int +IOCTL_CHANGER_BASE: int +MAX_VOLUME_ID_SIZE: int +MAX_VOLUME_TEMPLATE_SIZE: int +VENDOR_ID_LENGTH: int +PRODUCT_ID_LENGTH: int +REVISION_LENGTH: int +SERIAL_NUMBER_LENGTH: int +CHANGER_BAR_CODE_SCANNER_INSTALLED: int +CHANGER_INIT_ELEM_STAT_WITH_RANGE: int +CHANGER_CLOSE_IEPORT: int +CHANGER_OPEN_IEPORT: int +CHANGER_STATUS_NON_VOLATILE: int +CHANGER_EXCHANGE_MEDIA: int +CHANGER_CLEANER_SLOT: int +CHANGER_LOCK_UNLOCK: int +CHANGER_CARTRIDGE_MAGAZINE: int +CHANGER_MEDIUM_FLIP: int +CHANGER_POSITION_TO_ELEMENT: int +CHANGER_REPORT_IEPORT_STATE: int +CHANGER_STORAGE_DRIVE: int +CHANGER_STORAGE_IEPORT: int +CHANGER_STORAGE_SLOT: int +CHANGER_STORAGE_TRANSPORT: int +CHANGER_DRIVE_CLEANING_REQUIRED: int +CHANGER_PREDISMOUNT_EJECT_REQUIRED: int +CHANGER_CLEANER_ACCESS_NOT_VALID: int +CHANGER_PREMOUNT_EJECT_REQUIRED: int +CHANGER_VOLUME_IDENTIFICATION: int +CHANGER_VOLUME_SEARCH: int +CHANGER_VOLUME_ASSERT: int +CHANGER_VOLUME_REPLACE: int +CHANGER_VOLUME_UNDEFINE: int +CHANGER_SERIAL_NUMBER_VALID: int +CHANGER_DEVICE_REINITIALIZE_CAPABLE: int +CHANGER_KEYPAD_ENABLE_DISABLE: int +CHANGER_DRIVE_EMPTY_ON_DOOR_ACCESS: int +CHANGER_RESERVED_BIT: int +CHANGER_PREDISMOUNT_ALIGN_TO_SLOT: int +CHANGER_PREDISMOUNT_ALIGN_TO_DRIVE: int +CHANGER_CLEANER_AUTODISMOUNT: int +CHANGER_TRUE_EXCHANGE_CAPABLE: int +CHANGER_SLOTS_USE_TRAYS: int +CHANGER_RTN_MEDIA_TO_ORIGINAL_ADDR: int +CHANGER_CLEANER_OPS_NOT_SUPPORTED: int +CHANGER_IEPORT_USER_CONTROL_OPEN: int +CHANGER_IEPORT_USER_CONTROL_CLOSE: int +CHANGER_MOVE_EXTENDS_IEPORT: int +CHANGER_MOVE_RETRACTS_IEPORT: int +CHANGER_TO_TRANSPORT: int +CHANGER_TO_SLOT: int +CHANGER_TO_IEPORT: int +CHANGER_TO_DRIVE: int +LOCK_UNLOCK_IEPORT: int +LOCK_UNLOCK_DOOR: int +LOCK_UNLOCK_KEYPAD: int +LOCK_ELEMENT: int +UNLOCK_ELEMENT: int +EXTEND_IEPORT: int +RETRACT_IEPORT: int +ELEMENT_STATUS_FULL: int +ELEMENT_STATUS_IMPEXP: int +ELEMENT_STATUS_EXCEPT: int +ELEMENT_STATUS_ACCESS: int +ELEMENT_STATUS_EXENAB: int +ELEMENT_STATUS_INENAB: int +ELEMENT_STATUS_PRODUCT_DATA: int +ELEMENT_STATUS_LUN_VALID: int +ELEMENT_STATUS_ID_VALID: int +ELEMENT_STATUS_NOT_BUS: int +ELEMENT_STATUS_INVERT: int +ELEMENT_STATUS_SVALID: int +ELEMENT_STATUS_PVOLTAG: int +ELEMENT_STATUS_AVOLTAG: int +ERROR_LABEL_UNREADABLE: int +ERROR_LABEL_QUESTIONABLE: int +ERROR_SLOT_NOT_PRESENT: int +ERROR_DRIVE_NOT_INSTALLED: int +ERROR_TRAY_MALFUNCTION: int +ERROR_INIT_STATUS_NEEDED: int +ERROR_UNHANDLED_ERROR: int +SEARCH_ALL: int +SEARCH_PRIMARY: int +SEARCH_ALTERNATE: int +SEARCH_ALL_NO_SEQ: int +SEARCH_PRI_NO_SEQ: int +SEARCH_ALT_NO_SEQ: int +ASSERT_PRIMARY: int +ASSERT_ALTERNATE: int +REPLACE_PRIMARY: int +REPLACE_ALTERNATE: int +UNDEFINE_PRIMARY: int +UNDEFINE_ALTERNATE: int +USN_PAGE_SIZE: int +USN_REASON_DATA_OVERWRITE: int +USN_REASON_DATA_EXTEND: int +USN_REASON_DATA_TRUNCATION: int +USN_REASON_NAMED_DATA_OVERWRITE: int 
+USN_REASON_NAMED_DATA_EXTEND: int +USN_REASON_NAMED_DATA_TRUNCATION: int +USN_REASON_FILE_CREATE: int +USN_REASON_FILE_DELETE: int +USN_REASON_EA_CHANGE: int +USN_REASON_SECURITY_CHANGE: int +USN_REASON_RENAME_OLD_NAME: int +USN_REASON_RENAME_NEW_NAME: int +USN_REASON_INDEXABLE_CHANGE: int +USN_REASON_BASIC_INFO_CHANGE: int +USN_REASON_HARD_LINK_CHANGE: int +USN_REASON_COMPRESSION_CHANGE: int +USN_REASON_ENCRYPTION_CHANGE: int +USN_REASON_OBJECT_ID_CHANGE: int +USN_REASON_REPARSE_POINT_CHANGE: int +USN_REASON_STREAM_CHANGE: int +USN_REASON_TRANSACTED_CHANGE: int +USN_REASON_CLOSE: int +USN_DELETE_FLAG_DELETE: int +USN_DELETE_FLAG_NOTIFY: int +USN_DELETE_VALID_FLAGS: int +USN_SOURCE_DATA_MANAGEMENT: int +USN_SOURCE_AUXILIARY_DATA: int +USN_SOURCE_REPLICATION_MANAGEMENT: int +MARK_HANDLE_PROTECT_CLUSTERS: int +MARK_HANDLE_TXF_SYSTEM_LOG: int +MARK_HANDLE_NOT_TXF_SYSTEM_LOG: int +VOLUME_IS_DIRTY: int +VOLUME_UPGRADE_SCHEDULED: int +VOLUME_SESSION_OPEN: int +FILE_PREFETCH_TYPE_FOR_CREATE: int +FILE_PREFETCH_TYPE_FOR_DIRENUM: int +FILE_PREFETCH_TYPE_FOR_CREATE_EX: int +FILE_PREFETCH_TYPE_FOR_DIRENUM_EX: int +FILE_PREFETCH_TYPE_MAX: int +FILESYSTEM_STATISTICS_TYPE_NTFS: int +FILESYSTEM_STATISTICS_TYPE_FAT: int +FILE_SET_ENCRYPTION: int +FILE_CLEAR_ENCRYPTION: int +STREAM_SET_ENCRYPTION: int +STREAM_CLEAR_ENCRYPTION: int +MAXIMUM_ENCRYPTION_VALUE: int +ENCRYPTION_FORMAT_DEFAULT: int +COMPRESSION_FORMAT_SPARSE: int +COPYFILE_SIS_LINK: int +COPYFILE_SIS_REPLACE: int +COPYFILE_SIS_FLAGS: int +WMI_DISK_GEOMETRY_GUID: _win32typing.PyIID +GUID_DEVINTERFACE_CDROM: _win32typing.PyIID +GUID_DEVINTERFACE_FLOPPY: _win32typing.PyIID +GUID_DEVINTERFACE_SERENUM_BUS_ENUMERATOR: _win32typing.PyIID +GUID_DEVINTERFACE_COMPORT: _win32typing.PyIID +GUID_DEVINTERFACE_DISK: _win32typing.PyIID +GUID_DEVINTERFACE_STORAGEPORT: _win32typing.PyIID +GUID_DEVINTERFACE_CDCHANGER: _win32typing.PyIID +GUID_DEVINTERFACE_PARTITION: _win32typing.PyIID +GUID_DEVINTERFACE_VOLUME: _win32typing.PyIID +GUID_DEVINTERFACE_WRITEONCEDISK: _win32typing.PyIID +GUID_DEVINTERFACE_TAPE: _win32typing.PyIID +GUID_DEVINTERFACE_MEDIUMCHANGER: _win32typing.PyIID +GUID_SERENUM_BUS_ENUMERATOR: int +GUID_CLASS_COMPORT: int +DiskClassGuid: int +CdRomClassGuid: int +PartitionClassGuid: int +TapeClassGuid: int +WriteOnceDiskClassGuid: int +VolumeClassGuid: int +MediumChangerClassGuid: int +FloppyClassGuid: int +CdChangerClassGuid: int +StoragePortClassGuid: int +IOCTL_STORAGE_CHECK_VERIFY: int +IOCTL_STORAGE_CHECK_VERIFY2: int +IOCTL_STORAGE_MEDIA_REMOVAL: int +IOCTL_STORAGE_EJECT_MEDIA: int +IOCTL_STORAGE_LOAD_MEDIA: int +IOCTL_STORAGE_LOAD_MEDIA2: int +IOCTL_STORAGE_RESERVE: int +IOCTL_STORAGE_RELEASE: int +IOCTL_STORAGE_FIND_NEW_DEVICES: int +IOCTL_STORAGE_EJECTION_CONTROL: int +IOCTL_STORAGE_MCN_CONTROL: int +IOCTL_STORAGE_GET_MEDIA_TYPES: int +IOCTL_STORAGE_GET_MEDIA_TYPES_EX: int +IOCTL_STORAGE_GET_MEDIA_SERIAL_NUMBER: int +IOCTL_STORAGE_GET_HOTPLUG_INFO: int +IOCTL_STORAGE_SET_HOTPLUG_INFO: int +IOCTL_STORAGE_RESET_BUS: int +IOCTL_STORAGE_RESET_DEVICE: int +IOCTL_STORAGE_BREAK_RESERVATION: int +IOCTL_STORAGE_GET_DEVICE_NUMBER: int +IOCTL_STORAGE_PREDICT_FAILURE: int +IOCTL_DISK_GET_DRIVE_GEOMETRY: int +IOCTL_DISK_GET_PARTITION_INFO: int +IOCTL_DISK_SET_PARTITION_INFO: int +IOCTL_DISK_GET_DRIVE_LAYOUT: int +IOCTL_DISK_SET_DRIVE_LAYOUT: int +IOCTL_DISK_VERIFY: int +IOCTL_DISK_FORMAT_TRACKS: int +IOCTL_DISK_REASSIGN_BLOCKS: int +IOCTL_DISK_PERFORMANCE: int +IOCTL_DISK_IS_WRITABLE: int +IOCTL_DISK_LOGGING: int +IOCTL_DISK_FORMAT_TRACKS_EX: int 
+IOCTL_DISK_HISTOGRAM_STRUCTURE: int +IOCTL_DISK_HISTOGRAM_DATA: int +IOCTL_DISK_HISTOGRAM_RESET: int +IOCTL_DISK_REQUEST_STRUCTURE: int +IOCTL_DISK_REQUEST_DATA: int +IOCTL_DISK_PERFORMANCE_OFF: int +IOCTL_DISK_CONTROLLER_NUMBER: int +SMART_GET_VERSION: int +SMART_SEND_DRIVE_COMMAND: int +SMART_RCV_DRIVE_DATA: int +IOCTL_DISK_GET_PARTITION_INFO_EX: int +IOCTL_DISK_SET_PARTITION_INFO_EX: int +IOCTL_DISK_GET_DRIVE_LAYOUT_EX: int +IOCTL_DISK_SET_DRIVE_LAYOUT_EX: int +IOCTL_DISK_CREATE_DISK: int +IOCTL_DISK_GET_LENGTH_INFO: int +IOCTL_DISK_GET_DRIVE_GEOMETRY_EX: int +IOCTL_DISK_REASSIGN_BLOCKS_EX: int +IOCTL_DISK_UPDATE_DRIVE_SIZE: int +IOCTL_DISK_GROW_PARTITION: int +IOCTL_DISK_GET_CACHE_INFORMATION: int +IOCTL_DISK_SET_CACHE_INFORMATION: int +OBSOLETE_IOCTL_STORAGE_RESET_BUS: int +OBSOLETE_IOCTL_STORAGE_RESET_DEVICE: int +OBSOLETE_DISK_GET_WRITE_CACHE_STATE: int +IOCTL_DISK_GET_WRITE_CACHE_STATE: int +IOCTL_DISK_DELETE_DRIVE_LAYOUT: int +IOCTL_DISK_UPDATE_PROPERTIES: int +IOCTL_DISK_FORMAT_DRIVE: int +IOCTL_DISK_SENSE_DEVICE: int +IOCTL_DISK_CHECK_VERIFY: int +IOCTL_DISK_MEDIA_REMOVAL: int +IOCTL_DISK_EJECT_MEDIA: int +IOCTL_DISK_LOAD_MEDIA: int +IOCTL_DISK_RESERVE: int +IOCTL_DISK_RELEASE: int +IOCTL_DISK_FIND_NEW_DEVICES: int +IOCTL_DISK_GET_MEDIA_TYPES: int +DISK_HISTOGRAM_SIZE: int +HISTOGRAM_BUCKET_SIZE: int +IOCTL_CHANGER_GET_PARAMETERS: int +IOCTL_CHANGER_GET_STATUS: int +IOCTL_CHANGER_GET_PRODUCT_DATA: int +IOCTL_CHANGER_SET_ACCESS: int +IOCTL_CHANGER_GET_ELEMENT_STATUS: int +IOCTL_CHANGER_INITIALIZE_ELEMENT_STATUS: int +IOCTL_CHANGER_SET_POSITION: int +IOCTL_CHANGER_EXCHANGE_MEDIUM: int +IOCTL_CHANGER_MOVE_MEDIUM: int +IOCTL_CHANGER_REINITIALIZE_TRANSPORT: int +IOCTL_CHANGER_QUERY_VOLUME_TAGS: int +IOCTL_SERIAL_LSRMST_INSERT: int +IOCTL_SERENUM_EXPOSE_HARDWARE: int +IOCTL_SERENUM_REMOVE_HARDWARE: int +IOCTL_SERENUM_PORT_DESC: int +IOCTL_SERENUM_GET_PORT_NAME: int +SERIAL_LSRMST_ESCAPE: int +SERIAL_LSRMST_LSR_DATA: int +SERIAL_LSRMST_LSR_NODATA: int +SERIAL_LSRMST_MST: int +SERIAL_IOC_FCR_FIFO_ENABLE: int +SERIAL_IOC_FCR_RCVR_RESET: int +SERIAL_IOC_FCR_XMIT_RESET: int +SERIAL_IOC_FCR_DMA_MODE: int +SERIAL_IOC_FCR_RES1: int +SERIAL_IOC_FCR_RES2: int +SERIAL_IOC_FCR_RCVR_TRIGGER_LSB: int +SERIAL_IOC_FCR_RCVR_TRIGGER_MSB: int +SERIAL_IOC_MCR_DTR: int +SERIAL_IOC_MCR_RTS: int +SERIAL_IOC_MCR_OUT1: int +SERIAL_IOC_MCR_OUT2: int +SERIAL_IOC_MCR_LOOP: int +FSCTL_REQUEST_OPLOCK_LEVEL_1: int +FSCTL_REQUEST_OPLOCK_LEVEL_2: int +FSCTL_REQUEST_BATCH_OPLOCK: int +FSCTL_OPLOCK_BREAK_ACKNOWLEDGE: int +FSCTL_OPBATCH_ACK_CLOSE_PENDING: int +FSCTL_OPLOCK_BREAK_NOTIFY: int +FSCTL_LOCK_VOLUME: int +FSCTL_UNLOCK_VOLUME: int +FSCTL_DISMOUNT_VOLUME: int +FSCTL_IS_VOLUME_MOUNTED: int +FSCTL_IS_PATHNAME_VALID: int +FSCTL_MARK_VOLUME_DIRTY: int +FSCTL_QUERY_RETRIEVAL_POINTERS: int +FSCTL_GET_COMPRESSION: int +FSCTL_SET_COMPRESSION: int +FSCTL_MARK_AS_SYSTEM_HIVE: int +FSCTL_OPLOCK_BREAK_ACK_NO_2: int +FSCTL_INVALIDATE_VOLUMES: int +FSCTL_QUERY_FAT_BPB: int +FSCTL_REQUEST_FILTER_OPLOCK: int +FSCTL_FILESYSTEM_GET_STATISTICS: int +FSCTL_GET_NTFS_VOLUME_DATA: int +FSCTL_GET_NTFS_FILE_RECORD: int +FSCTL_GET_VOLUME_BITMAP: int +FSCTL_GET_RETRIEVAL_POINTERS: int +FSCTL_MOVE_FILE: int +FSCTL_IS_VOLUME_DIRTY: int +FSCTL_ALLOW_EXTENDED_DASD_IO: int +FSCTL_FIND_FILES_BY_SID: int +FSCTL_SET_OBJECT_ID: int +FSCTL_GET_OBJECT_ID: int +FSCTL_DELETE_OBJECT_ID: int +FSCTL_SET_REPARSE_POINT: int +FSCTL_GET_REPARSE_POINT: int +FSCTL_DELETE_REPARSE_POINT: int +FSCTL_ENUM_USN_DATA: int +FSCTL_SECURITY_ID_CHECK: int 
+FSCTL_READ_USN_JOURNAL: int +FSCTL_SET_OBJECT_ID_EXTENDED: int +FSCTL_CREATE_OR_GET_OBJECT_ID: int +FSCTL_SET_SPARSE: int +FSCTL_SET_ZERO_DATA: int +FSCTL_QUERY_ALLOCATED_RANGES: int +FSCTL_SET_ENCRYPTION: int +FSCTL_ENCRYPTION_FSCTL_IO: int +FSCTL_WRITE_RAW_ENCRYPTED: int +FSCTL_READ_RAW_ENCRYPTED: int +FSCTL_CREATE_USN_JOURNAL: int +FSCTL_READ_FILE_USN_DATA: int +FSCTL_WRITE_USN_CLOSE_RECORD: int +FSCTL_EXTEND_VOLUME: int +FSCTL_QUERY_USN_JOURNAL: int +FSCTL_DELETE_USN_JOURNAL: int +FSCTL_MARK_HANDLE: int +FSCTL_SIS_COPYFILE: int +FSCTL_SIS_LINK_FILES: int +FSCTL_HSM_MSG: int +FSCTL_HSM_DATA: int +FSCTL_RECALL_FILE: int +FSCTL_READ_FROM_PLEX: int +FSCTL_FILE_PREFETCH: int +FSCTL_MAKE_MEDIA_COMPATIBLE: int +FSCTL_SET_DEFECT_MANAGEMENT: int +FSCTL_QUERY_SPARING_INFO: int +FSCTL_QUERY_ON_DISK_VOLUME_INFO: int +FSCTL_SET_VOLUME_COMPRESSION_STATE: int +FSCTL_TXFS_MODIFY_RM: int +FSCTL_TXFS_QUERY_RM_INFORMATION: int +FSCTL_TXFS_ROLLFORWARD_REDO: int +FSCTL_TXFS_ROLLFORWARD_UNDO: int +FSCTL_TXFS_START_RM: int +FSCTL_TXFS_SHUTDOWN_RM: int +FSCTL_TXFS_READ_BACKUP_INFORMATION: int +FSCTL_TXFS_WRITE_BACKUP_INFORMATION: int +FSCTL_TXFS_CREATE_SECONDARY_RM: int +FSCTL_TXFS_GET_METADATA_INFO: int +FSCTL_TXFS_GET_TRANSACTED_VERSION: int +FSCTL_TXFS_CREATE_MINIVERSION: int +FSCTL_TXFS_TRANSACTION_ACTIVE: int +FSCTL_SET_ZERO_ON_DEALLOCATION: int +FSCTL_SET_REPAIR: int +FSCTL_GET_REPAIR: int +FSCTL_WAIT_FOR_REPAIR: int +FSCTL_INITIATE_REPAIR: int +FSCTL_CSC_INTERNAL: int +FSCTL_SHRINK_VOLUME: int +FSCTL_SET_SHORT_NAME_BEHAVIOR: int +FSCTL_DFSR_SET_GHOST_HANDLE_STATE: int +FSCTL_QUERY_PAGEFILE_ENCRYPTION: int +IOCTL_VOLUME_BASE: int +IOCTL_VOLUME_GET_VOLUME_DISK_EXTENTS: int +IOCTL_VOLUME_ONLINE: int +IOCTL_VOLUME_OFFLINE: int +IOCTL_VOLUME_IS_CLUSTERED: int +IOCTL_VOLUME_GET_GPT_ATTRIBUTES: int +DDS_4mm: int +MiniQic: int +Travan: int +QIC: int +MP_8mm: int +AME_8mm: int +AIT1_8mm: int +DLT: int +NCTP: int +IBM_3480: int +IBM_3490E: int +IBM_Magstar_3590: int +IBM_Magstar_MP: int +STK_DATA_D3: int +SONY_DTF: int +DV_6mm: int +DMI: int +SONY_D2: int +CLEANER_CARTRIDGE: int +CD_ROM: int +CD_R: int +CD_RW: int +DVD_ROM: int +DVD_R: int +DVD_RW: int +MO_3_RW: int +MO_5_WO: int +MO_5_RW: int +MO_5_LIMDOW: int +PC_5_WO: int +PC_5_RW: int +PD_5_RW: int +ABL_5_WO: int +PINNACLE_APEX_5_RW: int +SONY_12_WO: int +PHILIPS_12_WO: int +HITACHI_12_WO: int +CYGNET_12_WO: int +KODAK_14_WO: int +MO_NFR_525: int +NIKON_12_RW: int +IOMEGA_ZIP: int +IOMEGA_JAZ: int +SYQUEST_EZ135: int +SYQUEST_EZFLYER: int +SYQUEST_SYJET: int +AVATAR_F2: int +MP2_8mm: int +DST_S: int +DST_M: int +DST_L: int +VXATape_1: int +VXATape_2: int +STK_9840: int +LTO_Ultrium: int +LTO_Accelis: int +DVD_RAM: int +AIT_8mm: int +ADR_1: int +ADR_2: int +STK_9940: int +BusTypeUnknown: int +BusTypeScsi: int +BusTypeAtapi: int +BusTypeAta: int +BusType1394: int +BusTypeSsa: int +BusTypeFibre: int +BusTypeUsb: int +BusTypeRAID: int +BusTypeiScsi: int +BusTypeSas: int +BusTypeSata: int +BusTypeMaxReserved: int +Unknown: int +F5_1Pt2_512: int +F3_1Pt44_512: int +F3_2Pt88_512: int +F3_20Pt8_512: int +F3_720_512: int +F5_360_512: int +F5_320_512: int +F5_320_1024: int +F5_180_512: int +F5_160_512: int +RemovableMedia: int +FixedMedia: int +F3_120M_512: int +F3_640_512: int +F5_640_512: int +F5_720_512: int +F3_1Pt2_512: int +F3_1Pt23_1024: int +F5_1Pt23_1024: int +F3_128Mb_512: int +F3_230Mb_512: int +F8_256_128: int +F3_200Mb_512: int +F3_240M_512: int +F3_32M_512: int +PARTITION_STYLE_MBR: int +PARTITION_STYLE_GPT: int +PARTITION_STYLE_RAW: int +DetectNone: 
int +DetectInt13: int +DetectExInt13: int +EqualPriority: int +KeepPrefetchedData: int +KeepReadData: int +DiskWriteCacheNormal: int +DiskWriteCacheForceDisable: int +DiskWriteCacheDisableNotSupported: int +RequestSize: int +RequestLocation: int +DeviceProblemNone: int +DeviceProblemHardware: int +DeviceProblemCHMError: int +DeviceProblemDoorOpen: int +DeviceProblemCalibrationError: int +DeviceProblemTargetFailure: int +DeviceProblemCHMMoveError: int +DeviceProblemCHMZeroError: int +DeviceProblemCartridgeInsertError: int +DeviceProblemPositionError: int +DeviceProblemSensorError: int +DeviceProblemCartridgeEjectError: int +DeviceProblemGripperError: int +DeviceProblemDriveError: int +FILE_READ_DATA: int +FILE_WRITE_DATA: int +FSCTL_TXFS_LIST_TRANSACTIONS: int +FSCTL_TXFS_LIST_TRANSACTION_LOCKED_FILES: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winnt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winnt.pyi new file mode 100644 index 00000000..9c49122d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winnt.pyi @@ -0,0 +1,1137 @@ +from _typeshed import Incomplete + +APPLICATION_ERROR_MASK: int +ERROR_SEVERITY_SUCCESS: int +ERROR_SEVERITY_INFORMATIONAL: int +ERROR_SEVERITY_WARNING: int +ERROR_SEVERITY_ERROR: int +MINCHAR: int +MAXCHAR: int +MINSHORT: int +MAXSHORT: int +MINLONG: int +MAXLONG: int +MAXBYTE: int +MAXWORD: int +MAXDWORD: int +LANG_NEUTRAL: int +LANG_AFRIKAANS: int +LANG_ALBANIAN: int +LANG_ARABIC: int +LANG_BASQUE: int +LANG_BELARUSIAN: int +LANG_BULGARIAN: int +LANG_CATALAN: int +LANG_CHINESE: int +LANG_CROATIAN: int +LANG_CZECH: int +LANG_DANISH: int +LANG_DUTCH: int +LANG_ENGLISH: int +LANG_ESTONIAN: int +LANG_FAEROESE: int +LANG_FARSI: int +LANG_FINNISH: int +LANG_FRENCH: int +LANG_GERMAN: int +LANG_GREEK: int +LANG_HEBREW: int +LANG_HINDI: int +LANG_HUNGARIAN: int +LANG_ICELANDIC: int +LANG_INDONESIAN: int +LANG_ITALIAN: int +LANG_JAPANESE: int +LANG_KOREAN: int +LANG_LATVIAN: int +LANG_LITHUANIAN: int +LANG_MACEDONIAN: int +LANG_MALAY: int +LANG_NORWEGIAN: int +LANG_POLISH: int +LANG_PORTUGUESE: int +LANG_ROMANIAN: int +LANG_RUSSIAN: int +LANG_SERBIAN: int +LANG_SLOVAK: int +LANG_SLOVENIAN: int +LANG_SPANISH: int +LANG_SWAHILI: int +LANG_SWEDISH: int +LANG_THAI: int +LANG_TURKISH: int +LANG_UKRAINIAN: int +LANG_VIETNAMESE: int +SUBLANG_NEUTRAL: int +SUBLANG_DEFAULT: int +SUBLANG_SYS_DEFAULT: int +SUBLANG_ARABIC_SAUDI_ARABIA: int +SUBLANG_ARABIC_IRAQ: int +SUBLANG_ARABIC_EGYPT: int +SUBLANG_ARABIC_LIBYA: int +SUBLANG_ARABIC_ALGERIA: int +SUBLANG_ARABIC_MOROCCO: int +SUBLANG_ARABIC_TUNISIA: int +SUBLANG_ARABIC_OMAN: int +SUBLANG_ARABIC_YEMEN: int +SUBLANG_ARABIC_SYRIA: int +SUBLANG_ARABIC_JORDAN: int +SUBLANG_ARABIC_LEBANON: int +SUBLANG_ARABIC_KUWAIT: int +SUBLANG_ARABIC_UAE: int +SUBLANG_ARABIC_BAHRAIN: int +SUBLANG_ARABIC_QATAR: int +SUBLANG_CHINESE_TRADITIONAL: int +SUBLANG_CHINESE_SIMPLIFIED: int +SUBLANG_CHINESE_HONGKONG: int +SUBLANG_CHINESE_SINGAPORE: int +SUBLANG_CHINESE_MACAU: int +SUBLANG_DUTCH: int +SUBLANG_DUTCH_BELGIAN: int +SUBLANG_ENGLISH_US: int +SUBLANG_ENGLISH_UK: int +SUBLANG_ENGLISH_AUS: int +SUBLANG_ENGLISH_CAN: int +SUBLANG_ENGLISH_NZ: int +SUBLANG_ENGLISH_EIRE: int +SUBLANG_ENGLISH_SOUTH_AFRICA: int +SUBLANG_ENGLISH_JAMAICA: int +SUBLANG_ENGLISH_CARIBBEAN: int +SUBLANG_ENGLISH_BELIZE: int +SUBLANG_ENGLISH_TRINIDAD: int +SUBLANG_ENGLISH_ZIMBABWE: int 
+SUBLANG_ENGLISH_PHILIPPINES: int +SUBLANG_FRENCH: int +SUBLANG_FRENCH_BELGIAN: int +SUBLANG_FRENCH_CANADIAN: int +SUBLANG_FRENCH_SWISS: int +SUBLANG_FRENCH_LUXEMBOURG: int +SUBLANG_FRENCH_MONACO: int +SUBLANG_GERMAN: int +SUBLANG_GERMAN_SWISS: int +SUBLANG_GERMAN_AUSTRIAN: int +SUBLANG_GERMAN_LUXEMBOURG: int +SUBLANG_GERMAN_LIECHTENSTEIN: int +SUBLANG_ITALIAN: int +SUBLANG_ITALIAN_SWISS: int +SUBLANG_KOREAN: int +SUBLANG_KOREAN_JOHAB: int +SUBLANG_LITHUANIAN: int +SUBLANG_LITHUANIAN_CLASSIC: int +SUBLANG_MALAY_MALAYSIA: int +SUBLANG_MALAY_BRUNEI_DARUSSALAM: int +SUBLANG_NORWEGIAN_BOKMAL: int +SUBLANG_NORWEGIAN_NYNORSK: int +SUBLANG_PORTUGUESE: int +SUBLANG_PORTUGUESE_BRAZILIAN: int +SUBLANG_SERBIAN_LATIN: int +SUBLANG_SERBIAN_CYRILLIC: int +SUBLANG_SPANISH: int +SUBLANG_SPANISH_MEXICAN: int +SUBLANG_SPANISH_MODERN: int +SUBLANG_SPANISH_GUATEMALA: int +SUBLANG_SPANISH_COSTA_RICA: int +SUBLANG_SPANISH_PANAMA: int +SUBLANG_SPANISH_DOMINICAN_REPUBLIC: int +SUBLANG_SPANISH_VENEZUELA: int +SUBLANG_SPANISH_COLOMBIA: int +SUBLANG_SPANISH_PERU: int +SUBLANG_SPANISH_ARGENTINA: int +SUBLANG_SPANISH_ECUADOR: int +SUBLANG_SPANISH_CHILE: int +SUBLANG_SPANISH_URUGUAY: int +SUBLANG_SPANISH_PARAGUAY: int +SUBLANG_SPANISH_BOLIVIA: int +SUBLANG_SPANISH_EL_SALVADOR: int +SUBLANG_SPANISH_HONDURAS: int +SUBLANG_SPANISH_NICARAGUA: int +SUBLANG_SPANISH_PUERTO_RICO: int +SUBLANG_SWEDISH: int +SUBLANG_SWEDISH_FINLAND: int +SORT_DEFAULT: int +SORT_JAPANESE_XJIS: int +SORT_JAPANESE_UNICODE: int +SORT_CHINESE_BIG5: int +SORT_CHINESE_PRCP: int +SORT_CHINESE_UNICODE: int +SORT_CHINESE_PRC: int +SORT_KOREAN_KSC: int +SORT_KOREAN_UNICODE: int +SORT_GERMAN_PHONE_BOOK: int + +def PRIMARYLANGID(lgid): ... +def SUBLANGID(lgid): ... + +NLS_VALID_LOCALE_MASK: int + +def LANGIDFROMLCID(lcid): ... +def SORTIDFROMLCID(lcid): ... +def UNREFERENCED_PARAMETER(P): ... +def DBG_UNREFERENCED_PARAMETER(P): ... +def DBG_UNREFERENCED_LOCAL_VARIABLE(V): ... 
+ +MAXIMUM_WAIT_OBJECTS: int +MAXIMUM_SUSPEND_COUNT: int +EXCEPTION_NONCONTINUABLE: int +EXCEPTION_MAXIMUM_PARAMETERS: int +PROCESS_TERMINATE: int +PROCESS_CREATE_THREAD: int +PROCESS_VM_OPERATION: int +PROCESS_VM_READ: int +PROCESS_VM_WRITE: int +PROCESS_DUP_HANDLE: int +PROCESS_CREATE_PROCESS: int +PROCESS_SET_QUOTA: int +PROCESS_SET_INFORMATION: int +PROCESS_QUERY_INFORMATION: int +PROCESS_SUSPEND_RESUME: int +PROCESS_QUERY_LIMITED_INFORMATION: int +PROCESS_SET_LIMITED_INFORMATION: int +MAXIMUM_PROCESSORS: int +THREAD_TERMINATE: int +THREAD_SUSPEND_RESUME: int +THREAD_GET_CONTEXT: int +THREAD_SET_CONTEXT: int +THREAD_SET_INFORMATION: int +THREAD_QUERY_INFORMATION: int +THREAD_SET_THREAD_TOKEN: int +THREAD_IMPERSONATE: int +THREAD_DIRECT_IMPERSONATION: int +THREAD_SET_LIMITED_INFORMATION: int +THREAD_QUERY_LIMITED_INFORMATION: int +THREAD_RESUME: int +JOB_OBJECT_ASSIGN_PROCESS: int +JOB_OBJECT_SET_ATTRIBUTES: int +JOB_OBJECT_QUERY: int +JOB_OBJECT_TERMINATE: int +TLS_MINIMUM_AVAILABLE: int +THREAD_BASE_PRIORITY_LOWRT: int +THREAD_BASE_PRIORITY_MAX: int +THREAD_BASE_PRIORITY_MIN: int +THREAD_BASE_PRIORITY_IDLE: int +JOB_OBJECT_LIMIT_WORKINGSET: int +JOB_OBJECT_LIMIT_PROCESS_TIME: int +JOB_OBJECT_LIMIT_JOB_TIME: int +JOB_OBJECT_LIMIT_ACTIVE_PROCESS: int +JOB_OBJECT_LIMIT_AFFINITY: int +JOB_OBJECT_LIMIT_PRIORITY_CLASS: int +JOB_OBJECT_LIMIT_VALID_FLAGS: int +EVENT_MODIFY_STATE: int +MUTANT_QUERY_STATE: int +SEMAPHORE_MODIFY_STATE: int +TIME_ZONE_ID_UNKNOWN: int +TIME_ZONE_ID_STANDARD: int +TIME_ZONE_ID_DAYLIGHT: int +PROCESSOR_INTEL_386: int +PROCESSOR_INTEL_486: int +PROCESSOR_INTEL_PENTIUM: int +PROCESSOR_MIPS_R4000: int +PROCESSOR_ALPHA_21064: int +PROCESSOR_HITACHI_SH3: int +PROCESSOR_HITACHI_SH3E: int +PROCESSOR_HITACHI_SH4: int +PROCESSOR_MOTOROLA_821: int +PROCESSOR_ARM_7TDMI: int +PROCESSOR_ARCHITECTURE_INTEL: int +PROCESSOR_ARCHITECTURE_MIPS: int +PROCESSOR_ARCHITECTURE_ALPHA: int +PROCESSOR_ARCHITECTURE_PPC: int +PROCESSOR_ARCHITECTURE_SH: int +PROCESSOR_ARCHITECTURE_ARM: int +PROCESSOR_ARCHITECTURE_IA64: int +PROCESSOR_ARCHITECTURE_ALPHA64: int +PROCESSOR_ARCHITECTURE_MSIL: int +PROCESSOR_ARCHITECTURE_AMD64: int +PROCESSOR_ARCHITECTURE_IA32_ON_WIN64: int +PROCESSOR_ARCHITECTURE_UNKNOWN: int +PF_FLOATING_POINT_PRECISION_ERRATA: int +PF_FLOATING_POINT_EMULATED: int +PF_COMPARE_EXCHANGE_DOUBLE: int +PF_MMX_INSTRUCTIONS_AVAILABLE: int +PF_PPC_MOVEMEM_64BIT_OK: int +PF_ALPHA_BYTE_INSTRUCTIONS: int +SECTION_QUERY: int +SECTION_MAP_WRITE: int +SECTION_MAP_READ: int +SECTION_MAP_EXECUTE: int +SECTION_EXTEND_SIZE: int +PAGE_NOACCESS: int +PAGE_READONLY: int +PAGE_READWRITE: int +PAGE_WRITECOPY: int +PAGE_EXECUTE: int +PAGE_EXECUTE_READ: int +PAGE_EXECUTE_READWRITE: int +PAGE_EXECUTE_WRITECOPY: int +PAGE_GUARD: int +PAGE_NOCACHE: int +MEM_COMMIT: int +MEM_RESERVE: int +MEM_DECOMMIT: int +MEM_RELEASE: int +MEM_FREE: int +MEM_PRIVATE: int +MEM_MAPPED: int +MEM_RESET: int +MEM_TOP_DOWN: int +MEM_4MB_PAGES: int +SEC_FILE: int +SEC_IMAGE: int +SEC_VLM: int +SEC_RESERVE: int +SEC_COMMIT: int +SEC_NOCACHE: int +MEM_IMAGE: int +FILE_READ_DATA: int +FILE_LIST_DIRECTORY: int +FILE_WRITE_DATA: int +FILE_ADD_FILE: int +FILE_APPEND_DATA: int +FILE_ADD_SUBDIRECTORY: int +FILE_CREATE_PIPE_INSTANCE: int +FILE_READ_EA: int +FILE_WRITE_EA: int +FILE_EXECUTE: int +FILE_TRAVERSE: int +FILE_DELETE_CHILD: int +FILE_READ_ATTRIBUTES: int +FILE_WRITE_ATTRIBUTES: int +FILE_SHARE_READ: int +FILE_SHARE_WRITE: int +FILE_SHARE_DELETE: int +FILE_ATTRIBUTE_READONLY: int +FILE_ATTRIBUTE_HIDDEN: int +FILE_ATTRIBUTE_SYSTEM: 
int +FILE_ATTRIBUTE_DIRECTORY: int +FILE_ATTRIBUTE_ARCHIVE: int +FILE_ATTRIBUTE_DEVICE: int +FILE_ATTRIBUTE_NORMAL: int +FILE_ATTRIBUTE_TEMPORARY: int +FILE_ATTRIBUTE_SPARSE_FILE: int +FILE_ATTRIBUTE_REPARSE_POINT: int +FILE_ATTRIBUTE_COMPRESSED: int +FILE_ATTRIBUTE_OFFLINE: int +FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: int +FILE_ATTRIBUTE_ENCRYPTED: int +FILE_ATTRIBUTE_VIRTUAL: int +FILE_NOTIFY_CHANGE_FILE_NAME: int +FILE_NOTIFY_CHANGE_DIR_NAME: int +FILE_NOTIFY_CHANGE_ATTRIBUTES: int +FILE_NOTIFY_CHANGE_SIZE: int +FILE_NOTIFY_CHANGE_LAST_WRITE: int +FILE_NOTIFY_CHANGE_LAST_ACCESS: int +FILE_NOTIFY_CHANGE_CREATION: int +FILE_NOTIFY_CHANGE_SECURITY: int +FILE_ACTION_ADDED: int +FILE_ACTION_REMOVED: int +FILE_ACTION_MODIFIED: int +FILE_ACTION_RENAMED_OLD_NAME: int +FILE_ACTION_RENAMED_NEW_NAME: int +FILE_CASE_SENSITIVE_SEARCH: int +FILE_CASE_PRESERVED_NAMES: int +FILE_UNICODE_ON_DISK: int +FILE_PERSISTENT_ACLS: int +FILE_FILE_COMPRESSION: int +FILE_VOLUME_QUOTAS: int +FILE_SUPPORTS_SPARSE_FILES: int +FILE_SUPPORTS_REPARSE_POINTS: int +FILE_SUPPORTS_REMOTE_STORAGE: int +FILE_VOLUME_IS_COMPRESSED: int +FILE_SUPPORTS_OBJECT_IDS: int +FILE_SUPPORTS_ENCRYPTION: int +MAXIMUM_REPARSE_DATA_BUFFER_SIZE: Incomplete +IO_REPARSE_TAG_RESERVED_ZERO: int +IO_REPARSE_TAG_RESERVED_ONE: int +IO_REPARSE_TAG_SYMBOLIC_LINK: int +IO_REPARSE_TAG_NSS: int +IO_REPARSE_TAG_FILTER_MANAGER: int +IO_REPARSE_TAG_DFS: int +IO_REPARSE_TAG_SIS: int +IO_REPARSE_TAG_MOUNT_POINT: int +IO_REPARSE_TAG_HSM: int +IO_REPARSE_TAG_NSSRECOVER: int +IO_REPARSE_TAG_RESERVED_MS_RANGE: int +IO_REPARSE_TAG_RESERVED_RANGE: int +IO_COMPLETION_MODIFY_STATE: int +DUPLICATE_CLOSE_SOURCE: int +DUPLICATE_SAME_ACCESS: int +DELETE: int +READ_CONTROL: int +WRITE_DAC: int +WRITE_OWNER: int +SYNCHRONIZE: int +STANDARD_RIGHTS_REQUIRED: int +STANDARD_RIGHTS_READ: int +STANDARD_RIGHTS_WRITE: int +STANDARD_RIGHTS_EXECUTE: int +STANDARD_RIGHTS_ALL: int +SPECIFIC_RIGHTS_ALL: int +IO_COMPLETION_ALL_ACCESS: Incomplete +ACCESS_SYSTEM_SECURITY: int +MAXIMUM_ALLOWED: int +GENERIC_READ: int +GENERIC_WRITE: int +GENERIC_EXECUTE: int +GENERIC_ALL: int +SID_REVISION: int +SID_MAX_SUB_AUTHORITIES: int +SID_RECOMMENDED_SUB_AUTHORITIES: int +SidTypeUser: int +SidTypeGroup: int +SidTypeDomain: int +SidTypeAlias: int +SidTypeWellKnownGroup: int +SidTypeDeletedAccount: int +SidTypeInvalid: int +SidTypeUnknown: int +SECURITY_NULL_RID: int +SECURITY_WORLD_RID: int +SECURITY_LOCAL_RID: int +SECURITY_CREATOR_OWNER_RID: int +SECURITY_CREATOR_GROUP_RID: int +SECURITY_CREATOR_OWNER_SERVER_RID: int +SECURITY_CREATOR_GROUP_SERVER_RID: int +SECURITY_DIALUP_RID: int +SECURITY_NETWORK_RID: int +SECURITY_BATCH_RID: int +SECURITY_INTERACTIVE_RID: int +SECURITY_SERVICE_RID: int +SECURITY_ANONYMOUS_LOGON_RID: int +SECURITY_PROXY_RID: int +SECURITY_SERVER_LOGON_RID: int +SECURITY_PRINCIPAL_SELF_RID: int +SECURITY_AUTHENTICATED_USER_RID: int +SECURITY_LOGON_IDS_RID: int +SECURITY_LOGON_IDS_RID_COUNT: int +SECURITY_LOCAL_SYSTEM_RID: int +SECURITY_NT_NON_UNIQUE: int +SECURITY_BUILTIN_DOMAIN_RID: int +DOMAIN_USER_RID_ADMIN: int +DOMAIN_USER_RID_GUEST: int +DOMAIN_GROUP_RID_ADMINS: int +DOMAIN_GROUP_RID_USERS: int +DOMAIN_GROUP_RID_GUESTS: int +DOMAIN_ALIAS_RID_ADMINS: int +DOMAIN_ALIAS_RID_USERS: int +DOMAIN_ALIAS_RID_GUESTS: int +DOMAIN_ALIAS_RID_POWER_USERS: int +DOMAIN_ALIAS_RID_ACCOUNT_OPS: int +DOMAIN_ALIAS_RID_SYSTEM_OPS: int +DOMAIN_ALIAS_RID_PRINT_OPS: int +DOMAIN_ALIAS_RID_BACKUP_OPS: int +DOMAIN_ALIAS_RID_REPLICATOR: int +SE_GROUP_MANDATORY: int +SE_GROUP_ENABLED_BY_DEFAULT: int 
+SE_GROUP_ENABLED: int +SE_GROUP_OWNER: int +SE_GROUP_LOGON_ID: int +ACL_REVISION: int +ACL_REVISION_DS: int +ACL_REVISION1: int +ACL_REVISION2: int +ACL_REVISION3: int +ACL_REVISION4: int +MAX_ACL_REVISION: int +ACCESS_MIN_MS_ACE_TYPE: int +ACCESS_ALLOWED_ACE_TYPE: int +ACCESS_DENIED_ACE_TYPE: int +SYSTEM_AUDIT_ACE_TYPE: int +SYSTEM_ALARM_ACE_TYPE: int +ACCESS_MAX_MS_V2_ACE_TYPE: int +ACCESS_ALLOWED_COMPOUND_ACE_TYPE: int +ACCESS_MAX_MS_V3_ACE_TYPE: int +ACCESS_MIN_MS_OBJECT_ACE_TYPE: int +ACCESS_ALLOWED_OBJECT_ACE_TYPE: int +ACCESS_DENIED_OBJECT_ACE_TYPE: int +SYSTEM_AUDIT_OBJECT_ACE_TYPE: int +SYSTEM_ALARM_OBJECT_ACE_TYPE: int +ACCESS_MAX_MS_OBJECT_ACE_TYPE: int +ACCESS_MAX_MS_V4_ACE_TYPE: int +ACCESS_MAX_MS_ACE_TYPE: int +ACCESS_ALLOWED_CALLBACK_ACE_TYPE: int +ACCESS_DENIED_CALLBACK_ACE_TYPE: int +ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE: int +ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE: int +SYSTEM_AUDIT_CALLBACK_ACE_TYPE: int +SYSTEM_ALARM_CALLBACK_ACE_TYPE: int +SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE: int +SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE: int +SYSTEM_MANDATORY_LABEL_ACE_TYPE: int +ACCESS_MAX_MS_V5_ACE_TYPE: int +OBJECT_INHERIT_ACE: int +CONTAINER_INHERIT_ACE: int +NO_PROPAGATE_INHERIT_ACE: int +INHERIT_ONLY_ACE: int +INHERITED_ACE: int +VALID_INHERIT_FLAGS: int +SUCCESSFUL_ACCESS_ACE_FLAG: int +FAILED_ACCESS_ACE_FLAG: int +ACE_OBJECT_TYPE_PRESENT: int +ACE_INHERITED_OBJECT_TYPE_PRESENT: int +SECURITY_DESCRIPTOR_REVISION: int +SECURITY_DESCRIPTOR_REVISION1: int +SECURITY_DESCRIPTOR_MIN_LENGTH: int +SE_OWNER_DEFAULTED: int +SE_GROUP_DEFAULTED: int +SE_DACL_PRESENT: int +SE_DACL_DEFAULTED: int +SE_SACL_PRESENT: int +SE_SACL_DEFAULTED: int +SE_DACL_AUTO_INHERIT_REQ: int +SE_SACL_AUTO_INHERIT_REQ: int +SE_DACL_AUTO_INHERITED: int +SE_SACL_AUTO_INHERITED: int +SE_DACL_PROTECTED: int +SE_SACL_PROTECTED: int +SE_SELF_RELATIVE: int +ACCESS_OBJECT_GUID: int +ACCESS_PROPERTY_SET_GUID: int +ACCESS_PROPERTY_GUID: int +ACCESS_MAX_LEVEL: int +AUDIT_ALLOW_NO_PRIVILEGE: int +ACCESS_DS_SOURCE_A: str +ACCESS_DS_OBJECT_TYPE_NAME_A: str +SE_PRIVILEGE_ENABLED_BY_DEFAULT: int +SE_PRIVILEGE_ENABLED: int +SE_PRIVILEGE_USED_FOR_ACCESS: int +PRIVILEGE_SET_ALL_NECESSARY: int +SE_CREATE_TOKEN_NAME: str +SE_ASSIGNPRIMARYTOKEN_NAME: str +SE_LOCK_MEMORY_NAME: str +SE_INCREASE_QUOTA_NAME: str +SE_UNSOLICITED_INPUT_NAME: str +SE_MACHINE_ACCOUNT_NAME: str +SE_TCB_NAME: str +SE_SECURITY_NAME: str +SE_TAKE_OWNERSHIP_NAME: str +SE_LOAD_DRIVER_NAME: str +SE_SYSTEM_PROFILE_NAME: str +SE_SYSTEMTIME_NAME: str +SE_PROF_SINGLE_PROCESS_NAME: str +SE_INC_BASE_PRIORITY_NAME: str +SE_CREATE_PAGEFILE_NAME: str +SE_CREATE_PERMANENT_NAME: str +SE_BACKUP_NAME: str +SE_RESTORE_NAME: str +SE_SHUTDOWN_NAME: str +SE_DEBUG_NAME: str +SE_AUDIT_NAME: str +SE_SYSTEM_ENVIRONMENT_NAME: str +SE_CHANGE_NOTIFY_NAME: str +SE_REMOTE_SHUTDOWN_NAME: str +TOKEN_ASSIGN_PRIMARY: int +TOKEN_DUPLICATE: int +TOKEN_IMPERSONATE: int +TOKEN_QUERY: int +TOKEN_QUERY_SOURCE: int +TOKEN_ADJUST_PRIVILEGES: int +TOKEN_ADJUST_GROUPS: int +TOKEN_ADJUST_DEFAULT: int +TOKEN_ALL_ACCESS: Incomplete +TOKEN_READ: Incomplete +TOKEN_WRITE: Incomplete +TOKEN_EXECUTE: int +TOKEN_SOURCE_LENGTH: int +TokenPrimary: int +TokenImpersonation: int +TokenUser: int +TokenGroups: int +TokenPrivileges: int +TokenOwner: int +TokenPrimaryGroup: int +TokenDefaultDacl: int +TokenSource: int +TokenType: int +TokenImpersonationLevel: int +TokenStatistics: int +TokenRestrictedSids: int +TokenSessionId: int +TokenGroupsAndPrivileges: int +TokenSessionReference: int +TokenSandBoxInert: int 
+TokenAuditPolicy: int +TokenOrigin: int +TokenElevationType: int +TokenLinkedToken: int +TokenElevation: int +TokenHasRestrictions: int +TokenAccessInformation: int +TokenVirtualizationAllowed: int +TokenVirtualizationEnabled: int +TokenIntegrityLevel: int +TokenUIAccess: int +TokenMandatoryPolicy: int +TokenLogonSid: int +OWNER_SECURITY_INFORMATION: int +GROUP_SECURITY_INFORMATION: int +DACL_SECURITY_INFORMATION: int +SACL_SECURITY_INFORMATION: int +LABEL_SECURITY_INFORMATION: int +IMAGE_DOS_SIGNATURE: int +IMAGE_OS2_SIGNATURE: int +IMAGE_OS2_SIGNATURE_LE: int +IMAGE_VXD_SIGNATURE: int +IMAGE_NT_SIGNATURE: int +IMAGE_SIZEOF_FILE_HEADER: int +IMAGE_FILE_RELOCS_STRIPPED: int +IMAGE_FILE_EXECUTABLE_IMAGE: int +IMAGE_FILE_LINE_NUMS_STRIPPED: int +IMAGE_FILE_LOCAL_SYMS_STRIPPED: int +IMAGE_FILE_AGGRESIVE_WS_TRIM: int +IMAGE_FILE_LARGE_ADDRESS_AWARE: int +IMAGE_FILE_BYTES_REVERSED_LO: int +IMAGE_FILE_32BIT_MACHINE: int +IMAGE_FILE_DEBUG_STRIPPED: int +IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP: int +IMAGE_FILE_NET_RUN_FROM_SWAP: int +IMAGE_FILE_SYSTEM: int +IMAGE_FILE_DLL: int +IMAGE_FILE_UP_SYSTEM_ONLY: int +IMAGE_FILE_BYTES_REVERSED_HI: int +IMAGE_FILE_MACHINE_UNKNOWN: int +IMAGE_FILE_MACHINE_I386: int +IMAGE_FILE_MACHINE_R3000: int +IMAGE_FILE_MACHINE_R4000: int +IMAGE_FILE_MACHINE_R10000: int +IMAGE_FILE_MACHINE_WCEMIPSV2: int +IMAGE_FILE_MACHINE_ALPHA: int +IMAGE_FILE_MACHINE_POWERPC: int +IMAGE_FILE_MACHINE_SH3: int +IMAGE_FILE_MACHINE_SH3E: int +IMAGE_FILE_MACHINE_SH4: int +IMAGE_FILE_MACHINE_ARM: int +IMAGE_NUMBEROF_DIRECTORY_ENTRIES: int +IMAGE_SIZEOF_ROM_OPTIONAL_HEADER: int +IMAGE_SIZEOF_STD_OPTIONAL_HEADER: int +IMAGE_SIZEOF_NT_OPTIONAL_HEADER: int +IMAGE_NT_OPTIONAL_HDR_MAGIC: int +IMAGE_ROM_OPTIONAL_HDR_MAGIC: int +IMAGE_SUBSYSTEM_UNKNOWN: int +IMAGE_SUBSYSTEM_NATIVE: int +IMAGE_SUBSYSTEM_WINDOWS_GUI: int +IMAGE_SUBSYSTEM_WINDOWS_CUI: int +IMAGE_SUBSYSTEM_WINDOWS_CE_GUI: int +IMAGE_SUBSYSTEM_OS2_CUI: int +IMAGE_SUBSYSTEM_POSIX_CUI: int +IMAGE_SUBSYSTEM_RESERVED8: int +IMAGE_DLLCHARACTERISTICS_WDM_DRIVER: int +IMAGE_DIRECTORY_ENTRY_EXPORT: int +IMAGE_DIRECTORY_ENTRY_IMPORT: int +IMAGE_DIRECTORY_ENTRY_RESOURCE: int +IMAGE_DIRECTORY_ENTRY_EXCEPTION: int +IMAGE_DIRECTORY_ENTRY_SECURITY: int +IMAGE_DIRECTORY_ENTRY_BASERELOC: int +IMAGE_DIRECTORY_ENTRY_DEBUG: int +IMAGE_DIRECTORY_ENTRY_COPYRIGHT: int +IMAGE_DIRECTORY_ENTRY_GLOBALPTR: int +IMAGE_DIRECTORY_ENTRY_TLS: int +IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG: int +IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT: int +IMAGE_DIRECTORY_ENTRY_IAT: int +IMAGE_SIZEOF_SHORT_NAME: int +IMAGE_SIZEOF_SECTION_HEADER: int +IMAGE_SCN_TYPE_NO_PAD: int +IMAGE_SCN_CNT_CODE: int +IMAGE_SCN_CNT_INITIALIZED_DATA: int +IMAGE_SCN_CNT_UNINITIALIZED_DATA: int +IMAGE_SCN_LNK_OTHER: int +IMAGE_SCN_LNK_INFO: int +IMAGE_SCN_LNK_REMOVE: int +IMAGE_SCN_LNK_COMDAT: int +IMAGE_SCN_MEM_FARDATA: int +IMAGE_SCN_MEM_PURGEABLE: int +IMAGE_SCN_MEM_16BIT: int +IMAGE_SCN_MEM_LOCKED: int +IMAGE_SCN_MEM_PRELOAD: int +IMAGE_SCN_ALIGN_1BYTES: int +IMAGE_SCN_ALIGN_2BYTES: int +IMAGE_SCN_ALIGN_4BYTES: int +IMAGE_SCN_ALIGN_8BYTES: int +IMAGE_SCN_ALIGN_16BYTES: int +IMAGE_SCN_ALIGN_32BYTES: int +IMAGE_SCN_ALIGN_64BYTES: int +IMAGE_SCN_LNK_NRELOC_OVFL: int +IMAGE_SCN_MEM_DISCARDABLE: int +IMAGE_SCN_MEM_NOT_CACHED: int +IMAGE_SCN_MEM_NOT_PAGED: int +IMAGE_SCN_MEM_SHARED: int +IMAGE_SCN_MEM_EXECUTE: int +IMAGE_SCN_MEM_READ: int +IMAGE_SCN_MEM_WRITE: int +IMAGE_SCN_SCALE_INDEX: int +IMAGE_SIZEOF_SYMBOL: int +IMAGE_SYM_TYPE_NULL: int +IMAGE_SYM_TYPE_VOID: int +IMAGE_SYM_TYPE_CHAR: int +IMAGE_SYM_TYPE_SHORT: int 
+IMAGE_SYM_TYPE_INT: int +IMAGE_SYM_TYPE_LONG: int +IMAGE_SYM_TYPE_FLOAT: int +IMAGE_SYM_TYPE_DOUBLE: int +IMAGE_SYM_TYPE_STRUCT: int +IMAGE_SYM_TYPE_UNION: int +IMAGE_SYM_TYPE_ENUM: int +IMAGE_SYM_TYPE_MOE: int +IMAGE_SYM_TYPE_BYTE: int +IMAGE_SYM_TYPE_WORD: int +IMAGE_SYM_TYPE_UINT: int +IMAGE_SYM_TYPE_DWORD: int +IMAGE_SYM_TYPE_PCODE: int +IMAGE_SYM_DTYPE_NULL: int +IMAGE_SYM_DTYPE_POINTER: int +IMAGE_SYM_DTYPE_FUNCTION: int +IMAGE_SYM_DTYPE_ARRAY: int +IMAGE_SYM_CLASS_NULL: int +IMAGE_SYM_CLASS_AUTOMATIC: int +IMAGE_SYM_CLASS_EXTERNAL: int +IMAGE_SYM_CLASS_STATIC: int +IMAGE_SYM_CLASS_REGISTER: int +IMAGE_SYM_CLASS_EXTERNAL_DEF: int +IMAGE_SYM_CLASS_LABEL: int +IMAGE_SYM_CLASS_UNDEFINED_LABEL: int +IMAGE_SYM_CLASS_MEMBER_OF_STRUCT: int +IMAGE_SYM_CLASS_ARGUMENT: int +IMAGE_SYM_CLASS_STRUCT_TAG: int +IMAGE_SYM_CLASS_MEMBER_OF_UNION: int +IMAGE_SYM_CLASS_UNION_TAG: int +IMAGE_SYM_CLASS_TYPE_DEFINITION: int +IMAGE_SYM_CLASS_UNDEFINED_STATIC: int +IMAGE_SYM_CLASS_ENUM_TAG: int +IMAGE_SYM_CLASS_MEMBER_OF_ENUM: int +IMAGE_SYM_CLASS_REGISTER_PARAM: int +IMAGE_SYM_CLASS_BIT_FIELD: int +IMAGE_SYM_CLASS_FAR_EXTERNAL: int +IMAGE_SYM_CLASS_BLOCK: int +IMAGE_SYM_CLASS_FUNCTION: int +IMAGE_SYM_CLASS_END_OF_STRUCT: int +IMAGE_SYM_CLASS_FILE: int +IMAGE_SYM_CLASS_SECTION: int +IMAGE_SYM_CLASS_WEAK_EXTERNAL: int +N_BTMASK: int +N_TMASK: int +N_TMASK1: int +N_TMASK2: int +N_BTSHFT: int +N_TSHIFT: int + +def BTYPE(x): ... +def ISPTR(x): ... +def ISFCN(x): ... +def ISARY(x): ... +def INCREF(x): ... +def DECREF(x): ... + +IMAGE_SIZEOF_AUX_SYMBOL: int +IMAGE_COMDAT_SELECT_NODUPLICATES: int +IMAGE_COMDAT_SELECT_ANY: int +IMAGE_COMDAT_SELECT_SAME_SIZE: int +IMAGE_COMDAT_SELECT_EXACT_MATCH: int +IMAGE_COMDAT_SELECT_ASSOCIATIVE: int +IMAGE_COMDAT_SELECT_LARGEST: int +IMAGE_COMDAT_SELECT_NEWEST: int +IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY: int +IMAGE_WEAK_EXTERN_SEARCH_LIBRARY: int +IMAGE_WEAK_EXTERN_SEARCH_ALIAS: int +IMAGE_SIZEOF_RELOCATION: int +IMAGE_REL_I386_ABSOLUTE: int +IMAGE_REL_I386_DIR16: int +IMAGE_REL_I386_REL16: int +IMAGE_REL_I386_DIR32: int +IMAGE_REL_I386_DIR32NB: int +IMAGE_REL_I386_SEG12: int +IMAGE_REL_I386_SECTION: int +IMAGE_REL_I386_SECREL: int +IMAGE_REL_I386_REL32: int +IMAGE_REL_MIPS_ABSOLUTE: int +IMAGE_REL_MIPS_REFHALF: int +IMAGE_REL_MIPS_REFWORD: int +IMAGE_REL_MIPS_JMPADDR: int +IMAGE_REL_MIPS_REFHI: int +IMAGE_REL_MIPS_REFLO: int +IMAGE_REL_MIPS_GPREL: int +IMAGE_REL_MIPS_LITERAL: int +IMAGE_REL_MIPS_SECTION: int +IMAGE_REL_MIPS_SECREL: int +IMAGE_REL_MIPS_SECRELLO: int +IMAGE_REL_MIPS_SECRELHI: int +IMAGE_REL_MIPS_REFWORDNB: int +IMAGE_REL_MIPS_PAIR: int +IMAGE_REL_ALPHA_ABSOLUTE: int +IMAGE_REL_ALPHA_REFLONG: int +IMAGE_REL_ALPHA_REFQUAD: int +IMAGE_REL_ALPHA_GPREL32: int +IMAGE_REL_ALPHA_LITERAL: int +IMAGE_REL_ALPHA_LITUSE: int +IMAGE_REL_ALPHA_GPDISP: int +IMAGE_REL_ALPHA_BRADDR: int +IMAGE_REL_ALPHA_HINT: int +IMAGE_REL_ALPHA_INLINE_REFLONG: int +IMAGE_REL_ALPHA_REFHI: int +IMAGE_REL_ALPHA_REFLO: int +IMAGE_REL_ALPHA_PAIR: int +IMAGE_REL_ALPHA_MATCH: int +IMAGE_REL_ALPHA_SECTION: int +IMAGE_REL_ALPHA_SECREL: int +IMAGE_REL_ALPHA_REFLONGNB: int +IMAGE_REL_ALPHA_SECRELLO: int +IMAGE_REL_ALPHA_SECRELHI: int +IMAGE_REL_PPC_ABSOLUTE: int +IMAGE_REL_PPC_ADDR64: int +IMAGE_REL_PPC_ADDR32: int +IMAGE_REL_PPC_ADDR24: int +IMAGE_REL_PPC_ADDR16: int +IMAGE_REL_PPC_ADDR14: int +IMAGE_REL_PPC_REL24: int +IMAGE_REL_PPC_REL14: int +IMAGE_REL_PPC_TOCREL16: int +IMAGE_REL_PPC_TOCREL14: int +IMAGE_REL_PPC_ADDR32NB: int +IMAGE_REL_PPC_SECREL: int +IMAGE_REL_PPC_SECTION: int +IMAGE_REL_PPC_IFGLUE: 
int +IMAGE_REL_PPC_IMGLUE: int +IMAGE_REL_PPC_SECREL16: int +IMAGE_REL_PPC_REFHI: int +IMAGE_REL_PPC_REFLO: int +IMAGE_REL_PPC_PAIR: int +IMAGE_REL_PPC_SECRELLO: int +IMAGE_REL_PPC_SECRELHI: int +IMAGE_REL_PPC_TYPEMASK: int +IMAGE_REL_PPC_NEG: int +IMAGE_REL_PPC_BRTAKEN: int +IMAGE_REL_PPC_BRNTAKEN: int +IMAGE_REL_PPC_TOCDEFN: int +IMAGE_REL_SH3_ABSOLUTE: int +IMAGE_REL_SH3_DIRECT16: int +IMAGE_REL_SH3_DIRECT32: int +IMAGE_REL_SH3_DIRECT8: int +IMAGE_REL_SH3_DIRECT8_WORD: int +IMAGE_REL_SH3_DIRECT8_LONG: int +IMAGE_REL_SH3_DIRECT4: int +IMAGE_REL_SH3_DIRECT4_WORD: int +IMAGE_REL_SH3_DIRECT4_LONG: int +IMAGE_REL_SH3_PCREL8_WORD: int +IMAGE_REL_SH3_PCREL8_LONG: int +IMAGE_REL_SH3_PCREL12_WORD: int +IMAGE_REL_SH3_STARTOF_SECTION: int +IMAGE_REL_SH3_SIZEOF_SECTION: int +IMAGE_REL_SH3_SECTION: int +IMAGE_REL_SH3_SECREL: int +IMAGE_REL_SH3_DIRECT32_NB: int +IMAGE_SIZEOF_LINENUMBER: int +IMAGE_SIZEOF_BASE_RELOCATION: int +IMAGE_REL_BASED_ABSOLUTE: int +IMAGE_REL_BASED_HIGH: int +IMAGE_REL_BASED_LOW: int +IMAGE_REL_BASED_HIGHLOW: int +IMAGE_REL_BASED_HIGHADJ: int +IMAGE_REL_BASED_MIPS_JMPADDR: int +IMAGE_REL_BASED_SECTION: int +IMAGE_REL_BASED_REL32: int +IMAGE_ARCHIVE_START_SIZE: int +IMAGE_ARCHIVE_START: str +IMAGE_ARCHIVE_END: str +IMAGE_ARCHIVE_PAD: str +IMAGE_ARCHIVE_LINKER_MEMBER: str +IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR: int +IMAGE_ORDINAL_FLAG: int + +def IMAGE_SNAP_BY_ORDINAL(Ordina): ... +def IMAGE_ORDINAL(Ordina): ... + +IMAGE_RESOURCE_NAME_IS_STRING: int +IMAGE_RESOURCE_DATA_IS_DIRECTORY: int +IMAGE_DEBUG_TYPE_UNKNOWN: int +IMAGE_DEBUG_TYPE_COFF: int +IMAGE_DEBUG_TYPE_CODEVIEW: int +IMAGE_DEBUG_TYPE_FPO: int +IMAGE_DEBUG_TYPE_MISC: int +IMAGE_DEBUG_TYPE_EXCEPTION: int +IMAGE_DEBUG_TYPE_FIXUP: int +IMAGE_DEBUG_TYPE_OMAP_TO_SRC: int +IMAGE_DEBUG_TYPE_OMAP_FROM_SRC: int +IMAGE_DEBUG_TYPE_BORLAND: int +FRAME_FPO: int +FRAME_TRAP: int +FRAME_TSS: int +FRAME_NONFPO: int +SIZEOF_RFPO_DATA: int +IMAGE_DEBUG_MISC_EXENAME: int +IMAGE_SEPARATE_DEBUG_SIGNATURE: int +IMAGE_SEPARATE_DEBUG_FLAGS_MASK: int +IMAGE_SEPARATE_DEBUG_MISMATCH: int +NULL: int +HEAP_NO_SERIALIZE: int +HEAP_GROWABLE: int +HEAP_GENERATE_EXCEPTIONS: int +HEAP_ZERO_MEMORY: int +HEAP_REALLOC_IN_PLACE_ONLY: int +HEAP_TAIL_CHECKING_ENABLED: int +HEAP_FREE_CHECKING_ENABLED: int +HEAP_DISABLE_COALESCE_ON_FREE: int +HEAP_CREATE_ALIGN_16: int +HEAP_CREATE_ENABLE_TRACING: int +HEAP_MAXIMUM_TAG: int +HEAP_PSEUDO_TAG_FLAG: int +HEAP_TAG_SHIFT: int +IS_TEXT_UNICODE_ASCII16: int +IS_TEXT_UNICODE_REVERSE_ASCII16: int +IS_TEXT_UNICODE_STATISTICS: int +IS_TEXT_UNICODE_REVERSE_STATISTICS: int +IS_TEXT_UNICODE_CONTROLS: int +IS_TEXT_UNICODE_REVERSE_CONTROLS: int +IS_TEXT_UNICODE_SIGNATURE: int +IS_TEXT_UNICODE_REVERSE_SIGNATURE: int +IS_TEXT_UNICODE_ILLEGAL_CHARS: int +IS_TEXT_UNICODE_ODD_LENGTH: int +IS_TEXT_UNICODE_DBCS_LEADBYTE: int +IS_TEXT_UNICODE_NULL_BYTES: int +IS_TEXT_UNICODE_UNICODE_MASK: int +IS_TEXT_UNICODE_REVERSE_MASK: int +IS_TEXT_UNICODE_NOT_UNICODE_MASK: int +IS_TEXT_UNICODE_NOT_ASCII_MASK: int +COMPRESSION_FORMAT_NONE: int +COMPRESSION_FORMAT_DEFAULT: int +COMPRESSION_FORMAT_LZNT1: int +COMPRESSION_ENGINE_STANDARD: int +COMPRESSION_ENGINE_MAXIMUM: int +MESSAGE_RESOURCE_UNICODE: int +RTL_CRITSECT_TYPE: int +RTL_RESOURCE_TYPE: int +SEF_DACL_AUTO_INHERIT: int +SEF_SACL_AUTO_INHERIT: int +SEF_DEFAULT_DESCRIPTOR_FOR_OBJECT: int +SEF_AVOID_PRIVILEGE_CHECK: int +DLL_PROCESS_ATTACH: int +DLL_THREAD_ATTACH: int +DLL_THREAD_DETACH: int +DLL_PROCESS_DETACH: int +EVENTLOG_SEQUENTIAL_READ: int +EVENTLOG_SEEK_READ: int +EVENTLOG_FORWARDS_READ: 
int +EVENTLOG_BACKWARDS_READ: int +EVENTLOG_SUCCESS: int +EVENTLOG_ERROR_TYPE: int +EVENTLOG_WARNING_TYPE: int +EVENTLOG_INFORMATION_TYPE: int +EVENTLOG_AUDIT_SUCCESS: int +EVENTLOG_AUDIT_FAILURE: int +EVENTLOG_START_PAIRED_EVENT: int +EVENTLOG_END_PAIRED_EVENT: int +EVENTLOG_END_ALL_PAIRED_EVENTS: int +EVENTLOG_PAIRED_EVENT_ACTIVE: int +EVENTLOG_PAIRED_EVENT_INACTIVE: int +KEY_QUERY_VALUE: int +KEY_SET_VALUE: int +KEY_CREATE_SUB_KEY: int +KEY_ENUMERATE_SUB_KEYS: int +KEY_NOTIFY: int +KEY_CREATE_LINK: int +KEY_READ: Incomplete +KEY_WRITE: Incomplete +KEY_EXECUTE: Incomplete +KEY_ALL_ACCESS: Incomplete +REG_OPTION_RESERVED: int +REG_OPTION_NON_VOLATILE: int +REG_OPTION_VOLATILE: int +REG_OPTION_CREATE_LINK: int +REG_OPTION_BACKUP_RESTORE: int +REG_OPTION_OPEN_LINK: int +REG_LEGAL_OPTION: Incomplete +REG_CREATED_NEW_KEY: int +REG_OPENED_EXISTING_KEY: int +REG_STANDARD_FORMAT: int +REG_LATEST_FORMAT: int +REG_NO_COMPRESSION: int +REG_WHOLE_HIVE_VOLATILE: int +REG_REFRESH_HIVE: int +REG_NO_LAZY_FLUSH: int +REG_FORCE_RESTORE: int +REG_NOTIFY_CHANGE_NAME: int +REG_NOTIFY_CHANGE_ATTRIBUTES: int +REG_NOTIFY_CHANGE_LAST_SET: int +REG_NOTIFY_CHANGE_SECURITY: int +REG_LEGAL_CHANGE_FILTER: Incomplete +REG_NONE: int +REG_SZ: int +REG_EXPAND_SZ: int +REG_BINARY: int +REG_DWORD: int +REG_DWORD_LITTLE_ENDIAN: int +REG_DWORD_BIG_ENDIAN: int +REG_LINK: int +REG_MULTI_SZ: int +REG_RESOURCE_LIST: int +REG_FULL_RESOURCE_DESCRIPTOR: int +REG_RESOURCE_REQUIREMENTS_LIST: int +SERVICE_KERNEL_DRIVER: int +SERVICE_FILE_SYSTEM_DRIVER: int +SERVICE_ADAPTER: int +SERVICE_RECOGNIZER_DRIVER: int +SERVICE_DRIVER: Incomplete +SERVICE_WIN32_OWN_PROCESS: int +SERVICE_WIN32_SHARE_PROCESS: int +SERVICE_WIN32: Incomplete +SERVICE_INTERACTIVE_PROCESS: int +SERVICE_TYPE_ALL: Incomplete +SERVICE_BOOT_START: int +SERVICE_SYSTEM_START: int +SERVICE_AUTO_START: int +SERVICE_DEMAND_START: int +SERVICE_DISABLED: int +SERVICE_ERROR_IGNORE: int +SERVICE_ERROR_NORMAL: int +SERVICE_ERROR_SEVERE: int +SERVICE_ERROR_CRITICAL: int +TAPE_ERASE_SHORT: int +TAPE_ERASE_LONG: int +TAPE_LOAD: int +TAPE_UNLOAD: int +TAPE_TENSION: int +TAPE_LOCK: int +TAPE_UNLOCK: int +TAPE_FORMAT: int +TAPE_SETMARKS: int +TAPE_FILEMARKS: int +TAPE_SHORT_FILEMARKS: int +TAPE_LONG_FILEMARKS: int +TAPE_ABSOLUTE_POSITION: int +TAPE_LOGICAL_POSITION: int +TAPE_PSEUDO_LOGICAL_POSITION: int +TAPE_REWIND: int +TAPE_ABSOLUTE_BLOCK: int +TAPE_LOGICAL_BLOCK: int +TAPE_PSEUDO_LOGICAL_BLOCK: int +TAPE_SPACE_END_OF_DATA: int +TAPE_SPACE_RELATIVE_BLOCKS: int +TAPE_SPACE_FILEMARKS: int +TAPE_SPACE_SEQUENTIAL_FMKS: int +TAPE_SPACE_SETMARKS: int +TAPE_SPACE_SEQUENTIAL_SMKS: int +TAPE_DRIVE_FIXED: int +TAPE_DRIVE_SELECT: int +TAPE_DRIVE_INITIATOR: int +TAPE_DRIVE_ERASE_SHORT: int +TAPE_DRIVE_ERASE_LONG: int +TAPE_DRIVE_ERASE_BOP_ONLY: int +TAPE_DRIVE_ERASE_IMMEDIATE: int +TAPE_DRIVE_TAPE_CAPACITY: int +TAPE_DRIVE_TAPE_REMAINING: int +TAPE_DRIVE_FIXED_BLOCK: int +TAPE_DRIVE_VARIABLE_BLOCK: int +TAPE_DRIVE_WRITE_PROTECT: int +TAPE_DRIVE_EOT_WZ_SIZE: int +TAPE_DRIVE_ECC: int +TAPE_DRIVE_COMPRESSION: int +TAPE_DRIVE_PADDING: int +TAPE_DRIVE_REPORT_SMKS: int +TAPE_DRIVE_GET_ABSOLUTE_BLK: int +TAPE_DRIVE_GET_LOGICAL_BLK: int +TAPE_DRIVE_SET_EOT_WZ_SIZE: int +TAPE_DRIVE_EJECT_MEDIA: int +TAPE_DRIVE_RESERVED_BIT: int +TAPE_DRIVE_LOAD_UNLOAD: int +TAPE_DRIVE_TENSION: int +TAPE_DRIVE_LOCK_UNLOCK: int +TAPE_DRIVE_REWIND_IMMEDIATE: int +TAPE_DRIVE_SET_BLOCK_SIZE: int +TAPE_DRIVE_LOAD_UNLD_IMMED: int +TAPE_DRIVE_TENSION_IMMED: int +TAPE_DRIVE_LOCK_UNLK_IMMED: int +TAPE_DRIVE_SET_ECC: int 
+TAPE_DRIVE_SET_COMPRESSION: int +TAPE_DRIVE_SET_PADDING: int +TAPE_DRIVE_SET_REPORT_SMKS: int +TAPE_DRIVE_ABSOLUTE_BLK: int +TAPE_DRIVE_ABS_BLK_IMMED: int +TAPE_DRIVE_LOGICAL_BLK: int +TAPE_DRIVE_LOG_BLK_IMMED: int +TAPE_DRIVE_END_OF_DATA: int +TAPE_DRIVE_RELATIVE_BLKS: int +TAPE_DRIVE_FILEMARKS: int +TAPE_DRIVE_SEQUENTIAL_FMKS: int +TAPE_DRIVE_SETMARKS: int +TAPE_DRIVE_SEQUENTIAL_SMKS: int +TAPE_DRIVE_REVERSE_POSITION: int +TAPE_DRIVE_SPACE_IMMEDIATE: int +TAPE_DRIVE_WRITE_SETMARKS: int +TAPE_DRIVE_WRITE_FILEMARKS: int +TAPE_DRIVE_WRITE_SHORT_FMKS: int +TAPE_DRIVE_WRITE_LONG_FMKS: int +TAPE_DRIVE_WRITE_MARK_IMMED: int +TAPE_DRIVE_FORMAT: int +TAPE_DRIVE_FORMAT_IMMEDIATE: int +TAPE_DRIVE_HIGH_FEATURES: int +TAPE_FIXED_PARTITIONS: int +TAPE_SELECT_PARTITIONS: int +TAPE_INITIATOR_PARTITIONS: int +TRANSACTIONMANAGER_QUERY_INFORMATION: int +TRANSACTIONMANAGER_SET_INFORMATION: int +TRANSACTIONMANAGER_RECOVER: int +TRANSACTIONMANAGER_RENAME: int +TRANSACTIONMANAGER_CREATE_RM: int +TRANSACTIONMANAGER_BIND_TRANSACTION: int +TRANSACTIONMANAGER_GENERIC_READ: Incomplete +TRANSACTIONMANAGER_GENERIC_WRITE: Incomplete +TRANSACTIONMANAGER_GENERIC_EXECUTE: int +TRANSACTIONMANAGER_ALL_ACCESS: Incomplete +TRANSACTION_QUERY_INFORMATION: int +TRANSACTION_SET_INFORMATION: int +TRANSACTION_ENLIST: int +TRANSACTION_COMMIT: int +TRANSACTION_ROLLBACK: int +TRANSACTION_PROPAGATE: int +TRANSACTION_SAVEPOINT: int +TRANSACTION_MARSHALL: int +TRANSACTION_GENERIC_READ: Incomplete +TRANSACTION_GENERIC_WRITE: Incomplete +TRANSACTION_GENERIC_EXECUTE: Incomplete +TRANSACTION_ALL_ACCESS: Incomplete +TRANSACTION_RESOURCE_MANAGER_RIGHTS: Incomplete +RESOURCEMANAGER_QUERY_INFORMATION: int +RESOURCEMANAGER_SET_INFORMATION: int +RESOURCEMANAGER_RECOVER: int +RESOURCEMANAGER_ENLIST: int +RESOURCEMANAGER_GET_NOTIFICATION: int +RESOURCEMANAGER_REGISTER_PROTOCOL: int +RESOURCEMANAGER_COMPLETE_PROPAGATION: int +RESOURCEMANAGER_GENERIC_READ: Incomplete +RESOURCEMANAGER_GENERIC_WRITE: Incomplete +RESOURCEMANAGER_GENERIC_EXECUTE: Incomplete +RESOURCEMANAGER_ALL_ACCESS: Incomplete +ENLISTMENT_QUERY_INFORMATION: int +ENLISTMENT_SET_INFORMATION: int +ENLISTMENT_RECOVER: int +ENLISTMENT_SUBORDINATE_RIGHTS: int +ENLISTMENT_SUPERIOR_RIGHTS: int +ENLISTMENT_GENERIC_READ: Incomplete +ENLISTMENT_GENERIC_WRITE: Incomplete +ENLISTMENT_GENERIC_EXECUTE: Incomplete +ENLISTMENT_ALL_ACCESS: Incomplete +TransactionOutcomeUndetermined: int +TransactionOutcomeCommitted: int +TransactionOutcomeAborted: int +TransactionStateNormal: int +TransactionStateIndoubt: int +TransactionStateCommittedNotify: int +TransactionBasicInformation: int +TransactionPropertiesInformation: int +TransactionEnlistmentInformation: int +TransactionFullInformation: int +TransactionManagerBasicInformation: int +TransactionManagerLogInformation: int +TransactionManagerLogPathInformation: int +TransactionManagerOnlineProbeInformation: int +ResourceManagerBasicInformation: int +ResourceManagerCompletionInformation: int +ResourceManagerFullInformation: int +ResourceManagerNameInformation: int +EnlistmentBasicInformation: int +EnlistmentRecoveryInformation: int +EnlistmentFullInformation: int +EnlistmentNameInformation: int +KTMOBJECT_TRANSACTION: int +KTMOBJECT_TRANSACTION_MANAGER: int +KTMOBJECT_RESOURCE_MANAGER: int +KTMOBJECT_ENLISTMENT: int +KTMOBJECT_INVALID: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winperf.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winperf.pyi 
new file mode 100644 index 00000000..bcda5bf3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winperf.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete + +PERF_DATA_VERSION: int +PERF_DATA_REVISION: int +PERF_NO_INSTANCES: int +PERF_SIZE_DWORD: int +PERF_SIZE_LARGE: int +PERF_SIZE_ZERO: int +PERF_SIZE_VARIABLE_LEN: int +PERF_TYPE_NUMBER: int +PERF_TYPE_COUNTER: int +PERF_TYPE_TEXT: int +PERF_TYPE_ZERO: int +PERF_NUMBER_HEX: int +PERF_NUMBER_DECIMAL: int +PERF_NUMBER_DEC_1000: int +PERF_COUNTER_VALUE: int +PERF_COUNTER_RATE: int +PERF_COUNTER_FRACTION: int +PERF_COUNTER_BASE: int +PERF_COUNTER_ELAPSED: int +PERF_COUNTER_QUEUELEN: int +PERF_COUNTER_HISTOGRAM: int +PERF_TEXT_UNICODE: int +PERF_TEXT_ASCII: int +PERF_TIMER_TICK: int +PERF_TIMER_100NS: int +PERF_OBJECT_TIMER: int +PERF_DELTA_COUNTER: int +PERF_DELTA_BASE: int +PERF_INVERSE_COUNTER: int +PERF_MULTI_COUNTER: int +PERF_DISPLAY_NO_SUFFIX: int +PERF_DISPLAY_PER_SEC: int +PERF_DISPLAY_PERCENT: int +PERF_DISPLAY_SECONDS: int +PERF_DISPLAY_NOSHOW: int +PERF_COUNTER_COUNTER: Incomplete +PERF_COUNTER_TIMER: Incomplete +PERF_COUNTER_QUEUELEN_TYPE: Incomplete +PERF_COUNTER_LARGE_QUEUELEN_TYPE: Incomplete +PERF_COUNTER_BULK_COUNT: Incomplete +PERF_COUNTER_TEXT: Incomplete +PERF_COUNTER_RAWCOUNT: Incomplete +PERF_COUNTER_LARGE_RAWCOUNT: Incomplete +PERF_COUNTER_RAWCOUNT_HEX: Incomplete +PERF_COUNTER_LARGE_RAWCOUNT_HEX: Incomplete +PERF_SAMPLE_FRACTION: Incomplete +PERF_SAMPLE_COUNTER: Incomplete +PERF_COUNTER_NODATA: Incomplete +PERF_COUNTER_TIMER_INV: Incomplete +PERF_SAMPLE_BASE: Incomplete +PERF_AVERAGE_TIMER: Incomplete +PERF_AVERAGE_BASE: Incomplete +PERF_AVERAGE_BULK: Incomplete +PERF_100NSEC_TIMER: Incomplete +PERF_100NSEC_TIMER_INV: Incomplete +PERF_COUNTER_MULTI_TIMER: Incomplete +PERF_COUNTER_MULTI_TIMER_INV: Incomplete +PERF_COUNTER_MULTI_BASE: Incomplete +PERF_100NSEC_MULTI_TIMER: Incomplete +PERF_100NSEC_MULTI_TIMER_INV: Incomplete +PERF_RAW_FRACTION: Incomplete +PERF_RAW_BASE: Incomplete +PERF_ELAPSED_TIME: Incomplete +PERF_COUNTER_HISTOGRAM_TYPE: int +PERF_COUNTER_DELTA: Incomplete +PERF_COUNTER_LARGE_DELTA: Incomplete +PERF_DETAIL_NOVICE: int +PERF_DETAIL_ADVANCED: int +PERF_DETAIL_EXPERT: int +PERF_DETAIL_WIZARD: int +PERF_NO_UNIQUE_ID: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winxptheme.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winxptheme.pyi new file mode 100644 index 00000000..080eb532 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/lib/winxptheme.pyi @@ -0,0 +1,25 @@ +import _win32typing + +def OpenThemeData(hwnd: int, pszClasslist: str) -> _win32typing.PyHTHEME: ... +def CloseThemeData(hTheme: _win32typing.PyHTHEME) -> None: ... +def DrawThemeBackground(hTheme: _win32typing.PyHTHEME, hdc, iPartId, iStateId, pRect, pClipRect) -> None: ... +def DrawThemeText( + hTheme: _win32typing.PyHTHEME, hdc, iPartId, iStateId, pszText: str, dwCharCount, dwTextFlags, dwTextFlags2, pRect +) -> None: ... +def GetThemeBackgroundContentRect(hTheme: _win32typing.PyHTHEME, hdc, iPartId, iStateId, pBoundingRect): ... +def GetThemeBackgroundExtent(hTheme: _win32typing.PyHTHEME, hdc, iPartId, iStateId, pContentRect): ... +def IsThemeActive() -> int: ... +def IsAppThemed() -> int: ... +def GetWindowTheme(hwnd: int) -> _win32typing.PyHTHEME: ... 
+def EnableThemeDialogTexture(hdlg, dwFlags) -> None: ... +def IsThemeDialogTextureEnabled(__hdlg: int | None) -> bool: ... +def GetThemeAppProperties(): ... +def EnableTheming(fEnable) -> None: ... +def SetWindowTheme(hwnd: int, pszSubAppName: str, pszSubIdlist: str) -> None: ... +def GetCurrentThemeName() -> tuple[str, str, str]: ... + +ETDT_DISABLE: int +ETDT_ENABLE: int +ETDT_ENABLETAB: int +ETDT_USETABTEXTURE: int +UNICODE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/mmapfile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/mmapfile.pyi new file mode 100644 index 00000000..8d943c37 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/mmapfile.pyi @@ -0,0 +1,6 @@ +import _win32typing +from win32.lib.pywintypes import error as error + +def mmapfile( + File, Name, MaximumSize: int = ..., FileOffset: int = ..., NumberOfBytesToMap: int = ... +) -> _win32typing.Pymmapfile: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/odbc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/odbc.pyi new file mode 100644 index 00000000..8ba629f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/odbc.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from typing_extensions import Literal, TypeAlias + +import _win32typing + +def odbc(connectionString: str) -> _win32typing.connection: ... +def SQLDataSources(direction) -> tuple[Incomplete, Incomplete]: ... + +_odbcError: TypeAlias = type # noqa: Y042 # Does not exist at runtime, but odbc.odbcError is a valid type. +DATE: str +NUMBER: str +RAW: str +SQL_FETCH_ABSOLUTE: int +SQL_FETCH_FIRST: int +SQL_FETCH_FIRST_SYSTEM: int +SQL_FETCH_FIRST_USER: int +SQL_FETCH_LAST: int +SQL_FETCH_NEXT: int +SQL_FETCH_PRIOR: int +SQL_FETCH_RELATIVE: int +STRING: str +TYPES: tuple[Literal["STRING"], Literal["RAW"], Literal["NUMBER"], Literal["DATE"]] +dataError: Incomplete +error: _odbcError +integrityError: Incomplete +internalError: Incomplete +noError: Incomplete +opError: Incomplete +progError: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/perfmon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/perfmon.pyi new file mode 100644 index 00000000..0656bec3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/perfmon.pyi @@ -0,0 +1,12 @@ +import _win32typing + +def LoadPerfCounterTextStrings(__commandLine: str) -> None: ... +def UnloadPerfCounterTextStrings(__commandLine: str) -> None: ... +def CounterDefinition() -> _win32typing.PyPERF_COUNTER_DEFINITION: ... +def ObjectType() -> _win32typing.PyPERF_OBJECT_TYPE: ... +def PerfMonManager( + serviceName: str, + seqPerfObTypes: list[_win32typing.PyPERF_OBJECT_TYPE], + mappingName: str | None = ..., + eventSourceName: str | None = ..., +) -> _win32typing.PyPerfMonManager: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/servicemanager.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/servicemanager.pyi new file mode 100644 index 00000000..80f4b7e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/servicemanager.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete + +def CoInitializeEx() -> None: ... +def CoUninitialize() -> None: ... +def RegisterServiceCtrlHandler(serviceName: str, callback, extra_args: bool = ...): ... +def LogMsg(__errorType: int, __eventId: int, __inserts: tuple[str, str] | None = ...) -> None: ... +def LogInfoMsg(msg: str) -> None: ... +def LogErrorMsg(msg: str) -> None: ... +def LogWarningMsg(msg: str) -> None: ... +def PumpWaitingMessages(__firstMessage: int = ..., __lastMessage: int = ...) -> int: ... +def Debugging(newVal: int = ...): ... +def Initialize(eventSourceName: str | None = ..., eventSourceFile: str | None = ...) -> None: ... +def Finalize() -> None: ... +def PrepareToHostSingle(klass: Incomplete | None = ...) -> None: ... +def PrepareToHostMultiple(service_name: str, klass) -> None: ... +def RunningAsService(): ... +def SetEventSourceName(sourceName: str, registerNow: bool = ...) -> None: ... +def StartServiceCtrlDispatcher(*args, **kwargs): ... # incomplete + +COINIT_APARTMENTTHREADED: int +COINIT_DISABLE_OLE1DDE: int +COINIT_MULTITHREADED: int +COINIT_SPEED_OVER_MEMORY: int +EVENTLOG_AUDIT_FAILURE: int +EVENTLOG_AUDIT_SUCCESS: int +EVENTLOG_ERROR_TYPE: int +EVENTLOG_INFORMATION_TYPE: int +EVENTLOG_WARNING_TYPE: int +PYS_SERVICE_STARTED: int +PYS_SERVICE_STARTING: int +PYS_SERVICE_STOPPED: int +PYS_SERVICE_STOPPING: int + +class startup_error(Exception): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/timer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/timer.pyi new file mode 100644 index 00000000..751757b8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/timer.pyi @@ -0,0 +1,6 @@ +from win32.lib.pywintypes import error as error + +def set_timer(Elapse, TimerFunc): ... +def kill_timer(timer_id): ... + +__version__: bytes diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32api.pyi new file mode 100644 index 00000000..855559c7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32api.pyi @@ -0,0 +1,311 @@ +from _typeshed import Incomplete, ReadableBuffer +from collections.abc import Callable, Iterable +from typing_extensions import TypedDict + +import _win32typing +from win32.lib.pywintypes import error as error + +class _MonitorInfo(TypedDict): + Monitor: tuple[int, int, int, int] + Work: tuple[int, int, int, int] + Flags: int + Device: str + +def AbortSystemShutdown(computerName: str) -> None: ... +def InitiateSystemShutdown(computerName: str, message: str, timeOut, bForceClose, bRebootAfterShutdown) -> None: ... +def Apply(exceptionHandler, func, args): ... +def Beep(freq, dur) -> None: ... +def BeginUpdateResource(filename: str, delete) -> int: ... +def ChangeDisplaySettings(DevMode: _win32typing.PyDEVMODE, Flags): ... 
+def ChangeDisplaySettingsEx(DeviceName: Incomplete | None = ..., DevMode: _win32typing.PyDEVMODE | None = ..., Flags=...): ... +def ClipCursor(arg: tuple[Incomplete, Incomplete, Incomplete, Incomplete]) -> None: ... +def CloseHandle(__handle: int) -> None: ... +def CopyFile(src, dest: str, bFailOnExist: int = ...) -> None: ... +def DebugBreak() -> None: ... +def DeleteFile(fileName: str) -> None: ... +def DragQueryFile(hDrop, fileNum: int = ...) -> str: ... +def DragFinish(hDrop) -> None: ... +def DuplicateHandle( + __hSourceProcess: int, + __hSource: int, + __hTargetProcessHandle: int, + __desiredAccess: int, + __bInheritHandle: int, + __options: int, +) -> int: ... +def EndUpdateResource(handle: int, discard) -> None: ... +def EnumDisplayDevices(Device: str | None = ..., DevNum: int = ..., Flags: int = ...) -> _win32typing.PyDISPLAY_DEVICE: ... +def EnumDisplayMonitors( + hdc: int | None = ..., rcClip: _win32typing.PyRECT | None = ... +) -> list[tuple[_win32typing.PyHANDLE, _win32typing.PyHANDLE, tuple[int, int, int, int]]]: ... +def EnumDisplaySettings(DeviceName: str | None = ..., ModeNum: int = ...) -> _win32typing.PyDEVMODE: ... +def EnumDisplaySettingsEx(ModeNum, DeviceName: str | None = ..., Flags=...) -> _win32typing.PyDEVMODE: ... +def EnumResourceLanguages( + hmodule: int, lpType: _win32typing.PyResourceId, lpName: _win32typing.PyResourceId +) -> list[Incomplete]: ... +def EnumResourceNames(hmodule: int, resType: _win32typing.PyResourceId) -> list[str]: ... +def EnumResourceTypes(hmodule: int) -> list[Incomplete]: ... +def ExpandEnvironmentStrings(_in: str) -> str: ... +def ExitWindows(reserved1: int = ..., reserved2: int = ...) -> None: ... +def ExitWindowsEx(flags, reserved: int = ...) -> None: ... +def FindFiles(fileSpec: str): ... +def FindFirstChangeNotification(pathName: str, bSubDirs, _filter): ... +def FindNextChangeNotification(handle: int) -> None: ... +def FindCloseChangeNotification(handle) -> None: ... +def FindExecutable(filename: str, _dir: str) -> tuple[Incomplete, str]: ... +def FormatMessage( + __flags: int, + __source: str | None = ..., + __messageId: int = ..., + __languageID: int = ..., + __inserts: Iterable[str] | None = ..., +) -> str: ... +def FormatMessageW( + __flags: int, + __source: int | None = ..., + __messageId: int = ..., + __languageID: int = ..., + __inserts: Iterable[str] | None = ..., +) -> str: ... +def FreeLibrary(hModule: int) -> None: ... +def GenerateConsoleCtrlEvent(__controlEvent: int, __processGroupId: int) -> None: ... +def GetAsyncKeyState(key): ... +def GetCommandLine() -> str: ... +def GetComputerName() -> str: ... +def GetComputerNameEx(NameType) -> str: ... +def GetComputerObjectName(NameFormat) -> str: ... +def GetMonitorInfo(hMonitor: int) -> _MonitorInfo: ... +def GetUserName() -> str: ... +def GetUserNameEx(__NameFormat: int) -> str: ... +def GetCursorPos() -> tuple[Incomplete, Incomplete]: ... +def GetCurrentThread(): ... +def GetCurrentThreadId(): ... +def GetCurrentProcessId(): ... +def GetCurrentProcess() -> int: ... +def GetConsoleTitle() -> str: ... +def GetDateFormat(locale, flags, time: _win32typing.PyTime, _format: str) -> str: ... +def GetDiskFreeSpace(rootPath: str): ... +def GetDiskFreeSpaceEx(__rootPath: str) -> tuple[int, int, int]: ... +def GetDllDirectory() -> str: ... +def GetDomainName() -> str: ... +def GetEnvironmentVariable(variable): ... +def GetEnvironmentVariableW(Name) -> str: ... +def GetFileAttributes(pathName: str): ... +def GetFileVersionInfo(Filename: str, SubBlock: str) -> None: ... 
+def GetFocus(): ... +def GetFullPathName(fileName: str) -> str: ... +def GetHandleInformation(__Object: int): ... +def GetKeyboardLayout(threadId: int = ...): ... +def GetKeyboardLayoutName(): ... +def GetKeyboardState() -> str: ... +def GetKeyState(key): ... +def GetLastError(): ... +def GetLastInputInfo(): ... +def GetLocalTime(): ... +def GetLongPathName(__fileName: str) -> str: ... +def GetLongPathNameW(fileName: str) -> str: ... +def GetLogicalDrives(): ... +def GetLogicalDriveStrings() -> str: ... +def GetModuleFileName(hModule: int) -> str: ... +def GetModuleFileNameW(hModule: int) -> str: ... +def GetModuleHandle(__fileName: str | None = ...) -> int: ... +def GetPwrCapabilities(): ... +def GetProfileSection(section: str, iniName: str | None = ...): ... +def GetProcAddress(hModule: int, functionName: _win32typing.PyResourceId): ... +def GetProfileVal(section: str, entry: str, defValue: str, iniName: str | None = ...) -> str: ... +def GetShortPathName(path: str) -> str: ... +def GetStdHandle(__handle: int) -> _win32typing.PyHANDLE: ... +def GetSysColor(index): ... +def GetSystemDefaultLangID(): ... +def GetSystemDefaultLCID(): ... +def GetSystemDirectory() -> str: ... +def GetSystemFileCacheSize(): ... +def SetSystemFileCacheSize(MinimumFileCacheSize, MaximumFileCacheSize, Flags=...) -> None: ... +def GetSystemInfo(): ... +def GetNativeSystemInfo(): ... +def GetSystemMetrics(index): ... +def GetSystemTime(): ... +def GetTempFileName(path: str, prefix: str, nUnique): ... +def GetTempPath() -> str: ... +def GetThreadLocale(): ... +def GetTickCount() -> int: ... +def GetTimeFormat(locale, flags, time: _win32typing.PyTime, _format: str) -> str: ... +def GetTimeZoneInformation(times_as_tuples: bool = ...): ... +def GetVersion(): ... +def GetVersionEx(_format: int = ...): ... +def GetVolumeInformation(path: str): ... +def GetWindowsDirectory() -> str: ... +def GetWindowLong(__hwnd: int | None, __offset: int) -> int: ... +def GetUserDefaultLangID(): ... +def GetUserDefaultLCID(): ... +def GlobalMemoryStatus(): ... +def GlobalMemoryStatusEx() -> dict[str, int]: ... +def keybd_event(bVk, bScan, dwFlags: int = ..., dwExtraInfo: int = ...) -> None: ... +def mouse_event(dx, dy, dwData, dwFlags: int = ..., dwExtraInfo=...) -> None: ... +def LoadCursor(hInstance: int, cursorid: _win32typing.PyResourceId) -> int: ... +def LoadKeyboardLayout(KLID: str, Flags: int = ...): ... +def LoadLibrary(fileName: str): ... +def LoadLibraryEx(fileName: str, handle: int, handle1) -> int: ... +def LoadResource(handle: int, _type: _win32typing.PyResourceId, name: _win32typing.PyResourceId, language) -> str: ... +def LoadString(handle: int, stringId, numChars: int = ...) -> str: ... +def MessageBeep(arg): ... +def MessageBox(hwnd: int, message: str, title: str, arg, arg1): ... +def MonitorFromPoint(pt: tuple[Incomplete, Incomplete], Flags: int = ...) -> int: ... +def MonitorFromRect(__rc: _win32typing.PyRECT | tuple[int, int, int, int], __Flags: int = ...) -> int: ... +def MonitorFromWindow(hwnd: int, Flags: int = ...) -> int: ... +def MoveFile(srcName: str, destName: str) -> None: ... +def MoveFileEx(srcName: str, destName: str, flag) -> None: ... +def OpenProcess(__reqdAccess: int, __bInherit: int | bool, __pid: int) -> int: ... +def OutputDebugString(msg: str) -> None: ... +def PostMessage(hwnd: int, idMessage, wParam: Incomplete | None = ..., lParam: Incomplete | None = ...) -> None: ... +def PostQuitMessage(__exitCode: int = ...) -> None: ... 
+def PostThreadMessage(tid, idMessage, wParam: Incomplete | None = ..., lParam: Incomplete | None = ...) -> None: ... +def RegCloseKey(key: _win32typing.PyHKEY) -> None: ... +def RegConnectRegistry(computerName: str, key): ... +def RegCopyTree(KeySrc: _win32typing.PyHKEY, SubKey: str, KeyDest: _win32typing.PyHKEY) -> None: ... +def RegCreateKey(key: _win32typing.PyHKEY | int, subKey: str) -> _win32typing.PyHKEY: ... +def RegCreateKeyEx( + Key: _win32typing.PyHKEY, + SubKey: str, + samDesired, + Options, + Class: str | None = ..., + SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES | None = ..., + Transaction: int | None = ..., +) -> tuple[_win32typing.PyHKEY, Incomplete]: ... +def RegDeleteKey(key: _win32typing.PyHKEY, subKey: str) -> None: ... +def RegDeleteKeyEx(Key: _win32typing.PyHKEY, SubKey: str, samDesired: int = ..., Transaction: int | None = ...) -> None: ... +def RegDeleteTree(Key: _win32typing.PyHKEY, SubKey: str) -> None: ... +def RegDeleteValue(key: _win32typing.PyHKEY, value: str) -> None: ... +def RegEnumKey(key: _win32typing.PyHKEY, index) -> str: ... +def RegEnumKeyEx(Key: _win32typing.PyHKEY): ... +def RegEnumKeyExW(Key: _win32typing.PyHKEY): ... +def RegEnumValue(key: _win32typing.PyHKEY, index) -> tuple[str, Incomplete, Incomplete]: ... +def RegFlushKey(key: _win32typing.PyHKEY) -> None: ... +def RegGetKeySecurity(key: _win32typing.PyHKEY, security_info) -> _win32typing.PySECURITY_DESCRIPTOR: ... +def RegLoadKey(key: _win32typing.PyHKEY, subKey: str, filename: str) -> None: ... +def RegOpenCurrentUser(samDesired) -> _win32typing.PyHKEY: ... +def RegOpenKey( + __key: _win32typing.PyHKEY | int, __subkey: str | None, __reserved: bool = ..., __sam: int = ... +) -> _win32typing.PyHKEY: ... +def RegOpenKeyEx(__key: _win32typing.PyHKEY, __subKey: str, __sam: int, __reserved: bool = ...) -> _win32typing.PyHKEY: ... +def RegOpenKeyTransacted( + Key: _win32typing.PyHKEY, SubKey: str, samDesired, Transaction: int, Options: int = ... +) -> _win32typing.PyHKEY: ... +def RegOverridePredefKey(Key: _win32typing.PyHKEY, NewKey: _win32typing.PyHKEY) -> None: ... +def RegQueryValue(key: _win32typing.PyHKEY, subKey: str) -> str: ... +def RegQueryValueEx(__key: _win32typing.PyHKEY | int, __valueName: str | None) -> tuple[str, int]: ... +def RegQueryInfoKey(key: _win32typing.PyHKEY) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def RegQueryInfoKeyW(Key: _win32typing.PyHKEY): ... +def RegRestoreKey(Key: _win32typing.PyHKEY, File: str, Flags: int = ...) -> None: ... +def RegSaveKey(key: _win32typing.PyHKEY, filename: str, sa: _win32typing.PySECURITY_ATTRIBUTES | None = ...) -> None: ... +def RegSaveKeyEx( + Key: _win32typing.PyHKEY, File: str, Flags, SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES | None = ... +) -> None: ... +def RegSetKeySecurity(key: _win32typing.PyHKEY, security_info, sd: _win32typing.PySECURITY_DESCRIPTOR) -> None: ... +def RegSetValue(key: _win32typing.PyHKEY, subKey: str | None, _type, value: str) -> None: ... +def RegSetValueEx(key: _win32typing.PyHKEY, valueName: str, reserved, _type, value) -> None: ... +def RegUnLoadKey(key: _win32typing.PyHKEY, subKey: str) -> None: ... +def RegisterWindowMessage(msgString: str) -> None: ... +def RegNotifyChangeKeyValue(key: _win32typing.PyHKEY, bWatchSubTree, dwNotifyFilter, hKey: int, fAsynchronous) -> None: ... +def SearchPath(path: str, fileName: str, fileExt: str | None = ...): ... +def SendMessage(hwnd: int, idMessage, wParam: str | None = ..., lParam: str | None = ...) -> None: ... 
+def SetConsoleCtrlHandler(__ctrlHandler: Callable[[int], bool], __bAdd: bool) -> None: ... +def SetConsoleTitle(title: str) -> None: ... +def SetCursorPos(arg: tuple[Incomplete, Incomplete]) -> None: ... +def SetDllDirectory(PathName: str) -> None: ... +def SetErrorMode(errorMode): ... +def SetFileAttributes(pathName: str, attrs): ... +def SetLastError(): ... +def SetSysColors(Elements, RgbValues) -> None: ... +def SetLocalTime(SystemTime: _win32typing.PyTime) -> None: ... +def SetSystemTime(year, month, dayOfWeek, day, hour, minute, second, millseconds): ... +def SetClassLong(hwnd: int, offset, val): ... +def SetClassWord(hwnd: int, offset, val): ... +def SetCursor(hCursor: int) -> int: ... +def SetEnvironmentVariable(Name, Value) -> None: ... +def SetEnvironmentVariableW(Name, Value) -> None: ... +def SetHandleInformation(Object: int, Mask, Flags) -> None: ... +def SetStdHandle(handle, handle1: int) -> None: ... +def SetSystemPowerState(Suspend, Force) -> None: ... +def SetThreadLocale(lcid) -> None: ... +def SetTimeZoneInformation(tzi): ... +def SetWindowLong(__hwnd: int | None, __offset: int, __value: float) -> int: ... +def ShellExecute(hwnd: int, op: str, file: str, params: str, _dir: str, bShow): ... +def ShowCursor(show): ... +def Sleep(time, bAlterable: int = ...): ... +def TerminateProcess(__handle: int, __exitCode: int) -> None: ... +def ToAsciiEx(vk, scancode, keyboardstate, flags: int = ..., hlayout: Incomplete | None = ...): ... +def Unicode() -> str: ... +def UpdateResource( + __handle: int, + __type: _win32typing.PyResourceId | int, + __name: _win32typing.PyResourceId | int, + __data: ReadableBuffer | None, + language: int = ..., +) -> None: ... +def VkKeyScan(char, char1): ... +def WinExec(cmdLine: str, arg) -> None: ... +def WinHelp(hwnd: int, hlpFile: str, cmd, data: str | int = ...) -> None: ... +def WriteProfileSection(section: str, data: str, iniName: str | None = ...): ... +def WriteProfileVal(section: str, entry: str, value: str, iniName: str | None = ...) -> None: ... +def HIBYTE(val): ... +def LOBYTE(val): ... +def HIWORD(val): ... +def LOWORD(val): ... +def RGB(red, green, blue): ... +def MAKELANGID(PrimaryLanguage, SubLanguage): ... +def MAKEWORD(low, high): ... +def MAKELONG(low, high): ... +def CommandLineToArgv(*args, **kwargs): ... # incomplete +def GetKeyboardLayoutList(*args, **kwargs): ... # incomplete +def MapVirtualKey(*args, **kwargs): ... # incomplete +def MessageBoxEx(*args, **kwargs): ... # incomplete +def OpenThread(*args, **kwargs): ... # incomplete +def SleepEx(*args, **kwargs): ... # incomplete +def VkKeyScanEx(*args, **kwargs): ... 
# incomplete + +NameCanonical: int +NameCanonicalEx: int +NameDisplay: int +NameFullyQualifiedDN: int +NameSamCompatible: int +NameServicePrincipal: int +NameUniqueId: int +NameUnknown: int +NameUserPrincipal: int +PyDISPLAY_DEVICEType = _win32typing.PyDISPLAY_DEVICE +REG_NOTIFY_CHANGE_ATTRIBUTES: int +REG_NOTIFY_CHANGE_LAST_SET: int +REG_NOTIFY_CHANGE_NAME: int +REG_NOTIFY_CHANGE_SECURITY: int +STD_ERROR_HANDLE: int +STD_INPUT_HANDLE: int +STD_OUTPUT_HANDLE: int +VFT_APP: int +VFT_DLL: int +VFT_DRV: int +VFT_FONT: int +VFT_STATIC_LIB: int +VFT_UNKNOWN: int +VFT_VXD: int +VOS_DOS: int +VOS_DOS_WINDOWS16: int +VOS_DOS_WINDOWS32: int +VOS_NT: int +VOS_NT_WINDOWS32: int +VOS_OS216: int +VOS_OS216_PM16: int +VOS_OS232: int +VOS_OS232_PM32: int +VOS_UNKNOWN: int +VOS__PM16: int +VOS__PM32: int +VOS__WINDOWS16: int +VOS__WINDOWS32: int +VS_FF_DEBUG: int +VS_FF_INFOINFERRED: int +VS_FF_PATCHED: int +VS_FF_PRERELEASE: int +VS_FF_PRIVATEBUILD: int +VS_FF_SPECIALBUILD: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32clipboard.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32clipboard.pyi new file mode 100644 index 00000000..5fbc2c40 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32clipboard.pyi @@ -0,0 +1,47 @@ +from win32.lib.pywintypes import error as error + +def ChangeClipboardChain(hWndRemove: int, hWndNewNext: int): ... +def CloseClipboard(): ... +def CountClipboardFormats(): ... +def EmptyClipboard(): ... +def EnumClipboardFormats(_format: int = ...): ... +def GetClipboardData(_format) -> str: ... +def GetClipboardDataHandle(_format): ... +def GetClipboardFormatName(_format) -> str: ... +def GetClipboardOwner(): ... +def GetClipboardSequenceNumber(): ... +def GetClipboardViewer(): ... +def GetGlobalMemory(hglobal: int) -> str: ... +def GetOpenClipboardWindow(): ... +def GetPriorityClipboardFormat(formats): ... +def IsClipboardFormatAvailable(__format: int) -> int: ... +def OpenClipboard(hWnd: int | None = ...): ... +def RegisterClipboardFormat(name: str): ... +def SetClipboardData(_format, hMem): ... +def SetClipboardText(text, _format): ... +def SetClipboardViewer(hWndNewViewer: int) -> int: ... + +CF_BITMAP: int +CF_DIB: int +CF_DIBV5: int +CF_DIF: int +CF_DSPBITMAP: int +CF_DSPENHMETAFILE: int +CF_DSPMETAFILEPICT: int +CF_DSPTEXT: int +CF_ENHMETAFILE: int +CF_HDROP: int +CF_LOCALE: int +CF_MAX: int +CF_METAFILEPICT: int +CF_OEMTEXT: int +CF_OWNERDISPLAY: int +CF_PALETTE: int +CF_PENDATA: int +CF_RIFF: int +CF_SYLK: int +CF_TEXT: int +CF_TIFF: int +CF_UNICODETEXT: int +CF_WAVE: int +UNICODE: bool diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32console.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32console.pyi new file mode 100644 index 00000000..e66038af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32console.pyi @@ -0,0 +1,74 @@ +import _win32typing +from win32.lib.pywintypes import error as error + +def CreateConsoleScreenBuffer( + DesiredAccess, ShareMode, Flags, SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES | None = ... +) -> _win32typing.PyConsoleScreenBuffer: ... +def GetConsoleDisplayMode(): ... +def AttachConsole(ProcessId) -> None: ... +def AllocConsole() -> None: ... +def FreeConsole() -> None: ... +def GetConsoleCP(): ... 
+def GetConsoleOutputCP(): ... +def SetConsoleCP(CodePageId) -> None: ... +def SetConsoleOutputCP(CodePageID) -> None: ... +def GetConsoleSelectionInfo(): ... +def AddConsoleAlias(Source, Target, ExeName) -> None: ... +def GetConsoleAliases(ExeName): ... +def GetConsoleAliasExes(): ... +def GetConsoleWindow(): ... +def GetNumberOfConsoleFonts(): ... +def SetConsoleTitle(ConsoleTitle) -> None: ... +def GetConsoleTitle(): ... +def GenerateConsoleCtrlEvent(__CtrlEvent: int, __ProcessGroupId: int = ...) -> None: ... +def GetStdHandle(__StdHandle: int) -> _win32typing.PyConsoleScreenBuffer: ... +def GetConsoleProcessList(*args, **kwargs): ... # incomplete + +ATTACH_PARENT_PROCESS: int +BACKGROUND_BLUE: int +BACKGROUND_GREEN: int +BACKGROUND_INTENSITY: int +BACKGROUND_RED: int +COMMON_LVB_GRID_HORIZONTAL: int +COMMON_LVB_GRID_LVERTICAL: int +COMMON_LVB_GRID_RVERTICAL: int +COMMON_LVB_LEADING_BYTE: int +COMMON_LVB_REVERSE_VIDEO: int +COMMON_LVB_TRAILING_BYTE: int +COMMON_LVB_UNDERSCORE: int +CONSOLE_FULLSCREEN: int +CONSOLE_FULLSCREEN_HARDWARE: int +CONSOLE_FULLSCREEN_MODE: int +CONSOLE_MOUSE_DOWN: int +CONSOLE_MOUSE_SELECTION: int +CONSOLE_NO_SELECTION: int +CONSOLE_SELECTION_IN_PROGRESS: int +CONSOLE_SELECTION_NOT_EMPTY: int +CONSOLE_TEXTMODE_BUFFER: int +CONSOLE_WINDOWED_MODE: int +CTRL_BREAK_EVENT: int +CTRL_C_EVENT: int +ENABLE_ECHO_INPUT: int +ENABLE_LINE_INPUT: int +ENABLE_MOUSE_INPUT: int +ENABLE_PROCESSED_INPUT: int +ENABLE_PROCESSED_OUTPUT: int +ENABLE_WINDOW_INPUT: int +ENABLE_WRAP_AT_EOL_OUTPUT: int +FOCUS_EVENT: int +FOREGROUND_BLUE: int +FOREGROUND_GREEN: int +FOREGROUND_INTENSITY: int +FOREGROUND_RED: int +KEY_EVENT: int +LOCALE_USER_DEFAULT: int +MENU_EVENT: int +MOUSE_EVENT: int +PyCOORDType = _win32typing.PyCOORD +PyConsoleScreenBufferType = _win32typing.PyConsoleScreenBuffer +PyINPUT_RECORDType = _win32typing.PyINPUT_RECORD +PySMALL_RECTType = _win32typing.PySMALL_RECT +STD_ERROR_HANDLE: int +STD_INPUT_HANDLE: int +STD_OUTPUT_HANDLE: int +WINDOW_BUFFER_SIZE_EVENT: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32cred.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32cred.pyi new file mode 100644 index 00000000..a915399d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32cred.pyi @@ -0,0 +1,85 @@ +from _typeshed import Incomplete + +def CredMarshalCredential(CredType, Credential: str) -> str: ... +def CredUnmarshalCredential(MarshaledCredential: str) -> tuple[Incomplete, str]: ... +def CredIsMarshaledCredential(MarshaledCredential: str): ... +def CredEnumerate(Filter: str | None = ..., Flags: int = ...) -> tuple[Incomplete, ...]: ... +def CredGetTargetInfo(TargetName: str, Flags: int = ...): ... +def CredWriteDomainCredentials(TargetInfo, Credential, Flags: int = ...) -> None: ... +def CredReadDomainCredentials(TargetInfo, Flags: int = ...) -> tuple[Incomplete, ...]: ... +def CredDelete(TargetName: str, Type, Flags: int = ...) -> None: ... +def CredWrite(Credential, Flags: int = ...) -> None: ... +def CredRead(TargetName: str, Type, Flags: int = ...): ... +def CredRename(OldTargetName: str, NewTargetName: str, Type, Flags: int = ...): ... +def CredUICmdLinePromptForCredentials( + TargetName: str, Flags, AuthError: int = ..., UserName: str | None = ..., Password: str | None = ..., Save: int = ... +) -> tuple[str, str, Incomplete]: ... 
+def CredUIPromptForCredentials( + TargetName: str, + AuthError: int = ..., + UserName: str | None = ..., + Password: str | None = ..., + Save: bool = ..., + Flags: int = ..., + UiInfo: Incomplete | None = ..., +) -> tuple[str, str, Incomplete]: ... +def CredUIConfirmCredentials(TargetName: str, Confirm) -> None: ... +def CredUIReadSSOCredW(Realm: str | None = ...) -> str: ... +def CredUIStoreSSOCredW(Realm: str, Username: str, Password: str, Persist) -> None: ... +def CredUIParseUserName(UserName: str) -> tuple[str, str]: ... + +CREDUI_FLAGS_ALWAYS_SHOW_UI: int +CREDUI_FLAGS_COMPLETE_USERNAME: int +CREDUI_FLAGS_DO_NOT_PERSIST: int +CREDUI_FLAGS_EXCLUDE_CERTIFICATES: int +CREDUI_FLAGS_EXPECT_CONFIRMATION: int +CREDUI_FLAGS_GENERIC_CREDENTIALS: int +CREDUI_FLAGS_INCORRECT_PASSWORD: int +CREDUI_FLAGS_KEEP_USERNAME: int +CREDUI_FLAGS_PASSWORD_ONLY_OK: int +CREDUI_FLAGS_PERSIST: int +CREDUI_FLAGS_PROMPT_VALID: int +CREDUI_FLAGS_REQUEST_ADMINISTRATOR: int +CREDUI_FLAGS_REQUIRE_CERTIFICATE: int +CREDUI_FLAGS_REQUIRE_SMARTCARD: int +CREDUI_FLAGS_SERVER_CREDENTIAL: int +CREDUI_FLAGS_SHOW_SAVE_CHECK_BOX: int +CREDUI_FLAGS_USERNAME_TARGET_CREDENTIALS: int +CREDUI_FLAGS_VALIDATE_USERNAME: int +CREDUI_MAX_CAPTION_LENGTH: int +CREDUI_MAX_DOMAIN_TARGET_LENGTH: int +CREDUI_MAX_GENERIC_TARGET_LENGTH: int +CREDUI_MAX_MESSAGE_LENGTH: int +CREDUI_MAX_PASSWORD_LENGTH: int +CREDUI_MAX_USERNAME_LENGTH: int +CRED_ALLOW_NAME_RESOLUTION: int +CRED_CACHE_TARGET_INFORMATION: int +CRED_FLAGS_OWF_CRED_BLOB: int +CRED_FLAGS_PASSWORD_FOR_CERT: int +CRED_FLAGS_PROMPT_NOW: int +CRED_FLAGS_USERNAME_TARGET: int +CRED_FLAGS_VALID_FLAGS: int +CRED_MAX_ATTRIBUTES: int +CRED_MAX_DOMAIN_TARGET_NAME_LENGTH: int +CRED_MAX_GENERIC_TARGET_NAME_LENGTH: int +CRED_MAX_STRING_LENGTH: int +CRED_MAX_USERNAME_LENGTH: int +CRED_MAX_VALUE_SIZE: int +CRED_PERSIST_ENTERPRISE: int +CRED_PERSIST_LOCAL_MACHINE: int +CRED_PERSIST_NONE: int +CRED_PERSIST_SESSION: int +CRED_PRESERVE_CREDENTIAL_BLOB: int +CRED_TI_CREATE_EXPLICIT_CRED: int +CRED_TI_DOMAIN_FORMAT_UNKNOWN: int +CRED_TI_ONLY_PASSWORD_REQUIRED: int +CRED_TI_SERVER_FORMAT_UNKNOWN: int +CRED_TI_USERNAME_TARGET: int +CRED_TI_VALID_FLAGS: int +CRED_TI_WORKGROUP_MEMBER: int +CRED_TYPE_DOMAIN_CERTIFICATE: int +CRED_TYPE_DOMAIN_PASSWORD: int +CRED_TYPE_DOMAIN_VISIBLE_PASSWORD: int +CRED_TYPE_GENERIC: int +CertCredential: int +UsernameTargetCredential: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32crypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32crypt.pyi new file mode 100644 index 00000000..9a5d4427 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32crypt.pyi @@ -0,0 +1,100 @@ +from _typeshed import Incomplete + +import _win32typing + +def CryptProtectData( + DataIn, + DataDescr: str | None = ..., + OptionalEntropy: Incomplete | None = ..., + Reserved: Incomplete | None = ..., + PromptStruct: _win32typing.PyCRYPTPROTECT_PROMPTSTRUCT | None = ..., + Flags: int = ..., +): ... +def CryptUnprotectData( + DataIn, + OptionalEntropy: Incomplete | None = ..., + Reserved: Incomplete | None = ..., + PromptStruct: _win32typing.PyCRYPTPROTECT_PROMPTSTRUCT | None = ..., + Flags: int = ..., +) -> tuple[Incomplete, Incomplete]: ... +def CryptEnumProviders() -> list[tuple[str, Incomplete]]: ... +def CryptEnumProviderTypes() -> list[tuple[str, Incomplete]]: ... +def CryptGetDefaultProvider(ProvType, Flags) -> str: ... 
+def CryptSetProviderEx(ProvName: str, ProvType, Flags) -> None: ... +def CryptAcquireContext(Container: str, Provider: str, ProvType, Flags) -> _win32typing.PyCRYPTPROV: ... +def CryptFindLocalizedName(CryptName: str) -> str: ... +def CertEnumSystemStore(dwFlags, pvSystemStoreLocationPara: Incomplete | None = ...) -> list[Incomplete]: ... +def CertEnumSystemStoreLocation(Flags: int = ...) -> list[Incomplete]: ... +def CertEnumPhysicalStore(pvSystemStore: str, dwFlags) -> list[Incomplete]: ... +def CertRegisterSystemStore(SystemStore: str, Flags) -> None: ... +def CertUnregisterSystemStore(SystemStore: str, Flags) -> None: ... +def CertOpenStore( + StoreProvider, MsgAndCertEncodingType, CryptProv: _win32typing.PyCRYPTPROV, Flags, Para: Incomplete | None = ... +) -> _win32typing.PyCERTSTORE: ... +def CertOpenSystemStore(SubsystemProtocol: str, Prov: _win32typing.PyCRYPTPROV | None = ...) -> _win32typing.PyCERTSTORE: ... +def CryptFindOIDInfo(KeyType, Key, GroupId: int = ...): ... +def CertAlgIdToOID(AlgId) -> str: ... +def CertOIDToAlgId(ObjId: str): ... +def CryptGetKeyIdentifierProperty(KeyIdentifier: str, PropId, Flags: int = ..., ComputerName: str | None = ...): ... +def CryptEnumKeyIdentifierProperties( + KeyIdentifier: str | None = ..., PropId: int = ..., Flags: int = ..., ComputerName: str | None = ... +): ... +def CryptEnumOIDInfo(GroupId: int = ...): ... +def CertAddSerializedElementToStore( + CertStore: _win32typing.PyCERTSTORE, Element, AddDisposition, ContextTypeFlags, Flags: int = ... +) -> _win32typing.PyCERT_CONTEXT: ... +def CryptQueryObject(ObjectType, Object, ExpectedContentTypeFlags, ExpectedFormatTypeFlags, Flags: int = ...): ... +def CryptDecodeMessage( + EncodedBlob, + DecryptPara, + MsgTypeFlags, + VerifyPara: Incomplete | None = ..., + SignerIndex: int = ..., + PrevInnerContentType: int = ..., + ReturnData: bool = ..., +): ... +def CryptEncryptMessage( + EncryptPara: _win32typing.PyCRYPT_ENCRYPT_MESSAGE_PARA, RecipientCert: tuple[_win32typing.PyCERT_CONTEXT, ...], ToBeEncrypted +): ... +def CryptDecryptMessage( + DecryptPara: _win32typing.PyCRYPT_DECRYPT_MESSAGE_PARA, EncryptedBlob +) -> tuple[Incomplete, _win32typing.PyCERT_CONTEXT]: ... +def CryptSignAndEncryptMessage( + SignPara: _win32typing.PyCRYPT_SIGN_MESSAGE_PARA, + EncryptPara: _win32typing.PyCRYPT_ENCRYPT_MESSAGE_PARA, + RecipientCert: tuple[_win32typing.PyCERT_CONTEXT, ...], + ToBeSignedAndEncrypted, +): ... +def CryptVerifyMessageSignature( + SignedBlob, SignerIndex: int = ..., VerifyPara: _win32typing.PyCRYPT_VERIFY_MESSAGE_PARA | None = ..., ReturnData: bool = ... +) -> tuple[_win32typing.PyCERT_CONTEXT, Incomplete]: ... +def CryptGetMessageCertificates( + SignedBlob, MsgAndCertEncodingType, CryptProv: _win32typing.PyCRYPTPROV | None = ..., Flags: int = ... +) -> _win32typing.PyCERTSTORE: ... +def CryptGetMessageSignerCount(SignedBlob, MsgEncodingType): ... +def CryptSignMessage( + SignPara: _win32typing.PyCRYPT_SIGN_MESSAGE_PARA, ToBeSigned: tuple[Incomplete, ...], DetachedSignature: bool = ... +): ... +def CryptVerifyDetachedMessageSignature( + SignerIndex, + DetachedSignBlob, + ToBeSigned: tuple[Incomplete, ...], + VerifyPara: _win32typing.PyCRYPT_VERIFY_MESSAGE_PARA | None = ..., +) -> _win32typing.PyCERT_CONTEXT: ... +def CryptDecryptAndVerifyMessageSignature( + EncryptedBlob, + DecryptPara: _win32typing.PyCRYPT_DECRYPT_MESSAGE_PARA, + VerifyPara: _win32typing.PyCRYPT_VERIFY_MESSAGE_PARA | None = ..., + SignerIndex: int = ..., +): ... 
+def CryptEncodeObjectEx(StructType, StructInfo, CertEncodingType, Flags: int = ..., EncodePara: Incomplete | None = ...): ... +def CryptDecodeObjectEx(StructType, Encoded, CertEncodingType, Flags: int = ..., DecodePara: Incomplete | None = ...): ... +def CertNameToStr(Name, StrType, CertEncodingType): ... +def CryptFormatObject( + StructType, Encoded, CertEncodingType, FormatStrType: int = ..., FormatType: int = ..., FormatStruct: Incomplete | None = ... +): ... +def PFXImportCertStore(PFX, Password, Flags) -> _win32typing.PyCERTSTORE: ... +def PFXVerifyPassword(PFX, Password, Flags): ... +def PFXIsPFXBlob(PFX): ... +def CryptBinaryToString(Binary, Flags): ... +def CryptStringToBinary(String, Flags) -> tuple[Incomplete, Incomplete, Incomplete]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32event.pyi new file mode 100644 index 00000000..344afc11 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32event.pyi @@ -0,0 +1,71 @@ +from collections.abc import Iterable + +import _win32typing +from win32.lib.pywintypes import error as error + +def CancelWaitableTimer() -> None: ... +def CreateEvent( + __EventAttributes: _win32typing.PySECURITY_ATTRIBUTES | None, + __bManualReset: int | bool, + __bInitialState: int | bool, + __Name: str | None, +) -> int: ... +def CreateMutex(MutexAttributes: _win32typing.PySECURITY_ATTRIBUTES, InitialOwner, Name: str) -> int: ... +def CreateSemaphore( + SemaphoreAttributes: _win32typing.PySECURITY_ATTRIBUTES, InitialCount, MaximumCount, SemaphoreName +) -> int: ... +def CreateWaitableTimer(TimerAttributes: _win32typing.PySECURITY_ATTRIBUTES, ManualReset, TimerName) -> int: ... +def CreateWaitableTimerEx( + __lpTimerAttributes: _win32typing.PySECURITY_ATTRIBUTES | None, + __lpTimerName: str | None, + __dwFlags: int, + __dwDesiredAccess: int, +) -> _win32typing.PyHANDLE: ... +def MsgWaitForMultipleObjects(__handlelist: Iterable[int], __bWaitAll: int, __milliseconds: int, __wakeMask: int) -> int: ... +def MsgWaitForMultipleObjectsEx(handlelist: list[int], milliseconds, wakeMask, waitFlags): ... +def OpenEvent(desiredAccess, bInheritHandle, name: str) -> int: ... +def OpenMutex(desiredAccess, bInheritHandle, name: str) -> int: ... +def OpenSemaphore(desiredAccess, bInheritHandle, name: str) -> int: ... +def OpenWaitableTimer(desiredAccess, bInheritHandle, timerName) -> int: ... +def PulseEvent(hEvent: int) -> None: ... +def ReleaseMutex(hEvent: int) -> None: ... +def ReleaseSemaphore(hEvent: int, lReleaseCount): ... +def ResetEvent(__hEvent: int) -> None: ... +def SetEvent(__hEvent: int) -> None: ... +def SetWaitableTimer(handle: int, dueTime, period, func, param, resume_state) -> None: ... +def WaitForMultipleObjects(handlelist: list[int], bWaitAll, milliseconds): ... +def WaitForMultipleObjectsEx(handlelist: list[int], bWaitAll, milliseconds, bAlertable): ... +def WaitForSingleObject(__hHandle: int, __milliseconds: int) -> int: ... +def WaitForSingleObjectEx(hHandle: int, milliseconds, bAlertable): ... +def WaitForInputIdle(hProcess: int, milliseconds): ... +def SignalObjectAndWait(*args, **kwargs): ... 
# incomplete + +CREATE_WAITABLE_TIMER_HIGH_RESOLUTION: int +CREATE_WAITABLE_TIMER_MANUAL_RESET: int +EVENT_ALL_ACCESS: int +EVENT_MODIFY_STATE: int +INFINITE: int +MAXIMUM_WAIT_OBJECTS: int +QS_ALLEVENTS: int +QS_ALLINPUT: int +QS_HOTKEY: int +QS_INPUT: int +QS_KEY: int +QS_MOUSE: int +QS_MOUSEBUTTON: int +QS_MOUSEMOVE: int +QS_PAINT: int +QS_POSTMESSAGE: int +QS_SENDMESSAGE: int +QS_TIMER: int +SYNCHRONIZE: int +TIMER_ALL_ACCESS: int +TIMER_MODIFY_STATE: int +TIMER_QUERY_STATE: int +WAIT_ABANDONED: int +WAIT_ABANDONED_0: int +WAIT_FAILED: int +WAIT_IO_COMPLETION: int +WAIT_OBJECT_0: int +WAIT_TIMEOUT: int +UNICODE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32evtlog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32evtlog.pyi new file mode 100644 index 00000000..bf282398 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32evtlog.pyi @@ -0,0 +1,271 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + +import _win32typing +from win32.lib.pywintypes import error as error + +def ReadEventLog( + __Handle: _win32typing.PyEVTLOG_HANDLE, __Flags: int, __Offset: int, Size=... +) -> list[_win32typing.PyEventLogRecord]: ... +def ClearEventLog(handle: _win32typing.PyEVTLOG_HANDLE, eventLogName: str) -> None: ... +def BackupEventLog(handle, eventLogName: str) -> None: ... +def CloseEventLog(__handle: _win32typing.PyEVTLOG_HANDLE) -> None: ... +def DeregisterEventSource(handle) -> None: ... +def NotifyChangeEventLog(handle, handle1) -> None: ... +def GetNumberOfEventLogRecords(handle: _win32typing.PyEVTLOG_HANDLE) -> int: ... +def GetOldestEventLogRecord(): ... +def OpenEventLog(__serverName: str | None, __sourceName: str) -> _win32typing.PyEVTLOG_HANDLE: ... +def RegisterEventSource(__serverName: str | None, __sourceName: str): ... +def OpenBackupEventLog(serverName: str, fileName: str) -> _win32typing.PyEVTLOG_HANDLE: ... +def ReportEvent( + __EventLog: int, + __Type: int, + __Category: int, + __EventID: int, + __UserSid: _win32typing.PySID | None, + __Strings: Iterable[str] | None, + __RawData: bytes | None, +) -> None: ... +def EvtOpenChannelEnum(Session: _win32typing.PyEVT_HANDLE | None = ..., Flags: int = ...) -> _win32typing.PyEVT_HANDLE: ... +def EvtNextChannelPath(ChannelEnum: _win32typing.PyEVT_HANDLE): ... +def EvtOpenLog(Path, Flags, Session: _win32typing.PyEVT_HANDLE | None = ...) -> _win32typing.PyEVT_HANDLE: ... +def EvtClearLog( + ChannelPath, TargetFilePath: Incomplete | None = ..., Session: _win32typing.PyEVT_HANDLE | None = ..., Flags: int = ... +) -> None: ... +def EvtExportLog( + Path, TargetFilePath, Flags, Query: Incomplete | None = ..., Session: _win32typing.PyEVT_HANDLE | None = ... +) -> None: ... +def EvtArchiveExportedLog(LogFilePath, Locale, Session: _win32typing.PyEVT_HANDLE | None = ..., Flags=...) -> None: ... +def EvtGetExtendedStatus(): ... +def EvtQuery( + __Path: str, __Flags: int, __Query: str | None = ..., __Session: _win32typing.PyEVT_HANDLE | None = ... +) -> _win32typing.PyEVT_HANDLE: ... +def EvtNext( + __ResultSet: _win32typing.PyEVT_HANDLE, __Count: int, __Timeout: int = ..., __Flags: int = ... +) -> tuple[_win32typing.PyEVT_HANDLE, ...]: ... +def EvtSeek( + ResultSet: _win32typing.PyEVT_HANDLE, Position, Flags, Bookmark: _win32typing.PyEVT_HANDLE | None = ..., Timeout: int = ... +) -> None: ... 
+def EvtRender(__Event: _win32typing.PyEVT_HANDLE, __Flags: int, Context=...): ... +def EvtSubscribe( + ChannelPath, + Flags, + SignalEvent: Incomplete | None = ..., + Callback: Incomplete | None = ..., + Context: Incomplete | None = ..., + Query: Incomplete | None = ..., + Session: _win32typing.PyEVT_HANDLE | None = ..., + Bookmark: _win32typing.PyEVT_HANDLE | None = ..., +) -> _win32typing.PyEVT_HANDLE: ... +def EvtCreateBookmark(BookmarkXML: Incomplete | None = ...) -> _win32typing.PyEVT_HANDLE: ... +def EvtUpdateBookmark(Bookmark: _win32typing.PyEVT_HANDLE, Event: _win32typing.PyEVT_HANDLE) -> _win32typing.PyEVT_HANDLE: ... +def EvtGetChannelConfigProperty( + ChannelConfig: _win32typing.PyEVT_HANDLE, PropertyId, Flags=... +) -> tuple[Incomplete, Incomplete]: ... +def EvtOpenChannelConfig( + ChannelPath, Session: _win32typing.PyEVT_HANDLE | None = ..., Flags=... +) -> _win32typing.PyEVT_HANDLE: ... +def EvtOpenSession( + Login: _win32typing.PyEVT_RPC_LOGIN, LoginClass, Timeout: int = ..., Flags=... +) -> _win32typing.PyEVT_HANDLE: ... +def EvtOpenPublisherEnum(Session: _win32typing.PyEVT_HANDLE | None = ..., Flags: int = ...) -> _win32typing.PyEVT_HANDLE: ... +def EvtNextPublisherId(PublisherEnum: _win32typing.PyEVT_HANDLE): ... +def EvtOpenPublisherMetadata( + PublisherIdentity, + Session: _win32typing.PyEVT_HANDLE | None = ..., + LogFilePath: Incomplete | None = ..., + Locale: int = ..., + Flags: int = ..., +) -> _win32typing.PyEVT_HANDLE: ... +def EvtGetPublisherMetadataProperty( + PublisherMetadata: _win32typing.PyEVT_HANDLE, PropertyId, Flags=... +) -> tuple[Incomplete, Incomplete]: ... +def EvtOpenEventMetadataEnum(PublisherMetadata: _win32typing.PyEVT_HANDLE, Flags=...) -> _win32typing.PyEVT_HANDLE: ... +def EvtNextEventMetadata(EventMetadataEnum: _win32typing.PyEVT_HANDLE, Flags=...) -> _win32typing.PyEVT_HANDLE: ... +def EvtGetEventMetadataProperty( + EventMetadata: _win32typing.PyEVT_HANDLE, PropertyId, Flags=... +) -> tuple[Incomplete, Incomplete]: ... +def EvtGetLogInfo(Log: _win32typing.PyEVT_HANDLE, PropertyId) -> tuple[Incomplete, Incomplete]: ... +def EvtGetEventInfo(Event: _win32typing.PyEVT_HANDLE, PropertyId) -> tuple[Incomplete, Incomplete]: ... +def EvtGetObjectArraySize(ObjectArray: _win32typing.PyEVT_HANDLE): ... +def EvtGetObjectArrayProperty( + ObjectArray: _win32typing.PyEVT_HANDLE, PropertyId, ArrayIndex, Flags=... +) -> tuple[Incomplete, Incomplete]: ... +def EvtCreateRenderContext(*args, **kwargs): ... # incomplete +def EvtFormatMessage(*args, **kwargs): ... 
# incomplete + +EVENTLOG_AUDIT_FAILURE: int +EVENTLOG_AUDIT_SUCCESS: int +EVENTLOG_BACKWARDS_READ: int +EVENTLOG_END_ALL_PAIRED_EVENTS: int +EVENTLOG_END_PAIRED_EVENT: int +EVENTLOG_ERROR_TYPE: int +EVENTLOG_FORWARDS_READ: int +EVENTLOG_INFORMATION_TYPE: int +EVENTLOG_PAIRED_EVENT_ACTIVE: int +EVENTLOG_PAIRED_EVENT_INACTIVE: int +EVENTLOG_SEEK_READ: int +EVENTLOG_SEQUENTIAL_READ: int +EVENTLOG_START_PAIRED_EVENT: int +EVENTLOG_SUCCESS: int +EVENTLOG_WARNING_TYPE: int +EventMetadataEventChannel: int +EventMetadataEventID: int +EventMetadataEventKeyword: int +EventMetadataEventLevel: int +EventMetadataEventMessageID: int +EventMetadataEventOpcode: int +EventMetadataEventTask: int +EventMetadataEventTemplate: int +EventMetadataEventVersion: int +EvtChannelConfigAccess: int +EvtChannelConfigClassicEventlog: int +EvtChannelConfigEnabled: int +EvtChannelConfigIsolation: int +EvtChannelConfigOwningPublisher: int +EvtChannelConfigPropertyIdEND: int +EvtChannelConfigType: int +EvtChannelLoggingConfigAutoBackup: int +EvtChannelLoggingConfigLogFilePath: int +EvtChannelLoggingConfigMaxSize: int +EvtChannelLoggingConfigRetention: int +EvtChannelPublishingConfigBufferSize: int +EvtChannelPublishingConfigClockType: int +EvtChannelPublishingConfigControlGuid: int +EvtChannelPublishingConfigKeywords: int +EvtChannelPublishingConfigLatency: int +EvtChannelPublishingConfigLevel: int +EvtChannelPublishingConfigMaxBuffers: int +EvtChannelPublishingConfigMinBuffers: int +EvtChannelPublishingConfigSidType: int +EvtEventMetadataPropertyIdEND: int +EvtEventPath: int +EvtEventPropertyIdEND: int +EvtEventQueryIDs: int +EvtExportLogChannelPath: int +EvtExportLogFilePath: int +EvtExportLogTolerateQueryErrors: int +EvtLogAttributes: int +EvtLogCreationTime: int +EvtLogFileSize: int +EvtLogFull: int +EvtLogLastAccessTime: int +EvtLogLastWriteTime: int +EvtLogNumberOfLogRecords: int +EvtLogOldestRecordNumber: int +EvtOpenChannelPath: int +EvtOpenFilePath: int +EvtPublisherMetadataChannelReferenceFlags: int +EvtPublisherMetadataChannelReferenceID: int +EvtPublisherMetadataChannelReferenceIndex: int +EvtPublisherMetadataChannelReferenceMessageID: int +EvtPublisherMetadataChannelReferencePath: int +EvtPublisherMetadataChannelReferences: int +EvtPublisherMetadataHelpLink: int +EvtPublisherMetadataKeywordMessageID: int +EvtPublisherMetadataKeywordName: int +EvtPublisherMetadataKeywords: int +EvtPublisherMetadataKeywordValue: int +EvtPublisherMetadataLevelMessageID: int +EvtPublisherMetadataLevelName: int +EvtPublisherMetadataLevels: int +EvtPublisherMetadataLevelValue: int +EvtPublisherMetadataMessageFilePath: int +EvtPublisherMetadataOpcodeMessageID: int +EvtPublisherMetadataOpcodeName: int +EvtPublisherMetadataOpcodes: int +EvtPublisherMetadataOpcodeValue: int +EvtPublisherMetadataParameterFilePath: int +EvtPublisherMetadataPropertyIdEND: int +EvtPublisherMetadataPublisherGuid: int +EvtPublisherMetadataPublisherMessageID: int +EvtPublisherMetadataResourceFilePath: int +EvtPublisherMetadataTaskEventGuid: int +EvtPublisherMetadataTaskMessageID: int +EvtPublisherMetadataTaskName: int +EvtPublisherMetadataTasks: int +EvtPublisherMetadataTaskValue: int +EvtQueryChannelPath: int +EvtQueryFilePath: int +EvtQueryForwardDirection: int +EvtQueryReverseDirection: int +EvtQueryTolerateQueryErrors: int +EvtRenderBookmark: int +EvtRenderEventValues: int +EvtRenderEventXml: int +EvtRpcLogin: int +EvtRpcLoginAuthDefault: int +EvtRpcLoginAuthKerberos: int +EvtRpcLoginAuthNegotiate: int +EvtRpcLoginAuthNTLM: int +EvtSeekOriginMask: int 
+EvtSeekRelativeToBookmark: int +EvtSeekRelativeToCurrent: int +EvtSeekRelativeToFirst: int +EvtSeekRelativeToLast: int +EvtSeekStrict: int +EvtSubscribeActionDeliver: int +EvtSubscribeActionError: int +EvtSubscribeOriginMask: int +EvtSubscribeStartAfterBookmark: int +EvtSubscribeStartAtOldestRecord: int +EvtSubscribeStrict: int +EvtSubscribeToFutureEvents: int +EvtSubscribeTolerateQueryErrors: int +EvtVarTypeAnsiString: int +EvtVarTypeBinary: int +EvtVarTypeBoolean: int +EvtVarTypeByte: int +EvtVarTypeDouble: int +EvtVarTypeEvtHandle: int +EvtVarTypeEvtXml: int +EvtVarTypeFileTime: int +EvtVarTypeGuid: int +EvtVarTypeHexInt32: int +EvtVarTypeHexInt64: int +EvtVarTypeInt16: int +EvtVarTypeInt32: int +EvtVarTypeInt64: int +EvtVarTypeNull: int +EvtVarTypeSByte: int +EvtVarTypeSid: int +EvtVarTypeSingle: int +EvtVarTypeSizeT: int +EvtVarTypeString: int +EvtVarTypeSysTime: int +EvtVarTypeUInt16: int +EvtVarTypeUInt32: int +EvtVarTypeUInt64: int +EvtChannelPublisherList: int +EvtFormatMessageChannel: int +EvtFormatMessageEvent: int +EvtFormatMessageId: int +EvtFormatMessageKeyword: int +EvtFormatMessageLevel: int +EvtFormatMessageOpcode: int +EvtFormatMessageProvider: int +EvtFormatMessageTask: int +EvtFormatMessageXml: int +EvtRenderContextSystem: int +EvtRenderContextUser: int +EvtRenderContextValues: int +EvtSystemActivityID: int +EvtSystemChannel: int +EvtSystemComputer: int +EvtSystemEventID: int +EvtSystemEventRecordId: int +EvtSystemKeywords: int +EvtSystemLevel: int +EvtSystemOpcode: int +EvtSystemProcessID: int +EvtSystemPropertyIdEND: int +EvtSystemProviderGuid: int +EvtSystemProviderName: int +EvtSystemQualifiers: int +EvtSystemRelatedActivityID: int +EvtSystemTask: int +EvtSystemThreadID: int +EvtSystemTimeCreated: int +EvtSystemUserID: int +EvtSystemVersion: int +UNICODE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32file.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32file.pyi new file mode 100644 index 00000000..a66b91c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32file.pyi @@ -0,0 +1,447 @@ +from _typeshed import Incomplete +from socket import socket +from typing import overload + +import _win32typing +from win32.lib.pywintypes import error as error + +def AreFileApisANSI(): ... +def CancelIo(handle: int) -> None: ... +def CopyFile(_from: str, to: str, bFailIfExists) -> None: ... +def CopyFileW(_from: str, to: str, bFailIfExists) -> None: ... +def CreateDirectory(__name: str, __sa: _win32typing.PySECURITY_ATTRIBUTES) -> None: ... +def CreateDirectoryW(name: str, sa: _win32typing.PySECURITY_ATTRIBUTES) -> None: ... +def CreateDirectoryEx(templateName: str, newDirectory: str, sa: _win32typing.PySECURITY_ATTRIBUTES) -> None: ... +def CreateFile( + __fileName: str, + __desiredAccess: int, + __shareMode: int, + __attributes: _win32typing.PySECURITY_ATTRIBUTES | None, + __CreationDisposition: int, + __flagsAndAttributes: int, + __hTemplateFile: int | None, +) -> _win32typing.PyHANDLE: ... +def CreateIoCompletionPort(handle: int, existing: int, completionKey, numThreads) -> int: ... +def CreateMailslot(Name, MaxMessageSize, ReadTimeout, SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES) -> int: ... +def GetMailslotInfo(Mailslot: int) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def SetMailslotInfo(Mailslot: int, ReadTimeout) -> None: ... 
+def DefineDosDevice(flags, deviceName: str, targetPath: str) -> None: ... +def DefineDosDeviceW(flags, deviceName: str, targetPath: str) -> None: ... +def DeleteFile(fileName: str) -> None: ... +def DeviceIoControl(Device: int, IoControlCode, InBuffer, OutBuffer, Overlapped: _win32typing.PyOVERLAPPED | None = ...): ... +def FindClose(hFindFile) -> None: ... +def FindCloseChangeNotification(hChangeHandle) -> None: ... +def FindFirstChangeNotification(pathName: str, bWatchSubtree, notifyFilter): ... +def FindNextChangeNotification(hChangeHandle): ... +def FlushFileBuffers(hFile: int) -> None: ... +def GetBinaryType(appName: str): ... +def GetDiskFreeSpace(rootPathName: str) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def GetDiskFreeSpaceEx(__rootPathName: str) -> tuple[int, int, int]: ... +def GetDriveType(rootPathName: str): ... +def GetDriveTypeW(rootPathName: str): ... +def GetFileAttributes(fileName: str): ... +def GetFileAttributesW(fileName: str): ... +def GetFileTime( + handle: int, creationTime: _win32typing.PyTime, accessTime: _win32typing.PyTime, writeTime: _win32typing.PyTime +) -> tuple[_win32typing.PyTime, _win32typing.PyTime, _win32typing.PyTime]: ... +def SetFileTime( + File: int, + CreationTime: _win32typing.PyTime | None = ..., + LastAccessTime: _win32typing.PyTime | None = ..., + LastWriteTime: _win32typing.PyTime | None = ..., + UTCTimes: bool = ..., +) -> None: ... +def GetFileInformationByHandle(handle: int): ... +def GetCompressedFileSize(): ... +def GetFileSize(): ... +def AllocateReadBuffer(__bufSize: int) -> _win32typing.PyOVERLAPPEDReadBuffer: ... +@overload +def ReadFile(__hFile: int, __bufSize: int) -> tuple[int, str]: ... +@overload +def ReadFile( + __hFile: int, __buffer: _win32typing.PyOVERLAPPEDReadBuffer, __overlapped: _win32typing.PyOVERLAPPED | None +) -> tuple[int, str]: ... +def WriteFile( + __hFile: int, __data: str | bytes | _win32typing.PyOVERLAPPEDReadBuffer, __ol: _win32typing.PyOVERLAPPED | None = ... +) -> tuple[int, int]: ... +def CloseHandle(__handle: int) -> None: ... +def LockFileEx(hFile: int, _int, _int1, _int2, ol: _win32typing.PyOVERLAPPED | None = ...) -> None: ... +def UnlockFileEx(hFile: int, _int, _int1, ol: _win32typing.PyOVERLAPPED | None = ...) -> None: ... +def GetQueuedCompletionStatus(hPort: int, timeOut) -> tuple[Incomplete, Incomplete, Incomplete, _win32typing.PyOVERLAPPED]: ... +def PostQueuedCompletionStatus( + handle: int, numberOfbytes: int = ..., completionKey: int = ..., overlapped: _win32typing.PyOVERLAPPED | None = ... +): ... +def GetFileType(hFile: int): ... +def GetLogicalDrives(): ... +def GetOverlappedResult(__hFile: int, __overlapped: _win32typing.PyOVERLAPPED, __bWait: int | bool) -> int: ... +def LockFile(hFile: int, offsetLow, offsetHigh, nNumberOfBytesToLockLow, nNumberOfBytesToLockHigh) -> None: ... +def MoveFile(existingFileName: str, newFileName: str) -> None: ... +def MoveFileW(existingFileName: str, newFileName: str) -> None: ... +def MoveFileEx(existingFileName: str, newFileName: str, flags) -> None: ... +def MoveFileExW(existingFileName: str, newFileName: str, flags) -> None: ... +def QueryDosDevice(DeviceName: str) -> str: ... +def ReadDirectoryChangesW( + handle: int, size, bWatchSubtree, dwNotifyFilter, overlapped: _win32typing.PyOVERLAPPED | None = ... +) -> None: ... +def FILE_NOTIFY_INFORMATION(buffer: str, size) -> tuple[tuple[Incomplete, Incomplete], ...]: ... +def SetCurrentDirectory(lpPathName: str) -> None: ... +def SetEndOfFile(hFile: int) -> None: ... 
+def SetFileApisToANSI() -> None: ... +def SetFileApisToOEM() -> None: ... +def SetFileAttributes(__filename: str, __newAttributes: int) -> None: ... +def SetFilePointer(handle: int, offset, moveMethod) -> None: ... +def SetVolumeLabel(rootPathName: str, volumeName: str) -> None: ... +def UnlockFile(hFile: int, offsetLow, offsetHigh, nNumberOfBytesToUnlockLow, nNumberOfBytesToUnlockHigh) -> None: ... +def TransmitFile( + Socket, + File: int, + NumberOfBytesToWrite, + NumberOfBytesPerSend, + Overlapped: _win32typing.PyOVERLAPPED, + Flags, + Head: Incomplete | None = ..., + Tail: Incomplete | None = ..., +) -> None: ... +def ConnectEx( + s, name, Overlapped: _win32typing.PyOVERLAPPED, SendBuffer: Incomplete | None = ... +) -> tuple[Incomplete, Incomplete]: ... +def AcceptEx(slistening, sAccepting, buffer, ol: _win32typing.PyOVERLAPPED) -> None: ... +def CalculateSocketEndPointSize(socket): ... +def GetAcceptExSockaddrs( + sAccepting, buffer: _win32typing.PyOVERLAPPEDReadBuffer +) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def WSAEventSelect(__socket: socket, __hEvent: int, __networkEvents: int) -> None: ... +def WSAEnumNetworkEvents(__s: socket, __hEvent: int) -> dict[int, int]: ... +def WSAAsyncSelect(socket, hwnd: int, _int, networkEvents) -> None: ... +def WSASend(s, buffer: str, ol: _win32typing.PyOVERLAPPED, dwFlags) -> tuple[Incomplete, Incomplete]: ... +def WSARecv(s, buffer, ol: _win32typing.PyOVERLAPPED, dwFlags) -> tuple[Incomplete, Incomplete]: ... +def BuildCommDCB(_def: str, dcb: _win32typing.PyDCB) -> _win32typing.PyDCB: ... +def ClearCommError(__handle: int) -> tuple[Incomplete, _win32typing.PyCOMSTAT]: ... +def EscapeCommFunction(handle: int) -> None: ... +def GetCommState(handle: int) -> _win32typing.PyDCB: ... +def SetCommState(handle: int, dcb: _win32typing.PyDCB) -> None: ... +def ClearCommBreak(handle: int) -> None: ... +def GetCommMask(handle: int): ... +def SetCommMask(handle: int, val): ... +def GetCommModemStatus(handle: int): ... +def GetCommTimeouts(handle: int): ... +def SetCommTimeouts(handle: int, val): ... +def PurgeComm(handle: int, action) -> None: ... +def SetCommBreak(handle: int) -> None: ... +def SetupComm(handle: int, dwInQueue, dwOutQueue) -> None: ... +def TransmitCommChar(handle: int, cChar) -> None: ... +def WaitCommEvent(handle: int, overlapped: _win32typing.PyOVERLAPPED) -> None: ... +def SetVolumeMountPoint(VolumeMountPoint: str, VolumeName: str) -> str: ... +def DeleteVolumeMountPoint(VolumeMountPoint: str) -> None: ... +def GetVolumeNameForVolumeMountPoint(VolumeMountPoint: str) -> str: ... +def GetVolumePathName(FileName: str, BufferLength: int = ...) -> str: ... +def GetVolumePathNamesForVolumeName(VolumeName: str) -> list[Incomplete]: ... +def CreateHardLink( + FileName: str, + ExistingFileName: str, + SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES | None = ..., + Transaction: int | None = ..., +) -> None: ... +def CreateSymbolicLink(SymlinkFileName: str, TargetFileName: str, Flags: int = ..., Transaction: int | None = ...) -> None: ... +def EncryptFile(filename: str) -> None: ... +def DecryptFile(filename: str) -> None: ... +def EncryptionDisable(DirName: str, Disable) -> None: ... +def FileEncryptionStatus(FileName: str): ... +def QueryUsersOnEncryptedFile(FileName: str) -> tuple[_win32typing.PySID, str, Incomplete]: ... +def QueryRecoveryAgentsOnEncryptedFile(FileName: str) -> tuple[_win32typing.PySID, str, Incomplete]: ... 
+def RemoveUsersFromEncryptedFile(FileName: str, pHashes: tuple[tuple[_win32typing.PySID, str, Incomplete], ...]) -> None: ... +def AddUsersToEncryptedFile(FileName: str, pUsers: tuple[tuple[_win32typing.PySID, str, Incomplete], ...]) -> None: ... +def DuplicateEncryptionInfoFile( + SrcFileName: str, + DstFileName: str, + CreationDisposition, + Attributes, + SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES | None = ..., +) -> None: ... +def BackupRead( + hFile: int, NumberOfBytesToRead, Buffer, bAbort, bProcessSecurity, lpContext +) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def BackupSeek(hFile: int, NumberOfBytesToSeek, lpContext): ... +def BackupWrite( + hFile: int, NumberOfBytesToWrite, Buffer: str, bAbort, bProcessSecurity, lpContext +) -> tuple[Incomplete, Incomplete]: ... +def SetFileShortName(hFile: int, ShortName) -> None: ... +def CopyFileEx( + ExistingFileName, + NewFileName, + ProgressRoutine: _win32typing.CopyProgressRoutine | None = ..., + Data: Incomplete | None = ..., + Cancel: bool = ..., + CopyFlags: int = ..., + Transaction: int | None = ..., +) -> None: ... +def MoveFileWithProgress( + ExistingFileName, + NewFileName, + ProgressRoutine: _win32typing.CopyProgressRoutine | None = ..., + Data: Incomplete | None = ..., + Flags: int = ..., + Transaction: int | None = ..., +) -> None: ... +def ReplaceFile( + ReplacedFileName, + ReplacementFileName, + BackupFileName: Incomplete | None = ..., + ReplaceFlags: int = ..., + Exclude: Incomplete | None = ..., + Reserved: Incomplete | None = ..., +) -> None: ... +def OpenEncryptedFileRaw(FileName, Flags): ... +def ReadEncryptedFileRaw(ExportCallback, CallbackContext, Context) -> None: ... +def WriteEncryptedFileRaw(ImportCallback, CallbackContext, Context) -> None: ... +def CloseEncryptedFileRaw(Context) -> None: ... +def CreateFileW( + FileName: str, + DesiredAccess, + ShareMode, + SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES, + CreationDisposition, + FlagsAndAttributes, + TemplateFile: int | None = ..., + Transaction: int | None = ..., + MiniVersion: Incomplete | None = ..., + ExtendedParameter: Incomplete | None = ..., +) -> int: ... +def DeleteFileW(FileName: str, Transaction: int | None = ...) -> None: ... +def GetFileAttributesEx(FileName: str, InfoLevelId, Transaction: int | None = ...): ... +def SetFileAttributesW(FileName, FileAttributes, Transaction: int | None = ...) -> None: ... +def CreateDirectoryExW( + TemplateDirectory: str, + NewDirectory: str, + SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES | None = ..., + Transaction: int | None = ..., +) -> None: ... +def RemoveDirectory(PathName: str, Transaction: int | None = ...) -> None: ... +def FindFilesW(FileName: str, Transaction: int | None = ...): ... +def FindFilesIterator(FileName: str, Transaction: int | None = ...): ... +def FindStreams(FileName: str, Transaction: int | None = ...) -> list[tuple[Incomplete, str]]: ... +def FindFileNames(FileName: str, Transaction: int | None = ...) -> list[Incomplete]: ... +def GetFinalPathNameByHandle(File: int, Flags) -> str: ... +def SfcGetNextProtectedFile() -> list[Incomplete]: ... +def SfcIsFileProtected(ProtFileName: str): ... +def GetLongPathName(__ShortPath: str, __Transaction: int | None = ...) -> str: ... +def GetFullPathName(FileName, Transaction: int | None = ...): ... +def Wow64DisableWow64FsRedirection(): ... +def Wow64RevertWow64FsRedirection(OldValue) -> None: ... +def GetFileInformationByHandleEx(File: int, FileInformationClass): ... 
+def SetFileInformationByHandle(File: int, FileInformationClass, Information) -> None: ... +def ReOpenFile(OriginalFile: int, DesiredAccess, ShareMode, Flags) -> int: ... +def OpenFileById( + File: int, + FileId: _win32typing.PyIID, + DesiredAccess, + ShareMode, + Flags, + SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES | None = ..., +) -> int: ... +def DCB(*args, **kwargs): ... # incomplete +def GetFileAttributesExW(*args, **kwargs): ... # incomplete +def OVERLAPPED() -> _win32typing.PyOVERLAPPED: ... + +CALLBACK_CHUNK_FINISHED: int +CALLBACK_STREAM_SWITCH: int +CBR_110: int +CBR_115200: int +CBR_1200: int +CBR_128000: int +CBR_14400: int +CBR_19200: int +CBR_2400: int +CBR_256000: int +CBR_300: int +CBR_38400: int +CBR_4800: int +CBR_56000: int +CBR_57600: int +CBR_600: int +CBR_9600: int +CLRBREAK: int +CLRDTR: int +CLRRTS: int +COPY_FILE_ALLOW_DECRYPTED_DESTINATION: int +COPY_FILE_FAIL_IF_EXISTS: int +COPY_FILE_OPEN_SOURCE_FOR_WRITE: int +COPY_FILE_RESTARTABLE: int +CREATE_ALWAYS: int +CREATE_FOR_DIR: int +CREATE_FOR_IMPORT: int +CREATE_NEW: int +DRIVE_CDROM: int +DRIVE_FIXED: int +DRIVE_NO_ROOT_DIR: int +DRIVE_RAMDISK: int +DRIVE_REMOTE: int +DRIVE_REMOVABLE: int +DRIVE_UNKNOWN: int +DTR_CONTROL_DISABLE: int +DTR_CONTROL_ENABLE: int +DTR_CONTROL_HANDSHAKE: int +EV_BREAK: int +EV_CTS: int +EV_DSR: int +EV_ERR: int +EV_RING: int +EV_RLSD: int +EV_RXCHAR: int +EV_RXFLAG: int +EV_TXEMPTY: int +EVENPARITY: int +FD_ACCEPT: int +FD_CLOSE: int +FD_CONNECT: int +FD_GROUP_QOS: int +FD_OOB: int +FD_QOS: int +FD_READ: int +FD_ROUTING_INTERFACE_CHANGE: int +FD_WRITE: int +FILE_ALL_ACCESS: int +FILE_ATTRIBUTE_ARCHIVE: int +FILE_ATTRIBUTE_COMPRESSED: int +FILE_ATTRIBUTE_DIRECTORY: int +FILE_ATTRIBUTE_HIDDEN: int +FILE_ATTRIBUTE_NORMAL: int +FILE_ATTRIBUTE_OFFLINE: int +FILE_ATTRIBUTE_READONLY: int +FILE_ATTRIBUTE_SYSTEM: int +FILE_ATTRIBUTE_TEMPORARY: int +FILE_BEGIN: int +FILE_CURRENT: int +FILE_ENCRYPTABLE: int +FILE_END: int +FILE_FLAG_BACKUP_SEMANTICS: int +FILE_FLAG_DELETE_ON_CLOSE: int +FILE_FLAG_NO_BUFFERING: int +FILE_FLAG_OPEN_REPARSE_POINT: int +FILE_FLAG_OVERLAPPED: int +FILE_FLAG_POSIX_SEMANTICS: int +FILE_FLAG_RANDOM_ACCESS: int +FILE_FLAG_SEQUENTIAL_SCAN: int +FILE_FLAG_WRITE_THROUGH: int +FILE_GENERIC_READ: int +FILE_GENERIC_WRITE: int +FILE_IS_ENCRYPTED: int +FILE_READ_ONLY: int +FILE_ROOT_DIR: int +FILE_SHARE_DELETE: int +FILE_SHARE_READ: int +FILE_SHARE_WRITE: int +FILE_SYSTEM_ATTR: int +FILE_SYSTEM_DIR: int +FILE_SYSTEM_NOT_SUPPORT: int +FILE_TYPE_CHAR: int +FILE_TYPE_DISK: int +FILE_TYPE_PIPE: int +FILE_TYPE_UNKNOWN: int +FILE_UNKNOWN: int +FILE_USER_DISALLOWED: int +FileAllocationInfo: int +FileAttributeTagInfo: int +FileBasicInfo: int +FileCompressionInfo: int +FileDispositionInfo: int +FileEndOfFileInfo: int +FileIdBothDirectoryInfo: int +FileIdBothDirectoryRestartInfo: int +FileIdType: int +FileIoPriorityHintInfo: int +FileNameInfo: int +FileRenameInfo: int +FileStandardInfo: int +FileStreamInfo: int +GENERIC_EXECUTE: int +GENERIC_READ: int +GENERIC_WRITE: int +GetFileExInfoStandard: int +IoPriorityHintLow: int +IoPriorityHintNormal: int +IoPriorityHintVeryLow: int +MARKPARITY: int +MOVEFILE_COPY_ALLOWED: int +MOVEFILE_CREATE_HARDLINK: int +MOVEFILE_DELAY_UNTIL_REBOOT: int +MOVEFILE_FAIL_IF_NOT_TRACKABLE: int +MOVEFILE_REPLACE_EXISTING: int +MOVEFILE_WRITE_THROUGH: int +NOPARITY: int +ObjectIdType: int +ODDPARITY: int +ONE5STOPBITS: int +ONESTOPBIT: int +OPEN_ALWAYS: int +OPEN_EXISTING: int +OVERWRITE_HIDDEN: int +PROGRESS_CANCEL: int +PROGRESS_CONTINUE: int 
+PROGRESS_QUIET: int +PROGRESS_STOP: int +PURGE_RXABORT: int +PURGE_RXCLEAR: int +PURGE_TXABORT: int +PURGE_TXCLEAR: int +REPLACEFILE_IGNORE_MERGE_ERRORS: int +REPLACEFILE_WRITE_THROUGH: int +RTS_CONTROL_DISABLE: int +RTS_CONTROL_ENABLE: int +RTS_CONTROL_HANDSHAKE: int +RTS_CONTROL_TOGGLE: int +SCS_32BIT_BINARY: int +SCS_DOS_BINARY: int +SCS_OS216_BINARY: int +SCS_PIF_BINARY: int +SCS_POSIX_BINARY: int +SCS_WOW_BINARY: int +SECURITY_ANONYMOUS: int +SECURITY_CONTEXT_TRACKING: int +SECURITY_DELEGATION: int +SECURITY_EFFECTIVE_ONLY: int +SECURITY_IDENTIFICATION: int +SECURITY_IMPERSONATION: int +SETBREAK: int +SETDTR: int +SETRTS: int +SETXOFF: int +SETXON: int +SO_CONNECT_TIME: int +SO_UPDATE_ACCEPT_CONTEXT: int +SO_UPDATE_CONNECT_CONTEXT: int +SPACEPARITY: int +SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE: int +SYMBOLIC_LINK_FLAG_DIRECTORY: int +TF_DISCONNECT: int +TF_REUSE_SOCKET: int +TF_USE_DEFAULT_WORKER: int +TF_USE_KERNEL_APC: int +TF_USE_SYSTEM_THREAD: int +TF_WRITE_BEHIND: int +TRUNCATE_EXISTING: int +TWOSTOPBITS: int +WSA_IO_PENDING: int +WSA_OPERATION_ABORTED: int +WSAECONNABORTED: int +WSAECONNRESET: int +WSAEDISCON: int +WSAEFAULT: int +WSAEINPROGRESS: int +WSAEINTR: int +WSAEINVAL: int +WSAEMSGSIZE: int +WSAENETDOWN: int +WSAENETRESET: int +WSAENOBUFS: int +WSAENOTCONN: int +WSAENOTSOCK: int +WSAEOPNOTSUPP: int +WSAESHUTDOWN: int +WSAEWOULDBLOCK: int +FD_ADDRESS_LIST_CHANGE: int +INVALID_HANDLE_VALUE: int +UNICODE: int + +# win32pipe.FDCreatePipe is the only known public method to expose this. But it opens both read and write handles. +def _open_osfhandle(osfhandle: _win32typing.PyHANDLE, flags: int) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32gui.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32gui.pyi new file mode 100644 index 00000000..a233e3b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32gui.pyi @@ -0,0 +1,514 @@ +from _typeshed import Incomplete, ReadableBuffer, WriteableBuffer +from collections.abc import Callable +from typing import TypeVar +from typing_extensions import Literal + +import _win32typing +from win32.lib.pywintypes import error as error + +_T = TypeVar("_T") + +def EnumFontFamilies(hdc: int, Family: str, EnumFontFamProc, Param): ... +def set_logger(logger) -> None: ... +def LOGFONT() -> _win32typing.PyLOGFONT: ... +def CreateFontIndirect(lplf: _win32typing.PyLOGFONT): ... +def GetObject(handle: int): ... +def GetObjectType(h: int): ... +def PyGetMemory(__addr: int, __len: int): ... +def PyGetString(addr, _len=...) -> str: ... +def PySetString(addr, String, maxLen): ... +def PySetMemory(addr, String): ... +def PyGetArraySignedLong(array, index): ... +def PyGetBufferAddressAndLen(obj): ... +def FlashWindow(hwnd: int, bInvert): ... +def FlashWindowEx(hwnd: int, dwFlags, uCount, dwTimeout): ... +def GetWindowLong(hwnd: int, index): ... +def GetClassLong(hwnd: int, index): ... +def SetWindowLong(hwnd: int, index, value): ... +def CallWindowProc(wndproc, hwnd: int, msg, wparam, lparam): ... +def SendMessage( + __hwnd: int | None, __message: int, __wparam: int | None = ..., __lparam: ReadableBuffer | float | None = ... +) -> int: ... +def SendMessageTimeout( + __hwnd: int, + __message: int, + __wparam: ReadableBuffer | float | None, + __lparam: ReadableBuffer | float | None, + __flags: int, + __timeout: int, +) -> tuple[int, int]: ... 
+def PostMessage( + __hwnd: int | None, __message: int, __wparam: int | None = ..., __lparam: ReadableBuffer | float | None = ... +) -> None: ... +def PostThreadMessage(threadId, message, wparam, lparam) -> None: ... +def ReplyMessage(result): ... +def RegisterWindowMessage(name: str): ... +def DefWindowProc( + hwnd: int | None, message: int, wparam: ReadableBuffer | float | None, lparam: ReadableBuffer | float | None +) -> int: ... +def EnumWindows(__callback: Callable[[int, _T], object], __extra: _T) -> None: ... +def EnumThreadWindows(dwThreadId, __callback: Callable[[int, _T], object], __extra: _T) -> None: ... +def EnumChildWindows(__hwnd: int | None, __callback: Callable[[int, _T], object], __extra: _T) -> None: ... +def DialogBox(hInstance: int, TemplateName: _win32typing.PyResourceId, hWndParent: int, DialogFunc, InitParam: int = ...): ... +def DialogBoxParam(): ... +def DialogBoxIndirect( + hInstance: int, controllist: _win32typing.PyDialogTemplate, hWndParent: int, DialogFunc, InitParam: int = ... +): ... +def DialogBoxIndirectParam(): ... +def CreateDialogIndirect( + hInstance: int, controllist: _win32typing.PyDialogTemplate, hWndParent: int, DialogFunc, InitParam: int = ... +): ... +def EndDialog(hwnd: int, result) -> None: ... +def GetDlgItem(hDlg: int, IDDlgItem): ... +def GetDlgItemInt(hDlg: int, IDDlgItem, Signed) -> None: ... +def SetDlgItemInt(hDlg: int, IDDlgItem, Value, Signed) -> None: ... +def GetDlgCtrlID(hwnd: int): ... +def GetDlgItemText(hDlg: int, IDDlgItem) -> str: ... +def SetDlgItemText(hDlg: int, IDDlgItem, String) -> None: ... +def GetNextDlgTabItem(hDlg, hCtl, bPrevious): ... +def GetNextDlgGroupItem(hDlg, hCtl, bPrevious): ... +def SetWindowText() -> None: ... +def GetWindowText(hwnd: int) -> str: ... +def InitCommonControls() -> None: ... +def InitCommonControlsEx(flag) -> None: ... +def LoadCursor(hinstance, resid): ... +def SetCursor(hcursor): ... +def GetCursor(): ... +def GetCursorInfo() -> tuple[int, int, int, int]: ... +def CreateAcceleratorTable(accels: tuple[tuple[Incomplete, Incomplete, Incomplete], ...]): ... +def LoadMenu(hinstance, resource_id: str): ... +def DestroyMenu() -> None: ... +def SetMenu(hwnd: int, hmenu) -> None: ... +def GetMenu(__hwnd: int) -> int: ... +def LoadIcon(__hinstance: int, __resource_id_or_name: str | int) -> _win32typing.PyWNDCLASS: ... +def CopyIcon(hicon): ... +def DrawIcon(hDC, X, Y, hicon) -> None: ... +def DrawIconEx( + hDC, xLeft, yTop, hIcon, cxWidth, cyWidth, istepIfAniCur, hbrFlickerFreeDraw: _win32typing.PyGdiHANDLE, diFlags +) -> None: ... +def CreateIconIndirect(iconinfo: _win32typing.PyICONINFO): ... +def CreateIconFromResource(bits: str, fIcon, ver: int = ...) -> int: ... +def LoadImage( + __hinst: int, __name: str, __type: int, __cxDesired: int, __cyDesired: int, __fuLoad: int +) -> _win32typing.PyGdiHANDLE: ... +def DeleteObject(handle: _win32typing.PyGdiHANDLE) -> None: ... +def BitBlt(hdcDest, x, y, width, height, hdcSrc, nXSrc, nYSrc, dwRop) -> None: ... +def StretchBlt(hdcDest, x, y, width, height, hdcSrc, nXSrc, nYSrc, nWidthSrc, nHeightSrc, dwRop) -> None: ... +def PatBlt(hdc: int, XLeft, YLeft, Width, Height, Rop) -> None: ... +def SetStretchBltMode(hdc: int, StretchMode): ... +def GetStretchBltMode(hdc: int): ... +def TransparentBlt( + Dest: int, XOriginDest, YOriginDest, WidthDest, HeightDest, Src: int, XOriginSrc, YOriginSrc, WidthSrc, HeightSrc, Transparent +) -> None: ... 
+def MaskBlt( + Dest: int, XDest, YDest, Width, Height, Src: int, XSrc, YSrc, Mask: _win32typing.PyGdiHANDLE, xMask, yMask, Rop +) -> None: ... +def AlphaBlend( + Dest: int, + XOriginDest, + YOriginDest, + WidthDest, + HeightDest, + Src: int, + XOriginSrc, + YOriginSrc, + WidthSrc, + HeightSrc, + blendFunction: _win32typing.PyBLENDFUNCTION, +) -> None: ... +def MessageBox(parent, text: str, caption: str, flags): ... +def MessageBeep(_type) -> None: ... +def CreateWindow( + __className: str | _win32typing.PyResourceId, + __windowTitle: str | None, + __style: int, + __x: int, + __y: int, + __width: int, + __height: int, + __parent: int, + __menu: int, + __hinstance: int, + __reserved: Incomplete | None, +) -> int: ... +def DestroyWindow(_hwnd: int) -> None: ... +def EnableWindow(hWnd: int, bEnable): ... +def FindWindow(__ClassName: _win32typing.PyResourceId | str | None, __WindowName: str | None) -> int: ... +def FindWindowEx( + __Parent: int | None, __ChildAfter: int | None, __ClassName: _win32typing.PyResourceId | str, __WindowName: str +) -> int: ... +def DragAcceptFiles(hwnd: int, fAccept) -> None: ... +def DragDetect(hwnd: int, point: tuple[Incomplete, Incomplete]) -> None: ... +def SetDoubleClickTime(newVal) -> None: ... +def GetDoubleClickTime(): ... +def HideCaret(hWnd: int) -> None: ... +def SetCaretPos(x, y) -> None: ... +def GetCaretPos() -> tuple[Incomplete, Incomplete]: ... +def ShowCaret(hWnd: int) -> None: ... +def ShowWindow(__hWnd: int | None, __cmdShow: int) -> int: ... +def IsWindowVisible(__hwnd: int | None) -> int: ... +def IsWindowEnabled(__hwnd: int | None) -> int: ... +def SetFocus(hwnd: int) -> None: ... +def GetFocus() -> None: ... +def UpdateWindow(__hwnd: int) -> None: ... +def BringWindowToTop(hwnd: int) -> None: ... +def SetActiveWindow(hwnd: int): ... +def GetActiveWindow(): ... +def SetForegroundWindow(__hwnd: int) -> None: ... +def GetForegroundWindow() -> int: ... +def GetClientRect(hwnd: int) -> tuple[int, int, int, int]: ... +def GetDC(hwnd: int): ... +def SaveDC(hdc: int): ... +def RestoreDC(hdc: int, SavedDC) -> None: ... +def DeleteDC(hdc) -> None: ... +def CreateCompatibleDC(dc): ... +def CreateCompatibleBitmap(hdc, width, height) -> _win32typing.PyGdiHANDLE: ... +def CreateBitmap(width, height, cPlanes, cBitsPerPixel, bitmap_bits) -> _win32typing.PyGdiHANDLE: ... +def SelectObject(hdc, _object): ... +def GetCurrentObject(hdc: int, ObjectType) -> int: ... +def GetWindowRect(hwnd: int) -> tuple[int, int, int, int]: ... +def GetStockObject(Object) -> int: ... +def PostQuitMessage(__rc: int) -> None: ... +def WaitMessage() -> None: ... +def SetWindowPos(__hWnd: int, __InsertAfter: int | None, __X: int, __Y: int, __cx: int, __cy: int, __Flags: int) -> None: ... +def GetWindowPlacement(__hwnd: int) -> tuple[int, int, tuple[int, int], tuple[int, int], tuple[int, int, int, int]]: ... +def SetWindowPlacement(hWnd: int, placement) -> None: ... +def RegisterClass(__wndClass: _win32typing.PyWNDCLASS) -> _win32typing.PyResourceId: ... +def UnregisterClass(__atom: _win32typing.PyResourceId, __hinst: int) -> None: ... +def PumpMessages() -> None: ... +def PumpWaitingMessages(__firstMessage: int = ..., __lastMessage: int = ...) -> int: ... +def GetMessage(hwnd: int, _min, _max): ... +def TranslateMessage(msg): ... +def DispatchMessage(msg): ... +def TranslateAccelerator(hwnd: int, haccel, msg): ... +def PeekMessage(hwnd: int, filterMin, filterMax, removalOptions): ... +def Shell_NotifyIcon(__Message: int, __nid: _win32typing.PyNOTIFYICONDATA) -> None: ... 
+def GetSystemMenu(hwnd: int, bRevert): ... +def DrawMenuBar(hwnd: int) -> None: ... +def MoveWindow(__hwnd: int, __x: int, __y: int, __width: int, __height: int, __bRepaint: bool) -> None: ... +def CloseWindow() -> None: ... +def DeleteMenu(hmenu, position, flags) -> None: ... +def RemoveMenu(hmenu, position, flags) -> None: ... +def CreateMenu(): ... +def CreatePopupMenu(): ... +def TrackPopupMenu(hmenu, flags, x, y, reserved, hwnd: int, prcRect: _win32typing.PyRECT): ... +def CommDlgExtendedError(): ... +def ExtractIcon(hinstance, moduleName: str, index): ... +def ExtractIconEx(moduleName: str, index, numIcons: int = ...): ... +def DestroyIcon(hicon) -> None: ... +def GetIconInfo(hicon: int) -> _win32typing.PyICONINFO: ... +def ScreenToClient(hWnd: int, Point: tuple[Incomplete, Incomplete]) -> tuple[Incomplete, Incomplete]: ... +def ClientToScreen(hWnd: int, Point: tuple[Incomplete, Incomplete]) -> tuple[Incomplete, Incomplete]: ... +def PaintDesktop(hdc: int) -> None: ... +def RedrawWindow(hWnd: int, rcUpdate: tuple[int, int, int, int], hrgnUpdate: _win32typing.PyGdiHANDLE, flags) -> None: ... +def GetTextExtentPoint32(hdc: int, _str: str) -> tuple[Incomplete, Incomplete]: ... +def GetTextMetrics(): ... +def GetTextCharacterExtra(hdc: int): ... +def SetTextCharacterExtra(hdc: int, CharExtra): ... +def GetTextAlign(hdc: int): ... +def SetTextAlign(hdc: int, Mode): ... +def GetTextFace(hdc: int) -> str: ... +def GetMapMode(hdc: int): ... +def SetMapMode(hdc: int, MapMode): ... +def GetGraphicsMode(hdc: int): ... +def SetGraphicsMode(hdc: int, Mode): ... +def GetLayout(hdc: int): ... +def SetLayout(hdc: int, Layout): ... +def GetPolyFillMode(hdc: int): ... +def SetPolyFillMode(hdc: int, PolyFillMode): ... +def GetWorldTransform(hdc: int) -> _win32typing.PyXFORM: ... +def SetWorldTransform(hdc: int, Xform: _win32typing.PyXFORM) -> None: ... +def ModifyWorldTransform(hdc: int, Xform: _win32typing.PyXFORM, Mode) -> None: ... +def CombineTransform(xform1: _win32typing.PyXFORM, xform2: _win32typing.PyXFORM) -> _win32typing.PyXFORM: ... +def GetWindowOrgEx(hdc: int) -> tuple[Incomplete, Incomplete]: ... +def SetWindowOrgEx(hdc: int, X, Y) -> tuple[Incomplete, Incomplete]: ... +def GetViewportOrgEx(hdc: int) -> tuple[Incomplete, Incomplete]: ... +def SetViewportOrgEx(hdc: int, X, Y) -> tuple[Incomplete, Incomplete]: ... +def GetWindowExtEx(hdc: int) -> tuple[Incomplete, Incomplete]: ... +def SetWindowExtEx(hdc: int, XExtent, YExtent) -> tuple[Incomplete, Incomplete]: ... +def GetViewportExtEx(hdc: int) -> tuple[Incomplete, Incomplete]: ... +def SetViewportExtEx(hdc: int, XExtent, YExtent) -> tuple[Incomplete, Incomplete]: ... +def GradientFill(hdc, Vertex: tuple[_win32typing.PyTRIVERTEX, ...], Mesh, Mode) -> None: ... +def GetOpenFileName(OPENFILENAME: str): ... +def InsertMenuItem(hMenu, uItem, fByPosition, menuItem) -> None: ... +def SetMenuItemInfo(hMenu, uItem, fByPosition, menuItem) -> None: ... +def GetMenuItemInfo(__hMenu: int, __uItem: int, __fByPosition: bool, __menuItem: ReadableBuffer) -> None: ... +def GetMenuItemCount(__hMenu: int | None) -> int: ... + +# Actually returns a list of int|tuple, but lists don't support positional types +def GetMenuItemRect(__hWnd: int | None, __hMenu: int | None, __uItem: int) -> tuple[int, tuple[int, int, int, int]]: ... +def GetMenuState(hMenu, uID, flags): ... +def SetMenuDefaultItem(hMenu, uItem, fByPos) -> None: ... +def GetMenuDefaultItem(hMenu, fByPos, flags): ... +def AppendMenu() -> None: ... +def InsertMenu() -> None: ... 
+def EnableMenuItem() -> None: ... +def CheckMenuItem(): ... +def GetSubMenu(hMenu, nPos): ... +def ModifyMenu(hMnu, uPosition, uFlags, uIDNewItem, newItem: str) -> None: ... +def GetMenuItemID(hMenu, nPos): ... +def SetMenuItemBitmaps( + hMenu, uPosition, uFlags, hBitmapUnchecked: _win32typing.PyGdiHANDLE, hBitmapChecked: _win32typing.PyGdiHANDLE +) -> None: ... +def CheckMenuRadioItem(hMenu, idFirst, idLast, idCheck, uFlags) -> None: ... +def SetMenuInfo(hmenu, info) -> None: ... +def GetMenuInfo(__hmenu: int, __info: WriteableBuffer) -> None: ... +def DrawFocusRect(hDC: int, rc: tuple[int, int, int, int]) -> None: ... +def DrawText(hDC: int, String, nCount, Rect: _win32typing.PyRECT, Format) -> tuple[Incomplete, _win32typing.PyRECT]: ... +def LineTo(hdc: int, XEnd, YEnd) -> None: ... +def Ellipse(hdc: int, LeftRect, TopRect, RightRect, BottomRect) -> None: ... +def Pie(hdc: int, LeftRect, TopRect, RightRect, BottomRect, XRadial1, YRadial1, XRadial2, YRadial2) -> None: ... +def Arc(hdc: int, LeftRect, TopRect, RightRect, BottomRect, XRadial1, YRadial1, XRadial2, YRadial2) -> None: ... +def ArcTo(hdc: int, LeftRect, TopRect, RightRect, BottomRect, XRadial1, YRadial1, XRadial2, YRadial2) -> None: ... +def AngleArc(hdc: int, Y, Y1, Radius, StartAngle: float, SweepAngle: float) -> None: ... +def Chord(hdc: int, LeftRect, TopRect, RightRect, BottomRect, XRadial1, YRadial1, XRadial2, YRadial2) -> None: ... +def ExtFloodFill(arg: int, XStart, YStart, Color, FillType) -> None: ... +def SetPixel(hdc: int, X, Y, Color): ... +def GetPixel(hdc: int, XPos, YPos): ... +def GetROP2(hdc: int): ... +def SetROP2(hdc: int, DrawMode): ... +def SetPixelV(hdc: int, X, Y, Color) -> None: ... +def MoveToEx(hdc: int, X, Y) -> tuple[Incomplete, Incomplete]: ... +def GetCurrentPositionEx(hdc: int) -> tuple[Incomplete, Incomplete]: ... +def GetArcDirection(hdc: int): ... +def SetArcDirection(hdc: int, ArcDirection): ... +def Polygon(hdc: int, Points: list[tuple[Incomplete, Incomplete]]) -> None: ... +def Polyline(hdc: int, Points: list[tuple[Incomplete, Incomplete]]) -> None: ... +def PolylineTo(hdc: int, Points: list[tuple[Incomplete, Incomplete]]) -> None: ... +def PolyBezier(hdc: int, Points: list[tuple[Incomplete, Incomplete]]) -> None: ... +def PolyBezierTo(hdc: int, Points: list[tuple[Incomplete, Incomplete]]) -> None: ... +def PlgBlt( + Dest: int, + Point, + Src: int, + XSrc, + YSrc, + Width, + Height, + Mask: _win32typing.PyGdiHANDLE | None = ..., + xMask: int = ..., + yMask: int = ..., +) -> None: ... +def CreatePolygonRgn(Points: list[tuple[Incomplete, Incomplete]], PolyFillMode) -> _win32typing.PyGdiHANDLE: ... +def ExtTextOut( + hdc: int, _int, _int1, _int2, rect: _win32typing.PyRECT, string, _tuple: tuple[tuple[Incomplete, Incomplete], ...] +): ... +def GetTextColor(hdc): ... +def SetTextColor(hdc, color): ... +def GetBkMode(hdc: int): ... +def SetBkMode(hdc: int, BkMode): ... +def GetBkColor(hdc: int): ... +def SetBkColor(hdc: int, color): ... +def DrawEdge(hdc: int, rc: _win32typing.PyRECT, edge, Flags) -> _win32typing.PyRECT: ... +def FillRect(hDC: int, rc: _win32typing.PyRECT, hbr: _win32typing.PyGdiHANDLE) -> None: ... +def FillRgn(hdc: int, hrgn: _win32typing.PyGdiHANDLE, hbr: _win32typing.PyGdiHANDLE) -> None: ... +def PaintRgn(hdc: int, hrgn: _win32typing.PyGdiHANDLE) -> None: ... +def FrameRgn(hdc: int, hrgn, hbr, Width, Height) -> None: ... +def InvertRgn(hdc: int, hrgn) -> None: ... +def EqualRgn(SrcRgn1, SrcRgn2): ... +def PtInRegion(hrgn, X, Y): ... 
+def PtInRect(rect: tuple[int, int, int, int], point: tuple[Incomplete, Incomplete]): ... +def RectInRegion(hrgn, rc: _win32typing.PyRECT): ... +def SetRectRgn(hrgn, LeftRect, TopRect, RightRect, BottomRect) -> None: ... +def CombineRgn(Dest, Src1, Src2, CombineMode): ... +def DrawAnimatedRects(hwnd: int, idAni, minCoords: _win32typing.PyRECT, restCoords: _win32typing.PyRECT) -> None: ... +def CreateSolidBrush(Color) -> _win32typing.PyGdiHANDLE: ... +def CreatePatternBrush(hbmp: _win32typing.PyGdiHANDLE) -> _win32typing.PyGdiHANDLE: ... +def CreateHatchBrush(Style, clrref) -> _win32typing.PyGdiHANDLE: ... +def CreatePen(PenStyle, Width, Color) -> _win32typing.PyGdiHANDLE: ... +def GetSysColor(Index): ... +def GetSysColorBrush(Index) -> _win32typing.PyGdiHANDLE: ... +def InvalidateRect(hWnd: int, Rect: _win32typing.PyRECT, Erase) -> None: ... +def FrameRect(hDC: int, rc: _win32typing.PyRECT, hbr: _win32typing.PyGdiHANDLE) -> None: ... +def InvertRect(hDC: int, rc: _win32typing.PyRECT) -> None: ... +def WindowFromDC(hDC: int) -> int: ... +def GetUpdateRgn(hWnd: int, hRgn: _win32typing.PyGdiHANDLE, Erase): ... +def GetWindowRgn(hWnd: int, hRgn: _win32typing.PyGdiHANDLE): ... +def SetWindowRgn(hWnd: int, hRgn: _win32typing.PyGdiHANDLE, Redraw) -> None: ... +def ValidateRgn(hWnd: int, hRgn: _win32typing.PyGdiHANDLE) -> None: ... +def InvalidateRgn(hWnd: int, hRgn: _win32typing.PyGdiHANDLE, Erase) -> None: ... +def GetRgnBox(hrgn: _win32typing.PyGdiHANDLE) -> tuple[Incomplete, _win32typing.PyRECT]: ... +def OffsetRgn(hrgn: _win32typing.PyGdiHANDLE, XOffset, YOffset): ... +def Rectangle(hdc: int, LeftRect, TopRect, RightRect, BottomRect) -> None: ... +def RoundRect(hdc: int, LeftRect, TopRect, RightRect, BottomRect, Width, Height) -> None: ... +def BeginPaint() -> tuple[Incomplete, Incomplete]: ... +def EndPaint(hwnd: int, ps) -> None: ... +def BeginPath(hdc: int) -> None: ... +def EndPath(hdc: int) -> None: ... +def AbortPath(hdc: int) -> None: ... +def CloseFigure(hdc: int) -> None: ... +def FlattenPath(hdc: int) -> None: ... +def FillPath(hdc: int) -> None: ... +def WidenPath(hdc: int) -> None: ... +def StrokePath(hdc: int) -> None: ... +def StrokeAndFillPath(hdc: int) -> None: ... +def GetMiterLimit(hdc: int) -> float: ... +def SetMiterLimit(hdc: int, NewLimit: float) -> float: ... +def PathToRegion(hdc: int) -> _win32typing.PyGdiHANDLE: ... +def GetPath(hdc: int) -> tuple[Incomplete, Incomplete]: ... +def CreateRoundRectRgn(LeftRect, TopRect, RightRect, BottomRect, WidthEllipse, HeightEllipse): ... +def CreateRectRgnIndirect(rc: _win32typing.PyRECT): ... +def CreateEllipticRgnIndirect(rc: _win32typing.PyRECT): ... +def CreateWindowEx( + dwExStyle, className: str, windowTitle: str, style, x, y, width, height, parent, menu, hinstance, reserved +): ... +def GetParent(child: int) -> int: ... +def SetParent(__child: int, __child1: int | None | _win32typing.PyHANDLE) -> int: ... +def GetCursorPos() -> tuple[Incomplete, Incomplete]: ... +def GetDesktopWindow(): ... +def GetWindow(__hWnd: int, __uCmd: int) -> int: ... +def GetWindowDC(hWnd: int) -> int: ... +def IsIconic(__hWnd: int) -> int: ... +def IsWindow(__hWnd: int) -> int: ... +def IsChild(__hWndParent: int, hWnd: int) -> int: ... +def ReleaseCapture() -> None: ... +def GetCapture(): ... +def SetCapture() -> None: ... +def ReleaseDC(hWnd: int, hDC): ... +def CreateCaret(hWnd: int, hBitmap: _win32typing.PyGdiHANDLE, nWidth, nHeight) -> None: ... +def DestroyCaret() -> None: ... 
+def ScrollWindowEx( + hWnd: int, dx, dy, rcScroll: _win32typing.PyRECT, rcClip: _win32typing.PyRECT, hrgnUpdate, flags +) -> tuple[Incomplete, _win32typing.PyRECT]: ... +def SetScrollInfo(hwnd: int, nBar, scollInfo: _win32typing.PySCROLLINFO, bRedraw=...) -> None: ... +def GetScrollInfo(hwnd: int, nBar, mask) -> _win32typing.PySCROLLINFO: ... +def GetClassName(hwnd: int) -> str: ... +def WindowFromPoint(point: tuple[int, int]) -> int: ... +def ChildWindowFromPoint(hwndParent: int, point: tuple[Incomplete, Incomplete]): ... +def CreateDC(Driver: str, Device: str, InitData: _win32typing.PyDEVMODE): ... +def GetSaveFileNameW( + hwndOwner: int | None = ..., + hInstance: int | None = ..., + Filter: Incomplete | None = ..., + CustomFilter: Incomplete | None = ..., + FilterIndex: int = ..., + File: Incomplete | None = ..., + MaxFile: int = ..., + InitialDir: Incomplete | None = ..., + Title: Incomplete | None = ..., + Flags: int = ..., + DefExt: Incomplete | None = ..., + TemplateName: _win32typing.PyResourceId | None = ..., +) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def GetOpenFileNameW( + hwndOwner: int | None = ..., + hInstance: int | None = ..., + Filter: Incomplete | None = ..., + CustomFilter: Incomplete | None = ..., + FilterIndex: int = ..., + File: Incomplete | None = ..., + MaxFile: int = ..., + InitialDir: Incomplete | None = ..., + Title: Incomplete | None = ..., + Flags: int = ..., + DefExt: Incomplete | None = ..., + TemplateName: _win32typing.PyResourceId | None = ..., +) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def SystemParametersInfo(Action, Param: Incomplete | None = ..., WinIni: int = ...) -> None: ... +def SetLayeredWindowAttributes(hwnd: int, Key, Alpha, Flags) -> None: ... +def GetLayeredWindowAttributes(hwnd: int) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def UpdateLayeredWindow( + hwnd: int, + arg: tuple[int, int, int, int], + hdcDst: int | None = ..., + ptDst: tuple[Incomplete, Incomplete] | None = ..., + size: tuple[Incomplete, Incomplete] | None = ..., + hdcSrc: Incomplete | None = ..., + ptSrc: tuple[Incomplete, Incomplete] | None = ..., + Key: int = ..., + Flags: int = ..., +) -> None: ... +def AnimateWindow(hwnd: int, Time, Flags) -> None: ... +def CreateBrushIndirect(lb: _win32typing.PyLOGBRUSH) -> _win32typing.PyGdiHANDLE: ... +def ExtCreatePen(PenStyle, Width, lb: _win32typing.PyLOGBRUSH, Style: tuple[Incomplete, ...] | None = ...) -> int: ... +def DrawTextW(hDC: int, String: str, Count, Rect: _win32typing.PyRECT, Format) -> tuple[Incomplete, _win32typing.PyRECT]: ... +def EnumPropsEx(hWnd: int, EnumFunc, Param) -> None: ... +def RegisterDeviceNotification(handle: int, _filter, flags) -> _win32typing.PyHDEVNOTIFY: ... +def UnregisterDeviceNotification() -> None: ... +def RegisterHotKey(hWnd: int, _id, Modifiers, vk) -> None: ... +def GetAncestor(__hwnd: int, __gaFlags: int) -> int: ... +def GetTopWindow(__hWnd: int | None) -> int: ... +def ChildWindowFromPointEx(*args, **kwargs): ... # incomplete +def CreateDialogIndirectParam(*args, **kwargs): ... # incomplete +def DestroyAcceleratorTable(*args, **kwargs): ... # incomplete +def Edit_GetLine(*args, **kwargs): ... # incomplete +def GetModuleHandle(__lpModuleName: str | None) -> int: ... +def GetWindowTextLength(*args, **kwargs): ... # incomplete +def HIWORD(*args, **kwargs): ... # incomplete +def ImageList_Add(*args, **kwargs): ... # incomplete +def ImageList_Create(*args, **kwargs): ... # incomplete +def ImageList_Destroy(*args, **kwargs): ... 
# incomplete +def ImageList_Draw(*args, **kwargs): ... # incomplete +def ImageList_DrawEx(*args, **kwargs): ... # incomplete +def ImageList_GetIcon(*args, **kwargs): ... # incomplete +def ImageList_GetImageCount(*args, **kwargs): ... # incomplete +def ImageList_LoadBitmap(*args, **kwargs): ... # incomplete +def ImageList_LoadImage(*args, **kwargs): ... # incomplete +def ImageList_Remove(*args, **kwargs): ... # incomplete +def ImageList_Replace(*args, **kwargs): ... # incomplete +def ImageList_ReplaceIcon(*args, **kwargs): ... # incomplete +def ImageList_SetBkColor(*args, **kwargs): ... # incomplete +def ImageList_SetOverlayImage(*args, **kwargs): ... # incomplete +def LOWORD(*args, **kwargs): ... # incomplete +def ListView_SortItems(*args, **kwargs): ... # incomplete +def ListView_SortItemsEx(*args, **kwargs): ... # incomplete +def ValidateRect(*args, **kwargs): ... # incomplete +def WNDCLASS() -> _win32typing.PyWNDCLASS: ... +def lpstr(*args, **kwargs): ... # incomplete + +CLR_NONE: int +ILC_COLOR: int +ILC_COLOR16: int +ILC_COLOR24: int +ILC_COLOR32: int +ILC_COLOR4: int +ILC_COLOR8: int +ILC_COLORDDB: int +ILC_MASK: int +ILD_BLEND: int +ILD_BLEND25: int +ILD_BLEND50: int +ILD_FOCUS: int +ILD_MASK: int +ILD_NORMAL: int +ILD_SELECTED: int +ILD_TRANSPARENT: int +IMAGE_BITMAP: int +IMAGE_CURSOR: int +IMAGE_ICON: int +LR_CREATEDIBSECTION: int +LR_DEFAULTCOLOR: int +LR_DEFAULTSIZE: int +LR_LOADFROMFILE: int +LR_LOADMAP3DCOLORS: int +LR_LOADTRANSPARENT: int +LR_MONOCHROME: int +LR_SHARED: int +LR_VGACOLOR: int +NIF_ICON: int +NIF_INFO: int +NIF_MESSAGE: int +NIF_STATE: int +NIF_TIP: int +NIIF_ERROR: int +NIIF_ICON_MASK: int +NIIF_INFO: int +NIIF_NONE: int +NIIF_NOSOUND: int +NIIF_WARNING: int +NIM_ADD: int +NIM_DELETE: int +NIM_MODIFY: int +NIM_SETVERSION: int +TPM_BOTTOMALIGN: int +TPM_CENTERALIGN: int +TPM_LEFTALIGN: int +TPM_LEFTBUTTON: int +TPM_NONOTIFY: int +TPM_RETURNCMD: int +TPM_RIGHTALIGN: int +TPM_RIGHTBUTTON: int +TPM_TOPALIGN: int +TPM_VCENTERALIGN: int +UNICODE: Literal[True] +dllhandle: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32help.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32help.pyi new file mode 100644 index 00000000..7d8b0012 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32help.pyi @@ -0,0 +1,180 @@ +import _win32typing + +def WinHelp(hwnd: int, hlpFile: str, cmd, data: str | None = ...) -> None: ... +def HH_AKLINK() -> _win32typing.PyHH_AKLINK: ... +def HH_FTS_QUERY() -> _win32typing.PyHH_FTS_QUERY: ... +def HH_POPUP() -> _win32typing.PyHH_POPUP: ... +def HH_WINTYPE() -> _win32typing.PyHH_WINTYPE: ... +def NMHDR() -> _win32typing.PyNMHDR: ... +def HHN_NOTIFY() -> _win32typing.PyHHN_NOTIFY: ... +def HHNTRACK() -> _win32typing.PyHHNTRACK: ... +def HtmlHelp(hwnd: int, file: str, cmd, data: str | tuple[int] | int = ...): ... 
+ +debug: int +HH_ALINK_LOOKUP: int +HH_CLOSE_ALL: int +HH_DISPLAY_INDEX: int +HH_DISPLAY_SEARCH: int +HH_DISPLAY_TEXT_POPUP: int +HH_DISPLAY_TOC: int +HH_DISPLAY_TOPIC: int +HH_ENUM_CATEGORY: int +HH_ENUM_CATEGORY_IT: int +HH_ENUM_INFO_TYPE: int +HH_FTS_DEFAULT_PROXIMITY: int +HH_GET_LAST_ERROR: int +HH_GET_WIN_HANDLE: int +HH_GET_WIN_TYPE: int +HH_GPROPID_CONTENT_LANGUAGE: int +HH_GPROPID_CURRENT_SUBSET: int +HH_GPROPID_SINGLETHREAD: int +HH_GPROPID_TOOLBAR_MARGIN: int +HH_GPROPID_UI_LANGUAGE: int +HH_HELP_CONTEXT: int +HH_HELP_FINDER: int +HH_INITIALIZE: int +HH_KEYWORD_LOOKUP: int +HH_MAX_TABS_CUSTOM: int +HH_PRETRANSLATEMESSAGE: int +HH_RESERVED1: int +HH_RESERVED2: int +HH_RESERVED3: int +HH_RESET_IT_FILTER: int +HH_SET_EXCLUSIVE_FILTER: int +HH_SET_GLOBAL_PROPERTY: int +HH_SET_INCLUSIVE_FILTER: int +HH_SET_INFO_TYPE: int +HH_SET_WIN_TYPE: int +HH_SYNC: int +HH_TAB_AUTHOR: int +HH_TAB_CONTENTS: int +HH_TAB_CUSTOM_FIRST: int +HH_TAB_CUSTOM_LAST: int +HH_TAB_FAVORITES: int +HH_TAB_HISTORY: int +HH_TAB_INDEX: int +HH_TAB_SEARCH: int +HH_TP_HELP_CONTEXTMENU: int +HH_TP_HELP_WM_HELP: int +HH_UNINITIALIZE: int +HHACT_BACK: int +HHACT_CONTRACT: int +HHACT_CUSTOMIZE: int +HHACT_EXPAND: int +HHACT_FORWARD: int +HHACT_HIGHLIGHT: int +HHACT_HOME: int +HHACT_JUMP1: int +HHACT_JUMP2: int +HHACT_LAST_ENUM: int +HHACT_NOTES: int +HHACT_OPTIONS: int +HHACT_PRINT: int +HHACT_REFRESH: int +HHACT_STOP: int +HHACT_SYNC: int +HHACT_TAB_CONTENTS: int +HHACT_TAB_FAVORITES: int +HHACT_TAB_HISTORY: int +HHACT_TAB_INDEX: int +HHACT_TAB_SEARCH: int +HHACT_TOC_NEXT: int +HHACT_TOC_PREV: int +HHACT_ZOOM: int +HHN_FIRST: int +HHN_LAST: int +HHN_NAVCOMPLETE: int +HHN_TRACK: int +HHN_WINDOW_CREATE: int +HHWIN_BUTTON_BACK: int +HHWIN_BUTTON_BROWSE_BCK: int +HHWIN_BUTTON_BROWSE_FWD: int +HHWIN_BUTTON_CONTENTS: int +HHWIN_BUTTON_EXPAND: int +HHWIN_BUTTON_FAVORITES: int +HHWIN_BUTTON_FORWARD: int +HHWIN_BUTTON_HISTORY: int +HHWIN_BUTTON_HOME: int +HHWIN_BUTTON_INDEX: int +HHWIN_BUTTON_JUMP1: int +HHWIN_BUTTON_JUMP2: int +HHWIN_BUTTON_NOTES: int +HHWIN_BUTTON_OPTIONS: int +HHWIN_BUTTON_PRINT: int +HHWIN_BUTTON_REFRESH: int +HHWIN_BUTTON_SEARCH: int +HHWIN_BUTTON_STOP: int +HHWIN_BUTTON_SYNC: int +HHWIN_BUTTON_TOC_NEXT: int +HHWIN_BUTTON_TOC_PREV: int +HHWIN_BUTTON_ZOOM: int +HHWIN_DEF_BUTTONS: int +HHWIN_NAVTAB_BOTTOM: int +HHWIN_NAVTAB_LEFT: int +HHWIN_NAVTAB_TOP: int +HHWIN_PARAM_CUR_TAB: int +HHWIN_PARAM_EXPANSION: int +HHWIN_PARAM_EXSTYLES: int +HHWIN_PARAM_HISTORY_COUNT: int +HHWIN_PARAM_INFOTYPES: int +HHWIN_PARAM_NAV_WIDTH: int +HHWIN_PARAM_PROPERTIES: int +HHWIN_PARAM_RECT: int +HHWIN_PARAM_SHOWSTATE: int +HHWIN_PARAM_STYLES: int +HHWIN_PARAM_TABORDER: int +HHWIN_PARAM_TABPOS: int +HHWIN_PARAM_TB_FLAGS: int +HHWIN_PROP_AUTO_SYNC: int +HHWIN_PROP_CHANGE_TITLE: int +HHWIN_PROP_MENU: int +HHWIN_PROP_NAV_ONLY_WIN: int +HHWIN_PROP_NO_TOOLBAR: int +HHWIN_PROP_NODEF_EXSTYLES: int +HHWIN_PROP_NODEF_STYLES: int +HHWIN_PROP_NOTB_TEXT: int +HHWIN_PROP_NOTITLEBAR: int +HHWIN_PROP_ONTOP: int +HHWIN_PROP_POST_QUIT: int +HHWIN_PROP_TAB_ADVSEARCH: int +HHWIN_PROP_TAB_AUTOHIDESHOW: int +HHWIN_PROP_TAB_CUSTOM1: int +HHWIN_PROP_TAB_CUSTOM2: int +HHWIN_PROP_TAB_CUSTOM3: int +HHWIN_PROP_TAB_CUSTOM4: int +HHWIN_PROP_TAB_CUSTOM5: int +HHWIN_PROP_TAB_CUSTOM6: int +HHWIN_PROP_TAB_CUSTOM7: int +HHWIN_PROP_TAB_CUSTOM8: int +HHWIN_PROP_TAB_CUSTOM9: int +HHWIN_PROP_TAB_FAVORITES: int +HHWIN_PROP_TAB_HISTORY: int +HHWIN_PROP_TAB_SEARCH: int +HHWIN_PROP_TRACKING: int +HHWIN_PROP_TRI_PANE: int +HHWIN_PROP_USER_POS: int +HHWIN_TB_MARGIN: int 
+IDTB_BACK: int +IDTB_BROWSE_BACK: int +IDTB_BROWSE_FWD: int +IDTB_CONTENTS: int +IDTB_CONTRACT: int +IDTB_CUSTOMIZE: int +IDTB_EXPAND: int +IDTB_FAVORITES: int +IDTB_FORWARD: int +IDTB_HISTORY: int +IDTB_HOME: int +IDTB_INDEX: int +IDTB_JUMP1: int +IDTB_JUMP2: int +IDTB_NOTES: int +IDTB_OPTIONS: int +IDTB_PRINT: int +IDTB_REFRESH: int +IDTB_SEARCH: int +IDTB_STOP: int +IDTB_SYNC: int +IDTB_TOC_NEXT: int +IDTB_TOC_PREV: int +IDTB_ZOOM: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32inet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32inet.pyi new file mode 100644 index 00000000..ec5eb224 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32inet.pyi @@ -0,0 +1,69 @@ +from _typeshed import Incomplete + +import _win32typing +from win32.lib.pywintypes import error as error + +def InternetSetCookie(url: str, lpszCookieName: str, data: str) -> None: ... +def InternetGetCookie(Url: str, CookieName: str) -> str: ... +def InternetAttemptConnect(Reserved: int = ...) -> None: ... +def InternetCheckConnection(Url: str, Flags: int = ..., Reserved: int = ...) -> None: ... +def InternetGoOnline(Url: str, Parent: Incomplete | None = ..., Flags: int = ...) -> None: ... +def InternetCloseHandle(handle: _win32typing.PyHINTERNET) -> None: ... +def InternetConnect( + Internet: _win32typing.PyHINTERNET, + ServerName: str, + ServerPort, + Username: str, + Password: str, + Service, + Flags, + Context: Incomplete | None = ..., +) -> None: ... +def InternetOpen(agent: str, proxyName: str, proxyBypass: str, flags) -> None: ... +def InternetOpenUrl( + Internet: _win32typing.PyHINTERNET, Url: str, Headers: str | None = ..., Flags: int = ..., Context: Incomplete | None = ... +) -> _win32typing.PyHINTERNET: ... +def InternetCanonicalizeUrl(url: str, flags: int = ...) -> str: ... +def InternetGetLastResponseInfo() -> tuple[Incomplete, str]: ... +def InternetReadFile(hInternet: _win32typing.PyHINTERNET, size) -> str: ... +def InternetWriteFile(File: _win32typing.PyHINTERNET, Buffer: str): ... +def FtpOpenFile( + hConnect: _win32typing.PyHINTERNET, FileName: str, Access, Flags, Context: Incomplete | None = ... +) -> _win32typing.PyHINTERNET: ... +def FtpCommand( + Connect: _win32typing.PyHINTERNET, ExpectResponse, Flags, Command: str, Context: Incomplete | None = ... +) -> _win32typing.PyHINTERNET: ... +def InternetQueryOption(hInternet: _win32typing.PyHINTERNET, Option): ... +def InternetSetOption(hInternet: _win32typing.PyHINTERNET, Option, Buffer) -> None: ... +def FindFirstUrlCacheEntry(SearchPattern: Incomplete | None = ...) -> tuple[_win32typing.PyUrlCacheHANDLE, Incomplete]: ... +def FindNextUrlCacheEntry(EnumHandle: _win32typing.PyUrlCacheHANDLE): ... +def FindFirstUrlCacheEntryEx( + SearchPattern: Incomplete | None = ..., Flags: int = ..., Filter: int = ..., GroupId=... +) -> tuple[_win32typing.PyUrlCacheHANDLE, Incomplete]: ... +def FindNextUrlCacheEntryEx(EnumHandle: _win32typing.PyUrlCacheHANDLE): ... +def FindCloseUrlCache(EnumHandle: _win32typing.PyUrlCacheHANDLE) -> None: ... +def FindFirstUrlCacheGroup(Filter) -> tuple[_win32typing.PyUrlCacheHANDLE, Incomplete]: ... +def FindNextUrlCacheGroup(Find: int): ... +def GetUrlCacheEntryInfo(UrlName): ... +def DeleteUrlCacheGroup(GroupId, Attributes) -> None: ... +def CreateUrlCacheGroup(Flags: int = ...): ... +def CreateUrlCacheEntry(UrlName, ExpectedFileSize, FileExtension): ... 
+def CommitUrlCacheEntry( + UrlName, + LocalFileName, + CacheEntryType, + ExpireTime: _win32typing.PyTime | None = ..., + LastModifiedTime: _win32typing.PyTime | None = ..., + HeaderInfo: Incomplete | None = ..., + OriginalUrl: Incomplete | None = ..., +): ... +def SetUrlCacheEntryGroup(UrlName, Flags, GroupId) -> None: ... +def GetUrlCacheGroupAttribute(GroupId, Attributes): ... +def SetUrlCacheGroupAttribute(GroupId, Attributes, GroupInfo, Flags=...) -> None: ... +def DeleteUrlCacheEntry(UrlName) -> None: ... +def WinHttpGetDefaultProxyConfiguration(*args, **kwargs): ... # incomplete +def WinHttpGetIEProxyConfigForCurrentUser(*args, **kwargs): ... # incomplete +def WinHttpGetProxyForUrl(*args, **kwargs): ... # incomplete +def WinHttpOpen(*args, **kwargs): ... # incomplete + +UNICODE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32job.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32job.pyi new file mode 100644 index 00000000..23b5d7af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32job.pyi @@ -0,0 +1,74 @@ +import _win32typing +from win32.lib.pywintypes import error as error + +def AssignProcessToJobObject(hJob: int, hProcess: int) -> None: ... +def CreateJobObject(__jobAttributes: _win32typing.PySECURITY_ATTRIBUTES | None, __name: str) -> None: ... +def OpenJobObject(desiredAccess, inheritHandles, name) -> None: ... +def TerminateJobObject(hJob: int, exitCode) -> None: ... +def UserHandleGrantAccess(hUserHandle: int, hJob: int, grant) -> None: ... +def IsProcessInJob(__hProcess: int, __hJob: int): ... +def QueryInformationJobObject(Job: int, JobObjectInfoClass): ... +def SetInformationJobObject(Job: int, JobObjectInfoClass, JobObjectInfo) -> None: ... 
+ +JOB_OBJECT_ALL_ACCESS: int +JOB_OBJECT_ASSIGN_PROCESS: int +JOB_OBJECT_BASIC_LIMIT_VALID_FLAGS: int +JOB_OBJECT_EXTENDED_LIMIT_VALID_FLAGS: int +JOB_OBJECT_LIMIT_ACTIVE_PROCESS: int +JOB_OBJECT_LIMIT_AFFINITY: int +JOB_OBJECT_LIMIT_BREAKAWAY_OK: int +JOB_OBJECT_LIMIT_DIE_ON_UNHANDLED_EXCEPTION: int +JOB_OBJECT_LIMIT_JOB_MEMORY: int +JOB_OBJECT_LIMIT_JOB_TIME: int +JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE: int +JOB_OBJECT_LIMIT_PRESERVE_JOB_TIME: int +JOB_OBJECT_LIMIT_PRIORITY_CLASS: int +JOB_OBJECT_LIMIT_PROCESS_MEMORY: int +JOB_OBJECT_LIMIT_PROCESS_TIME: int +JOB_OBJECT_LIMIT_SCHEDULING_CLASS: int +JOB_OBJECT_LIMIT_SILENT_BREAKAWAY_OK: int +JOB_OBJECT_LIMIT_VALID_FLAGS: int +JOB_OBJECT_LIMIT_WORKINGSET: int +JOB_OBJECT_MSG_ABNORMAL_EXIT_PROCESS: int +JOB_OBJECT_MSG_ACTIVE_PROCESS_LIMIT: int +JOB_OBJECT_MSG_ACTIVE_PROCESS_ZERO: int +JOB_OBJECT_MSG_END_OF_JOB_TIME: int +JOB_OBJECT_MSG_END_OF_PROCESS_TIME: int +JOB_OBJECT_MSG_EXIT_PROCESS: int +JOB_OBJECT_MSG_JOB_MEMORY_LIMIT: int +JOB_OBJECT_MSG_NEW_PROCESS: int +JOB_OBJECT_MSG_PROCESS_MEMORY_LIMIT: int +JOB_OBJECT_POST_AT_END_OF_JOB: int +JOB_OBJECT_QUERY: int +JOB_OBJECT_SECURITY_FILTER_TOKENS: int +JOB_OBJECT_SECURITY_NO_ADMIN: int +JOB_OBJECT_SECURITY_ONLY_TOKEN: int +JOB_OBJECT_SECURITY_RESTRICTED_TOKEN: int +JOB_OBJECT_SECURITY_VALID_FLAGS: int +JOB_OBJECT_SET_ATTRIBUTES: int +JOB_OBJECT_SET_SECURITY_ATTRIBUTES: int +JOB_OBJECT_TERMINATE: int +JOB_OBJECT_TERMINATE_AT_END_OF_JOB: int +JOB_OBJECT_UI_VALID_FLAGS: int +JOB_OBJECT_UILIMIT_ALL: int +JOB_OBJECT_UILIMIT_DESKTOP: int +JOB_OBJECT_UILIMIT_DISPLAYSETTINGS: int +JOB_OBJECT_UILIMIT_EXITWINDOWS: int +JOB_OBJECT_UILIMIT_GLOBALATOMS: int +JOB_OBJECT_UILIMIT_HANDLES: int +JOB_OBJECT_UILIMIT_NONE: int +JOB_OBJECT_UILIMIT_READCLIPBOARD: int +JOB_OBJECT_UILIMIT_SYSTEMPARAMETERS: int +JOB_OBJECT_UILIMIT_WRITECLIPBOARD: int +JobObjectAssociateCompletionPortInformation: int +JobObjectBasicAccountingInformation: int +JobObjectBasicAndIoAccountingInformation: int +JobObjectBasicLimitInformation: int +JobObjectBasicUIRestrictions: int +JobObjectEndOfJobTimeInformation: int +JobObjectExtendedLimitInformation: int +JobObjectJobSetInformation: int +JobObjectSecurityLimitInformation: int +MaxJobObjectInfoClass: int +JobObjectBasicProcessIdList: int +UNICODE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32lz.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32lz.pyi new file mode 100644 index 00000000..21a271c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32lz.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +from win32.lib.pywintypes import error as error + +def GetExpandedName(Source) -> str: ... +def Close(handle) -> None: ... +def Copy(hSrc, hDest): ... +def Init(handle) -> None: ... +def OpenFile(fileName: str, action) -> tuple[Incomplete, Incomplete]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32net.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32net.pyi new file mode 100644 index 00000000..45368106 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32net.pyi @@ -0,0 +1,90 @@ +from _typeshed import Incomplete + +from win32.lib.pywintypes import error as error + +def NetGetJoinInformation() -> tuple[str, Incomplete]: ... 
+def NetGroupGetInfo(server: str, groupname: str, level): ... +def NetGroupGetUsers( + server: str, groupName: str, level, resumeHandle: int = ..., prefLen: int = ... +) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def NetGroupSetUsers(server: str, group: str, level, members: tuple[Incomplete, Incomplete]) -> None: ... +def NetGroupSetInfo(server: str, groupname: str, level, data) -> None: ... +def NetGroupAdd(server: str, level, data) -> None: ... +def NetGroupAddUser(server: str, group: str, username: str) -> None: ... +def NetGroupDel(server: str, groupname: str) -> None: ... +def NetGroupDelUser(server: str, group: str, username: str) -> None: ... +def NetGroupEnum(server: str, level, prefLen, resumeHandle=...) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def NetLocalGroupAddMembers(server: str, group: str, level, members: tuple[Incomplete, Incomplete]) -> None: ... +def NetLocalGroupDelMembers(server: str, group: str, members: list[str]) -> None: ... +def NetLocalGroupGetMembers( + server: str, groupName: str, level, resumeHandle: int = ..., prefLen: int = ... +) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def NetLocalGroupSetMembers(server: str, group: str, level, members: tuple[Incomplete, Incomplete]) -> None: ... +def NetMessageBufferSend(domain: str, userName: str, fromName: str, message: str) -> None: ... +def NetMessageNameAdd(server, msgname) -> None: ... +def NetMessageNameDel(server, msgname) -> None: ... +def NetMessageNameEnum(Server) -> None: ... +def NetServerEnum( + server: str, level, _type, prefLen, domain: str | None = ..., resumeHandle: int = ... +) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def NetServerGetInfo(server: str, level): ... +def NetServerSetInfo(server: str, level, data) -> None: ... +def NetShareAdd(server: str, level, data) -> None: ... +def NetShareDel(server: str, shareName: str, reserved: int = ...) -> None: ... +def NetShareCheck(server: str, deviceName: str) -> tuple[Incomplete, Incomplete]: ... +def NetShareEnum( + server: str, level, prefLen, serverName, resumeHandle=... +) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def NetShareGetInfo(server: str, netname: str, level): ... +def NetShareSetInfo(server: str, netname: str, level, data) -> None: ... +def NetUserAdd(server: str, level, data) -> None: ... +def NetUserChangePassword(server: str, username: str, oldPassword: str, newPassword: str) -> None: ... +def NetUserEnum(server: str, level, arg, prefLen, resumeHandle=...) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def NetUserGetGroups(serverName: str, userName: str) -> list[tuple[Incomplete, Incomplete]]: ... +def NetUserGetInfo(server: str, username: str, level): ... +def NetUserGetLocalGroups(serverName: str, userName: str, flags) -> list[Incomplete]: ... +def NetUserSetInfo(server: str, username: str, level, data) -> None: ... +def NetUserDel(server: str, username: str) -> None: ... +def NetUserModalsGet(server: str, level): ... +def NetUserModalsSet(server: str, level, data) -> None: ... +def NetWkstaUserEnum(server: str, level, prefLen, resumeHandle=...) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def NetWkstaGetInfo(server: str, level): ... +def NetWkstaSetInfo(server: str, level, data) -> None: ... +def NetWkstaTransportEnum( + server: str, level, prefLen, resumeHandle=... +) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def NetWkstaTransportAdd(server: str, level, data) -> None: ... 
+def NetWkstaTransportDel(server: str, TransportName: str, ucond: int = ...) -> None: ... +def NetServerDiskEnum(server: str, level): ... +def NetUseAdd(server: str, level, data) -> None: ... +def NetUseDel(server: str, useName: str, forceCond: int = ...) -> None: ... +def NetUseEnum(server: str, level, prefLen, resumeHandle=...) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def NetUseGetInfo(server: str, usename: str, level: int = ...): ... +def NetGetAnyDCName(server: str | None = ..., domain: str | None = ...) -> str: ... +def NetGetDCName(server: str | None = ..., domain: str | None = ...) -> str: ... +def NetSessionEnum( + level, server: str | None = ..., client: str | None = ..., username: str | None = ... +) -> tuple[Incomplete, ...]: ... +def NetSessionDel(server: str, client: str | None = ..., username: str | None = ...) -> None: ... +def NetSessionGetInfo(level, server: str, client: str, username: str): ... +def NetFileEnum( + level, servername: str | None = ..., basepath: str | None = ..., username: str | None = ... +) -> tuple[Incomplete, ...]: ... +def NetFileClose(servername: str, fileid) -> None: ... +def NetFileGetInfo(level, servername: str, fileid): ... +def NetStatisticsGet(server: str, service: str, level, options): ... +def NetServerComputerNameAdd(ServerName: str, EmulatedDomainName: str, EmulatedServerName: str) -> None: ... +def NetServerComputerNameDel(ServerName: str, EmulatedServerName: str) -> None: ... +def NetValidateName(Server: str, Name: str, NameType, Account: str | None = ..., Password: str | None = ...) -> None: ... +def NetValidatePasswordPolicy(Server: str, Qualifier, ValidationType, arg) -> None: ... +def NetLocalGroupAdd(*args, **kwargs): ... # incomplete +def NetLocalGroupDel(*args, **kwargs): ... # incomplete +def NetLocalGroupEnum(*args, **kwargs): ... # incomplete +def NetLocalGroupGetInfo(*args, **kwargs): ... # incomplete +def NetLocalGroupSetInfo(*args, **kwargs): ... # incomplete + +SERVICE_SERVER: str +SERVICE_WORKSTATION: str +USE_FORCE: int +USE_LOTS_OF_FORCE: int +USE_NOFORCE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32pdh.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32pdh.pyi new file mode 100644 index 00000000..c2b768e1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32pdh.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete + +from win32.lib.pywintypes import error as error + +def AddCounter(hQuery, path: str, userData: int = ...): ... +def AddEnglishCounter(hQuery, path: str, userData: int = ...): ... +def RemoveCounter(handle) -> None: ... +def EnumObjectItems(DataSource: str | None, machine: str | None, _object: str, detailLevel, flags=...): ... +def EnumObjects(DataSource: str | None, machine: str | None, detailLevel: int, refresh: bool = ...): ... +def OpenQuery(DataSource: Incomplete | None = ..., userData: int = ...): ... +def CloseQuery(handle) -> None: ... +def MakeCounterPath( + elements: tuple[Incomplete, Incomplete, Incomplete, Incomplete, Incomplete, Incomplete], flags=... +) -> None: ... +def GetCounterInfo(handle, bRetrieveExplainText) -> None: ... +def GetFormattedCounterValue(handle, _format) -> tuple[Incomplete, Incomplete]: ... +def CollectQueryData(hQuery) -> None: ... +def ValidatePath(path: str): ... +def ExpandCounterPath(wildCardPath: str) -> tuple[Incomplete, Incomplete]: ... 
+def ParseCounterPath(path: str, flags=...) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete, Incomplete, Incomplete]: ... +def ParseInstanceName(instanceName: str) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def SetCounterScaleFactor(hCounter, factor) -> None: ... +def BrowseCounters( + Flags: tuple[Incomplete, ...] | None, + hWndOwner: int, + CallBack1, + CallBack2, + DialogBoxCaption: str | None = ..., + InitialPath: Incomplete | None = ..., + DataSource: Incomplete | None = ..., + ReturnMultiple: bool = ..., + CallBackArg: Incomplete | None = ..., +) -> str: ... +def ConnectMachine(machineName: str) -> str: ... +def LookupPerfIndexByName(machineName: str, instanceName: str): ... +def LookupPerfNameByIndex(machineName: str | None, index) -> str: ... +def GetFormattedCounterArray(*args, **kwargs): ... # incomplete + +PDH_FMT_1000: int +PDH_FMT_ANSI: int +PDH_FMT_DOUBLE: int +PDH_FMT_LARGE: int +PDH_FMT_LONG: int +PDH_FMT_NODATA: int +PDH_FMT_NOSCALE: int +PDH_FMT_RAW: int +PDH_FMT_UNICODE: int +PDH_MAX_SCALE: int +PDH_MIN_SCALE: int +PDH_PATH_WBEM_INPUT: int +PDH_PATH_WBEM_RESULT: int +PDH_VERSION: int +PERF_DETAIL_ADVANCED: int +PERF_DETAIL_EXPERT: int +PERF_DETAIL_NOVICE: int +PERF_DETAIL_WIZARD: int + +class counter_status_error(Exception): ... + +PDH_FMT_NOCAP100: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32pipe.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32pipe.pyi new file mode 100644 index 00000000..7fa9ca0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32pipe.pyi @@ -0,0 +1,60 @@ +from _typeshed import Incomplete + +import _win32typing +from win32.lib.pywintypes import error as error + +def GetNamedPipeHandleState(hPipe: int, bGetCollectionData=...) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete, str]: ... +def SetNamedPipeHandleState( + __hPipe: int, __Mode: int, __MaxCollectionCount: None | Incomplete, __CollectDataTimeout: None | Incomplete +) -> None: ... +def ConnectNamedPipe(hPipe: int, overlapped: _win32typing.PyOVERLAPPED | None = ...): ... +def TransactNamedPipe( + pipeName, + writeData: str, + buffer_bufSize: _win32typing.PyOVERLAPPEDReadBuffer, + overlapped: _win32typing.PyOVERLAPPED | None = ..., +) -> str: ... +def CallNamedPipe(pipeName, data: str, bufSize, timeOut) -> str: ... +def CreatePipe(__sa: _win32typing.PySECURITY_ATTRIBUTES, __nSize: int) -> tuple[int, int]: ... +def FdCreatePipe(sa: _win32typing.PySECURITY_ATTRIBUTES, nSize, mode) -> tuple[Incomplete, Incomplete]: ... +def CreateNamedPipe( + pipeName: str, + openMode, + pipeMode, + nMaxInstances, + nOutBufferSize, + nInBufferSize, + nDefaultTimeOut, + sa: _win32typing.PySECURITY_ATTRIBUTES, +) -> int: ... +def DisconnectNamedPipe(hFile: int) -> None: ... +def GetOverlappedResult(__hFile: int, __overlapped: _win32typing.PyOVERLAPPED, __bWait: int | bool) -> int: ... +def WaitNamedPipe(pipeName: str, timeout) -> None: ... +def GetNamedPipeInfo(hNamedPipe: int) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete]: ... +def PeekNamedPipe(__hPipe: int, __size: int) -> tuple[str, int, Incomplete]: ... +def GetNamedPipeClientProcessId(hPipe: int): ... +def GetNamedPipeServerProcessId(hPipe: int): ... +def GetNamedPipeClientSessionId(hPipe: int): ... +def GetNamedPipeServerSessionId(hPipe: int): ... +def popen(cmdstring: str, mode: str): ... +def popen2(*args, **kwargs): ... 
# incomplete +def popen3(*args, **kwargs): ... # incomplete +def popen4(*args, **kwargs): ... # incomplete + +FILE_FLAG_FIRST_PIPE_INSTANCE: int +PIPE_ACCEPT_REMOTE_CLIENTS: int +PIPE_REJECT_REMOTE_CLIENTS: int +NMPWAIT_NOWAIT: int +NMPWAIT_USE_DEFAULT_WAIT: int +NMPWAIT_WAIT_FOREVER: int +PIPE_ACCESS_DUPLEX: int +PIPE_ACCESS_INBOUND: int +PIPE_ACCESS_OUTBOUND: int +PIPE_NOWAIT: int +PIPE_READMODE_BYTE: int +PIPE_READMODE_MESSAGE: int +PIPE_TYPE_BYTE: int +PIPE_TYPE_MESSAGE: int +PIPE_UNLIMITED_INSTANCES: int +PIPE_WAIT: int +UNICODE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32print.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32print.pyi new file mode 100644 index 00000000..7d726f42 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32print.pyi @@ -0,0 +1,197 @@ +from _typeshed import Incomplete + +import _win32typing + +def OpenPrinter(printer: str, Defaults: Incomplete | None = ...) -> _win32typing.PyPrinterHANDLE: ... +def GetPrinter(hPrinter: _win32typing.PyPrinterHANDLE, Level: int = ...): ... +def SetPrinter(hPrinter: _win32typing.PyPrinterHANDLE, Level, pPrinter, Command) -> None: ... +def ClosePrinter(hPrinter: _win32typing.PyPrinterHANDLE) -> None: ... +def AddPrinterConnection(printer: str): ... +def DeletePrinterConnection(printer: str): ... +def EnumPrinters(flags, name: str | None = ..., level: int = ...): ... +def GetDefaultPrinter() -> str: ... +def GetDefaultPrinterW() -> str: ... +def SetDefaultPrinter(printer: str): ... +def SetDefaultPrinterW(Printer: str): ... +def StartDocPrinter(hprinter: _win32typing.PyPrinterHANDLE, _tuple, level: int = ...): ... +def EndDocPrinter(hPrinter: _win32typing.PyPrinterHANDLE): ... +def AbortPrinter(hPrinter: _win32typing.PyPrinterHANDLE) -> None: ... +def StartPagePrinter(hprinter: _win32typing.PyPrinterHANDLE) -> None: ... +def EndPagePrinter(hprinter: _win32typing.PyPrinterHANDLE) -> None: ... +def StartDoc(hdc: int, docinfo): ... +def EndDoc(hdc: int) -> None: ... +def AbortDoc(hdc: int) -> None: ... +def StartPage(hdc: int) -> None: ... +def EndPage(hdc: int) -> None: ... +def WritePrinter(hprinter: _win32typing.PyPrinterHANDLE, buf: str): ... +def EnumJobs(hPrinter: _win32typing.PyPrinterHANDLE, FirstJob, NoJobs, Level=...): ... +def GetJob(hPrinter: _win32typing.PyPrinterHANDLE, JobID, Level: int = ...): ... +def SetJob(hPrinter: _win32typing.PyPrinterHANDLE, JobID, Level, JobInfo, Command): ... +def DocumentProperties( + HWnd: int, + hPrinter: _win32typing.PyPrinterHANDLE, + DeviceName: str, + DevModeOutput: _win32typing.PyDEVMODE, + DevModeInput: _win32typing.PyDEVMODE, + Mode, +): ... +def EnumPrintProcessors(Server: str | None = ..., Environment: str | None = ...) -> tuple[str, ...]: ... +def EnumPrintProcessorDatatypes(ServerName: str, PrintProcessorName: str) -> tuple[str, ...]: ... +def EnumPrinterDrivers(Server: str | None = ..., Environment: str | None = ..., Level=...) -> tuple[Incomplete, ...]: ... +def EnumForms(hprinter: _win32typing.PyPrinterHANDLE) -> tuple[_win32typing.FORM_INFO_1, ...]: ... +def AddForm(hprinter: _win32typing.PyPrinterHANDLE, Form) -> None: ... +def DeleteForm(hprinter: _win32typing.PyPrinterHANDLE, FormName: str) -> None: ... +def GetForm(hprinter: _win32typing.PyPrinterHANDLE, FormName: str) -> None: ... +def SetForm(hprinter: _win32typing.PyPrinterHANDLE, FormName: str, Form) -> None: ... 
+def AddJob(hprinter: _win32typing.PyPrinterHANDLE) -> None: ... +def ScheduleJob(hprinter: _win32typing.PyPrinterHANDLE, JobId) -> None: ... +def DeviceCapabilities(Device: str, Port: str, Capability, DEVMODE: _win32typing.PyDEVMODE | None = ...) -> None: ... +def GetDeviceCaps(hdc: int, Index): ... +def EnumMonitors(Name: str, Level) -> tuple[Incomplete, ...]: ... +def EnumPorts(Name: str, Level) -> tuple[Incomplete, ...]: ... +def GetPrintProcessorDirectory(Name: str, Environment: str) -> str: ... +def GetPrinterDriverDirectory(Name: str, Environment: str) -> str: ... +def AddPrinter(Name: str, Level, pPrinter) -> _win32typing.PyPrinterHANDLE: ... +def DeletePrinter(hPrinter: _win32typing.PyPrinterHANDLE) -> None: ... +def DeletePrinterDriver(Server: str, Environment: str, DriverName: str) -> None: ... +def DeletePrinterDriverEx(Server: str, Environment: str, DriverName: str, DeleteFlag, VersionFlag) -> None: ... +def FlushPrinter(Printer: _win32typing.PyPrinterHANDLE, Buf, Sleep): ... + +DEF_PRIORITY: int +DI_APPBANDING: int +DI_ROPS_READ_DESTINATION: int +DPD_DELETE_ALL_FILES: int +DPD_DELETE_SPECIFIC_VERSION: int +DPD_DELETE_UNUSED_FILES: int +DSPRINT_PENDING: int +DSPRINT_PUBLISH: int +DSPRINT_REPUBLISH: int +DSPRINT_UNPUBLISH: int +DSPRINT_UPDATE: int +FORM_BUILTIN: int +FORM_PRINTER: int +FORM_USER: int +JOB_ACCESS_ADMINISTER: int +JOB_ACCESS_READ: int +JOB_ALL_ACCESS: int +JOB_CONTROL_CANCEL: int +JOB_CONTROL_DELETE: int +JOB_CONTROL_LAST_PAGE_EJECTED: int +JOB_CONTROL_PAUSE: int +JOB_CONTROL_RESTART: int +JOB_CONTROL_RESUME: int +JOB_CONTROL_SENT_TO_PRINTER: int +JOB_EXECUTE: int +JOB_INFO_1: int +JOB_POSITION_UNSPECIFIED: int +JOB_READ: int +JOB_STATUS_BLOCKED_DEVQ: int +JOB_STATUS_COMPLETE: int +JOB_STATUS_DELETED: int +JOB_STATUS_DELETING: int +JOB_STATUS_ERROR: int +JOB_STATUS_OFFLINE: int +JOB_STATUS_PAPEROUT: int +JOB_STATUS_PAUSED: int +JOB_STATUS_PRINTED: int +JOB_STATUS_PRINTING: int +JOB_STATUS_RESTART: int +JOB_STATUS_SPOOLING: int +JOB_STATUS_USER_INTERVENTION: int +JOB_WRITE: int +MAX_PRIORITY: int +MIN_PRIORITY: int +PORT_STATUS_DOOR_OPEN: int +PORT_STATUS_NO_TONER: int +PORT_STATUS_OFFLINE: int +PORT_STATUS_OUTPUT_BIN_FULL: int +PORT_STATUS_OUT_OF_MEMORY: int +PORT_STATUS_PAPER_JAM: int +PORT_STATUS_PAPER_OUT: int +PORT_STATUS_PAPER_PROBLEM: int +PORT_STATUS_POWER_SAVE: int +PORT_STATUS_TONER_LOW: int +PORT_STATUS_TYPE_ERROR: int +PORT_STATUS_TYPE_INFO: int +PORT_STATUS_TYPE_WARNING: int +PORT_STATUS_USER_INTERVENTION: int +PORT_STATUS_WARMING_UP: int +PORT_TYPE_NET_ATTACHED: int +PORT_TYPE_READ: int +PORT_TYPE_REDIRECTED: int +PORT_TYPE_WRITE: int +PRINTER_ACCESS_ADMINISTER: int +PRINTER_ACCESS_USE: int +PRINTER_ALL_ACCESS: int +PRINTER_ATTRIBUTE_DEFAULT: int +PRINTER_ATTRIBUTE_DIRECT: int +PRINTER_ATTRIBUTE_DO_COMPLETE_FIRST: int +PRINTER_ATTRIBUTE_ENABLE_BIDI: int +PRINTER_ATTRIBUTE_ENABLE_DEVQ: int +PRINTER_ATTRIBUTE_FAX: int +PRINTER_ATTRIBUTE_HIDDEN: int +PRINTER_ATTRIBUTE_KEEPPRINTEDJOBS: int +PRINTER_ATTRIBUTE_LOCAL: int +PRINTER_ATTRIBUTE_NETWORK: int +PRINTER_ATTRIBUTE_PUBLISHED: int +PRINTER_ATTRIBUTE_QUEUED: int +PRINTER_ATTRIBUTE_RAW_ONLY: int +PRINTER_ATTRIBUTE_SHARED: int +PRINTER_ATTRIBUTE_TS: int +PRINTER_ATTRIBUTE_WORK_OFFLINE: int +PRINTER_CONTROL_PAUSE: int +PRINTER_CONTROL_PURGE: int +PRINTER_CONTROL_RESUME: int +PRINTER_CONTROL_SET_STATUS: int +PRINTER_ENUM_CONNECTIONS: int +PRINTER_ENUM_CONTAINER: int +PRINTER_ENUM_DEFAULT: int +PRINTER_ENUM_EXPAND: int +PRINTER_ENUM_ICON1: int +PRINTER_ENUM_ICON2: int +PRINTER_ENUM_ICON3: int 
+PRINTER_ENUM_ICON4: int +PRINTER_ENUM_ICON5: int +PRINTER_ENUM_ICON6: int +PRINTER_ENUM_ICON7: int +PRINTER_ENUM_ICON8: int +PRINTER_ENUM_LOCAL: int +PRINTER_ENUM_NAME: int +PRINTER_ENUM_NETWORK: int +PRINTER_ENUM_REMOTE: int +PRINTER_ENUM_SHARED: int +PRINTER_EXECUTE: int +PRINTER_INFO_1: int +PRINTER_READ: int +PRINTER_STATUS_BUSY: int +PRINTER_STATUS_DOOR_OPEN: int +PRINTER_STATUS_ERROR: int +PRINTER_STATUS_INITIALIZING: int +PRINTER_STATUS_IO_ACTIVE: int +PRINTER_STATUS_MANUAL_FEED: int +PRINTER_STATUS_NOT_AVAILABLE: int +PRINTER_STATUS_NO_TONER: int +PRINTER_STATUS_OFFLINE: int +PRINTER_STATUS_OUTPUT_BIN_FULL: int +PRINTER_STATUS_OUT_OF_MEMORY: int +PRINTER_STATUS_PAGE_PUNT: int +PRINTER_STATUS_PAPER_JAM: int +PRINTER_STATUS_PAPER_OUT: int +PRINTER_STATUS_PAPER_PROBLEM: int +PRINTER_STATUS_PAUSED: int +PRINTER_STATUS_PENDING_DELETION: int +PRINTER_STATUS_POWER_SAVE: int +PRINTER_STATUS_PRINTING: int +PRINTER_STATUS_PROCESSING: int +PRINTER_STATUS_SERVER_UNKNOWN: int +PRINTER_STATUS_TONER_LOW: int +PRINTER_STATUS_USER_INTERVENTION: int +PRINTER_STATUS_WAITING: int +PRINTER_STATUS_WARMING_UP: int +PRINTER_WRITE: int +SERVER_ACCESS_ADMINISTER: int +SERVER_ACCESS_ENUMERATE: int +SERVER_ALL_ACCESS: int +SERVER_EXECUTE: int +SERVER_READ: int +SERVER_WRITE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32process.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32process.pyi new file mode 100644 index 00000000..c80062a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32process.pyi @@ -0,0 +1,122 @@ +from _typeshed import Incomplete + +import _win32typing +from win32.lib.pywintypes import error as error + +def STARTUPINFO() -> _win32typing.PySTARTUPINFO: ... +def beginthreadex(sa: _win32typing.PySECURITY_ATTRIBUTES, stackSize, entryPoint, args, flags) -> tuple[int, Incomplete]: ... +def CreateRemoteThread( + hprocess: int, sa: _win32typing.PySECURITY_ATTRIBUTES, stackSize, entryPoint, Parameter, flags +) -> tuple[int, Incomplete]: ... +def CreateProcess( + __appName: str | None, + __commandLine: str, + __processAttributes: _win32typing.PySECURITY_ATTRIBUTES | None, + __threadAttributes: _win32typing.PySECURITY_ATTRIBUTES | None, + __bInheritHandles: int | bool, + __dwCreationFlags: int, + __newEnvironment: dict[str, str] | None, + __currentDirectory: str | None, + __startupinfo: _win32typing.PySTARTUPINFO, +) -> tuple[int, int, Incomplete, Incomplete]: ... +def CreateProcessAsUser( + hToken: int, + appName: str, + commandLine: str, + processAttributes: _win32typing.PySECURITY_ATTRIBUTES, + threadAttributes: _win32typing.PySECURITY_ATTRIBUTES, + bInheritHandles, + dwCreationFlags, + newEnvironment, + currentDirectory: str, + startupinfo: _win32typing.PySTARTUPINFO, +) -> tuple[int, int, Incomplete, Incomplete]: ... +def GetCurrentProcess() -> int: ... +def GetProcessVersion(processId): ... +def GetCurrentProcessId(): ... +def GetStartupInfo() -> _win32typing.PySTARTUPINFO: ... +def GetPriorityClass(handle: int): ... +def GetExitCodeThread(handle: int): ... +def GetExitCodeProcess(__handle: int) -> int: ... +def GetWindowThreadProcessId(__hwnd: int | None) -> tuple[int, int]: ... +def SetThreadPriority(handle: int, nPriority) -> None: ... +def GetThreadPriority(handle: int): ... +def GetProcessPriorityBoost(Process: int): ... +def SetProcessPriorityBoost(Process: int, DisablePriorityBoost) -> None: ... 
+def GetThreadPriorityBoost(Thread: int): ... +def SetThreadPriorityBoost(Thread: int, DisablePriorityBoost) -> None: ... +def GetThreadIOPendingFlag(Thread: int): ... +def GetThreadTimes(Thread: int): ... +def GetProcessId(Process: int): ... +def SetPriorityClass(__handle: int, __dwPriorityClass: int) -> None: ... +def AttachThreadInput(idAttach, idAttachTo, Attach) -> None: ... +def SetThreadIdealProcessor(handle: int, dwIdealProcessor): ... +def GetProcessAffinityMask(hProcess: int) -> tuple[Incomplete, Incomplete]: ... +def SetProcessAffinityMask(hProcess: int, mask) -> None: ... +def SetThreadAffinityMask(hThread: int, ThreadAffinityMask): ... +def SuspendThread(handle: int): ... +def ResumeThread(handle: int): ... +def TerminateProcess(__handle: int, __exitCode: int) -> None: ... +def ExitProcess(exitCode) -> None: ... +def EnumProcesses() -> tuple[Incomplete, Incomplete]: ... +def EnumProcessModules(hProcess: int) -> tuple[Incomplete, Incomplete]: ... +def EnumProcessModulesEx(hProcess: int, FilterFlag) -> tuple[Incomplete, Incomplete]: ... +def GetModuleFileNameEx(hProcess: int, hModule: int): ... +def GetProcessMemoryInfo(hProcess: int): ... +def GetProcessTimes(hProcess: int): ... +def GetProcessIoCounters(hProcess: int): ... +def GetProcessWindowStation() -> None: ... +def GetProcessWorkingSetSize(hProcess: int) -> tuple[Incomplete, Incomplete]: ... +def SetProcessWorkingSetSize(hProcess: int, MinimumWorkingSetSize, MaximumWorkingSetSize) -> None: ... +def GetProcessShutdownParameters() -> tuple[Incomplete, Incomplete]: ... +def SetProcessShutdownParameters(Level, Flags) -> None: ... +def GetGuiResources(Process: int, Flags): ... +def IsWow64Process(__Process: int | None = ...) -> bool: ... +def ReadProcessMemory(*args, **kwargs): ... # incomplete +def VirtualAllocEx(*args, **kwargs): ... # incomplete +def VirtualFreeEx(*args, **kwargs): ... # incomplete +def WriteProcessMemory(*args, **kwargs): ... 
# incomplete + +ABOVE_NORMAL_PRIORITY_CLASS: int +BELOW_NORMAL_PRIORITY_CLASS: int +CREATE_BREAKAWAY_FROM_JOB: int +CREATE_DEFAULT_ERROR_MODE: int +CREATE_NEW_CONSOLE: int +CREATE_NEW_PROCESS_GROUP: int +CREATE_NO_WINDOW: int +CREATE_PRESERVE_CODE_AUTHZ_LEVEL: int +CREATE_SEPARATE_WOW_VDM: int +CREATE_SHARED_WOW_VDM: int +CREATE_SUSPENDED: int +CREATE_UNICODE_ENVIRONMENT: int +DEBUG_ONLY_THIS_PROCESS: int +DEBUG_PROCESS: int +DETACHED_PROCESS: int +HIGH_PRIORITY_CLASS: int +IDLE_PRIORITY_CLASS: int +MAXIMUM_PROCESSORS: int +NORMAL_PRIORITY_CLASS: int +REALTIME_PRIORITY_CLASS: int +STARTF_FORCEOFFFEEDBACK: int +STARTF_FORCEONFEEDBACK: int +STARTF_RUNFULLSCREEN: int +STARTF_USECOUNTCHARS: int +STARTF_USEFILLATTRIBUTE: int +STARTF_USEPOSITION: int +STARTF_USESHOWWINDOW: int +STARTF_USESIZE: int +STARTF_USESTDHANDLES: int +THREAD_MODE_BACKGROUND_BEGIN: int +THREAD_MODE_BACKGROUND_END: int +THREAD_PRIORITY_ABOVE_NORMAL: int +THREAD_PRIORITY_BELOW_NORMAL: int +THREAD_PRIORITY_HIGHEST: int +THREAD_PRIORITY_IDLE: int +THREAD_PRIORITY_LOWEST: int +THREAD_PRIORITY_NORMAL: int +THREAD_PRIORITY_TIME_CRITICAL: int +LIST_MODULES_32BIT: int +LIST_MODULES_64BIT: int +LIST_MODULES_ALL: int +LIST_MODULES_DEFAULT: int +UNICODE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32profile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32profile.pyi new file mode 100644 index 00000000..155a77cb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32profile.pyi @@ -0,0 +1,19 @@ +import _win32typing + +def CreateEnvironmentBlock(Token: int, Inherit): ... +def DeleteProfile(SidString: str, ProfilePath: str | None = ..., ComputerName: str | None = ...) -> None: ... +def ExpandEnvironmentStringsForUser(Token: int, Src: str) -> str: ... +def GetAllUsersProfileDirectory() -> str: ... +def GetDefaultUserProfileDirectory() -> str: ... +def GetEnvironmentStrings(): ... +def GetProfilesDirectory() -> str: ... +def GetProfileType(): ... +def GetUserProfileDirectory(Token: int) -> str: ... +def LoadUserProfile(hToken: int, ProfileInfo: _win32typing.PyPROFILEINFO) -> _win32typing.PyHKEY: ... +def UnloadUserProfile(Token: int, Profile: _win32typing.PyHKEY) -> None: ... + +PI_APPLYPOLICY: int +PI_NOUI: int +PT_MANDATORY: int +PT_ROAMING: int +PT_TEMPORARY: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32ras.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32ras.pyi new file mode 100644 index 00000000..57be9a17 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32ras.pyi @@ -0,0 +1,54 @@ +from _typeshed import Incomplete + +import _win32typing +from win32.lib.pywintypes import error as error + +def CreatePhonebookEntry(hWnd: int, fileName: str | None = ...) -> None: ... +def Dial(dialExtensions, fileName: str, RasDialParams: _win32typing.RASDIALPARAMS, callback) -> tuple[Incomplete, Incomplete]: ... +def EditPhonebookEntry(hWnd: int, fileName: str, entryName: str | None = ...) -> None: ... +def EnumConnections(): ... +def EnumEntries(reserved: str | None = ..., fileName: str | None = ...) -> None: ... +def GetConnectStatus(hrasconn) -> tuple[Incomplete, Incomplete, str, str]: ... 
+def GetEntryDialParams( + fileName: str, entryName: str +) -> tuple[Incomplete, Incomplete, Incomplete, Incomplete, Incomplete, Incomplete, Incomplete]: ... +def GetErrorString(error) -> str: ... # noqa: F811 +def HangUp(hras) -> None: ... +def IsHandleValid(__hras: int | None) -> bool: ... +def SetEntryDialParams(fileName: str, RasDialParams, bSavePassword) -> None: ... +def RASDIALEXTENSIONS(*args, **kwargs): ... # incomplete + +RASCS_AllDevicesConnected: int +RASCS_AuthAck: int +RASCS_AuthCallback: int +RASCS_AuthChangePassword: int +RASCS_Authenticate: int +RASCS_Authenticated: int +RASCS_AuthLinkSpeed: int +RASCS_AuthNotify: int +RASCS_AuthProject: int +RASCS_AuthRetry: int +RASCS_CallbackComplete: int +RASCS_CallbackSetByCaller: int +RASCS_ConnectDevice: int +RASCS_Connected: int +RASCS_DeviceConnected: int +RASCS_Disconnected: int +RASCS_Interactive: int +RASCS_LogonNetwork: int +RASCS_OpenPort: int +RASCS_PasswordExpired: int +RASCS_PortOpened: int +RASCS_PrepareForCallback: int +RASCS_Projected: int +RASCS_ReAuthenticate: int +RASCS_RetryAuthentication: int +RASCS_StartAuthentication: int +RASCS_WaitForCallback: int +RASCS_WaitForModemReset: int + +def GetEapUserIdentity(*args, **kwargs): ... # incomplete + +RASEAPF_Logon: int +RASEAPF_NonInteractive: int +RASEAPF_Preview: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32security.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32security.pyi new file mode 100644 index 00000000..d47f00a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32security.pyi @@ -0,0 +1,571 @@ +from _typeshed import Incomplete + +import _win32typing +from win32.lib.pywintypes import error as error + +def DsGetSpn( + ServiceType, + ServiceClass: str, + ServiceName: str, + InstancePort: int = ..., + InstanceNames: tuple[str, ...] | None = ..., + InstancePorts: tuple[Incomplete, ...] | None = ..., +) -> tuple[str, ...]: ... +def DsWriteAccountSpn(hDS: _win32typing.PyDS_HANDLE, Operation, Account: str, Spns: tuple[str, ...]) -> None: ... +def DsBind(DomainController: str, DnsDomainName: str) -> _win32typing.PyDS_HANDLE: ... +def DsUnBind(hDS: _win32typing.PyDS_HANDLE) -> None: ... +def DsGetDcName( + computerName: str | None = ..., + domainName: str | None = ..., + domainGUID: _win32typing.PyIID | None = ..., + siteName: str | None = ..., + flags: int = ..., +): ... +def DsCrackNames( + hds: _win32typing.PyDS_HANDLE, flags, formatOffered, formatDesired, names: list[Incomplete] +) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def ACL(__bufSize: int = ...) -> _win32typing.PyACL: ... +def SID() -> _win32typing.PySID: ... +def SECURITY_ATTRIBUTES() -> _win32typing.PySECURITY_ATTRIBUTES: ... +def SECURITY_DESCRIPTOR() -> _win32typing.PySECURITY_DESCRIPTOR: ... +def ImpersonateNamedPipeClient(handle) -> None: ... +def ImpersonateLoggedOnUser(handle: int) -> None: ... +def ImpersonateAnonymousToken(ThreadHandle: int) -> None: ... +def IsTokenRestricted(__TokenHandle: int | None) -> bool: ... +def RevertToSelf() -> None: ... +def LogonUser( + __Username: str, __Domain: str | None, __Password: str, __LogonType: int, __LogonProvider: int +) -> _win32typing.PyHANDLE: ... +def LogonUserEx( + Username: str, Domain: str, Password: str, LogonType, LogonProvider +) -> tuple[int, _win32typing.PySID, Incomplete, Incomplete]: ... 
+def LookupAccountName(__systemName: str | None, __accountName: str) -> tuple[_win32typing.PySID, str, int]: ... +def LookupAccountSid(__systemName: str, __sid: _win32typing.PySID) -> tuple[str, str, Incomplete]: ... +def GetBinarySid(__SID: str) -> _win32typing.PySID: ... +def SetSecurityInfo( + handle: int, + ObjectType, + SecurityInfo, + Owner: _win32typing.PySID, + Group: _win32typing.PySID, + Dacl: _win32typing.PyACL, + Sacl: _win32typing.PyACL, +) -> None: ... +def GetSecurityInfo(handle: int, ObjectType, SecurityInfo) -> _win32typing.PySECURITY_DESCRIPTOR: ... +def SetNamedSecurityInfo( + __ObjectName: str, + __ObjectType: int, + __SecurityInfo: int, + __Owner: _win32typing.PySID | None, + __Group: _win32typing.PySID | None, + __Dacl: _win32typing.PyACL | None, + __Sacl: _win32typing.PyACL | None, +) -> None: ... +def GetNamedSecurityInfo(__ObjectName: str, __ObjectType: int, __SecurityInfo: int) -> _win32typing.PySECURITY_DESCRIPTOR: ... +def OpenProcessToken(processHandle, desiredAccess) -> int: ... +def LookupPrivilegeValue(systemName: str, privilegeName: str) -> _win32typing.LARGE_INTEGER: ... +def LookupPrivilegeName(SystemName: str, luid: _win32typing.LARGE_INTEGER) -> str: ... +def LookupPrivilegeDisplayName(SystemName: str, Name: str) -> str: ... +def AdjustTokenPrivileges( + TokenHandle: int, bDisableAllPrivileges, NewState: _win32typing.PyTOKEN_PRIVILEGES +) -> _win32typing.PyTOKEN_PRIVILEGES: ... +def AdjustTokenGroups(TokenHandle: int, ResetToDefault, NewState: _win32typing.PyTOKEN_GROUPS) -> _win32typing.PyTOKEN_GROUPS: ... +def GetTokenInformation(TokenHandle: int, TokenInformationClass): ... +def OpenThreadToken(handle: int, desiredAccess, openAsSelf): ... +def SetThreadToken(Thread: int, Token: int) -> None: ... +def GetFileSecurity(__filename: str, __info: int = ...) -> _win32typing.PySECURITY_DESCRIPTOR: ... +def SetFileSecurity(__filename: str, __info: int, __security: _win32typing.PySECURITY_DESCRIPTOR) -> None: ... +def GetUserObjectSecurity(handle: int, info) -> _win32typing.PySECURITY_DESCRIPTOR: ... +def SetUserObjectSecurity(handle: int, info, security: _win32typing.PySECURITY_DESCRIPTOR) -> None: ... +def GetKernelObjectSecurity(handle: int, info) -> _win32typing.PySECURITY_DESCRIPTOR: ... +def SetKernelObjectSecurity(handle: int, info, security: _win32typing.PySECURITY_DESCRIPTOR) -> None: ... +def SetTokenInformation(TokenHandle: int, TokenInformationClass, TokenInformation) -> None: ... +def LsaOpenPolicy(system_name: str, access_mask) -> _win32typing.PyLSA_HANDLE: ... +def LsaClose(PolicyHandle: int) -> None: ... +def LsaQueryInformationPolicy(PolicyHandle: _win32typing.PyLSA_HANDLE, InformationClass) -> None: ... +def LsaSetInformationPolicy(PolicyHandle: _win32typing.PyLSA_HANDLE, InformationClass, Information) -> None: ... +def LsaAddAccountRights( + PolicyHandle: _win32typing.PyLSA_HANDLE, AccountSid: _win32typing.PySID, UserRights: tuple[Incomplete, ...] +) -> None: ... +def LsaRemoveAccountRights( + PolicyHandle: _win32typing.PyLSA_HANDLE, AccountSid: _win32typing.PySID, AllRights, UserRights: tuple[Incomplete, ...] +) -> None: ... +def LsaEnumerateAccountRights(PolicyHandle: _win32typing.PyLSA_HANDLE, AccountSid: _win32typing.PySID) -> list[str]: ... +def LsaEnumerateAccountsWithUserRight(PolicyHandle: _win32typing.PyLSA_HANDLE, UserRight) -> tuple[_win32typing.PySID, ...]: ... +def ConvertSidToStringSid(__Sid: _win32typing.PySID) -> str: ... +def ConvertStringSidToSid(__StringSid: str) -> _win32typing.PySID: ... 
+def ConvertSecurityDescriptorToStringSecurityDescriptor( + SecurityDescriptor: _win32typing.PySECURITY_DESCRIPTOR, RequestedStringSDRevision, SecurityInformation +) -> str: ... +def ConvertStringSecurityDescriptorToSecurityDescriptor( + StringSecurityDescriptor: str, StringSDRevision +) -> _win32typing.PySECURITY_DESCRIPTOR: ... +def LsaStorePrivateData(PolicyHandle: _win32typing.PyLSA_HANDLE, KeyName: str, PrivateData) -> None: ... +def LsaRetrievePrivateData(PolicyHandle: _win32typing.PyLSA_HANDLE, KeyName: str) -> str: ... +def LsaRegisterPolicyChangeNotification(InformationClass, NotificationEventHandle: int) -> None: ... +def LsaUnregisterPolicyChangeNotification(InformationClass, NotificationEventHandle: int) -> None: ... +def CryptEnumProviders() -> list[tuple[str, Incomplete]]: ... +def EnumerateSecurityPackages() -> tuple[Incomplete, ...]: ... +def AllocateLocallyUniqueId() -> None: ... +def ImpersonateSelf(ImpersonationLevel) -> None: ... +def DuplicateToken(ExistingTokenHandle: int, ImpersonationLevel) -> int: ... +def DuplicateTokenEx( + ExistingToken: int, + ImpersonationLevel, + DesiredAccess, + TokenType, + TokenAttributes: _win32typing.PySECURITY_ATTRIBUTES | None = ..., +) -> int: ... +def CheckTokenMembership(TokenHandle: int, SidToCheck: _win32typing.PySID): ... +def CreateRestrictedToken( + ExistingTokenHandle: int, + Flags, + SidsToDisable: tuple[_win32typing.PySID_AND_ATTRIBUTES, ...], + PrivilegesToDelete: tuple[_win32typing.PyLUID_AND_ATTRIBUTES, ...], + SidsToRestrict: tuple[_win32typing.PySID_AND_ATTRIBUTES, ...], +) -> int: ... +def LsaRegisterLogonProcess(LogonProcessName: str) -> _win32typing.PyLsaLogon_HANDLE: ... +def LsaConnectUntrusted() -> _win32typing.PyLsaLogon_HANDLE: ... +def LsaDeregisterLogonProcess(LsaHandle: _win32typing.PyLsaLogon_HANDLE) -> None: ... +def LsaLookupAuthenticationPackage(LsaHandle: _win32typing.PyLsaLogon_HANDLE, PackageName: str): ... +def LsaEnumerateLogonSessions() -> tuple[Incomplete, ...]: ... +def LsaGetLogonSessionData(LogonId) -> tuple[Incomplete, ...]: ... +def AcquireCredentialsHandle( + Principal, Package, CredentialUse, LogonID, AuthData +) -> tuple[_win32typing.PyCredHandle, _win32typing.PyTime]: ... +def InitializeSecurityContext( + Credential: _win32typing.PyCredHandle, + Context: _win32typing.PyCtxtHandle, + TargetName, + ContextReq, + TargetDataRep, + pInput: _win32typing.PySecBufferDesc, + NewContext: _win32typing.PyCtxtHandle, + pOutput: _win32typing.PySecBufferDesc, +) -> tuple[Incomplete, Incomplete, _win32typing.PyTime]: ... +def AcceptSecurityContext( + Credential: _win32typing.PyCredHandle, + Context: _win32typing.PyCtxtHandle, + pInput: _win32typing.PySecBufferDesc, + ContextReq, + TargetDataRep, + NewContext: _win32typing.PyCtxtHandle, + pOutput: _win32typing.PySecBufferDesc, +) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def QuerySecurityPackageInfo(PackageName): ... +def LsaCallAuthenticationPackage( + LsaHandle: _win32typing.PyLsaLogon_HANDLE, AuthenticationPackage, MessageType, ProtocolSubmitBuffer +) -> None: ... +def TranslateName(accountName: str, accountNameFormat, accountNameFormat1, numChars=...) -> str: ... +def CreateWellKnownSid(WellKnownSidType, DomainSid: _win32typing.PySID | None = ...) -> _win32typing.PySID: ... +def MapGenericMask(AccessMask, GenericMapping: tuple[Incomplete, Incomplete, Incomplete, Incomplete]): ... 
+ +ACCESS_ALLOWED_ACE_TYPE: int +ACCESS_ALLOWED_OBJECT_ACE_TYPE: int +ACCESS_DENIED_ACE_TYPE: int +ACCESS_DENIED_OBJECT_ACE_TYPE: int +ACL_REVISION: int +ACL_REVISION_DS: int +AuditCategoryAccountLogon: int +AuditCategoryAccountManagement: int +AuditCategoryDetailedTracking: int +AuditCategoryDirectoryServiceAccess: int +AuditCategoryLogon: int +AuditCategoryObjectAccess: int +AuditCategoryPolicyChange: int +AuditCategoryPrivilegeUse: int +AuditCategorySystem: int +CONTAINER_INHERIT_ACE: int +DACL_SECURITY_INFORMATION: int +DENY_ACCESS: int +DISABLE_MAX_PRIVILEGE: int +DS_SPN_ADD_SPN_OP: int +DS_SPN_DELETE_SPN_OP: int +DS_SPN_DN_HOST: int +DS_SPN_DNS_HOST: int +DS_SPN_DOMAIN: int +DS_SPN_NB_DOMAIN: int +DS_SPN_NB_HOST: int +DS_SPN_REPLACE_SPN_OP: int +DS_SPN_SERVICE: int +FAILED_ACCESS_ACE_FLAG: int +GRANT_ACCESS: int +GROUP_SECURITY_INFORMATION: int +INHERIT_ONLY_ACE: int +INHERITED_ACE: int +LABEL_SECURITY_INFORMATION: int +LOGON32_LOGON_BATCH: int +LOGON32_LOGON_INTERACTIVE: int +LOGON32_LOGON_NETWORK: int +LOGON32_LOGON_NETWORK_CLEARTEXT: int +LOGON32_LOGON_NEW_CREDENTIALS: int +LOGON32_LOGON_SERVICE: int +LOGON32_LOGON_UNLOCK: int +LOGON32_PROVIDER_DEFAULT: int +LOGON32_PROVIDER_WINNT35: int +LOGON32_PROVIDER_WINNT40: int +LOGON32_PROVIDER_WINNT50: int +NO_INHERITANCE: int +NO_PROPAGATE_INHERIT_ACE: int +NOT_USED_ACCESS: int +OBJECT_INHERIT_ACE: int +OWNER_SECURITY_INFORMATION: int +POLICY_ALL_ACCESS: int +POLICY_AUDIT_EVENT_FAILURE: int +POLICY_AUDIT_EVENT_NONE: int +POLICY_AUDIT_EVENT_SUCCESS: int +POLICY_AUDIT_EVENT_UNCHANGED: int +POLICY_AUDIT_LOG_ADMIN: int +POLICY_CREATE_ACCOUNT: int +POLICY_CREATE_PRIVILEGE: int +POLICY_CREATE_SECRET: int +POLICY_EXECUTE: int +POLICY_GET_PRIVATE_INFORMATION: int +POLICY_LOOKUP_NAMES: int +POLICY_NOTIFICATION: int +POLICY_READ: int +POLICY_SERVER_ADMIN: int +POLICY_SET_AUDIT_REQUIREMENTS: int +POLICY_SET_DEFAULT_QUOTA_LIMITS: int +POLICY_TRUST_ADMIN: int +POLICY_VIEW_AUDIT_INFORMATION: int +POLICY_VIEW_LOCAL_INFORMATION: int +POLICY_WRITE: int +PolicyAccountDomainInformation: int +PolicyAuditEventsInformation: int +PolicyAuditFullQueryInformation: int +PolicyAuditFullSetInformation: int +PolicyAuditLogInformation: int +PolicyDefaultQuotaInformation: int +PolicyDnsDomainInformation: int +PolicyLsaServerRoleInformation: int +PolicyModificationInformation: int +PolicyNotifyAccountDomainInformation: int +PolicyNotifyAuditEventsInformation: int +PolicyNotifyDnsDomainInformation: int +PolicyNotifyDomainEfsInformation: int +PolicyNotifyDomainKerberosTicketInformation: int +PolicyNotifyMachineAccountPasswordInformation: int +PolicyNotifyServerRoleInformation: int +PolicyPdAccountInformation: int +PolicyPrimaryDomainInformation: int +PolicyReplicaSourceInformation: int +PolicyServerDisabled: int +PolicyServerEnabled: int +PolicyServerRoleBackup: int +PolicyServerRolePrimary: int +PROTECTED_DACL_SECURITY_INFORMATION: int +PROTECTED_SACL_SECURITY_INFORMATION: int +REVOKE_ACCESS: int +SACL_SECURITY_INFORMATION: int +SANDBOX_INERT: int +SDDL_REVISION_1: int +SE_DACL_AUTO_INHERITED: int +SE_DACL_DEFAULTED: int +SE_DACL_PRESENT: int +SE_DACL_PROTECTED: int +SE_DS_OBJECT: int +SE_DS_OBJECT_ALL: int +SE_FILE_OBJECT: int +SE_GROUP_DEFAULTED: int +SE_GROUP_ENABLED: int +SE_GROUP_ENABLED_BY_DEFAULT: int +SE_GROUP_LOGON_ID: int +SE_GROUP_MANDATORY: int +SE_GROUP_OWNER: int +SE_GROUP_RESOURCE: int +SE_GROUP_USE_FOR_DENY_ONLY: int +SE_KERNEL_OBJECT: int +SE_LMSHARE: int +SE_OWNER_DEFAULTED: int +SE_PRINTER: int +SE_PRIVILEGE_ENABLED: int 
+SE_PRIVILEGE_ENABLED_BY_DEFAULT: int +SE_PRIVILEGE_REMOVED: int +SE_PRIVILEGE_USED_FOR_ACCESS: int +SE_PROVIDER_DEFINED_OBJECT: int +SE_REGISTRY_KEY: int +SE_REGISTRY_WOW64_32KEY: int +SE_SACL_AUTO_INHERITED: int +SE_SACL_DEFAULTED: int +SE_SACL_PRESENT: int +SE_SACL_PROTECTED: int +SE_SELF_RELATIVE: int +SE_SERVICE: int +SE_UNKNOWN_OBJECT_TYPE: int +SE_WINDOW_OBJECT: int +SE_WMIGUID_OBJECT: int +SECPKG_CRED_BOTH: int +SECPKG_CRED_INBOUND: int +SECPKG_CRED_OUTBOUND: int +SECPKG_FLAG_ACCEPT_WIN32_NAME: int +SECPKG_FLAG_CLIENT_ONLY: int +SECPKG_FLAG_CONNECTION: int +SECPKG_FLAG_DATAGRAM: int +SECPKG_FLAG_EXTENDED_ERROR: int +SECPKG_FLAG_IMPERSONATION: int +SECPKG_FLAG_INTEGRITY: int +SECPKG_FLAG_MULTI_REQUIRED: int +SECPKG_FLAG_PRIVACY: int +SECPKG_FLAG_STREAM: int +SECPKG_FLAG_TOKEN_ONLY: int +SECURITY_CREATOR_SID_AUTHORITY: int +SECURITY_LOCAL_SID_AUTHORITY: int +SECURITY_NON_UNIQUE_AUTHORITY: int +SECURITY_NT_AUTHORITY: int +SECURITY_NULL_SID_AUTHORITY: int +SECURITY_RESOURCE_MANAGER_AUTHORITY: int +SECURITY_WORLD_SID_AUTHORITY: int +SecurityAnonymous: int +SecurityDelegation: int +SecurityIdentification: int +SecurityImpersonation: int +SET_ACCESS: int +SET_AUDIT_FAILURE: int +SET_AUDIT_SUCCESS: int +SidTypeAlias: int +SidTypeComputer: int +SidTypeDeletedAccount: int +SidTypeDomain: int +SidTypeGroup: int +SidTypeInvalid: int +SidTypeUnknown: int +SidTypeUser: int +SidTypeWellKnownGroup: int +STYPE_DEVICE: int +STYPE_DISKTREE: int +STYPE_IPC: int +STYPE_PRINTQ: int +STYPE_SPECIAL: int +STYPE_TEMPORARY: int +SUB_CONTAINERS_AND_OBJECTS_INHERIT: int +SUB_CONTAINERS_ONLY_INHERIT: int +SUB_OBJECTS_ONLY_INHERIT: int +SUCCESSFUL_ACCESS_ACE_FLAG: int +SYSTEM_AUDIT_ACE_TYPE: int +SYSTEM_AUDIT_OBJECT_ACE_TYPE: int +TOKEN_ADJUST_DEFAULT: int +TOKEN_ADJUST_GROUPS: int +TOKEN_ADJUST_PRIVILEGES: int +TOKEN_ALL_ACCESS: int +TOKEN_ASSIGN_PRIMARY: int +TOKEN_DUPLICATE: int +TOKEN_EXECUTE: int +TOKEN_IMPERSONATE: int +TOKEN_QUERY: int +TOKEN_QUERY_SOURCE: int +TOKEN_READ: int +TOKEN_WRITE: int +TokenImpersonation: int +TokenPrimary: int +TrustedControllersInformation: int +TrustedDomainAuthInformation: int +TrustedDomainAuthInformationInternal: int +TrustedDomainFullInformation: int +TrustedDomainFullInformation2Internal: int +TrustedDomainFullInformationInternal: int +TrustedDomainInformationBasic: int +TrustedDomainInformationEx: int +TrustedDomainInformationEx2Internal: int +TrustedDomainNameInformation: int +TrustedPasswordInformation: int +TrustedPosixOffsetInformation: int +TRUSTEE_BAD_FORM: int +TRUSTEE_IS_ALIAS: int +TRUSTEE_IS_COMPUTER: int +TRUSTEE_IS_DELETED: int +TRUSTEE_IS_DOMAIN: int +TRUSTEE_IS_GROUP: int +TRUSTEE_IS_INVALID: int +TRUSTEE_IS_NAME: int +TRUSTEE_IS_OBJECTS_AND_NAME: int +TRUSTEE_IS_OBJECTS_AND_SID: int +TRUSTEE_IS_SID: int +TRUSTEE_IS_UNKNOWN: int +TRUSTEE_IS_USER: int +TRUSTEE_IS_WELL_KNOWN_GROUP: int +UNPROTECTED_DACL_SECURITY_INFORMATION: int +UNPROTECTED_SACL_SECURITY_INFORMATION: int +CredHandleType = _win32typing.PyCredHandle +CtxtHandleType = _win32typing.PyCtxtHandle + +def DsListDomainsInSite(*args, **kwargs): ... # incomplete +def DsListInfoForServer(*args, **kwargs): ... # incomplete +def DsListRoles(*args, **kwargs): ... # incomplete +def DsListServersForDomainInSite(*args, **kwargs): ... # incomplete +def DsListServersInSite(*args, **kwargs): ... # incomplete +def DsListSites(*args, **kwargs): ... # incomplete +def GetPolicyHandle(*args, **kwargs): ... 
# incomplete + +MICROSOFT_KERBEROS_NAME_A: bytes +MSV1_0_PACKAGE_NAME: bytes +PyCredHandleType = _win32typing.PyCredHandle +PyCtxtHandleType = _win32typing.PyCtxtHandle +PySecBufferDescType = _win32typing.PySecBufferDesc +PySecBufferType = _win32typing.PySecBuffer +SE_ASSIGNPRIMARYTOKEN_NAME: str +SE_AUDIT_NAME: str +SE_BACKUP_NAME: str +SE_BATCH_LOGON_NAME: str +SE_CHANGE_NOTIFY_NAME: str +SE_CREATE_GLOBAL_NAME: str +SE_CREATE_PAGEFILE_NAME: str +SE_CREATE_PERMANENT_NAME: str +SE_CREATE_SYMBOLIC_LINK_NAME: str +SE_CREATE_TOKEN_NAME: str +SE_DEBUG_NAME: str +SE_DENY_BATCH_LOGON_NAME: str +SE_DENY_INTERACTIVE_LOGON_NAME: str +SE_DENY_NETWORK_LOGON_NAME: str +SE_DENY_REMOTE_INTERACTIVE_LOGON_NAME: str +SE_DENY_SERVICE_LOGON_NAME: str +SE_ENABLE_DELEGATION_NAME: str +SE_GROUP_INTEGRITY: int +SE_GROUP_INTEGRITY_ENABLED: int +SE_IMPERSONATE_NAME: str +SE_INCREASE_QUOTA_NAME: str +SE_INC_BASE_PRIORITY_NAME: str +SE_INC_WORKING_SET_NAME: str +SE_INTERACTIVE_LOGON_NAME: str +SE_LOAD_DRIVER_NAME: str +SE_LOCK_MEMORY_NAME: str +SE_MACHINE_ACCOUNT_NAME: str +SE_MANAGE_VOLUME_NAME: str +SE_NETWORK_LOGON_NAME: str +SE_PROF_SINGLE_PROCESS_NAME: str +SE_RELABEL_NAME: str +SE_REMOTE_INTERACTIVE_LOGON_NAME: str +SE_REMOTE_SHUTDOWN_NAME: str +SE_RESTORE_NAME: str +SE_SECURITY_NAME: str +SE_SERVICE_LOGON_NAME: str +SE_SHUTDOWN_NAME: str +SE_SYNC_AGENT_NAME: str +SE_SYSTEMTIME_NAME: str +SE_SYSTEM_ENVIRONMENT_NAME: str +SE_SYSTEM_PROFILE_NAME: str +SE_TAKE_OWNERSHIP_NAME: str +SE_TCB_NAME: str +SE_TIME_ZONE_NAME: str +SE_TRUSTED_CREDMAN_ACCESS_NAME: str +SE_UNDOCK_NAME: str +SE_UNSOLICITED_INPUT_NAME: str +SYSTEM_MANDATORY_LABEL_NO_EXECUTE_UP: int +SYSTEM_MANDATORY_LABEL_NO_READ_UP: int +SYSTEM_MANDATORY_LABEL_NO_WRITE_UP: int +SYSTEM_MANDATORY_LABEL_VALID_MASK: int +SecBufferDescType = _win32typing.PySecBufferDesc +SecBufferType = _win32typing.PySecBuffer +TOKEN_MANDATORY_POLICY_NEW_PROCESS_MIN: int +TOKEN_MANDATORY_POLICY_NO_WRITE_UP: int +TOKEN_MANDATORY_POLICY_OFF: int +TOKEN_MANDATORY_POLICY_VALID_MASK: int +TokenAccessInformation: int +TokenAuditPolicy: int +TokenDefaultDacl: int +TokenElevation: int +TokenElevationType: int +TokenElevationTypeDefault: int +TokenElevationTypeFull: int +TokenElevationTypeLimited: int +TokenGroups: int +TokenGroupsAndPrivileges: int +TokenHasRestrictions: int +TokenImpersonationLevel: int +TokenIntegrityLevel: int +TokenLinkedToken: int +TokenLogonSid: int +TokenMandatoryPolicy: int +TokenOrigin: int +TokenOwner: int +TokenPrimaryGroup: int +TokenPrivileges: int +TokenRestrictedSids: int +TokenSandBoxInert: int +TokenSessionId: int +TokenSessionReference: int +TokenSource: int +TokenStatistics: int +TokenType: int +TokenUIAccess: int +TokenUser: int +TokenVirtualizationAllowed: int +TokenVirtualizationEnabled: int +UNICODE: int +WinAccountAdministratorSid: int +WinAccountCertAdminsSid: int +WinAccountComputersSid: int +WinAccountControllersSid: int +WinAccountDomainAdminsSid: int +WinAccountDomainGuestsSid: int +WinAccountDomainUsersSid: int +WinAccountEnterpriseAdminsSid: int +WinAccountGuestSid: int +WinAccountKrbtgtSid: int +WinAccountPolicyAdminsSid: int +WinAccountRasAndIasServersSid: int +WinAccountReadonlyControllersSid: int +WinAccountSchemaAdminsSid: int +WinAnonymousSid: int +WinAuthenticatedUserSid: int +WinBatchSid: int +WinBuiltinAccountOperatorsSid: int +WinBuiltinAdministratorsSid: int +WinBuiltinAuthorizationAccessSid: int +WinBuiltinBackupOperatorsSid: int +WinBuiltinCryptoOperatorsSid: int +WinBuiltinDCOMUsersSid: int +WinBuiltinDomainSid: int 
+WinBuiltinEventLogReadersGroup: int +WinBuiltinGuestsSid: int +WinBuiltinIUsersSid: int +WinBuiltinIncomingForestTrustBuildersSid: int +WinBuiltinNetworkConfigurationOperatorsSid: int +WinBuiltinPerfLoggingUsersSid: int +WinBuiltinPerfMonitoringUsersSid: int +WinBuiltinPowerUsersSid: int +WinBuiltinPreWindows2000CompatibleAccessSid: int +WinBuiltinPrintOperatorsSid: int +WinBuiltinRemoteDesktopUsersSid: int +WinBuiltinReplicatorSid: int +WinBuiltinSystemOperatorsSid: int +WinBuiltinTerminalServerLicenseServersSid: int +WinBuiltinUsersSid: int +WinCacheablePrincipalsGroupSid: int +WinCreatorGroupServerSid: int +WinCreatorGroupSid: int +WinCreatorOwnerRightsSid: int +WinCreatorOwnerServerSid: int +WinCreatorOwnerSid: int +WinDialupSid: int +WinDigestAuthenticationSid: int +WinEnterpriseControllersSid: int +WinEnterpriseReadonlyControllersSid: int +WinHighLabelSid: int +WinIUserSid: int +WinInteractiveSid: int +WinLocalServiceSid: int +WinLocalSid: int +WinLocalSystemSid: int +WinLogonIdsSid: int +WinLowLabelSid: int +WinMediumLabelSid: int +WinNTLMAuthenticationSid: int +WinNetworkServiceSid: int +WinNetworkSid: int +WinNonCacheablePrincipalsGroupSid: int +WinNtAuthoritySid: int +WinNullSid: int +WinOtherOrganizationSid: int +WinProxySid: int +WinRemoteLogonIdSid: int +WinRestrictedCodeSid: int +WinSChannelAuthenticationSid: int +WinSelfSid: int +WinServiceSid: int +WinSystemLabelSid: int +WinTerminalServerSid: int +WinThisOrganizationSid: int +WinUntrustedLabelSid: int +WinWorldSid: int +WinWriteRestrictedCodeSid: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32service.pyi new file mode 100644 index 00000000..47492ad6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32service.pyi @@ -0,0 +1,179 @@ +from _typeshed import Incomplete +from collections.abc import Iterable + +import _win32typing +from win32.lib.pywintypes import error as error + +def GetThreadDesktop(ThreadId) -> _win32typing.PyHDESK: ... +def EnumWindowStations() -> tuple[tuple[str, Incomplete], ...]: ... +def GetUserObjectInformation(Handle: int, _type) -> None: ... +def SetUserObjectInformation(Handle: int, info, _type) -> None: ... +def OpenWindowStation(szWinSta, Inherit, DesiredAccess) -> _win32typing.PyHWINSTA: ... +def OpenDesktop(szDesktop, Flags, Inherit, DesiredAccess) -> _win32typing.PyHDESK: ... +def CreateDesktop( + Desktop, Flags, DesiredAccess, SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES +) -> _win32typing.PyHDESK: ... +def OpenInputDesktop(Flags, Inherit, DesiredAccess) -> _win32typing.PyHDESK: ... +def GetProcessWindowStation() -> _win32typing.PyHWINSTA: ... +def CreateWindowStation( + WindowStation, Flags, DesiredAccess, SecurityAttributes: _win32typing.PySECURITY_ATTRIBUTES +) -> _win32typing.PyHWINSTA: ... +def EnumServicesStatus(hSCManager: _win32typing.PySC_HANDLE, ServiceType, ServiceState) -> tuple[Incomplete, ...]: ... +def EnumServicesStatusEx( + SCManager: _win32typing.PySC_HANDLE, ServiceType, ServiceState, InfoLevel, GroupName: Incomplete | None = ... +) -> tuple[Incomplete, ...]: ... +def EnumDependentServices(hService: _win32typing.PySC_HANDLE, ServiceState) -> tuple[Incomplete, ...]: ... +def QueryServiceConfig(hService: _win32typing.PySC_HANDLE): ... +def StartService(hService: _win32typing.PySC_HANDLE, args: Iterable[str] | None) -> None: ... 
+def OpenService(scHandle: _win32typing.PySC_HANDLE, name: str, desiredAccess) -> _win32typing.PySC_HANDLE: ... +def OpenSCManager(machineName: str | None, dbName: str | None, desiredAccess: int) -> _win32typing.PySC_HANDLE: ... +def CloseServiceHandle(scHandle: _win32typing.PySC_HANDLE) -> None: ... +def QueryServiceStatus(hService: _win32typing.PySC_HANDLE) -> _win32typing.SERVICE_STATUS: ... +def QueryServiceStatusEx(hService: _win32typing.PySC_HANDLE) -> _win32typing.SERVICE_STATUS: ... +def SetServiceObjectSecurity( + Handle: _win32typing.PySC_HANDLE, SecurityInformation, SecurityDescriptor: _win32typing.PySECURITY_DESCRIPTOR +) -> None: ... +def QueryServiceObjectSecurity(Handle: _win32typing.PySC_HANDLE, SecurityInformation) -> _win32typing.PySECURITY_DESCRIPTOR: ... +def GetServiceKeyName(hSCManager: _win32typing.PySC_HANDLE, DisplayName): ... +def GetServiceDisplayName(hSCManager: _win32typing.PySC_HANDLE, ServiceName): ... +def SetServiceStatus(scHandle, serviceStatus: _win32typing.SERVICE_STATUS | tuple[int, int, int, int, int, int, int]) -> None: ... +def ControlService(scHandle: _win32typing.PySC_HANDLE, code) -> _win32typing.SERVICE_STATUS: ... +def DeleteService(scHandle: _win32typing.PySC_HANDLE) -> None: ... +def CreateService( + scHandle: _win32typing.PySC_HANDLE, + name: str, + displayName: str, + desiredAccess: int, + serviceType: int, + startType: int, + errorControl: int, + binaryFile: str, + loadOrderGroup: str | None, + bFetchTag: bool, + serviceDeps: Iterable[Incomplete] | None, + acctName: str | None, + password: str | None, +) -> _win32typing.PySC_HANDLE: ... +def ChangeServiceConfig( + hService: _win32typing.PySC_HANDLE, + serviceType: int, + startType: int, + errorControl: int, + binaryFile: str | None, + loadOrderGroup: str | None, + bFetchTag: bool, + serviceDeps: Iterable[Incomplete] | None, + acctName: str | None, + password: str | None, + displayName: str | None, +): ... +def LockServiceDatabase(sc_handle: _win32typing.PySC_HANDLE): ... +def UnlockServiceDatabase(lock): ... +def QueryServiceLockStatus(hSCManager: _win32typing.PySC_HANDLE) -> tuple[Incomplete, str, Incomplete]: ... +def ChangeServiceConfig2(hService: _win32typing.PySC_HANDLE, InfoLevel, info) -> None: ... +def QueryServiceConfig2(hService: _win32typing.PySC_HANDLE, InfoLevel): ... 
+ +DBT_CONFIGCHANGECANCELED: int +DBT_CONFIGCHANGED: int +DBT_CUSTOMEVENT: int +DBT_DEVICEARRIVAL: int +DBT_DEVICEQUERYREMOVE: int +DBT_DEVICEQUERYREMOVEFAILED: int +DBT_DEVICEREMOVECOMPLETE: int +DBT_DEVICEREMOVEPENDING: int +DBT_DEVICETYPESPECIFIC: int +DBT_QUERYCHANGECONFIG: int +DF_ALLOWOTHERACCOUNTHOOK: int +SC_ACTION_NONE: int +SC_ACTION_REBOOT: int +SC_ACTION_RESTART: int +SC_ACTION_RUN_COMMAND: int +SC_ENUM_PROCESS_INFO: int +SC_GROUP_IDENTIFIER: int +SC_MANAGER_ALL_ACCESS: int +SC_MANAGER_CONNECT: int +SC_MANAGER_CREATE_SERVICE: int +SC_MANAGER_ENUMERATE_SERVICE: int +SC_MANAGER_LOCK: int +SC_MANAGER_MODIFY_BOOT_CONFIG: int +SC_MANAGER_QUERY_LOCK_STATUS: int +SERVICE_ACCEPT_HARDWAREPROFILECHANGE: int +SERVICE_ACCEPT_NETBINDCHANGE: int +SERVICE_ACCEPT_PARAMCHANGE: int +SERVICE_ACCEPT_PAUSE_CONTINUE: int +SERVICE_ACCEPT_POWEREVENT: int +SERVICE_ACCEPT_PRESHUTDOWN: int +SERVICE_ACCEPT_SESSIONCHANGE: int +SERVICE_ACCEPT_SHUTDOWN: int +SERVICE_ACCEPT_STOP: int +SERVICE_ACTIVE: int +SERVICE_ALL_ACCESS: int +SERVICE_AUTO_START: int +SERVICE_BOOT_START: int +SERVICE_CHANGE_CONFIG: int +SERVICE_CONFIG_DELAYED_AUTO_START_INFO: int +SERVICE_CONFIG_DESCRIPTION: int +SERVICE_CONFIG_FAILURE_ACTIONS: int +SERVICE_CONFIG_FAILURE_ACTIONS_FLAG: int +SERVICE_CONFIG_PRESHUTDOWN_INFO: int +SERVICE_CONFIG_REQUIRED_PRIVILEGES_INFO: int +SERVICE_CONFIG_SERVICE_SID_INFO: int +SERVICE_CONTINUE_PENDING: int +SERVICE_CONTROL_CONTINUE: int +SERVICE_CONTROL_DEVICEEVENT: int +SERVICE_CONTROL_HARDWAREPROFILECHANGE: int +SERVICE_CONTROL_INTERROGATE: int +SERVICE_CONTROL_NETBINDADD: int +SERVICE_CONTROL_NETBINDDISABLE: int +SERVICE_CONTROL_NETBINDENABLE: int +SERVICE_CONTROL_NETBINDREMOVE: int +SERVICE_CONTROL_PARAMCHANGE: int +SERVICE_CONTROL_PAUSE: int +SERVICE_CONTROL_POWEREVENT: int +SERVICE_CONTROL_PRESHUTDOWN: int +SERVICE_CONTROL_SESSIONCHANGE: int +SERVICE_CONTROL_SHUTDOWN: int +SERVICE_CONTROL_STOP: int +SERVICE_DEMAND_START: int +SERVICE_DISABLED: int +SERVICE_DRIVER: int +SERVICE_ENUMERATE_DEPENDENTS: int +SERVICE_ERROR_CRITICAL: int +SERVICE_ERROR_IGNORE: int +SERVICE_ERROR_NORMAL: int +SERVICE_ERROR_SEVERE: int +SERVICE_FILE_SYSTEM_DRIVER: int +SERVICE_INACTIVE: int +SERVICE_INTERACTIVE_PROCESS: int +SERVICE_INTERROGATE: int +SERVICE_KERNEL_DRIVER: int +SERVICE_NO_CHANGE: int +SERVICE_PAUSE_CONTINUE: int +SERVICE_PAUSE_PENDING: int +SERVICE_PAUSED: int +SERVICE_QUERY_CONFIG: int +SERVICE_QUERY_STATUS: int +SERVICE_RUNNING: int +SERVICE_SID_TYPE_NONE: int +SERVICE_SID_TYPE_RESTRICTED: int +SERVICE_SID_TYPE_UNRESTRICTED: int +SERVICE_SPECIFIC_ERROR: int +SERVICE_START: int +SERVICE_START_PENDING: int +SERVICE_STATE_ALL: int +SERVICE_STOP: int +SERVICE_STOP_PENDING: int +SERVICE_STOPPED: int +SERVICE_SYSTEM_START: int +SERVICE_USER_DEFINED_CONTROL: int +SERVICE_WIN32: int +SERVICE_WIN32_OWN_PROCESS: int +SERVICE_WIN32_SHARE_PROCESS: int +UOI_FLAGS: int +UOI_NAME: int +UOI_TYPE: int +UOI_USER_SID: int +WSF_VISIBLE: int +HDESKType = _win32typing.PyHDESK +HWINSTAType = _win32typing.PyHWINSTA +UNICODE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32trace.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32trace.pyi new file mode 100644 index 00000000..542d733f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32trace.pyi @@ -0,0 +1,13 @@ +from win32.lib.pywintypes import error as error + +def GetHandle(*args, **kwargs): ... 
# incomplete +def GetTracer(*args, **kwargs): ... # incomplete +def InitRead(*args, **kwargs): ... # incomplete +def InitWrite(*args, **kwargs): ... # incomplete +def TermRead(*args, **kwargs): ... # incomplete +def TermWrite(*args, **kwargs): ... # incomplete +def blockingread(*args, **kwargs): ... # incomplete +def flush(*args, **kwargs): ... # incomplete +def read(*args, **kwargs): ... # incomplete +def setprint(*args, **kwargs): ... # incomplete +def write(*args, **kwargs): ... # incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32transaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32transaction.pyi new file mode 100644 index 00000000..26d5077d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32transaction.pyi @@ -0,0 +1,18 @@ +import _win32typing +from win32.lib.pywintypes import error as error + +def CreateTransaction( + TransactionAttributes: _win32typing.PySECURITY_ATTRIBUTES | None = ..., + UOW: _win32typing.PyIID | None = ..., + CreateOptions: int = ..., + IsolationLevel: int = ..., + IsolationFlags: int = ..., + Timeout: int = ..., + Description: str | None = ..., +) -> int: ... +def RollbackTransaction(TransactionHandle: int) -> None: ... +def RollbackTransactionAsync(TransactionHandle: int) -> None: ... +def CommitTransaction(TransactionHandle: int) -> None: ... +def CommitTransactionAsync(TransactionHandle: int) -> None: ... +def GetTransactionId(TransactionHandle: int) -> _win32typing.PyIID: ... +def OpenTransaction(DesiredAccess, TransactionId: _win32typing.PyIID) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32ts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32ts.pyi new file mode 100644 index 00000000..7e37d2ec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32ts.pyi @@ -0,0 +1,96 @@ +from _typeshed import Incomplete + +def WTSOpenServer(ServerName: str) -> int: ... +def WTSCloseServer(Server: int) -> None: ... +def WTSQueryUserConfig(ServerName: str, UserName: str, ConfigClass): ... +def WTSSetUserConfig(ServerName: str, UserName: str, ConfigClass) -> None: ... +def WTSEnumerateServers(DomainName: str | None = ..., Version: int = ..., Reserved=...) -> tuple[str, ...]: ... +def WTSEnumerateSessions(__Server: int, __Version: int = ..., __Reserved=...) -> tuple[dict[str, str | int], ...]: ... +def WTSLogoffSession(__Server: int, __SessionId: int, __Wait: bool) -> None: ... +def WTSDisconnectSession(__Server: int, __SessionId: int, __Wait: bool) -> None: ... +def WTSQuerySessionInformation(__Server: int, __SessionId: int, __WTSInfoClass: int) -> str: ... +def WTSEnumerateProcesses(Server: int, Version: int = ..., Reserved: int = ...) -> tuple[str, ...]: ... +def WTSQueryUserToken(SessionId) -> int: ... +def WTSShutdownSystem(Server: int, ShutdownFlag) -> None: ... +def WTSTerminateProcess(Server: int, ProcessId, ExitCode) -> None: ... +def ProcessIdToSessionId(ProcessId): ... +def WTSGetActiveConsoleSessionId(): ... +def WTSRegisterSessionNotification(Wnd: int, Flags) -> None: ... +def WTSUnRegisterSessionNotification(Wnd: int) -> None: ... +def WTSWaitSystemEvent(Server: int, EventMask): ... +def WTSSendMessage(Server: int, SessionId, Title: str, Message: str, Style, Timeout, Wait): ... 
+ +NOTIFY_FOR_ALL_SESSIONS: int +NOTIFY_FOR_THIS_SESSION: int +WTSActive: int +WTSApplicationName: int +WTSClientAddress: int +WTSClientBuildNumber: int +WTSClientDirectory: int +WTSClientDisplay: int +WTSClientHardwareId: int +WTSClientName: int +WTSClientProductId: int +WTSClientProtocolType: int +WTSConnectQuery: int +WTSConnectState: int +WTSConnected: int +WTSDisconnected: int +WTSDomainName: int +WTSDown: int +WTSIdle: int +WTSInit: int +WTSInitialProgram: int +WTSListen: int +WTSOEMId: int +WTSReset: int +WTSSessionId: int +WTSShadow: int +WTSUserConfigBrokenTimeoutSettings: int +WTSUserConfigInitialProgram: int +WTSUserConfigModemCallbackPhoneNumber: int +WTSUserConfigModemCallbackSettings: int +WTSUserConfigReconnectSettings: int +WTSUserConfigShadowingSettings: int +WTSUserConfigTerminalServerHomeDir: int +WTSUserConfigTerminalServerHomeDirDrive: int +WTSUserConfigTerminalServerProfilePath: int +WTSUserConfigTimeoutSettingsConnections: int +WTSUserConfigTimeoutSettingsDisconnections: int +WTSUserConfigTimeoutSettingsIdle: int +WTSUserConfigWorkingDirectory: int +WTSUserConfigfAllowLogonTerminalServer: int +WTSUserConfigfDeviceClientDefaultPrinter: int +WTSUserConfigfDeviceClientDrives: int +WTSUserConfigfDeviceClientPrinters: int +WTSUserConfigfInheritInitialProgram: int +WTSUserConfigfTerminalServerRemoteHomeDir: int +WTSUserName: int +WTSVirtualClientData: int +WTSVirtualFileHandle: int +WTSWinStationName: int +WTSWorkingDirectory: int +WTS_CURRENT_SERVER: int +WTS_CURRENT_SERVER_HANDLE: int +WTS_CURRENT_SERVER_NAME: Incomplete +WTS_CURRENT_SESSION: int +WTS_EVENT_ALL: int +WTS_EVENT_CONNECT: int +WTS_EVENT_CREATE: int +WTS_EVENT_DELETE: int +WTS_EVENT_DISCONNECT: int +WTS_EVENT_FLUSH: int +WTS_EVENT_LICENSE: int +WTS_EVENT_LOGOFF: int +WTS_EVENT_LOGON: int +WTS_EVENT_NONE: int +WTS_EVENT_RENAME: int +WTS_EVENT_STATECHANGE: int +WTS_PROTOCOL_TYPE_CONSOLE: int +WTS_PROTOCOL_TYPE_ICA: int +WTS_PROTOCOL_TYPE_RDP: int +WTS_WSD_FASTREBOOT: int +WTS_WSD_LOGOFF: int +WTS_WSD_POWEROFF: int +WTS_WSD_REBOOT: int +WTS_WSD_SHUTDOWN: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32wnet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32wnet.pyi new file mode 100644 index 00000000..320fac3a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/win32wnet.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete + +import _win32typing +from win32.lib.pywintypes import error as error + +def NCBBuffer(size): ... +def Netbios(ncb: _win32typing.NCB): ... +def WNetAddConnection2( + NetResource: _win32typing.PyNETRESOURCE, + Password: Incomplete | None = ..., + UserName: Incomplete | None = ..., + Flags: int = ..., +) -> None: ... +def WNetAddConnection3( + HwndParent: int, + NetResource: _win32typing.PyNETRESOURCE, + Password: Incomplete | None = ..., + UserName: Incomplete | None = ..., + Flags: int = ..., +) -> None: ... +def WNetCancelConnection2(name: str, flags, force) -> None: ... +def WNetOpenEnum(scope, _type, usage, resource: _win32typing.PyNETRESOURCE) -> int: ... +def WNetCloseEnum(handle: int) -> None: ... +def WNetEnumResource(handle: int, maxExtries: int = ...) -> list[_win32typing.PyNETRESOURCE]: ... +def WNetGetUser(connection: str | None = ...) -> str: ... +def WNetGetUniversalName(localPath: str, infoLevel) -> str: ... 
+def WNetGetResourceInformation(NetResource: _win32typing.PyNETRESOURCE) -> tuple[_win32typing.PyNETRESOURCE, Incomplete]: ... +def WNetGetLastError() -> tuple[Incomplete, Incomplete, Incomplete]: ... +def WNetGetResourceParent(NetResource: _win32typing.PyNETRESOURCE) -> _win32typing.PyNETRESOURCE: ... +def WNetGetConnection(connection: str | None = ...) -> str: ... + +NCB = _win32typing.PyNCB +NCBType = _win32typing.PyNCB +NETRESOURCE = _win32typing.PyNETRESOURCE +NETRESOURCEType = _win32typing.PyNETRESOURCE diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/winxpgui.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/winxpgui.pyi new file mode 100644 index 00000000..363045bf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32/winxpgui.pyi @@ -0,0 +1,6 @@ +from win32.win32gui import * + +def GetConsoleWindow() -> int: ... + +# Actually returns a list of int|tuple, but lists don't support positional types +def GetWindowRgnBox(__hWnd: int) -> tuple[int, tuple[int, int, int, int]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32api.pyi new file mode 100644 index 00000000..8beb8a8a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32api.pyi @@ -0,0 +1 @@ +from win32.win32api import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32clipboard.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32clipboard.pyi new file mode 100644 index 00000000..77dbe667 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32clipboard.pyi @@ -0,0 +1 @@ +from win32.win32clipboard import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/__init__.pyi new file mode 100644 index 00000000..3ceebb09 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/__init__.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete + +__gen_path__: str +__build_path__: Incomplete + +def SetupEnvironment() -> None: ... +def __PackageSupportBuildPath__(package_path) -> None: ... 
+ +gen_py: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/adsi/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/adsi/__init__.pyi new file mode 100644 index 00000000..548c6c61 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/adsi/__init__.pyi @@ -0,0 +1 @@ +from win32comext.adsi import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/adsi/adsi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/adsi/adsi.pyi new file mode 100644 index 00000000..a6269d42 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/adsi/adsi.pyi @@ -0,0 +1 @@ +from win32comext.adsi.adsi import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/adsi/adsicon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/adsi/adsicon.pyi new file mode 100644 index 00000000..1776450e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/adsi/adsicon.pyi @@ -0,0 +1 @@ +from win32comext.adsi.adsicon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/authorization/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/authorization/__init__.pyi new file mode 100644 index 00000000..975c5a82 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/authorization/__init__.pyi @@ -0,0 +1 @@ +from win32comext.authorization import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/authorization/authorization.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/authorization/authorization.pyi new file mode 100644 index 00000000..0ad104f7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/authorization/authorization.pyi @@ -0,0 +1 @@ +from win32comext.authorization.authorization import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axcontrol/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axcontrol/__init__.pyi new file mode 100644 index 00000000..fde8be56 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axcontrol/__init__.pyi @@ -0,0 +1 @@ +from win32comext.axcontrol import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axcontrol/axcontrol.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axcontrol/axcontrol.pyi new file mode 100644 index 00000000..d3c73448 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axcontrol/axcontrol.pyi @@ -0,0 +1 @@ +from win32comext.axcontrol.axcontrol import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/__init__.pyi 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/__init__.pyi new file mode 100644 index 00000000..97d083e9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/__init__.pyi @@ -0,0 +1 @@ +from win32comext.axdebug import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/adb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/adb.pyi new file mode 100644 index 00000000..48966a11 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/adb.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.adb import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/axdebug.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/axdebug.pyi new file mode 100644 index 00000000..45b11132 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/axdebug.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.axdebug import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/codecontainer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/codecontainer.pyi new file mode 100644 index 00000000..60d3a50f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/codecontainer.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.codecontainer import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/contexts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/contexts.pyi new file mode 100644 index 00000000..c1c9fbfd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/contexts.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.contexts import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/debugger.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/debugger.pyi new file mode 100644 index 00000000..83a0e9c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/debugger.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.debugger import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/documents.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/documents.pyi new file mode 100644 index 00000000..e0d1bb74 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/documents.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.documents import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/expressions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/expressions.pyi new file mode 100644 index 00000000..3816f764 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/expressions.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.expressions import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/gateways.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/gateways.pyi new file mode 100644 index 00000000..517204a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/gateways.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.gateways import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/stackframe.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/stackframe.pyi new file mode 100644 index 00000000..3184248d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/stackframe.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.stackframe import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/util.pyi new file mode 100644 index 00000000..1ea282e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axdebug/util.pyi @@ -0,0 +1 @@ +from win32comext.axdebug.util import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/__init__.pyi new file mode 100644 index 00000000..afc72ec7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/__init__.pyi @@ -0,0 +1 @@ +from win32comext.axscript import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/asputil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/asputil.pyi new file mode 100644 index 00000000..1e04c857 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/asputil.pyi @@ -0,0 +1 @@ +from win32comext.axscript.asputil import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/axscript.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/axscript.pyi new file mode 100644 index 00000000..7ec0b348 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/axscript.pyi @@ -0,0 +1 @@ +from win32comext.axscript.axscript import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/client/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/client/__init__.pyi new file mode 100644 index 00000000..203d0ab3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/client/__init__.pyi @@ -0,0 +1 @@ +from win32comext.axscript.client import * diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/client/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/client/error.pyi new file mode 100644 index 00000000..f1a3310b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/client/error.pyi @@ -0,0 +1 @@ +from win32comext.axscript.client.error import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/server/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/server/__init__.pyi new file mode 100644 index 00000000..6cb0b07b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/server/__init__.pyi @@ -0,0 +1 @@ +from win32comext.axscript.server import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/server/axsite.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/server/axsite.pyi new file mode 100644 index 00000000..e0710346 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/server/axsite.pyi @@ -0,0 +1 @@ +from win32comext.axscript.server.axsite import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/server/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/server/error.pyi new file mode 100644 index 00000000..6de1e5dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/axscript/server/error.pyi @@ -0,0 +1 @@ +from win32comext.axscript.server.error import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/bits/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/bits/__init__.pyi new file mode 100644 index 00000000..020ede08 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/bits/__init__.pyi @@ -0,0 +1 @@ +from win32comext.bits import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/bits/bits.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/bits/bits.pyi new file mode 100644 index 00000000..fcea1eb7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/bits/bits.pyi @@ -0,0 +1 @@ +from win32comext.bits.bits import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/__init__.pyi new file mode 100644 index 00000000..930b06f0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/__init__.pyi @@ -0,0 +1,74 @@ +from _typeshed import Incomplete +from typing_extensions import TypeAlias + +import _win32typing +from win32com.client import dynamic as dynamic, gencache as gencache + +_Stringifiable: TypeAlias = object + +def GetObject(Pathname: str | None = 
..., Class: Incomplete | None = ..., clsctx: Incomplete | None = ...) -> CDispatch: ... +def GetActiveObject(Class, clsctx=...): ... +def Moniker(Pathname, clsctx=...): ... +def Dispatch( + dispatch: str | dynamic.PyIDispatchType | dynamic._GoodDispatchTypes | dynamic.PyIUnknownType, + userName: str | None = ..., + resultCLSID: _Stringifiable | None = ..., + typeinfo: _win32typing.PyITypeInfo | None = ..., + UnicodeToString: None = ..., + clsctx: int = ..., +) -> dynamic.CDispatch: ... +def DispatchEx( + clsid, + machine: Incomplete | None = ..., + userName: Incomplete | None = ..., + resultCLSID: Incomplete | None = ..., + typeinfo: Incomplete | None = ..., + UnicodeToString: None = ..., + clsctx: Incomplete | None = ..., +): ... + +class CDispatch(dynamic.CDispatch): + def __dir__(self): ... + +def CastTo(ob, target, typelib: Incomplete | None = ...): ... + +class Constants: + __dicts__: Incomplete + def __getattr__(self, a: str): ... + +constants: Incomplete + +class EventsProxy: + def __init__(self, ob) -> None: ... + def __del__(self) -> None: ... + def __getattr__(self, attr: str): ... + def __setattr__(self, attr: str, val) -> None: ... + +def DispatchWithEvents(clsid, user_event_class): ... +def WithEvents(disp, user_event_class): ... +def getevents(clsid): ... +def Record(name, object): ... + +class DispatchBaseClass: + def __init__(self, oobj: Incomplete | None = ...) -> None: ... + def __dir__(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __getattr__(self, attr: str): ... + def __setattr__(self, attr: str, value) -> None: ... + +class CoClassBaseClass: + def __init__(self, oobj: Incomplete | None = ...) -> None: ... + def __getattr__(self, attr: str): ... + def __setattr__(self, attr: str, value) -> None: ... + def __maybe__call__(self, *args, **kwargs): ... + def __maybe__str__(self, *args): ... + def __maybe__int__(self, *args): ... + def __maybe__iter__(self): ... + def __maybe__len__(self): ... + def __maybe__nonzero__(self): ... + +class VARIANT: + varianttype: Incomplete + def __init__(self, vt, value) -> None: ... + value: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/build.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/build.pyi new file mode 100644 index 00000000..cbbcd2e9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/build.pyi @@ -0,0 +1,36 @@ +from _typeshed import Incomplete + +class OleItem: + typename: str + doc: Incomplete + python_name: Incomplete + bWritten: int + bIsDispatch: int + bIsSink: int + clsid: Incomplete + co_class: Incomplete + def __init__(self, doc: Incomplete | None = ...) -> None: ... + +class DispatchItem(OleItem): + typename: str + propMap: Incomplete + propMapGet: Incomplete + propMapPut: Incomplete + mapFuncs: Incomplete + defaultDispatchName: Incomplete + hidden: int + def __init__( + self, typeinfo: Incomplete | None = ..., attr: Incomplete | None = ..., doc: Incomplete | None = ..., bForUser: int = ... + ) -> None: ... + clsid: Incomplete + bIsDispatch: Incomplete + def Build(self, typeinfo, attr, bForUser: int = ...) -> None: ... + def CountInOutOptArgs(self, argTuple): ... + def MakeFuncMethod(self, entry, name, bMakeClass: int = ...): ... + def MakeDispatchFuncMethod(self, entry, name, bMakeClass: int = ...): ... + def MakeVarArgsFuncMethod(self, entry, name, bMakeClass: int = ...): ... 
+ +class LazyDispatchItem(DispatchItem): + typename: str + clsid: Incomplete + def __init__(self, attr, doc) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/dynamic.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/dynamic.pyi new file mode 100644 index 00000000..d9cdc85b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/dynamic.pyi @@ -0,0 +1,87 @@ +from _typeshed import Incomplete +from typing import Any, Protocol, TypeVar, overload +from typing_extensions import TypeAlias + +import _win32typing +from win32.lib.pywintypes import IIDType +from win32com.client import build as build + +_T_co = TypeVar("_T_co", covariant=True) +_T = TypeVar("_T") + +class _DispatchCreateClass(Protocol[_T_co]): + @staticmethod + def __call__( + IDispatch: str | PyIDispatchType | _GoodDispatchTypes | PyIUnknownType, + olerepr: build.DispatchItem | build.LazyDispatchItem, + userName: str | None = ..., + UnicodeToString: None = ..., + lazydata: Incomplete | None = ..., + ) -> _T_co: ... + +debugging: int +debugging_attr: int +LCID: int +ERRORS_BAD_CONTEXT: Incomplete +ALL_INVOKE_TYPES: Incomplete + +def debug_print(*args) -> None: ... +def debug_attr_print(*args) -> None: ... +def MakeMethod(func, inst, cls): ... + +PyIDispatchType = _win32typing.PyIDispatch +PyIUnknownType = _win32typing.PyIUnknown + +_GoodDispatchTypes: TypeAlias = tuple[type[str], type[IIDType]] + +@overload +def Dispatch( + IDispatch: str | PyIDispatchType | _GoodDispatchTypes | PyIUnknownType, + userName: str | None, + createClass: _DispatchCreateClass[_T], + typeinfo: _win32typing.PyITypeInfo | None = ..., + UnicodeToString: None = ..., + clsctx: int = ..., +) -> _T: ... +@overload +def Dispatch( + IDispatch: str | PyIDispatchType | _GoodDispatchTypes | PyIUnknownType, + userName: str | None = ..., + createClass: None = ..., + typeinfo: _win32typing.PyITypeInfo | None = ..., + UnicodeToString: None = ..., + clsctx: int = ..., +) -> CDispatch: ... +def MakeOleRepr(IDispatch, typeinfo, typecomp): ... +def DumbDispatch( + IDispatch, + userName: Incomplete | None = ..., + createClass: Incomplete | None = ..., + UnicodeToString: Incomplete | None = ..., + clsctx=..., +): ... + +class CDispatch: + def __init__( + self, + IDispatch, + olerepr, + userName: Incomplete | None = ..., + UnicodeToString: None = ..., + lazydata: Incomplete | None = ..., + ) -> None: ... + def __call__(self, *args): ... + def __bool__(self) -> bool: ... + def __dir__(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __int__(self) -> int: ... + def __len__(self) -> int: ... + def __getitem__(self, index): ... + def __setitem__(self, index, *args) -> None: ... + def __LazyMap__(self, attr): ... + def __AttrToID__(self, attr): ... + ob: Incomplete + # CDispatch objects are dynamically generated and too complex to type + def __getattr__(self, attr: str) -> Any: ... + def __setattr__(self, attr: str, value: Any) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/gencache.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/gencache.pyi new file mode 100644 index 00000000..825af08d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/client/gencache.pyi @@ -0,0 +1,5 @@ +from win32com.client import dynamic + +def EnsureDispatch( + prog_id: str | dynamic.PyIDispatchType | dynamic._GoodDispatchTypes | dynamic.PyIUnknownType, bForDemand: int = ... +) -> dynamic.CDispatch: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/directsound/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/directsound/__init__.pyi new file mode 100644 index 00000000..6ac36b68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/directsound/__init__.pyi @@ -0,0 +1 @@ +from win32comext.directsound import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/directsound/directsound.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/directsound/directsound.pyi new file mode 100644 index 00000000..ec66ab40 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/directsound/directsound.pyi @@ -0,0 +1 @@ +from win32comext.directsound.directsound import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/ifilter/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/ifilter/__init__.pyi new file mode 100644 index 00000000..258d3a36 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/ifilter/__init__.pyi @@ -0,0 +1 @@ +from win32comext.ifilter import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/ifilter/ifilter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/ifilter/ifilter.pyi new file mode 100644 index 00000000..01872b20 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/ifilter/ifilter.pyi @@ -0,0 +1 @@ +from win32comext.ifilter.ifilter import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/ifilter/ifiltercon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/ifilter/ifiltercon.pyi new file mode 100644 index 00000000..8f2260e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/ifilter/ifiltercon.pyi @@ -0,0 +1 @@ +from win32comext.ifilter.ifiltercon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/internet/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/internet/__init__.pyi new file mode 100644 index 00000000..8fb46e57 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/internet/__init__.pyi @@ -0,0 +1 @@ +from win32comext.internet import * diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/internet/inetcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/internet/inetcon.pyi new file mode 100644 index 00000000..58e4a623 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/internet/inetcon.pyi @@ -0,0 +1 @@ +from win32comext.internet.inetcon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/internet/internet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/internet/internet.pyi new file mode 100644 index 00000000..e08fbee4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/internet/internet.pyi @@ -0,0 +1 @@ +from win32comext.internet.internet import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/__init__.pyi new file mode 100644 index 00000000..cc183a97 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/__init__.pyi @@ -0,0 +1 @@ +from win32comext.mapi import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/_exchdapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/_exchdapi.pyi new file mode 100644 index 00000000..fe5f8ed0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/_exchdapi.pyi @@ -0,0 +1 @@ +from win32comext.mapi._exchdapi import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/emsabtags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/emsabtags.pyi new file mode 100644 index 00000000..d38198bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/emsabtags.pyi @@ -0,0 +1 @@ +from win32comext.mapi.emsabtags import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/exchange.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/exchange.pyi new file mode 100644 index 00000000..1d4b98a4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/exchange.pyi @@ -0,0 +1 @@ +from win32comext.mapi.exchange import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/mapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/mapi.pyi new file mode 100644 index 00000000..e7bacd91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/mapi.pyi @@ -0,0 +1 @@ +from win32comext.mapi.mapi import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/mapitags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/mapitags.pyi new file mode 100644 index 00000000..f4ff53e2 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/mapitags.pyi @@ -0,0 +1 @@ +from win32comext.mapi.mapitags import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/mapiutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/mapiutil.pyi new file mode 100644 index 00000000..cd19df1c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/mapi/mapiutil.pyi @@ -0,0 +1 @@ +from win32comext.mapi.mapiutil import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/olectl.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/olectl.pyi new file mode 100644 index 00000000..33c11076 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/olectl.pyi @@ -0,0 +1,54 @@ +FACILITY_CONTROL: int + +def MAKE_SCODE(sev: int, fac: int, code: int) -> int: ... +def STD_CTL_SCODE(n: int) -> int: ... + +CTL_E_ILLEGALFUNCTIONCALL: int +CTL_E_OVERFLOW: int +CTL_E_OUTOFMEMORY: int +CTL_E_DIVISIONBYZERO: int +CTL_E_OUTOFSTRINGSPACE: int +CTL_E_OUTOFSTACKSPACE: int +CTL_E_BADFILENAMEORNUMBER: int +CTL_E_FILENOTFOUND: int +CTL_E_BADFILEMODE: int +CTL_E_FILEALREADYOPEN: int +CTL_E_DEVICEIOERROR: int +CTL_E_FILEALREADYEXISTS: int +CTL_E_BADRECORDLENGTH: int +CTL_E_DISKFULL: int +CTL_E_BADRECORDNUMBER: int +CTL_E_BADFILENAME: int +CTL_E_TOOMANYFILES: int +CTL_E_DEVICEUNAVAILABLE: int +CTL_E_PERMISSIONDENIED: int +CTL_E_DISKNOTREADY: int +CTL_E_PATHFILEACCESSERROR: int +CTL_E_PATHNOTFOUND: int +CTL_E_INVALIDPATTERNSTRING: int +CTL_E_INVALIDUSEOFNULL: int +CTL_E_INVALIDFILEFORMAT: int +CTL_E_INVALIDPROPERTYVALUE: int +CTL_E_INVALIDPROPERTYARRAYINDEX: int +CTL_E_SETNOTSUPPORTEDATRUNTIME: int +CTL_E_SETNOTSUPPORTED: int +CTL_E_NEEDPROPERTYARRAYINDEX: int +CTL_E_SETNOTPERMITTED: int +CTL_E_GETNOTSUPPORTEDATRUNTIME: int +CTL_E_GETNOTSUPPORTED: int +CTL_E_PROPERTYNOTFOUND: int +CTL_E_INVALIDCLIPBOARDFORMAT: int +CTL_E_INVALIDPICTURE: int +CTL_E_PRINTERERROR: int +CTL_E_CANTSAVEFILETOTEMP: int +CTL_E_SEARCHTEXTNOTFOUND: int +CTL_E_REPLACEMENTSTOOLONG: int +CONNECT_E_FIRST: int +CONNECT_E_LAST: int +CONNECT_S_FIRST: int +CONNECT_S_LAST: int +CONNECT_E_NOCONNECTION: int +CONNECT_E_ADVISELIMIT: int +CONNECT_E_CANNOTCONNECT: int +CONNECT_E_OVERRIDDEN: int +CLASS_E_NOTLICENSED: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/propsys/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/propsys/__init__.pyi new file mode 100644 index 00000000..34b72105 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/propsys/__init__.pyi @@ -0,0 +1 @@ +from win32comext.propsys import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/propsys/propsys.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/propsys/propsys.pyi new file mode 100644 index 00000000..c5afd3cb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/propsys/propsys.pyi @@ -0,0 +1 @@ +from win32comext.propsys.propsys import * diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/propsys/pscon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/propsys/pscon.pyi new file mode 100644 index 00000000..0f35b473 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/propsys/pscon.pyi @@ -0,0 +1 @@ +from win32comext.propsys.pscon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/connect.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/connect.pyi new file mode 100644 index 00000000..cf8670a6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/connect.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete + +from win32com import olectl as olectl +from win32com.server.exception import Exception as Exception + +IConnectionPointContainer_methods: Incomplete +IConnectionPoint_methods: Incomplete + +class ConnectableServer: + cookieNo: int + connections: Incomplete + def EnumConnections(self) -> None: ... + def GetConnectionInterface(self) -> None: ... + def GetConnectionPointContainer(self): ... + def Advise(self, pUnk): ... + def Unadvise(self, cookie) -> None: ... + def EnumConnectionPoints(self) -> None: ... + def FindConnectionPoint(self, iid): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/dispatcher.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/dispatcher.pyi new file mode 100644 index 00000000..d88bf4f4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/dispatcher.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete +from typing_extensions import TypeAlias + +from win32com.server.exception import IsCOMServerException as IsCOMServerException +from win32com.util import IIDToInterfaceName as IIDToInterfaceName + +class DispatcherBase: + policy: Incomplete + logger: Incomplete + def __init__(self, policyClass, object) -> None: ... + +class DispatcherTrace(DispatcherBase): ... + +class DispatcherWin32trace(DispatcherTrace): + def __init__(self, policyClass, object) -> None: ... + +class DispatcherOutputDebugString(DispatcherTrace): ... + +class DispatcherWin32dbg(DispatcherBase): + def __init__(self, policyClass, ob) -> None: ... 
+ +DefaultDebugDispatcher: TypeAlias = DispatcherTrace diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/exception.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/exception.pyi new file mode 100644 index 00000000..6d27f709 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/exception.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +import pythoncom + +class COMException(pythoncom.com_error): + scode: Incomplete + description: Incomplete + source: Incomplete + helpfile: Incomplete + helpcontext: Incomplete + def __init__( + self, + description: Incomplete | None = ..., + scode: Incomplete | None = ..., + source: Incomplete | None = ..., + helpfile: Incomplete | None = ..., + helpContext: Incomplete | None = ..., + desc: Incomplete | None = ..., + hresult: Incomplete | None = ..., + ) -> None: ... + +Exception = COMException + +def IsCOMException(t: Incomplete | None = ...): ... +def IsCOMServerException(t: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/policy.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/policy.pyi new file mode 100644 index 00000000..8edf97e2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/policy.pyi @@ -0,0 +1,53 @@ +from _typeshed import Incomplete +from abc import ABC, abstractmethod + +from pythoncom import ( + DISPID_COLLECT as DISPID_COLLECT, + DISPID_CONSTRUCTOR as DISPID_CONSTRUCTOR, + DISPID_DESTRUCTOR as DISPID_DESTRUCTOR, + DISPID_UNKNOWN as DISPID_UNKNOWN, +) +from win32com.server.dispatcher import DispatcherTrace as DispatcherTrace, DispatcherWin32trace as DispatcherWin32trace +from win32com.server.exception import COMException as COMException + +S_OK: int +IDispatchType: Incomplete +IUnknownType: Incomplete +error: Incomplete +regSpec: str +regPolicy: str +regDispatcher: str +regAddnPath: str + +def CreateInstance(clsid, reqIID): ... + +class BasicWrapPolicy(ABC): + def __init__(self, object) -> None: ... + def _InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider) -> tuple[Incomplete]: ... + @abstractmethod + def _invokeex_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider) -> tuple[Incomplete]: ... + +class MappedWrapPolicy(BasicWrapPolicy): + _dispid_to_func_: dict[int, str] + def _invokeex_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider) -> tuple[Incomplete]: ... + +class DesignatedWrapPolicy(MappedWrapPolicy): ... +class EventHandlerPolicy(DesignatedWrapPolicy): ... + +class DynamicPolicy(BasicWrapPolicy): + def _invokeex_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider) -> tuple[Incomplete]: ... + +DefaultPolicy = DesignatedWrapPolicy + +def resolve_func(spec): ... +def call_func(spec, *args): ... 
+ +DISPATCH_METHOD: int +DISPATCH_PROPERTYGET: int +DISPATCH_PROPERTYPUT: int +DISPATCH_PROPERTYPUTREF: int +DISPID_EVALUATE: int +DISPID_NEWENUM: int +DISPID_PROPERTYPUT: int +DISPID_STARTENUM: int +DISPID_VALUE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/util.pyi new file mode 100644 index 00000000..14fe4b0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/server/util.pyi @@ -0,0 +1,40 @@ +from _typeshed import Incomplete + +from win32com.server import policy as policy +from win32com.server.exception import COMException as COMException + +def wrap(ob, iid: Incomplete | None = ..., usePolicy: Incomplete | None = ..., useDispatcher: Incomplete | None = ...): ... +def unwrap(ob): ... + +class ListEnumerator: + index: Incomplete + def __init__(self, data, index: int = ..., iid=...) -> None: ... + def Next(self, count): ... + def Skip(self, count) -> None: ... + def Reset(self) -> None: ... + def Clone(self): ... + +class ListEnumeratorGateway(ListEnumerator): + def Next(self, count): ... + +def NewEnum(seq, cls=..., iid=..., usePolicy: Incomplete | None = ..., useDispatcher: Incomplete | None = ...): ... + +class Collection: + data: Incomplete + def __init__(self, data: Incomplete | None = ..., readOnly: int = ...) -> None: ... + def Item(self, *args): ... + def Count(self): ... + def Add(self, value) -> None: ... + def Remove(self, index) -> None: ... + def Insert(self, index, value) -> None: ... + +def NewCollection(seq, cls=...): ... + +class FileStream: + file: Incomplete + def __init__(self, file) -> None: ... + def Read(self, amount): ... + def Write(self, data): ... + def Clone(self): ... + def CopyTo(self, dest, cb): ... + def Seek(self, offset, origin): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/shell/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/shell/__init__.pyi new file mode 100644 index 00000000..1074dc68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/shell/__init__.pyi @@ -0,0 +1 @@ +from win32comext.shell import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/shell/shell.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/shell/shell.pyi new file mode 100644 index 00000000..63f04216 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/shell/shell.pyi @@ -0,0 +1 @@ +from win32comext.shell.shell import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/shell/shellcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/shell/shellcon.pyi new file mode 100644 index 00000000..d10057f4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/shell/shellcon.pyi @@ -0,0 +1 @@ +from win32comext.shell.shellcon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/storagecon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/storagecon.pyi new file mode 100644 index 00000000..3242473c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/storagecon.pyi @@ -0,0 +1,113 @@ +STGC_DEFAULT: int +STGC_OVERWRITE: int +STGC_ONLYIFCURRENT: int +STGC_DANGEROUSLYCOMMITMERELYTODISKCACHE: int +STGC_CONSOLIDATE: int +STGTY_STORAGE: int +STGTY_STREAM: int +STGTY_LOCKBYTES: int +STGTY_PROPERTY: int +STREAM_SEEK_SET: int +STREAM_SEEK_CUR: int +STREAM_SEEK_END: int +LOCK_WRITE: int +LOCK_EXCLUSIVE: int +LOCK_ONLYONCE: int +CWCSTORAGENAME: int +STGM_DIRECT: int +STGM_TRANSACTED: int +STGM_SIMPLE: int +STGM_READ: int +STGM_WRITE: int +STGM_READWRITE: int +STGM_SHARE_DENY_NONE: int +STGM_SHARE_DENY_READ: int +STGM_SHARE_DENY_WRITE: int +STGM_SHARE_EXCLUSIVE: int +STGM_PRIORITY: int +STGM_DELETEONRELEASE: int +STGM_NOSCRATCH: int +STGM_CREATE: int +STGM_CONVERT: int +STGM_FAILIFTHERE: int +STGM_NOSNAPSHOT: int +ASYNC_MODE_COMPATIBILITY: int +ASYNC_MODE_DEFAULT: int +STGTY_REPEAT: int +STG_TOEND: int +STG_LAYOUT_SEQUENTIAL: int +STG_LAYOUT_INTERLEAVED: int +COM_RIGHTS_EXECUTE: int +COM_RIGHTS_EXECUTE_LOCAL: int +COM_RIGHTS_EXECUTE_REMOTE: int +COM_RIGHTS_ACTIVATE_LOCAL: int +COM_RIGHTS_ACTIVATE_REMOTE: int +STGFMT_DOCUMENT: int +STGFMT_STORAGE: int +STGFMT_NATIVE: int +STGFMT_FILE: int +STGFMT_ANY: int +STGFMT_DOCFILE: int +PID_DICTIONARY: int +PID_CODEPAGE: int +PID_FIRST_USABLE: int +PID_FIRST_NAME_DEFAULT: int +PID_LOCALE: int +PID_MODIFY_TIME: int +PID_SECURITY: int +PID_BEHAVIOR: int +PID_ILLEGAL: int +PID_MIN_READONLY: int +PID_MAX_READONLY: int +PIDDI_THUMBNAIL: int +PIDSI_TITLE: int +PIDSI_SUBJECT: int +PIDSI_AUTHOR: int +PIDSI_KEYWORDS: int +PIDSI_COMMENTS: int +PIDSI_TEMPLATE: int +PIDSI_LASTAUTHOR: int +PIDSI_REVNUMBER: int +PIDSI_EDITTIME: int +PIDSI_LASTPRINTED: int +PIDSI_CREATE_DTM: int +PIDSI_LASTSAVE_DTM: int +PIDSI_PAGECOUNT: int +PIDSI_WORDCOUNT: int +PIDSI_CHARCOUNT: int +PIDSI_THUMBNAIL: int 
+PIDSI_APPNAME: int +PIDSI_DOC_SECURITY: int +PIDDSI_CATEGORY: int +PIDDSI_PRESFORMAT: int +PIDDSI_BYTECOUNT: int +PIDDSI_LINECOUNT: int +PIDDSI_PARCOUNT: int +PIDDSI_SLIDECOUNT: int +PIDDSI_NOTECOUNT: int +PIDDSI_HIDDENCOUNT: int +PIDDSI_MMCLIPCOUNT: int +PIDDSI_SCALE: int +PIDDSI_HEADINGPAIR: int +PIDDSI_DOCPARTS: int +PIDDSI_MANAGER: int +PIDDSI_COMPANY: int +PIDDSI_LINKSDIRTY: int +PIDMSI_EDITOR: int +PIDMSI_SUPPLIER: int +PIDMSI_SOURCE: int +PIDMSI_SEQUENCE_NO: int +PIDMSI_PROJECT: int +PIDMSI_STATUS: int +PIDMSI_OWNER: int +PIDMSI_RATING: int +PIDMSI_PRODUCTION: int +PIDMSI_COPYRIGHT: int +PROPSETFLAG_DEFAULT: int +PROPSETFLAG_NONSIMPLE: int +PROPSETFLAG_ANSI: int +PROPSETFLAG_UNBUFFERED: int +PROPSETFLAG_CASE_SENSITIVE: int +STGMOVE_MOVE: int +STGMOVE_COPY: int +STGMOVE_SHALLOWCOPY: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/taskscheduler/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/taskscheduler/__init__.pyi new file mode 100644 index 00000000..2d22e3f9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/taskscheduler/__init__.pyi @@ -0,0 +1 @@ +from win32comext.taskscheduler import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/taskscheduler/taskscheduler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/taskscheduler/taskscheduler.pyi new file mode 100644 index 00000000..7771ba73 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/taskscheduler/taskscheduler.pyi @@ -0,0 +1 @@ +from win32comext.taskscheduler.taskscheduler import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/universal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/universal.pyi new file mode 100644 index 00000000..f10b28b8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/universal.pyi @@ -0,0 +1,27 @@ +from _typeshed import Incomplete + +from pythoncom import com_error as com_error +from win32com.client import gencache as gencache + +def RegisterInterfaces(typelibGUID, lcid, major, minor, interface_names: Incomplete | None = ...): ... + +class Arg: + name: Incomplete + size: Incomplete + offset: int + def __init__(self, arg_info, name: Incomplete | None = ...) -> None: ... + +class Method: + dispid: Incomplete + invkind: Incomplete + name: Incomplete + args: Incomplete + cbArgs: Incomplete + def __init__(self, method_info, isEventSink: int = ...) -> None: ... + +class Definition: + def __init__(self, iid, is_dispatch, method_defs) -> None: ... + def iid(self): ... + def vtbl_argsizes(self): ... + def vtbl_argcounts(self): ... + def dispatch(self, ob, index, argPtr, ReadFromInTuple=..., WriteFromOutTuple=...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/util.pyi new file mode 100644 index 00000000..6c896a88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32com/util.pyi @@ -0,0 +1 @@ +def IIDToInterfaceName(iid): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/adsi/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/adsi/__init__.pyi new file mode 100644 index 00000000..8a3afe5f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/adsi/__init__.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete + +import _win32typing +import win32com.client +from win32comext.adsi.adsi import ( + DBPROPSET_ADSISEARCH as DBPROPSET_ADSISEARCH, + ADsBuildEnumerator as ADsBuildEnumerator, + ADsEnumerateNext as ADsEnumerateNext, + ADsGetLastError as ADsGetLastError, + CLSID_AccessControlEntry as CLSID_AccessControlEntry, + CLSID_AccessControlList as CLSID_AccessControlList, + CLSID_ADsDSOObject as CLSID_ADsDSOObject, + CLSID_DsObjectPicker as CLSID_DsObjectPicker, + CLSID_SecurityDescriptor as CLSID_SecurityDescriptor, + DBGUID_LDAPDialect as DBGUID_LDAPDialect, + DSOP_SCOPE_INIT_INFOs as DSOP_SCOPE_INIT_INFOs, + IID_IADs as IID_IADs, + IID_IADsClass as IID_IADsClass, + IID_IADsCollection as IID_IADsCollection, + IID_IADsComputer as IID_IADsComputer, + IID_IADsComputerOperations as IID_IADsComputerOperations, + IID_IADsContainer as IID_IADsContainer, + IID_IADsDeleteOps as IID_IADsDeleteOps, + IID_IADsDomain as IID_IADsDomain, + IID_IADsFileService as IID_IADsFileService, + IID_IADsFileServiceOperations as IID_IADsFileServiceOperations, + IID_IADsFileShare as IID_IADsFileShare, + IID_IADsGroup as IID_IADsGroup, + IID_IADsLocality as IID_IADsLocality, + IID_IADsMembers as IID_IADsMembers, + IID_IADsNamespaces as IID_IADsNamespaces, + IID_IADsO as IID_IADsO, + IID_IADsOpenDSObject as IID_IADsOpenDSObject, + IID_IADsOU as IID_IADsOU, + IID_IADsPrintJob as IID_IADsPrintJob, + IID_IADsPrintJobOperations as IID_IADsPrintJobOperations, + IID_IADsPrintQueue as IID_IADsPrintQueue, + IID_IADsPrintQueueOperations as IID_IADsPrintQueueOperations, + IID_IADsProperty as IID_IADsProperty, + IID_IADsPropertyList as IID_IADsPropertyList, + IID_IADsResource as IID_IADsResource, + IID_IADsSearch as IID_IADsSearch, + IID_IADsService as IID_IADsService, + IID_IADsServiceOperations as IID_IADsServiceOperations, + IID_IADsSession as IID_IADsSession, + IID_IADsSyntax as IID_IADsSyntax, + IID_IADsUser as IID_IADsUser, + IID_IDirectoryObject as IID_IDirectoryObject, + IID_IDirectorySearch as IID_IDirectorySearch, + IID_IDsObjectPicker as IID_IDsObjectPicker, + LIBID_ADs as LIBID_ADs, + StringAsDS_SELECTION_LIST as StringAsDS_SELECTION_LIST, +) + +LCID: int +IDispatchType: Incomplete +IADsContainerType: Incomplete + +class ADSIEnumerator: + index: int + def __init__(self, ob) -> None: ... + def __getitem__(self, index): ... + def __call__(self, index): ... + +class ADSIDispatch(win32com.client.CDispatch): + def __getattr__(self, attr: str): ... + def QueryInterface(self, iid): ... + +# Redefinition making "iid" optional. +def ADsGetObject(path, iid: _win32typing.PyIID = ...): ... + +# Redefinition with flipped "reserved" and "iid" arguments. +def ADsOpenObject(path, username, password, reserved: int = ..., iid: _win32typing.PyIID = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/adsi/adsi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/adsi/adsi.pyi new file mode 100644 index 00000000..4004c541 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/adsi/adsi.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete +from typing_extensions import TypeAlias + +import _win32typing +from win32.lib.pywintypes import com_error + +error: TypeAlias = com_error # noqa: Y042 + +def ADsOpenObject(path, username, password, iid: _win32typing.PyIID, reserved: int = ...): ... +def ADsGetObject(path, iid: _win32typing.PyIID): ... +def ADsBuildEnumerator(container: _win32typing.PyIADsContainer): ... +def ADsEnumerateNext(enum, num: int = ...): ... +def ADsGetLastError() -> tuple[Incomplete, Incomplete, Incomplete]: ... +def StringAsDS_SELECTION_LIST(*args, **kwargs): ... # incomplete + +DSOP_SCOPE_INIT_INFOs = _win32typing.PyDSOP_SCOPE_INIT_INFOs +CLSID_ADsDSOObject: _win32typing.PyIID +CLSID_AccessControlEntry: _win32typing.PyIID +CLSID_AccessControlList: _win32typing.PyIID +CLSID_DsObjectPicker: _win32typing.PyIID +CLSID_SecurityDescriptor: _win32typing.PyIID +DBGUID_LDAPDialect: _win32typing.PyIID +DBPROPSET_ADSISEARCH: _win32typing.PyIID +IID_IADs: _win32typing.PyIID +IID_IADsClass: _win32typing.PyIID +IID_IADsCollection: _win32typing.PyIID +IID_IADsComputer: _win32typing.PyIID +IID_IADsComputerOperations: _win32typing.PyIID +IID_IADsContainer: _win32typing.PyIID +IID_IADsDeleteOps: _win32typing.PyIID +IID_IADsDomain: _win32typing.PyIID +IID_IADsFileService: _win32typing.PyIID +IID_IADsFileServiceOperations: _win32typing.PyIID +IID_IADsFileShare: _win32typing.PyIID +IID_IADsGroup: _win32typing.PyIID +IID_IADsLocality: _win32typing.PyIID +IID_IADsMembers: _win32typing.PyIID +IID_IADsNamespaces: _win32typing.PyIID +IID_IADsO: _win32typing.PyIID +IID_IADsOU: _win32typing.PyIID +IID_IADsOpenDSObject: _win32typing.PyIID +IID_IADsPrintJob: _win32typing.PyIID +IID_IADsPrintJobOperations: _win32typing.PyIID +IID_IADsPrintQueue: _win32typing.PyIID +IID_IADsPrintQueueOperations: _win32typing.PyIID +IID_IADsProperty: _win32typing.PyIID +IID_IADsPropertyList: _win32typing.PyIID +IID_IADsResource: _win32typing.PyIID +IID_IADsSearch: _win32typing.PyIID +IID_IADsService: _win32typing.PyIID +IID_IADsServiceOperations: _win32typing.PyIID +IID_IADsSession: _win32typing.PyIID +IID_IADsSyntax: _win32typing.PyIID +IID_IADsUser: _win32typing.PyIID +IID_IDirectoryObject: _win32typing.PyIID +IID_IDirectorySearch: _win32typing.PyIID +IID_IDsObjectPicker: _win32typing.PyIID +LIBID_ADs: _win32typing.PyIID diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/adsi/adsicon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/adsi/adsicon.pyi new file mode 100644 index 00000000..f31229dd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/adsi/adsicon.pyi @@ -0,0 +1,318 @@ +from _typeshed import Incomplete + +ADS_ATTR_CLEAR: int +ADS_ATTR_UPDATE: int +ADS_ATTR_APPEND: int +ADS_ATTR_DELETE: int +ADS_EXT_MINEXTDISPID: int +ADS_EXT_MAXEXTDISPID: int +ADS_EXT_INITCREDENTIALS: int +ADS_EXT_INITIALIZE_COMPLETE: int +ADS_SEARCHPREF_ASYNCHRONOUS: int +ADS_SEARCHPREF_DEREF_ALIASES: int +ADS_SEARCHPREF_SIZE_LIMIT: int +ADS_SEARCHPREF_TIME_LIMIT: 
int +ADS_SEARCHPREF_ATTRIBTYPES_ONLY: int +ADS_SEARCHPREF_SEARCH_SCOPE: int +ADS_SEARCHPREF_TIMEOUT: int +ADS_SEARCHPREF_PAGESIZE: int +ADS_SEARCHPREF_PAGED_TIME_LIMIT: int +ADS_SEARCHPREF_CHASE_REFERRALS: int +ADS_SEARCHPREF_SORT_ON: int +ADS_SEARCHPREF_CACHE_RESULTS: int +ADS_SEARCHPREF_DIRSYNC: int +ADS_SEARCHPREF_TOMBSTONE: int +ADS_SCOPE_BASE: int +ADS_SCOPE_ONELEVEL: int +ADS_SCOPE_SUBTREE: int +ADS_SECURE_AUTHENTICATION: int +ADS_USE_ENCRYPTION: int +ADS_USE_SSL: int +ADS_READONLY_SERVER: int +ADS_PROMPT_CREDENTIALS: int +ADS_NO_AUTHENTICATION: int +ADS_FAST_BIND: int +ADS_USE_SIGNING: int +ADS_USE_SEALING: int +ADS_USE_DELEGATION: int +ADS_SERVER_BIND: int +ADSTYPE_INVALID: int +ADSTYPE_DN_STRING: Incomplete +ADSTYPE_CASE_EXACT_STRING: Incomplete +ADSTYPE_CASE_IGNORE_STRING: Incomplete +ADSTYPE_PRINTABLE_STRING: Incomplete +ADSTYPE_NUMERIC_STRING: Incomplete +ADSTYPE_BOOLEAN: Incomplete +ADSTYPE_INTEGER: Incomplete +ADSTYPE_OCTET_STRING: Incomplete +ADSTYPE_UTC_TIME: Incomplete +ADSTYPE_LARGE_INTEGER: Incomplete +ADSTYPE_PROV_SPECIFIC: Incomplete +ADSTYPE_OBJECT_CLASS: Incomplete +ADSTYPE_CASEIGNORE_LIST: Incomplete +ADSTYPE_OCTET_LIST: Incomplete +ADSTYPE_PATH: Incomplete +ADSTYPE_POSTALADDRESS: Incomplete +ADSTYPE_TIMESTAMP: Incomplete +ADSTYPE_BACKLINK: Incomplete +ADSTYPE_TYPEDNAME: Incomplete +ADSTYPE_HOLD: Incomplete +ADSTYPE_NETADDRESS: Incomplete +ADSTYPE_REPLICAPOINTER: Incomplete +ADSTYPE_FAXNUMBER: Incomplete +ADSTYPE_EMAIL: Incomplete +ADSTYPE_NT_SECURITY_DESCRIPTOR: Incomplete +ADSTYPE_UNKNOWN: Incomplete +ADSTYPE_DN_WITH_BINARY: Incomplete +ADSTYPE_DN_WITH_STRING: Incomplete +ADS_PROPERTY_CLEAR: int +ADS_PROPERTY_UPDATE: int +ADS_PROPERTY_APPEND: int +ADS_PROPERTY_DELETE: int +ADS_SYSTEMFLAG_DISALLOW_DELETE: int +ADS_SYSTEMFLAG_CONFIG_ALLOW_RENAME: int +ADS_SYSTEMFLAG_CONFIG_ALLOW_MOVE: int +ADS_SYSTEMFLAG_CONFIG_ALLOW_LIMITED_MOVE: int +ADS_SYSTEMFLAG_DOMAIN_DISALLOW_RENAME: int +ADS_SYSTEMFLAG_DOMAIN_DISALLOW_MOVE: int +ADS_SYSTEMFLAG_CR_NTDS_NC: int +ADS_SYSTEMFLAG_CR_NTDS_DOMAIN: int +ADS_SYSTEMFLAG_ATTR_NOT_REPLICATED: int +ADS_SYSTEMFLAG_ATTR_IS_CONSTRUCTED: int +ADS_GROUP_TYPE_GLOBAL_GROUP: int +ADS_GROUP_TYPE_DOMAIN_LOCAL_GROUP: int +ADS_GROUP_TYPE_LOCAL_GROUP: int +ADS_GROUP_TYPE_UNIVERSAL_GROUP: int +ADS_GROUP_TYPE_SECURITY_ENABLED: int +ADS_UF_SCRIPT: int +ADS_UF_ACCOUNTDISABLE: int +ADS_UF_HOMEDIR_REQUIRED: int +ADS_UF_LOCKOUT: int +ADS_UF_PASSWD_NOTREQD: int +ADS_UF_PASSWD_CANT_CHANGE: int +ADS_UF_ENCRYPTED_TEXT_PASSWORD_ALLOWED: int +ADS_UF_TEMP_DUPLICATE_ACCOUNT: int +ADS_UF_NORMAL_ACCOUNT: int +ADS_UF_INTERDOMAIN_TRUST_ACCOUNT: int +ADS_UF_WORKSTATION_TRUST_ACCOUNT: int +ADS_UF_SERVER_TRUST_ACCOUNT: int +ADS_UF_DONT_EXPIRE_PASSWD: int +ADS_UF_MNS_LOGON_ACCOUNT: int +ADS_UF_SMARTCARD_REQUIRED: int +ADS_UF_TRUSTED_FOR_DELEGATION: int +ADS_UF_NOT_DELEGATED: int +ADS_UF_USE_DES_KEY_ONLY: int +ADS_UF_DONT_REQUIRE_PREAUTH: int +ADS_UF_PASSWORD_EXPIRED: int +ADS_UF_TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION: int +ADS_RIGHT_DELETE: int +ADS_RIGHT_READ_CONTROL: int +ADS_RIGHT_WRITE_DAC: int +ADS_RIGHT_WRITE_OWNER: int +ADS_RIGHT_SYNCHRONIZE: int +ADS_RIGHT_ACCESS_SYSTEM_SECURITY: int +ADS_RIGHT_GENERIC_READ: int +ADS_RIGHT_GENERIC_WRITE: int +ADS_RIGHT_GENERIC_EXECUTE: int +ADS_RIGHT_GENERIC_ALL: int +ADS_RIGHT_DS_CREATE_CHILD: int +ADS_RIGHT_DS_DELETE_CHILD: int +ADS_RIGHT_ACTRL_DS_LIST: int +ADS_RIGHT_DS_SELF: int +ADS_RIGHT_DS_READ_PROP: int +ADS_RIGHT_DS_WRITE_PROP: int +ADS_RIGHT_DS_DELETE_TREE: int +ADS_RIGHT_DS_LIST_OBJECT: int +ADS_RIGHT_DS_CONTROL_ACCESS: 
int +ADS_ACETYPE_ACCESS_ALLOWED: int +ADS_ACETYPE_ACCESS_DENIED: int +ADS_ACETYPE_SYSTEM_AUDIT: int +ADS_ACETYPE_ACCESS_ALLOWED_OBJECT: int +ADS_ACETYPE_ACCESS_DENIED_OBJECT: int +ADS_ACETYPE_SYSTEM_AUDIT_OBJECT: int +ADS_ACETYPE_SYSTEM_ALARM_OBJECT: int +ADS_ACETYPE_ACCESS_ALLOWED_CALLBACK: int +ADS_ACETYPE_ACCESS_DENIED_CALLBACK: int +ADS_ACETYPE_ACCESS_ALLOWED_CALLBACK_OBJECT: int +ADS_ACETYPE_ACCESS_DENIED_CALLBACK_OBJECT: int +ADS_ACETYPE_SYSTEM_AUDIT_CALLBACK: int +ADS_ACETYPE_SYSTEM_ALARM_CALLBACK: int +ADS_ACETYPE_SYSTEM_AUDIT_CALLBACK_OBJECT: int +ADS_ACETYPE_SYSTEM_ALARM_CALLBACK_OBJECT: int +ADS_ACEFLAG_INHERIT_ACE: int +ADS_ACEFLAG_NO_PROPAGATE_INHERIT_ACE: int +ADS_ACEFLAG_INHERIT_ONLY_ACE: int +ADS_ACEFLAG_INHERITED_ACE: int +ADS_ACEFLAG_VALID_INHERIT_FLAGS: int +ADS_ACEFLAG_SUCCESSFUL_ACCESS: int +ADS_ACEFLAG_FAILED_ACCESS: int +ADS_FLAG_OBJECT_TYPE_PRESENT: int +ADS_FLAG_INHERITED_OBJECT_TYPE_PRESENT: int +ADS_SD_CONTROL_SE_OWNER_DEFAULTED: int +ADS_SD_CONTROL_SE_GROUP_DEFAULTED: int +ADS_SD_CONTROL_SE_DACL_PRESENT: int +ADS_SD_CONTROL_SE_DACL_DEFAULTED: int +ADS_SD_CONTROL_SE_SACL_PRESENT: int +ADS_SD_CONTROL_SE_SACL_DEFAULTED: int +ADS_SD_CONTROL_SE_DACL_AUTO_INHERIT_REQ: int +ADS_SD_CONTROL_SE_SACL_AUTO_INHERIT_REQ: int +ADS_SD_CONTROL_SE_DACL_AUTO_INHERITED: int +ADS_SD_CONTROL_SE_SACL_AUTO_INHERITED: int +ADS_SD_CONTROL_SE_DACL_PROTECTED: int +ADS_SD_CONTROL_SE_SACL_PROTECTED: int +ADS_SD_CONTROL_SE_SELF_RELATIVE: int +ADS_SD_REVISION_DS: int +ADS_NAME_TYPE_1779: int +ADS_NAME_TYPE_CANONICAL: int +ADS_NAME_TYPE_NT4: int +ADS_NAME_TYPE_DISPLAY: int +ADS_NAME_TYPE_DOMAIN_SIMPLE: int +ADS_NAME_TYPE_ENTERPRISE_SIMPLE: int +ADS_NAME_TYPE_GUID: int +ADS_NAME_TYPE_UNKNOWN: int +ADS_NAME_TYPE_USER_PRINCIPAL_NAME: int +ADS_NAME_TYPE_CANONICAL_EX: int +ADS_NAME_TYPE_SERVICE_PRINCIPAL_NAME: int +ADS_NAME_TYPE_SID_OR_SID_HISTORY_NAME: int +ADS_NAME_INITTYPE_DOMAIN: int +ADS_NAME_INITTYPE_SERVER: int +ADS_NAME_INITTYPE_GC: int +ADS_OPTION_SERVERNAME: int +ADS_OPTION_REFERRALS: Incomplete +ADS_OPTION_PAGE_SIZE: Incomplete +ADS_OPTION_SECURITY_MASK: Incomplete +ADS_OPTION_MUTUAL_AUTH_STATUS: Incomplete +ADS_OPTION_QUOTA: Incomplete +ADS_OPTION_PASSWORD_PORTNUMBER: Incomplete +ADS_OPTION_PASSWORD_METHOD: Incomplete +ADS_SECURITY_INFO_OWNER: int +ADS_SECURITY_INFO_GROUP: int +ADS_SECURITY_INFO_DACL: int +ADS_SECURITY_INFO_SACL: int +ADS_SETTYPE_FULL: int +ADS_SETTYPE_PROVIDER: int +ADS_SETTYPE_SERVER: int +ADS_SETTYPE_DN: int +ADS_FORMAT_WINDOWS: int +ADS_FORMAT_WINDOWS_NO_SERVER: int +ADS_FORMAT_WINDOWS_DN: int +ADS_FORMAT_WINDOWS_PARENT: int +ADS_FORMAT_X500: int +ADS_FORMAT_X500_NO_SERVER: int +ADS_FORMAT_X500_DN: int +ADS_FORMAT_X500_PARENT: int +ADS_FORMAT_SERVER: int +ADS_FORMAT_PROVIDER: int +ADS_FORMAT_LEAF: int +ADS_DISPLAY_FULL: int +ADS_DISPLAY_VALUE_ONLY: int +ADS_ESCAPEDMODE_DEFAULT: int +ADS_ESCAPEDMODE_ON: int +ADS_ESCAPEDMODE_OFF: int +ADS_ESCAPEDMODE_OFF_EX: int +ADS_PATH_FILE: int +ADS_PATH_FILESHARE: int +ADS_PATH_REGISTRY: int +ADS_SD_FORMAT_IID: int +ADS_SD_FORMAT_RAW: int +ADS_SD_FORMAT_HEXSTRING: int +E_ADS_BAD_PATHNAME: Incomplete +E_ADS_INVALID_DOMAIN_OBJECT: Incomplete +E_ADS_INVALID_USER_OBJECT: Incomplete +E_ADS_INVALID_COMPUTER_OBJECT: Incomplete +E_ADS_UNKNOWN_OBJECT: Incomplete +E_ADS_PROPERTY_NOT_SET: Incomplete +E_ADS_PROPERTY_NOT_SUPPORTED: Incomplete +E_ADS_PROPERTY_INVALID: Incomplete +E_ADS_BAD_PARAMETER: Incomplete +E_ADS_OBJECT_UNBOUND: Incomplete +E_ADS_PROPERTY_NOT_MODIFIED: Incomplete +E_ADS_PROPERTY_MODIFIED: Incomplete 
+E_ADS_CANT_CONVERT_DATATYPE: Incomplete +E_ADS_PROPERTY_NOT_FOUND: Incomplete +E_ADS_OBJECT_EXISTS: Incomplete +E_ADS_SCHEMA_VIOLATION: Incomplete +E_ADS_COLUMN_NOT_SET: Incomplete +S_ADS_ERRORSOCCURRED: Incomplete +S_ADS_NOMORE_ROWS: Incomplete +S_ADS_NOMORE_COLUMNS: Incomplete +E_ADS_INVALID_FILTER: Incomplete +ADS_DEREF_NEVER: int +ADS_DEREF_SEARCHING: int +ADS_DEREF_FINDING: int +ADS_DEREF_ALWAYS: int +ADSIPROP_ASYNCHRONOUS: int +ADSIPROP_DEREF_ALIASES: int +ADSIPROP_SIZE_LIMIT: int +ADSIPROP_TIME_LIMIT: int +ADSIPROP_ATTRIBTYPES_ONLY: int +ADSIPROP_SEARCH_SCOPE: int +ADSIPROP_TIMEOUT: int +ADSIPROP_PAGESIZE: int +ADSIPROP_PAGED_TIME_LIMIT: int +ADSIPROP_CHASE_REFERRALS: int +ADSIPROP_SORT_ON: int +ADSIPROP_CACHE_RESULTS: int +ADSIPROP_ADSIFLAG: int +ADSI_DIALECT_LDAP: int +ADSI_DIALECT_SQL: int +ADS_CHASE_REFERRALS_NEVER: int +ADS_CHASE_REFERRALS_SUBORDINATE: int +ADS_CHASE_REFERRALS_EXTERNAL: int +ADS_CHASE_REFERRALS_ALWAYS: Incomplete +DSOP_SCOPE_TYPE_TARGET_COMPUTER: int +DSOP_SCOPE_TYPE_UPLEVEL_JOINED_DOMAIN: int +DSOP_SCOPE_TYPE_DOWNLEVEL_JOINED_DOMAIN: int +DSOP_SCOPE_TYPE_ENTERPRISE_DOMAIN: int +DSOP_SCOPE_TYPE_GLOBAL_CATALOG: int +DSOP_SCOPE_TYPE_EXTERNAL_UPLEVEL_DOMAIN: int +DSOP_SCOPE_TYPE_EXTERNAL_DOWNLEVEL_DOMAIN: int +DSOP_SCOPE_TYPE_WORKGROUP: int +DSOP_SCOPE_TYPE_USER_ENTERED_UPLEVEL_SCOPE: int +DSOP_SCOPE_TYPE_USER_ENTERED_DOWNLEVEL_SCOPE: int +DSOP_SCOPE_FLAG_STARTING_SCOPE: int +DSOP_SCOPE_FLAG_WANT_PROVIDER_WINNT: int +DSOP_SCOPE_FLAG_WANT_PROVIDER_LDAP: int +DSOP_SCOPE_FLAG_WANT_PROVIDER_GC: int +DSOP_SCOPE_FLAG_WANT_SID_PATH: int +DSOP_SCOPE_FLAG_WANT_DOWNLEVEL_BUILTIN_PATH: int +DSOP_SCOPE_FLAG_DEFAULT_FILTER_USERS: int +DSOP_SCOPE_FLAG_DEFAULT_FILTER_GROUPS: int +DSOP_SCOPE_FLAG_DEFAULT_FILTER_COMPUTERS: int +DSOP_SCOPE_FLAG_DEFAULT_FILTER_CONTACTS: int +DSOP_FILTER_INCLUDE_ADVANCED_VIEW: int +DSOP_FILTER_USERS: int +DSOP_FILTER_BUILTIN_GROUPS: int +DSOP_FILTER_WELL_KNOWN_PRINCIPALS: int +DSOP_FILTER_UNIVERSAL_GROUPS_DL: int +DSOP_FILTER_UNIVERSAL_GROUPS_SE: int +DSOP_FILTER_GLOBAL_GROUPS_DL: int +DSOP_FILTER_GLOBAL_GROUPS_SE: int +DSOP_FILTER_DOMAIN_LOCAL_GROUPS_DL: int +DSOP_FILTER_DOMAIN_LOCAL_GROUPS_SE: int +DSOP_FILTER_CONTACTS: int +DSOP_FILTER_COMPUTERS: int +DSOP_DOWNLEVEL_FILTER_USERS: int +DSOP_DOWNLEVEL_FILTER_LOCAL_GROUPS: int +DSOP_DOWNLEVEL_FILTER_GLOBAL_GROUPS: int +DSOP_DOWNLEVEL_FILTER_COMPUTERS: int +DSOP_DOWNLEVEL_FILTER_WORLD: int +DSOP_DOWNLEVEL_FILTER_AUTHENTICATED_USER: int +DSOP_DOWNLEVEL_FILTER_ANONYMOUS: int +DSOP_DOWNLEVEL_FILTER_BATCH: int +DSOP_DOWNLEVEL_FILTER_CREATOR_OWNER: int +DSOP_DOWNLEVEL_FILTER_CREATOR_GROUP: int +DSOP_DOWNLEVEL_FILTER_DIALUP: int +DSOP_DOWNLEVEL_FILTER_INTERACTIVE: int +DSOP_DOWNLEVEL_FILTER_NETWORK: int +DSOP_DOWNLEVEL_FILTER_SERVICE: int +DSOP_DOWNLEVEL_FILTER_SYSTEM: int +DSOP_DOWNLEVEL_FILTER_EXCLUDE_BUILTIN_GROUPS: int +DSOP_DOWNLEVEL_FILTER_TERMINAL_SERVER: int +DSOP_DOWNLEVEL_FILTER_ALL_WELLKNOWN_SIDS: int +DSOP_DOWNLEVEL_FILTER_LOCAL_SERVICE: int +DSOP_DOWNLEVEL_FILTER_NETWORK_SERVICE: int +DSOP_DOWNLEVEL_FILTER_REMOTE_LOGON: int +DSOP_FLAG_MULTISELECT: int +DSOP_FLAG_SKIP_TARGET_COMPUTER_DC_CHECK: int +CFSTR_DSOP_DS_SELECTION_LIST: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/authorization/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/authorization/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/authorization/authorization.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/authorization/authorization.pyi new file mode 100644 index 00000000..0a988184 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/authorization/authorization.pyi @@ -0,0 +1,5 @@ +import _win32typing + +def EditSecurity(*args, **kwargs): ... # incomplete + +IID_ISecurityInformation: _win32typing.PyIID diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axcontrol/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axcontrol/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axcontrol/axcontrol.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axcontrol/axcontrol.pyi new file mode 100644 index 00000000..c4f346bc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axcontrol/axcontrol.pyi @@ -0,0 +1,60 @@ +import _win32typing + +def OleCreate( + clsid, + clsid1, + obCLSID: _win32typing.PyIID, + obIID: _win32typing.PyIID, + renderopt, + obFormatEtc, + obOleClientSite: _win32typing.PyIOleClientSite, + obStorage: _win32typing.PyIStorage, +) -> _win32typing.PyIOleObject: ... +def OleLoadPicture( + stream: _win32typing.PyIStream, size, runMode, arg: _win32typing.PyIID, arg1: _win32typing.PyIID +) -> _win32typing.PyIUnknown: ... +def OleLoadPicturePath( + url_or_path: str, unk, reserved, clr, arg: _win32typing.PyIID, arg1: _win32typing.PyIID +) -> _win32typing.PyIUnknown: ... +def OleSetContainedObject(unk: _win32typing.PyIUnknown, fContained) -> None: ... +def OleTranslateAccelerator(frame: _win32typing.PyIOleInPlaceFrame, frame_info, msg: _win32typing.PyMSG) -> None: ... 
+ +EMBDHLP_CREATENOW: int +EMBDHLP_DELAYCREATE: int +EMBDHLP_INPROC_HANDLER: int +EMBDHLP_INPROC_SERVER: int +OLECLOSE_NOSAVE: int +OLECLOSE_PROMPTSAVE: int +OLECLOSE_SAVEIFDIRTY: int +OLECMDF_ENABLED: int +OLECMDF_LATCHED: int +OLECMDF_NINCHED: int +OLECMDF_SUPPORTED: int +OLECMDTEXTF_NAME: int +OLECMDTEXTF_NONE: int +OLECMDTEXTF_STATUS: int +OLECREATE_LEAVERUNNING: int +OLEIVERB_DISCARDUNDOSTATE: int +OLEIVERB_HIDE: int +OLEIVERB_INPLACEACTIVATE: int +OLEIVERB_OPEN: int +OLEIVERB_PRIMARY: int +OLEIVERB_SHOW: int +OLEIVERB_UIACTIVATE: int +IID_IObjectWithSite: _win32typing.PyIID +IID_IOleClientSite: _win32typing.PyIID +IID_IOleCommandTarget: _win32typing.PyIID +IID_IOleControl: _win32typing.PyIID +IID_IOleControlSite: _win32typing.PyIID +IID_IOleInPlaceActiveObject: _win32typing.PyIID +IID_IOleInPlaceFrame: _win32typing.PyIID +IID_IOleInPlaceObject: _win32typing.PyIID +IID_IOleInPlaceSite: _win32typing.PyIID +IID_IOleInPlaceSiteEx: _win32typing.PyIID +IID_IOleInPlaceSiteWindowless: _win32typing.PyIID +IID_IOleInPlaceUIWindow: _win32typing.PyIID +IID_IOleLink: _win32typing.PyIID +IID_IOleObject: _win32typing.PyIID +IID_ISpecifyPropertyPages: _win32typing.PyIID +IID_IViewObject: _win32typing.PyIID +IID_IViewObject2: _win32typing.PyIID diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/adb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/adb.pyi new file mode 100644 index 00000000..70d39be2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/adb.pyi @@ -0,0 +1,71 @@ +import bdb +from _typeshed import Incomplete + +from win32com.axdebug.util import trace +from win32com.server.util import unwrap as unwrap +from win32comext.axdebug import gateways as gateways + +def fnull(*args) -> None: ... + +debugging: int +traceenter = fnull +tracev = fnull +traceenter = trace +tracev = trace + +class OutputReflector: + writefunc: Incomplete + file: Incomplete + def __init__(self, file, writefunc) -> None: ... + def __getattr__(self, name: str): ... + def write(self, message) -> None: ... + +g_adb: Incomplete + +def OnSetBreakPoint(codeContext, breakPointState, lineNo) -> None: ... + +class Adb(bdb.Bdb, gateways.RemoteDebugApplicationEvents): + debugApplication: Incomplete + debuggingThread: Incomplete + debuggingThreadStateHandle: Incomplete + stackSnifferCookie: Incomplete + codeContainerProvider: Incomplete + breakFlags: Incomplete + breakReason: Incomplete + appDebugger: Incomplete + appEventConnection: Incomplete + logicalbotframe: Incomplete + currentframe: Incomplete + recursiveData: Incomplete + def canonic(self, fname): ... + def reset(self) -> None: ... + def stop_here(self, frame): ... + def break_here(self, frame): ... + def break_anywhere(self, frame): ... + def dispatch_return(self, frame, arg): ... + def dispatch_line(self, frame): ... + def dispatch_call(self, frame, arg): ... + def trace_dispatch(self, frame, event, arg): ... + def user_line(self, frame) -> None: ... + def user_return(self, frame, return_value) -> None: ... + def user_exception(self, frame, exc_info) -> None: ... + def set_trace(self) -> None: ... 
# type: ignore[override] + def CloseApp(self) -> None: ... + stackSniffer: Incomplete + def AttachApp(self, debugApplication, codeContainerProvider) -> None: ... + def ResetAXDebugging(self) -> None: ... + botframe: Incomplete + stopframe: Incomplete + def SetupAXDebugging(self, baseFrame: Incomplete | None = ..., userFrame: Incomplete | None = ...) -> None: ... + def OnConnectDebugger(self, appDebugger): ... + def OnDisconnectDebugger(self) -> None: ... + def OnSetName(self, name) -> None: ... + def OnDebugOutput(self, string) -> None: ... + def OnClose(self) -> None: ... + def OnEnterBreakPoint(self, rdat) -> None: ... + def OnLeaveBreakPoint(self, rdat) -> None: ... + def OnCreateThread(self, rdat) -> None: ... + def OnDestroyThread(self, rdat) -> None: ... + def OnBreakFlagChange(self, abf, rdat) -> None: ... + +def Debugger(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/axdebug.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/axdebug.pyi new file mode 100644 index 00000000..2ecd3063 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/axdebug.pyi @@ -0,0 +1,124 @@ +# Can't generate with stubgen because: +# "ImportError: DLL load failed while importing axdebug: The specified module could not be found." +# https://github.com/python/mypy/issues/13822 +import _win32typing + +APPBREAKFLAG_DEBUGGER_BLOCK: int +APPBREAKFLAG_DEBUGGER_HALT: int +APPBREAKFLAG_STEP: int +BREAKPOINT_DELETED: int +BREAKPOINT_DISABLED: int +BREAKPOINT_ENABLED: int +BREAKREASON_BREAKPOINT: int +BREAKREASON_DEBUGGER_BLOCK: int +BREAKREASON_DEBUGGER_HALT: int +BREAKREASON_ERROR: int +BREAKREASON_HOST_INITIATED: int +BREAKREASON_LANGUAGE_INITIATED: int +BREAKREASON_STEP: int +BREAKRESUMEACTION_ABORT: int +BREAKRESUMEACTION_CONTINUE: int +BREAKRESUMEACTION_STEP_INTO: int +BREAKRESUMEACTION_STEP_OUT: int +BREAKRESUMEACTION_STEP_OVER: int +CLSID_DefaultDebugSessionProvider: int +CLSID_MachineDebugManager: int +CLSID_ProcessDebugManager: int +DBGPROP_ATTRIB_ACCESS_FINAL: int +DBGPROP_ATTRIB_ACCESS_PRIVATE: int +DBGPROP_ATTRIB_ACCESS_PROTECTED: int +DBGPROP_ATTRIB_ACCESS_PUBLIC: int +DBGPROP_ATTRIB_HAS_EXTENDED_ATTRIBS: int +DBGPROP_ATTRIB_NO_ATTRIB: int +DBGPROP_ATTRIB_STORAGE_FIELD: int +DBGPROP_ATTRIB_STORAGE_GLOBAL: int +DBGPROP_ATTRIB_STORAGE_STATIC: int +DBGPROP_ATTRIB_STORAGE_VIRTUAL: int +DBGPROP_ATTRIB_TYPE_IS_CONSTANT: int +DBGPROP_ATTRIB_TYPE_IS_SYNCHRONIZED: int +DBGPROP_ATTRIB_TYPE_IS_VOLATILE: int +DBGPROP_ATTRIB_VALUE_IS_EXPANDABLE: int +DBGPROP_ATTRIB_VALUE_IS_INVALID: int +DBGPROP_ATTRIB_VALUE_READONLY: int +DBGPROP_INFO_ATTRIBUTES: int +DBGPROP_INFO_AUTOEXPAND: int +DBGPROP_INFO_DEBUGPROP: int +DBGPROP_INFO_FULLNAME: int +DBGPROP_INFO_NAME: int +DBGPROP_INFO_TYPE: int +DBGPROP_INFO_VALUE: int +DEBUG_TEXT_ALLOWBREAKPOINTS: int +DEBUG_TEXT_ISEXPRESSION: int +DOCUMENTNAMETYPE_APPNODE: int +DOCUMENTNAMETYPE_FILE_TAIL: int +DOCUMENTNAMETYPE_TITLE: int +DOCUMENTNAMETYPE_URL: int +ERRORRESUMEACTION_AbortCallAndReturnErrorToCaller: int +ERRORRESUMEACTION_ReexecuteErrorStatement: int +ERRORRESUMEACTION_SkipErrorStatement: int +EX_DBGPROP_INFO_DEBUGEXTPROP: int +EX_DBGPROP_INFO_ID: int +EX_DBGPROP_INFO_LOCKBYTES: int +EX_DBGPROP_INFO_NTYPE: int +EX_DBGPROP_INFO_NVALUE: int +SOURCETEXT_ATTR_COMMENT: int +SOURCETEXT_ATTR_FUNCTION_START: int +SOURCETEXT_ATTR_KEYWORD: int +SOURCETEXT_ATTR_NONSOURCE: int 
+SOURCETEXT_ATTR_NUMBER: int +SOURCETEXT_ATTR_OPERATOR: int +SOURCETEXT_ATTR_STRING: int +TEXT_DOC_ATTR_READONLY: int +APPBREAKFLAG_IN_BREAKPOINT: int +APPBREAKFLAG_STEPTYPE_BYTECODE: int +APPBREAKFLAG_STEPTYPE_MACHINE: int +APPBREAKFLAG_STEPTYPE_MASK: int +APPBREAKFLAG_STEPTYPE_SOURCE: int + +def GetStackAddress(*args, **kwargs): ... # incomplete +def GetThreadStateHandle(*args, **kwargs): ... # incomplete + +IID_IActiveScriptDebug: _win32typing.PyIID +IID_IActiveScriptErrorDebug: _win32typing.PyIID +IID_IActiveScriptSiteDebug: _win32typing.PyIID +IID_IApplicationDebugger: _win32typing.PyIID +IID_IDebugApplication: _win32typing.PyIID +IID_IDebugApplicationNode: _win32typing.PyIID +IID_IDebugApplicationNodeEvents: _win32typing.PyIID +IID_IDebugApplicationThread: _win32typing.PyIID +IID_IDebugCodeContext: _win32typing.PyIID +IID_IDebugDocument: _win32typing.PyIID +IID_IDebugDocumentContext: _win32typing.PyIID +IID_IDebugDocumentHelper: _win32typing.PyIID +IID_IDebugDocumentHost: _win32typing.PyIID +IID_IDebugDocumentInfo: _win32typing.PyIID +IID_IDebugDocumentProvider: _win32typing.PyIID +IID_IDebugDocumentText: _win32typing.PyIID +IID_IDebugDocumentTextAuthor: _win32typing.PyIID +IID_IDebugDocumentTextEvents: _win32typing.PyIID +IID_IDebugDocumentTextExternalAuthor: _win32typing.PyIID +IID_IDebugExpression: _win32typing.PyIID +IID_IDebugExpressionCallBack: _win32typing.PyIID +IID_IDebugExpressionContext: _win32typing.PyIID +IID_IDebugProperty: _win32typing.PyIID +IID_IDebugSessionProvider: _win32typing.PyIID +IID_IDebugStackFrame: _win32typing.PyIID +IID_IDebugStackFrameSniffer: _win32typing.PyIID +IID_IDebugStackFrameSnifferEx: _win32typing.PyIID +IID_IDebugSyncOperation: _win32typing.PyIID +IID_IEnumDebugApplicationNodes: _win32typing.PyIID +IID_IEnumDebugCodeContexts: _win32typing.PyIID +IID_IEnumDebugExpressionContexts: _win32typing.PyIID +IID_IEnumDebugPropertyInfo: _win32typing.PyIID +IID_IEnumDebugStackFrames: _win32typing.PyIID +IID_IEnumRemoteDebugApplicationThreads: _win32typing.PyIID +IID_IEnumRemoteDebugApplications: _win32typing.PyIID +IID_IMachineDebugManager: _win32typing.PyIID +IID_IMachineDebugManagerEvents: _win32typing.PyIID +IID_IProcessDebugManager: _win32typing.PyIID +IID_IProvideExpressionContexts: _win32typing.PyIID +IID_IRemoteDebugApplication: _win32typing.PyIID +IID_IRemoteDebugApplicationEvents: _win32typing.PyIID +IID_IRemoteDebugApplicationThread: _win32typing.PyIID + +def SetThreadStateTrace(*args, **kwargs): ... # incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/codecontainer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/codecontainer.pyi new file mode 100644 index 00000000..e42c470b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/codecontainer.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +from win32comext.axdebug import contexts as contexts +from win32comext.axdebug.util import RaiseNotImpl as RaiseNotImpl + +class SourceCodeContainer: + sourceContext: Incomplete + text: Incomplete + nextLineNo: int + fileName: Incomplete + codeContexts: Incomplete + site: Incomplete + startLineNumber: Incomplete + debugDocument: Incomplete + def __init__( + self, + text, + fileName: str = ..., + sourceContext: int = ..., + startLineNumber: int = ..., + site: Incomplete | None = ..., + debugDocument: Incomplete | None = ..., + ) -> None: ... 
+ def GetText(self): ... + def GetName(self, dnt) -> None: ... + def GetFileName(self): ... + def GetPositionOfLine(self, cLineNumber): ... + def GetLineOfPosition(self, charPos): ... + def GetNextLine(self): ... + def GetLine(self, num): ... + def GetNumChars(self): ... + def GetNumLines(self): ... + lastPos: int + attrs: Incomplete + def GetSyntaxColorAttributes(self): ... + def GetCodeContextAtPosition(self, charPos): ... + +class SourceModuleContainer(SourceCodeContainer): + module: Incomplete + def __init__(self, module) -> None: ... + text: Incomplete + def GetText(self): ... + def GetName(self, dnt): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/contexts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/contexts.pyi new file mode 100644 index 00000000..4561b453 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/contexts.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +from win32comext.axdebug import adb as adb, gateways +from win32comext.axdebug.util import trace as trace + +class DebugCodeContext(gateways.DebugCodeContext, gateways.DebugDocumentContext): + debugSite: Incomplete + offset: Incomplete + length: Incomplete + breakPointState: int + lineno: Incomplete + codeContainer: Incomplete + def __init__(self, lineNo, charPos, len, codeContainer, debugSite) -> None: ... + def GetDocumentContext(self): ... + def SetBreakPoint(self, bps) -> None: ... + def GetDocument(self): ... + def EnumCodeContexts(self): ... + +class EnumDebugCodeContexts(gateways.EnumDebugCodeContexts): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/debugger.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/debugger.pyi new file mode 100644 index 00000000..f8d294be --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/debugger.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +from win32com.axdebug import contexts as contexts, documents, gateways as gateways +from win32com.axdebug.util import trace as trace +from win32com.axscript import axscript as axscript + +currentDebugger: Incomplete + +class ModuleTreeNode: + moduleName: Incomplete + module: Incomplete + realNode: Incomplete + cont: Incomplete + def __init__(self, module) -> None: ... + def Attach(self, parentRealNode) -> None: ... + def Close(self) -> None: ... + +def BuildModule(module, built_nodes, rootNode, create_node_fn, create_node_args) -> None: ... +def RefreshAllModules(builtItems, rootNode, create_node, create_node_args) -> None: ... + +class CodeContainerProvider(documents.CodeContainerProvider): + axdebugger: Incomplete + currentNumModules: Incomplete + nodes: Incomplete + def __init__(self, axdebugger) -> None: ... + def FromFileName(self, fname): ... + def Close(self) -> None: ... + +class OriginalInterfaceMaker: + cookie: Incomplete + def MakeInterfaces(self, pdm): ... + def CloseInterfaces(self, pdm) -> None: ... + +class SimpleHostStyleInterfaceMaker: + def MakeInterfaces(self, pdm): ... + def CloseInterfaces(self, pdm) -> None: ... 
+ +class AXDebugger: + pydebugger: Incomplete + pdm: Incomplete + interfaceMaker: Incomplete + expressionCookie: Incomplete + def __init__(self, interfaceMaker: Incomplete | None = ..., processName: Incomplete | None = ...) -> None: ... + def Break(self) -> None: ... + app: Incomplete + root: Incomplete + def Close(self) -> None: ... + def RefreshAllModules(self, nodes, containerProvider) -> None: ... + def CreateApplicationNode(self, node, containerProvider): ... + +def Break() -> None: ... + +brk = Break +set_trace = Break + +def dosomethingelse() -> None: ... +def dosomething() -> None: ... +def test() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/documents.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/documents.pyi new file mode 100644 index 00000000..7c1516b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/documents.pyi @@ -0,0 +1,34 @@ +from _typeshed import Incomplete + +from win32com.server.exception import Exception as Exception +from win32comext.axdebug import codecontainer as codecontainer, contexts as contexts, gateways +from win32comext.axdebug.util import RaiseNotImpl as RaiseNotImpl, trace as trace + +def GetGoodFileName(fname): ... + +class DebugDocumentProvider(gateways.DebugDocumentProvider): + doc: Incomplete + def __init__(self, doc) -> None: ... + def GetName(self, dnt): ... + def GetDocumentClassId(self): ... + def GetDocument(self): ... + +# error: Cannot determine consistent method resolution order (MRO) for "DebugDocumentText" +# pyright doesn't have a specific error code for MRO error! +class DebugDocumentText(gateways.DebugDocumentInfo, gateways.DebugDocumentText, gateways.DebugDocument): # type: ignore[misc] # pyright: ignore + codeContainer: Incomplete + def __init__(self, codeContainer) -> None: ... + def GetName(self, dnt): ... + def GetDocumentClassId(self): ... + def GetSize(self): ... + def GetPositionOfLine(self, cLineNumber): ... + def GetLineOfPosition(self, charPos): ... + def GetText(self, charPos, maxChars, wantAttr): ... + def GetPositionOfContext(self, context): ... + def GetContextOfPosition(self, charPos, maxChars): ... + +class CodeContainerProvider: + ccsAndNodes: Incomplete + def AddCodeContainer(self, cc, node: Incomplete | None = ...) -> None: ... + def FromFileName(self, fname): ... + def Close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/expressions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/expressions.pyi new file mode 100644 index 00000000..fac92805 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/expressions.pyi @@ -0,0 +1,69 @@ +from _typeshed import Incomplete + +from win32com.server.exception import COMException as COMException +from win32com.server.util import ListEnumeratorGateway +from win32comext.axdebug import gateways +from win32comext.axdebug.util import RaiseNotImpl as RaiseNotImpl + +def MakeNiceString(ob): ... + +class ProvideExpressionContexts(gateways.ProvideExpressionContexts): ... + +class ExpressionContext(gateways.DebugExpressionContext): + frame: Incomplete + def __init__(self, frame) -> None: ... + def ParseLanguageText(self, code, radix, delim, flags): ... 
+ def GetLanguageInfo(self): ... + +class Expression(gateways.DebugExpression): + callback: Incomplete + frame: Incomplete + code: Incomplete + radix: Incomplete + delim: Incomplete + flags: Incomplete + isComplete: int + result: Incomplete + hresult: Incomplete + def __init__(self, frame, code, radix, delim, flags) -> None: ... + def Start(self, callback): ... + def Abort(self) -> None: ... + def QueryIsComplete(self): ... + def GetResultAsString(self): ... + def GetResultAsDebugProperty(self): ... + +def MakeEnumDebugProperty(object, dwFieldSpec, nRadix, iid, stackFrame: Incomplete | None = ...): ... +def GetPropertyInfo( + obname, + obvalue, + dwFieldSpec, + nRadix, + hresult: int = ..., + dictionary: Incomplete | None = ..., + stackFrame: Incomplete | None = ..., +): ... + +class EnumDebugPropertyInfo(ListEnumeratorGateway): + def GetCount(self): ... + +class DebugProperty: + name: Incomplete + value: Incomplete + parent: Incomplete + hresult: Incomplete + dictionary: Incomplete + stackFrame: Incomplete + def __init__( + self, + name, + value, + parent: Incomplete | None = ..., + hresult: int = ..., + dictionary: Incomplete | None = ..., + stackFrame: Incomplete | None = ..., + ) -> None: ... + def GetPropertyInfo(self, dwFieldSpec, nRadix): ... + def GetExtendedInfo(self) -> None: ... + def SetValueAsString(self, value, radix) -> None: ... + def EnumMembers(self, dwFieldSpec, nRadix, iid): ... + def GetParent(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/gateways.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/gateways.pyi new file mode 100644 index 00000000..8ed3bb31 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/gateways.pyi @@ -0,0 +1,114 @@ +from _typeshed import Incomplete + +from win32com.server.util import ListEnumeratorGateway + +class EnumDebugCodeContexts(ListEnumeratorGateway): ... +class EnumDebugStackFrames(ListEnumeratorGateway): ... +class EnumDebugApplicationNodes(ListEnumeratorGateway): ... +class EnumRemoteDebugApplications(ListEnumeratorGateway): ... +class EnumRemoteDebugApplicationThreads(ListEnumeratorGateway): ... + +class DebugDocumentInfo: + def GetName(self, dnt) -> None: ... + def GetDocumentClassId(self) -> None: ... + +class DebugDocumentProvider(DebugDocumentInfo): + def GetDocument(self) -> None: ... + +class DebugApplicationNode(DebugDocumentProvider): + def EnumChildren(self) -> None: ... + def GetParent(self) -> None: ... + def SetDocumentProvider(self, pddp) -> None: ... + def Close(self) -> None: ... + def Attach(self, parent) -> None: ... + def Detach(self) -> None: ... + +class DebugApplicationNodeEvents: + def onAddChild(self, child) -> None: ... + def onRemoveChild(self, child) -> None: ... + def onDetach(self) -> None: ... + def onAttach(self, parent) -> None: ... + +class DebugDocument(DebugDocumentInfo): ... + +class DebugDocumentText(DebugDocument): + def GetDocumentAttributes(self) -> None: ... + def GetSize(self) -> None: ... + def GetPositionOfLine(self, cLineNumber) -> None: ... + def GetLineOfPosition(self, charPos) -> None: ... + def GetText(self, charPos, maxChars, wantAttr) -> None: ... + def GetPositionOfContext(self, debugDocumentContext) -> None: ... + def GetContextOfPosition(self, charPos, maxChars) -> None: ... + +class DebugDocumentTextExternalAuthor: + def GetPathName(self) -> None: ... 
+ def GetFileName(self) -> None: ... + def NotifyChanged(self) -> None: ... + +class DebugDocumentTextEvents: + def onDestroy(self) -> None: ... + def onInsertText(self, cCharacterPosition, cNumToInsert) -> None: ... + def onRemoveText(self, cCharacterPosition, cNumToRemove) -> None: ... + def onReplaceText(self, cCharacterPosition, cNumToReplace) -> None: ... + def onUpdateTextAttributes(self, cCharacterPosition, cNumToUpdate) -> None: ... + def onUpdateDocumentAttributes(self, textdocattr) -> None: ... + +class DebugDocumentContext: + def GetDocument(self) -> None: ... + def EnumCodeContexts(self) -> None: ... + +class DebugCodeContext: + def GetDocumentContext(self) -> None: ... + def SetBreakPoint(self, bps) -> None: ... + +class DebugStackFrame: + def GetCodeContext(self) -> None: ... + def GetDescriptionString(self, fLong) -> None: ... + def GetLanguageString(self) -> None: ... + def GetThread(self) -> None: ... + def GetDebugProperty(self) -> None: ... + +class DebugDocumentHost: + def GetDeferredText(self, dwTextStartCookie, maxChars, bWantAttr) -> None: ... + def GetScriptTextAttributes(self, codeText, delimterText, flags) -> None: ... + def OnCreateDocumentContext(self) -> None: ... + def GetPathName(self) -> None: ... + def GetFileName(self) -> None: ... + def NotifyChanged(self) -> None: ... + +class DebugDocumentTextConnectServer: + cookieNo: int + connections: Incomplete + def EnumConnections(self) -> None: ... + def GetConnectionInterface(self) -> None: ... + def GetConnectionPointContainer(self): ... + def Advise(self, pUnk): ... + def Unadvise(self, cookie): ... + def EnumConnectionPoints(self) -> None: ... + def FindConnectionPoint(self, iid): ... + +class RemoteDebugApplicationEvents: + def OnConnectDebugger(self, appDebugger) -> None: ... + def OnDisconnectDebugger(self) -> None: ... + def OnSetName(self, name) -> None: ... + def OnDebugOutput(self, string) -> None: ... + def OnClose(self) -> None: ... + def OnEnterBreakPoint(self, rdat) -> None: ... + def OnLeaveBreakPoint(self, rdat) -> None: ... + def OnCreateThread(self, rdat) -> None: ... + def OnDestroyThread(self, rdat) -> None: ... + def OnBreakFlagChange(self, abf, rdat) -> None: ... + +class DebugExpressionContext: + def ParseLanguageText(self, code, radix, delim, flags) -> None: ... + def GetLanguageInfo(self) -> None: ... + +class DebugExpression: + def Start(self, callback) -> None: ... + def Abort(self) -> None: ... + def QueryIsComplete(self) -> None: ... + def GetResultAsString(self) -> None: ... + def GetResultAsDebugProperty(self) -> None: ... + +class ProvideExpressionContexts: + def EnumExpressionContexts(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/stackframe.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/stackframe.pyi new file mode 100644 index 00000000..63832412 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/stackframe.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete + +from win32com.server.exception import COMException as COMException +from win32comext.axdebug import gateways +from win32comext.axdebug.util import RaiseNotImpl as RaiseNotImpl, trace as trace + +class EnumDebugStackFrames(gateways.EnumDebugStackFrames): + def __init__(self, debugger) -> None: ... + def Next(self, count): ... 
+ +class DebugStackFrame(gateways.DebugStackFrame): + frame: Incomplete + lineno: Incomplete + codeContainer: Incomplete + expressionContext: Incomplete + def __init__(self, frame, lineno, codeContainer) -> None: ... + def GetThread(self) -> None: ... + def GetCodeContext(self): ... + def GetDescriptionString(self, fLong): ... + def GetLanguageString(self, fLong): ... + def GetDebugProperty(self): ... + +class DebugStackFrameSniffer: + debugger: Incomplete + def __init__(self, debugger) -> None: ... + def EnumStackFrames(self): ... + +class StackFrameDebugProperty: + frame: Incomplete + def __init__(self, frame) -> None: ... + def GetPropertyInfo(self, dwFieldSpec, nRadix) -> None: ... + def GetExtendedInfo(self) -> None: ... + def SetValueAsString(self, value, radix) -> None: ... + def EnumMembers(self, dwFieldSpec, nRadix, iid): ... + def GetParent(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/util.pyi new file mode 100644 index 00000000..5ec04b35 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axdebug/util.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete + +import win32com.server.policy + +debugging: int + +def trace(*args) -> None: ... + +all_wrapped: Incomplete + +def RaiseNotImpl(who: Incomplete | None = ...) -> None: ... + +class Dispatcher(win32com.server.policy.DispatcherWin32trace): + def __init__(self, policyClass, object) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/asputil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/asputil.pyi new file mode 100644 index 00000000..37227df9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/asputil.pyi @@ -0,0 +1 @@ +def iif(cond, t, f): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/axscript.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/axscript.pyi new file mode 100644 index 00000000..8914786f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/axscript.pyi @@ -0,0 +1,52 @@ +import _win32typing + +CATID_ActiveScript: _win32typing.PyIID +CATID_ActiveScriptParse: _win32typing.PyIID +IID_IActiveScript: _win32typing.PyIID +IID_IActiveScriptError: _win32typing.PyIID +IID_IActiveScriptParse: _win32typing.PyIID +IID_IActiveScriptParseProcedure: _win32typing.PyIID +IID_IActiveScriptSite: _win32typing.PyIID +IID_IObjectSafety: _win32typing.PyIID +IID_IProvideMultipleClassInfo: _win32typing.PyIID +INTERFACESAFE_FOR_UNTRUSTED_CALLER: int +INTERFACESAFE_FOR_UNTRUSTED_DATA: int +INTERFACE_USES_DISPEX: int +INTERFACE_USES_SECURITY_MANAGER: int +MULTICLASSINFO_GETIIDPRIMARY: int +MULTICLASSINFO_GETIIDSOURCE: int +MULTICLASSINFO_GETNUMRESERVEDDISPIDS: int +MULTICLASSINFO_GETTYPEINFO: int +SCRIPTINFO_ALL_FLAGS: int +SCRIPTINFO_ITYPEINFO: int +SCRIPTINFO_IUNKNOWN: int +SCRIPTINTERRUPT_ALL_FLAGS: int +SCRIPTINTERRUPT_DEBUG: int +SCRIPTINTERRUPT_RAISEEXCEPTION: int +SCRIPTITEM_ALL_FLAGS: int +SCRIPTITEM_CODEONLY: int +SCRIPTITEM_GLOBALMEMBERS: int +SCRIPTITEM_ISPERSISTENT: int +SCRIPTITEM_ISSOURCE: int +SCRIPTITEM_ISVISIBLE: int +SCRIPTITEM_NOCODE: int +SCRIPTPROC_ALL_FLAGS: int +SCRIPTPROC_HOSTMANAGESSOURCE: int +SCRIPTPROC_IMPLICIT_PARENTS: int +SCRIPTPROC_IMPLICIT_THIS: int +SCRIPTSTATE_CLOSED: int +SCRIPTSTATE_CONNECTED: int +SCRIPTSTATE_DISCONNECTED: int +SCRIPTSTATE_INITIALIZED: int +SCRIPTSTATE_STARTED: int +SCRIPTSTATE_UNINITIALIZED: int +SCRIPTTEXT_ALL_FLAGS: int +SCRIPTTEXT_ISEXPRESSION: int +SCRIPTTEXT_ISPERSISTENT: int +SCRIPTTEXT_ISVISIBLE: int +SCRIPTTHREADSTATE_NOTINSCRIPT: int +SCRIPTTHREADSTATE_RUNNING: int +SCRIPTTYPELIB_ISCONTROL: int +SCRIPTTYPELIB_ISPERSISTENT: int +SCRIPT_E_REPORTED: int +TIFLAGS_EXTENDDISPATCHONLY: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/client/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/client/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/client/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/client/error.pyi new file mode 100644 index 00000000..24ce7a52 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/client/error.pyi @@ -0,0 +1,21 @@ +from win32com.server.exception import COMException + +debugging: int + +def FormatForAX(text): ... +def ExpandTabs(text): ... +def AddCR(text): ... + +class IActiveScriptError: + def GetSourceLineText(self): ... + def GetSourcePosition(self): ... + def GetExceptionInfo(self): ... + +class AXScriptException(COMException): + sourceContext: int + startLineNo: int + linetext: str + def __init__(self, site, codeBlock, exc_type, exc_value, exc_traceback) -> None: ... + def ExtractTracebackInfo(self, tb, site): ... + +def ProcessAXScriptException(scriptingSite, debugManager, exceptionInstance): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/client/pyscript.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/client/pyscript.pyi
new file mode 100644
index 00000000..548e95fe
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/client/pyscript.pyi
@@ -0,0 +1,3 @@
+# Necessary for mypy to not fail with:
+# 'error: Cannot find implementation or library stub for module named "win32comext.axscript.client.pyscript"'
+# in: .gateways, .stackframe, .expressions, .adb, .contexts, .codecontainer, .documents, .debugger
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/server/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/server/__init__.pyi
new file mode 100644
index 00000000..e69de29b
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/server/axsite.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/server/axsite.pyi
new file mode 100644
index 00000000..aa77ad51
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/server/axsite.pyi
@@ -0,0 +1,32 @@
+from _typeshed import Incomplete
+
+class AXEngine:
+    eScript: Incomplete
+    eParse: Incomplete
+    eSafety: Incomplete
+    def __init__(self, site, engine) -> None: ...
+    def __del__(self) -> None: ...
+    def GetScriptDispatch(self, name: Incomplete | None = ...): ...
+    def AddNamedItem(self, item, flags): ...
+    def AddCode(self, code, flags: int = ...) -> None: ...
+    def EvalCode(self, code): ...
+    def Start(self) -> None: ...
+    def Close(self) -> None: ...
+    def SetScriptState(self, state) -> None: ...
+
+IActiveScriptSite_methods: Incomplete
+
+class AXSite:
+    lcid: Incomplete
+    objModel: Incomplete
+    engine: Incomplete
+    def __init__(self, objModel=..., engine: Incomplete | None = ..., lcid: int = ...) -> None: ...
+    def AddEngine(self, engine): ...
+    def GetLCID(self): ...
+    def GetItemInfo(self, name, returnMask): ...
+    def GetDocVersionString(self): ...
+    def OnScriptTerminate(self, result, excepInfo) -> None: ...
+    def OnStateChange(self, state) -> None: ...
+    def OnScriptError(self, errorInterface): ...
+    def OnEnterScript(self) -> None: ...
+    def OnLeaveScript(self) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/server/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/server/error.pyi
new file mode 100644
index 00000000..deb38c3c
--- /dev/null
+++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/axscript/server/error.pyi
@@ -0,0 +1,6 @@
+from _typeshed import Incomplete
+
+class Exception:
+    activeScriptError: Incomplete
+    def __init__(self, activeScriptError) -> None: ...
+    def __getattr__(self, attr: str): ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/bits/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/bits/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/bits/bits.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/bits/bits.pyi new file mode 100644 index 00000000..8bc0c056 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/bits/bits.pyi @@ -0,0 +1,61 @@ +import _win32typing + +BG_AUTH_SCHEME_BASIC: int +BG_AUTH_SCHEME_DIGEST: int +BG_AUTH_SCHEME_NEGOTIATE: int +BG_AUTH_SCHEME_NTLM: int +BG_AUTH_SCHEME_PASSPORT: int +BG_AUTH_TARGET_PROXY: int +BG_AUTH_TARGET_SERVER: int +BG_CERT_STORE_LOCATION_CURRENT_SERVICE: int +BG_CERT_STORE_LOCATION_CURRENT_USER: int +BG_CERT_STORE_LOCATION_CURRENT_USER_GROUP_POLICY: int +BG_CERT_STORE_LOCATION_LOCAL_MACHINE: int +BG_CERT_STORE_LOCATION_LOCAL_MACHINE_ENTERPRISE: int +BG_CERT_STORE_LOCATION_LOCAL_MACHINE_GROUP_POLICY: int +BG_CERT_STORE_LOCATION_SERVICES: int +BG_CERT_STORE_LOCATION_USERS: int +BG_ERROR_CONTEXT_GENERAL_QUEUE_MANAGER: int +BG_ERROR_CONTEXT_GENERAL_TRANSPORT: int +BG_ERROR_CONTEXT_LOCAL_FILE: int +BG_ERROR_CONTEXT_NONE: int +BG_ERROR_CONTEXT_QUEUE_MANAGER_NOTIFICATION: int +BG_ERROR_CONTEXT_REMOTE_APPLICATION: int +BG_ERROR_CONTEXT_REMOTE_FILE: int +BG_ERROR_CONTEXT_UNKNOWN: int +BG_JOB_ENUM_ALL_USERS: int +BG_JOB_PRIORITY_FOREGROUND: int +BG_JOB_PRIORITY_HIGH: int +BG_JOB_PRIORITY_LOW: int +BG_JOB_PRIORITY_NORMAL: int +BG_JOB_PROXY_USAGE_AUTODETECT: int +BG_JOB_PROXY_USAGE_NO_PROXY: int +BG_JOB_PROXY_USAGE_OVERRIDE: int +BG_JOB_PROXY_USAGE_PRECONFIG: int +BG_JOB_STATE_ACKNOWLEDGED: int +BG_JOB_STATE_CANCELLED: int +BG_JOB_STATE_CONNECTING: int +BG_JOB_STATE_ERROR: int +BG_JOB_STATE_QUEUED: int +BG_JOB_STATE_SUSPENDED: int +BG_JOB_STATE_TRANSFERRED: int +BG_JOB_STATE_TRANSFERRING: int +BG_JOB_STATE_TRANSIENT_ERROR: int +BG_JOB_TYPE_DOWNLOAD: int +BG_JOB_TYPE_UPLOAD: int +BG_JOB_TYPE_UPLOAD_REPLY: int +BG_NOTIFY_DISABLE: int +BG_NOTIFY_JOB_ERROR: int +BG_NOTIFY_JOB_MODIFICATION: int +BG_NOTIFY_JOB_TRANSFERRED: int +CLSID_BackgroundCopyManager: _win32typing.PyIID +IID_IBackgroundCopyCallback: _win32typing.PyIID +IID_IBackgroundCopyError: _win32typing.PyIID +IID_IBackgroundCopyFile: _win32typing.PyIID +IID_IBackgroundCopyFile2: _win32typing.PyIID +IID_IBackgroundCopyJob: _win32typing.PyIID +IID_IBackgroundCopyJob2: _win32typing.PyIID +IID_IBackgroundCopyJob3: _win32typing.PyIID +IID_IBackgroundCopyManager: _win32typing.PyIID +IID_IEnumBackgroundCopyFiles: _win32typing.PyIID +IID_IEnumBackgroundCopyJobs: _win32typing.PyIID diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/directsound/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/directsound/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/directsound/directsound.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/directsound/directsound.pyi new file mode 100644 index 00000000..3fee3063 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/directsound/directsound.pyi @@ -0,0 +1,114 @@ +from _typeshed import Incomplete + +import _win32typing + +def DirectSoundCreate(guid: _win32typing.PyIID | None = ..., unk: Incomplete | None = ...) -> _win32typing.PyIUnknown: ... +def DirectSoundEnumerate(): ... +def DirectSoundCaptureCreate(guid: _win32typing.PyIID | None = ..., unk: Incomplete | None = ...) -> _win32typing.PyIUnknown: ... +def DirectSoundCaptureEnumerate(): ... +def DSCAPS() -> _win32typing.PyDSCAPS: ... +def DSBCAPS() -> _win32typing.PyDSBCAPS: ... +def DSCCAPS() -> _win32typing.PyDSCCAPS: ... +def DSCBCAPS() -> _win32typing.PyDSCBCAPS: ... +def DSBUFFERDESC() -> _win32typing.PyDSBUFFERDESC: ... +def DSCBUFFERDESC() -> _win32typing.PyDSCBUFFERDESC: ... + +DS3DMODE_DISABLE: int +DS3DMODE_HEADRELATIVE: int +DS3DMODE_NORMAL: int +DSBCAPS_CTRL3D: int +DSBCAPS_CTRLFREQUENCY: int +DSBCAPS_CTRLPAN: int +DSBCAPS_CTRLPOSITIONNOTIFY: int +DSBCAPS_CTRLVOLUME: int +DSBCAPS_GETCURRENTPOSITION2: int +DSBCAPS_GLOBALFOCUS: int +DSBCAPS_LOCHARDWARE: int +DSBCAPS_LOCSOFTWARE: int +DSBCAPS_MUTE3DATMAXDISTANCE: int +DSBCAPS_PRIMARYBUFFER: int +DSBCAPS_STATIC: int +DSBCAPS_STICKYFOCUS: int +DSBLOCK_ENTIREBUFFER: int +DSBLOCK_FROMWRITECURSOR: int +DSBPLAY_LOOPING: int +DSBSTATUS_BUFFERLOST: int +DSBSTATUS_LOOPING: int +DSBSTATUS_PLAYING: int +DSCAPS_CERTIFIED: int +DSCAPS_CONTINUOUSRATE: int +DSCAPS_EMULDRIVER: int +DSCAPS_PRIMARY16BIT: int +DSCAPS_PRIMARY8BIT: int +DSCAPS_PRIMARYMONO: int +DSCAPS_PRIMARYSTEREO: int +DSCAPS_SECONDARY16BIT: int +DSCAPS_SECONDARY8BIT: int +DSCAPS_SECONDARYMONO: int +DSCAPS_SECONDARYSTEREO: int +DSCBCAPS_WAVEMAPPED: int +DSCCAPS_EMULDRIVER: int +DSSCL_EXCLUSIVE: int +DSSCL_NORMAL: int +DSSCL_PRIORITY: int +DSSCL_WRITEPRIMARY: int +DSSPEAKER_GEOMETRY_MAX: int +DSSPEAKER_GEOMETRY_MIN: int +DSSPEAKER_GEOMETRY_NARROW: int +DSSPEAKER_GEOMETRY_WIDE: int +DSSPEAKER_HEADPHONE: int +DSSPEAKER_MONO: int +DSSPEAKER_QUAD: int +DSSPEAKER_STEREO: int +DSSPEAKER_SURROUND: int +DSBCAPSType = _win32typing.PyDSBCAPS +DSBFREQUENCY_MAX: int +DSBFREQUENCY_MIN: int +DSBFREQUENCY_ORIGINAL: int +DSBPAN_CENTER: int +DSBPAN_LEFT: int +DSBPAN_RIGHT: int +DSBPN_OFFSETSTOP: int +DSBSIZE_MAX: int +DSBSIZE_MIN: int +DSBUFFERDESCType = _win32typing.PyDSBUFFERDESC +DSBVOLUME_MAX: int +DSBVOLUME_MIN: int +DSCAPSType = _win32typing.PyDSCAPSType +DSCBCAPSType = _win32typing.PyDSCBCAPSType +DSCBLOCK_ENTIREBUFFER: int +DSCBSTART_LOOPING: int +DSCBSTATUS_CAPTURING: int +DSCBSTATUS_LOOPING: int +DSCBUFFERDESCType = _win32typing.PyDSCBUFFERDESC +DSCCAPSType = _win32typing.PyDSCCAPSType +DSERR_ACCESSDENIED: int +DSERR_ALLOCATED: int +DSERR_ALREADYINITIALIZED: int +DSERR_BADFORMAT: int +DSERR_BADSENDBUFFERGUID: int +DSERR_BUFFERLOST: int +DSERR_BUFFERTOOSMALL: int +DSERR_CONTROLUNAVAIL: int +DSERR_DS8_REQUIRED: int +DSERR_FXUNAVAILABLE: int +DSERR_GENERIC: int +DSERR_INVALIDCALL: int +DSERR_INVALIDPARAM: int +DSERR_NOAGGREGATION: int +DSERR_NODRIVER: int +DSERR_NOINTERFACE: int +DSERR_OBJECTNOTFOUND: int +DSERR_OTHERAPPHASPRIO: int +DSERR_OUTOFMEMORY: int +DSERR_PRIOLEVELNEEDED: int +DSERR_SENDLOOP: int +DSERR_UNINITIALIZED: int +DSERR_UNSUPPORTED: int +DS_NO_VIRTUALIZATION: int +DS_OK: int +IID_IDirectSound: _win32typing.PyIID +IID_IDirectSoundBuffer: _win32typing.PyIID +IID_IDirectSoundCapture: _win32typing.PyIID +IID_IDirectSoundCaptureBuffer: _win32typing.PyIID +IID_IDirectSoundNotify: _win32typing.PyIID diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/ifilter/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/ifilter/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/ifilter/ifilter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/ifilter/ifilter.pyi new file mode 100644 index 00000000..9d7a5207 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/ifilter/ifilter.pyi @@ -0,0 +1,33 @@ +import _win32typing + +def BindIFilterFromStorage(*args, **kwargs): ... # incomplete +def BindIFilterFromStream(*args, **kwargs): ... # incomplete +def LoadIFilter(*args, **kwargs): ... # incomplete + +CHUNK_EOC: int +CHUNK_EOP: int +CHUNK_EOS: int +CHUNK_EOW: int +CHUNK_NO_BREAK: int +CHUNK_TEXT: int +CHUNK_VALUE: int +FILTER_E_ACCESS: int +FILTER_E_EMBEDDING_UNAVAILABLE: int +FILTER_E_END_OF_CHUNKS: int +FILTER_E_LINK_UNAVAILABLE: int +FILTER_E_NO_MORE_TEXT: int +FILTER_E_NO_MORE_VALUES: int +FILTER_E_NO_TEXT: int +FILTER_E_NO_VALUES: int +FILTER_E_PASSWORD: int +FILTER_S_LAST_TEXT: int +IFILTER_FLAGS_OLE_PROPERTIES: int +IFILTER_INIT_APPLY_INDEX_ATTRIBUTES: int +IFILTER_INIT_APPLY_OTHER_ATTRIBUTES: int +IFILTER_INIT_CANON_HYPHENS: int +IFILTER_INIT_CANON_PARAGRAPHS: int +IFILTER_INIT_CANON_SPACES: int +IFILTER_INIT_HARD_LINE_BREAKS: int +IFILTER_INIT_INDEXING_ONLY: int +IFILTER_INIT_SEARCH_LINKS: int +IID_IFilter: _win32typing.PyIID diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/ifilter/ifiltercon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/ifilter/ifiltercon.pyi new file mode 100644 index 00000000..2b662349 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/ifilter/ifiltercon.pyi @@ -0,0 +1,103 @@ +from _typeshed import Incomplete + +PSGUID_STORAGE: Incomplete +PSGUID_SUMMARYINFORMATION: Incomplete +PSGUID_HTMLINFORMATION: Incomplete +PSGUID_HTML2_INFORMATION: Incomplete +IFILTER_INIT_CANON_PARAGRAPHS: int +IFILTER_INIT_HARD_LINE_BREAKS: int +IFILTER_INIT_CANON_HYPHENS: int +IFILTER_INIT_CANON_SPACES: int +IFILTER_INIT_APPLY_INDEX_ATTRIBUTES: int +IFILTER_INIT_APPLY_CRAWL_ATTRIBUTES: int +IFILTER_INIT_APPLY_OTHER_ATTRIBUTES: int +IFILTER_INIT_INDEXING_ONLY: int +IFILTER_INIT_SEARCH_LINKS: int +IFILTER_INIT_FILTER_OWNED_VALUE_OK: int +IFILTER_FLAGS_OLE_PROPERTIES: int +CHUNK_TEXT: int +CHUNK_VALUE: int +CHUNK_NO_BREAK: int +CHUNK_EOW: int +CHUNK_EOS: int +CHUNK_EOP: int +CHUNK_EOC: int +NOT_AN_ERROR: int +FILTER_E_END_OF_CHUNKS: int +FILTER_E_NO_MORE_TEXT: int +FILTER_E_NO_MORE_VALUES: int +FILTER_E_ACCESS: int +FILTER_W_MONIKER_CLIPPED: int +FILTER_E_NO_TEXT: int +FILTER_E_NO_VALUES: int +FILTER_E_EMBEDDING_UNAVAILABLE: int +FILTER_E_LINK_UNAVAILABLE: int +FILTER_S_LAST_TEXT: int +FILTER_S_LAST_VALUES: int +FILTER_E_PASSWORD: int +FILTER_E_UNKNOWNFORMAT: int +PROPSETFLAG_DEFAULT: int +PROPSETFLAG_NONSIMPLE: int +PROPSETFLAG_ANSI: int +PROPSETFLAG_UNBUFFERED: int +PROPSETFLAG_CASE_SENSITIVE: int +PROPSET_BEHAVIOR_CASE_SENSITIVE: int +PID_DICTIONARY: int +PID_CODEPAGE: int +PID_FIRST_USABLE: int +PID_FIRST_NAME_DEFAULT: int +PID_LOCALE: int +PID_MODIFY_TIME: int +PID_SECURITY: int +PID_BEHAVIOR: int 
+PID_ILLEGAL: int +PID_MIN_READONLY: int +PID_MAX_READONLY: int +PIDDI_THUMBNAIL: int +PIDSI_TITLE: int +PIDSI_SUBJECT: int +PIDSI_AUTHOR: int +PIDSI_KEYWORDS: int +PIDSI_COMMENTS: int +PIDSI_TEMPLATE: int +PIDSI_LASTAUTHOR: int +PIDSI_REVNUMBER: int +PIDSI_EDITTIME: int +PIDSI_LASTPRINTED: int +PIDSI_CREATE_DTM: int +PIDSI_LASTSAVE_DTM: int +PIDSI_PAGECOUNT: int +PIDSI_WORDCOUNT: int +PIDSI_CHARCOUNT: int +PIDSI_THUMBNAIL: int +PIDSI_APPNAME: int +PIDSI_DOC_SECURITY: int +PIDDSI_CATEGORY: int +PIDDSI_PRESFORMAT: int +PIDDSI_BYTECOUNT: int +PIDDSI_LINECOUNT: int +PIDDSI_PARCOUNT: int +PIDDSI_SLIDECOUNT: int +PIDDSI_NOTECOUNT: int +PIDDSI_HIDDENCOUNT: int +PIDDSI_MMCLIPCOUNT: int +PIDDSI_SCALE: int +PIDDSI_HEADINGPAIR: int +PIDDSI_DOCPARTS: int +PIDDSI_MANAGER: int +PIDDSI_COMPANY: int +PIDDSI_LINKSDIRTY: int +PIDMSI_EDITOR: int +PIDMSI_SUPPLIER: int +PIDMSI_SOURCE: int +PIDMSI_SEQUENCE_NO: int +PIDMSI_PROJECT: int +PIDMSI_STATUS: int +PIDMSI_OWNER: int +PIDMSI_RATING: int +PIDMSI_PRODUCTION: int +PIDMSI_COPYRIGHT: int +PRSPEC_INVALID: int +PRSPEC_LPWSTR: int +PRSPEC_PROPID: int +CCH_MAX_PROPSTG_NAME: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/internet/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/internet/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/internet/inetcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/internet/inetcon.pyi new file mode 100644 index 00000000..7a90c635 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/internet/inetcon.pyi @@ -0,0 +1,254 @@ +from _typeshed import Incomplete + +INET_E_USE_DEFAULT_PROTOCOLHANDLER: int +INET_E_USE_DEFAULT_SETTING: int +INET_E_DEFAULT_ACTION: int +INET_E_QUERYOPTION_UNKNOWN: int +INET_E_REDIRECTING: int +INET_E_INVALID_URL: int +INET_E_NO_SESSION: int +INET_E_CANNOT_CONNECT: int +INET_E_RESOURCE_NOT_FOUND: int +INET_E_OBJECT_NOT_FOUND: int +INET_E_DATA_NOT_AVAILABLE: int +INET_E_DOWNLOAD_FAILURE: int +INET_E_AUTHENTICATION_REQUIRED: int +INET_E_NO_VALID_MEDIA: int +INET_E_CONNECTION_TIMEOUT: int +INET_E_INVALID_REQUEST: int +INET_E_UNKNOWN_PROTOCOL: int +INET_E_SECURITY_PROBLEM: int +INET_E_CANNOT_LOAD_DATA: int +INET_E_CANNOT_INSTANTIATE_OBJECT: int +INET_E_INVALID_CERTIFICATE: int +INET_E_REDIRECT_FAILED: int +INET_E_REDIRECT_TO_DIR: int +INET_E_CANNOT_LOCK_REQUEST: int +INET_E_USE_EXTEND_BINDING: int +INET_E_TERMINATED_BIND: int +INET_E_CODE_DOWNLOAD_DECLINED: int +INET_E_RESULT_DISPATCHED: int +INET_E_CANNOT_REPLACE_SFP_FILE: int +INET_E_CODE_INSTALL_SUPPRESSED: int +INET_E_CODE_INSTALL_BLOCKED_BY_HASH_POLICY: int +MKSYS_URLMONIKER: int +URL_MK_LEGACY: int +URL_MK_UNIFORM: int +URL_MK_NO_CANONICALIZE: int +FIEF_FLAG_FORCE_JITUI: int +FIEF_FLAG_PEEK: int +FIEF_FLAG_SKIP_INSTALLED_VERSION_CHECK: int +FMFD_DEFAULT: int +FMFD_URLASFILENAME: int +FMFD_ENABLEMIMESNIFFING: int +FMFD_IGNOREMIMETEXTPLAIN: int +URLMON_OPTION_USERAGENT: int +URLMON_OPTION_USERAGENT_REFRESH: int +URLMON_OPTION_URL_ENCODING: int +URLMON_OPTION_USE_BINDSTRINGCREDS: int +URLMON_OPTION_USE_BROWSERAPPSDOCUMENTS: int +CF_NULL: int +Uri_CREATE_ALLOW_RELATIVE: int +Uri_CREATE_ALLOW_IMPLICIT_WILDCARD_SCHEME: int +Uri_CREATE_ALLOW_IMPLICIT_FILE_SCHEME: int +Uri_CREATE_NOFRAG: int +Uri_CREATE_NO_CANONICALIZE: 
int +Uri_CREATE_CANONICALIZE: int +Uri_CREATE_FILE_USE_DOS_PATH: int +Uri_CREATE_DECODE_EXTRA_INFO: int +Uri_CREATE_NO_DECODE_EXTRA_INFO: int +Uri_CREATE_CRACK_UNKNOWN_SCHEMES: int +Uri_CREATE_NO_CRACK_UNKNOWN_SCHEMES: int +Uri_CREATE_PRE_PROCESS_HTML_URI: int +Uri_CREATE_NO_PRE_PROCESS_HTML_URI: int +Uri_CREATE_IE_SETTINGS: int +Uri_CREATE_NO_IE_SETTINGS: int +Uri_CREATE_NO_ENCODE_FORBIDDEN_CHARACTERS: int +Uri_DISPLAY_NO_FRAGMENT: int +Uri_PUNYCODE_IDN_HOST: int +Uri_DISPLAY_IDN_HOST: int +Uri_ENCODING_USER_INFO_AND_PATH_IS_PERCENT_ENCODED_UTF8: int +Uri_ENCODING_USER_INFO_AND_PATH_IS_CP: int +Uri_ENCODING_HOST_IS_IDN: int +Uri_ENCODING_HOST_IS_PERCENT_ENCODED_UTF8: int +Uri_ENCODING_HOST_IS_PERCENT_ENCODED_CP: int +Uri_ENCODING_QUERY_AND_FRAGMENT_IS_PERCENT_ENCODED_UTF8: int +Uri_ENCODING_QUERY_AND_FRAGMENT_IS_CP: int +Uri_ENCODING_RFC: Incomplete +UriBuilder_USE_ORIGINAL_FLAGS: int +WININETINFO_OPTION_LOCK_HANDLE: int +URLOSTRM_USECACHEDCOPY_ONLY: int +URLOSTRM_USECACHEDCOPY: int +URLOSTRM_GETNEWESTVERSION: int +SET_FEATURE_ON_THREAD: int +SET_FEATURE_ON_PROCESS: int +SET_FEATURE_IN_REGISTRY: int +SET_FEATURE_ON_THREAD_LOCALMACHINE: int +SET_FEATURE_ON_THREAD_INTRANET: int +SET_FEATURE_ON_THREAD_TRUSTED: int +SET_FEATURE_ON_THREAD_INTERNET: int +SET_FEATURE_ON_THREAD_RESTRICTED: int +GET_FEATURE_FROM_THREAD: int +GET_FEATURE_FROM_PROCESS: int +GET_FEATURE_FROM_REGISTRY: int +GET_FEATURE_FROM_THREAD_LOCALMACHINE: int +GET_FEATURE_FROM_THREAD_INTRANET: int +GET_FEATURE_FROM_THREAD_TRUSTED: int +GET_FEATURE_FROM_THREAD_INTERNET: int +GET_FEATURE_FROM_THREAD_RESTRICTED: int +PROTOCOLFLAG_NO_PICS_CHECK: int +MUTZ_NOSAVEDFILECHECK: int +MUTZ_ISFILE: int +MUTZ_ACCEPT_WILDCARD_SCHEME: int +MUTZ_ENFORCERESTRICTED: int +MUTZ_RESERVED: int +MUTZ_REQUIRESAVEDFILECHECK: int +MUTZ_DONT_UNESCAPE: int +MUTZ_DONT_USE_CACHE: int +MUTZ_FORCE_INTRANET_FLAGS: int +MUTZ_IGNORE_ZONE_MAPPINGS: int +MAX_SIZE_SECURITY_ID: int +URLACTION_MIN: int +URLACTION_DOWNLOAD_MIN: int +URLACTION_DOWNLOAD_SIGNED_ACTIVEX: int +URLACTION_DOWNLOAD_UNSIGNED_ACTIVEX: int +URLACTION_DOWNLOAD_CURR_MAX: int +URLACTION_DOWNLOAD_MAX: int +URLACTION_ACTIVEX_MIN: int +URLACTION_ACTIVEX_RUN: int +URLPOLICY_ACTIVEX_CHECK_LIST: int +URLACTION_ACTIVEX_OVERRIDE_OBJECT_SAFETY: int +URLACTION_ACTIVEX_OVERRIDE_DATA_SAFETY: int +URLACTION_ACTIVEX_OVERRIDE_SCRIPT_SAFETY: int +URLACTION_SCRIPT_OVERRIDE_SAFETY: int +URLACTION_ACTIVEX_CONFIRM_NOOBJECTSAFETY: int +URLACTION_ACTIVEX_TREATASUNTRUSTED: int +URLACTION_ACTIVEX_NO_WEBOC_SCRIPT: int +URLACTION_ACTIVEX_OVERRIDE_REPURPOSEDETECTION: int +URLACTION_ACTIVEX_OVERRIDE_OPTIN: int +URLACTION_ACTIVEX_SCRIPTLET_RUN: int +URLACTION_ACTIVEX_DYNSRC_VIDEO_AND_ANIMATION: int +URLACTION_ACTIVEX_CURR_MAX: int +URLACTION_ACTIVEX_MAX: int +URLACTION_SCRIPT_MIN: int +URLACTION_SCRIPT_RUN: int +URLACTION_SCRIPT_JAVA_USE: int +URLACTION_SCRIPT_SAFE_ACTIVEX: int +URLACTION_CROSS_DOMAIN_DATA: int +URLACTION_SCRIPT_PASTE: int +URLACTION_ALLOW_XDOMAIN_SUBFRAME_RESIZE: int +URLACTION_SCRIPT_CURR_MAX: int +URLACTION_SCRIPT_MAX: int +URLACTION_HTML_MIN: int +URLACTION_HTML_SUBMIT_FORMS: int +URLACTION_HTML_SUBMIT_FORMS_FROM: int +URLACTION_HTML_SUBMIT_FORMS_TO: int +URLACTION_HTML_FONT_DOWNLOAD: int +URLACTION_HTML_JAVA_RUN: int +URLACTION_HTML_USERDATA_SAVE: int +URLACTION_HTML_SUBFRAME_NAVIGATE: int +URLACTION_HTML_META_REFRESH: int +URLACTION_HTML_MIXED_CONTENT: int +URLACTION_HTML_INCLUDE_FILE_PATH: int +URLACTION_HTML_MAX: int +URLACTION_SHELL_MIN: int +URLACTION_SHELL_INSTALL_DTITEMS: int 
+URLACTION_SHELL_MOVE_OR_COPY: int +URLACTION_SHELL_FILE_DOWNLOAD: int +URLACTION_SHELL_VERB: int +URLACTION_SHELL_WEBVIEW_VERB: int +URLACTION_SHELL_SHELLEXECUTE: int +URLACTION_SHELL_EXECUTE_HIGHRISK: int +URLACTION_SHELL_EXECUTE_MODRISK: int +URLACTION_SHELL_EXECUTE_LOWRISK: int +URLACTION_SHELL_POPUPMGR: int +URLACTION_SHELL_RTF_OBJECTS_LOAD: int +URLACTION_SHELL_ENHANCED_DRAGDROP_SECURITY: int +URLACTION_SHELL_EXTENSIONSECURITY: int +URLACTION_SHELL_SECURE_DRAGSOURCE: int +URLACTION_SHELL_CURR_MAX: int +URLACTION_SHELL_MAX: int +URLACTION_NETWORK_MIN: int +URLACTION_CREDENTIALS_USE: int +URLPOLICY_CREDENTIALS_SILENT_LOGON_OK: int +URLPOLICY_CREDENTIALS_MUST_PROMPT_USER: int +URLPOLICY_CREDENTIALS_CONDITIONAL_PROMPT: int +URLPOLICY_CREDENTIALS_ANONYMOUS_ONLY: int +URLACTION_AUTHENTICATE_CLIENT: int +URLPOLICY_AUTHENTICATE_CLEARTEXT_OK: int +URLPOLICY_AUTHENTICATE_CHALLENGE_RESPONSE: int +URLPOLICY_AUTHENTICATE_MUTUAL_ONLY: int +URLACTION_COOKIES: int +URLACTION_COOKIES_SESSION: int +URLACTION_CLIENT_CERT_PROMPT: int +URLACTION_COOKIES_THIRD_PARTY: int +URLACTION_COOKIES_SESSION_THIRD_PARTY: int +URLACTION_COOKIES_ENABLED: int +URLACTION_NETWORK_CURR_MAX: int +URLACTION_NETWORK_MAX: int +URLACTION_JAVA_MIN: int +URLACTION_JAVA_PERMISSIONS: int +URLPOLICY_JAVA_PROHIBIT: int +URLPOLICY_JAVA_HIGH: int +URLPOLICY_JAVA_MEDIUM: int +URLPOLICY_JAVA_LOW: int +URLPOLICY_JAVA_CUSTOM: int +URLACTION_JAVA_CURR_MAX: int +URLACTION_JAVA_MAX: int +URLACTION_INFODELIVERY_MIN: int +URLACTION_INFODELIVERY_NO_ADDING_CHANNELS: int +URLACTION_INFODELIVERY_NO_EDITING_CHANNELS: int +URLACTION_INFODELIVERY_NO_REMOVING_CHANNELS: int +URLACTION_INFODELIVERY_NO_ADDING_SUBSCRIPTIONS: int +URLACTION_INFODELIVERY_NO_EDITING_SUBSCRIPTIONS: int +URLACTION_INFODELIVERY_NO_REMOVING_SUBSCRIPTIONS: int +URLACTION_INFODELIVERY_NO_CHANNEL_LOGGING: int +URLACTION_INFODELIVERY_CURR_MAX: int +URLACTION_INFODELIVERY_MAX: int +URLACTION_CHANNEL_SOFTDIST_MIN: int +URLACTION_CHANNEL_SOFTDIST_PERMISSIONS: int +URLPOLICY_CHANNEL_SOFTDIST_PROHIBIT: int +URLPOLICY_CHANNEL_SOFTDIST_PRECACHE: int +URLPOLICY_CHANNEL_SOFTDIST_AUTOINSTALL: int +URLACTION_CHANNEL_SOFTDIST_MAX: int +URLACTION_BEHAVIOR_MIN: int +URLACTION_BEHAVIOR_RUN: int +URLPOLICY_BEHAVIOR_CHECK_LIST: int +URLACTION_FEATURE_MIN: int +URLACTION_FEATURE_MIME_SNIFFING: int +URLACTION_FEATURE_ZONE_ELEVATION: int +URLACTION_FEATURE_WINDOW_RESTRICTIONS: int +URLACTION_FEATURE_SCRIPT_STATUS_BAR: int +URLACTION_FEATURE_FORCE_ADDR_AND_STATUS: int +URLACTION_FEATURE_BLOCK_INPUT_PROMPTS: int +URLACTION_AUTOMATIC_DOWNLOAD_UI_MIN: int +URLACTION_AUTOMATIC_DOWNLOAD_UI: int +URLACTION_AUTOMATIC_ACTIVEX_UI: int +URLACTION_ALLOW_RESTRICTEDPROTOCOLS: int +URLACTION_ALLOW_APEVALUATION: int +URLACTION_WINDOWS_BROWSER_APPLICATIONS: int +URLACTION_XPS_DOCUMENTS: int +URLACTION_LOOSE_XAML: int +URLACTION_LOWRIGHTS: int +URLACTION_WINFX_SETUP: int +URLPOLICY_ALLOW: int +URLPOLICY_QUERY: int +URLPOLICY_DISALLOW: int +URLPOLICY_NOTIFY_ON_ALLOW: int +URLPOLICY_NOTIFY_ON_DISALLOW: int +URLPOLICY_LOG_ON_ALLOW: int +URLPOLICY_LOG_ON_DISALLOW: int +URLPOLICY_MASK_PERMISSIONS: int +URLPOLICY_DONTCHECKDLGBOX: int +URLZONE_ESC_FLAG: int +SECURITY_IE_STATE_GREEN: int +SECURITY_IE_STATE_RED: int +SOFTDIST_FLAG_USAGE_EMAIL: int +SOFTDIST_FLAG_USAGE_PRECACHE: int +SOFTDIST_FLAG_USAGE_AUTOINSTALL: int +SOFTDIST_FLAG_DELETE_SUBSCRIPTION: int +SOFTDIST_ADSTATE_NONE: int +SOFTDIST_ADSTATE_AVAILABLE: int +SOFTDIST_ADSTATE_DOWNLOADED: int +SOFTDIST_ADSTATE_INSTALLED: int +CONFIRMSAFETYACTION_LOADOBJECT: int diff 
--git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/internet/internet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/internet/internet.pyi new file mode 100644 index 00000000..8fd6b380 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/internet/internet.pyi @@ -0,0 +1,51 @@ +import _win32typing + +def CoInternetCreateSecurityManager(reserved) -> _win32typing.PyIInternetSecurityManager: ... +def CoInternetIsFeatureEnabled(flags): ... +def CoInternetSetFeatureEnabled(flags, enable): ... + +FEATURE_ADDON_MANAGEMENT: int +FEATURE_BEHAVIORS: int +FEATURE_DISABLE_MK_PROTOCOL: int +FEATURE_ENTRY_COUNT: int +FEATURE_GET_URL_DOM_FILEPATH_UNENCODED: int +FEATURE_HTTP_USERNAME_PASSWORD_DISABLE: int +FEATURE_LOCALMACHINE_LOCKDOWN: int +FEATURE_MIME_HANDLING: int +FEATURE_MIME_SNIFFING: int +FEATURE_OBJECT_CACHING: int +FEATURE_PROTOCOL_LOCKDOWN: int +FEATURE_RESTRICT_ACTIVEXINSTALL: int +FEATURE_RESTRICT_FILEDOWNLOAD: int +FEATURE_SAFE_BINDTOOBJECT: int +FEATURE_SECURITYBAND: int +FEATURE_UNC_SAVEDFILECHECK: int +FEATURE_VALIDATE_NAVIGATE_URL: int +FEATURE_WEBOC_POPUPMANAGEMENT: int +FEATURE_WINDOW_RESTRICTIONS: int +FEATURE_ZONE_ELEVATION: int +GET_FEATURE_FROM_PROCESS: int +GET_FEATURE_FROM_REGISTRY: int +GET_FEATURE_FROM_THREAD: int +GET_FEATURE_FROM_THREAD_INTERNET: int +GET_FEATURE_FROM_THREAD_INTRANET: int +GET_FEATURE_FROM_THREAD_LOCALMACHINE: int +GET_FEATURE_FROM_THREAD_RESTRICTED: int +GET_FEATURE_FROM_THREAD_TRUSTED: int +IID_IDocHostUIHandler: _win32typing.PyIID +IID_IHTMLOMWindowServices: _win32typing.PyIID +IID_IInternetBindInfo: _win32typing.PyIID +IID_IInternetPriority: _win32typing.PyIID +IID_IInternetProtocol: _win32typing.PyIID +IID_IInternetProtocolInfo: _win32typing.PyIID +IID_IInternetProtocolRoot: _win32typing.PyIID +IID_IInternetProtocolSink: _win32typing.PyIID +IID_IInternetSecurityManager: _win32typing.PyIID +SET_FEATURE_IN_REGISTRY: int +SET_FEATURE_ON_PROCESS: int +SET_FEATURE_ON_THREAD: int +SET_FEATURE_ON_THREAD_INTERNET: int +SET_FEATURE_ON_THREAD_INTRANET: int +SET_FEATURE_ON_THREAD_LOCALMACHINE: int +SET_FEATURE_ON_THREAD_RESTRICTED: int +SET_FEATURE_ON_THREAD_TRUSTED: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/_exchdapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/_exchdapi.pyi new file mode 100644 index 00000000..2d86d5bf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/_exchdapi.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete + +import _win32typing + +def HrInstallService() -> None: ... +def HrInstallMailboxAgent() -> None: ... +def HrCreateMailboxAgentProfile(serviceName: str, profile: str) -> None: ... +def HrCreateGatewayProfile(serviceName: str, profile: str) -> None: ... +def HrMailboxAgentExists(server: str, siteDN: str, rdn: str) -> None: ... +def HrAdminProgramExists() -> None: ... +def HrRemoveMailboxAgent(server: str, siteDN: str, rdn: str) -> None: ... +def HrRemoveProfile(profile: str) -> None: ... 
+def HrEnumOrganizations(rootDN: str, server: str) -> list[str]: ...
+def HrEnumSites(server: str, organizationDN: str) -> list[str]: ...
+def HrEnumContainers(server: str, siteDN: str, fSubtree) -> list[str]: ...
+def HrEnumSiteAdmins(server: str, siteDN: str) -> list[str]: ...
+def HrGetServiceAccountName(serviceName: str, serviceName1: str) -> str: ...
+def HrCreateDirEntryIdEx(addrBook: _win32typing.PyIAddrBook, distinguishedName: str) -> str: ...
+def HrCreateProfileName(profPrefix: str) -> str: ...
+def HrFindExchangeGlobalAddresslist(addrBook: _win32typing.PyIAddrBook) -> str: ...
+def HrGetExchangeStatus(server: str) -> tuple[Incomplete, Incomplete]: ...
+def HrGetMailboxDN(session) -> str: ...
+def HrGetServerDN(session) -> str: ...
+def HrMAPIFindDefaultMsgStore(session: _win32typing.PyIMAPISession) -> str: ...
+def HrMAPIFindFolder(folder: _win32typing.PyIMAPIFolder, name: str) -> str: ...
+def HrMAPIFindFolderEx(msgStore: _win32typing.PyIMsgStore, sepString: str, path: str) -> str: ...
+def HrMAPIFindIPMSubtree(msgStore: _win32typing.PyIMsgStore) -> str: ...
+def HrMAPIFindInbox(msgStore: _win32typing.PyIMsgStore) -> str: ...
+def HrMAPIFindStore(session: _win32typing.PyIMAPISession, name: str) -> _win32typing.PyIMsgStore: ...
+def HrMAPIFindSubfolderEx(rootFolder: _win32typing.PyIMAPIFolder, sep: str, name: str) -> _win32typing.PyIMsgStore: ...
+def HrMAPIOpenFolderEx(msgStore: _win32typing.PyIMsgStore, sep: str, name: str) -> _win32typing.PyIMAPIFolder: ...
+def HrMAPISetPropBoolean(obj: _win32typing.PyIMAPIProp, tag) -> None: ...
+def HrMAPISetPropLong(obj: _win32typing.PyIMAPIProp, tag) -> None: ...
+def HrMailboxLogoff(inbox: _win32typing.PyIMsgStore) -> None: ...
+def HrMailboxLogon(
+    session: _win32typing.PyIMAPISession, msgStore: _win32typing.PyIMsgStore, msgStoreDN: str, mailboxDN: str
+) -> _win32typing.PyIMsgStore: ...
+def HrOpenExchangePrivateStore(session: _win32typing.PyIMAPISession) -> _win32typing.PyIMsgStore: ...
+def HrOpenExchangePublicFolders(store: _win32typing.PyIMsgStore) -> _win32typing.PyIMAPIFolder: ...
+def HrOpenExchangePublicStore(session: _win32typing.PyIMAPISession) -> _win32typing.PyIMsgStore: ...
+def HrOpenSessionObject(session: _win32typing.PyIMAPISession) -> _win32typing.PyIMAPIProp: ...
+def HrOpenSiteContainer(session: _win32typing.PyIMAPISession) -> _win32typing.PyIMAPIProp: ...
+def HrOpenSiteContainerAddressing(session: _win32typing.PyIMAPISession) -> _win32typing.PyIMAPIProp: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/emsabtags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/emsabtags.pyi new file mode 100644 index 00000000..59cfd215 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/emsabtags.pyi @@ -0,0 +1,865 @@ +from _typeshed import Incomplete + +from win32comext.mapi.mapitags import ( + PROP_TAG as PROP_TAG, + PT_APPTIME as PT_APPTIME, + PT_BINARY as PT_BINARY, + PT_BOOLEAN as PT_BOOLEAN, + PT_CLSID as PT_CLSID, + PT_CURRENCY as PT_CURRENCY, + PT_DOUBLE as PT_DOUBLE, + PT_ERROR as PT_ERROR, + PT_FLOAT as PT_FLOAT, + PT_I2 as PT_I2, + PT_I4 as PT_I4, + PT_I8 as PT_I8, + PT_LONG as PT_LONG, + PT_LONGLONG as PT_LONGLONG, + PT_MV_APPTIME as PT_MV_APPTIME, + PT_MV_BINARY as PT_MV_BINARY, + PT_MV_CLSID as PT_MV_CLSID, + PT_MV_CURRENCY as PT_MV_CURRENCY, + PT_MV_DOUBLE as PT_MV_DOUBLE, + PT_MV_FLOAT as PT_MV_FLOAT, + PT_MV_I2 as PT_MV_I2, + PT_MV_I4 as PT_MV_I4, + PT_MV_I8 as PT_MV_I8, + PT_MV_LONG as PT_MV_LONG, + PT_MV_LONGLONG as PT_MV_LONGLONG, + PT_MV_R4 as PT_MV_R4, + PT_MV_R8 as PT_MV_R8, + PT_MV_SHORT as PT_MV_SHORT, + PT_MV_STRING8 as PT_MV_STRING8, + PT_MV_SYSTIME as PT_MV_SYSTIME, + PT_MV_TSTRING as PT_MV_TSTRING, + PT_MV_UNICODE as PT_MV_UNICODE, + PT_NULL as PT_NULL, + PT_OBJECT as PT_OBJECT, + PT_R4 as PT_R4, + PT_SHORT as PT_SHORT, + PT_STRING8 as PT_STRING8, + PT_SYSTIME as PT_SYSTIME, + PT_TSTRING as PT_TSTRING, + PT_UNICODE as PT_UNICODE, + PT_UNSPECIFIED as PT_UNSPECIFIED, +) + +AB_SHOW_PHANTOMS: int +AB_SHOW_OTHERS: int +EMS_AB_ADDRESS_LOOKUP: int +PR_EMS_AB_SERVER: Incomplete +PR_EMS_AB_SERVER_A: Incomplete +PR_EMS_AB_SERVER_W: Incomplete +PR_EMS_AB_CONTAINERID: Incomplete +PR_EMS_AB_DOS_ENTRYID: Incomplete +PR_EMS_AB_PARENT_ENTRYID: Incomplete +PR_EMS_AB_IS_MASTER: Incomplete +PR_EMS_AB_OBJECT_OID: Incomplete +PR_EMS_AB_HIERARCHY_PATH: Incomplete +PR_EMS_AB_HIERARCHY_PATH_A: Incomplete +PR_EMS_AB_HIERARCHY_PATH_W: Incomplete +PR_EMS_AB_CHILD_RDNS: Incomplete +MIN_EMS_AB_CONSTRUCTED_PROP_ID: int +PR_EMS_AB_OTHER_RECIPS: Incomplete +PR_EMS_AB_DISPLAY_NAME_PRINTABLE: Incomplete +PR_EMS_AB_DISPLAY_NAME_PRINTABLE_A: Incomplete +PR_EMS_AB_DISPLAY_NAME_PRINTABLE_W: Incomplete +PR_EMS_AB_ACCESS_CATEGORY: Incomplete +PR_EMS_AB_ACTIVATION_SCHEDULE: Incomplete +PR_EMS_AB_ACTIVATION_STYLE: Incomplete +PR_EMS_AB_ADDRESS_ENTRY_DISPLAY_TABLE: Incomplete +PR_EMS_AB_ADDRESS_ENTRY_DISPLAY_TABLE_MSDOS: Incomplete +PR_EMS_AB_ADDRESS_SYNTAX: Incomplete +PR_EMS_AB_ADDRESS_TYPE: Incomplete +PR_EMS_AB_ADDRESS_TYPE_A: Incomplete +PR_EMS_AB_ADDRESS_TYPE_W: Incomplete +PR_EMS_AB_ADMD: Incomplete +PR_EMS_AB_ADMD_A: Incomplete +PR_EMS_AB_ADMD_W: Incomplete +PR_EMS_AB_ADMIN_DESCRIPTION: Incomplete +PR_EMS_AB_ADMIN_DESCRIPTION_A: Incomplete +PR_EMS_AB_ADMIN_DESCRIPTION_W: Incomplete +PR_EMS_AB_ADMIN_DISPLAY_NAME: Incomplete +PR_EMS_AB_ADMIN_DISPLAY_NAME_A: Incomplete +PR_EMS_AB_ADMIN_DISPLAY_NAME_W: Incomplete +PR_EMS_AB_ADMIN_EXTENSION_DLL: Incomplete +PR_EMS_AB_ADMIN_EXTENSION_DLL_A: Incomplete +PR_EMS_AB_ADMIN_EXTENSION_DLL_W: Incomplete +PR_EMS_AB_ALIASED_OBJECT_NAME: Incomplete +PR_EMS_AB_ALIASED_OBJECT_NAME_A: Incomplete +PR_EMS_AB_ALIASED_OBJECT_NAME_W: Incomplete +PR_EMS_AB_ALIASED_OBJECT_NAME_O: Incomplete +PR_EMS_AB_ALIASED_OBJECT_NAME_T: Incomplete +PR_EMS_AB_ALT_RECIPIENT: Incomplete +PR_EMS_AB_ALT_RECIPIENT_A: Incomplete +PR_EMS_AB_ALT_RECIPIENT_W: Incomplete 
+PR_EMS_AB_ALT_RECIPIENT_O: Incomplete +PR_EMS_AB_ALT_RECIPIENT_T: Incomplete +PR_EMS_AB_ALT_RECIPIENT_BL: Incomplete +PR_EMS_AB_ALT_RECIPIENT_BL_A: Incomplete +PR_EMS_AB_ALT_RECIPIENT_BL_W: Incomplete +PR_EMS_AB_ALT_RECIPIENT_BL_O: Incomplete +PR_EMS_AB_ALT_RECIPIENT_BL_T: Incomplete +PR_EMS_AB_ANCESTOR_ID: Incomplete +PR_EMS_AB_ASSOC_NT_ACCOUNT: Incomplete +PR_EMS_AB_ASSOC_REMOTE_DXA: Incomplete +PR_EMS_AB_ASSOC_REMOTE_DXA_A: Incomplete +PR_EMS_AB_ASSOC_REMOTE_DXA_W: Incomplete +PR_EMS_AB_ASSOC_REMOTE_DXA_O: Incomplete +PR_EMS_AB_ASSOC_REMOTE_DXA_T: Incomplete +PR_EMS_AB_ASSOCIATION_LIFETIME: Incomplete +PR_EMS_AB_AUTH_ORIG_BL: Incomplete +PR_EMS_AB_AUTH_ORIG_BL_A: Incomplete +PR_EMS_AB_AUTH_ORIG_BL_W: Incomplete +PR_EMS_AB_AUTH_ORIG_BL_O: Incomplete +PR_EMS_AB_AUTH_ORIG_BL_T: Incomplete +PR_EMS_AB_AUTHORITY_REVOCATION_LIST: Incomplete +PR_EMS_AB_AUTHORIZED_DOMAIN: Incomplete +PR_EMS_AB_AUTHORIZED_DOMAIN_A: Incomplete +PR_EMS_AB_AUTHORIZED_DOMAIN_W: Incomplete +PR_EMS_AB_AUTHORIZED_PASSWORD: Incomplete +PR_EMS_AB_AUTHORIZED_USER: Incomplete +PR_EMS_AB_AUTHORIZED_USER_A: Incomplete +PR_EMS_AB_AUTHORIZED_USER_W: Incomplete +PR_EMS_AB_AUTOREPLY: Incomplete +PR_EMS_AB_AUTOREPLY_MESSAGE: Incomplete +PR_EMS_AB_AUTOREPLY_MESSAGE_A: Incomplete +PR_EMS_AB_AUTOREPLY_MESSAGE_W: Incomplete +PR_EMS_AB_AUTOREPLY_SUBJECT: Incomplete +PR_EMS_AB_AUTOREPLY_SUBJECT_A: Incomplete +PR_EMS_AB_AUTOREPLY_SUBJECT_W: Incomplete +PR_EMS_AB_BRIDGEHEAD_SERVERS: Incomplete +PR_EMS_AB_BRIDGEHEAD_SERVERS_A: Incomplete +PR_EMS_AB_BRIDGEHEAD_SERVERS_W: Incomplete +PR_EMS_AB_BRIDGEHEAD_SERVERS_O: Incomplete +PR_EMS_AB_BRIDGEHEAD_SERVERS_T: Incomplete +PR_EMS_AB_BUSINESS_CATEGORY: Incomplete +PR_EMS_AB_BUSINESS_CATEGORY_A: Incomplete +PR_EMS_AB_BUSINESS_CATEGORY_W: Incomplete +PR_EMS_AB_BUSINESS_ROLES: Incomplete +PR_EMS_AB_CA_CERTIFICATE: Incomplete +PR_EMS_AB_CAN_CREATE_PF: Incomplete +PR_EMS_AB_CAN_CREATE_PF_A: Incomplete +PR_EMS_AB_CAN_CREATE_PF_W: Incomplete +PR_EMS_AB_CAN_CREATE_PF_O: Incomplete +PR_EMS_AB_CAN_CREATE_PF_T: Incomplete +PR_EMS_AB_CAN_CREATE_PF_BL: Incomplete +PR_EMS_AB_CAN_CREATE_PF_BL_A: Incomplete +PR_EMS_AB_CAN_CREATE_PF_BL_W: Incomplete +PR_EMS_AB_CAN_CREATE_PF_BL_O: Incomplete +PR_EMS_AB_CAN_CREATE_PF_BL_T: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL_A: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL_W: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL_O: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL_T: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL_BL: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL_BL_A: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL_BL_W: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL_BL_O: Incomplete +PR_EMS_AB_CAN_CREATE_PF_DL_BL_T: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_A: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_W: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_O: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_T: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_BL: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_BL_A: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_BL_W: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_BL_O: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_BL_T: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_DL: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_DL_A: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_DL_W: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_DL_O: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_DL_T: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL_A: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL_W: Incomplete +PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL_O: Incomplete 
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL_T: Incomplete +PR_EMS_AB_CAN_PRESERVE_DNS: Incomplete +PR_EMS_AB_CERTIFICATE_REVOCATION_LIST: Incomplete +PR_EMS_AB_CLOCK_ALERT_OFFSET: Incomplete +PR_EMS_AB_CLOCK_ALERT_REPAIR: Incomplete +PR_EMS_AB_CLOCK_WARNING_OFFSET: Incomplete +PR_EMS_AB_CLOCK_WARNING_REPAIR: Incomplete +PR_EMS_AB_COMPUTER_NAME: Incomplete +PR_EMS_AB_COMPUTER_NAME_A: Incomplete +PR_EMS_AB_COMPUTER_NAME_W: Incomplete +PR_EMS_AB_CONNECTED_DOMAINS: Incomplete +PR_EMS_AB_CONNECTED_DOMAINS_A: Incomplete +PR_EMS_AB_CONNECTED_DOMAINS_W: Incomplete +PR_EMS_AB_CONTAINER_INFO: Incomplete +PR_EMS_AB_COST: Incomplete +PR_EMS_AB_COUNTRY_NAME: Incomplete +PR_EMS_AB_COUNTRY_NAME_A: Incomplete +PR_EMS_AB_COUNTRY_NAME_W: Incomplete +PR_EMS_AB_CROSS_CERTIFICATE_PAIR: Incomplete +PR_EMS_AB_DELIV_CONT_LENGTH: Incomplete +PR_EMS_AB_DELIV_EITS: Incomplete +PR_EMS_AB_DELIV_EXT_CONT_TYPES: Incomplete +PR_EMS_AB_DELIVER_AND_REDIRECT: Incomplete +PR_EMS_AB_DELIVERY_MECHANISM: Incomplete +PR_EMS_AB_DESCRIPTION: Incomplete +PR_EMS_AB_DESCRIPTION_A: Incomplete +PR_EMS_AB_DESCRIPTION_W: Incomplete +PR_EMS_AB_DESTINATION_INDICATOR: Incomplete +PR_EMS_AB_DESTINATION_INDICATOR_A: Incomplete +PR_EMS_AB_DESTINATION_INDICATOR_W: Incomplete +PR_EMS_AB_DIAGNOSTIC_REG_KEY: Incomplete +PR_EMS_AB_DIAGNOSTIC_REG_KEY_A: Incomplete +PR_EMS_AB_DIAGNOSTIC_REG_KEY_W: Incomplete +PR_EMS_AB_DISPLAY_NAME_OVERRIDE: Incomplete +PR_EMS_AB_DL_MEM_REJECT_PERMS_BL: Incomplete +PR_EMS_AB_DL_MEM_REJECT_PERMS_BL_A: Incomplete +PR_EMS_AB_DL_MEM_REJECT_PERMS_BL_W: Incomplete +PR_EMS_AB_DL_MEM_REJECT_PERMS_BL_O: Incomplete +PR_EMS_AB_DL_MEM_REJECT_PERMS_BL_T: Incomplete +PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL: Incomplete +PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL_A: Incomplete +PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL_W: Incomplete +PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL_O: Incomplete +PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL_T: Incomplete +PR_EMS_AB_DL_MEMBER_RULE: Incomplete +PR_EMS_AB_DOMAIN_DEF_ALT_RECIP: Incomplete +PR_EMS_AB_DOMAIN_DEF_ALT_RECIP_A: Incomplete +PR_EMS_AB_DOMAIN_DEF_ALT_RECIP_W: Incomplete +PR_EMS_AB_DOMAIN_DEF_ALT_RECIP_O: Incomplete +PR_EMS_AB_DOMAIN_DEF_ALT_RECIP_T: Incomplete +PR_EMS_AB_DOMAIN_NAME: Incomplete +PR_EMS_AB_DOMAIN_NAME_A: Incomplete +PR_EMS_AB_DOMAIN_NAME_W: Incomplete +PR_EMS_AB_DSA_SIGNATURE: Incomplete +PR_EMS_AB_DXA_ADMIN_COPY: Incomplete +PR_EMS_AB_DXA_ADMIN_FORWARD: Incomplete +PR_EMS_AB_DXA_ADMIN_UPDATE: Incomplete +PR_EMS_AB_DXA_APPEND_REQCN: Incomplete +PR_EMS_AB_DXA_CONF_CONTAINER_LIST: Incomplete +PR_EMS_AB_DXA_CONF_CONTAINER_LIST_A: Incomplete +PR_EMS_AB_DXA_CONF_CONTAINER_LIST_W: Incomplete +PR_EMS_AB_DXA_CONF_CONTAINER_LIST_O: Incomplete +PR_EMS_AB_DXA_CONF_CONTAINER_LIST_T: Incomplete +PR_EMS_AB_DXA_CONF_REQ_TIME: Incomplete +PR_EMS_AB_DXA_CONF_SEQ: Incomplete +PR_EMS_AB_DXA_CONF_SEQ_A: Incomplete +PR_EMS_AB_DXA_CONF_SEQ_W: Incomplete +PR_EMS_AB_DXA_CONF_SEQ_USN: Incomplete +PR_EMS_AB_DXA_EXCHANGE_OPTIONS: Incomplete +PR_EMS_AB_DXA_EXPORT_NOW: Incomplete +PR_EMS_AB_DXA_FLAGS: Incomplete +PR_EMS_AB_DXA_IMP_SEQ: Incomplete +PR_EMS_AB_DXA_IMP_SEQ_A: Incomplete +PR_EMS_AB_DXA_IMP_SEQ_W: Incomplete +PR_EMS_AB_DXA_IMP_SEQ_TIME: Incomplete +PR_EMS_AB_DXA_IMP_SEQ_USN: Incomplete +PR_EMS_AB_DXA_IMPORT_NOW: Incomplete +PR_EMS_AB_DXA_IN_TEMPLATE_MAP: Incomplete +PR_EMS_AB_DXA_IN_TEMPLATE_MAP_A: Incomplete +PR_EMS_AB_DXA_IN_TEMPLATE_MAP_W: Incomplete +PR_EMS_AB_DXA_LOCAL_ADMIN: Incomplete +PR_EMS_AB_DXA_LOCAL_ADMIN_A: Incomplete +PR_EMS_AB_DXA_LOCAL_ADMIN_W: Incomplete +PR_EMS_AB_DXA_LOCAL_ADMIN_O: Incomplete +PR_EMS_AB_DXA_LOCAL_ADMIN_T: 
Incomplete +PR_EMS_AB_DXA_LOGGING_LEVEL: Incomplete +PR_EMS_AB_DXA_NATIVE_ADDRESS_TYPE: Incomplete +PR_EMS_AB_DXA_NATIVE_ADDRESS_TYPE_A: Incomplete +PR_EMS_AB_DXA_NATIVE_ADDRESS_TYPE_W: Incomplete +PR_EMS_AB_DXA_OUT_TEMPLATE_MAP: Incomplete +PR_EMS_AB_DXA_OUT_TEMPLATE_MAP_A: Incomplete +PR_EMS_AB_DXA_OUT_TEMPLATE_MAP_W: Incomplete +PR_EMS_AB_DXA_PASSWORD: Incomplete +PR_EMS_AB_DXA_PASSWORD_A: Incomplete +PR_EMS_AB_DXA_PASSWORD_W: Incomplete +PR_EMS_AB_DXA_PREV_EXCHANGE_OPTIONS: Incomplete +PR_EMS_AB_DXA_PREV_EXPORT_NATIVE_ONLY: Incomplete +PR_EMS_AB_DXA_PREV_IN_EXCHANGE_SENSITIVITY: Incomplete +PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES: Incomplete +PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES_A: Incomplete +PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES_W: Incomplete +PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES_O: Incomplete +PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES_T: Incomplete +PR_EMS_AB_DXA_PREV_REPLICATION_SENSITIVITY: Incomplete +PR_EMS_AB_DXA_PREV_TEMPLATE_OPTIONS: Incomplete +PR_EMS_AB_DXA_PREV_TYPES: Incomplete +PR_EMS_AB_DXA_RECIPIENT_CP: Incomplete +PR_EMS_AB_DXA_RECIPIENT_CP_A: Incomplete +PR_EMS_AB_DXA_RECIPIENT_CP_W: Incomplete +PR_EMS_AB_DXA_REMOTE_CLIENT: Incomplete +PR_EMS_AB_DXA_REMOTE_CLIENT_A: Incomplete +PR_EMS_AB_DXA_REMOTE_CLIENT_W: Incomplete +PR_EMS_AB_DXA_REMOTE_CLIENT_O: Incomplete +PR_EMS_AB_DXA_REMOTE_CLIENT_T: Incomplete +PR_EMS_AB_DXA_REQ_SEQ: Incomplete +PR_EMS_AB_DXA_REQ_SEQ_A: Incomplete +PR_EMS_AB_DXA_REQ_SEQ_W: Incomplete +PR_EMS_AB_DXA_REQ_SEQ_TIME: Incomplete +PR_EMS_AB_DXA_REQ_SEQ_USN: Incomplete +PR_EMS_AB_DXA_REQNAME: Incomplete +PR_EMS_AB_DXA_REQNAME_A: Incomplete +PR_EMS_AB_DXA_REQNAME_W: Incomplete +PR_EMS_AB_DXA_SVR_SEQ: Incomplete +PR_EMS_AB_DXA_SVR_SEQ_A: Incomplete +PR_EMS_AB_DXA_SVR_SEQ_W: Incomplete +PR_EMS_AB_DXA_SVR_SEQ_TIME: Incomplete +PR_EMS_AB_DXA_SVR_SEQ_USN: Incomplete +PR_EMS_AB_DXA_TASK: Incomplete +PR_EMS_AB_DXA_TEMPLATE_OPTIONS: Incomplete +PR_EMS_AB_DXA_TEMPLATE_TIMESTAMP: Incomplete +PR_EMS_AB_DXA_TYPES: Incomplete +PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST: Incomplete +PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST_A: Incomplete +PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST_W: Incomplete +PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST_O: Incomplete +PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST_T: Incomplete +PR_EMS_AB_ENABLED_PROTOCOLS: Incomplete +PR_EMS_AB_ENCAPSULATION_METHOD: Incomplete +PR_EMS_AB_ENCRYPT: Incomplete +PR_EMS_AB_ENCRYPT_ALG_LIST_NA: Incomplete +PR_EMS_AB_ENCRYPT_ALG_LIST_NA_A: Incomplete +PR_EMS_AB_ENCRYPT_ALG_LIST_NA_W: Incomplete +PR_EMS_AB_ENCRYPT_ALG_LIST_OTHER: Incomplete +PR_EMS_AB_ENCRYPT_ALG_LIST_OTHER_A: Incomplete +PR_EMS_AB_ENCRYPT_ALG_LIST_OTHER_W: Incomplete +PR_EMS_AB_ENCRYPT_ALG_SELECTED_NA: Incomplete +PR_EMS_AB_ENCRYPT_ALG_SELECTED_NA_A: Incomplete +PR_EMS_AB_ENCRYPT_ALG_SELECTED_NA_W: Incomplete +PR_EMS_AB_ENCRYPT_ALG_SELECTED_OTHER: Incomplete +PR_EMS_AB_ENCRYPT_ALG_SELECTED_OTHER_A: Incomplete +PR_EMS_AB_ENCRYPT_ALG_SELECTED_OTHER_W: Incomplete +PR_EMS_AB_EXPAND_DLS_LOCALLY: Incomplete +PR_EMS_AB_EXPIRATION_TIME: Incomplete +PR_EMS_AB_EXPORT_CONTAINERS: Incomplete +PR_EMS_AB_EXPORT_CONTAINERS_A: Incomplete +PR_EMS_AB_EXPORT_CONTAINERS_W: Incomplete +PR_EMS_AB_EXPORT_CONTAINERS_O: Incomplete +PR_EMS_AB_EXPORT_CONTAINERS_T: Incomplete +PR_EMS_AB_EXPORT_CUSTOM_RECIPIENTS: Incomplete +PR_EMS_AB_EXTENDED_CHARS_ALLOWED: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_1: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_1_A: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_1_W: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_10: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_10_A: Incomplete 
+PR_EMS_AB_EXTENSION_ATTRIBUTE_10_W: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_2: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_2_A: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_2_W: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_3: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_3_A: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_3_W: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_4: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_4_A: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_4_W: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_5: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_5_A: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_5_W: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_6: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_6_A: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_6_W: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_7: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_7_A: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_7_W: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_8: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_8_A: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_8_W: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_9: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_9_A: Incomplete +PR_EMS_AB_EXTENSION_ATTRIBUTE_9_W: Incomplete +PR_EMS_AB_EXTENSION_DATA: Incomplete +PR_EMS_AB_EXTENSION_NAME: Incomplete +PR_EMS_AB_EXTENSION_NAME_A: Incomplete +PR_EMS_AB_EXTENSION_NAME_W: Incomplete +PR_EMS_AB_EXTENSION_NAME_INHERITED: Incomplete +PR_EMS_AB_EXTENSION_NAME_INHERITED_A: Incomplete +PR_EMS_AB_EXTENSION_NAME_INHERITED_W: Incomplete +PR_EMS_AB_FACSIMILE_TELEPHONE_NUMBER: Incomplete +PR_EMS_AB_FILE_VERSION: Incomplete +PR_EMS_AB_FILTER_LOCAL_ADDRESSES: Incomplete +PR_EMS_AB_FOLDER_PATHNAME: Incomplete +PR_EMS_AB_FOLDER_PATHNAME_A: Incomplete +PR_EMS_AB_FOLDER_PATHNAME_W: Incomplete +PR_EMS_AB_FOLDERS_CONTAINER: Incomplete +PR_EMS_AB_FOLDERS_CONTAINER_A: Incomplete +PR_EMS_AB_FOLDERS_CONTAINER_W: Incomplete +PR_EMS_AB_FOLDERS_CONTAINER_O: Incomplete +PR_EMS_AB_FOLDERS_CONTAINER_T: Incomplete +PR_EMS_AB_GARBAGE_COLL_PERIOD: Incomplete +PR_EMS_AB_GATEWAY_LOCAL_CRED: Incomplete +PR_EMS_AB_GATEWAY_LOCAL_CRED_A: Incomplete +PR_EMS_AB_GATEWAY_LOCAL_CRED_W: Incomplete +PR_EMS_AB_GATEWAY_LOCAL_DESIG: Incomplete +PR_EMS_AB_GATEWAY_LOCAL_DESIG_A: Incomplete +PR_EMS_AB_GATEWAY_LOCAL_DESIG_W: Incomplete +PR_EMS_AB_GATEWAY_PROXY: Incomplete +PR_EMS_AB_GATEWAY_PROXY_A: Incomplete +PR_EMS_AB_GATEWAY_PROXY_W: Incomplete +PR_EMS_AB_GATEWAY_ROUTING_TREE: Incomplete +PR_EMS_AB_GWART_LAST_MODIFIED: Incomplete +PR_EMS_AB_HAS_FULL_REPLICA_NCS: Incomplete +PR_EMS_AB_HAS_FULL_REPLICA_NCS_A: Incomplete +PR_EMS_AB_HAS_FULL_REPLICA_NCS_W: Incomplete +PR_EMS_AB_HAS_FULL_REPLICA_NCS_O: Incomplete +PR_EMS_AB_HAS_FULL_REPLICA_NCS_T: Incomplete +PR_EMS_AB_HAS_MASTER_NCS: Incomplete +PR_EMS_AB_HAS_MASTER_NCS_A: Incomplete +PR_EMS_AB_HAS_MASTER_NCS_W: Incomplete +PR_EMS_AB_HAS_MASTER_NCS_O: Incomplete +PR_EMS_AB_HAS_MASTER_NCS_T: Incomplete +PR_EMS_AB_HELP_DATA16: Incomplete +PR_EMS_AB_HELP_DATA32: Incomplete +PR_EMS_AB_HELP_FILE_NAME: Incomplete +PR_EMS_AB_HELP_FILE_NAME_A: Incomplete +PR_EMS_AB_HELP_FILE_NAME_W: Incomplete +PR_EMS_AB_HEURISTICS: Incomplete +PR_EMS_AB_HIDE_DL_MEMBERSHIP: Incomplete +PR_EMS_AB_HIDE_FROM_ADDRESS_BOOK: Incomplete +PR_EMS_AB_HOME_MDB: Incomplete +PR_EMS_AB_HOME_MDB_A: Incomplete +PR_EMS_AB_HOME_MDB_W: Incomplete +PR_EMS_AB_HOME_MDB_O: Incomplete +PR_EMS_AB_HOME_MDB_T: Incomplete +PR_EMS_AB_HOME_MDB_BL: Incomplete +PR_EMS_AB_HOME_MDB_BL_A: Incomplete +PR_EMS_AB_HOME_MDB_BL_W: Incomplete +PR_EMS_AB_HOME_MDB_BL_O: Incomplete +PR_EMS_AB_HOME_MDB_BL_T: Incomplete +PR_EMS_AB_HOME_MTA: 
Incomplete +PR_EMS_AB_HOME_MTA_A: Incomplete +PR_EMS_AB_HOME_MTA_W: Incomplete +PR_EMS_AB_HOME_MTA_O: Incomplete +PR_EMS_AB_HOME_MTA_T: Incomplete +PR_EMS_AB_HOME_PUBLIC_SERVER: Incomplete +PR_EMS_AB_HOME_PUBLIC_SERVER_A: Incomplete +PR_EMS_AB_HOME_PUBLIC_SERVER_W: Incomplete +PR_EMS_AB_HOME_PUBLIC_SERVER_O: Incomplete +PR_EMS_AB_HOME_PUBLIC_SERVER_T: Incomplete +PR_EMS_AB_IMPORT_CONTAINER: Incomplete +PR_EMS_AB_IMPORT_CONTAINER_A: Incomplete +PR_EMS_AB_IMPORT_CONTAINER_W: Incomplete +PR_EMS_AB_IMPORT_CONTAINER_O: Incomplete +PR_EMS_AB_IMPORT_CONTAINER_T: Incomplete +PR_EMS_AB_IMPORT_SENSITIVITY: Incomplete +PR_EMS_AB_IMPORTED_FROM: Incomplete +PR_EMS_AB_IMPORTED_FROM_A: Incomplete +PR_EMS_AB_IMPORTED_FROM_W: Incomplete +PR_EMS_AB_INBOUND_SITES: Incomplete +PR_EMS_AB_INBOUND_SITES_A: Incomplete +PR_EMS_AB_INBOUND_SITES_W: Incomplete +PR_EMS_AB_INBOUND_SITES_O: Incomplete +PR_EMS_AB_INBOUND_SITES_T: Incomplete +PR_EMS_AB_INSTANCE_TYPE: Incomplete +PR_EMS_AB_INTERNATIONAL_ISDN_NUMBER: Incomplete +PR_EMS_AB_INTERNATIONAL_ISDN_NUMBER_A: Incomplete +PR_EMS_AB_INTERNATIONAL_ISDN_NUMBER_W: Incomplete +PR_EMS_AB_INVOCATION_ID: Incomplete +PR_EMS_AB_IS_DELETED: Incomplete +PR_EMS_AB_IS_MEMBER_OF_DL: Incomplete +PR_EMS_AB_IS_MEMBER_OF_DL_A: Incomplete +PR_EMS_AB_IS_MEMBER_OF_DL_W: Incomplete +PR_EMS_AB_IS_MEMBER_OF_DL_O: Incomplete +PR_EMS_AB_IS_MEMBER_OF_DL_T: Incomplete +PR_EMS_AB_IS_SINGLE_VALUED: Incomplete +PR_EMS_AB_KCC_STATUS: Incomplete +PR_EMS_AB_KM_SERVER: Incomplete +PR_EMS_AB_KM_SERVER_A: Incomplete +PR_EMS_AB_KM_SERVER_W: Incomplete +PR_EMS_AB_KM_SERVER_O: Incomplete +PR_EMS_AB_KM_SERVER_T: Incomplete +PR_EMS_AB_KNOWLEDGE_INFORMATION: Incomplete +PR_EMS_AB_KNOWLEDGE_INFORMATION_A: Incomplete +PR_EMS_AB_KNOWLEDGE_INFORMATION_W: Incomplete +PR_EMS_AB_LANGUAGE: Incomplete +PR_EMS_AB_LDAP_DISPLAY_NAME: Incomplete +PR_EMS_AB_LDAP_DISPLAY_NAME_A: Incomplete +PR_EMS_AB_LDAP_DISPLAY_NAME_W: Incomplete +PR_EMS_AB_LINE_WRAP: Incomplete +PR_EMS_AB_LINK_ID: Incomplete +PR_EMS_AB_LOCAL_BRIDGE_HEAD: Incomplete +PR_EMS_AB_LOCAL_BRIDGE_HEAD_A: Incomplete +PR_EMS_AB_LOCAL_BRIDGE_HEAD_W: Incomplete +PR_EMS_AB_LOCAL_BRIDGE_HEAD_ADDRESS: Incomplete +PR_EMS_AB_LOCAL_BRIDGE_HEAD_ADDRESS_A: Incomplete +PR_EMS_AB_LOCAL_BRIDGE_HEAD_ADDRESS_W: Incomplete +PR_EMS_AB_LOCAL_INITIAL_TURN: Incomplete +PR_EMS_AB_LOCAL_SCOPE: Incomplete +PR_EMS_AB_LOCAL_SCOPE_A: Incomplete +PR_EMS_AB_LOCAL_SCOPE_W: Incomplete +PR_EMS_AB_LOCAL_SCOPE_O: Incomplete +PR_EMS_AB_LOCAL_SCOPE_T: Incomplete +PR_EMS_AB_LOG_FILENAME: Incomplete +PR_EMS_AB_LOG_FILENAME_A: Incomplete +PR_EMS_AB_LOG_FILENAME_W: Incomplete +PR_EMS_AB_LOG_ROLLOVER_INTERVAL: Incomplete +PR_EMS_AB_MAINTAIN_AUTOREPLY_HISTORY: Incomplete +PR_EMS_AB_MANAGER: Incomplete +PR_EMS_AB_MANAGER_A: Incomplete +PR_EMS_AB_MANAGER_W: Incomplete +PR_EMS_AB_MANAGER_O: Incomplete +PR_EMS_AB_MANAGER_T: Incomplete +PR_EMS_AB_MAPI_DISPLAY_TYPE: Incomplete +PR_EMS_AB_MAPI_ID: Incomplete +PR_EMS_AB_MAXIMUM_OBJECT_ID: Incomplete +PR_EMS_AB_MDB_BACKOFF_INTERVAL: Incomplete +PR_EMS_AB_MDB_MSG_TIME_OUT_PERIOD: Incomplete +PR_EMS_AB_MDB_OVER_QUOTA_LIMIT: Incomplete +PR_EMS_AB_MDB_STORAGE_QUOTA: Incomplete +PR_EMS_AB_MDB_UNREAD_LIMIT: Incomplete +PR_EMS_AB_MDB_USE_DEFAULTS: Incomplete +PR_EMS_AB_MEMBER: Incomplete +PR_EMS_AB_MEMBER_A: Incomplete +PR_EMS_AB_MEMBER_W: Incomplete +PR_EMS_AB_MEMBER_O: Incomplete +PR_EMS_AB_MEMBER_T: Incomplete +PR_EMS_AB_MESSAGE_TRACKING_ENABLED: Incomplete +PR_EMS_AB_MONITOR_CLOCK: Incomplete +PR_EMS_AB_MONITOR_SERVERS: Incomplete +PR_EMS_AB_MONITOR_SERVICES: 
Incomplete +PR_EMS_AB_MONITORED_CONFIGURATIONS: Incomplete +PR_EMS_AB_MONITORED_CONFIGURATIONS_A: Incomplete +PR_EMS_AB_MONITORED_CONFIGURATIONS_W: Incomplete +PR_EMS_AB_MONITORED_CONFIGURATIONS_O: Incomplete +PR_EMS_AB_MONITORED_CONFIGURATIONS_T: Incomplete +PR_EMS_AB_MONITORED_SERVERS: Incomplete +PR_EMS_AB_MONITORED_SERVERS_A: Incomplete +PR_EMS_AB_MONITORED_SERVERS_W: Incomplete +PR_EMS_AB_MONITORED_SERVERS_O: Incomplete +PR_EMS_AB_MONITORED_SERVERS_T: Incomplete +PR_EMS_AB_MONITORED_SERVICES: Incomplete +PR_EMS_AB_MONITORED_SERVICES_A: Incomplete +PR_EMS_AB_MONITORED_SERVICES_W: Incomplete +PR_EMS_AB_MONITORING_ALERT_DELAY: Incomplete +PR_EMS_AB_MONITORING_ALERT_UNITS: Incomplete +PR_EMS_AB_MONITORING_AVAILABILITY_STYLE: Incomplete +PR_EMS_AB_MONITORING_AVAILABILITY_WINDOW: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_MAIL: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_MAIL_A: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_MAIL_W: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_MAIL_O: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_MAIL_T: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_RPC: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_RPC_A: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_RPC_W: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_RPC_O: Incomplete +PR_EMS_AB_MONITORING_CACHED_VIA_RPC_T: Incomplete +PR_EMS_AB_MONITORING_ESCALATION_PROCEDURE: Incomplete +PR_EMS_AB_MONITORING_HOTSITE_POLL_INTERVAL: Incomplete +PR_EMS_AB_MONITORING_HOTSITE_POLL_UNITS: Incomplete +PR_EMS_AB_MONITORING_MAIL_UPDATE_INTERVAL: Incomplete +PR_EMS_AB_MONITORING_MAIL_UPDATE_UNITS: Incomplete +PR_EMS_AB_MONITORING_NORMAL_POLL_INTERVAL: Incomplete +PR_EMS_AB_MONITORING_NORMAL_POLL_UNITS: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS_A: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS_W: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS_O: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS_T: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS_NDR: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS_NDR_A: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS_NDR_W: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS_NDR_O: Incomplete +PR_EMS_AB_MONITORING_RECIPIENTS_NDR_T: Incomplete +PR_EMS_AB_MONITORING_RPC_UPDATE_INTERVAL: Incomplete +PR_EMS_AB_MONITORING_RPC_UPDATE_UNITS: Incomplete +PR_EMS_AB_MONITORING_WARNING_DELAY: Incomplete +PR_EMS_AB_MONITORING_WARNING_UNITS: Incomplete +PR_EMS_AB_MTA_LOCAL_CRED: Incomplete +PR_EMS_AB_MTA_LOCAL_CRED_A: Incomplete +PR_EMS_AB_MTA_LOCAL_CRED_W: Incomplete +PR_EMS_AB_MTA_LOCAL_DESIG: Incomplete +PR_EMS_AB_MTA_LOCAL_DESIG_A: Incomplete +PR_EMS_AB_MTA_LOCAL_DESIG_W: Incomplete +PR_EMS_AB_N_ADDRESS: Incomplete +PR_EMS_AB_N_ADDRESS_TYPE: Incomplete +PR_EMS_AB_NETWORK_ADDRESS: Incomplete +PR_EMS_AB_NETWORK_ADDRESS_A: Incomplete +PR_EMS_AB_NETWORK_ADDRESS_W: Incomplete +PR_EMS_AB_NNTP_CHARACTER_SET: Incomplete +PR_EMS_AB_NNTP_CHARACTER_SET_A: Incomplete +PR_EMS_AB_NNTP_CHARACTER_SET_W: Incomplete +PR_EMS_AB_NNTP_CONTENT_FORMAT: Incomplete +PR_EMS_AB_NNTP_CONTENT_FORMAT_A: Incomplete +PR_EMS_AB_NNTP_CONTENT_FORMAT_W: Incomplete +PR_EMS_AB_NT_MACHINE_NAME: Incomplete +PR_EMS_AB_NT_MACHINE_NAME_A: Incomplete +PR_EMS_AB_NT_MACHINE_NAME_W: Incomplete +PR_EMS_AB_NT_SECURITY_DESCRIPTOR: Incomplete +PR_EMS_AB_NUM_OF_OPEN_RETRIES: Incomplete +PR_EMS_AB_NUM_OF_TRANSFER_RETRIES: Incomplete +PR_EMS_AB_OBJ_DIST_NAME: Incomplete +PR_EMS_AB_OBJ_DIST_NAME_A: Incomplete +PR_EMS_AB_OBJ_DIST_NAME_W: Incomplete +PR_EMS_AB_OBJ_DIST_NAME_O: Incomplete +PR_EMS_AB_OBJ_DIST_NAME_T: Incomplete +PR_EMS_AB_OBJECT_CLASS_CATEGORY: 
Incomplete +PR_EMS_AB_OBJECT_VERSION: Incomplete +PR_EMS_AB_OFF_LINE_AB_CONTAINERS: Incomplete +PR_EMS_AB_OFF_LINE_AB_CONTAINERS_A: Incomplete +PR_EMS_AB_OFF_LINE_AB_CONTAINERS_W: Incomplete +PR_EMS_AB_OFF_LINE_AB_CONTAINERS_O: Incomplete +PR_EMS_AB_OFF_LINE_AB_CONTAINERS_T: Incomplete +PR_EMS_AB_OFF_LINE_AB_SCHEDULE: Incomplete +PR_EMS_AB_OFF_LINE_AB_SERVER: Incomplete +PR_EMS_AB_OFF_LINE_AB_SERVER_A: Incomplete +PR_EMS_AB_OFF_LINE_AB_SERVER_W: Incomplete +PR_EMS_AB_OFF_LINE_AB_SERVER_O: Incomplete +PR_EMS_AB_OFF_LINE_AB_SERVER_T: Incomplete +PR_EMS_AB_OFF_LINE_AB_STYLE: Incomplete +PR_EMS_AB_OID_TYPE: Incomplete +PR_EMS_AB_OM_OBJECT_CLASS: Incomplete +PR_EMS_AB_OM_SYNTAX: Incomplete +PR_EMS_AB_OOF_REPLY_TO_ORIGINATOR: Incomplete +PR_EMS_AB_OPEN_RETRY_INTERVAL: Incomplete +PR_EMS_AB_ORGANIZATION_NAME: Incomplete +PR_EMS_AB_ORGANIZATION_NAME_A: Incomplete +PR_EMS_AB_ORGANIZATION_NAME_W: Incomplete +PR_EMS_AB_ORGANIZATIONAL_UNIT_NAME: Incomplete +PR_EMS_AB_ORGANIZATIONAL_UNIT_NAME_A: Incomplete +PR_EMS_AB_ORGANIZATIONAL_UNIT_NAME_W: Incomplete +PR_EMS_AB_ORIGINAL_DISPLAY_TABLE: Incomplete +PR_EMS_AB_ORIGINAL_DISPLAY_TABLE_MSDOS: Incomplete +PR_EMS_AB_OUTBOUND_SITES: Incomplete +PR_EMS_AB_OUTBOUND_SITES_A: Incomplete +PR_EMS_AB_OUTBOUND_SITES_W: Incomplete +PR_EMS_AB_OUTBOUND_SITES_O: Incomplete +PR_EMS_AB_OUTBOUND_SITES_T: Incomplete +PR_EMS_AB_OWNER: Incomplete +PR_EMS_AB_OWNER_A: Incomplete +PR_EMS_AB_OWNER_W: Incomplete +PR_EMS_AB_OWNER_O: Incomplete +PR_EMS_AB_OWNER_T: Incomplete +PR_EMS_AB_OWNER_BL: Incomplete +PR_EMS_AB_OWNER_BL_A: Incomplete +PR_EMS_AB_OWNER_BL_W: Incomplete +PR_EMS_AB_OWNER_BL_O: Incomplete +PR_EMS_AB_OWNER_BL_T: Incomplete +PR_EMS_AB_P_SELECTOR: Incomplete +PR_EMS_AB_P_SELECTOR_INBOUND: Incomplete +PR_EMS_AB_PER_MSG_DIALOG_DISPLAY_TABLE: Incomplete +PR_EMS_AB_PER_RECIP_DIALOG_DISPLAY_TABLE: Incomplete +PR_EMS_AB_PERIOD_REP_SYNC_TIMES: Incomplete +PR_EMS_AB_PERIOD_REPL_STAGGER: Incomplete +PR_EMS_AB_PF_CONTACTS: Incomplete +PR_EMS_AB_PF_CONTACTS_A: Incomplete +PR_EMS_AB_PF_CONTACTS_W: Incomplete +PR_EMS_AB_PF_CONTACTS_O: Incomplete +PR_EMS_AB_PF_CONTACTS_T: Incomplete +PR_EMS_AB_POP_CHARACTER_SET: Incomplete +PR_EMS_AB_POP_CHARACTER_SET_A: Incomplete +PR_EMS_AB_POP_CHARACTER_SET_W: Incomplete +PR_EMS_AB_POP_CONTENT_FORMAT: Incomplete +PR_EMS_AB_POP_CONTENT_FORMAT_A: Incomplete +PR_EMS_AB_POP_CONTENT_FORMAT_W: Incomplete +PR_EMS_AB_POSTAL_ADDRESS: Incomplete +PR_EMS_AB_PREFERRED_DELIVERY_METHOD: Incomplete +PR_EMS_AB_PRMD: Incomplete +PR_EMS_AB_PRMD_A: Incomplete +PR_EMS_AB_PRMD_W: Incomplete +PR_EMS_AB_PROXY_ADDRESSES: Incomplete +PR_EMS_AB_PROXY_ADDRESSES_A: Incomplete +PR_EMS_AB_PROXY_ADDRESSES_W: Incomplete +PR_EMS_AB_PROXY_GENERATOR_DLL: Incomplete +PR_EMS_AB_PROXY_GENERATOR_DLL_A: Incomplete +PR_EMS_AB_PROXY_GENERATOR_DLL_W: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES_A: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES_W: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES_O: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES_T: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES_BL: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES_BL_A: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES_BL_W: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES_BL_O: Incomplete +PR_EMS_AB_PUBLIC_DELEGATES_BL_T: Incomplete +PR_EMS_AB_QUOTA_NOTIFICATION_SCHEDULE: Incomplete +PR_EMS_AB_QUOTA_NOTIFICATION_STYLE: Incomplete +PR_EMS_AB_RANGE_LOWER: Incomplete +PR_EMS_AB_RANGE_UPPER: Incomplete +PR_EMS_AB_RAS_CALLBACK_NUMBER: Incomplete +PR_EMS_AB_RAS_CALLBACK_NUMBER_A: Incomplete +PR_EMS_AB_RAS_CALLBACK_NUMBER_W: Incomplete 
+PR_EMS_AB_RAS_PHONE_NUMBER: Incomplete +PR_EMS_AB_RAS_PHONE_NUMBER_A: Incomplete +PR_EMS_AB_RAS_PHONE_NUMBER_W: Incomplete +PR_EMS_AB_RAS_PHONEBOOK_ENTRY_NAME: Incomplete +PR_EMS_AB_RAS_PHONEBOOK_ENTRY_NAME_A: Incomplete +PR_EMS_AB_RAS_PHONEBOOK_ENTRY_NAME_W: Incomplete +PR_EMS_AB_RAS_REMOTE_SRVR_NAME: Incomplete +PR_EMS_AB_RAS_REMOTE_SRVR_NAME_A: Incomplete +PR_EMS_AB_RAS_REMOTE_SRVR_NAME_W: Incomplete +PR_EMS_AB_REGISTERED_ADDRESS: Incomplete +PR_EMS_AB_REMOTE_BRIDGE_HEAD: Incomplete +PR_EMS_AB_REMOTE_BRIDGE_HEAD_A: Incomplete +PR_EMS_AB_REMOTE_BRIDGE_HEAD_W: Incomplete +PR_EMS_AB_REMOTE_BRIDGE_HEAD_ADDRESS: Incomplete +PR_EMS_AB_REMOTE_BRIDGE_HEAD_ADDRESS_A: Incomplete +PR_EMS_AB_REMOTE_BRIDGE_HEAD_ADDRESS_W: Incomplete +PR_EMS_AB_REMOTE_OUT_BH_SERVER: Incomplete +PR_EMS_AB_REMOTE_OUT_BH_SERVER_A: Incomplete +PR_EMS_AB_REMOTE_OUT_BH_SERVER_W: Incomplete +PR_EMS_AB_REMOTE_OUT_BH_SERVER_O: Incomplete +PR_EMS_AB_REMOTE_OUT_BH_SERVER_T: Incomplete +PR_EMS_AB_REMOTE_SITE: Incomplete +PR_EMS_AB_REMOTE_SITE_A: Incomplete +PR_EMS_AB_REMOTE_SITE_W: Incomplete +PR_EMS_AB_REMOTE_SITE_O: Incomplete +PR_EMS_AB_REMOTE_SITE_T: Incomplete +PR_EMS_AB_REPLICATION_MAIL_MSG_SIZE: Incomplete +PR_EMS_AB_REPLICATION_SENSITIVITY: Incomplete +PR_EMS_AB_REPLICATION_STAGGER: Incomplete +PR_EMS_AB_REPORT_TO_ORIGINATOR: Incomplete +PR_EMS_AB_REPORT_TO_OWNER: Incomplete +PR_EMS_AB_REPORTS: Incomplete +PR_EMS_AB_REPORTS_A: Incomplete +PR_EMS_AB_REPORTS_W: Incomplete +PR_EMS_AB_REPORTS_O: Incomplete +PR_EMS_AB_REPORTS_T: Incomplete +PR_EMS_AB_REQ_SEQ: Incomplete +PR_EMS_AB_RESPONSIBLE_LOCAL_DXA: Incomplete +PR_EMS_AB_RESPONSIBLE_LOCAL_DXA_A: Incomplete +PR_EMS_AB_RESPONSIBLE_LOCAL_DXA_W: Incomplete +PR_EMS_AB_RESPONSIBLE_LOCAL_DXA_O: Incomplete +PR_EMS_AB_RESPONSIBLE_LOCAL_DXA_T: Incomplete +PR_EMS_AB_RID_SERVER: Incomplete +PR_EMS_AB_RID_SERVER_A: Incomplete +PR_EMS_AB_RID_SERVER_W: Incomplete +PR_EMS_AB_RID_SERVER_O: Incomplete +PR_EMS_AB_RID_SERVER_T: Incomplete +PR_EMS_AB_ROLE_OCCUPANT: Incomplete +PR_EMS_AB_ROLE_OCCUPANT_A: Incomplete +PR_EMS_AB_ROLE_OCCUPANT_W: Incomplete +PR_EMS_AB_ROLE_OCCUPANT_O: Incomplete +PR_EMS_AB_ROLE_OCCUPANT_T: Incomplete +PR_EMS_AB_ROUTING_LIST: Incomplete +PR_EMS_AB_ROUTING_LIST_A: Incomplete +PR_EMS_AB_ROUTING_LIST_W: Incomplete +PR_EMS_AB_RTS_CHECKPOINT_SIZE: Incomplete +PR_EMS_AB_RTS_RECOVERY_TIMEOUT: Incomplete +PR_EMS_AB_RTS_WINDOW_SIZE: Incomplete +PR_EMS_AB_RUNS_ON: Incomplete +PR_EMS_AB_RUNS_ON_A: Incomplete +PR_EMS_AB_RUNS_ON_W: Incomplete +PR_EMS_AB_RUNS_ON_O: Incomplete +PR_EMS_AB_RUNS_ON_T: Incomplete +PR_EMS_AB_S_SELECTOR: Incomplete +PR_EMS_AB_S_SELECTOR_INBOUND: Incomplete +PR_EMS_AB_SCHEMA_FLAGS: Incomplete +PR_EMS_AB_SCHEMA_VERSION: Incomplete +PR_EMS_AB_SEARCH_FLAGS: Incomplete +PR_EMS_AB_SEARCH_GUIDE: Incomplete +PR_EMS_AB_SECURITY_PROTOCOL: Incomplete +PR_EMS_AB_SEE_ALSO: Incomplete +PR_EMS_AB_SEE_ALSO_A: Incomplete +PR_EMS_AB_SEE_ALSO_W: Incomplete +PR_EMS_AB_SEE_ALSO_O: Incomplete +PR_EMS_AB_SEE_ALSO_T: Incomplete +PR_EMS_AB_SERIAL_NUMBER: Incomplete +PR_EMS_AB_SERIAL_NUMBER_A: Incomplete +PR_EMS_AB_SERIAL_NUMBER_W: Incomplete +PR_EMS_AB_SERVICE_ACTION_FIRST: Incomplete +PR_EMS_AB_SERVICE_ACTION_OTHER: Incomplete +PR_EMS_AB_SERVICE_ACTION_SECOND: Incomplete +PR_EMS_AB_SERVICE_RESTART_DELAY: Incomplete +PR_EMS_AB_SERVICE_RESTART_MESSAGE: Incomplete +PR_EMS_AB_SERVICE_RESTART_MESSAGE_A: Incomplete +PR_EMS_AB_SERVICE_RESTART_MESSAGE_W: Incomplete +PR_EMS_AB_SESSION_DISCONNECT_TIMER: Incomplete +PR_EMS_AB_SITE_AFFINITY: Incomplete +PR_EMS_AB_SITE_AFFINITY_A: 
Incomplete +PR_EMS_AB_SITE_AFFINITY_W: Incomplete +PR_EMS_AB_SITE_FOLDER_GUID: Incomplete +PR_EMS_AB_SITE_FOLDER_SERVER: Incomplete +PR_EMS_AB_SITE_FOLDER_SERVER_A: Incomplete +PR_EMS_AB_SITE_FOLDER_SERVER_W: Incomplete +PR_EMS_AB_SITE_FOLDER_SERVER_O: Incomplete +PR_EMS_AB_SITE_FOLDER_SERVER_T: Incomplete +PR_EMS_AB_SITE_PROXY_SPACE: Incomplete +PR_EMS_AB_SITE_PROXY_SPACE_A: Incomplete +PR_EMS_AB_SITE_PROXY_SPACE_W: Incomplete +PR_EMS_AB_SPACE_LAST_COMPUTED: Incomplete +PR_EMS_AB_STREET_ADDRESS: Incomplete +PR_EMS_AB_STREET_ADDRESS_A: Incomplete +PR_EMS_AB_STREET_ADDRESS_W: Incomplete +PR_EMS_AB_SUB_REFS: Incomplete +PR_EMS_AB_SUB_REFS_A: Incomplete +PR_EMS_AB_SUB_REFS_W: Incomplete +PR_EMS_AB_SUB_REFS_O: Incomplete +PR_EMS_AB_SUB_REFS_T: Incomplete +PR_EMS_AB_SUB_SITE: Incomplete +PR_EMS_AB_SUB_SITE_A: Incomplete +PR_EMS_AB_SUB_SITE_W: Incomplete +PR_EMS_AB_SUBMISSION_CONT_LENGTH: Incomplete +PR_EMS_AB_SUPPORTED_APPLICATION_CONTEXT: Incomplete +PR_EMS_AB_SUPPORTING_STACK: Incomplete +PR_EMS_AB_SUPPORTING_STACK_A: Incomplete +PR_EMS_AB_SUPPORTING_STACK_W: Incomplete +PR_EMS_AB_SUPPORTING_STACK_O: Incomplete +PR_EMS_AB_SUPPORTING_STACK_T: Incomplete +PR_EMS_AB_SUPPORTING_STACK_BL: Incomplete +PR_EMS_AB_SUPPORTING_STACK_BL_A: Incomplete +PR_EMS_AB_SUPPORTING_STACK_BL_W: Incomplete +PR_EMS_AB_SUPPORTING_STACK_BL_O: Incomplete +PR_EMS_AB_SUPPORTING_STACK_BL_T: Incomplete +PR_EMS_AB_T_SELECTOR: Incomplete +PR_EMS_AB_T_SELECTOR_INBOUND: Incomplete +PR_EMS_AB_TARGET_ADDRESS: Incomplete +PR_EMS_AB_TARGET_ADDRESS_A: Incomplete +PR_EMS_AB_TARGET_ADDRESS_W: Incomplete +PR_EMS_AB_TARGET_MTAS: Incomplete +PR_EMS_AB_TARGET_MTAS_A: Incomplete +PR_EMS_AB_TARGET_MTAS_W: Incomplete +PR_EMS_AB_TELEPHONE_NUMBER: Incomplete +PR_EMS_AB_TELEPHONE_NUMBER_A: Incomplete +PR_EMS_AB_TELEPHONE_NUMBER_W: Incomplete +PR_EMS_AB_TELETEX_TERMINAL_IDENTIFIER: Incomplete +PR_EMS_AB_TEMP_ASSOC_THRESHOLD: Incomplete +PR_EMS_AB_TOMBSTONE_LIFETIME: Incomplete +PR_EMS_AB_TRACKING_LOG_PATH_NAME: Incomplete +PR_EMS_AB_TRACKING_LOG_PATH_NAME_A: Incomplete +PR_EMS_AB_TRACKING_LOG_PATH_NAME_W: Incomplete +PR_EMS_AB_TRANS_RETRY_MINS: Incomplete +PR_EMS_AB_TRANS_TIMEOUT_MINS: Incomplete +PR_EMS_AB_TRANSFER_RETRY_INTERVAL: Incomplete +PR_EMS_AB_TRANSFER_TIMEOUT_NON_URGENT: Incomplete +PR_EMS_AB_TRANSFER_TIMEOUT_NORMAL: Incomplete +PR_EMS_AB_TRANSFER_TIMEOUT_URGENT: Incomplete +PR_EMS_AB_TRANSLATION_TABLE_USED: Incomplete +PR_EMS_AB_TRANSPORT_EXPEDITED_DATA: Incomplete +PR_EMS_AB_TRUST_LEVEL: Incomplete +PR_EMS_AB_TURN_REQUEST_THRESHOLD: Incomplete +PR_EMS_AB_TWO_WAY_ALTERNATE_FACILITY: Incomplete +PR_EMS_AB_UNAUTH_ORIG_BL: Incomplete +PR_EMS_AB_UNAUTH_ORIG_BL_A: Incomplete +PR_EMS_AB_UNAUTH_ORIG_BL_W: Incomplete +PR_EMS_AB_UNAUTH_ORIG_BL_O: Incomplete +PR_EMS_AB_UNAUTH_ORIG_BL_T: Incomplete +PR_EMS_AB_USE_SERVER_VALUES: Incomplete +PR_EMS_AB_USER_PASSWORD: Incomplete +PR_EMS_AB_USN_CHANGED: Incomplete +PR_EMS_AB_USN_CREATED: Incomplete +PR_EMS_AB_USN_DSA_LAST_OBJ_REMOVED: Incomplete +PR_EMS_AB_USN_INTERSITE: Incomplete +PR_EMS_AB_USN_LAST_OBJ_REM: Incomplete +PR_EMS_AB_USN_SOURCE: Incomplete +PR_EMS_AB_WWW_HOME_PAGE: Incomplete +PR_EMS_AB_WWW_HOME_PAGE_A: Incomplete +PR_EMS_AB_WWW_HOME_PAGE_W: Incomplete +PR_EMS_AB_X121_ADDRESS: Incomplete +PR_EMS_AB_X121_ADDRESS_A: Incomplete +PR_EMS_AB_X121_ADDRESS_W: Incomplete +PR_EMS_AB_X25_CALL_USER_DATA_INCOMING: Incomplete +PR_EMS_AB_X25_CALL_USER_DATA_OUTGOING: Incomplete +PR_EMS_AB_X25_FACILITIES_DATA_INCOMING: Incomplete +PR_EMS_AB_X25_FACILITIES_DATA_OUTGOING: Incomplete 
+PR_EMS_AB_X25_LEASED_LINE_PORT: Incomplete +PR_EMS_AB_X25_LEASED_OR_SWITCHED: Incomplete +PR_EMS_AB_X25_REMOTE_MTA_PHONE: Incomplete +PR_EMS_AB_X25_REMOTE_MTA_PHONE_A: Incomplete +PR_EMS_AB_X25_REMOTE_MTA_PHONE_W: Incomplete +PR_EMS_AB_X400_ATTACHMENT_TYPE: Incomplete +PR_EMS_AB_X400_SELECTOR_SYNTAX: Incomplete +PR_EMS_AB_X500_ACCESS_CONTROL_LIST: Incomplete +PR_EMS_AB_XMIT_TIMEOUT_NON_URGENT: Incomplete +PR_EMS_AB_XMIT_TIMEOUT_NORMAL: Incomplete +PR_EMS_AB_XMIT_TIMEOUT_URGENT: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/exchange.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/exchange.pyi new file mode 100644 index 00000000..ac1a9741 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/exchange.pyi @@ -0,0 +1,9 @@ +import _win32typing + +OPENSTORE_HOME_LOGON: int +OPENSTORE_OVERRIDE_HOME_MDB: int +OPENSTORE_PUBLIC: int +OPENSTORE_TAKE_OWNERSHIP: int +OPENSTORE_USE_ADMIN_PRIVILEGE: int +IID_IExchangeManageStore: _win32typing.PyIID +IID_IExchangeManageStoreEx: _win32typing.PyIID diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/mapi.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/mapi.pyi new file mode 100644 index 00000000..2eddb48b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/mapi.pyi @@ -0,0 +1,331 @@ +from _typeshed import Incomplete + +import _win32typing + +def HexFromBin(val: str) -> str: ... +def BinFromHex(val: str) -> str: ... +def MAPIUninitialize() -> None: ... +def MAPIInitialize(init: _win32typing.MAPIINIT_0) -> None: ... +def MAPILogonEx(uiParam, profileName: str, password: str | None = ..., flags=...) -> _win32typing.PyIMAPISession: ... +def MAPIAdminProfiles(fFlags) -> _win32typing.PyIProfAdmin: ... +def HrQueryAllRows( + table: _win32typing.PyIMAPITable, + properties: _win32typing.PySPropTagArray, + restrictions: _win32typing.PySRestriction, + sortOrderSet: _win32typing.PySSortOrderSet, + rowsMax, +): ... +def RTFSync(message: _win32typing.PyIMessage, flags): ... +def WrapCompressedRTFStream(stream: _win32typing.PyIStream, flags) -> _win32typing.PyIStream: ... +def WrapCompressedRTFStreamEx() -> tuple[_win32typing.PyIStream, Incomplete]: ... +def OpenIMsgSession(): ... +def CloseIMsgSession() -> None: ... +def OpenIMsgOnIStg( + session, + support, + storage: _win32typing.PyIStorage, + callback: Incomplete | None = ..., + callbackData: int = ..., + flags: int = ..., +) -> _win32typing.PyIMessage: ... +def RTFStreamToHTML(The_stream_to_read_the_uncompressed_RTF_from: _win32typing.PyIStream) -> None: ... +def OpenStreamOnFile(filename: str, flags: int = ..., prefix: str | None = ...) -> _win32typing.PyIStream: ... +def OpenStreamOnFileW(filename, flags: int = ..., prefix: Incomplete | None = ...) -> _win32typing.PyIStream: ... +def HrGetOneProp(prop: _win32typing.PyIMAPIProp, propTag): ... +def HrSetOneProp(prop: _win32typing.PyIMAPIProp, propValue: _win32typing.PySPropValue): ... +def HrAllocAdviseSink(callback, context): ... +def HrThisThreadAdviseSink(_object): ... +def HrDispatchNotifications(*args, **kwargs): ... # incomplete +def MAPIUIDFromBinary(*args, **kwargs): ... 
# incomplete + +AB_NO_DIALOG: int +ATTACH_BY_REF_ONLY: int +ATTACH_BY_REF_RESOLVE: int +ATTACH_BY_REFERENCE: int +ATTACH_BY_VALUE: int +ATTACH_EMBEDDED_MSG: int +ATTACH_OLE: int +BMR_EQZ: int +BMR_NEZ: int +BOOKMARK_BEGINNING: int +BOOKMARK_CURRENT: int +BOOKMARK_END: int +CCSF_8BITHEADERS: int +CCSF_EMBEDDED_MESSAGE: int +CCSF_INCLUDE_BCC: int +CCSF_NO_MSGID: int +CCSF_NOHEADERS: int +CCSF_PLAIN_TEXT_ONLY: int +CCSF_PRESERVE_SOURCE: int +CCSF_SMTP: int +CCSF_USE_RTF: int +CCSF_USE_TNEF: int +CLEAR_NRN_PENDING: int +CLEAR_READ_FLAG: int +CLEAR_RN_PENDING: int +CONVENIENT_DEPTH: int +DEL_FOLDERS: int +DEL_MESSAGES: int +DELETE_HARD_DELETE: int +DIR_BACKWARD: int +FL_FULLSTRING: int +FL_IGNORECASE: int +FL_IGNORENONSPACE: int +FL_LOOSE: int +FL_PREFIX: int +FL_SUBSTRING: int +FLUSH_ASYNC_OK: int +FLUSH_DOWNLOAD: int +FLUSH_FORCE: int +FLUSH_NO_UI: int +FLUSH_UPLOAD: int +fnevCriticalError: int +fnevExtended: int +fnevNewMail: int +fnevObjectCopied: int +fnevObjectCreated: int +fnevObjectDeleted: int +fnevObjectModified: int +fnevObjectMoved: int +fnevReservedForMapi: int +fnevSearchComplete: int +fnevStatusObjectModified: int +fnevTableModified: int +FOLDER_DIALOG: int +FOLDER_GENERIC: int +FOLDER_SEARCH: int +FORCE_SAVE: int +GENERATE_RECEIPT_ONLY: int +KEEP_OPEN_READONLY: int +KEEP_OPEN_READWRITE: int +MAIL_E_NAMENOTFOUND: int +MAPI_ABCONT: int +MAPI_ADDRBOOK: int +MAPI_ALLOW_OTHERS: int +MAPI_ASSOCIATED: int +MAPI_ATTACH: int +MAPI_BCC: int +MAPI_BEST_ACCESS: int +MAPI_CC: int +MAPI_CREATE: int +MAPI_DEFAULT_SERVICES: int +MAPI_DEFERRED_ERRORS: int +MAPI_DIALOG: int +MAPI_E_ACCOUNT_DISABLED: int +MAPI_E_AMBIGUOUS_RECIP: int +MAPI_E_BAD_CHARWIDTH: int +MAPI_E_BAD_COLUMN: int +MAPI_E_BAD_VALUE: int +MAPI_E_BUSY: int +MAPI_E_CALL_FAILED: int +MAPI_E_CANCEL: int +MAPI_E_COLLISION: int +MAPI_E_COMPUTED: int +MAPI_E_CORRUPT_DATA: int +MAPI_E_CORRUPT_STORE: int +MAPI_E_DECLINE_COPY: int +MAPI_E_DISK_ERROR: int +MAPI_E_END_OF_SESSION: int +MAPI_E_EXTENDED_ERROR: int +MAPI_E_FAILONEPROVIDER: int +MAPI_E_FOLDER_CYCLE: int +MAPI_E_HAS_FOLDERS: int +MAPI_E_HAS_MESSAGES: int +MAPI_E_INTERFACE_NOT_SUPPORTED: int +MAPI_E_INVALID_ACCESS_TIME: int +MAPI_E_INVALID_BOOKMARK: int +MAPI_E_INVALID_ENTRYID: int +MAPI_E_INVALID_OBJECT: int +MAPI_E_INVALID_PARAMETER: int +MAPI_E_INVALID_TYPE: int +MAPI_E_INVALID_WORKSTATION_ACCOUNT: int +MAPI_E_LOCKID_LIMIT: int +MAPI_E_LOGON_FAILED: int +MAPI_E_MISSING_REQUIRED_COLUMN: int +MAPI_E_NAMED_PROP_QUOTA_EXCEEDED: int +MAPI_E_NETWORK_ERROR: int +MAPI_E_NO_ACCESS: int +MAPI_E_NO_RECIPIENTS: int +MAPI_E_NO_SUPPORT: int +MAPI_E_NO_SUPPRESS: int +MAPI_E_NON_STANDARD: int +MAPI_E_NOT_ENOUGH_DISK: int +MAPI_E_NOT_ENOUGH_MEMORY: int +MAPI_E_NOT_ENOUGH_RESOURCES: int +MAPI_E_NOT_FOUND: int +MAPI_E_NOT_IN_QUEUE: int +MAPI_E_NOT_INITIALIZED: int +MAPI_E_NOT_ME: int +MAPI_E_OBJECT_CHANGED: int +MAPI_E_OBJECT_DELETED: int +MAPI_E_OFFLINE: int +MAPI_E_PASSWORD_CHANGE_REQUIRED: int +MAPI_E_PASSWORD_EXPIRED: int +MAPI_E_PROFILE_DELETED: int +MAPI_E_RECONNECTED: int +MAPI_E_SESSION_LIMIT: int +MAPI_E_STORE_FULL: int +MAPI_E_STRING_TOO_LONG: int +MAPI_E_SUBMITTED: int +MAPI_E_TABLE_EMPTY: int +MAPI_E_TABLE_TOO_BIG: int +MAPI_E_TIMEOUT: int +MAPI_E_TOO_BIG: int +MAPI_E_TOO_COMPLEX: int +MAPI_E_TYPE_NO_SUPPORT: int +MAPI_E_UNABLE_TO_ABORT: int +MAPI_E_UNABLE_TO_COMPLETE: int +MAPI_E_UNCONFIGURED: int +MAPI_E_UNEXPECTED_ID: int +MAPI_E_UNEXPECTED_TYPE: int +MAPI_E_UNKNOWN_CPID: int +MAPI_E_UNKNOWN_ENTRYID: int +MAPI_E_UNKNOWN_FLAGS: int +MAPI_E_UNKNOWN_LCID: int +MAPI_E_USER_CANCEL: int 
+MAPI_E_VERSION: int +MAPI_E_WAIT: int +MAPI_EXPLICIT_PROFILE: int +MAPI_EXTENDED: int +MAPI_FOLDER: int +MAPI_FORCE_ACCESS: int +MAPI_FORCE_DOWNLOAD: int +MAPI_FORMINFO: int +MAPI_INIT_VERSION: int +MAPI_LOGON_UI: int +MAPI_MAILUSER: int +MAPI_MESSAGE: int +MAPI_MODIFY: int +MAPI_MOVE: int +MAPI_MULTITHREAD_NOTIFICATIONS: int +MAPI_NATIVE_BODY: int +MAPI_NATIVE_BODY_TYPE_HTML: int +MAPI_NATIVE_BODY_TYPE_PLAINTEXT: int +MAPI_NATIVE_BODY_TYPE_RTF: int +MAPI_NEW_SESSION: int +MAPI_NO_IDS: int +MAPI_NO_MAIL: int +MAPI_NO_STRINGS: int +MAPI_NOREPLACE: int +MAPI_NT_SERVICE: int +MAPI_P1: int +MAPI_PASSWORD_UI: int +MAPI_PROFSECT: int +MAPI_SERVICE_UI_ALWAYS: int +MAPI_SESSION: int +MAPI_STATUS: int +MAPI_STORE: int +MAPI_SUBMITTED: int +MAPI_TIMEOUT_SHORT: int +MAPI_TO: int +MAPI_UNICODE: int +MAPI_USE_DEFAULT: int +MAPI_W_APPROX_COUNT: int +MAPI_W_CANCEL_MESSAGE: int +MAPI_W_ERRORS_RETURNED: int +MAPI_W_NO_SERVICE: int +MAPI_W_PARTIAL_COMPLETION: int +MAPI_W_POSITION_CHANGED: int +MDB_NO_DIALOG: int +MDB_NO_MAIL: int +MDB_TEMPORARY: int +MDB_WRITE: int +MESSAGE_DIALOG: int +MODRECIP_ADD: int +MODRECIP_MODIFY: int +MODRECIP_REMOVE: int +NO_ATTACHMENT: int +OPEN_IF_EXISTS: int +PSTF_BEST_ENCRYPTION: int +PSTF_COMPRESSABLE_ENCRYPTION: int +PSTF_NO_ENCRYPTION: int +RELOP_EQ: int +RELOP_GE: int +RELOP_GT: int +RELOP_LE: int +RELOP_LT: int +RELOP_NE: int +RELOP_RE: int +RES_AND: int +RES_BITMASK: int +RES_COMMENT: int +RES_COMPAREPROPS: int +RES_CONTENT: int +RES_EXIST: int +RES_NOT: int +RES_OR: int +RES_PROPERTY: int +RES_SIZE: int +RES_SUBRESTRICTION: int +RTF_SYNC_BODY_CHANGED: int +RTF_SYNC_RTF_CHANGED: int +SERVICE_UI_ALLOWED: int +SERVICE_UI_ALWAYS: int +SHOW_SOFT_DELETES: int +SOF_UNIQUEFILENAME: int +STATUS_DEFAULT_STORE: int +STATUS_FLUSH_QUEUES: int +STATUS_INBOUND_FLUSH: int +STATUS_OUTBOUND_FLUSH: int +SUPPRESS_RECEIPT: int +TABLE_CHANGED: int +TABLE_ERROR: int +TABLE_RELOAD: int +TABLE_RESTRICT_DONE: int +TABLE_ROW_ADDED: int +TABLE_ROW_DELETED: int +TABLE_ROW_MODIFIED: int +TABLE_SETCOL_DONE: int +TABLE_SORT_ASCEND: int +TABLE_SORT_COMBINE: int +TABLE_SORT_DESCEND: int +TABLE_SORT_DONE: int +TBL_ALL_COLUMNS: int +TBL_ASYNC: int +TBL_BATCH: int +CLSID_IConverterSession: _win32typing.PyIID +CLSID_MailMessage: _win32typing.PyIID +IID_IABContainer: _win32typing.PyIID +IID_IAddrBook: _win32typing.PyIID +IID_IAttachment: _win32typing.PyIID +IID_IConverterSession: _win32typing.PyIID +IID_IDistList: _win32typing.PyIID +IID_IMAPIAdviseSink: _win32typing.PyIID +IID_IMAPIContainer: _win32typing.PyIID +IID_IMAPIFolder: _win32typing.PyIID +IID_IMAPIProp: _win32typing.PyIID +IID_IMAPISession: _win32typing.PyIID +IID_IMAPIStatus: _win32typing.PyIID +IID_IMAPITable: _win32typing.PyIID +IID_IMailUser: _win32typing.PyIID +IID_IMessage: _win32typing.PyIID +IID_IMsgServiceAdmin: _win32typing.PyIID +IID_IMsgServiceAdmin2: _win32typing.PyIID +IID_IMsgStore: _win32typing.PyIID +IID_IProfAdmin: _win32typing.PyIID +IID_IProfSect: _win32typing.PyIID +IID_IProviderAdmin: _win32typing.PyIID +MAPI_DISTLIST: int +MSPST_UID_PROVIDER: _win32typing.PyIID +PSETID_Address: _win32typing.PyIID +PSETID_AirSync: _win32typing.PyIID +PSETID_Appointment: _win32typing.PyIID +PSETID_Common: _win32typing.PyIID +PSETID_Log: _win32typing.PyIID +PSETID_Meeting: _win32typing.PyIID +PSETID_Messaging: _win32typing.PyIID +PSETID_Note: _win32typing.PyIID +PSETID_PostRss: _win32typing.PyIID +PSETID_Remote: _win32typing.PyIID +PSETID_Report: _win32typing.PyIID +PSETID_Sharing: _win32typing.PyIID +PSETID_Task: _win32typing.PyIID 
+PSETID_UnifiedMessaging: _win32typing.PyIID +PS_INTERNET_HEADERS: _win32typing.PyIID +PS_MAPI: _win32typing.PyIID +PS_PUBLIC_STRINGS: _win32typing.PyIID +PS_ROUTING_ADDRTYPE: _win32typing.PyIID +PS_ROUTING_DISPLAY_NAME: _win32typing.PyIID +PS_ROUTING_EMAIL_ADDRESSES: _win32typing.PyIID +PS_ROUTING_ENTRYID: _win32typing.PyIID +PS_ROUTING_SEARCH_KEY: _win32typing.PyIID diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/mapitags.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/mapitags.pyi new file mode 100644 index 00000000..660f492e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/mapitags.pyi @@ -0,0 +1,991 @@ +MV_FLAG: int +PT_UNSPECIFIED: int +PT_NULL: int +PT_I2: int +PT_LONG: int +PT_R4: int +PT_DOUBLE: int +PT_CURRENCY: int +PT_APPTIME: int +PT_ERROR: int +PT_BOOLEAN: int +PT_OBJECT: int +PT_I8: int +PT_STRING8: int +PT_UNICODE: int +PT_SYSTIME: int +PT_CLSID: int +PT_BINARY: int +PT_SHORT: int +PT_I4: int +PT_FLOAT: int +PT_R8: int +PT_LONGLONG: int +PT_MV_I2: int +PT_MV_LONG: int +PT_MV_R4: int +PT_MV_DOUBLE: int +PT_MV_CURRENCY: int +PT_MV_APPTIME: int +PT_MV_SYSTIME: int +PT_MV_STRING8: int +PT_MV_BINARY: int +PT_MV_UNICODE: int +PT_MV_CLSID: int +PT_MV_I8: int +PT_MV_SHORT: int +PT_MV_I4: int +PT_MV_FLOAT: int +PT_MV_R8: int +PT_MV_LONGLONG: int +PT_TSTRING: int +PT_MV_TSTRING: int +PROP_TYPE_MASK: int + +def PROP_TYPE(ulPropTag: int) -> int: ... +def PROP_ID(ulPropTag: int) -> int: ... +def PROP_TAG(ulPropType: int, ulPropID: int) -> int: ... + +PROP_ID_NULL: int +PROP_ID_INVALID: int +PR_NULL: int +PR_ACKNOWLEDGEMENT_MODE: int +PR_ALTERNATE_RECIPIENT_ALLOWED: int +PR_AUTHORIZING_USERS: int +PR_AUTO_FORWARD_COMMENT: int +PR_AUTO_FORWARD_COMMENT_W: int +PR_AUTO_FORWARD_COMMENT_A: int +PR_AUTO_FORWARDED: int +PR_CONTENT_CONFIDENTIALITY_ALGORITHM_ID: int +PR_CONTENT_CORRELATOR: int +PR_CONTENT_IDENTIFIER: int +PR_CONTENT_IDENTIFIER_W: int +PR_CONTENT_IDENTIFIER_A: int +PR_CONTENT_LENGTH: int +PR_CONTENT_RETURN_REQUESTED: int +PR_CONVERSATION_KEY: int +PR_CONVERSION_EITS: int +PR_CONVERSION_WITH_LOSS_PROHIBITED: int +PR_CONVERTED_EITS: int +PR_DEFERRED_DELIVERY_TIME: int +PR_DELIVER_TIME: int +PR_DISCARD_REASON: int +PR_DISCLOSURE_OF_RECIPIENTS: int +PR_DL_EXPANSION_HISTORY: int +PR_DL_EXPANSION_PROHIBITED: int +PR_EXPIRY_TIME: int +PR_IMPLICIT_CONVERSION_PROHIBITED: int +PR_IMPORTANCE: int +PR_IPM_ID: int +PR_LATEST_DELIVERY_TIME: int +PR_MESSAGE_CLASS: int +PR_MESSAGE_CLASS_W: int +PR_MESSAGE_CLASS_A: int +PR_MESSAGE_DELIVERY_ID: int +PR_MESSAGE_SECURITY_LABEL: int +PR_OBSOLETED_IPMS: int +PR_ORIGINALLY_INTENDED_RECIPIENT_NAME: int +PR_ORIGINAL_EITS: int +PR_ORIGINATOR_CERTIFICATE: int +PR_ORIGINATOR_DELIVERY_REPORT_REQUESTED: int +PR_ORIGINATOR_RETURN_ADDRESS: int +PR_PARENT_KEY: int +PR_PRIORITY: int +PR_ORIGIN_CHECK: int +PR_PROOF_OF_SUBMISSION_REQUESTED: int +PR_READ_RECEIPT_REQUESTED: int +PR_RECEIPT_TIME: int +PR_RECIPIENT_REASSIGNMENT_PROHIBITED: int +PR_REDIRECTION_HISTORY: int +PR_RELATED_IPMS: int +PR_ORIGINAL_SENSITIVITY: int +PR_LANGUAGES: int +PR_LANGUAGES_W: int +PR_LANGUAGES_A: int +PR_REPLY_TIME: int +PR_REPORT_TAG: int +PR_REPORT_TIME: int +PR_RETURNED_IPM: int +PR_SECURITY: int +PR_INCOMPLETE_COPY: int +PR_SENSITIVITY: int +PR_SUBJECT: int +PR_SUBJECT_W: int +PR_SUBJECT_A: int +PR_SUBJECT_IPM: int +PR_CLIENT_SUBMIT_TIME: int +PR_REPORT_NAME: int +PR_REPORT_NAME_W: int 
+PR_REPORT_NAME_A: int +PR_SENT_REPRESENTING_SEARCH_KEY: int +PR_X400_CONTENT_TYPE: int +PR_SUBJECT_PREFIX: int +PR_SUBJECT_PREFIX_W: int +PR_SUBJECT_PREFIX_A: int +PR_NON_RECEIPT_REASON: int +PR_RECEIVED_BY_ENTRYID: int +PR_RECEIVED_BY_NAME: int +PR_RECEIVED_BY_NAME_W: int +PR_RECEIVED_BY_NAME_A: int +PR_SENT_REPRESENTING_ENTRYID: int +PR_SENT_REPRESENTING_NAME: int +PR_SENT_REPRESENTING_NAME_W: int +PR_SENT_REPRESENTING_NAME_A: int +PR_RCVD_REPRESENTING_ENTRYID: int +PR_RCVD_REPRESENTING_NAME: int +PR_RCVD_REPRESENTING_NAME_W: int +PR_RCVD_REPRESENTING_NAME_A: int +PR_REPORT_ENTRYID: int +PR_READ_RECEIPT_ENTRYID: int +PR_MESSAGE_SUBMISSION_ID: int +PR_PROVIDER_SUBMIT_TIME: int +PR_ORIGINAL_SUBJECT: int +PR_ORIGINAL_SUBJECT_W: int +PR_ORIGINAL_SUBJECT_A: int +PR_DISC_VAL: int +PR_ORIG_MESSAGE_CLASS: int +PR_ORIG_MESSAGE_CLASS_W: int +PR_ORIG_MESSAGE_CLASS_A: int +PR_ORIGINAL_AUTHOR_ENTRYID: int +PR_ORIGINAL_AUTHOR_NAME: int +PR_ORIGINAL_AUTHOR_NAME_W: int +PR_ORIGINAL_AUTHOR_NAME_A: int +PR_ORIGINAL_SUBMIT_TIME: int +PR_REPLY_RECIPIENT_ENTRIES: int +PR_REPLY_RECIPIENT_NAMES: int +PR_REPLY_RECIPIENT_NAMES_W: int +PR_REPLY_RECIPIENT_NAMES_A: int +PR_RECEIVED_BY_SEARCH_KEY: int +PR_RCVD_REPRESENTING_SEARCH_KEY: int +PR_READ_RECEIPT_SEARCH_KEY: int +PR_REPORT_SEARCH_KEY: int +PR_ORIGINAL_DELIVERY_TIME: int +PR_ORIGINAL_AUTHOR_SEARCH_KEY: int +PR_MESSAGE_TO_ME: int +PR_MESSAGE_CC_ME: int +PR_MESSAGE_RECIP_ME: int +PR_ORIGINAL_SENDER_NAME: int +PR_ORIGINAL_SENDER_NAME_W: int +PR_ORIGINAL_SENDER_NAME_A: int +PR_ORIGINAL_SENDER_ENTRYID: int +PR_ORIGINAL_SENDER_SEARCH_KEY: int +PR_ORIGINAL_SENT_REPRESENTING_NAME: int +PR_ORIGINAL_SENT_REPRESENTING_NAME_W: int +PR_ORIGINAL_SENT_REPRESENTING_NAME_A: int +PR_ORIGINAL_SENT_REPRESENTING_ENTRYID: int +PR_ORIGINAL_SENT_REPRESENTING_SEARCH_KEY: int +PR_START_DATE: int +PR_END_DATE: int +PR_OWNER_APPT_ID: int +PR_RESPONSE_REQUESTED: int +PR_SENT_REPRESENTING_ADDRTYPE: int +PR_SENT_REPRESENTING_ADDRTYPE_W: int +PR_SENT_REPRESENTING_ADDRTYPE_A: int +PR_SENT_REPRESENTING_EMAIL_ADDRESS: int +PR_SENT_REPRESENTING_EMAIL_ADDRESS_W: int +PR_SENT_REPRESENTING_EMAIL_ADDRESS_A: int +PR_ORIGINAL_SENDER_ADDRTYPE: int +PR_ORIGINAL_SENDER_ADDRTYPE_W: int +PR_ORIGINAL_SENDER_ADDRTYPE_A: int +PR_ORIGINAL_SENDER_EMAIL_ADDRESS: int +PR_ORIGINAL_SENDER_EMAIL_ADDRESS_W: int +PR_ORIGINAL_SENDER_EMAIL_ADDRESS_A: int +PR_ORIGINAL_SENT_REPRESENTING_ADDRTYPE: int +PR_ORIGINAL_SENT_REPRESENTING_ADDRTYPE_W: int +PR_ORIGINAL_SENT_REPRESENTING_ADDRTYPE_A: int +PR_ORIGINAL_SENT_REPRESENTING_EMAIL_ADDRESS: int +PR_ORIGINAL_SENT_REPRESENTING_EMAIL_ADDRESS_W: int +PR_ORIGINAL_SENT_REPRESENTING_EMAIL_ADDRESS_A: int +PR_CONVERSATION_TOPIC: int +PR_CONVERSATION_TOPIC_W: int +PR_CONVERSATION_TOPIC_A: int +PR_CONVERSATION_INDEX: int +PR_ORIGINAL_DISPLAY_BCC: int +PR_ORIGINAL_DISPLAY_BCC_W: int +PR_ORIGINAL_DISPLAY_BCC_A: int +PR_ORIGINAL_DISPLAY_CC: int +PR_ORIGINAL_DISPLAY_CC_W: int +PR_ORIGINAL_DISPLAY_CC_A: int +PR_ORIGINAL_DISPLAY_TO: int +PR_ORIGINAL_DISPLAY_TO_W: int +PR_ORIGINAL_DISPLAY_TO_A: int +PR_RECEIVED_BY_ADDRTYPE: int +PR_RECEIVED_BY_ADDRTYPE_W: int +PR_RECEIVED_BY_ADDRTYPE_A: int +PR_RECEIVED_BY_EMAIL_ADDRESS: int +PR_RECEIVED_BY_EMAIL_ADDRESS_W: int +PR_RECEIVED_BY_EMAIL_ADDRESS_A: int +PR_RCVD_REPRESENTING_ADDRTYPE: int +PR_RCVD_REPRESENTING_ADDRTYPE_W: int +PR_RCVD_REPRESENTING_ADDRTYPE_A: int +PR_RCVD_REPRESENTING_EMAIL_ADDRESS: int +PR_RCVD_REPRESENTING_EMAIL_ADDRESS_W: int +PR_RCVD_REPRESENTING_EMAIL_ADDRESS_A: int +PR_ORIGINAL_AUTHOR_ADDRTYPE: int 
+PR_ORIGINAL_AUTHOR_ADDRTYPE_W: int +PR_ORIGINAL_AUTHOR_ADDRTYPE_A: int +PR_ORIGINAL_AUTHOR_EMAIL_ADDRESS: int +PR_ORIGINAL_AUTHOR_EMAIL_ADDRESS_W: int +PR_ORIGINAL_AUTHOR_EMAIL_ADDRESS_A: int +PR_ORIGINALLY_INTENDED_RECIP_ADDRTYPE: int +PR_ORIGINALLY_INTENDED_RECIP_ADDRTYPE_W: int +PR_ORIGINALLY_INTENDED_RECIP_ADDRTYPE_A: int +PR_ORIGINALLY_INTENDED_RECIP_EMAIL_ADDRESS: int +PR_ORIGINALLY_INTENDED_RECIP_EMAIL_ADDRESS_W: int +PR_ORIGINALLY_INTENDED_RECIP_EMAIL_ADDRESS_A: int +PR_TRANSPORT_MESSAGE_HEADERS: int +PR_TRANSPORT_MESSAGE_HEADERS_W: int +PR_TRANSPORT_MESSAGE_HEADERS_A: int +PR_DELEGATION: int +PR_TNEF_CORRELATION_KEY: int +PR_BODY: int +PR_BODY_W: int +PR_BODY_A: int +PR_BODY_HTML: int +PR_BODY_HTML_W: int +PR_BODY_HTML_A: int +PR_REPORT_TEXT: int +PR_REPORT_TEXT_W: int +PR_REPORT_TEXT_A: int +PR_ORIGINATOR_AND_DL_EXPANSION_HISTORY: int +PR_REPORTING_DL_NAME: int +PR_REPORTING_MTA_CERTIFICATE: int +PR_RTF_SYNC_BODY_CRC: int +PR_RTF_SYNC_BODY_COUNT: int +PR_RTF_SYNC_BODY_TAG: int +PR_RTF_SYNC_BODY_TAG_W: int +PR_RTF_SYNC_BODY_TAG_A: int +PR_RTF_COMPRESSED: int +PR_RTF_SYNC_PREFIX_COUNT: int +PR_RTF_SYNC_TRAILING_COUNT: int +PR_ORIGINALLY_INTENDED_RECIP_ENTRYID: int +PR_CONTENT_INTEGRITY_CHECK: int +PR_EXPLICIT_CONVERSION: int +PR_IPM_RETURN_REQUESTED: int +PR_MESSAGE_TOKEN: int +PR_NDR_REASON_CODE: int +PR_NDR_DIAG_CODE: int +PR_NON_RECEIPT_NOTIFICATION_REQUESTED: int +PR_DELIVERY_POINT: int +PR_ORIGINATOR_NON_DELIVERY_REPORT_REQUESTED: int +PR_ORIGINATOR_REQUESTED_ALTERNATE_RECIPIENT: int +PR_PHYSICAL_DELIVERY_BUREAU_FAX_DELIVERY: int +PR_PHYSICAL_DELIVERY_MODE: int +PR_PHYSICAL_DELIVERY_REPORT_REQUEST: int +PR_PHYSICAL_FORWARDING_ADDRESS: int +PR_PHYSICAL_FORWARDING_ADDRESS_REQUESTED: int +PR_PHYSICAL_FORWARDING_PROHIBITED: int +PR_PHYSICAL_RENDITION_ATTRIBUTES: int +PR_PROOF_OF_DELIVERY: int +PR_PROOF_OF_DELIVERY_REQUESTED: int +PR_RECIPIENT_CERTIFICATE: int +PR_RECIPIENT_NUMBER_FOR_ADVICE: int +PR_RECIPIENT_NUMBER_FOR_ADVICE_W: int +PR_RECIPIENT_NUMBER_FOR_ADVICE_A: int +PR_RECIPIENT_TYPE: int +PR_REGISTERED_MAIL_TYPE: int +PR_REPLY_REQUESTED: int +PR_REQUESTED_DELIVERY_METHOD: int +PR_SENDER_ENTRYID: int +PR_SENDER_NAME: int +PR_SENDER_NAME_W: int +PR_SENDER_NAME_A: int +PR_SUPPLEMENTARY_INFO: int +PR_SUPPLEMENTARY_INFO_W: int +PR_SUPPLEMENTARY_INFO_A: int +PR_TYPE_OF_MTS_USER: int +PR_SENDER_SEARCH_KEY: int +PR_SENDER_ADDRTYPE: int +PR_SENDER_ADDRTYPE_W: int +PR_SENDER_ADDRTYPE_A: int +PR_SENDER_EMAIL_ADDRESS: int +PR_SENDER_EMAIL_ADDRESS_W: int +PR_SENDER_EMAIL_ADDRESS_A: int +PR_CURRENT_VERSION: int +PR_DELETE_AFTER_SUBMIT: int +PR_DISPLAY_BCC: int +PR_DISPLAY_BCC_W: int +PR_DISPLAY_BCC_A: int +PR_DISPLAY_CC: int +PR_DISPLAY_CC_W: int +PR_DISPLAY_CC_A: int +PR_DISPLAY_TO: int +PR_DISPLAY_TO_W: int +PR_DISPLAY_TO_A: int +PR_PARENT_DISPLAY: int +PR_PARENT_DISPLAY_W: int +PR_PARENT_DISPLAY_A: int +PR_MESSAGE_DELIVERY_TIME: int +PR_MESSAGE_FLAGS: int +PR_MESSAGE_SIZE: int +PR_PARENT_ENTRYID: int +PR_SENTMAIL_ENTRYID: int +PR_CORRELATE: int +PR_CORRELATE_MTSID: int +PR_DISCRETE_VALUES: int +PR_RESPONSIBILITY: int +PR_SPOOLER_STATUS: int +PR_TRANSPORT_STATUS: int +PR_MESSAGE_RECIPIENTS: int +PR_MESSAGE_ATTACHMENTS: int +PR_SUBMIT_FLAGS: int +PR_RECIPIENT_STATUS: int +PR_TRANSPORT_KEY: int +PR_MSG_STATUS: int +PR_MESSAGE_DOWNLOAD_TIME: int +PR_CREATION_VERSION: int +PR_MODIFY_VERSION: int +PR_HASATTACH: int +PR_BODY_CRC: int +PR_NORMALIZED_SUBJECT: int +PR_NORMALIZED_SUBJECT_W: int +PR_NORMALIZED_SUBJECT_A: int +PR_RTF_IN_SYNC: int +PR_ATTACH_SIZE: int +PR_ATTACH_NUM: int 
+PR_PREPROCESS: int +PR_ORIGINATING_MTA_CERTIFICATE: int +PR_PROOF_OF_SUBMISSION: int +PR_ENTRYID: int +PR_OBJECT_TYPE: int +PR_ICON: int +PR_MINI_ICON: int +PR_STORE_ENTRYID: int +PR_STORE_RECORD_KEY: int +PR_RECORD_KEY: int +PR_MAPPING_SIGNATURE: int +PR_ACCESS_LEVEL: int +PR_INSTANCE_KEY: int +PR_ROW_TYPE: int +PR_ACCESS: int +PR_ROWID: int +PR_DISPLAY_NAME: int +PR_DISPLAY_NAME_W: int +PR_DISPLAY_NAME_A: int +PR_ADDRTYPE: int +PR_ADDRTYPE_W: int +PR_ADDRTYPE_A: int +PR_EMAIL_ADDRESS: int +PR_EMAIL_ADDRESS_W: int +PR_EMAIL_ADDRESS_A: int +PR_COMMENT: int +PR_COMMENT_W: int +PR_COMMENT_A: int +PR_DEPTH: int +PR_PROVIDER_DISPLAY: int +PR_PROVIDER_DISPLAY_W: int +PR_PROVIDER_DISPLAY_A: int +PR_CREATION_TIME: int +PR_LAST_MODIFICATION_TIME: int +PR_RESOURCE_FLAGS: int +PR_PROVIDER_DLL_NAME: int +PR_PROVIDER_DLL_NAME_W: int +PR_PROVIDER_DLL_NAME_A: int +PR_SEARCH_KEY: int +PR_PROVIDER_UID: int +PR_PROVIDER_ORDINAL: int +PR_FORM_VERSION: int +PR_FORM_VERSION_W: int +PR_FORM_VERSION_A: int +PR_FORM_CLSID: int +PR_FORM_CONTACT_NAME: int +PR_FORM_CONTACT_NAME_W: int +PR_FORM_CONTACT_NAME_A: int +PR_FORM_CATEGORY: int +PR_FORM_CATEGORY_W: int +PR_FORM_CATEGORY_A: int +PR_FORM_CATEGORY_SUB: int +PR_FORM_CATEGORY_SUB_W: int +PR_FORM_CATEGORY_SUB_A: int +PR_FORM_HOST_MAP: int +PR_FORM_HIDDEN: int +PR_FORM_DESIGNER_NAME: int +PR_FORM_DESIGNER_NAME_W: int +PR_FORM_DESIGNER_NAME_A: int +PR_FORM_DESIGNER_GUID: int +PR_FORM_MESSAGE_BEHAVIOR: int +PR_DEFAULT_STORE: int +PR_STORE_SUPPORT_MASK: int +PR_STORE_STATE: int +PR_IPM_SUBTREE_SEARCH_KEY: int +PR_IPM_OUTBOX_SEARCH_KEY: int +PR_IPM_WASTEBASKET_SEARCH_KEY: int +PR_IPM_SENTMAIL_SEARCH_KEY: int +PR_MDB_PROVIDER: int +PR_RECEIVE_FOLDER_SETTINGS: int +PR_VALID_FOLDER_MASK: int +PR_IPM_SUBTREE_ENTRYID: int +PR_IPM_OUTBOX_ENTRYID: int +PR_IPM_WASTEBASKET_ENTRYID: int +PR_IPM_SENTMAIL_ENTRYID: int +PR_VIEWS_ENTRYID: int +PR_COMMON_VIEWS_ENTRYID: int +PR_FINDER_ENTRYID: int +PR_CONTAINER_FLAGS: int +PR_FOLDER_TYPE: int +PR_CONTENT_COUNT: int +PR_CONTENT_UNREAD: int +PR_CREATE_TEMPLATES: int +PR_DETAILS_TABLE: int +PR_SEARCH: int +PR_SELECTABLE: int +PR_SUBFOLDERS: int +PR_STATUS: int +PR_ANR: int +PR_ANR_W: int +PR_ANR_A: int +PR_CONTENTS_SORT_ORDER: int +PR_CONTAINER_HIERARCHY: int +PR_CONTAINER_CONTENTS: int +PR_FOLDER_ASSOCIATED_CONTENTS: int +PR_DEF_CREATE_DL: int +PR_DEF_CREATE_MAILUSER: int +PR_CONTAINER_CLASS: int +PR_CONTAINER_CLASS_W: int +PR_CONTAINER_CLASS_A: int +PR_CONTAINER_MODIFY_VERSION: int +PR_AB_PROVIDER_ID: int +PR_DEFAULT_VIEW_ENTRYID: int +PR_ASSOC_CONTENT_COUNT: int +PR_ATTACHMENT_X400_PARAMETERS: int +PR_ATTACH_DATA_OBJ: int +PR_ATTACH_DATA_BIN: int +PR_ATTACH_ENCODING: int +PR_ATTACH_EXTENSION: int +PR_ATTACH_EXTENSION_W: int +PR_ATTACH_EXTENSION_A: int +PR_ATTACH_FILENAME: int +PR_ATTACH_FILENAME_W: int +PR_ATTACH_FILENAME_A: int +PR_ATTACH_METHOD: int +PR_ATTACH_LONG_FILENAME: int +PR_ATTACH_LONG_FILENAME_W: int +PR_ATTACH_LONG_FILENAME_A: int +PR_ATTACH_PATHNAME: int +PR_ATTACH_PATHNAME_W: int +PR_ATTACH_PATHNAME_A: int +PR_ATTACH_RENDERING: int +PR_ATTACH_TAG: int +PR_RENDERING_POSITION: int +PR_ATTACH_TRANSPORT_NAME: int +PR_ATTACH_TRANSPORT_NAME_W: int +PR_ATTACH_TRANSPORT_NAME_A: int +PR_ATTACH_LONG_PATHNAME: int +PR_ATTACH_LONG_PATHNAME_W: int +PR_ATTACH_LONG_PATHNAME_A: int +PR_ATTACH_MIME_TAG: int +PR_ATTACH_MIME_TAG_W: int +PR_ATTACH_MIME_TAG_A: int +PR_ATTACH_ADDITIONAL_INFO: int +PR_DISPLAY_TYPE: int +PR_TEMPLATEID: int +PR_PRIMARY_CAPABILITY: int +PR_7BIT_DISPLAY_NAME: int +PR_ACCOUNT: int +PR_ACCOUNT_W: int 
+PR_ACCOUNT_A: int +PR_ALTERNATE_RECIPIENT: int +PR_CALLBACK_TELEPHONE_NUMBER: int +PR_CALLBACK_TELEPHONE_NUMBER_W: int +PR_CALLBACK_TELEPHONE_NUMBER_A: int +PR_CONVERSION_PROHIBITED: int +PR_DISCLOSE_RECIPIENTS: int +PR_GENERATION: int +PR_GENERATION_W: int +PR_GENERATION_A: int +PR_GIVEN_NAME: int +PR_GIVEN_NAME_W: int +PR_GIVEN_NAME_A: int +PR_GOVERNMENT_ID_NUMBER: int +PR_GOVERNMENT_ID_NUMBER_W: int +PR_GOVERNMENT_ID_NUMBER_A: int +PR_BUSINESS_TELEPHONE_NUMBER: int +PR_BUSINESS_TELEPHONE_NUMBER_W: int +PR_BUSINESS_TELEPHONE_NUMBER_A: int +PR_OFFICE_TELEPHONE_NUMBER: int +PR_OFFICE_TELEPHONE_NUMBER_W: int +PR_OFFICE_TELEPHONE_NUMBER_A: int +PR_HOME_TELEPHONE_NUMBER: int +PR_HOME_TELEPHONE_NUMBER_W: int +PR_HOME_TELEPHONE_NUMBER_A: int +PR_INITIALS: int +PR_INITIALS_W: int +PR_INITIALS_A: int +PR_KEYWORD: int +PR_KEYWORD_W: int +PR_KEYWORD_A: int +PR_LANGUAGE: int +PR_LANGUAGE_W: int +PR_LANGUAGE_A: int +PR_LOCATION: int +PR_LOCATION_W: int +PR_LOCATION_A: int +PR_MAIL_PERMISSION: int +PR_MHS_COMMON_NAME: int +PR_MHS_COMMON_NAME_W: int +PR_MHS_COMMON_NAME_A: int +PR_ORGANIZATIONAL_ID_NUMBER: int +PR_ORGANIZATIONAL_ID_NUMBER_W: int +PR_ORGANIZATIONAL_ID_NUMBER_A: int +PR_SURNAME: int +PR_SURNAME_W: int +PR_SURNAME_A: int +PR_ORIGINAL_ENTRYID: int +PR_ORIGINAL_DISPLAY_NAME: int +PR_ORIGINAL_DISPLAY_NAME_W: int +PR_ORIGINAL_DISPLAY_NAME_A: int +PR_ORIGINAL_SEARCH_KEY: int +PR_POSTAL_ADDRESS: int +PR_POSTAL_ADDRESS_W: int +PR_POSTAL_ADDRESS_A: int +PR_COMPANY_NAME: int +PR_COMPANY_NAME_W: int +PR_COMPANY_NAME_A: int +PR_TITLE: int +PR_TITLE_W: int +PR_TITLE_A: int +PR_DEPARTMENT_NAME: int +PR_DEPARTMENT_NAME_W: int +PR_DEPARTMENT_NAME_A: int +PR_OFFICE_LOCATION: int +PR_OFFICE_LOCATION_W: int +PR_OFFICE_LOCATION_A: int +PR_PRIMARY_TELEPHONE_NUMBER: int +PR_PRIMARY_TELEPHONE_NUMBER_W: int +PR_PRIMARY_TELEPHONE_NUMBER_A: int +PR_BUSINESS2_TELEPHONE_NUMBER: int +PR_BUSINESS2_TELEPHONE_NUMBER_W: int +PR_BUSINESS2_TELEPHONE_NUMBER_A: int +PR_OFFICE2_TELEPHONE_NUMBER: int +PR_OFFICE2_TELEPHONE_NUMBER_W: int +PR_OFFICE2_TELEPHONE_NUMBER_A: int +PR_MOBILE_TELEPHONE_NUMBER: int +PR_MOBILE_TELEPHONE_NUMBER_W: int +PR_MOBILE_TELEPHONE_NUMBER_A: int +PR_CELLULAR_TELEPHONE_NUMBER: int +PR_CELLULAR_TELEPHONE_NUMBER_W: int +PR_CELLULAR_TELEPHONE_NUMBER_A: int +PR_RADIO_TELEPHONE_NUMBER: int +PR_RADIO_TELEPHONE_NUMBER_W: int +PR_RADIO_TELEPHONE_NUMBER_A: int +PR_CAR_TELEPHONE_NUMBER: int +PR_CAR_TELEPHONE_NUMBER_W: int +PR_CAR_TELEPHONE_NUMBER_A: int +PR_OTHER_TELEPHONE_NUMBER: int +PR_OTHER_TELEPHONE_NUMBER_W: int +PR_OTHER_TELEPHONE_NUMBER_A: int +PR_TRANSMITABLE_DISPLAY_NAME: int +PR_TRANSMITABLE_DISPLAY_NAME_W: int +PR_TRANSMITABLE_DISPLAY_NAME_A: int +PR_PAGER_TELEPHONE_NUMBER: int +PR_PAGER_TELEPHONE_NUMBER_W: int +PR_PAGER_TELEPHONE_NUMBER_A: int +PR_BEEPER_TELEPHONE_NUMBER: int +PR_BEEPER_TELEPHONE_NUMBER_W: int +PR_BEEPER_TELEPHONE_NUMBER_A: int +PR_USER_CERTIFICATE: int +PR_PRIMARY_FAX_NUMBER: int +PR_PRIMARY_FAX_NUMBER_W: int +PR_PRIMARY_FAX_NUMBER_A: int +PR_BUSINESS_FAX_NUMBER: int +PR_BUSINESS_FAX_NUMBER_W: int +PR_BUSINESS_FAX_NUMBER_A: int +PR_HOME_FAX_NUMBER: int +PR_HOME_FAX_NUMBER_W: int +PR_HOME_FAX_NUMBER_A: int +PR_COUNTRY: int +PR_COUNTRY_W: int +PR_COUNTRY_A: int +PR_BUSINESS_ADDRESS_COUNTRY: int +PR_BUSINESS_ADDRESS_COUNTRY_W: int +PR_BUSINESS_ADDRESS_COUNTRY_A: int +PR_LOCALITY: int +PR_LOCALITY_W: int +PR_LOCALITY_A: int +PR_BUSINESS_ADDRESS_CITY: int +PR_BUSINESS_ADDRESS_CITY_W: int +PR_BUSINESS_ADDRESS_CITY_A: int +PR_STATE_OR_PROVINCE: int +PR_STATE_OR_PROVINCE_W: int 
+PR_STATE_OR_PROVINCE_A: int +PR_BUSINESS_ADDRESS_STATE_OR_PROVINCE: int +PR_BUSINESS_ADDRESS_STATE_OR_PROVINCE_W: int +PR_BUSINESS_ADDRESS_STATE_OR_PROVINCE_A: int +PR_STREET_ADDRESS: int +PR_STREET_ADDRESS_W: int +PR_STREET_ADDRESS_A: int +PR_BUSINESS_ADDRESS_STREET: int +PR_BUSINESS_ADDRESS_STREET_W: int +PR_BUSINESS_ADDRESS_STREET_A: int +PR_POSTAL_CODE: int +PR_POSTAL_CODE_W: int +PR_POSTAL_CODE_A: int +PR_BUSINESS_ADDRESS_POSTAL_CODE: int +PR_BUSINESS_ADDRESS_POSTAL_CODE_W: int +PR_BUSINESS_ADDRESS_POSTAL_CODE_A: int +PR_POST_OFFICE_BOX: int +PR_POST_OFFICE_BOX_W: int +PR_POST_OFFICE_BOX_A: int +PR_BUSINESS_ADDRESS_POST_OFFICE_BOX: int +PR_BUSINESS_ADDRESS_POST_OFFICE_BOX_W: int +PR_BUSINESS_ADDRESS_POST_OFFICE_BOX_A: int +PR_TELEX_NUMBER: int +PR_TELEX_NUMBER_W: int +PR_TELEX_NUMBER_A: int +PR_ISDN_NUMBER: int +PR_ISDN_NUMBER_W: int +PR_ISDN_NUMBER_A: int +PR_ASSISTANT_TELEPHONE_NUMBER: int +PR_ASSISTANT_TELEPHONE_NUMBER_W: int +PR_ASSISTANT_TELEPHONE_NUMBER_A: int +PR_HOME2_TELEPHONE_NUMBER: int +PR_HOME2_TELEPHONE_NUMBER_W: int +PR_HOME2_TELEPHONE_NUMBER_A: int +PR_ASSISTANT: int +PR_ASSISTANT_W: int +PR_ASSISTANT_A: int +PR_SEND_RICH_INFO: int +PR_WEDDING_ANNIVERSARY: int +PR_BIRTHDAY: int +PR_HOBBIES: int +PR_HOBBIES_W: int +PR_HOBBIES_A: int +PR_MIDDLE_NAME: int +PR_MIDDLE_NAME_W: int +PR_MIDDLE_NAME_A: int +PR_DISPLAY_NAME_PREFIX: int +PR_DISPLAY_NAME_PREFIX_W: int +PR_DISPLAY_NAME_PREFIX_A: int +PR_PROFESSION: int +PR_PROFESSION_W: int +PR_PROFESSION_A: int +PR_PREFERRED_BY_NAME: int +PR_PREFERRED_BY_NAME_W: int +PR_PREFERRED_BY_NAME_A: int +PR_SPOUSE_NAME: int +PR_SPOUSE_NAME_W: int +PR_SPOUSE_NAME_A: int +PR_COMPUTER_NETWORK_NAME: int +PR_COMPUTER_NETWORK_NAME_W: int +PR_COMPUTER_NETWORK_NAME_A: int +PR_CUSTOMER_ID: int +PR_CUSTOMER_ID_W: int +PR_CUSTOMER_ID_A: int +PR_TTYTDD_PHONE_NUMBER: int +PR_TTYTDD_PHONE_NUMBER_W: int +PR_TTYTDD_PHONE_NUMBER_A: int +PR_FTP_SITE: int +PR_FTP_SITE_W: int +PR_FTP_SITE_A: int +PR_GENDER: int +PR_MANAGER_NAME: int +PR_MANAGER_NAME_W: int +PR_MANAGER_NAME_A: int +PR_NICKNAME: int +PR_NICKNAME_W: int +PR_NICKNAME_A: int +PR_PERSONAL_HOME_PAGE: int +PR_PERSONAL_HOME_PAGE_W: int +PR_PERSONAL_HOME_PAGE_A: int +PR_BUSINESS_HOME_PAGE: int +PR_BUSINESS_HOME_PAGE_W: int +PR_BUSINESS_HOME_PAGE_A: int +PR_CONTACT_VERSION: int +PR_CONTACT_ENTRYIDS: int +PR_CONTACT_ADDRTYPES: int +PR_CONTACT_ADDRTYPES_W: int +PR_CONTACT_ADDRTYPES_A: int +PR_CONTACT_DEFAULT_ADDRESS_INDEX: int +PR_CONTACT_EMAIL_ADDRESSES: int +PR_CONTACT_EMAIL_ADDRESSES_W: int +PR_CONTACT_EMAIL_ADDRESSES_A: int +PR_COMPANY_MAIN_PHONE_NUMBER: int +PR_COMPANY_MAIN_PHONE_NUMBER_W: int +PR_COMPANY_MAIN_PHONE_NUMBER_A: int +PR_CHILDRENS_NAMES: int +PR_CHILDRENS_NAMES_W: int +PR_CHILDRENS_NAMES_A: int +PR_HOME_ADDRESS_CITY: int +PR_HOME_ADDRESS_CITY_W: int +PR_HOME_ADDRESS_CITY_A: int +PR_HOME_ADDRESS_COUNTRY: int +PR_HOME_ADDRESS_COUNTRY_W: int +PR_HOME_ADDRESS_COUNTRY_A: int +PR_HOME_ADDRESS_POSTAL_CODE: int +PR_HOME_ADDRESS_POSTAL_CODE_W: int +PR_HOME_ADDRESS_POSTAL_CODE_A: int +PR_HOME_ADDRESS_STATE_OR_PROVINCE: int +PR_HOME_ADDRESS_STATE_OR_PROVINCE_W: int +PR_HOME_ADDRESS_STATE_OR_PROVINCE_A: int +PR_HOME_ADDRESS_STREET: int +PR_HOME_ADDRESS_STREET_W: int +PR_HOME_ADDRESS_STREET_A: int +PR_HOME_ADDRESS_POST_OFFICE_BOX: int +PR_HOME_ADDRESS_POST_OFFICE_BOX_W: int +PR_HOME_ADDRESS_POST_OFFICE_BOX_A: int +PR_OTHER_ADDRESS_CITY: int +PR_OTHER_ADDRESS_CITY_W: int +PR_OTHER_ADDRESS_CITY_A: int +PR_OTHER_ADDRESS_COUNTRY: int +PR_OTHER_ADDRESS_COUNTRY_W: int +PR_OTHER_ADDRESS_COUNTRY_A: int 
+PR_OTHER_ADDRESS_POSTAL_CODE: int +PR_OTHER_ADDRESS_POSTAL_CODE_W: int +PR_OTHER_ADDRESS_POSTAL_CODE_A: int +PR_OTHER_ADDRESS_STATE_OR_PROVINCE: int +PR_OTHER_ADDRESS_STATE_OR_PROVINCE_W: int +PR_OTHER_ADDRESS_STATE_OR_PROVINCE_A: int +PR_OTHER_ADDRESS_STREET: int +PR_OTHER_ADDRESS_STREET_W: int +PR_OTHER_ADDRESS_STREET_A: int +PR_OTHER_ADDRESS_POST_OFFICE_BOX: int +PR_OTHER_ADDRESS_POST_OFFICE_BOX_W: int +PR_OTHER_ADDRESS_POST_OFFICE_BOX_A: int +PR_STORE_PROVIDERS: int +PR_AB_PROVIDERS: int +PR_TRANSPORT_PROVIDERS: int +PR_DEFAULT_PROFILE: int +PR_AB_SEARCH_PATH: int +PR_AB_DEFAULT_DIR: int +PR_AB_DEFAULT_PAB: int +PR_FILTERING_HOOKS: int +PR_SERVICE_NAME: int +PR_SERVICE_NAME_W: int +PR_SERVICE_NAME_A: int +PR_SERVICE_DLL_NAME: int +PR_SERVICE_DLL_NAME_W: int +PR_SERVICE_DLL_NAME_A: int +PR_SERVICE_ENTRY_NAME: int +PR_SERVICE_UID: int +PR_SERVICE_EXTRA_UIDS: int +PR_SERVICES: int +PR_SERVICE_SUPPORT_FILES: int +PR_SERVICE_SUPPORT_FILES_W: int +PR_SERVICE_SUPPORT_FILES_A: int +PR_SERVICE_DELETE_FILES: int +PR_SERVICE_DELETE_FILES_W: int +PR_SERVICE_DELETE_FILES_A: int +PR_AB_SEARCH_PATH_UPDATE: int +PR_PROFILE_NAME: int +PR_PROFILE_NAME_A: int +PR_PROFILE_NAME_W: int +PR_IDENTITY_DISPLAY: int +PR_IDENTITY_DISPLAY_W: int +PR_IDENTITY_DISPLAY_A: int +PR_IDENTITY_ENTRYID: int +PR_RESOURCE_METHODS: int +PR_RESOURCE_TYPE: int +PR_STATUS_CODE: int +PR_IDENTITY_SEARCH_KEY: int +PR_OWN_STORE_ENTRYID: int +PR_RESOURCE_PATH: int +PR_RESOURCE_PATH_W: int +PR_RESOURCE_PATH_A: int +PR_STATUS_STRING: int +PR_STATUS_STRING_W: int +PR_STATUS_STRING_A: int +PR_X400_DEFERRED_DELIVERY_CANCEL: int +PR_HEADER_FOLDER_ENTRYID: int +PR_REMOTE_PROGRESS: int +PR_REMOTE_PROGRESS_TEXT: int +PR_REMOTE_PROGRESS_TEXT_W: int +PR_REMOTE_PROGRESS_TEXT_A: int +PR_REMOTE_VALIDATE_OK: int +PR_CONTROL_FLAGS: int +PR_CONTROL_STRUCTURE: int +PR_CONTROL_TYPE: int +PR_DELTAX: int +PR_DELTAY: int +PR_XPOS: int +PR_YPOS: int +PR_CONTROL_ID: int +PR_INITIAL_DETAILS_PANE: int +PROP_ID_SECURE_MIN: int +PROP_ID_SECURE_MAX: int +pidExchangeXmitReservedMin: int +pidExchangeNonXmitReservedMin: int +pidProfileMin: int +pidStoreMin: int +pidFolderMin: int +pidMessageReadOnlyMin: int +pidMessageWriteableMin: int +pidAttachReadOnlyMin: int +pidSpecialMin: int +pidAdminMin: int +pidSecureProfileMin: int +PR_PROFILE_VERSION: int +PR_PROFILE_CONFIG_FLAGS: int +PR_PROFILE_HOME_SERVER: int +PR_PROFILE_HOME_SERVER_DN: int +PR_PROFILE_HOME_SERVER_ADDRS: int +PR_PROFILE_USER: int +PR_PROFILE_CONNECT_FLAGS: int +PR_PROFILE_TRANSPORT_FLAGS: int +PR_PROFILE_UI_STATE: int +PR_PROFILE_UNRESOLVED_NAME: int +PR_PROFILE_UNRESOLVED_SERVER: int +PR_PROFILE_BINDING_ORDER: int +PR_PROFILE_MAX_RESTRICT: int +PR_PROFILE_AB_FILES_PATH: int +PR_PROFILE_OFFLINE_STORE_PATH: int +PR_PROFILE_OFFLINE_INFO: int +PR_PROFILE_ADDR_INFO: int +PR_PROFILE_OPTIONS_DATA: int +PR_PROFILE_SECURE_MAILBOX: int +PR_DISABLE_WINSOCK: int +PR_OST_ENCRYPTION: int +PR_PROFILE_OPEN_FLAGS: int +PR_PROFILE_TYPE: int +PR_PROFILE_MAILBOX: int +PR_PROFILE_SERVER: int +PR_PROFILE_SERVER_DN: int +PR_PROFILE_FAVFLD_DISPLAY_NAME: int +PR_PROFILE_FAVFLD_COMMENT: int +PR_PROFILE_ALLPUB_DISPLAY_NAME: int +PR_PROFILE_ALLPUB_COMMENT: int +OSTF_NO_ENCRYPTION: int +OSTF_COMPRESSABLE_ENCRYPTION: int +OSTF_BEST_ENCRYPTION: int +PR_NON_IPM_SUBTREE_ENTRYID: int +PR_EFORMS_REGISTRY_ENTRYID: int +PR_SPLUS_FREE_BUSY_ENTRYID: int +PR_OFFLINE_ADDRBOOK_ENTRYID: int +PR_EFORMS_FOR_LOCALE_ENTRYID: int +PR_FREE_BUSY_FOR_LOCAL_SITE_ENTRYID: int +PR_ADDRBOOK_FOR_LOCAL_SITE_ENTRYID: int +PR_OFFLINE_MESSAGE_ENTRYID: int 
+PR_IPM_FAVORITES_ENTRYID: int +PR_IPM_PUBLIC_FOLDERS_ENTRYID: int +PR_GW_MTSIN_ENTRYID: int +PR_GW_MTSOUT_ENTRYID: int +PR_TRANSFER_ENABLED: int +PR_TEST_LINE_SPEED: int +PR_HIERARCHY_SYNCHRONIZER: int +PR_CONTENTS_SYNCHRONIZER: int +PR_COLLECTOR: int +PR_FAST_TRANSFER: int +PR_STORE_OFFLINE: int +PR_IN_TRANSIT: int +PR_REPLICATION_STYLE: int +PR_REPLICATION_SCHEDULE: int +PR_REPLICATION_MESSAGE_PRIORITY: int +PR_OVERALL_MSG_AGE_LIMIT: int +PR_REPLICATION_ALWAYS_INTERVAL: int +PR_REPLICATION_MSG_SIZE: int +STYLE_ALWAYS_INTERVAL_DEFAULT: int +REPLICATION_MESSAGE_SIZE_LIMIT_DEFAULT: int +STYLE_NEVER: int +STYLE_NORMAL: int +STYLE_ALWAYS: int +STYLE_DEFAULT: int +PR_SOURCE_KEY: int +PR_PARENT_SOURCE_KEY: int +PR_CHANGE_KEY: int +PR_PREDECESSOR_CHANGE_LIST: int +PR_FOLDER_CHILD_COUNT: int +PR_RIGHTS: int +PR_ACL_TABLE: int +PR_RULES_TABLE: int +PR_HAS_RULES: int +PR_ADDRESS_BOOK_ENTRYID: int +PR_ACL_DATA: int +PR_RULES_DATA: int +PR_FOLDER_DESIGN_FLAGS: int +PR_DESIGN_IN_PROGRESS: int +PR_SECURE_ORIGINATION: int +PR_PUBLISH_IN_ADDRESS_BOOK: int +PR_RESOLVE_METHOD: int +PR_ADDRESS_BOOK_DISPLAY_NAME: int +PR_EFORMS_LOCALE_ID: int +PR_REPLICA_LIST: int +PR_OVERALL_AGE_LIMIT: int +RESOLVE_METHOD_DEFAULT: int +RESOLVE_METHOD_LAST_WRITER_WINS: int +RESOLVE_METHOD_NO_CONFLICT_NOTIFICATION: int +PR_PUBLIC_FOLDER_ENTRYID: int +PR_HAS_NAMED_PROPERTIES: int +PR_CREATOR_NAME: int +PR_CREATOR_ENTRYID: int +PR_LAST_MODIFIER_NAME: int +PR_LAST_MODIFIER_ENTRYID: int +PR_HAS_DAMS: int +PR_RULE_TRIGGER_HISTORY: int +PR_MOVE_TO_STORE_ENTRYID: int +PR_MOVE_TO_FOLDER_ENTRYID: int +PR_REPLICA_SERVER: int +PR_DEFERRED_SEND_NUMBER: int +PR_DEFERRED_SEND_UNITS: int +PR_EXPIRY_NUMBER: int +PR_EXPIRY_UNITS: int +PR_DEFERRED_SEND_TIME: int +PR_GW_ADMIN_OPERATIONS: int +PR_P1_CONTENT: int +PR_P1_CONTENT_TYPE: int +PR_CLIENT_ACTIONS: int +PR_DAM_ORIGINAL_ENTRYID: int +PR_DAM_BACK_PATCHED: int +PR_RULE_ERROR: int +PR_RULE_ACTION_TYPE: int +PR_RULE_ACTION_NUMBER: int +PR_RULE_FOLDER_ENTRYID: int +PR_CONFLICT_ENTRYID: int +PR_MESSAGE_LOCALE_ID: int +PR_STORAGE_QUOTA_LIMIT: int +PR_EXCESS_STORAGE_USED: int +PR_SVR_GENERATING_QUOTA_MSG: int +PR_DELEGATED_BY_RULE: int +MSGSTATUS_IN_CONFLICT: int +PR_IN_CONFLICT: int +PR_LONGTERM_ENTRYID_FROM_TABLE: int +PR_ORIGINATOR_NAME: int +PR_ORIGINATOR_ADDR: int +PR_ORIGINATOR_ADDRTYPE: int +PR_ORIGINATOR_ENTRYID: int +PR_ARRIVAL_TIME: int +PR_TRACE_INFO: int +PR_INTERNAL_TRACE_INFO: int +PR_SUBJECT_TRACE_INFO: int +PR_RECIPIENT_NUMBER: int +PR_MTS_SUBJECT_ID: int +PR_REPORT_DESTINATION_NAME: int +PR_REPORT_DESTINATION_ENTRYID: int +PR_CONTENT_SEARCH_KEY: int +PR_FOREIGN_ID: int +PR_FOREIGN_REPORT_ID: int +PR_FOREIGN_SUBJECT_ID: int +PR_MTS_ID: int +PR_MTS_REPORT_ID: int +PR_FOLDER_FLAGS: int +PR_LAST_ACCESS_TIME: int +PR_RESTRICTION_COUNT: int +PR_CATEG_COUNT: int +PR_CACHED_COLUMN_COUNT: int +PR_NORMAL_MSG_W_ATTACH_COUNT: int +PR_ASSOC_MSG_W_ATTACH_COUNT: int +PR_RECIPIENT_ON_NORMAL_MSG_COUNT: int +PR_RECIPIENT_ON_ASSOC_MSG_COUNT: int +PR_ATTACH_ON_NORMAL_MSG_COUNT: int +PR_ATTACH_ON_ASSOC_MSG_COUNT: int +PR_NORMAL_MESSAGE_SIZE: int +PR_NORMAL_MESSAGE_SIZE_EXTENDED: int +PR_ASSOC_MESSAGE_SIZE: int +PR_ASSOC_MESSAGE_SIZE_EXTENDED: int +PR_FOLDER_PATHNAME: int +PR_OWNER_COUNT: int +PR_CONTACT_COUNT: int +PR_MESSAGE_SIZE_EXTENDED: int +PR_USERFIELDS: int +PR_FORCE_USE_ENTRYID_SERVER: int +PR_PROFILE_MDB_DN: int +PST_EXTERN_PROPID_BASE: int +PR_PST_PATH: int +PR_PST_PATH_W: int +PR_PST_PATH_A: int +PR_PST_REMEMBER_PW: int +PR_PST_ENCRYPTION: int +PR_PST_PW_SZ_OLD: int +PR_PST_PW_SZ_OLD_W: 
int +PR_PST_PW_SZ_OLD_A: int +PR_PST_PW_SZ_NEW: int +PR_PST_PW_SZ_NEW_W: int +PR_PST_PW_SZ_NEW_A: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/mapiutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/mapiutil.pyi new file mode 100644 index 00000000..f370af55 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/mapi/mapiutil.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +from win32comext.mapi import mapi as mapi, mapitags as mapitags + +TupleType = tuple +ListType = list +IntType = int +prTable: Incomplete + +def GetPropTagName(pt): ... + +mapiErrorTable: Incomplete + +def GetScodeString(hr): ... + +ptTable: Incomplete + +def GetMapiTypeName(propType, rawType: bool = ...): ... +def GetProperties(obj, propList): ... +def GetAllProperties(obj, make_tag_names: bool = ...): ... +def SetPropertyValue(obj, prop, val) -> None: ... +def SetProperties(msg, propDict) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/propsys/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/propsys/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/propsys/propsys.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/propsys/propsys.pyi new file mode 100644 index 00000000..85137254 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/propsys/propsys.pyi @@ -0,0 +1,60 @@ +from typing_extensions import TypeAlias + +import _win32typing +from win32.lib.pywintypes import com_error + +error: TypeAlias = com_error # noqa: Y042 + +def PSGetItemPropertyHandler( + Item: _win32typing.PyIShellItem, riid: _win32typing.PyIID, ReadWrite: int +) -> _win32typing.PyIPropertyStore: ... +def PSGetPropertyDescription( + Key: _win32typing.PyPROPERTYKEY, riid: _win32typing.PyIID +) -> _win32typing.PyIPropertyDescription: ... +def PSGetPropertySystem(riid: _win32typing.PyIID) -> _win32typing.PyIPropertySystem: ... +def PSGetNameFromPropertyKey(Key: _win32typing.PyPROPERTYKEY) -> str: ... +def PSGetPropertyKeyFromName(Name) -> _win32typing.PyPROPERTYKEY: ... +def PSRegisterPropertySchema(filename) -> None: ... +def PSUnregisterPropertySchema(filename) -> None: ... +def SHGetPropertyStoreFromParsingName( + Path: str, Flags, riid: _win32typing.PyIID, BindCtx: _win32typing.PyIBindCtx | None = ... +) -> _win32typing.PyIPropertyStore: ... +def StgSerializePropVariant(propvar: _win32typing.PyPROPVARIANT): ... +def StgDeserializePropVariant(prop) -> _win32typing.PyPROPVARIANT: ... +def PSCreateMemoryPropertyStore(riid: _win32typing.PyIID) -> _win32typing.PyIPropertyStore: ... +def PSCreatePropertyStoreFromPropertySetStorage( + pss: _win32typing.PyIPropertySetStorage, Mode, riid: _win32typing.PyIID +) -> _win32typing.PyIPropertyStore: ... +def PSLookupPropertyHandlerCLSID(FilePath) -> _win32typing.PyIID: ... +def SHGetPropertyStoreForWindow(hwnd: int, riid: _win32typing.PyIID) -> _win32typing.PyIPropertyStore: ... +def PSGetPropertyFromPropertyStorage(ps, key: _win32typing.PyPROPERTYKEY) -> _win32typing.PyPROPVARIANT: ... +def PSGetNamedPropertyFromPropertyStorage(ps, name) -> _win32typing.PyPROPVARIANT: ... 
+def PSCreateSimplePropertyChange( + flags, key: _win32typing.PyPROPERTYKEY, val: _win32typing.PyPROPVARIANT, riid: _win32typing.PyIID +) -> _win32typing.PyIPropertyChange: ... +def PSCreatePropertyChangeArray() -> _win32typing.PyIPropertyChangeArray: ... +def SHSetDefaultProperties( + hwnd: int, + Item: _win32typing.PyIShellItem, + FileOpFlags: int = ..., + Sink: _win32typing.PyGFileOperationProgressSink | None = ..., +) -> None: ... + +IID_IInitializeWithFile: _win32typing.PyIID +IID_IInitializeWithStream: _win32typing.PyIID +IID_INamedPropertyStore: _win32typing.PyIID +IID_IObjectWithPropertyKey: _win32typing.PyIID +IID_IPersistSerializedPropStorage: _win32typing.PyIID +IID_IPropertyChange: _win32typing.PyIID +IID_IPropertyChangeArray: _win32typing.PyIID +IID_IPropertyDescription: _win32typing.PyIID +IID_IPropertyDescriptionAliasInfo: _win32typing.PyIID +IID_IPropertyDescriptionList: _win32typing.PyIID +IID_IPropertyDescriptionSearchInfo: _win32typing.PyIID +IID_IPropertyEnumType: _win32typing.PyIID +IID_IPropertyEnumTypeList: _win32typing.PyIID +IID_IPropertyStore: _win32typing.PyIID +IID_IPropertyStoreCache: _win32typing.PyIID +IID_IPropertyStoreCapabilities: _win32typing.PyIID +IID_IPropertySystem: _win32typing.PyIID +PROPVARIANTType = _win32typing.PyPROPVARIANT diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/propsys/pscon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/propsys/pscon.pyi new file mode 100644 index 00000000..f142fe8e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/propsys/pscon.pyi @@ -0,0 +1,694 @@ +from _typeshed import Incomplete + +PET_DISCRETEVALUE: int +PET_RANGEDVALUE: int +PET_DEFAULTVALUE: int +PET_ENDRANGE: int +PDTF_DEFAULT: int +PDTF_MULTIPLEVALUES: int +PDTF_ISINNATE: int +PDTF_ISGROUP: int +PDTF_CANGROUPBY: int +PDTF_CANSTACKBY: int +PDTF_ISTREEPROPERTY: int +PDTF_INCLUDEINFULLTEXTQUERY: int +PDTF_ISVIEWABLE: int +PDTF_ISQUERYABLE: int +PDTF_ISSYSTEMPROPERTY: int +PDTF_MASK_ALL: int +PDVF_DEFAULT: int +PDVF_CENTERALIGN: int +PDVF_RIGHTALIGN: int +PDVF_BEGINNEWGROUP: int +PDVF_FILLAREA: int +PDVF_SORTDESCENDING: int +PDVF_SHOWONLYIFPRESENT: int +PDVF_SHOWBYDEFAULT: int +PDVF_SHOWINPRIMARYLIST: int +PDVF_SHOWINSECONDARYLIST: int +PDVF_HIDELABEL: int +PDVF_HIDDEN: int +PDVF_CANWRAP: int +PDVF_MASK_ALL: int +PDDT_STRING: int +PDDT_NUMBER: int +PDDT_BOOLEAN: int +PDDT_DATETIME: int +PDDT_ENUMERATED: int +PDGR_DISCRETE: int +PDGR_ALPHANUMERIC: int +PDGR_SIZE: int +PDGR_DYNAMIC: int +PDGR_DATE: int +PDGR_PERCENT: int +PDGR_ENUMERATED: int +PDFF_DEFAULT: int +PDFF_PREFIXNAME: int +PDFF_FILENAME: int +PDFF_ALWAYSKB: int +PDFF_RESERVED_RIGHTTOLEFT: int +PDFF_SHORTTIME: int +PDFF_LONGTIME: int +PDFF_HIDETIME: int +PDFF_SHORTDATE: int +PDFF_LONGDATE: int +PDFF_HIDEDATE: int +PDFF_RELATIVEDATE: int +PDFF_USEEDITINVITATION: int +PDFF_READONLY: int +PDFF_NOAUTOREADINGORDER: int +PDSD_GENERAL: int +PDSD_A_Z: int +PDSD_LOWEST_HIGHEST: int +PDSD_SMALLEST_BIGGEST: int +PDSD_OLDEST_NEWEST: int +PDRDT_GENERAL: int +PDRDT_DATE: int +PDRDT_SIZE: int +PDRDT_COUNT: int +PDRDT_REVISION: int +PDRDT_LENGTH: int +PDRDT_DURATION: int +PDRDT_SPEED: int +PDRDT_RATE: int +PDRDT_RATING: int +PDRDT_PRIORITY: int +PDAT_DEFAULT: int +PDAT_FIRST: int +PDAT_SUM: int +PDAT_AVERAGE: int +PDAT_DATERANGE: int +PDAT_UNION: int +PDAT_MAX: int +PDAT_MIN: int +PDCOT_NONE: int +PDCOT_STRING: int +PDCOT_SIZE: int 
+PDCOT_DATETIME: int +PDCOT_BOOLEAN: int +PDCOT_NUMBER: int +PDSIF_DEFAULT: int +PDSIF_ININVERTEDINDEX: int +PDSIF_ISCOLUMN: int +PDSIF_ISCOLUMNSPARSE: int +PDCIT_NONE: int +PDCIT_ONDISK: int +PDCIT_INMEMORY: int +PDEF_ALL: int +PDEF_SYSTEM: int +PDEF_NONSYSTEM: int +PDEF_VIEWABLE: int +PDEF_QUERYABLE: int +PDEF_INFULLTEXTQUERY: int +PDEF_COLUMN: int +PSC_NORMAL: int +PSC_NOTINSOURCE: int +PSC_DIRTY: int +COP_IMPLICIT: int +COP_EQUAL: int +COP_NOTEQUAL: int +COP_LESSTHAN: int +COP_GREATERTHAN: int +COP_LESSTHANOREQUAL: int +COP_GREATERTHANOREQUAL: int +COP_VALUE_STARTSWITH: int +COP_VALUE_ENDSWITH: int +COP_VALUE_CONTAINS: int +COP_VALUE_NOTCONTAINS: int +COP_DOSWILDCARDS: int +COP_WORD_EQUAL: int +COP_WORD_STARTSWITH: int +COP_APPLICATION_SPECIFIC: int +FPSPS_READONLY: int +PKEY_PIDSTR_MAX: int +GUIDSTRING_MAX: Incomplete +PKEYSTR_MAX: Incomplete +PKEY_Audio_ChannelCount: Incomplete +PKEY_Audio_Compression: Incomplete +PKEY_Audio_EncodingBitrate: Incomplete +PKEY_Audio_Format: Incomplete +PKEY_Audio_IsVariableBitRate: Incomplete +PKEY_Audio_PeakValue: Incomplete +PKEY_Audio_SampleRate: Incomplete +PKEY_Audio_SampleSize: Incomplete +PKEY_Audio_StreamName: Incomplete +PKEY_Audio_StreamNumber: Incomplete +PKEY_Calendar_Duration: Incomplete +PKEY_Calendar_IsOnline: Incomplete +PKEY_Calendar_IsRecurring: Incomplete +PKEY_Calendar_Location: Incomplete +PKEY_Calendar_OptionalAttendeeAddresses: Incomplete +PKEY_Calendar_OptionalAttendeeNames: Incomplete +PKEY_Calendar_OrganizerAddress: Incomplete +PKEY_Calendar_OrganizerName: Incomplete +PKEY_Calendar_ReminderTime: Incomplete +PKEY_Calendar_RequiredAttendeeAddresses: Incomplete +PKEY_Calendar_RequiredAttendeeNames: Incomplete +PKEY_Calendar_Resources: Incomplete +PKEY_Calendar_ShowTimeAs: Incomplete +PKEY_Calendar_ShowTimeAsText: Incomplete +PKEY_Communication_AccountName: Incomplete +PKEY_Communication_Suffix: Incomplete +PKEY_Communication_TaskStatus: Incomplete +PKEY_Communication_TaskStatusText: Incomplete +PKEY_Computer_DecoratedFreeSpace: Incomplete +PKEY_Contact_Anniversary: Incomplete +PKEY_Contact_AssistantName: Incomplete +PKEY_Contact_AssistantTelephone: Incomplete +PKEY_Contact_Birthday: Incomplete +PKEY_Contact_BusinessAddress: Incomplete +PKEY_Contact_BusinessAddressCity: Incomplete +PKEY_Contact_BusinessAddressCountry: Incomplete +PKEY_Contact_BusinessAddressPostalCode: Incomplete +PKEY_Contact_BusinessAddressPostOfficeBox: Incomplete +PKEY_Contact_BusinessAddressState: Incomplete +PKEY_Contact_BusinessAddressStreet: Incomplete +PKEY_Contact_BusinessFaxNumber: Incomplete +PKEY_Contact_BusinessHomePage: Incomplete +PKEY_Contact_BusinessTelephone: Incomplete +PKEY_Contact_CallbackTelephone: Incomplete +PKEY_Contact_CarTelephone: Incomplete +PKEY_Contact_Children: Incomplete +PKEY_Contact_CompanyMainTelephone: Incomplete +PKEY_Contact_Department: Incomplete +PKEY_Contact_EmailAddress: Incomplete +PKEY_Contact_EmailAddress2: Incomplete +PKEY_Contact_EmailAddress3: Incomplete +PKEY_Contact_EmailAddresses: Incomplete +PKEY_Contact_EmailName: Incomplete +PKEY_Contact_FileAsName: Incomplete +PKEY_Contact_FirstName: Incomplete +PKEY_Contact_FullName: Incomplete +PKEY_Contact_Gender: Incomplete +PKEY_Contact_Hobbies: Incomplete +PKEY_Contact_HomeAddress: Incomplete +PKEY_Contact_HomeAddressCity: Incomplete +PKEY_Contact_HomeAddressCountry: Incomplete +PKEY_Contact_HomeAddressPostalCode: Incomplete +PKEY_Contact_HomeAddressPostOfficeBox: Incomplete +PKEY_Contact_HomeAddressState: Incomplete +PKEY_Contact_HomeAddressStreet: Incomplete 
+PKEY_Contact_HomeFaxNumber: Incomplete +PKEY_Contact_HomeTelephone: Incomplete +PKEY_Contact_IMAddress: Incomplete +PKEY_Contact_Initials: Incomplete +PKEY_Contact_JA_CompanyNamePhonetic: Incomplete +PKEY_Contact_JA_FirstNamePhonetic: Incomplete +PKEY_Contact_JA_LastNamePhonetic: Incomplete +PKEY_Contact_JobTitle: Incomplete +PKEY_Contact_Label: Incomplete +PKEY_Contact_LastName: Incomplete +PKEY_Contact_MailingAddress: Incomplete +PKEY_Contact_MiddleName: Incomplete +PKEY_Contact_MobileTelephone: Incomplete +PKEY_Contact_NickName: Incomplete +PKEY_Contact_OfficeLocation: Incomplete +PKEY_Contact_OtherAddress: Incomplete +PKEY_Contact_OtherAddressCity: Incomplete +PKEY_Contact_OtherAddressCountry: Incomplete +PKEY_Contact_OtherAddressPostalCode: Incomplete +PKEY_Contact_OtherAddressPostOfficeBox: Incomplete +PKEY_Contact_OtherAddressState: Incomplete +PKEY_Contact_OtherAddressStreet: Incomplete +PKEY_Contact_PagerTelephone: Incomplete +PKEY_Contact_PersonalTitle: Incomplete +PKEY_Contact_PrimaryAddressCity: Incomplete +PKEY_Contact_PrimaryAddressCountry: Incomplete +PKEY_Contact_PrimaryAddressPostalCode: Incomplete +PKEY_Contact_PrimaryAddressPostOfficeBox: Incomplete +PKEY_Contact_PrimaryAddressState: Incomplete +PKEY_Contact_PrimaryAddressStreet: Incomplete +PKEY_Contact_PrimaryEmailAddress: Incomplete +PKEY_Contact_PrimaryTelephone: Incomplete +PKEY_Contact_Profession: Incomplete +PKEY_Contact_SpouseName: Incomplete +PKEY_Contact_Suffix: Incomplete +PKEY_Contact_TelexNumber: Incomplete +PKEY_Contact_TTYTDDTelephone: Incomplete +PKEY_Contact_WebPage: Incomplete +PKEY_AcquisitionID: Incomplete +PKEY_ApplicationName: Incomplete +PKEY_Author: Incomplete +PKEY_Capacity: Incomplete +PKEY_Category: Incomplete +PKEY_Comment: Incomplete +PKEY_Company: Incomplete +PKEY_ComputerName: Incomplete +PKEY_ContainedItems: Incomplete +PKEY_ContentStatus: Incomplete +PKEY_ContentType: Incomplete +PKEY_Copyright: Incomplete +PKEY_DateAccessed: Incomplete +PKEY_DateAcquired: Incomplete +PKEY_DateArchived: Incomplete +PKEY_DateCompleted: Incomplete +PKEY_DateCreated: Incomplete +PKEY_DateImported: Incomplete +PKEY_DateModified: Incomplete +PKEY_DueDate: Incomplete +PKEY_EndDate: Incomplete +PKEY_FileAllocationSize: Incomplete +PKEY_FileAttributes: Incomplete +PKEY_FileCount: Incomplete +PKEY_FileDescription: Incomplete +PKEY_FileExtension: Incomplete +PKEY_FileFRN: Incomplete +PKEY_FileName: Incomplete +PKEY_FileOwner: Incomplete +PKEY_FileVersion: Incomplete +PKEY_FindData: Incomplete +PKEY_FlagColor: Incomplete +PKEY_FlagColorText: Incomplete +PKEY_FlagStatus: Incomplete +PKEY_FlagStatusText: Incomplete +PKEY_FreeSpace: Incomplete +PKEY_Identity: Incomplete +PKEY_Importance: Incomplete +PKEY_ImportanceText: Incomplete +PKEY_IsAttachment: Incomplete +PKEY_IsDeleted: Incomplete +PKEY_IsFlagged: Incomplete +PKEY_IsFlaggedComplete: Incomplete +PKEY_IsIncomplete: Incomplete +PKEY_IsRead: Incomplete +PKEY_IsSendToTarget: Incomplete +PKEY_IsShared: Incomplete +PKEY_ItemAuthors: Incomplete +PKEY_ItemDate: Incomplete +PKEY_ItemFolderNameDisplay: Incomplete +PKEY_ItemFolderPathDisplay: Incomplete +PKEY_ItemFolderPathDisplayNarrow: Incomplete +PKEY_ItemName: Incomplete +PKEY_ItemNameDisplay: Incomplete +PKEY_ItemNamePrefix: Incomplete +PKEY_ItemParticipants: Incomplete +PKEY_ItemPathDisplay: Incomplete +PKEY_ItemPathDisplayNarrow: Incomplete +PKEY_ItemType: Incomplete +PKEY_ItemTypeText: Incomplete +PKEY_ItemUrl: Incomplete +PKEY_Keywords: Incomplete +PKEY_Kind: Incomplete +PKEY_KindText: Incomplete +PKEY_Language: 
Incomplete +PKEY_MileageInformation: Incomplete +PKEY_MIMEType: Incomplete +PKEY_Null: Incomplete +PKEY_OfflineAvailability: Incomplete +PKEY_OfflineStatus: Incomplete +PKEY_OriginalFileName: Incomplete +PKEY_ParentalRating: Incomplete +PKEY_ParentalRatingReason: Incomplete +PKEY_ParentalRatingsOrganization: Incomplete +PKEY_ParsingBindContext: Incomplete +PKEY_ParsingName: Incomplete +PKEY_ParsingPath: Incomplete +PKEY_PerceivedType: Incomplete +PKEY_PercentFull: Incomplete +PKEY_Priority: Incomplete +PKEY_PriorityText: Incomplete +PKEY_Project: Incomplete +PKEY_ProviderItemID: Incomplete +PKEY_Rating: Incomplete +PKEY_RatingText: Incomplete +PKEY_Sensitivity: Incomplete +PKEY_SensitivityText: Incomplete +PKEY_SFGAOFlags: Incomplete +PKEY_SharedWith: Incomplete +PKEY_ShareUserRating: Incomplete +PKEY_Shell_OmitFromView: Incomplete +PKEY_SimpleRating: Incomplete +PKEY_Size: Incomplete +PKEY_SoftwareUsed: Incomplete +PKEY_SourceItem: Incomplete +PKEY_StartDate: Incomplete +PKEY_Status: Incomplete +PKEY_Subject: Incomplete +PKEY_Thumbnail: Incomplete +PKEY_ThumbnailCacheId: Incomplete +PKEY_ThumbnailStream: Incomplete +PKEY_Title: Incomplete +PKEY_TotalFileSize: Incomplete +PKEY_Trademarks: Incomplete +PKEY_Document_ByteCount: Incomplete +PKEY_Document_CharacterCount: Incomplete +PKEY_Document_ClientID: Incomplete +PKEY_Document_Contributor: Incomplete +PKEY_Document_DateCreated: Incomplete +PKEY_Document_DatePrinted: Incomplete +PKEY_Document_DateSaved: Incomplete +PKEY_Document_Division: Incomplete +PKEY_Document_DocumentID: Incomplete +PKEY_Document_HiddenSlideCount: Incomplete +PKEY_Document_LastAuthor: Incomplete +PKEY_Document_LineCount: Incomplete +PKEY_Document_Manager: Incomplete +PKEY_Document_MultimediaClipCount: Incomplete +PKEY_Document_NoteCount: Incomplete +PKEY_Document_PageCount: Incomplete +PKEY_Document_ParagraphCount: Incomplete +PKEY_Document_PresentationFormat: Incomplete +PKEY_Document_RevisionNumber: Incomplete +PKEY_Document_Security: Incomplete +PKEY_Document_SlideCount: Incomplete +PKEY_Document_Template: Incomplete +PKEY_Document_TotalEditingTime: Incomplete +PKEY_Document_Version: Incomplete +PKEY_Document_WordCount: Incomplete +PKEY_DRM_DatePlayExpires: Incomplete +PKEY_DRM_DatePlayStarts: Incomplete +PKEY_DRM_Description: Incomplete +PKEY_DRM_IsProtected: Incomplete +PKEY_DRM_PlayCount: Incomplete +PKEY_GPS_Altitude: Incomplete +PKEY_GPS_AltitudeDenominator: Incomplete +PKEY_GPS_AltitudeNumerator: Incomplete +PKEY_GPS_AltitudeRef: Incomplete +PKEY_GPS_AreaInformation: Incomplete +PKEY_GPS_Date: Incomplete +PKEY_GPS_DestBearing: Incomplete +PKEY_GPS_DestBearingDenominator: Incomplete +PKEY_GPS_DestBearingNumerator: Incomplete +PKEY_GPS_DestBearingRef: Incomplete +PKEY_GPS_DestDistance: Incomplete +PKEY_GPS_DestDistanceDenominator: Incomplete +PKEY_GPS_DestDistanceNumerator: Incomplete +PKEY_GPS_DestDistanceRef: Incomplete +PKEY_GPS_DestLatitude: Incomplete +PKEY_GPS_DestLatitudeDenominator: Incomplete +PKEY_GPS_DestLatitudeNumerator: Incomplete +PKEY_GPS_DestLatitudeRef: Incomplete +PKEY_GPS_DestLongitude: Incomplete +PKEY_GPS_DestLongitudeDenominator: Incomplete +PKEY_GPS_DestLongitudeNumerator: Incomplete +PKEY_GPS_DestLongitudeRef: Incomplete +PKEY_GPS_Differential: Incomplete +PKEY_GPS_DOP: Incomplete +PKEY_GPS_DOPDenominator: Incomplete +PKEY_GPS_DOPNumerator: Incomplete +PKEY_GPS_ImgDirection: Incomplete +PKEY_GPS_ImgDirectionDenominator: Incomplete +PKEY_GPS_ImgDirectionNumerator: Incomplete +PKEY_GPS_ImgDirectionRef: Incomplete +PKEY_GPS_Latitude: 
Incomplete +PKEY_GPS_LatitudeDenominator: Incomplete +PKEY_GPS_LatitudeNumerator: Incomplete +PKEY_GPS_LatitudeRef: Incomplete +PKEY_GPS_Longitude: Incomplete +PKEY_GPS_LongitudeDenominator: Incomplete +PKEY_GPS_LongitudeNumerator: Incomplete +PKEY_GPS_LongitudeRef: Incomplete +PKEY_GPS_MapDatum: Incomplete +PKEY_GPS_MeasureMode: Incomplete +PKEY_GPS_ProcessingMethod: Incomplete +PKEY_GPS_Satellites: Incomplete +PKEY_GPS_Speed: Incomplete +PKEY_GPS_SpeedDenominator: Incomplete +PKEY_GPS_SpeedNumerator: Incomplete +PKEY_GPS_SpeedRef: Incomplete +PKEY_GPS_Status: Incomplete +PKEY_GPS_Track: Incomplete +PKEY_GPS_TrackDenominator: Incomplete +PKEY_GPS_TrackNumerator: Incomplete +PKEY_GPS_TrackRef: Incomplete +PKEY_GPS_VersionID: Incomplete +PKEY_Image_BitDepth: Incomplete +PKEY_Image_ColorSpace: Incomplete +PKEY_Image_CompressedBitsPerPixel: Incomplete +PKEY_Image_CompressedBitsPerPixelDenominator: Incomplete +PKEY_Image_CompressedBitsPerPixelNumerator: Incomplete +PKEY_Image_Compression: Incomplete +PKEY_Image_CompressionText: Incomplete +PKEY_Image_Dimensions: Incomplete +PKEY_Image_HorizontalResolution: Incomplete +PKEY_Image_HorizontalSize: Incomplete +PKEY_Image_ImageID: Incomplete +PKEY_Image_ResolutionUnit: Incomplete +PKEY_Image_VerticalResolution: Incomplete +PKEY_Image_VerticalSize: Incomplete +PKEY_Journal_Contacts: Incomplete +PKEY_Journal_EntryType: Incomplete +PKEY_Link_Comment: Incomplete +PKEY_Link_DateVisited: Incomplete +PKEY_Link_Description: Incomplete +PKEY_Link_Status: Incomplete +PKEY_Link_TargetExtension: Incomplete +PKEY_Link_TargetParsingPath: Incomplete +PKEY_Link_TargetSFGAOFlags: Incomplete +PKEY_Media_AuthorUrl: Incomplete +PKEY_Media_AverageLevel: Incomplete +PKEY_Media_ClassPrimaryID: Incomplete +PKEY_Media_ClassSecondaryID: Incomplete +PKEY_Media_CollectionGroupID: Incomplete +PKEY_Media_CollectionID: Incomplete +PKEY_Media_ContentDistributor: Incomplete +PKEY_Media_ContentID: Incomplete +PKEY_Media_CreatorApplication: Incomplete +PKEY_Media_CreatorApplicationVersion: Incomplete +PKEY_Media_DateEncoded: Incomplete +PKEY_Media_DateReleased: Incomplete +PKEY_Media_Duration: Incomplete +PKEY_Media_DVDID: Incomplete +PKEY_Media_EncodedBy: Incomplete +PKEY_Media_EncodingSettings: Incomplete +PKEY_Media_FrameCount: Incomplete +PKEY_Media_MCDI: Incomplete +PKEY_Media_MetadataContentProvider: Incomplete +PKEY_Media_Producer: Incomplete +PKEY_Media_PromotionUrl: Incomplete +PKEY_Media_ProtectionType: Incomplete +PKEY_Media_ProviderRating: Incomplete +PKEY_Media_ProviderStyle: Incomplete +PKEY_Media_Publisher: Incomplete +PKEY_Media_SubscriptionContentId: Incomplete +PKEY_Media_SubTitle: Incomplete +PKEY_Media_UniqueFileIdentifier: Incomplete +PKEY_Media_UserNoAutoInfo: Incomplete +PKEY_Media_UserWebUrl: Incomplete +PKEY_Media_Writer: Incomplete +PKEY_Media_Year: Incomplete +PKEY_Message_AttachmentContents: Incomplete +PKEY_Message_AttachmentNames: Incomplete +PKEY_Message_BccAddress: Incomplete +PKEY_Message_BccName: Incomplete +PKEY_Message_CcAddress: Incomplete +PKEY_Message_CcName: Incomplete +PKEY_Message_ConversationID: Incomplete +PKEY_Message_ConversationIndex: Incomplete +PKEY_Message_DateReceived: Incomplete +PKEY_Message_DateSent: Incomplete +PKEY_Message_FromAddress: Incomplete +PKEY_Message_FromName: Incomplete +PKEY_Message_HasAttachments: Incomplete +PKEY_Message_IsFwdOrReply: Incomplete +PKEY_Message_MessageClass: Incomplete +PKEY_Message_SenderAddress: Incomplete +PKEY_Message_SenderName: Incomplete +PKEY_Message_Store: Incomplete 
+PKEY_Message_ToAddress: Incomplete +PKEY_Message_ToDoTitle: Incomplete +PKEY_Message_ToName: Incomplete +PKEY_Music_AlbumArtist: Incomplete +PKEY_Music_AlbumTitle: Incomplete +PKEY_Music_Artist: Incomplete +PKEY_Music_BeatsPerMinute: Incomplete +PKEY_Music_Composer: Incomplete +PKEY_Music_Conductor: Incomplete +PKEY_Music_ContentGroupDescription: Incomplete +PKEY_Music_Genre: Incomplete +PKEY_Music_InitialKey: Incomplete +PKEY_Music_Lyrics: Incomplete +PKEY_Music_Mood: Incomplete +PKEY_Music_PartOfSet: Incomplete +PKEY_Music_Period: Incomplete +PKEY_Music_SynchronizedLyrics: Incomplete +PKEY_Music_TrackNumber: Incomplete +PKEY_Note_Color: Incomplete +PKEY_Note_ColorText: Incomplete +PKEY_Photo_Aperture: Incomplete +PKEY_Photo_ApertureDenominator: Incomplete +PKEY_Photo_ApertureNumerator: Incomplete +PKEY_Photo_Brightness: Incomplete +PKEY_Photo_BrightnessDenominator: Incomplete +PKEY_Photo_BrightnessNumerator: Incomplete +PKEY_Photo_CameraManufacturer: Incomplete +PKEY_Photo_CameraModel: Incomplete +PKEY_Photo_CameraSerialNumber: Incomplete +PKEY_Photo_Contrast: Incomplete +PKEY_Photo_ContrastText: Incomplete +PKEY_Photo_DateTaken: Incomplete +PKEY_Photo_DigitalZoom: Incomplete +PKEY_Photo_DigitalZoomDenominator: Incomplete +PKEY_Photo_DigitalZoomNumerator: Incomplete +PKEY_Photo_Event: Incomplete +PKEY_Photo_EXIFVersion: Incomplete +PKEY_Photo_ExposureBias: Incomplete +PKEY_Photo_ExposureBiasDenominator: Incomplete +PKEY_Photo_ExposureBiasNumerator: Incomplete +PKEY_Photo_ExposureIndex: Incomplete +PKEY_Photo_ExposureIndexDenominator: Incomplete +PKEY_Photo_ExposureIndexNumerator: Incomplete +PKEY_Photo_ExposureProgram: Incomplete +PKEY_Photo_ExposureProgramText: Incomplete +PKEY_Photo_ExposureTime: Incomplete +PKEY_Photo_ExposureTimeDenominator: Incomplete +PKEY_Photo_ExposureTimeNumerator: Incomplete +PKEY_Photo_Flash: Incomplete +PKEY_Photo_FlashEnergy: Incomplete +PKEY_Photo_FlashEnergyDenominator: Incomplete +PKEY_Photo_FlashEnergyNumerator: Incomplete +PKEY_Photo_FlashManufacturer: Incomplete +PKEY_Photo_FlashModel: Incomplete +PKEY_Photo_FlashText: Incomplete +PKEY_Photo_FNumber: Incomplete +PKEY_Photo_FNumberDenominator: Incomplete +PKEY_Photo_FNumberNumerator: Incomplete +PKEY_Photo_FocalLength: Incomplete +PKEY_Photo_FocalLengthDenominator: Incomplete +PKEY_Photo_FocalLengthInFilm: Incomplete +PKEY_Photo_FocalLengthNumerator: Incomplete +PKEY_Photo_FocalPlaneXResolution: Incomplete +PKEY_Photo_FocalPlaneXResolutionDenominator: Incomplete +PKEY_Photo_FocalPlaneXResolutionNumerator: Incomplete +PKEY_Photo_FocalPlaneYResolution: Incomplete +PKEY_Photo_FocalPlaneYResolutionDenominator: Incomplete +PKEY_Photo_FocalPlaneYResolutionNumerator: Incomplete +PKEY_Photo_GainControl: Incomplete +PKEY_Photo_GainControlDenominator: Incomplete +PKEY_Photo_GainControlNumerator: Incomplete +PKEY_Photo_GainControlText: Incomplete +PKEY_Photo_ISOSpeed: Incomplete +PKEY_Photo_LensManufacturer: Incomplete +PKEY_Photo_LensModel: Incomplete +PKEY_Photo_LightSource: Incomplete +PKEY_Photo_MakerNote: Incomplete +PKEY_Photo_MakerNoteOffset: Incomplete +PKEY_Photo_MaxAperture: Incomplete +PKEY_Photo_MaxApertureDenominator: Incomplete +PKEY_Photo_MaxApertureNumerator: Incomplete +PKEY_Photo_MeteringMode: Incomplete +PKEY_Photo_MeteringModeText: Incomplete +PKEY_Photo_Orientation: Incomplete +PKEY_Photo_OrientationText: Incomplete +PKEY_Photo_PhotometricInterpretation: Incomplete +PKEY_Photo_PhotometricInterpretationText: Incomplete +PKEY_Photo_ProgramMode: Incomplete +PKEY_Photo_ProgramModeText: Incomplete 
+PKEY_Photo_RelatedSoundFile: Incomplete +PKEY_Photo_Saturation: Incomplete +PKEY_Photo_SaturationText: Incomplete +PKEY_Photo_Sharpness: Incomplete +PKEY_Photo_SharpnessText: Incomplete +PKEY_Photo_ShutterSpeed: Incomplete +PKEY_Photo_ShutterSpeedDenominator: Incomplete +PKEY_Photo_ShutterSpeedNumerator: Incomplete +PKEY_Photo_SubjectDistance: Incomplete +PKEY_Photo_SubjectDistanceDenominator: Incomplete +PKEY_Photo_SubjectDistanceNumerator: Incomplete +PKEY_Photo_TranscodedForSync: Incomplete +PKEY_Photo_WhiteBalance: Incomplete +PKEY_Photo_WhiteBalanceText: Incomplete +PKEY_PropGroup_Advanced: Incomplete +PKEY_PropGroup_Audio: Incomplete +PKEY_PropGroup_Calendar: Incomplete +PKEY_PropGroup_Camera: Incomplete +PKEY_PropGroup_Contact: Incomplete +PKEY_PropGroup_Content: Incomplete +PKEY_PropGroup_Description: Incomplete +PKEY_PropGroup_FileSystem: Incomplete +PKEY_PropGroup_General: Incomplete +PKEY_PropGroup_GPS: Incomplete +PKEY_PropGroup_Image: Incomplete +PKEY_PropGroup_Media: Incomplete +PKEY_PropGroup_MediaAdvanced: Incomplete +PKEY_PropGroup_Message: Incomplete +PKEY_PropGroup_Music: Incomplete +PKEY_PropGroup_Origin: Incomplete +PKEY_PropGroup_PhotoAdvanced: Incomplete +PKEY_PropGroup_RecordedTV: Incomplete +PKEY_PropGroup_Video: Incomplete +PKEY_PropList_ConflictPrompt: Incomplete +PKEY_PropList_ExtendedTileInfo: Incomplete +PKEY_PropList_FileOperationPrompt: Incomplete +PKEY_PropList_FullDetails: Incomplete +PKEY_PropList_InfoTip: Incomplete +PKEY_PropList_NonPersonal: Incomplete +PKEY_PropList_PreviewDetails: Incomplete +PKEY_PropList_PreviewTitle: Incomplete +PKEY_PropList_QuickTip: Incomplete +PKEY_PropList_TileInfo: Incomplete +PKEY_PropList_XPDetailsPanel: Incomplete +PKEY_RecordedTV_ChannelNumber: Incomplete +PKEY_RecordedTV_Credits: Incomplete +PKEY_RecordedTV_DateContentExpires: Incomplete +PKEY_RecordedTV_EpisodeName: Incomplete +PKEY_RecordedTV_IsATSCContent: Incomplete +PKEY_RecordedTV_IsClosedCaptioningAvailable: Incomplete +PKEY_RecordedTV_IsDTVContent: Incomplete +PKEY_RecordedTV_IsHDContent: Incomplete +PKEY_RecordedTV_IsRepeatBroadcast: Incomplete +PKEY_RecordedTV_IsSAP: Incomplete +PKEY_RecordedTV_NetworkAffiliation: Incomplete +PKEY_RecordedTV_OriginalBroadcastDate: Incomplete +PKEY_RecordedTV_ProgramDescription: Incomplete +PKEY_RecordedTV_RecordingTime: Incomplete +PKEY_RecordedTV_StationCallSign: Incomplete +PKEY_RecordedTV_StationName: Incomplete +PKEY_Search_AutoSummary: Incomplete +PKEY_Search_ContainerHash: Incomplete +PKEY_Search_Contents: Incomplete +PKEY_Search_EntryID: Incomplete +PKEY_Search_GatherTime: Incomplete +PKEY_Search_IsClosedDirectory: Incomplete +PKEY_Search_IsFullyContained: Incomplete +PKEY_Search_QueryFocusedSummary: Incomplete +PKEY_Search_Rank: Incomplete +PKEY_Search_Store: Incomplete +PKEY_Search_UrlToIndex: Incomplete +PKEY_Search_UrlToIndexWithModificationTime: Incomplete +PKEY_DescriptionID: Incomplete +PKEY_Link_TargetSFGAOFlagsStrings: Incomplete +PKEY_Link_TargetUrl: Incomplete +PKEY_Shell_SFGAOFlagsStrings: Incomplete +PKEY_Software_DateLastUsed: Incomplete +PKEY_Software_ProductName: Incomplete +PKEY_Sync_Comments: Incomplete +PKEY_Sync_ConflictDescription: Incomplete +PKEY_Sync_ConflictFirstLocation: Incomplete +PKEY_Sync_ConflictSecondLocation: Incomplete +PKEY_Sync_HandlerCollectionID: Incomplete +PKEY_Sync_HandlerID: Incomplete +PKEY_Sync_HandlerName: Incomplete +PKEY_Sync_HandlerType: Incomplete +PKEY_Sync_HandlerTypeLabel: Incomplete +PKEY_Sync_ItemID: Incomplete +PKEY_Sync_ItemName: Incomplete 
+PKEY_Task_BillingInformation: Incomplete +PKEY_Task_CompletionStatus: Incomplete +PKEY_Task_Owner: Incomplete +PKEY_Video_Compression: Incomplete +PKEY_Video_Director: Incomplete +PKEY_Video_EncodingBitrate: Incomplete +PKEY_Video_FourCC: Incomplete +PKEY_Video_FrameHeight: Incomplete +PKEY_Video_FrameRate: Incomplete +PKEY_Video_FrameWidth: Incomplete +PKEY_Video_HorizontalAspectRatio: Incomplete +PKEY_Video_SampleSize: Incomplete +PKEY_Video_StreamName: Incomplete +PKEY_Video_StreamNumber: Incomplete +PKEY_Video_TotalBitrate: Incomplete +PKEY_Video_VerticalAspectRatio: Incomplete +PKEY_Volume_FileSystem: Incomplete +PKEY_Volume_IsMappedDrive: Incomplete +PKEY_Volume_IsRoot: Incomplete +PKEY_AppUserModel_RelaunchCommand: Incomplete +PKEY_AppUserModel_RelaunchIconResource: Incomplete +PKEY_AppUserModel_RelaunchDisplayNameResource: Incomplete +PKEY_AppUserModel_ID: Incomplete +PKEY_AppUserModel_IsDestListSeparator: Incomplete +PKEY_AppUserModel_ExcludeFromShowInNewInstall: Incomplete +PKEY_AppUserModel_PreventPinning: Incomplete +PKA_SET: int +PKA_APPEND: int +PKA_DELETE: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/shell/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/shell/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/shell/shell.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/shell/shell.pyi new file mode 100644 index 00000000..35a07726 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/shell/shell.pyi @@ -0,0 +1,436 @@ +from _typeshed import Incomplete +from typing_extensions import TypeAlias + +import _win32typing +from win32.lib.pywintypes import com_error + +error: TypeAlias = com_error # noqa: Y042 + +def AssocCreate() -> _win32typing.PyIQueryAssociations: ... +def AssocCreateForClasses() -> _win32typing.PyIUnknown: ... +def DragQueryFile(hglobal: int, index) -> str: ... +def DragQueryFileW(hglobal: int, index) -> str: ... +def DragQueryPoint(hglobal: int) -> tuple[Incomplete, Incomplete, Incomplete]: ... +def IsUserAnAdmin() -> bool: ... +def SHCreateDataObject( + parent, children: list[Incomplete], do_inner: _win32typing.PyIDataObject, iid: _win32typing.PyIID +) -> _win32typing.PyIUnknown: ... +def SHCreateDefaultContextMenu(dcm, iid: _win32typing.PyIID) -> _win32typing.PyIUnknown: ... +def SHCreateDefaultExtractIcon() -> _win32typing.PyIDefaultExtractIconInit: ... +def SHCreateShellFolderView( + sf: _win32typing.PyIShellFolder, viewOuter: _win32typing.PyIShellView | None = ..., callbacks: Incomplete | None = ... +) -> _win32typing.PyIShellView: ... +def SHCreateShellItemArray( + parent: _win32typing.PyIDL, sf: _win32typing.PyIShellFolder, children: list[_win32typing.PyIDL] +) -> _win32typing.PyIShellItemArray: ... +def SHCreateShellItemArrayFromDataObject( + do: _win32typing.PyIDataObject, iid: _win32typing.PyIID +) -> _win32typing.PyIShellItemArray: ... +def SHCreateShellItemArrayFromShellItem( + si: _win32typing.PyIShellItem, riid: _win32typing.PyIID +) -> _win32typing.PyIShellItemArray: ... 
+def SHBrowseForFolder(
+    hwndOwner: int | None = ...,
+    pidlRoot: _win32typing.PyIDL | None = ...,
+    title: str | None = ...,
+    flags: int = ...,
+    callback: Incomplete | None = ...,
+    callback_data: Incomplete | None = ...,
+) -> tuple[_win32typing.PyIDL, Incomplete, Incomplete]: ...
+def SHGetFileInfo(
+    name: _win32typing.PyIDL | str, dwFileAttributes, uFlags, infoAttrs: int = ...
+) -> tuple[Incomplete, _win32typing.SHFILEINFO]: ...
+def SHGetFolderPath(hwndOwner: int, nFolder, handle: int, flags) -> str: ...
+def SHSetFolderPath(csidl, Path, hToken: int | None = ...) -> None: ...
+def SHGetFolderLocation(hwndOwner: int, nFolder, hToken: int | None = ..., reserved=...) -> _win32typing.PyIDL: ...
+def SHGetSpecialFolderPath(hwndOwner: int, nFolder, bCreate: int = ...) -> str: ...
+def SHGetSpecialFolderLocation(hwndOwner: int, nFolder) -> _win32typing.PyIDL: ...
+def SHAddToRecentDocs(Flags, data) -> None: ...
+def SHEmptyRecycleBin(hwnd: int, path: str, flags) -> None: ...
+def SHQueryRecycleBin(RootPath: str | None = ...) -> tuple[Incomplete, Incomplete]: ...
+def SHGetDesktopFolder() -> _win32typing.PyIShellFolder: ...
+def SHUpdateImage(HashItem: str, Index, Flags, ImageIndex) -> None: ...
+def SHChangeNotify(EventId, Flags, Item1, Item2) -> None: ...
+def SHChangeNotifyRegister(hwnd: int, sources, events, msg): ...
+def SHChangeNotifyDeregister(_id) -> None: ...
+def SHCreateItemFromParsingName(name, ctx: _win32typing.PyIBindCtx, riid: _win32typing.PyIID) -> _win32typing.PyIShellItem: ...
+def SHCreateItemFromRelativeName(
+    Parent: _win32typing.PyIShellItem, Name, ctx: _win32typing.PyIBindCtx, riid: _win32typing.PyIID
+) -> _win32typing.PyIShellItem: ...
+def SHCreateItemInKnownFolder(
+    FolderId: _win32typing.PyIID, Flags, Name, riid: _win32typing.PyIID
+) -> _win32typing.PyIShellItem: ...
+def SHCreateItemWithParent(
+    Parent: _win32typing.PyIDL, sfParent: _win32typing.PyIShellFolder, child: _win32typing.PyIDL, riid: _win32typing.PyIID
+) -> _win32typing.PyIShellItem: ...
+def SHGetInstanceExplorer() -> _win32typing.PyIUnknown: ...
+def SHFileOperation(operation: _win32typing.SHFILEOPSTRUCT) -> tuple[Incomplete, Incomplete]: ...
+def StringAsCIDA(pidl: str) -> tuple[_win32typing.PyIDL, Incomplete]: ...
+def CIDAAsString(pidl: str) -> str: ...
+def StringAsPIDL(pidl: str) -> _win32typing.PyIDL: ...
+def AddressAsPIDL(address) -> _win32typing.PyIDL: ...
+def PIDLAsString(pidl: _win32typing.PyIDL) -> str: ...
+def SHGetSettings(mask: int = ...): ...
+def FILEGROUPDESCRIPTORAsString(descriptors: list[Incomplete], arg) -> str: ...
+def StringAsFILEGROUPDESCRIPTOR(buf, make_unicode: int = ...) -> list[Incomplete]: ...
+def ShellExecuteEx(
+    lpVerb: str,
+    lpFile: str,
+    lpParameters: str,
+    lpDirectory: str,
+    lpIDlist: _win32typing.PyIDL,
+    obClass: str,
+    hkeyClass,
+    dwHotKey,
+    hIcon: int,
+    hMonitor: int,
+    fMask: int = ...,
+    hwnd: int = ...,
+    nShow: int = ...,
+): ...
+def SHGetViewStatePropertyBag(
+    pidl: _win32typing.PyIDL, BagName: str, Flags, riid: _win32typing.PyIID
+) -> _win32typing.PyIPropertyBag: ...
+def SHILCreateFromPath(Path: str, Flags) -> tuple[_win32typing.PyIDL, Incomplete]: ...
+def SHCreateShellItem(
+    pidlParent: _win32typing.PyIDL, sfParent: _win32typing.PyIShellFolder, Child: _win32typing.PyIDL
+) -> _win32typing.PyIShellItem: ...
+def SHOpenFolderAndSelectItems(Folder: _win32typing.PyIDL, Items: tuple[_win32typing.PyIDL, ...], Flags=...) -> None: ...
+def SHCreateStreamOnFileEx(File, Mode, Attributes, Create, Template: Incomplete | None = ...) -> _win32typing.PyIStream: ...
+def SetCurrentProcessExplicitAppUserModelID(AppID) -> None: ...
+def GetCurrentProcessExplicitAppUserModelID(): ...
+def SHParseDisplayName(
+    Name, Attributes, BindCtx: _win32typing.PyIBindCtx | None = ...
+) -> tuple[_win32typing.PyIDL, Incomplete]: ...
+def SHCreateItemFromIDList(*args, **kwargs): ...  # incomplete
+def SHCreateShellItemArrayFromIDLists(*args, **kwargs): ...  # incomplete
+def SHGetIDListFromObject(*args, **kwargs): ...  # incomplete
+def SHGetNameFromIDList(*args, **kwargs): ...  # incomplete
+def SHGetPathFromIDList(*args, **kwargs): ...  # incomplete
+def SHGetPathFromIDListW(*args, **kwargs): ...  # incomplete
+
+BHID_AssociationArray: _win32typing.PyIID
+BHID_DataObject: _win32typing.PyIID
+BHID_EnumItems: _win32typing.PyIID
+BHID_Filter: _win32typing.PyIID
+BHID_LinkTargetItem: _win32typing.PyIID
+BHID_PropertyStore: _win32typing.PyIID
+BHID_SFObject: _win32typing.PyIID
+BHID_SFUIObject: _win32typing.PyIID
+BHID_SFViewObject: _win32typing.PyIID
+BHID_Storage: _win32typing.PyIID
+BHID_StorageEnum: _win32typing.PyIID
+BHID_Stream: _win32typing.PyIID
+BHID_ThumbnailHandler: _win32typing.PyIID
+BHID_Transfer: _win32typing.PyIID
+CGID_DefView: _win32typing.PyIID
+CGID_Explorer: _win32typing.PyIID
+CGID_ExplorerBarDoc: _win32typing.PyIID
+CGID_ShellDocView: _win32typing.PyIID
+CGID_ShellServiceObject: _win32typing.PyIID
+CLSID_ActiveDesktop: _win32typing.PyIID
+CLSID_ApplicationDestinations: _win32typing.PyIID
+CLSID_ApplicationDocumentLists: _win32typing.PyIID
+CLSID_ControlPanel: _win32typing.PyIID
+CLSID_DestinationList: _win32typing.PyIID
+CLSID_DragDropHelper: _win32typing.PyIID
+CLSID_EnumerableObjectCollection: _win32typing.PyIID
+CLSID_FileOperation: _win32typing.PyIID
+CLSID_Internet: _win32typing.PyIID
+CLSID_InternetShortcut: _win32typing.PyIID
+CLSID_KnownFolderManager: _win32typing.PyIID
+CLSID_MyComputer: _win32typing.PyIID
+CLSID_MyDocuments: _win32typing.PyIID
+CLSID_NetworkDomain: _win32typing.PyIID
+CLSID_NetworkPlaces: _win32typing.PyIID
+CLSID_NetworkServer: _win32typing.PyIID
+CLSID_NetworkShare: _win32typing.PyIID
+CLSID_Printers: _win32typing.PyIID
+CLSID_RecycleBin: _win32typing.PyIID
+CLSID_ShellDesktop: _win32typing.PyIID
+CLSID_ShellFSFolder: _win32typing.PyIID
+CLSID_ShellItem: _win32typing.PyIID
+CLSID_ShellLibrary: _win32typing.PyIID
+CLSID_ShellLink: _win32typing.PyIID
+CLSID_TaskbarList: _win32typing.PyIID
+EP_AdvQueryPane: _win32typing.PyIID
+EP_Commands: _win32typing.PyIID
+EP_Commands_Organize: _win32typing.PyIID
+EP_Commands_View: _win32typing.PyIID
+EP_DetailsPane: _win32typing.PyIID
+EP_NavPane: _win32typing.PyIID
+EP_PreviewPane: _win32typing.PyIID
+EP_QueryPane: _win32typing.PyIID
+FMTID_AudioSummaryInformation: _win32typing.PyIID
+FMTID_Briefcase: _win32typing.PyIID
+FMTID_Displaced: _win32typing.PyIID
+FMTID_ImageProperties: _win32typing.PyIID
+FMTID_ImageSummaryInformation: _win32typing.PyIID
+FMTID_InternetSite: _win32typing.PyIID
+FMTID_Intshcut: _win32typing.PyIID
+FMTID_MediaFileSummaryInformation: _win32typing.PyIID
+FMTID_Misc: _win32typing.PyIID
+FMTID_Query: _win32typing.PyIID
+FMTID_ShellDetails: _win32typing.PyIID
+FMTID_Storage: _win32typing.PyIID
+FMTID_SummaryInformation: _win32typing.PyIID
+FMTID_Volume: _win32typing.PyIID
+FMTID_WebView: _win32typing.PyIID
+FOLDERID_AddNewPrograms: _win32typing.PyIID
+FOLDERID_AdminTools: _win32typing.PyIID
+FOLDERID_AppUpdates: _win32typing.PyIID
+FOLDERID_CDBurning: _win32typing.PyIID
+FOLDERID_ChangeRemovePrograms: _win32typing.PyIID
+FOLDERID_CommonAdminTools: _win32typing.PyIID +FOLDERID_CommonOEMLinks: _win32typing.PyIID +FOLDERID_CommonPrograms: _win32typing.PyIID +FOLDERID_CommonStartMenu: _win32typing.PyIID +FOLDERID_CommonStartup: _win32typing.PyIID +FOLDERID_CommonTemplates: _win32typing.PyIID +FOLDERID_ComputerFolder: _win32typing.PyIID +FOLDERID_ConflictFolder: _win32typing.PyIID +FOLDERID_ConnectionsFolder: _win32typing.PyIID +FOLDERID_Contacts: _win32typing.PyIID +FOLDERID_ControlPanelFolder: _win32typing.PyIID +FOLDERID_Cookies: _win32typing.PyIID +FOLDERID_Desktop: _win32typing.PyIID +FOLDERID_DeviceMetadataStore: _win32typing.PyIID +FOLDERID_Documents: _win32typing.PyIID +FOLDERID_DocumentsLibrary: _win32typing.PyIID +FOLDERID_Downloads: _win32typing.PyIID +FOLDERID_Favorites: _win32typing.PyIID +FOLDERID_Fonts: _win32typing.PyIID +FOLDERID_GameTasks: _win32typing.PyIID +FOLDERID_Games: _win32typing.PyIID +FOLDERID_History: _win32typing.PyIID +FOLDERID_HomeGroup: _win32typing.PyIID +FOLDERID_ImplicitAppShortcuts: _win32typing.PyIID +FOLDERID_InternetCache: _win32typing.PyIID +FOLDERID_InternetFolder: _win32typing.PyIID +FOLDERID_Libraries: _win32typing.PyIID +FOLDERID_Links: _win32typing.PyIID +FOLDERID_LocalAppData: _win32typing.PyIID +FOLDERID_LocalAppDataLow: _win32typing.PyIID +FOLDERID_LocalizedResourcesDir: _win32typing.PyIID +FOLDERID_Music: _win32typing.PyIID +FOLDERID_MusicLibrary: _win32typing.PyIID +FOLDERID_NetHood: _win32typing.PyIID +FOLDERID_NetworkFolder: _win32typing.PyIID +FOLDERID_OriginalImages: _win32typing.PyIID +FOLDERID_PhotoAlbums: _win32typing.PyIID +FOLDERID_Pictures: _win32typing.PyIID +FOLDERID_PicturesLibrary: _win32typing.PyIID +FOLDERID_Playlists: _win32typing.PyIID +FOLDERID_PrintHood: _win32typing.PyIID +FOLDERID_PrintersFolder: _win32typing.PyIID +FOLDERID_Profile: _win32typing.PyIID +FOLDERID_ProgramData: _win32typing.PyIID +FOLDERID_ProgramFiles: _win32typing.PyIID +FOLDERID_ProgramFilesCommon: _win32typing.PyIID +FOLDERID_ProgramFilesCommonX64: _win32typing.PyIID +FOLDERID_ProgramFilesCommonX86: _win32typing.PyIID +FOLDERID_ProgramFilesX64: _win32typing.PyIID +FOLDERID_ProgramFilesX86: _win32typing.PyIID +FOLDERID_Programs: _win32typing.PyIID +FOLDERID_Public: _win32typing.PyIID +FOLDERID_PublicDesktop: _win32typing.PyIID +FOLDERID_PublicDocuments: _win32typing.PyIID +FOLDERID_PublicDownloads: _win32typing.PyIID +FOLDERID_PublicGameTasks: _win32typing.PyIID +FOLDERID_PublicLibraries: _win32typing.PyIID +FOLDERID_PublicMusic: _win32typing.PyIID +FOLDERID_PublicPictures: _win32typing.PyIID +FOLDERID_PublicRingtones: _win32typing.PyIID +FOLDERID_PublicVideos: _win32typing.PyIID +FOLDERID_QuickLaunch: _win32typing.PyIID +FOLDERID_Recent: _win32typing.PyIID +FOLDERID_RecordedTVLibrary: _win32typing.PyIID +FOLDERID_RecycleBinFolder: _win32typing.PyIID +FOLDERID_ResourceDir: _win32typing.PyIID +FOLDERID_Ringtones: _win32typing.PyIID +FOLDERID_RoamingAppData: _win32typing.PyIID +FOLDERID_SEARCH_CSC: _win32typing.PyIID +FOLDERID_SEARCH_MAPI: _win32typing.PyIID +FOLDERID_SampleMusic: _win32typing.PyIID +FOLDERID_SamplePictures: _win32typing.PyIID +FOLDERID_SamplePlaylists: _win32typing.PyIID +FOLDERID_SampleVideos: _win32typing.PyIID +FOLDERID_SavedGames: _win32typing.PyIID +FOLDERID_SavedSearches: _win32typing.PyIID +FOLDERID_SearchHome: _win32typing.PyIID +FOLDERID_SendTo: _win32typing.PyIID +FOLDERID_SidebarDefaultParts: _win32typing.PyIID +FOLDERID_SidebarParts: _win32typing.PyIID +FOLDERID_StartMenu: _win32typing.PyIID +FOLDERID_Startup: _win32typing.PyIID 
+FOLDERID_SyncManagerFolder: _win32typing.PyIID +FOLDERID_SyncResultsFolder: _win32typing.PyIID +FOLDERID_SyncSetupFolder: _win32typing.PyIID +FOLDERID_System: _win32typing.PyIID +FOLDERID_SystemX86: _win32typing.PyIID +FOLDERID_Templates: _win32typing.PyIID +FOLDERID_UserPinned: _win32typing.PyIID +FOLDERID_UserProfiles: _win32typing.PyIID +FOLDERID_UserProgramFiles: _win32typing.PyIID +FOLDERID_UserProgramFilesCommon: _win32typing.PyIID +FOLDERID_UsersFiles: _win32typing.PyIID +FOLDERID_UsersLibraries: _win32typing.PyIID +FOLDERID_Videos: _win32typing.PyIID +FOLDERID_VideosLibrary: _win32typing.PyIID +FOLDERID_Windows: _win32typing.PyIID +FOLDERTYPEID_Communications: _win32typing.PyIID +FOLDERTYPEID_CompressedFolder: _win32typing.PyIID +FOLDERTYPEID_Contacts: _win32typing.PyIID +FOLDERTYPEID_ControlPanelCategory: _win32typing.PyIID +FOLDERTYPEID_ControlPanelClassic: _win32typing.PyIID +FOLDERTYPEID_Documents: _win32typing.PyIID +FOLDERTYPEID_Games: _win32typing.PyIID +FOLDERTYPEID_Generic: _win32typing.PyIID +FOLDERTYPEID_GenericLibrary: _win32typing.PyIID +FOLDERTYPEID_GenericSearchResults: _win32typing.PyIID +FOLDERTYPEID_Invalid: _win32typing.PyIID +FOLDERTYPEID_Music: _win32typing.PyIID +FOLDERTYPEID_NetworkExplorer: _win32typing.PyIID +FOLDERTYPEID_OpenSearch: _win32typing.PyIID +FOLDERTYPEID_OtherUsers: _win32typing.PyIID +FOLDERTYPEID_Pictures: _win32typing.PyIID +FOLDERTYPEID_Printers: _win32typing.PyIID +FOLDERTYPEID_PublishedItems: _win32typing.PyIID +FOLDERTYPEID_RecordedTV: _win32typing.PyIID +FOLDERTYPEID_RecycleBin: _win32typing.PyIID +FOLDERTYPEID_SavedGames: _win32typing.PyIID +FOLDERTYPEID_SearchConnector: _win32typing.PyIID +FOLDERTYPEID_SearchHome: _win32typing.PyIID +FOLDERTYPEID_Searches: _win32typing.PyIID +FOLDERTYPEID_SoftwareExplorer: _win32typing.PyIID +FOLDERTYPEID_StartMenu: _win32typing.PyIID +FOLDERTYPEID_UserFiles: _win32typing.PyIID +FOLDERTYPEID_UsersLibraries: _win32typing.PyIID +FOLDERTYPEID_Videos: _win32typing.PyIID +HOTKEYF_ALT: int +HOTKEYF_CONTROL: int +HOTKEYF_EXT: int +HOTKEYF_SHIFT: int +IID_CDefView: _win32typing.PyIID +IID_IADesktopP2: _win32typing.PyIID +IID_IActiveDesktop: _win32typing.PyIID +IID_IActiveDesktopP: _win32typing.PyIID +IID_IApplicationDestinations: _win32typing.PyIID +IID_IApplicationDocumentLists: _win32typing.PyIID +IID_IAsyncOperation: _win32typing.PyIID +IID_IBrowserFrameOptions: _win32typing.PyIID +IID_ICategorizer: _win32typing.PyIID +IID_ICategoryProvider: _win32typing.PyIID +IID_IColumnProvider: _win32typing.PyIID +IID_IContextMenu: _win32typing.PyIID +IID_IContextMenu2: _win32typing.PyIID +IID_IContextMenu3: _win32typing.PyIID +IID_ICopyHook: _win32typing.PyIID +IID_ICopyHookA: _win32typing.PyIID +IID_ICopyHookW: _win32typing.PyIID +IID_ICurrentItem: _win32typing.PyIID +IID_ICustomDestinationList: _win32typing.PyIID +IID_IDefaultExtractIconInit: _win32typing.PyIID +IID_IDeskBand: _win32typing.PyIID +IID_IDisplayItem: _win32typing.PyIID +IID_IDockingWindow: _win32typing.PyIID +IID_IDropTargetHelper: _win32typing.PyIID +IID_IEmptyVolumeCache: _win32typing.PyIID +IID_IEmptyVolumeCache2: _win32typing.PyIID +IID_IEmptyVolumeCacheCallBack: _win32typing.PyIID +IID_IEnumExplorerCommand: _win32typing.PyIID +IID_IEnumIDList: _win32typing.PyIID +IID_IEnumObjects: _win32typing.PyIID +IID_IEnumResources: _win32typing.PyIID +IID_IEnumShellItems: _win32typing.PyIID +IID_IExplorerBrowser: _win32typing.PyIID +IID_IExplorerBrowserEvents: _win32typing.PyIID +IID_IExplorerCommand: _win32typing.PyIID +IID_IExplorerCommandProvider: 
_win32typing.PyIID +IID_IExplorerPaneVisibility: _win32typing.PyIID +IID_IExtractIcon: _win32typing.PyIID +IID_IExtractIconW: _win32typing.PyIID +IID_IExtractImage: _win32typing.PyIID +IID_IFileOperation: _win32typing.PyIID +IID_IFileOperationProgressSink: _win32typing.PyIID +IID_IIdentityName: _win32typing.PyIID +IID_IKnownFolder: _win32typing.PyIID +IID_IKnownFolderManager: _win32typing.PyIID +IID_INameSpaceTreeControl: _win32typing.PyIID +IID_IObjectArray: _win32typing.PyIID +IID_IObjectCollection: _win32typing.PyIID +IID_IPersistFolder: _win32typing.PyIID +IID_IPersistFolder2: _win32typing.PyIID +IID_IQueryAssociations: _win32typing.PyIID +IID_IRelatedItem: _win32typing.PyIID +IID_IShellBrowser: _win32typing.PyIID +IID_IShellCopyHook: _win32typing.PyIID +IID_IShellCopyHookA: _win32typing.PyIID +IID_IShellCopyHookW: _win32typing.PyIID +IID_IShellExtInit: _win32typing.PyIID +IID_IShellFolder: _win32typing.PyIID +IID_IShellFolder2: _win32typing.PyIID +IID_IShellIcon: _win32typing.PyIID +IID_IShellIconOverlay: _win32typing.PyIID +IID_IShellIconOverlayIdentifier: _win32typing.PyIID +IID_IShellIconOverlayManager: _win32typing.PyIID +IID_IShellItem: _win32typing.PyIID +IID_IShellItem2: _win32typing.PyIID +IID_IShellItemArray: _win32typing.PyIID +IID_IShellItemResources: _win32typing.PyIID +IID_IShellLibrary: _win32typing.PyIID +IID_IShellLink: _win32typing.PyIID +IID_IShellLinkA: _win32typing.PyIID +IID_IShellLinkDataList: _win32typing.PyIID +IID_IShellLinkW: _win32typing.PyIID +IID_IShellView: _win32typing.PyIID +IID_ITaskbarList: _win32typing.PyIID +IID_ITransferAdviseSink: _win32typing.PyIID +IID_ITransferDestination: _win32typing.PyIID +IID_ITransferMediumItem: _win32typing.PyIID +IID_ITransferSource: _win32typing.PyIID +IID_IUniformResourceLocator: _win32typing.PyIID +ResourceTypeStream: _win32typing.PyIID +SID_CtxQueryAssociations: _win32typing.PyIID +SID_DefView: _win32typing.PyIID +SID_LinkSite: _win32typing.PyIID +SID_MenuShellFolder: _win32typing.PyIID +SID_SCommDlgBrowser: _win32typing.PyIID +SID_SGetViewFromViewDual: _win32typing.PyIID +SID_SInternetExplorer: _win32typing.PyIID +SID_SMenuBandBKContextMenu: _win32typing.PyIID +SID_SMenuBandBottom: _win32typing.PyIID +SID_SMenuBandBottomSelected: _win32typing.PyIID +SID_SMenuBandChild: _win32typing.PyIID +SID_SMenuBandContextMenuModifier: _win32typing.PyIID +SID_SMenuBandParent: _win32typing.PyIID +SID_SMenuBandTop: _win32typing.PyIID +SID_SMenuPopup: _win32typing.PyIID +SID_SProgressUI: _win32typing.PyIID +SID_SShellBrowser: _win32typing.PyIID +SID_SShellDesktop: _win32typing.PyIID +SID_STopLevelBrowser: _win32typing.PyIID +SID_STopWindow: _win32typing.PyIID +SID_SUrlHistory: _win32typing.PyIID +SID_SWebBrowserApp: _win32typing.PyIID +SID_ShellFolderViewCB: _win32typing.PyIID +SLGP_RAWPATH: int +SLGP_SHORTPATH: int +SLGP_UNCPRIORITY: int +SLR_ANY_MATCH: int +SLR_INVOKE_MSI: int +SLR_NOLINKINFO: int +SLR_NOSEARCH: int +SLR_NOTRACK: int +SLR_NOUPDATE: int +SLR_NO_UI: int +SLR_UPDATE: int +VID_Details: _win32typing.PyIID +VID_LargeIcons: _win32typing.PyIID +VID_List: _win32typing.PyIID +VID_SmallIcons: _win32typing.PyIID +VID_ThumbStrip: _win32typing.PyIID +VID_Thumbnails: _win32typing.PyIID +VID_Tile: _win32typing.PyIID + +def SHGetKnownFolderPath(*args, **kwargs): ... 
# incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/shell/shellcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/shell/shellcon.pyi new file mode 100644 index 00000000..88e92081 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/shell/shellcon.pyi @@ -0,0 +1,1399 @@ +from _typeshed import Incomplete + +WM_USER: int +DROPEFFECT_NONE: int +DROPEFFECT_COPY: int +DROPEFFECT_MOVE: int +DROPEFFECT_LINK: int +DROPEFFECT_SCROLL: int +FO_MOVE: int +FO_COPY: int +FO_DELETE: int +FO_RENAME: int +FOF_MULTIDESTFILES: int +FOF_CONFIRMMOUSE: int +FOF_SILENT: int +FOF_RENAMEONCOLLISION: int +FOF_NOCONFIRMATION: int +FOF_WANTMAPPINGHANDLE: int +FOF_ALLOWUNDO: int +FOF_FILESONLY: int +FOF_SIMPLEPROGRESS: int +FOF_NOCONFIRMMKDIR: int +FOF_NOERRORUI: int +FOF_NOCOPYSECURITYATTRIBS: int +FOF_NORECURSION: int +FOF_NO_CONNECTED_ELEMENTS: int +FOF_WANTNUKEWARNING: int +FOF_NORECURSEREPARSE: int +FOF_NO_UI: Incomplete +FOFX_NOSKIPJUNCTIONS: int +FOFX_PREFERHARDLINK: int +FOFX_SHOWELEVATIONPROMPT: int +FOFX_EARLYFAILURE: int +FOFX_PRESERVEFILEEXTENSIONS: int +FOFX_KEEPNEWERFILE: int +FOFX_NOCOPYHOOKS: int +FOFX_NOMINIMIZEBOX: int +FOFX_MOVEACLSACROSSVOLUMES: int +FOFX_DONTDISPLAYSOURCEPATH: int +FOFX_DONTDISPLAYDESTPATH: int +FOFX_REQUIREELEVATION: int +FOFX_COPYASDOWNLOAD: int +FOFX_DONTDISPLAYLOCATIONS: int +PO_DELETE: int +PO_RENAME: int +PO_PORTCHANGE: int +PO_REN_PORT: int +SE_ERR_FNF: int +SE_ERR_PNF: int +SE_ERR_ACCESSDENIED: int +SE_ERR_OOM: int +SE_ERR_DLLNOTFOUND: int +SE_ERR_SHARE: int +SE_ERR_ASSOCINCOMPLETE: int +SE_ERR_DDETIMEOUT: int +SE_ERR_DDEFAIL: int +SE_ERR_DDEBUSY: int +SE_ERR_NOASSOC: int +SEE_MASK_CLASSNAME: int +SEE_MASK_CLASSKEY: int +SEE_MASK_IDLIST: int +SEE_MASK_INVOKEIDLIST: int +SEE_MASK_ICON: int +SEE_MASK_HOTKEY: int +SEE_MASK_NOCLOSEPROCESS: int +SEE_MASK_CONNECTNETDRV: int +SEE_MASK_FLAG_DDEWAIT: int +SEE_MASK_DOENVSUBST: int +SEE_MASK_FLAG_NO_UI: int +SEE_MASK_UNICODE: int +SEE_MASK_NO_CONSOLE: int +SEE_MASK_ASYNCOK: int +SEE_MASK_HMONITOR: int +SHERB_NOCONFIRMATION: int +SHERB_NOPROGRESSUI: int +SHERB_NOSOUND: int +NIM_ADD: int +NIM_MODIFY: int +NIM_DELETE: int +NIF_MESSAGE: int +NIF_ICON: int +NIF_TIP: int +SHGFI_ICON: int +SHGFI_DISPLAYNAME: int +SHGFI_TYPENAME: int +SHGFI_ATTRIBUTES: int +SHGFI_ICONLOCATION: int +SHGFI_EXETYPE: int +SHGFI_SYSICONINDEX: int +SHGFI_LINKOVERLAY: int +SHGFI_SELECTED: int +SHGFI_ATTR_SPECIFIED: int +SHGFI_LARGEICON: int +SHGFI_SMALLICON: int +SHGFI_OPENICON: int +SHGFI_SHELLICONSIZE: int +SHGFI_PIDL: int +SHGFI_USEFILEATTRIBUTES: int +SHGNLI_PIDL: int +SHGNLI_PREFIXNAME: int +SHGNLI_NOUNIQUE: int +PRINTACTION_OPEN: int +PRINTACTION_PROPERTIES: int +PRINTACTION_NETINSTALL: int +PRINTACTION_NETINSTALLLINK: int +PRINTACTION_TESTPAGE: int +PRINTACTION_OPENNETPRN: int +PRINTACTION_DOCUMENTDEFAULTS: int +PRINTACTION_SERVERPROPERTIES: int +CMF_NORMAL: int +CMF_DEFAULTONLY: int +CMF_VERBSONLY: int +CMF_EXPLORE: int +CMF_NOVERBS: int +CMF_CANRENAME: int +CMF_NODEFAULT: int +CMF_INCLUDESTATIC: int +CMF_ITEMMENU: int +CMF_EXTENDEDVERBS: int +CMF_DISABLEDVERBS: int +CMF_ASYNCVERBSTATE: int +CMF_OPTIMIZEFORINVOKE: int +CMF_SYNCCASCADEMENU: int +CMF_DONOTPICKDEFAULT: int +CMF_RESERVED: int +GCS_VERBA: int +GCS_HELPTEXTA: int +GCS_VALIDATEA: int +GCS_VERBW: int +GCS_HELPTEXTW: int +GCS_VALIDATEW: int +GCS_UNICODE: int +GCS_VERB: int +GCS_HELPTEXT: int +GCS_VALIDATE: int 
+CMDSTR_NEWFOLDERA: str +CMDSTR_VIEWLISTA: str +CMDSTR_VIEWDETAILSA: str +CMDSTR_NEWFOLDER: str +CMDSTR_VIEWLIST: str +CMDSTR_VIEWDETAILS: str +CMIC_MASK_HOTKEY: int +CMIC_MASK_ICON: int +CMIC_MASK_FLAG_NO_UI: int +CMIC_MASK_UNICODE: int +CMIC_MASK_NO_CONSOLE: int +CMIC_MASK_ASYNCOK: int +CMIC_MASK_PTINVOKE: int +GIL_OPENICON: int +GIL_FORSHELL: int +GIL_ASYNC: int +GIL_DEFAULTICON: int +GIL_FORSHORTCUT: int +GIL_CHECKSHIELD: int +GIL_SIMULATEDOC: int +GIL_PERINSTANCE: int +GIL_PERCLASS: int +GIL_NOTFILENAME: int +GIL_DONTCACHE: int +GIL_SHIELD: int +GIL_FORCENOSHIELD: int +ISIOI_ICONFILE: int +ISIOI_ICONINDEX: int +ISIOI_SYSIMAGELISTINDEX: int +FVSIF_RECT: int +FVSIF_PINNED: int +FVSIF_NEWFAILED: int +FVSIF_NEWFILE: int +FVSIF_CANVIEWIT: int +FCIDM_SHVIEWFIRST: int +FCIDM_SHVIEWLAST: int +FCIDM_BROWSERFIRST: int +FCIDM_BROWSERLAST: int +FCIDM_GLOBALFIRST: int +FCIDM_GLOBALLAST: int +FCIDM_MENU_FILE: Incomplete +FCIDM_MENU_EDIT: Incomplete +FCIDM_MENU_VIEW: Incomplete +FCIDM_MENU_VIEW_SEP_OPTIONS: Incomplete +FCIDM_MENU_TOOLS: Incomplete +FCIDM_MENU_TOOLS_SEP_GOTO: Incomplete +FCIDM_MENU_HELP: Incomplete +FCIDM_MENU_FIND: Incomplete +FCIDM_MENU_EXPLORE: Incomplete +FCIDM_MENU_FAVORITES: Incomplete +FCIDM_TOOLBAR: Incomplete +FCIDM_STATUS: Incomplete +IDC_OFFLINE_HAND: int +SBSP_DEFBROWSER: int +SBSP_SAMEBROWSER: int +SBSP_NEWBROWSER: int +SBSP_DEFMODE: int +SBSP_OPENMODE: int +SBSP_EXPLOREMODE: int +SBSP_ABSOLUTE: int +SBSP_RELATIVE: int +SBSP_PARENT: int +SBSP_NAVIGATEBACK: int +SBSP_NAVIGATEFORWARD: int +SBSP_ALLOW_AUTONAVIGATE: int +SBSP_INITIATEDBYHLINKFRAME: int +SBSP_REDIRECT: int +SBSP_WRITENOHISTORY: int +SBSP_NOAUTOSELECT: int +FCW_STATUS: int +FCW_TOOLBAR: int +FCW_TREE: int +FCW_INTERNETBAR: int +FCW_PROGRESS: int +FCT_MERGE: int +FCT_CONFIGABLE: int +FCT_ADDTOEND: int +CDBOSC_SETFOCUS: int +CDBOSC_KILLFOCUS: int +CDBOSC_SELCHANGE: int +CDBOSC_RENAME: int +SVSI_DESELECT: int +SVSI_SELECT: int +SVSI_EDIT: int +SVSI_DESELECTOTHERS: int +SVSI_ENSUREVISIBLE: int +SVSI_FOCUSED: int +SVSI_TRANSLATEPT: int +SVGIO_BACKGROUND: int +SVGIO_SELECTION: int +SVGIO_ALLVIEW: int +SVGIO_CHECKED: Incomplete +SVGIO_TYPE_MASK: Incomplete +SVGIO_FLAG_VIEWORDER: int +STRRET_WSTR: int +STRRET_OFFSET: int +STRRET_CSTR: int +CSIDL_DESKTOP: int +CSIDL_INTERNET: int +CSIDL_PROGRAMS: int +CSIDL_CONTROLS: int +CSIDL_PRINTERS: int +CSIDL_PERSONAL: int +CSIDL_FAVORITES: int +CSIDL_STARTUP: int +CSIDL_RECENT: int +CSIDL_SENDTO: int +CSIDL_BITBUCKET: int +CSIDL_STARTMENU: int +CSIDL_MYDOCUMENTS: int +CSIDL_MYMUSIC: int +CSIDL_MYVIDEO: int +CSIDL_DESKTOPDIRECTORY: int +CSIDL_DRIVES: int +CSIDL_NETWORK: int +CSIDL_NETHOOD: int +CSIDL_FONTS: int +CSIDL_TEMPLATES: int +CSIDL_COMMON_STARTMENU: int +CSIDL_COMMON_PROGRAMS: int +CSIDL_COMMON_STARTUP: int +CSIDL_COMMON_DESKTOPDIRECTORY: int +CSIDL_APPDATA: int +CSIDL_PRINTHOOD: int +CSIDL_LOCAL_APPDATA: int +CSIDL_ALTSTARTUP: int +CSIDL_COMMON_ALTSTARTUP: int +CSIDL_COMMON_FAVORITES: int +CSIDL_INTERNET_CACHE: int +CSIDL_COOKIES: int +CSIDL_HISTORY: int +CSIDL_COMMON_APPDATA: int +CSIDL_WINDOWS: int +CSIDL_SYSTEM: int +CSIDL_PROGRAM_FILES: int +CSIDL_MYPICTURES: int +CSIDL_PROFILE: int +CSIDL_SYSTEMX86: int +CSIDL_PROGRAM_FILESX86: int +CSIDL_PROGRAM_FILES_COMMON: int +CSIDL_PROGRAM_FILES_COMMONX86: int +CSIDL_COMMON_TEMPLATES: int +CSIDL_COMMON_DOCUMENTS: int +CSIDL_COMMON_ADMINTOOLS: int +CSIDL_ADMINTOOLS: int +CSIDL_CONNECTIONS: int +CSIDL_COMMON_MUSIC: int +CSIDL_COMMON_PICTURES: int +CSIDL_COMMON_VIDEO: int +CSIDL_RESOURCES: int +CSIDL_RESOURCES_LOCALIZED: int 
+CSIDL_COMMON_OEM_LINKS: int +CSIDL_CDBURN_AREA: int +CSIDL_COMPUTERSNEARME: int +BIF_RETURNONLYFSDIRS: int +BIF_DONTGOBELOWDOMAIN: int +BIF_STATUSTEXT: int +BIF_RETURNFSANCESTORS: int +BIF_EDITBOX: int +BIF_VALIDATE: int +BIF_BROWSEFORCOMPUTER: int +BIF_BROWSEFORPRINTER: int +BIF_BROWSEINCLUDEFILES: int +BFFM_INITIALIZED: int +BFFM_SELCHANGED: int +BFFM_VALIDATEFAILEDA: int +BFFM_VALIDATEFAILEDW: int +BFFM_SETSTATUSTEXTA: Incomplete +BFFM_ENABLEOK: Incomplete +BFFM_SETSELECTIONA: Incomplete +BFFM_SETSELECTIONW: Incomplete +BFFM_SETSTATUSTEXTW: Incomplete +BFFM_SETSTATUSTEXT: Incomplete +BFFM_SETSELECTION: Incomplete +BFFM_VALIDATEFAILED: int +SFGAO_CANCOPY: int +SFGAO_CANMOVE: int +SFGAO_CANLINK: int +SFGAO_CANRENAME: int +SFGAO_CANDELETE: int +SFGAO_HASPROPSHEET: int +SFGAO_DROPTARGET: int +SFGAO_CAPABILITYMASK: int +SFGAO_LINK: int +SFGAO_SHARE: int +SFGAO_READONLY: int +SFGAO_GHOSTED: int +SFGAO_HIDDEN: int +SFGAO_DISPLAYATTRMASK: int +SFGAO_FILESYSANCESTOR: int +SFGAO_FOLDER: int +SFGAO_FILESYSTEM: int +SFGAO_HASSUBFOLDER: int +SFGAO_CONTENTSMASK: int +SFGAO_VALIDATE: int +SFGAO_REMOVABLE: int +SFGAO_COMPRESSED: int +SFGAO_BROWSABLE: int +SFGAO_NONENUMERATED: int +SFGAO_NEWCONTENT: int +SFGAO_STORAGE: int +DWFRF_NORMAL: int +DWFRF_DELETECONFIGDATA: int +DWFAF_HIDDEN: int +DBIM_MINSIZE: int +DBIM_MAXSIZE: int +DBIM_INTEGRAL: int +DBIM_ACTUAL: int +DBIM_TITLE: int +DBIM_MODEFLAGS: int +DBIM_BKCOLOR: int +DBIMF_NORMAL: int +DBIMF_VARIABLEHEIGHT: int +DBIMF_DEBOSSED: int +DBIMF_BKCOLOR: int +DBIF_VIEWMODE_NORMAL: int +DBIF_VIEWMODE_VERTICAL: int +DBIF_VIEWMODE_FLOATING: int +DBIF_VIEWMODE_TRANSPARENT: int +COMPONENT_TOP: int +COMP_TYPE_HTMLDOC: int +COMP_TYPE_PICTURE: int +COMP_TYPE_WEBSITE: int +COMP_TYPE_CONTROL: int +COMP_TYPE_CFHTML: int +COMP_TYPE_MAX: int +AD_APPLY_SAVE: int +AD_APPLY_HTMLGEN: int +AD_APPLY_REFRESH: int +AD_APPLY_ALL: Incomplete +AD_APPLY_FORCE: int +AD_APPLY_BUFFERED_REFRESH: int +WPSTYLE_CENTER: int +WPSTYLE_TILE: int +WPSTYLE_STRETCH: int +WPSTYLE_MAX: int +COMP_ELEM_TYPE: int +COMP_ELEM_CHECKED: int +COMP_ELEM_DIRTY: int +COMP_ELEM_NOSCROLL: int +COMP_ELEM_POS_LEFT: int +COMP_ELEM_POS_TOP: int +COMP_ELEM_SIZE_WIDTH: int +COMP_ELEM_SIZE_HEIGHT: int +COMP_ELEM_POS_ZINDEX: int +COMP_ELEM_SOURCE: int +COMP_ELEM_FRIENDLYNAME: int +COMP_ELEM_SUBSCRIBEDURL: int +ADDURL_SILENT: int +CFSTR_SHELLIDLIST: str +CFSTR_SHELLIDLISTOFFSET: str +CFSTR_NETRESOURCES: str +CFSTR_FILEDESCRIPTORA: str +CFSTR_FILEDESCRIPTORW: str +CFSTR_FILECONTENTS: str +CFSTR_FILENAMEA: str +CFSTR_FILENAMEW: str +CFSTR_PRINTERGROUP: str +CFSTR_FILENAMEMAPA: str +CFSTR_FILENAMEMAPW: str +CFSTR_SHELLURL: str +CFSTR_INETURLA: str +CFSTR_INETURLW: str +CFSTR_PREFERREDDROPEFFECT: str +CFSTR_PERFORMEDDROPEFFECT: str +CFSTR_PASTESUCCEEDED: str +CFSTR_INDRAGLOOP: str +CFSTR_DRAGCONTEXT: str +CFSTR_MOUNTEDVOLUME: str +CFSTR_PERSISTEDDATAOBJECT: str +CFSTR_TARGETCLSID: str +CFSTR_LOGICALPERFORMEDDROPEFFECT: str +CFSTR_AUTOPLAY_SHELLIDLISTS: str +CFSTR_FILEDESCRIPTOR: str +CFSTR_FILENAME: str +CFSTR_FILENAMEMAP: str +DVASPECT_SHORTNAME: int +SHCNE_RENAMEITEM: int +SHCNE_CREATE: int +SHCNE_DELETE: int +SHCNE_MKDIR: int +SHCNE_RMDIR: int +SHCNE_MEDIAINSERTED: int +SHCNE_MEDIAREMOVED: int +SHCNE_DRIVEREMOVED: int +SHCNE_DRIVEADD: int +SHCNE_NETSHARE: int +SHCNE_NETUNSHARE: int +SHCNE_ATTRIBUTES: int +SHCNE_UPDATEDIR: int +SHCNE_UPDATEITEM: int +SHCNE_SERVERDISCONNECT: int +SHCNE_UPDATEIMAGE: int +SHCNE_DRIVEADDGUI: int +SHCNE_RENAMEFOLDER: int +SHCNE_FREESPACE: int +SHCNE_EXTENDED_EVENT: int +SHCNE_ASSOCCHANGED: 
int +SHCNE_DISKEVENTS: int +SHCNE_GLOBALEVENTS: int +SHCNE_ALLEVENTS: int +SHCNE_INTERRUPT: int +SHCNEE_ORDERCHANGED: int +SHCNF_IDLIST: int +SHCNF_PATHA: int +SHCNF_PRINTERA: int +SHCNF_DWORD: int +SHCNF_PATHW: int +SHCNF_PRINTERW: int +SHCNF_TYPE: int +SHCNF_FLUSH: int +SHCNF_FLUSHNOWAIT: int +SHCNF_PATH: int +SHCNF_PRINTER: int +QIF_CACHED: int +QIF_DONTEXPANDFOLDER: int +SHARD_PIDL: int +SHARD_PATHA: int +SHARD_PATHW: int +SHARD_APPIDINFO: int +SHARD_APPIDINFOIDLIST: int +SHARD_LINK: int +SHARD_APPIDINFOLINK: int +SHARD_SHELLITEM: int +SHARD_PATH: int +SHGDFIL_FINDDATA: int +SHGDFIL_NETRESOURCE: int +SHGDFIL_DESCRIPTIONID: int +SHDID_ROOT_REGITEM: int +SHDID_FS_FILE: int +SHDID_FS_DIRECTORY: int +SHDID_FS_OTHER: int +SHDID_COMPUTER_DRIVE35: int +SHDID_COMPUTER_DRIVE525: int +SHDID_COMPUTER_REMOVABLE: int +SHDID_COMPUTER_FIXED: int +SHDID_COMPUTER_NETDRIVE: int +SHDID_COMPUTER_CDROM: int +SHDID_COMPUTER_RAMDISK: int +SHDID_COMPUTER_OTHER: int +SHDID_NET_DOMAIN: int +SHDID_NET_SERVER: int +SHDID_NET_SHARE: int +SHDID_NET_RESTOFNET: int +SHDID_NET_OTHER: int +PID_IS_URL: int +PID_IS_NAME: int +PID_IS_WORKINGDIR: int +PID_IS_HOTKEY: int +PID_IS_SHOWCMD: int +PID_IS_ICONINDEX: int +PID_IS_ICONFILE: int +PID_IS_WHATSNEW: int +PID_IS_AUTHOR: int +PID_IS_DESCRIPTION: int +PID_IS_COMMENT: int +PID_INTSITE_WHATSNEW: int +PID_INTSITE_AUTHOR: int +PID_INTSITE_LASTVISIT: int +PID_INTSITE_LASTMOD: int +PID_INTSITE_VISITCOUNT: int +PID_INTSITE_DESCRIPTION: int +PID_INTSITE_COMMENT: int +PID_INTSITE_FLAGS: int +PID_INTSITE_CONTENTLEN: int +PID_INTSITE_CONTENTCODE: int +PID_INTSITE_RECURSE: int +PID_INTSITE_WATCH: int +PID_INTSITE_SUBSCRIPTION: int +PID_INTSITE_URL: int +PID_INTSITE_TITLE: int +PID_INTSITE_CODEPAGE: int +PID_INTSITE_TRACKING: int +PIDISF_RECENTLYCHANGED: int +PIDISF_CACHEDSTICKY: int +PIDISF_CACHEIMAGES: int +PIDISF_FOLLOWALLLINKS: int +PIDISM_GLOBAL: int +PIDISM_WATCH: int +PIDISM_DONTWATCH: int +SSF_SHOWALLOBJECTS: int +SSF_SHOWEXTENSIONS: int +SSF_SHOWCOMPCOLOR: int +SSF_SHOWSYSFILES: int +SSF_DOUBLECLICKINWEBVIEW: int +SSF_SHOWATTRIBCOL: int +SSF_DESKTOPHTML: int +SSF_WIN95CLASSIC: int +SSF_DONTPRETTYPATH: int +SSF_SHOWINFOTIP: int +SSF_MAPNETDRVBUTTON: int +SSF_NOCONFIRMRECYCLE: int +SSF_HIDEICONS: int +ABM_NEW: int +ABM_REMOVE: int +ABM_QUERYPOS: int +ABM_SETPOS: int +ABM_GETSTATE: int +ABM_GETTASKBARPOS: int +ABM_ACTIVATE: int +ABM_GETAUTOHIDEBAR: int +ABM_SETAUTOHIDEBAR: int +ABM_WINDOWPOSCHANGED: int +ABN_STATECHANGE: int +ABN_POSCHANGED: int +ABN_FULLSCREENAPP: int +ABN_WINDOWARRANGE: int +ABS_AUTOHIDE: int +ABS_ALWAYSONTOP: int +ABE_LEFT: int +ABE_TOP: int +ABE_RIGHT: int +ABE_BOTTOM: int + +def EIRESID(x): ... 
+ +SHCONTF_FOLDERS: int +SHCONTF_NONFOLDERS: int +SHCONTF_INCLUDEHIDDEN: int +SHCONTF_INIT_ON_FIRST_NEXT: int +SHCONTF_NETPRINTERSRCH: int +SHCONTF_SHAREABLE: int +SHCONTF_STORAGE: int +SHGDN_NORMAL: int +SHGDN_INFOLDER: int +SHGDN_FOREDITING: int +SHGDN_INCLUDE_NONFILESYS: int +SHGDN_FORADDRESSBAR: int +SHGDN_FORPARSING: int +BFO_NONE: int +BFO_BROWSER_PERSIST_SETTINGS: int +BFO_RENAME_FOLDER_OPTIONS_TOINTERNET: int +BFO_BOTH_OPTIONS: int +BIF_PREFER_INTERNET_SHORTCUT: int +BFO_BROWSE_NO_IN_NEW_PROCESS: int +BFO_ENABLE_HYPERLINK_TRACKING: int +BFO_USE_IE_OFFLINE_SUPPORT: int +BFO_SUBSTITUE_INTERNET_START_PAGE: int +BFO_USE_IE_LOGOBANDING: int +BFO_ADD_IE_TOCAPTIONBAR: int +BFO_USE_DIALUP_REF: int +BFO_USE_IE_TOOLBAR: int +BFO_NO_PARENT_FOLDER_SUPPORT: int +BFO_NO_REOPEN_NEXT_RESTART: int +BFO_GO_HOME_PAGE: int +BFO_PREFER_IEPROCESS: int +BFO_SHOW_NAVIGATION_CANCELLED: int +BFO_QUERY_ALL: int +PID_FINDDATA: int +PID_NETRESOURCE: int +PID_DESCRIPTIONID: int +PID_WHICHFOLDER: int +PID_NETWORKLOCATION: int +PID_COMPUTERNAME: int +PID_DISPLACED_FROM: int +PID_DISPLACED_DATE: int +PID_SYNC_COPY_IN: int +PID_MISC_STATUS: int +PID_MISC_ACCESSCOUNT: int +PID_MISC_OWNER: int +PID_HTMLINFOTIPFILE: int +PID_MISC_PICS: int +PID_DISPLAY_PROPERTIES: int +PID_INTROTEXT: int +PIDSI_ARTIST: int +PIDSI_SONGTITLE: int +PIDSI_ALBUM: int +PIDSI_YEAR: int +PIDSI_COMMENT: int +PIDSI_TRACK: int +PIDSI_GENRE: int +PIDSI_LYRICS: int +PIDDRSI_PROTECTED: int +PIDDRSI_DESCRIPTION: int +PIDDRSI_PLAYCOUNT: int +PIDDRSI_PLAYSTARTS: int +PIDDRSI_PLAYEXPIRES: int +PIDVSI_STREAM_NAME: int +PIDVSI_FRAME_WIDTH: int +PIDVSI_FRAME_HEIGHT: int +PIDVSI_TIMELENGTH: int +PIDVSI_FRAME_COUNT: int +PIDVSI_FRAME_RATE: int +PIDVSI_DATA_RATE: int +PIDVSI_SAMPLE_SIZE: int +PIDVSI_COMPRESSION: int +PIDVSI_STREAM_NUMBER: int +PIDASI_FORMAT: int +PIDASI_TIMELENGTH: int +PIDASI_AVG_DATA_RATE: int +PIDASI_SAMPLE_RATE: int +PIDASI_SAMPLE_SIZE: int +PIDASI_CHANNEL_COUNT: int +PIDASI_STREAM_NUMBER: int +PIDASI_STREAM_NAME: int +PIDASI_COMPRESSION: int +PID_CONTROLPANEL_CATEGORY: int +PID_VOLUME_FREE: int +PID_VOLUME_CAPACITY: int +PID_VOLUME_FILESYSTEM: int +PID_SHARE_CSC_STATUS: int +PID_LINK_TARGET: int +PID_QUERY_RANK: int +PROPSETFLAG_DEFAULT: int +PROPSETFLAG_NONSIMPLE: int +PROPSETFLAG_ANSI: int +PROPSETFLAG_UNBUFFERED: int +PROPSETFLAG_CASE_SENSITIVE: int +PROPSET_BEHAVIOR_CASE_SENSITIVE: int +PID_DICTIONARY: int +PID_CODEPAGE: int +PID_FIRST_USABLE: int +PID_FIRST_NAME_DEFAULT: int +PID_LOCALE: int +PID_MODIFY_TIME: int +PID_SECURITY: int +PID_BEHAVIOR: int +PID_ILLEGAL: int +PID_MIN_READONLY: int +PID_MAX_READONLY: int +PIDDI_THUMBNAIL: int +PIDSI_TITLE: int +PIDSI_SUBJECT: int +PIDSI_AUTHOR: int +PIDSI_KEYWORDS: int +PIDSI_COMMENTS: int +PIDSI_TEMPLATE: int +PIDSI_LASTAUTHOR: int +PIDSI_REVNUMBER: int +PIDSI_EDITTIME: int +PIDSI_LASTPRINTED: int +PIDSI_CREATE_DTM: int +PIDSI_LASTSAVE_DTM: int +PIDSI_PAGECOUNT: int +PIDSI_WORDCOUNT: int +PIDSI_CHARCOUNT: int +PIDSI_THUMBNAIL: int +PIDSI_APPNAME: int +PIDSI_DOC_SECURITY: int +PIDDSI_CATEGORY: int +PIDDSI_PRESFORMAT: int +PIDDSI_BYTECOUNT: int +PIDDSI_LINECOUNT: int +PIDDSI_PARCOUNT: int +PIDDSI_SLIDECOUNT: int +PIDDSI_NOTECOUNT: int +PIDDSI_HIDDENCOUNT: int +PIDDSI_MMCLIPCOUNT: int +PIDDSI_SCALE: int +PIDDSI_HEADINGPAIR: int +PIDDSI_DOCPARTS: int +PIDDSI_MANAGER: int +PIDDSI_COMPANY: int +PIDDSI_LINKSDIRTY: int +PIDMSI_EDITOR: int +PIDMSI_SUPPLIER: int +PIDMSI_SOURCE: int +PIDMSI_SEQUENCE_NO: int +PIDMSI_PROJECT: int +PIDMSI_STATUS: int +PIDMSI_OWNER: int +PIDMSI_RATING: int 
+PIDMSI_PRODUCTION: int +PIDMSI_COPYRIGHT: int +PRSPEC_INVALID: int +PRSPEC_LPWSTR: int +PRSPEC_PROPID: int +SHCIDS_ALLFIELDS: int +SHCIDS_CANONICALONLY: int +SHCIDS_BITMASK: int +SHCIDS_COLUMNMASK: int +SFGAO_CANMONIKER: int +SFGAO_HASSTORAGE: int +SFGAO_STREAM: int +SFGAO_STORAGEANCESTOR: int +SFGAO_STORAGECAPMASK: int +MAXPROPPAGES: int +PSP_DEFAULT: int +PSP_DLGINDIRECT: int +PSP_USEHICON: int +PSP_USEICONID: int +PSP_USETITLE: int +PSP_RTLREADING: int +PSP_HASHELP: int +PSP_USEREFPARENT: int +PSP_USECALLBACK: int +PSP_PREMATURE: int +PSP_HIDEHEADER: int +PSP_USEHEADERTITLE: int +PSP_USEHEADERSUBTITLE: int +PSP_USEFUSIONCONTEXT: int +PSPCB_ADDREF: int +PSPCB_RELEASE: int +PSPCB_CREATE: int +PSH_DEFAULT: int +PSH_PROPTITLE: int +PSH_USEHICON: int +PSH_USEICONID: int +PSH_PROPSHEETPAGE: int +PSH_WIZARDHASFINISH: int +PSH_WIZARD: int +PSH_USEPSTARTPAGE: int +PSH_NOAPPLYNOW: int +PSH_USECALLBACK: int +PSH_HASHELP: int +PSH_MODELESS: int +PSH_RTLREADING: int +PSH_WIZARDCONTEXTHELP: int +PSH_WIZARD97: int +PSH_WATERMARK: int +PSH_USEHBMWATERMARK: int +PSH_USEHPLWATERMARK: int +PSH_STRETCHWATERMARK: int +PSH_HEADER: int +PSH_USEHBMHEADER: int +PSH_USEPAGELANG: int +PSH_WIZARD_LITE: int +PSH_NOCONTEXTHELP: int +PSCB_INITIALIZED: int +PSCB_PRECREATE: int +PSCB_BUTTONPRESSED: int +PSNRET_NOERROR: int +PSNRET_INVALID: int +PSNRET_INVALID_NOCHANGEPAGE: int +PSNRET_MESSAGEHANDLED: int +PSWIZB_BACK: int +PSWIZB_NEXT: int +PSWIZB_FINISH: int +PSWIZB_DISABLEDFINISH: int +PSBTN_BACK: int +PSBTN_NEXT: int +PSBTN_FINISH: int +PSBTN_OK: int +PSBTN_APPLYNOW: int +PSBTN_CANCEL: int +PSBTN_HELP: int +PSBTN_MAX: int +ID_PSRESTARTWINDOWS: int +ID_PSREBOOTSYSTEM: Incomplete +WIZ_CXDLG: int +WIZ_CYDLG: int +WIZ_CXBMP: int +WIZ_BODYX: int +WIZ_BODYCX: int +PROP_SM_CXDLG: int +PROP_SM_CYDLG: int +PROP_MED_CXDLG: int +PROP_MED_CYDLG: int +PROP_LG_CXDLG: int +PROP_LG_CYDLG: int +ISOLATION_AWARE_USE_STATIC_LIBRARY: int +ISOLATION_AWARE_BUILD_STATIC_LIBRARY: int +SHCOLSTATE_TYPE_STR: int +SHCOLSTATE_TYPE_INT: int +SHCOLSTATE_TYPE_DATE: int +SHCOLSTATE_TYPEMASK: int +SHCOLSTATE_ONBYDEFAULT: int +SHCOLSTATE_SLOW: int +SHCOLSTATE_EXTENDED: int +SHCOLSTATE_SECONDARYUI: int +SHCOLSTATE_HIDDEN: int +SHCOLSTATE_PREFER_VARCMP: int +FWF_AUTOARRANGE: int +FWF_ABBREVIATEDNAMES: int +FWF_SNAPTOGRID: int +FWF_OWNERDATA: int +FWF_BESTFITWINDOW: int +FWF_DESKTOP: int +FWF_SINGLESEL: int +FWF_NOSUBFOLDERS: int +FWF_TRANSPARENT: int +FWF_NOCLIENTEDGE: int +FWF_NOSCROLL: int +FWF_ALIGNLEFT: int +FWF_NOICONS: int +FWF_SHOWSELALWAYS: int +FWF_NOVISIBLE: int +FWF_SINGLECLICKACTIVATE: int +FWF_NOWEBVIEW: int +FWF_HIDEFILENAMES: int +FWF_CHECKSELECT: int +FVM_FIRST: int +FVM_ICON: int +FVM_SMALLICON: int +FVM_LIST: int +FVM_DETAILS: int +FVM_THUMBNAIL: int +FVM_TILE: int +FVM_THUMBSTRIP: int +SVUIA_DEACTIVATE: int +SVUIA_ACTIVATE_NOFOCUS: int +SVUIA_ACTIVATE_FOCUS: int +SVUIA_INPLACEACTIVATE: int +SHCNRF_InterruptLevel: int +SHCNRF_ShellLevel: int +SHCNRF_RecursiveInterrupt: int +SHCNRF_NewDelivery: int +FD_CLSID: int +FD_SIZEPOINT: int +FD_ATTRIBUTES: int +FD_CREATETIME: int +FD_ACCESSTIME: int +FD_WRITESTIME: int +FD_FILESIZE: int +FD_PROGRESSUI: int +FD_LINKUI: int +ASSOCF_INIT_NOREMAPCLSID: int +ASSOCF_INIT_BYEXENAME: int +ASSOCF_OPEN_BYEXENAME: int +ASSOCF_INIT_DEFAULTTOSTAR: int +ASSOCF_INIT_DEFAULTTOFOLDER: int +ASSOCF_NOUSERSETTINGS: int +ASSOCF_NOTRUNCATE: int +ASSOCF_VERIFY: int +ASSOCF_REMAPRUNDLL: int +ASSOCF_NOFIXUPS: int +ASSOCF_IGNOREBASECLASS: int +ASSOCSTR_COMMAND: int +ASSOCSTR_EXECUTABLE: int +ASSOCSTR_FRIENDLYDOCNAME: 
int +ASSOCSTR_FRIENDLYAPPNAME: int +ASSOCSTR_NOOPEN: int +ASSOCSTR_SHELLNEWVALUE: int +ASSOCSTR_DDECOMMAND: int +ASSOCSTR_DDEIFEXEC: int +ASSOCSTR_DDEAPPLICATION: int +ASSOCSTR_DDETOPIC: int +ASSOCSTR_INFOTIP: int +ASSOCSTR_QUICKTIP: int +ASSOCSTR_TILEINFO: int +ASSOCSTR_CONTENTTYPE: int +ASSOCSTR_DEFAULTICON: int +ASSOCSTR_SHELLEXTENSION: int +ASSOCKEY_SHELLEXECCLASS: int +ASSOCKEY_APP: int +ASSOCKEY_CLASS: int +ASSOCKEY_BASECLASS: int +ASSOCDATA_MSIDESCRIPTOR: int +ASSOCDATA_NOACTIVATEHANDLER: int +ASSOCDATA_QUERYCLASSSTORE: int +ASSOCDATA_HASPERUSERASSOC: int +ASSOCDATA_EDITFLAGS: int +ASSOCDATA_VALUE: int +SHGVSPB_PERUSER: int +SHGVSPB_ALLUSERS: int +SHGVSPB_PERFOLDER: int +SHGVSPB_ALLFOLDERS: int +SHGVSPB_INHERIT: int +SHGVSPB_ROAM: int +SHGVSPB_NOAUTODEFAULTS: int +SHGVSPB_FOLDER: Incomplete +SHGVSPB_FOLDERNODEFAULTS: Incomplete +SHGVSPB_USERDEFAULTS: Incomplete +SHGVSPB_GLOBALDEAFAULTS: Incomplete +SFVM_REARRANGE: int +SFVM_ADDOBJECT: int +SFVM_REMOVEOBJECT: int +SFVM_UPDATEOBJECT: int +SFVM_GETSELECTEDOBJECTS: int +SFVM_SETITEMPOS: int +SFVM_SETCLIPBOARD: int +SFVM_SETPOINTS: int +SLDF_HAS_ID_LIST: int +SLDF_HAS_LINK_INFO: int +SLDF_HAS_NAME: int +SLDF_HAS_RELPATH: int +SLDF_HAS_WORKINGDIR: int +SLDF_HAS_ARGS: int +SLDF_HAS_ICONLOCATION: int +SLDF_UNICODE: int +SLDF_FORCE_NO_LINKINFO: int +SLDF_HAS_EXP_SZ: int +SLDF_RUN_IN_SEPARATE: int +SLDF_HAS_LOGO3ID: int +SLDF_HAS_DARWINID: int +SLDF_RUNAS_USER: int +SLDF_NO_PIDL_ALIAS: int +SLDF_FORCE_UNCNAME: int +SLDF_HAS_EXP_ICON_SZ: int +SLDF_RUN_WITH_SHIMLAYER: int +SLDF_RESERVED: int +EXP_SPECIAL_FOLDER_SIG: int +NT_CONSOLE_PROPS_SIG: int +NT_FE_CONSOLE_PROPS_SIG: int +EXP_DARWIN_ID_SIG: int +EXP_LOGO3_ID_SIG: int +EXP_SZ_ICON_SIG: int +EXP_SZ_LINK_SIG: int +IURL_SETURL_FL_GUESS_PROTOCOL: int +IURL_SETURL_FL_USE_DEFAULT_PROTOCOL: int +IURL_INVOKECOMMAND_FL_ALLOW_UI: int +IURL_INVOKECOMMAND_FL_USE_DEFAULT_VERB: int +IURL_INVOKECOMMAND_FL_DDEWAIT: int +IS_NORMAL: int +IS_FULLSCREEN: int +IS_SPLIT: int +IS_VALIDSIZESTATEBITS: Incomplete +IS_VALIDSTATEBITS: Incomplete +AD_APPLY_DYNAMICREFRESH: int +COMP_ELEM_ORIGINAL_CSI: int +COMP_ELEM_RESTORED_CSI: int +COMP_ELEM_CURITEMSTATE: int +COMP_ELEM_ALL: Incomplete +DTI_ADDUI_DEFAULT: int +DTI_ADDUI_DISPSUBWIZARD: int +DTI_ADDUI_POSITIONITEM: int +COMPONENT_DEFAULT_LEFT: int +COMPONENT_DEFAULT_TOP: int +SSM_CLEAR: int +SSM_SET: int +SSM_REFRESH: int +SSM_UPDATE: int +SCHEME_DISPLAY: int +SCHEME_EDIT: int +SCHEME_LOCAL: int +SCHEME_GLOBAL: int +SCHEME_REFRESH: int +SCHEME_UPDATE: int +SCHEME_DONOTUSE: int +SCHEME_CREATE: int +GADOF_DIRTY: int +EVCF_HASSETTINGS: int +EVCF_ENABLEBYDEFAULT: int +EVCF_REMOVEFROMLIST: int +EVCF_ENABLEBYDEFAULT_AUTO: int +EVCF_DONTSHOWIFZERO: int +EVCF_SETTINGSMODE: int +EVCF_OUTOFDISKSPACE: int +EVCCBF_LASTNOTIFICATION: int +EBO_NONE: int +EBO_NAVIGATEONCE: int +EBO_SHOWFRAMES: int +EBO_ALWAYSNAVIGATE: int +EBO_NOTRAVELLOG: int +EBO_NOWRAPPERWINDOW: int +EBF_NONE: int +EBF_SELECTFROMDATAOBJECT: int +EBF_NODROPTARGET: int +ECS_ENABLED: int +ECS_DISABLED: int +ECS_HIDDEN: int +ECS_CHECKBOX: int +ECS_CHECKED: int +ECF_HASSUBCOMMANDS: int +ECF_HASSPLITBUTTON: int +ECF_HIDELABEL: int +ECF_ISSEPARATOR: int +ECF_HASLUASHIELD: int +SIATTRIBFLAGS_AND: int +SIATTRIBFLAGS_OR: int +SIATTRIBFLAGS_APPCOMPAT: int +SIATTRIBFLAGS_MASK: int +SIGDN_NORMALDISPLAY: int +SIGDN_PARENTRELATIVEPARSING: int +SIGDN_DESKTOPABSOLUTEPARSING: int +SIGDN_PARENTRELATIVEEDITING: int +SIGDN_DESKTOPABSOLUTEEDITING: int +SIGDN_FILESYSPATH: int +SIGDN_URL: int +SIGDN_PARENTRELATIVEFORADDRESSBAR: int 
+SIGDN_PARENTRELATIVE: int +SICHINT_DISPLAY: Incomplete +SICHINT_ALLFIELDS: int +SICHINT_CANONICAL: int +ASSOCCLASS_SHELL_KEY: int +ASSOCCLASS_PROGID_KEY: int +ASSOCCLASS_PROGID_STR: int +ASSOCCLASS_CLSID_KEY: int +ASSOCCLASS_CLSID_STR: int +ASSOCCLASS_APP_KEY: int +ASSOCCLASS_APP_STR: int +ASSOCCLASS_SYSTEM_STR: int +ASSOCCLASS_FOLDER: int +ASSOCCLASS_STAR: int +NSTCS_HASEXPANDOS: int +NSTCS_HASLINES: int +NSTCS_SINGLECLICKEXPAND: int +NSTCS_FULLROWSELECT: int +NSTCS_SPRINGEXPAND: int +NSTCS_HORIZONTALSCROLL: int +NSTCS_ROOTHASEXPANDO: int +NSTCS_SHOWSELECTIONALWAYS: int +NSTCS_NOINFOTIP: int +NSTCS_EVENHEIGHT: int +NSTCS_NOREPLACEOPEN: int +NSTCS_DISABLEDRAGDROP: int +NSTCS_NOORDERSTREAM: int +NSTCS_RICHTOOLTIP: int +NSTCS_BORDER: int +NSTCS_NOEDITLABELS: int +NSTCS_TABSTOP: int +NSTCS_FAVORITESMODE: int +NSTCS_AUTOHSCROLL: int +NSTCS_FADEINOUTEXPANDOS: int +NSTCS_EMPTYTEXT: int +NSTCS_CHECKBOXES: int +NSTCS_PARTIALCHECKBOXES: int +NSTCS_EXCLUSIONCHECKBOXES: int +NSTCS_DIMMEDCHECKBOXES: int +NSTCS_NOINDENTCHECKS: int +NSTCS_ALLOWJUNCTIONS: int +NSTCS_SHOWTABSBUTTON: int +NSTCS_SHOWDELETEBUTTON: int +NSTCS_SHOWREFRESHBUTTON: int +NSTCRS_VISIBLE: int +NSTCRS_HIDDEN: int +NSTCRS_EXPANDED: int +NSTCIS_NONE: int +NSTCIS_SELECTED: int +NSTCIS_EXPANDED: int +NSTCIS_BOLD: int +NSTCIS_DISABLED: int +NSTCGNI_NEXT: int +NSTCGNI_NEXTVISIBLE: int +NSTCGNI_PREV: int +NSTCGNI_PREVVISIBLE: int +NSTCGNI_PARENT: int +NSTCGNI_CHILD: int +NSTCGNI_FIRSTVISIBLE: int +NSTCGNI_LASTVISIBLE: int +CLSID_ExplorerBrowser: str +IBrowserFrame_Methods: Incomplete +ICategorizer_Methods: Incomplete +ICategoryProvider_Methods: Incomplete +IContextMenu_Methods: Incomplete +IExplorerCommand_Methods: Incomplete +IExplorerCommandProvider_Methods: Incomplete +IOleWindow_Methods: Incomplete +IPersist_Methods: Incomplete +IPersistFolder_Methods: Incomplete +IPersistFolder2_Methods: Incomplete +IShellExtInit_Methods: Incomplete +IShellView_Methods: Incomplete +IShellFolder_Methods: Incomplete +IShellFolder2_Methods: Incomplete +GPS_DEFAULT: int +GPS_HANDLERPROPERTIESONLY: int +GPS_READWRITE: int +GPS_TEMPORARY: int +GPS_FASTPROPERTIESONLY: int +GPS_OPENSLOWITEM: int +GPS_DELAYCREATION: int +GPS_BESTEFFORT: int +GPS_MASK_VALID: int +STR_AVOID_DRIVE_RESTRICTION_POLICY: str +STR_BIND_DELEGATE_CREATE_OBJECT: str +STR_BIND_FOLDERS_READ_ONLY: str +STR_BIND_FOLDER_ENUM_MODE: str +STR_BIND_FORCE_FOLDER_SHORTCUT_RESOLVE: str +STR_DONT_PARSE_RELATIVE: str +STR_DONT_RESOLVE_LINK: str +STR_FILE_SYS_BIND_DATA: str +STR_GET_ASYNC_HANDLER: str +STR_GPS_BESTEFFORT: str +STR_GPS_DELAYCREATION: str +STR_GPS_FASTPROPERTIESONLY: str +STR_GPS_HANDLERPROPERTIESONLY: str +STR_GPS_NO_OPLOCK: str +STR_GPS_OPENSLOWITEM: str +STR_IFILTER_FORCE_TEXT_FILTER_FALLBACK: str +STR_IFILTER_LOAD_DEFINED_FILTER: str +STR_INTERNAL_NAVIGATE: str +STR_INTERNETFOLDER_PARSE_ONLY_URLMON_BINDABLE: str +STR_ITEM_CACHE_CONTEXT: str +STR_NO_VALIDATE_FILENAME_CHARS: str +STR_PARSE_ALLOW_INTERNET_SHELL_FOLDERS: str +STR_PARSE_AND_CREATE_ITEM: str +STR_PARSE_DONT_REQUIRE_VALIDATED_URLS: str +STR_PARSE_EXPLICIT_ASSOCIATION_SUCCESSFUL: str +STR_PARSE_PARTIAL_IDLIST: str +STR_PARSE_PREFER_FOLDER_BROWSING: str +STR_PARSE_PREFER_WEB_BROWSING: str +STR_PARSE_PROPERTYSTORE: str +STR_PARSE_SHELL_PROTOCOL_TO_FILE_OBJECTS: str +STR_PARSE_SHOW_NET_DIAGNOSTICS_UI: str +STR_PARSE_SKIP_NET_CACHE: str +STR_PARSE_TRANSLATE_ALIASES: str +STR_PARSE_WITH_EXPLICIT_ASSOCAPP: str +STR_PARSE_WITH_EXPLICIT_PROGID: str +STR_PARSE_WITH_PROPERTIES: str +STR_SKIP_BINDING_CLSID: str +STR_TRACK_CLSID: str 
+KF_REDIRECTION_CAPABILITIES_ALLOW_ALL: int +KF_REDIRECTION_CAPABILITIES_REDIRECTABLE: int +KF_REDIRECTION_CAPABILITIES_DENY_ALL: int +KF_REDIRECTION_CAPABILITIES_DENY_POLICY_REDIRECTED: int +KF_REDIRECTION_CAPABILITIES_DENY_POLICY: int +KF_REDIRECTION_CAPABILITIES_DENY_PERMISSIONS: int +KF_REDIRECT_USER_EXCLUSIVE: int +KF_REDIRECT_COPY_SOURCE_DACL: int +KF_REDIRECT_OWNER_USER: int +KF_REDIRECT_SET_OWNER_EXPLICIT: int +KF_REDIRECT_CHECK_ONLY: int +KF_REDIRECT_WITH_UI: int +KF_REDIRECT_UNPIN: int +KF_REDIRECT_PIN: int +KF_REDIRECT_COPY_CONTENTS: int +KF_REDIRECT_DEL_SOURCE_CONTENTS: int +KF_REDIRECT_EXCLUDE_ALL_KNOWN_SUBFOLDERS: int +KF_CATEGORY_VIRTUAL: int +KF_CATEGORY_FIXED: int +KF_CATEGORY_COMMON: int +KF_CATEGORY_PERUSER: int +FFFP_EXACTMATCH: int +FFFP_NEARESTPARENTMATCH: int +KF_FLAG_CREATE: int +KF_FLAG_DONT_VERIFY: int +KF_FLAG_DONT_UNEXPAND: int +KF_FLAG_NO_ALIAS: int +KF_FLAG_INIT: int +KF_FLAG_DEFAULT_PATH: int +KF_FLAG_NOT_PARENT_RELATIVE: int +KF_FLAG_SIMPLE_IDLIST: int +ADLT_RECENT: int +ADLT_FREQUENT: int +KDC_FREQUENT: int +KDC_RECENT: int +LFF_FORCEFILESYSTEM: int +LFF_STORAGEITEMS: int +LFF_ALLITEMS: int +DSFT_DETECT: int +DSFT_PRIVATE: int +DSFT_PUBLIC: int +LOF_DEFAULT: int +LOF_PINNEDTONAVPANE: int +LOF_MASK_ALL: int +LSF_FAILIFTHERE: int +LSF_OVERRIDEEXISTING: int +LSF_MAKEUNIQUENAME: int +TSF_NORMAL: int +TSF_FAIL_EXIST: int +TSF_RENAME_EXIST: int +TSF_OVERWRITE_EXIST: int +TSF_ALLOW_DECRYPTION: int +TSF_NO_SECURITY: int +TSF_COPY_CREATION_TIME: int +TSF_COPY_WRITE_TIME: int +TSF_USE_FULL_ACCESS: int +TSF_DELETE_RECYCLE_IF_POSSIBLE: int +TSF_COPY_HARD_LINK: int +TSF_COPY_LOCALIZED_NAME: int +TSF_MOVE_AS_COPY_DELETE: int +TSF_SUSPEND_SHELLEVENTS: int +TS_NONE: int +TS_PERFORMING: int +TS_PREPARING: int +TS_INDETERMINATE: int +COPYENGINE_S_YES: int +COPYENGINE_S_NOT_HANDLED: int +COPYENGINE_S_USER_RETRY: int +COPYENGINE_S_USER_IGNORED: int +COPYENGINE_S_MERGE: int +COPYENGINE_S_DONT_PROCESS_CHILDREN: int +COPYENGINE_S_ALREADY_DONE: int +COPYENGINE_S_PENDING: int +COPYENGINE_S_KEEP_BOTH: int +COPYENGINE_S_CLOSE_PROGRAM: int +COPYENGINE_S_COLLISIONRESOLVED: int +COPYENGINE_E_USER_CANCELLED: int +COPYENGINE_E_CANCELLED: int +COPYENGINE_E_REQUIRES_ELEVATION: int +COPYENGINE_E_SAME_FILE: int +COPYENGINE_E_DIFF_DIR: int +COPYENGINE_E_MANY_SRC_1_DEST: int +COPYENGINE_E_DEST_SUBTREE: int +COPYENGINE_E_DEST_SAME_TREE: int +COPYENGINE_E_FLD_IS_FILE_DEST: int +COPYENGINE_E_FILE_IS_FLD_DEST: int +COPYENGINE_E_FILE_TOO_LARGE: int +COPYENGINE_E_REMOVABLE_FULL: int +COPYENGINE_E_DEST_IS_RO_CD: int +COPYENGINE_E_DEST_IS_RW_CD: int +COPYENGINE_E_DEST_IS_R_CD: int +COPYENGINE_E_DEST_IS_RO_DVD: int +COPYENGINE_E_DEST_IS_RW_DVD: int +COPYENGINE_E_DEST_IS_R_DVD: int +COPYENGINE_E_SRC_IS_RO_CD: int +COPYENGINE_E_SRC_IS_RW_CD: int +COPYENGINE_E_SRC_IS_R_CD: int +COPYENGINE_E_SRC_IS_RO_DVD: int +COPYENGINE_E_SRC_IS_RW_DVD: int +COPYENGINE_E_SRC_IS_R_DVD: int +COPYENGINE_E_INVALID_FILES_SRC: int +COPYENGINE_E_INVALID_FILES_DEST: int +COPYENGINE_E_PATH_TOO_DEEP_SRC: int +COPYENGINE_E_PATH_TOO_DEEP_DEST: int +COPYENGINE_E_ROOT_DIR_SRC: int +COPYENGINE_E_ROOT_DIR_DEST: int +COPYENGINE_E_ACCESS_DENIED_SRC: int +COPYENGINE_E_ACCESS_DENIED_DEST: int +COPYENGINE_E_PATH_NOT_FOUND_SRC: int +COPYENGINE_E_PATH_NOT_FOUND_DEST: int +COPYENGINE_E_NET_DISCONNECT_SRC: int +COPYENGINE_E_NET_DISCONNECT_DEST: int +COPYENGINE_E_SHARING_VIOLATION_SRC: int +COPYENGINE_E_SHARING_VIOLATION_DEST: int +COPYENGINE_E_ALREADY_EXISTS_NORMAL: int +COPYENGINE_E_ALREADY_EXISTS_READONLY: int 
+COPYENGINE_E_ALREADY_EXISTS_SYSTEM: int +COPYENGINE_E_ALREADY_EXISTS_FOLDER: int +COPYENGINE_E_STREAM_LOSS: int +COPYENGINE_E_EA_LOSS: int +COPYENGINE_E_PROPERTY_LOSS: int +COPYENGINE_E_PROPERTIES_LOSS: int +COPYENGINE_E_ENCRYPTION_LOSS: int +COPYENGINE_E_DISK_FULL: int +COPYENGINE_E_DISK_FULL_CLEAN: int +COPYENGINE_E_EA_NOT_SUPPORTED: int +COPYENGINE_E_CANT_REACH_SOURCE: int +COPYENGINE_E_RECYCLE_UNKNOWN_ERROR: int +COPYENGINE_E_RECYCLE_FORCE_NUKE: int +COPYENGINE_E_RECYCLE_SIZE_TOO_BIG: int +COPYENGINE_E_RECYCLE_PATH_TOO_LONG: int +COPYENGINE_E_RECYCLE_BIN_NOT_FOUND: int +COPYENGINE_E_NEWFILE_NAME_TOO_LONG: int +COPYENGINE_E_NEWFOLDER_NAME_TOO_LONG: int +COPYENGINE_E_DIR_NOT_EMPTY: int +COPYENGINE_E_FAT_MAX_IN_ROOT: int +COPYENGINE_E_ACCESSDENIED_READONLY: int +COPYENGINE_E_REDIRECTED_TO_WEBPAGE: int +COPYENGINE_E_SERVER_BAD_FILE_TYPE: int +FOLDERID_NetworkFolder: str +FOLDERID_ComputerFolder: str +FOLDERID_InternetFolder: str +FOLDERID_ControlPanelFolder: str +FOLDERID_PrintersFolder: str +FOLDERID_SyncManagerFolder: str +FOLDERID_SyncSetupFolder: str +FOLDERID_ConflictFolder: str +FOLDERID_SyncResultsFolder: str +FOLDERID_RecycleBinFolder: str +FOLDERID_ConnectionsFolder: str +FOLDERID_Fonts: str +FOLDERID_Desktop: str +FOLDERID_Startup: str +FOLDERID_Programs: str +FOLDERID_StartMenu: str +FOLDERID_Recent: str +FOLDERID_SendTo: str +FOLDERID_Documents: str +FOLDERID_Favorites: str +FOLDERID_NetHood: str +FOLDERID_PrintHood: str +FOLDERID_Templates: str +FOLDERID_CommonStartup: str +FOLDERID_CommonPrograms: str +FOLDERID_CommonStartMenu: str +FOLDERID_PublicDesktop: str +FOLDERID_ProgramData: str +FOLDERID_CommonTemplates: str +FOLDERID_PublicDocuments: str +FOLDERID_RoamingAppData: str +FOLDERID_LocalAppData: str +FOLDERID_LocalAppDataLow: str +FOLDERID_InternetCache: str +FOLDERID_Cookies: str +FOLDERID_History: str +FOLDERID_System: str +FOLDERID_SystemX86: str +FOLDERID_Windows: str +FOLDERID_Profile: str +FOLDERID_Pictures: str +FOLDERID_ProgramFilesX86: str +FOLDERID_ProgramFilesCommonX86: str +FOLDERID_ProgramFilesX64: str +FOLDERID_ProgramFilesCommonX64: str +FOLDERID_ProgramFiles: str +FOLDERID_ProgramFilesCommon: str +FOLDERID_UserProgramFiles: str +FOLDERID_UserProgramFilesCommon: str +FOLDERID_AdminTools: str +FOLDERID_CommonAdminTools: str +FOLDERID_Music: str +FOLDERID_Videos: str +FOLDERID_Ringtones: str +FOLDERID_PublicPictures: str +FOLDERID_PublicMusic: str +FOLDERID_PublicVideos: str +FOLDERID_PublicRingtones: str +FOLDERID_ResourceDir: str +FOLDERID_LocalizedResourcesDir: str +FOLDERID_CommonOEMLinks: str +FOLDERID_CDBurning: str +FOLDERID_UserProfiles: str +FOLDERID_Playlists: str +FOLDERID_SamplePlaylists: str +FOLDERID_SampleMusic: str +FOLDERID_SamplePictures: str +FOLDERID_SampleVideos: str +FOLDERID_PhotoAlbums: str +FOLDERID_Public: str +FOLDERID_ChangeRemovePrograms: str +FOLDERID_AppUpdates: str +FOLDERID_AddNewPrograms: str +FOLDERID_Downloads: str +FOLDERID_PublicDownloads: str +FOLDERID_SavedSearches: str +FOLDERID_QuickLaunch: str +FOLDERID_Contacts: str +FOLDERID_SidebarParts: str +FOLDERID_SidebarDefaultParts: str +FOLDERID_PublicGameTasks: str +FOLDERID_GameTasks: str +FOLDERID_SavedGames: str +FOLDERID_Games: str +FOLDERID_SEARCH_MAPI: str +FOLDERID_SEARCH_CSC: str +FOLDERID_Links: str +FOLDERID_UsersFiles: str +FOLDERID_UsersLibraries: str +FOLDERID_SearchHome: str +FOLDERID_OriginalImages: str +FOLDERID_DocumentsLibrary: str +FOLDERID_MusicLibrary: str +FOLDERID_PicturesLibrary: str +FOLDERID_VideosLibrary: str +FOLDERID_RecordedTVLibrary: str 
+FOLDERID_HomeGroup: str +FOLDERID_HomeGroupCurrentUser: str +FOLDERID_DeviceMetadataStore: str +FOLDERID_Libraries: str +FOLDERID_PublicLibraries: str +FOLDERID_UserPinned: str +FOLDERID_ImplicitAppShortcuts: str +FOLDERID_AccountPictures: str +FOLDERID_PublicUserTiles: str +FOLDERID_AppsFolder: str +FOLDERID_StartMenuAllPrograms: str +FOLDERID_CommonStartMenuPlaces: str +FOLDERID_ApplicationShortcuts: str +FOLDERID_RoamingTiles: str +FOLDERID_RoamedTileImages: str +FOLDERID_Screenshots: str +FOLDERID_CameraRoll: str +FOLDERID_SkyDrive: str +FOLDERID_OneDrive: str +FOLDERID_SkyDriveDocuments: str +FOLDERID_SkyDrivePictures: str +FOLDERID_SkyDriveMusic: str +FOLDERID_SkyDriveCameraRoll: str +FOLDERID_SearchHistory: str +FOLDERID_SearchTemplates: str +FOLDERID_CameraRollLibrary: str +FOLDERID_SavedPictures: str +FOLDERID_SavedPicturesLibrary: str +FOLDERID_RetailDemo: str +FOLDERID_Device: str +FOLDERID_DevelopmentFiles: str +FOLDERID_Objects3D: str +FOLDERID_AppCaptures: str +FOLDERID_LocalDocuments: str +FOLDERID_LocalPictures: str +FOLDERID_LocalVideos: str +FOLDERID_LocalMusic: str +FOLDERID_LocalDownloads: str +FOLDERID_RecordedCalls: str +KF_FLAG_DEFAULT: int +KF_FLAG_FORCE_APP_DATA_REDIRECTION: int +KF_FLAG_RETURN_FILTER_REDIRECTION_TARGET: int +KF_FLAG_FORCE_PACKAGE_REDIRECTION: int +KF_FLAG_NO_PACKAGE_REDIRECTION: int +KF_FLAG_FORCE_APPCONTAINER_REDIRECTION: int +KF_FLAG_NO_APPCONTAINER_REDIRECTION: int +KF_FLAG_ALIAS_ONLY: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/taskscheduler/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/taskscheduler/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/taskscheduler/taskscheduler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/taskscheduler/taskscheduler.pyi new file mode 100644 index 00000000..b37d7224 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32comext/taskscheduler/taskscheduler.pyi @@ -0,0 +1,83 @@ +import _win32typing + +CLSID_CTask: _win32typing.PyIID +CLSID_CTaskScheduler: _win32typing.PyIID +HIGH_PRIORITY_CLASS: int +IDLE_PRIORITY_CLASS: int +IID_IProvideTaskPage: _win32typing.PyIID +IID_IScheduledWorkItem: _win32typing.PyIID +IID_ITask: _win32typing.PyIID +IID_ITaskScheduler: _win32typing.PyIID +IID_ITaskTrigger: _win32typing.PyIID +NORMAL_PRIORITY_CLASS: int +REALTIME_PRIORITY_CLASS: int +SCHED_E_ACCOUNT_DBASE_CORRUPT: int +SCHED_E_ACCOUNT_INFORMATION_NOT_SET: int +SCHED_E_ACCOUNT_NAME_NOT_FOUND: int +SCHED_E_CANNOT_OPEN_TASK: int +SCHED_E_INVALID_TASK: int +SCHED_E_SERVICE_NOT_INSTALLED: int +SCHED_E_TASK_NOT_READY: int +SCHED_E_TASK_NOT_RUNNING: int +SCHED_E_TRIGGER_NOT_FOUND: int +SCHED_E_UNKNOWN_OBJECT_VERSION: int +SCHED_S_EVENT_TRIGGER: int +SCHED_S_TASK_DISABLED: int +SCHED_S_TASK_HAS_NOT_RUN: int +SCHED_S_TASK_NOT_SCHEDULED: int +SCHED_S_TASK_NO_MORE_RUNS: int +SCHED_S_TASK_NO_VALID_TRIGGERS: int +SCHED_S_TASK_READY: int +SCHED_S_TASK_RUNNING: int +SCHED_S_TASK_TERMINATED: int +TASKPAGE_SCHEDULE: int +TASKPAGE_SETTINGS: int +TASKPAGE_TASK: int +TASK_APRIL: int +TASK_AUGUST: int +TASK_DECEMBER: int +TASK_EVENT_TRIGGER_AT_LOGON: int +TASK_EVENT_TRIGGER_AT_SYSTEMSTART: int +TASK_EVENT_TRIGGER_ON_IDLE: int +TASK_FEBRUARY: int +TASK_FIRST_WEEK: int +TASK_FLAG_DELETE_WHEN_DONE: 
int +TASK_FLAG_DISABLED: int +TASK_FLAG_DONT_START_IF_ON_BATTERIES: int +TASK_FLAG_HIDDEN: int +TASK_FLAG_INTERACTIVE: int +TASK_FLAG_KILL_IF_GOING_ON_BATTERIES: int +TASK_FLAG_KILL_ON_IDLE_END: int +TASK_FLAG_RESTART_ON_IDLE_RESUME: int +TASK_FLAG_RUN_IF_CONNECTED_TO_INTERNET: int +TASK_FLAG_RUN_ONLY_IF_DOCKED: int +TASK_FLAG_RUN_ONLY_IF_LOGGED_ON: int +TASK_FLAG_START_ONLY_IF_IDLE: int +TASK_FLAG_SYSTEM_REQUIRED: int +TASK_FOURTH_WEEK: int +TASK_FRIDAY: int +TASK_JANUARY: int +TASK_JULY: int +TASK_JUNE: int +TASK_LAST_WEEK: int +TASK_MARCH: int +TASK_MAY: int +TASK_MONDAY: int +TASK_NOVEMBER: int +TASK_OCTOBER: int +TASK_SATURDAY: int +TASK_SECOND_WEEK: int +TASK_SEPTEMBER: int +TASK_SUNDAY: int +TASK_THIRD_WEEK: int +TASK_THURSDAY: int +TASK_TIME_TRIGGER_DAILY: int +TASK_TIME_TRIGGER_MONTHLYDATE: int +TASK_TIME_TRIGGER_MONTHLYDOW: int +TASK_TIME_TRIGGER_ONCE: int +TASK_TIME_TRIGGER_WEEKLY: int +TASK_TRIGGER_FLAG_DISABLED: int +TASK_TRIGGER_FLAG_HAS_END_DATE: int +TASK_TRIGGER_FLAG_KILL_AT_DURATION_END: int +TASK_TUESDAY: int +TASK_WEDNESDAY: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32con.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32con.pyi new file mode 100644 index 00000000..54cfd26a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32con.pyi @@ -0,0 +1 @@ +from win32.lib.win32con import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32console.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32console.pyi new file mode 100644 index 00000000..f8539ed6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32console.pyi @@ -0,0 +1 @@ +from win32.win32console import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32cred.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32cred.pyi new file mode 100644 index 00000000..6cb3e268 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32cred.pyi @@ -0,0 +1 @@ +from win32.win32cred import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32crypt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32crypt.pyi new file mode 100644 index 00000000..223df5e3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32crypt.pyi @@ -0,0 +1 @@ +from win32.win32crypt import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32cryptcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32cryptcon.pyi new file mode 100644 index 00000000..6df8b440 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32cryptcon.pyi @@ -0,0 +1 @@ +from win32.lib.win32cryptcon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32event.pyi new file mode 100644 index 00000000..53191d41 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32event.pyi @@ -0,0 +1 
@@ +from win32.win32event import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32evtlog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32evtlog.pyi new file mode 100644 index 00000000..7c7ffe5c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32evtlog.pyi @@ -0,0 +1 @@ +from win32.win32evtlog import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32evtlogutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32evtlogutil.pyi new file mode 100644 index 00000000..c37e5dd7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32evtlogutil.pyi @@ -0,0 +1 @@ +from win32.lib.win32evtlogutil import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32file.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32file.pyi new file mode 100644 index 00000000..3a703a97 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32file.pyi @@ -0,0 +1 @@ +from win32.win32file import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32gui.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32gui.pyi new file mode 100644 index 00000000..3b2b41ae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32gui.pyi @@ -0,0 +1 @@ +from win32.win32gui import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32gui_struct.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32gui_struct.pyi new file mode 100644 index 00000000..3c7cd0a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32gui_struct.pyi @@ -0,0 +1 @@ +from win32.lib.win32gui_struct import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32help.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32help.pyi new file mode 100644 index 00000000..bd6fce50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32help.pyi @@ -0,0 +1 @@ +from win32.win32help import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32inet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32inet.pyi new file mode 100644 index 00000000..98e6e47f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32inet.pyi @@ -0,0 +1 @@ +from win32.win32inet import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32inetcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32inetcon.pyi new file mode 100644 index 00000000..db893b68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32inetcon.pyi @@ -0,0 +1 @@ +from win32.lib.win32inetcon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32job.pyi 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32job.pyi new file mode 100644 index 00000000..9c8f7891 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32job.pyi @@ -0,0 +1 @@ +from win32.win32job import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32lz.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32lz.pyi new file mode 100644 index 00000000..64281098 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32lz.pyi @@ -0,0 +1 @@ +from win32.win32lz import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32net.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32net.pyi new file mode 100644 index 00000000..f318e55d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32net.pyi @@ -0,0 +1 @@ +from win32.win32net import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32netcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32netcon.pyi new file mode 100644 index 00000000..f86b2ef4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32netcon.pyi @@ -0,0 +1 @@ +from win32.lib.win32netcon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32pdh.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32pdh.pyi new file mode 100644 index 00000000..739ad216 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32pdh.pyi @@ -0,0 +1 @@ +from win32.win32pdh import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32pdhquery.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32pdhquery.pyi new file mode 100644 index 00000000..2d0976fb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32pdhquery.pyi @@ -0,0 +1 @@ +from win32.lib.win32pdhquery import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32pipe.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32pipe.pyi new file mode 100644 index 00000000..bf607d6b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32pipe.pyi @@ -0,0 +1 @@ +from win32.win32pipe import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32print.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32print.pyi new file mode 100644 index 00000000..ad251501 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32print.pyi @@ -0,0 +1 @@ +from win32.win32print import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32process.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32process.pyi new file mode 100644 index 00000000..86b050b2 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32process.pyi @@ -0,0 +1 @@ +from win32.win32process import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32profile.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32profile.pyi new file mode 100644 index 00000000..b89eef14 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32profile.pyi @@ -0,0 +1 @@ +from win32.win32profile import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32ras.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32ras.pyi new file mode 100644 index 00000000..455d2763 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32ras.pyi @@ -0,0 +1 @@ +from win32.win32ras import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32security.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32security.pyi new file mode 100644 index 00000000..bc0b8d20 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32security.pyi @@ -0,0 +1 @@ +from win32.win32security import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32service.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32service.pyi new file mode 100644 index 00000000..b98158e9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32service.pyi @@ -0,0 +1 @@ +from win32.win32service import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32serviceutil.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32serviceutil.pyi new file mode 100644 index 00000000..1e989b02 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32serviceutil.pyi @@ -0,0 +1 @@ +from win32.lib.win32serviceutil import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32timezone.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32timezone.pyi new file mode 100644 index 00000000..bd22d677 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32timezone.pyi @@ -0,0 +1 @@ +from win32.lib.win32timezone import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32trace.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32trace.pyi new file mode 100644 index 00000000..3ece6297 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32trace.pyi @@ -0,0 +1 @@ +from win32.win32trace import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32transaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32transaction.pyi new file mode 100644 index 00000000..bd529b09 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32transaction.pyi @@ -0,0 +1 @@ +from 
win32.win32transaction import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32ts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32ts.pyi new file mode 100644 index 00000000..dd0d8a23 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32ts.pyi @@ -0,0 +1 @@ +from win32.win32ts import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32ui.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32ui.pyi new file mode 100644 index 00000000..49ea5db9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32ui.pyi @@ -0,0 +1 @@ +from pythonwin.win32ui import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32uiole.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32uiole.pyi new file mode 100644 index 00000000..b0ae818d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32uiole.pyi @@ -0,0 +1 @@ +from pythonwin.win32uiole import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32wnet.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32wnet.pyi new file mode 100644 index 00000000..67facc0c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/win32wnet.pyi @@ -0,0 +1 @@ +from win32.win32wnet import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winerror.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winerror.pyi new file mode 100644 index 00000000..d0bdce6c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winerror.pyi @@ -0,0 +1 @@ +from win32.lib.winerror import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winioctlcon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winioctlcon.pyi new file mode 100644 index 00000000..6e51730f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winioctlcon.pyi @@ -0,0 +1 @@ +from win32.lib.winioctlcon import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winnt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winnt.pyi new file mode 100644 index 00000000..8bbdea59 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winnt.pyi @@ -0,0 +1 @@ +from win32.lib.winnt import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winperf.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winperf.pyi new file mode 100644 index 00000000..d3138f48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winperf.pyi @@ -0,0 +1 @@ +from win32.lib.winperf import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winxpgui.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winxpgui.pyi new file mode 
100644 index 00000000..9a8e6233 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winxpgui.pyi @@ -0,0 +1 @@ +from win32.winxpgui import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winxptheme.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winxptheme.pyi new file mode 100644 index 00000000..e4b09f60 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/pywin32/winxptheme.pyi @@ -0,0 +1 @@ +from win32.lib.winxptheme import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..4256e9c4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/@tests/stubtest_allowlist.txt @@ -0,0 +1,74 @@ +redis.client.Pipeline.transaction # instance attribute has same name as superclass method + +# async def mismatch problems +redis.asyncio.client.Pipeline.command_info +redis.asyncio.client.Pipeline.debug_segfault +redis.asyncio.client.Pipeline.memory_doctor +redis.asyncio.client.Pipeline.memory_help +redis.asyncio.client.Pipeline.script_debug +redis.asyncio.client.Pipeline.shutdown + +# unclear problems +redis.asyncio.Sentinel.master_for +redis.asyncio.Sentinel.slave_for +redis.asyncio.sentinel.Sentinel.master_for +redis.asyncio.sentinel.Sentinel.slave_for +redis.sentinel.Sentinel.master_for +redis.sentinel.Sentinel.slave_for + +# Metaclass differs: +redis.RedisCluster +redis.asyncio.Redis +redis.asyncio.client.MonitorCommandInfo +redis.asyncio.client.Pipeline +redis.asyncio.client.Redis +redis.asyncio.connection.ConnectKwargs +redis.client.Pipeline +redis.client.Redis +redis.cluster.ClusterPipeline +redis.cluster.RedisCluster +redis.commands.AsyncCoreCommands +redis.commands.CoreCommands +redis.commands.RedisClusterCommands +redis.commands.cluster.ClusterDataAccessCommands +redis.commands.cluster.ClusterManagementCommands +redis.commands.cluster.ClusterMultiKeyCommands +redis.commands.cluster.RedisClusterCommands +redis.commands.core.ACLCommands +redis.commands.core.AsyncACLCommands +redis.commands.core.AsyncBasicKeyCommands +redis.commands.core.AsyncClusterCommands +redis.commands.core.AsyncCoreCommands +redis.commands.core.AsyncDataAccessCommands +redis.commands.core.AsyncGeoCommands +redis.commands.core.AsyncHashCommands +redis.commands.core.AsyncHyperlogCommands +redis.commands.core.AsyncListCommands +redis.commands.core.AsyncManagementCommands +redis.commands.core.AsyncModuleCommands +redis.commands.core.AsyncPubSubCommands +redis.commands.core.AsyncScanCommands +redis.commands.core.AsyncScriptCommands +redis.commands.core.AsyncSetCommands +redis.commands.core.AsyncSortedSetCommands +redis.commands.core.AsyncStreamCommands +redis.commands.core.BasicKeyCommands +redis.commands.core.ClusterCommands +redis.commands.core.CoreCommands +redis.commands.core.DataAccessCommands +redis.commands.core.GeoCommands +redis.commands.core.HashCommands +redis.commands.core.HyperlogCommands +redis.commands.core.ListCommands +redis.commands.core.ManagementCommands +redis.commands.core.ModuleCommands +redis.commands.core.PubSubCommands +redis.commands.core.ScanCommands +redis.commands.core.ScriptCommands +redis.commands.core.SetCommands +redis.commands.core.SortedSetCommands 
+redis.commands.core.StreamCommands +redis.commands.json.Pipeline +redis.commands.timeseries.Pipeline +redis.asyncio.cluster.ClusterPipeline +redis.asyncio.cluster.RedisCluster diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/METADATA.toml new file mode 100644 index 00000000..ab5b672a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/METADATA.toml @@ -0,0 +1,7 @@ +version = "4.5.1" +# Requires a version of cryptography with a `py.typed` file +requires = ["cryptography>=35.0.0", "types-pyOpenSSL"] + +[tool.stubtest] +ignore_missing_stub = true +extras = ["ocsp"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/__init__.pyi new file mode 100644 index 00000000..89f0cf2a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/__init__.pyi @@ -0,0 +1,64 @@ +from . import client, connection, exceptions, sentinel, utils +from .cluster import RedisCluster as RedisCluster + +__all__ = [ + "AuthenticationError", + "AuthenticationWrongNumberOfArgsError", + "BlockingConnectionPool", + "BusyLoadingError", + "ChildDeadlockedError", + "Connection", + "ConnectionError", + "ConnectionPool", + "DataError", + "from_url", + "InvalidResponse", + "PubSubError", + "ReadOnlyError", + "Redis", + "RedisCluster", + "RedisError", + "ResponseError", + "Sentinel", + "SentinelConnectionPool", + "SentinelManagedConnection", + "SentinelManagedSSLConnection", + "SSLConnection", + "StrictRedis", + "TimeoutError", + "UnixDomainSocketConnection", + "WatchError", +] + +Redis = client.Redis + +BlockingConnectionPool = connection.BlockingConnectionPool +Connection = connection.Connection +ConnectionPool = connection.ConnectionPool +SSLConnection = connection.SSLConnection +StrictRedis = client.StrictRedis +UnixDomainSocketConnection = connection.UnixDomainSocketConnection + +from_url = utils.from_url + +Sentinel = sentinel.Sentinel +SentinelConnectionPool = sentinel.SentinelConnectionPool +SentinelManagedConnection = sentinel.SentinelManagedConnection +SentinelManagedSSLConnection = sentinel.SentinelManagedSSLConnection + +AuthenticationError = exceptions.AuthenticationError +AuthenticationWrongNumberOfArgsError = exceptions.AuthenticationWrongNumberOfArgsError +BusyLoadingError = exceptions.BusyLoadingError +ChildDeadlockedError = exceptions.ChildDeadlockedError +ConnectionError = exceptions.ConnectionError +DataError = exceptions.DataError +InvalidResponse = exceptions.InvalidResponse +PubSubError = exceptions.PubSubError +ReadOnlyError = exceptions.ReadOnlyError +RedisError = exceptions.RedisError +ResponseError = exceptions.ResponseError +TimeoutError = exceptions.TimeoutError +WatchError = exceptions.WatchError + +__version__: str +VERSION: tuple[int | str, ...] 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/__init__.pyi new file mode 100644 index 00000000..22c85ecc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/__init__.pyi @@ -0,0 +1,58 @@ +from redis.asyncio.client import Redis as Redis, StrictRedis as StrictRedis +from redis.asyncio.connection import ( + BlockingConnectionPool as BlockingConnectionPool, + Connection as Connection, + ConnectionPool as ConnectionPool, + SSLConnection as SSLConnection, + UnixDomainSocketConnection as UnixDomainSocketConnection, +) +from redis.asyncio.sentinel import ( + Sentinel as Sentinel, + SentinelConnectionPool as SentinelConnectionPool, + SentinelManagedConnection as SentinelManagedConnection, + SentinelManagedSSLConnection as SentinelManagedSSLConnection, +) +from redis.asyncio.utils import from_url as from_url +from redis.exceptions import ( + AuthenticationError as AuthenticationError, + AuthenticationWrongNumberOfArgsError as AuthenticationWrongNumberOfArgsError, + BusyLoadingError as BusyLoadingError, + ChildDeadlockedError as ChildDeadlockedError, + ConnectionError as ConnectionError, + DataError as DataError, + InvalidResponse as InvalidResponse, + PubSubError as PubSubError, + ReadOnlyError as ReadOnlyError, + RedisError as RedisError, + ResponseError as ResponseError, + TimeoutError as TimeoutError, + WatchError as WatchError, +) + +__all__ = [ + "AuthenticationError", + "AuthenticationWrongNumberOfArgsError", + "BlockingConnectionPool", + "BusyLoadingError", + "ChildDeadlockedError", + "Connection", + "ConnectionError", + "ConnectionPool", + "DataError", + "from_url", + "InvalidResponse", + "PubSubError", + "ReadOnlyError", + "Redis", + "RedisError", + "ResponseError", + "Sentinel", + "SentinelConnectionPool", + "SentinelManagedConnection", + "SentinelManagedSSLConnection", + "SSLConnection", + "StrictRedis", + "TimeoutError", + "UnixDomainSocketConnection", + "WatchError", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/client.pyi new file mode 100644 index 00000000..f1b2dcb1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/client.pyi @@ -0,0 +1,949 @@ +from _typeshed import Incomplete +from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Mapping, MutableMapping, Sequence +from datetime import datetime, timedelta +from typing import Any, ClassVar, Generic, NoReturn, Protocol, overload +from typing_extensions import Literal, Self, TypeAlias, TypedDict + +from redis import RedisError +from redis.asyncio.connection import ConnectCallbackT, Connection, ConnectionPool +from redis.asyncio.lock import Lock +from redis.asyncio.retry import Retry +from redis.client import AbstractRedis, _CommandOptions, _Key, _StrType, _Value +from redis.commands import AsyncCoreCommands, AsyncSentinelCommands, RedisModuleCommands +from redis.credentials import CredentialProvider +from redis.typing import ChannelT, EncodableT, KeyT, PatternT, StreamIdT + +PubSubHandler: TypeAlias = Callable[[dict[str, str]], Awaitable[None]] + +class ResponseCallbackProtocol(Protocol): + def __call__(self, response: Any, **kwargs): ... 
+ +class AsyncResponseCallbackProtocol(Protocol): + async def __call__(self, response: Any, **kwargs): ... + +ResponseCallbackT: TypeAlias = ResponseCallbackProtocol | AsyncResponseCallbackProtocol + +class Redis(AbstractRedis, RedisModuleCommands, AsyncCoreCommands[_StrType], AsyncSentinelCommands, Generic[_StrType]): + response_callbacks: MutableMapping[str | bytes, ResponseCallbackT] + auto_close_connection_pool: bool + connection_pool: Any + single_connection_client: Any + connection: Any + @classmethod + def from_url(cls, url: str, **kwargs) -> Redis[Any]: ... + def __init__( + self, + *, + host: str = ..., + port: int = ..., + db: str | int = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool | None = ..., + socket_keepalive_options: Mapping[int, int | bytes] | None = ..., + connection_pool: ConnectionPool | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + decode_responses: bool = ..., + retry_on_timeout: bool = ..., + retry_on_error: list[type[RedisError]] | None = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str = ..., + ssl_ca_certs: str | None = ..., + ssl_ca_data: str | None = ..., + ssl_check_hostname: bool = ..., + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: int = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + auto_close_connection_pool: bool = ..., + redis_connect_func: ConnectCallbackT | None = ..., + credential_provider: CredentialProvider | None = ..., + ) -> None: ... + def __await__(self): ... + async def initialize(self) -> Self: ... + def set_response_callback(self, command: str, callback: ResponseCallbackT): ... + def load_external_module(self, funcname, func) -> None: ... + def pipeline(self, transaction: bool = ..., shard_hint: str | None = ...) -> Pipeline[_StrType]: ... + async def transaction( + self, + func: Callable[[Pipeline[_StrType]], Any | Awaitable[Any]], + *watches: KeyT, + shard_hint: str | None = ..., + value_from_callable: bool = ..., + watch_delay: float | None = ..., + ): ... + def lock( + self, + name: KeyT, + timeout: float | None = ..., + sleep: float = ..., + blocking: bool = ..., + blocking_timeout: float | None = ..., + lock_class: type[Lock] | None = ..., + thread_local: bool = ..., + ) -> Lock: ... + def pubsub(self, **kwargs) -> PubSub: ... + def monitor(self) -> Monitor: ... + def client(self) -> Redis[_StrType]: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, exc_type, exc_value, traceback) -> None: ... + def __del__(self, _warnings: Any = ...) -> None: ... + async def close(self, close_connection_pool: bool | None = ...) -> None: ... + async def execute_command(self, *args, **options): ... + async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ... + +StrictRedis = Redis + +class MonitorCommandInfo(TypedDict): + time: float + db: int + client_address: str + client_port: str + client_type: str + command: str + +class Monitor: + monitor_re: Any + command_re: Any + connection_pool: Any + connection: Any + def __init__(self, connection_pool: ConnectionPool) -> None: ... + async def connect(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *args) -> None: ... + async def next_command(self) -> MonitorCommandInfo: ... 
+ def listen(self) -> AsyncIterator[MonitorCommandInfo]: ... + +class PubSub: + PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, ...]] + UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, ...]] + HEALTH_CHECK_MESSAGE: ClassVar[str] + connection_pool: Any + shard_hint: str | None + ignore_subscribe_messages: bool + connection: Any + encoder: Any + health_check_response: Iterable[str | bytes] + channels: Any + pending_unsubscribe_channels: Any + patterns: Any + pending_unsubscribe_patterns: Any + def __init__( + self, + connection_pool: ConnectionPool, + shard_hint: str | None = ..., + ignore_subscribe_messages: bool = ..., + encoder: Incomplete | None = ..., + ) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, exc_type, exc_value, traceback) -> None: ... + def __del__(self) -> None: ... + async def reset(self) -> None: ... + def close(self) -> Awaitable[NoReturn]: ... + async def on_connect(self, connection: Connection): ... + @property + def subscribed(self) -> bool: ... + async def execute_command(self, *args: EncodableT): ... + async def parse_response(self, block: bool = ..., timeout: float = ...): ... + async def check_health(self) -> None: ... + async def psubscribe(self, *args: ChannelT, **kwargs: PubSubHandler): ... + def punsubscribe(self, *args: ChannelT) -> Awaitable[Any]: ... + async def subscribe(self, *args: ChannelT, **kwargs: Callable[..., Any]): ... + def unsubscribe(self, *args) -> Awaitable[Any]: ... + def listen(self) -> AsyncIterator[Any]: ... + async def get_message(self, ignore_subscribe_messages: bool = ..., timeout: float = ...): ... + def ping(self, message: Incomplete | None = ...) -> Awaitable[Any]: ... + async def handle_message(self, response, ignore_subscribe_messages: bool = ...): ... + async def run(self, *, exception_handler: PSWorkerThreadExcHandlerT | None = ..., poll_timeout: float = ...) -> None: ... + +class PubsubWorkerExceptionHandler(Protocol): + def __call__(self, e: BaseException, pubsub: PubSub): ... + +class AsyncPubsubWorkerExceptionHandler(Protocol): + async def __call__(self, e: BaseException, pubsub: PubSub): ... + +PSWorkerThreadExcHandlerT: TypeAlias = PubsubWorkerExceptionHandler | AsyncPubsubWorkerExceptionHandler +CommandT: TypeAlias = tuple[tuple[str | bytes, ...], Mapping[str, Any]] +CommandStackT: TypeAlias = list[CommandT] + +class Pipeline(Redis[_StrType], Generic[_StrType]): + UNWATCH_COMMANDS: ClassVar[set[str]] + connection_pool: Any + connection: Any + response_callbacks: Any + is_transaction: bool + shard_hint: str | None + watching: bool + command_stack: Any + scripts: Any + explicit_transaction: bool + def __init__( + self, + connection_pool: ConnectionPool, + response_callbacks: MutableMapping[str | bytes, ResponseCallbackT], + transaction: bool, + shard_hint: str | None, + ) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, exc_type, exc_value, traceback) -> None: ... + def __await__(self): ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + async def reset(self) -> None: ... # type: ignore[override] + def multi(self) -> None: ... + def execute_command(self, *args, **kwargs) -> Pipeline[_StrType] | Awaitable[Pipeline[_StrType]]: ... + async def immediate_execute_command(self, *args, **options): ... + def pipeline_execute_command(self, *args, **options): ... + def raise_first_error(self, commands: CommandStackT, response: Iterable[Any]): ... + def annotate_exception(self, exception: Exception, number: int, command: Iterable[object]) -> None: ... 
+ async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ... + async def load_scripts(self) -> None: ... + async def execute(self, raise_on_error: bool = ...): ... + async def discard(self) -> None: ... + async def watch(self, *names: KeyT) -> bool: ... + async def unwatch(self) -> bool: ... + # region acl commands + def acl_cat(self, category: str | None = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_genpass(self, bits: int | None = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_help(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_list(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_log(self, count: int | None = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_log_reset(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_load(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_save(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_setuser( # type: ignore[override] + self, + username: str, + enabled: bool = ..., + nopass: bool = ..., + passwords: Sequence[str] | None = ..., + hashed_passwords: Sequence[str] | None = ..., + categories: Sequence[str] | None = ..., + commands: Sequence[str] | None = ..., + keys: Sequence[str] | None = ..., + channels: Iterable[ChannelT] | None = ..., + selectors: Iterable[tuple[str, KeyT]] | None = ..., + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> Pipeline[_StrType]: ... + def acl_users(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def acl_whoami(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + # endregion + # region cluster commands + def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def readwrite(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def readonly(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + # endregion + # region BasicKey commands + def append(self, key, value) -> Any: ... # type: ignore[override] + def bitcount(self, key: _Key, start: int | None = ..., end: int | None = ..., mode: str | None = ...) -> Any: ... # type: ignore[override] + def bitfield(self, key, default_overflow: Incomplete | None = ...) -> Any: ... # type: ignore[override] + def bitop(self, operation, dest, *keys) -> Any: ... # type: ignore[override] + def bitpos(self, key: _Key, bit: int, start: int | None = ..., end: int | None = ..., mode: str | None = ...) -> Any: ... # type: ignore[override] + def copy(self, source, destination, destination_db: Incomplete | None = ..., replace: bool = ...) -> Any: ... # type: ignore[override] + def decr(self, name, amount: int = ...) -> Any: ... # type: ignore[override] + def decrby(self, name, amount: int = ...) -> Any: ... # type: ignore[override] + def delete(self, *names: _Key) -> Any: ... # type: ignore[override] + def dump(self, name: _Key) -> Any: ... # type: ignore[override] + def exists(self, *names: _Key) -> Any: ... 
# type: ignore[override] + def expire( # type: ignore[override] + self, name: _Key, time: int | timedelta, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ... + ) -> Any: ... + def expireat(self, name, when, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ...) -> Any: ... # type: ignore[override] + def get(self, name: _Key) -> Any: ... # type: ignore[override] + def getdel(self, name: _Key) -> Any: ... # type: ignore[override] + def getex( # type: ignore[override] + self, + name, + ex: Incomplete | None = ..., + px: Incomplete | None = ..., + exat: Incomplete | None = ..., + pxat: Incomplete | None = ..., + persist: bool = ..., + ) -> Any: ... + def getbit(self, name: _Key, offset: int) -> Any: ... # type: ignore[override] + def getrange(self, key, start, end) -> Any: ... # type: ignore[override] + def getset(self, name, value) -> Any: ... # type: ignore[override] + def incr(self, name: _Key, amount: int = ...) -> Any: ... # type: ignore[override] + def incrby(self, name: _Key, amount: int = ...) -> Any: ... # type: ignore[override] + def incrbyfloat(self, name: _Key, amount: float = ...) -> Any: ... # type: ignore[override] + def keys(self, pattern: _Key = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def lmove( # type: ignore[override] + self, first_list: _Key, second_list: _Key, src: Literal["LEFT", "RIGHT"] = ..., dest: Literal["LEFT", "RIGHT"] = ... + ) -> Any: ... + def blmove( # type: ignore[override] + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = ..., + dest: Literal["LEFT", "RIGHT"] = ..., + ) -> Any: ... + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... # type: ignore[override] + def mset(self, mapping: Mapping[_Key, _Value]) -> Any: ... # type: ignore[override] + def msetnx(self, mapping: Mapping[_Key, _Value]) -> Any: ... # type: ignore[override] + def move(self, name: _Key, db: int) -> Any: ... # type: ignore[override] + def persist(self, name: _Key) -> Any: ... # type: ignore[override] + def pexpire( # type: ignore[override] + self, name: _Key, time: int | timedelta, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ... + ) -> Any: ... + def pexpireat( # type: ignore[override] + self, name: _Key, when: int | datetime, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ... + ) -> Any: ... + def psetex(self, name, time_ms, value) -> Any: ... # type: ignore[override] + def pttl(self, name: _Key) -> Any: ... # type: ignore[override] + def hrandfield(self, key, count: Incomplete | None = ..., withvalues: bool = ...) -> Any: ... # type: ignore[override] + def randomkey(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def rename(self, src, dst) -> Any: ... # type: ignore[override] + def renamenx(self, src, dst) -> Any: ... # type: ignore[override] + def restore( # type: ignore[override] + self, + name, + ttl, + value, + replace: bool = ..., + absttl: bool = ..., + idletime: Incomplete | None = ..., + frequency: Incomplete | None = ..., + ) -> Any: ... + def set( # type: ignore[override] + self, + name: _Key, + value: _Value, + ex: None | int | timedelta = ..., + px: None | int | timedelta = ..., + nx: bool = ..., + xx: bool = ..., + keepttl: bool = ..., + get: bool = ..., + exat: Incomplete | None = ..., + pxat: Incomplete | None = ..., + ) -> Any: ... + def setbit(self, name: _Key, offset: int, value: int) -> Any: ... # type: ignore[override] + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Any: ... 
# type: ignore[override] + def setnx(self, name: _Key, value: _Value) -> Any: ... # type: ignore[override] + def setrange(self, name, offset, value) -> Any: ... # type: ignore[override] + def stralgo( # type: ignore[override] + self, + algo, + value1, + value2, + specific_argument: str = ..., + len: bool = ..., + idx: bool = ..., + minmatchlen: Incomplete | None = ..., + withmatchlen: bool = ..., + **kwargs: _CommandOptions, + ) -> Any: ... + def strlen(self, name) -> Any: ... # type: ignore[override] + def substr(self, name, start, end: int = ...) -> Any: ... # type: ignore[override] + def touch(self, *args) -> Any: ... # type: ignore[override] + def ttl(self, name: _Key) -> Any: ... # type: ignore[override] + def type(self, name) -> Any: ... # type: ignore[override] + def unlink(self, *names: _Key) -> Any: ... # type: ignore[override] + # endregion + # region hyperlog commands + def pfadd(self, name: _Key, *values: _Value) -> Any: ... # type: ignore[override] + def pfcount(self, name: _Key) -> Any: ... # type: ignore[override] + def pfmerge(self, dest: _Key, *sources: _Key) -> Any: ... # type: ignore[override] + # endregion + # region hash commands + def hdel(self, name: _Key, *keys: _Key) -> Any: ... # type: ignore[override] + def hexists(self, name: _Key, key: _Key) -> Any: ... # type: ignore[override] + def hget(self, name: _Key, key: _Key) -> Any: ... # type: ignore[override] + def hgetall(self, name: _Key) -> Any: ... # type: ignore[override] + def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> Any: ... # type: ignore[override] + def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> Any: ... # type: ignore[override] + def hkeys(self, name: _Key) -> Any: ... # type: ignore[override] + def hlen(self, name: _Key) -> Any: ... # type: ignore[override] + @overload + def hset( # type: ignore[override] + self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = ..., items: Incomplete | None = ... + ) -> Any: ... + @overload + def hset(self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = ...) -> Any: ... # type: ignore[override] + @overload + def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = ...) -> Any: ... # type: ignore[override] + def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Any: ... # type: ignore[override] + def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Any: ... # type: ignore[override] + def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... # type: ignore[override] + def hvals(self, name: _Key) -> Any: ... # type: ignore[override] + def hstrlen(self, name, key) -> Any: ... # type: ignore[override] + # endregion + # region geo commands + def geoadd(self, name, values, nx: bool = ..., xx: bool = ..., ch: bool = ...) -> Any: ... # type: ignore[override] + def geodist(self, name, place1, place2, unit: Incomplete | None = ...) -> Any: ... # type: ignore[override] + def geohash(self, name, *values) -> Any: ... # type: ignore[override] + def geopos(self, name, *values) -> Any: ... # type: ignore[override] + def georadius( # type: ignore[override] + self, + name, + longitude, + latitude, + radius, + unit: Incomplete | None = ..., + withdist: bool = ..., + withcoord: bool = ..., + withhash: bool = ..., + count: Incomplete | None = ..., + sort: Incomplete | None = ..., + store: Incomplete | None = ..., + store_dist: Incomplete | None = ..., + any: bool = ..., + ) -> Any: ... 
+ def georadiusbymember( # type: ignore[override] + self, + name, + member, + radius, + unit: Incomplete | None = ..., + withdist: bool = ..., + withcoord: bool = ..., + withhash: bool = ..., + count: Incomplete | None = ..., + sort: Incomplete | None = ..., + store: Incomplete | None = ..., + store_dist: Incomplete | None = ..., + any: bool = ..., + ) -> Any: ... + def geosearch( # type: ignore[override] + self, + name, + member: Incomplete | None = ..., + longitude: Incomplete | None = ..., + latitude: Incomplete | None = ..., + unit: str = ..., + radius: Incomplete | None = ..., + width: Incomplete | None = ..., + height: Incomplete | None = ..., + sort: Incomplete | None = ..., + count: Incomplete | None = ..., + any: bool = ..., + withcoord: bool = ..., + withdist: bool = ..., + withhash: bool = ..., + ) -> Any: ... + def geosearchstore( # type: ignore[override] + self, + dest, + name, + member: Incomplete | None = ..., + longitude: Incomplete | None = ..., + latitude: Incomplete | None = ..., + unit: str = ..., + radius: Incomplete | None = ..., + width: Incomplete | None = ..., + height: Incomplete | None = ..., + sort: Incomplete | None = ..., + count: Incomplete | None = ..., + any: bool = ..., + storedist: bool = ..., + ) -> Any: ... + # endregion + # region list commands + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = ...) -> Any: ... # type: ignore[override] + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ... # type: ignore[override] + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = ...) -> Any: ... # type: ignore[override] + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ... # type: ignore[override] + def brpoplpush(self, src, dst, timeout: int | None = ...) -> Any: ... # type: ignore[override] + def lindex(self, name: _Key, index: int) -> Any: ... # type: ignore[override] + def linsert( # type: ignore[override] + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> Any: ... + def llen(self, name: _Key) -> Any: ... # type: ignore[override] + def lpop(self, name, count: int | None = ...) -> Any: ... # type: ignore[override] + def lpush(self, name: _Value, *values: _Value) -> Any: ... # type: ignore[override] + def lpushx(self, name, value) -> Any: ... # type: ignore[override] + def lrange(self, name: _Key, start: int, end: int) -> Any: ... # type: ignore[override] + def lrem(self, name: _Key, count: int, value: _Value) -> Any: ... # type: ignore[override] + def lset(self, name: _Key, index: int, value: _Value) -> Any: ... # type: ignore[override] + def ltrim(self, name: _Key, start: int, end: int) -> Any: ... # type: ignore[override] + def rpop(self, name, count: int | None = ...) -> Any: ... # type: ignore[override] + def rpoplpush(self, src, dst) -> Any: ... # type: ignore[override] + def rpush(self, name: _Value, *values: _Value) -> Any: ... # type: ignore[override] + def rpushx(self, name, value) -> Any: ... # type: ignore[override] + def lpos(self, name, value, rank: Incomplete | None = ..., count: Incomplete | None = ..., maxlen: Incomplete | None = ...) -> Any: ... 
# type: ignore[override] + @overload # type: ignore[override] + def sort( + self, + name: _Key, + start: int | None = ..., + num: int | None = ..., + by: _Key | None = ..., + get: _Key | Sequence[_Key] | None = ..., + desc: bool = ..., + alpha: bool = ..., + store: None = ..., + groups: bool = ..., + ) -> list[_StrType]: ... + @overload # type: ignore[override] + def sort( + self, + name: _Key, + start: int | None = ..., + num: int | None = ..., + by: _Key | None = ..., + get: _Key | Sequence[_Key] | None = ..., + desc: bool = ..., + alpha: bool = ..., + *, + store: _Key, + groups: bool = ..., + ) -> Any: ... + @overload # type: ignore[override] + def sort( + self, + name: _Key, + start: int | None, + num: int | None, + by: _Key | None, + get: _Key | Sequence[_Key] | None, + desc: bool, + alpha: bool, + store: _Key, + groups: bool = ..., + ) -> Any: ... + # endregion + # region scan commands + def scan( # type: ignore[override] + self, + cursor: int = ..., + match: _Key | None = ..., + count: int | None = ..., + _type: str | None = ..., + **kwargs: _CommandOptions, + ) -> Any: ... + def sscan(self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ...) -> Any: ... # type: ignore[override] + def hscan(self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ...) -> Any: ... # type: ignore[override] + @overload # type: ignore[override] + def zscan(self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ...) -> Any: ... + @overload # type: ignore[override] + def zscan( + self, + name: _Key, + cursor: int = ..., + match: _Key | None = ..., + count: int | None = ..., + *, + score_cast_func: Callable[[_StrType], Any], + ) -> Any: ... + @overload # type: ignore[override] + def zscan( + self, name: _Key, cursor: int, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], Any] + ) -> Any: ... + # endregion + # region set commands + def sadd(self, name: _Key, *values: _Value) -> Any: ... # type: ignore[override] + def scard(self, name: _Key) -> Any: ... # type: ignore[override] + def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... # type: ignore[override] + def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... # type: ignore[override] + def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... # type: ignore[override] + def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... # type: ignore[override] + def sismember(self, name: _Key, value: _Value) -> Any: ... # type: ignore[override] + def smembers(self, name: _Key) -> Any: ... # type: ignore[override] + def smismember(self, name, values, *args) -> Any: ... # type: ignore[override] + def smove(self, src: _Key, dst: _Key, value: _Value) -> Any: ... # type: ignore[override] + @overload # type: ignore[override] + def spop(self, name: _Key, count: None = ...) -> Any: ... + @overload # type: ignore[override] + def spop(self, name: _Key, count: int) -> Any: ... + @overload # type: ignore[override] + def srandmember(self, name: _Key, number: None = ...) -> Any: ... + @overload # type: ignore[override] + def srandmember(self, name: _Key, number: int) -> Any: ... + def srem(self, name: _Key, *values: _Value) -> Any: ... # type: ignore[override] + def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... # type: ignore[override] + def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... 
# type: ignore[override] + # endregion + # region stream commands + def xack(self, name, groupname, *ids) -> Any: ... # type: ignore[override] + def xadd( # type: ignore[override] + self, + name, + fields, + id: str = ..., + maxlen=..., + approximate: bool = ..., + nomkstream: bool = ..., + minid: Incomplete | None = ..., + limit: Incomplete | None = ..., + ) -> Any: ... + def xautoclaim( # type: ignore[override] + self, + name, + groupname, + consumername, + min_idle_time, + start_id: StreamIdT = ..., + count: Incomplete | None = ..., + justid: bool = ..., + ) -> Any: ... + def xclaim( # type: ignore[override] + self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=... + ) -> Any: ... + def xdel(self, name, *ids) -> Any: ... # type: ignore[override] + def xgroup_create(self, name, groupname, id: str = ..., mkstream: bool = ..., entries_read: int | None = ...) -> Any: ... # type: ignore[override] + def xgroup_delconsumer(self, name, groupname, consumername) -> Any: ... # type: ignore[override] + def xgroup_destroy(self, name, groupname) -> Any: ... # type: ignore[override] + def xgroup_createconsumer(self, name, groupname, consumername) -> Any: ... # type: ignore[override] + def xgroup_setid(self, name, groupname, id, entries_read: int | None = ...) -> Any: ... # type: ignore[override] + def xinfo_consumers(self, name, groupname) -> Any: ... # type: ignore[override] + def xinfo_groups(self, name) -> Any: ... # type: ignore[override] + def xinfo_stream(self, name, full: bool = ...) -> Any: ... # type: ignore[override] + def xlen(self, name: _Key) -> Any: ... # type: ignore[override] + def xpending(self, name, groupname) -> Any: ... # type: ignore[override] + def xpending_range( # type: ignore[override] + self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = ..., idle: int | None = ... + ) -> Any: ... + def xrange(self, name, min: str = ..., max: str = ..., count: Incomplete | None = ...) -> Any: ... # type: ignore[override] + def xread(self, streams, count: Incomplete | None = ..., block: Incomplete | None = ...) -> Any: ... # type: ignore[override] + def xreadgroup( # type: ignore[override] + self, groupname, consumername, streams, count: Incomplete | None = ..., block: Incomplete | None = ..., noack: bool = ... + ) -> Any: ... + def xrevrange(self, name, max: str = ..., min: str = ..., count: Incomplete | None = ...) -> Any: ... # type: ignore[override] + def xtrim( # type: ignore[override] + self, name, maxlen: int | None = ..., approximate: bool = ..., minid: Incomplete | None = ..., limit: int | None = ... + ) -> Any: ... + # endregion + # region sorted set commands + def zadd( # type: ignore[override] + self, + name: _Key, + mapping: Mapping[_Key, _Value], + nx: bool = ..., + xx: bool = ..., + ch: bool = ..., + incr: bool = ..., + gt: Incomplete | None = ..., + lt: Incomplete | None = ..., + ) -> Any: ... + def zcard(self, name: _Key) -> Any: ... # type: ignore[override] + def zcount(self, name: _Key, min: _Value, max: _Value) -> Any: ... # type: ignore[override] + def zdiff(self, keys, withscores: bool = ...) -> Any: ... # type: ignore[override] + def zdiffstore(self, dest, keys) -> Any: ... # type: ignore[override] + def zincrby(self, name: _Key, amount: float, value: _Value) -> Any: ... # type: ignore[override] + def zinter(self, keys, aggregate: Incomplete | None = ..., withscores: bool = ...) -> Any: ... 
# type: ignore[override] + def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> Any: ... # type: ignore[override] + def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Any: ... # type: ignore[override] + def zpopmax(self, name: _Key, count: int | None = ...) -> Any: ... # type: ignore[override] + def zpopmin(self, name: _Key, count: int | None = ...) -> Any: ... # type: ignore[override] + def zrandmember(self, key, count: Incomplete | None = ..., withscores: bool = ...) -> Any: ... # type: ignore[override] + @overload # type: ignore[override] + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> Any: ... + @overload # type: ignore[override] + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ... + @overload # type: ignore[override] + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> Any: ... + @overload # type: ignore[override] + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ... + @overload # type: ignore[override] + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], Any], + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> Any: ... + @overload # type: ignore[override] + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> Any: ... + @overload # type: ignore[override] + def zrange( + self, + name: _Key, + start: int, + end: int, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], None], + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> Any: ... + @overload # type: ignore[override] + def zrange( + self, + name: _Key, + start: int, + end: int, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> Any: ... + @overload # type: ignore[override] + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> Any: ... + @overload # type: ignore[override] + def zrevrange( + self, name: _Key, start: int, end: int, withscores: Literal[True], score_cast_func: Callable[[_StrType], None] + ) -> Any: ... + @overload # type: ignore[override] + def zrevrange(self, name: _Key, start: int, end: int, withscores: Literal[True]) -> Any: ... + @overload # type: ignore[override] + def zrevrange( + self, name: _Key, start: int, end: int, withscores: bool = ..., score_cast_func: Callable[[Any], Any] = ... + ) -> Any: ... + def zrangestore( # type: ignore[override] + self, + dest, + name, + start, + end, + byscore: bool = ..., + bylex: bool = ..., + desc: bool = ..., + offset: Incomplete | None = ..., + num: Incomplete | None = ..., + ) -> Any: ... + def zrangebylex(self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ...) -> Any: ... 
# type: ignore[override] + def zrevrangebylex(self, name: _Key, max: _Value, min: _Value, start: int | None = ..., num: int | None = ...) -> Any: ... # type: ignore[override] + @overload # type: ignore[override] + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = ..., + num: int | None = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], None], + ) -> Any: ... + @overload # type: ignore[override] + def zrangebyscore( + self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ..., *, withscores: Literal[True] + ) -> Any: ... + @overload # type: ignore[override] + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Any: ... + @overload # type: ignore[override] + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = ..., + num: int | None = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], Any], + ) -> Any: ... + @overload # type: ignore[override] + def zrevrangebyscore( + self, name: _Key, max: _Value, min: _Value, start: int | None = ..., num: int | None = ..., *, withscores: Literal[True] + ) -> Any: ... + @overload # type: ignore[override] + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Any: ... + def zrank(self, name: _Key, value: _Value) -> Any: ... # type: ignore[override] + def zrem(self, name: _Key, *values: _Value) -> Any: ... # type: ignore[override] + def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Any: ... # type: ignore[override] + def zremrangebyrank(self, name: _Key, min: int, max: int) -> Any: ... # type: ignore[override] + def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Any: ... # type: ignore[override] + def zrevrank(self, name: _Key, value: _Value) -> Any: ... # type: ignore[override] + def zscore(self, name: _Key, value: _Value) -> Any: ... # type: ignore[override] + def zunion(self, keys, aggregate: Incomplete | None = ..., withscores: bool = ...) -> Any: ... # type: ignore[override] + def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> Any: ... # type: ignore[override] + def zmscore(self, key, members) -> Any: ... # type: ignore[override] + # endregion + # region management commands + def bgrewriteaof(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def bgsave(self, schedule: bool = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def role(self) -> Any: ... # type: ignore[override] + def client_kill(self, address: str, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_kill_filter( # type: ignore[override] + self, + _id: Incomplete | None = ..., + _type: Incomplete | None = ..., + addr: Incomplete | None = ..., + skipme: Incomplete | None = ..., + laddr: Incomplete | None = ..., + user: Incomplete | None = ..., + **kwargs: _CommandOptions, + ) -> Any: ... + def client_info(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_list(self, _type: str | None = ..., client_id: list[str] = ..., **kwargs: _CommandOptions) -> Any: ... 
# type: ignore[override] + def client_getname(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_getredir(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_reply(self, reply, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_id(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_tracking_on( # type: ignore[override] + self, + clientid: Incomplete | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + ) -> Any: ... + def client_tracking_off( # type: ignore[override] + self, + clientid: Incomplete | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + ) -> Any: ... + def client_tracking( # type: ignore[override] + self, + on: bool = ..., + clientid: Incomplete | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + **kwargs: _CommandOptions, + ) -> Any: ... + def client_trackinginfo(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_setname(self, name: str, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_unblock(self, client_id, error: bool = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_pause(self, timeout, all: bool = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def client_unpause(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def command(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def command_info(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def command_count(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def config_get(self, pattern: PatternT = ..., *args: PatternT, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def config_resetstat(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def config_rewrite(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def dbsize(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def debug_object(self, key, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def debug_segfault(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def echo(self, value: _Value, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def flushall(self, asynchronous: bool = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def flushdb(self, asynchronous: bool = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def sync(self) -> Any: ... # type: ignore[override] + def psync(self, replicationid, offset) -> Any: ... # type: ignore[override] + def swapdb(self, first, second, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def select(self, index, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def info(self, section: _Key | None = ..., *args: _Key, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def lastsave(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> Any: ... 
# type: ignore[override] + def migrate( # type: ignore[override] + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = ..., + replace: bool = ..., + auth: Incomplete | None = ..., + **kwargs: _CommandOptions, + ) -> Any: ... + def object(self, infotype, key, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def memory_doctor(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def memory_help(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def memory_stats(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def memory_malloc_stats(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def memory_usage(self, key, samples: Incomplete | None = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def memory_purge(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def ping(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def quit(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def replicaof(self, *args, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def save(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def shutdown( # type: ignore[override] + self, + save: bool = ..., + nosave: bool = ..., + now: bool = ..., + force: bool = ..., + abort: bool = ..., + **kwargs: _CommandOptions, + ) -> Any: ... + def slaveof(self, host: Incomplete | None = ..., port: Incomplete | None = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def slowlog_get(self, num: Incomplete | None = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def slowlog_len(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def slowlog_reset(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def time(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def wait(self, num_replicas, timeout, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + # endregion + # region module commands + def module_load(self, path, *args) -> Any: ... # type: ignore[override] + def module_unload(self, name) -> Any: ... # type: ignore[override] + def module_list(self) -> Any: ... # type: ignore[override] + def command_getkeys(self, *args) -> Any: ... # type: ignore[override] + # endregion + # region pubsub commands + def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def pubsub_channels(self, pattern: _Key = ..., **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def pubsub_numpat(self, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> Any: ... # type: ignore[override] + # endregion + # region script commands + def eval(self, script, numkeys, *keys_and_args) -> Any: ... # type: ignore[override] + def evalsha(self, sha, numkeys, *keys_and_args) -> Any: ... # type: ignore[override] + def script_exists(self, *args) -> Any: ... # type: ignore[override] + def script_debug(self, *args) -> Any: ... # type: ignore[override] + def script_flush(self, sync_type: Incomplete | None = ...) -> Any: ... # type: ignore[override] + def script_kill(self) -> Any: ... # type: ignore[override] + def script_load(self, script) -> Any: ... # type: ignore[override] + def register_script(self, script: str | _StrType) -> Any: ... 
# type: ignore[override] + # endregion diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/cluster.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/cluster.pyi new file mode 100644 index 00000000..a5a3f563 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/cluster.pyi @@ -0,0 +1,164 @@ +from _typeshed import Incomplete +from collections.abc import Awaitable, Callable, Mapping +from typing import Any, Generic +from typing_extensions import Self + +from redis.asyncio.client import ResponseCallbackT +from redis.asyncio.connection import BaseParser, Connection, Encoder +from redis.asyncio.parser import CommandsParser +from redis.client import AbstractRedis +from redis.cluster import AbstractRedisCluster, LoadBalancer + +# TODO: add AsyncRedisClusterCommands stubs +# from redis.commands import AsyncRedisClusterCommands +from redis.commands.core import _StrType +from redis.credentials import CredentialProvider +from redis.retry import Retry +from redis.typing import AnyKeyT, EncodableT, KeyT + +# It uses `DefaultParser` in real life, but it is a dynamic base class. +class ClusterParser(BaseParser): ... + +class RedisCluster(AbstractRedis, AbstractRedisCluster, Generic[_StrType]): # TODO: AsyncRedisClusterCommands + retry: Retry | None + connection_kwargs: dict[str, Any] + nodes_manager: NodesManager + encoder: Encoder + read_from_replicas: bool + reinitialize_steps: int + cluster_error_retry_attempts: int + reinitialize_counter: int + commands_parser: CommandsParser + node_flags: set[str] + command_flags: dict[str, str] + response_callbacks: Incomplete + result_callbacks: dict[str, Callable[[Incomplete, Incomplete], Incomplete]] + def __init__( + self, + host: str | None = ..., + port: str | int = ..., + # Cluster related kwargs + startup_nodes: list[ClusterNode] | None = ..., + require_full_coverage: bool = ..., + read_from_replicas: bool = ..., + reinitialize_steps: int = ..., + cluster_error_retry_attempts: int = ..., + connection_error_retry_attempts: int = ..., + max_connections: int = ..., + # Client related kwargs + db: str | int = ..., + path: str | None = ..., + credential_provider: CredentialProvider | None = ..., + username: str | None = ..., + password: str | None = ..., + client_name: str | None = ..., + # Encoding related kwargs + encoding: str = ..., + encoding_errors: str = ..., + decode_responses: bool = ..., + # Connection related kwargs + health_check_interval: float = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool = ..., + socket_keepalive_options: Mapping[int, int | bytes] | None = ..., + socket_timeout: float | None = ..., + retry: Retry | None = ..., + retry_on_error: list[Exception] | None = ..., + # SSL related kwargs + ssl: bool = ..., + ssl_ca_certs: str | None = ..., + ssl_ca_data: str | None = ..., + ssl_cert_reqs: str = ..., + ssl_certfile: str | None = ..., + ssl_check_hostname: bool = ..., + ssl_keyfile: str | None = ..., + ) -> None: ... + async def initialize(self) -> Self: ... + async def close(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, exc_type: object, exc_value: object, traceback: object) -> None: ... + def __await__(self) -> Awaitable[Self]: ... + def __del__(self) -> None: ... + async def on_connect(self, connection: Connection) -> None: ... + def get_nodes(self) -> list[ClusterNode]: ... 
+ def get_primaries(self) -> list[ClusterNode]: ... + def get_replicas(self) -> list[ClusterNode]: ... + def get_random_node(self) -> ClusterNode: ... + def get_default_node(self) -> ClusterNode: ... + def set_default_node(self, node: ClusterNode) -> None: ... + def get_node(self, host: str | None = ..., port: int | None = ..., node_name: str | None = ...) -> ClusterNode | None: ... + def get_node_from_key(self, key: str, replica: bool = ...) -> ClusterNode | None: ... + def keyslot(self, key: EncodableT) -> int: ... + def get_encoder(self) -> Encoder: ... + def get_connection_kwargs(self) -> dict[str, Any | None]: ... + def set_response_callback(self, command: str, callback: ResponseCallbackT) -> None: ... + async def execute_command(self, *args: EncodableT, **kwargs: Any) -> Any: ... + def pipeline(self, transaction: Any | None = ..., shard_hint: Any | None = ...) -> ClusterPipeline[_StrType]: ... + @classmethod + def from_url(cls, url: str, **kwargs) -> Self: ... + +class ClusterNode: + host: str + port: str | int + name: str + server_type: str | None + max_connections: int + connection_class: type[Connection] + connection_kwargs: dict[str, Any] + response_callbacks: dict[Incomplete, Incomplete] + def __init__( + self, + host: str, + port: str | int, + server_type: str | None = ..., + *, + max_connections: int = ..., + connection_class: type[Connection] = ..., + **connection_kwargs: Any, + ) -> None: ... + def __eq__(self, obj: object) -> bool: ... + def __del__(self) -> None: ... + async def disconnect(self) -> None: ... + def acquire_connection(self) -> Connection: ... + async def parse_response(self, connection: Connection, command: str, **kwargs: Any) -> Any: ... + async def execute_command(self, *args: Any, **kwargs: Any) -> Any: ... + async def execute_pipeline(self, commands: list[PipelineCommand]) -> bool: ... + +class NodesManager: + startup_nodes: dict[str, ClusterNode] + require_full_coverage: bool + connection_kwargs: dict[str, Any] + default_node: ClusterNode | None + nodes_cache: dict[str, ClusterNode] + slots_cache: dict[int, list[ClusterNode]] + read_load_balancer: LoadBalancer + def __init__( + self, startup_nodes: list[ClusterNode], require_full_coverage: bool, connection_kwargs: dict[str, Any] + ) -> None: ... + def get_node(self, host: str | None = ..., port: int | None = ..., node_name: str | None = ...) -> ClusterNode | None: ... + def set_nodes(self, old: dict[str, ClusterNode], new: dict[str, ClusterNode], remove_old: bool = ...) -> None: ... + def get_node_from_slot(self, slot: int, read_from_replicas: bool = ...) -> ClusterNode: ... + def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ... + async def initialize(self) -> None: ... + async def close(self, attr: str = ...) -> None: ... + +class ClusterPipeline(AbstractRedis, AbstractRedisCluster, Generic[_StrType]): # TODO: AsyncRedisClusterCommands + def __init__(self, client: RedisCluster[_StrType]) -> None: ... + async def initialize(self) -> Self: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, exc_type: object, exc_value: object, traceback: object) -> None: ... + def __await__(self) -> Awaitable[Self]: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: object, exc_value: object, traceback: object) -> None: ... + def __bool__(self) -> bool: ... + def __len__(self) -> int: ... + def execute_command(self, *args: KeyT | EncodableT, **kwargs: Any) -> Self: ... + async def execute(self, raise_on_error: bool = ..., allow_redirections: bool = ...) 
-> list[Any]: ... + def mset_nonatomic(self, mapping: Mapping[AnyKeyT, EncodableT]) -> Self: ... + +class PipelineCommand: + args: Any + kwargs: Any + position: int + result: Exception | None | Any + def __init__(self, position: int, *args: Any, **kwargs: Any) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/connection.pyi new file mode 100644 index 00000000..435c5854 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/connection.pyi @@ -0,0 +1,282 @@ +import asyncio +import enum +import ssl +from _typeshed import Incomplete +from collections.abc import Callable, Iterable, Mapping +from typing import Any, Protocol, overload +from typing_extensions import Literal, TypeAlias, TypedDict + +from redis import RedisError +from redis.asyncio.retry import Retry +from redis.credentials import CredentialProvider +from redis.exceptions import ResponseError +from redis.typing import EncodableT, EncodedT + +hiredis: Any +SYM_STAR: bytes +SYM_DOLLAR: bytes +SYM_CRLF: bytes +SYM_LF: bytes +SYM_EMPTY: bytes +SERVER_CLOSED_CONNECTION_ERROR: str + +class _Sentinel(enum.Enum): + sentinel: Any + +SENTINEL: Any +MODULE_LOAD_ERROR: str +NO_SUCH_MODULE_ERROR: str +MODULE_UNLOAD_NOT_POSSIBLE_ERROR: str +MODULE_EXPORTS_DATA_TYPES_ERROR: str + +class Encoder: + encoding: Any + encoding_errors: Any + decode_responses: Any + def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ... + def encode(self, value: EncodableT) -> EncodedT: ... + def decode(self, value: EncodableT, force: bool = ...) -> EncodableT: ... + +ExceptionMappingT: TypeAlias = Mapping[str, type[Exception] | Mapping[str, type[Exception]]] + +class BaseParser: + EXCEPTION_CLASSES: ExceptionMappingT + def __init__(self, socket_read_size: int) -> None: ... + def __del__(self) -> None: ... + def parse_error(self, response: str) -> ResponseError: ... + def on_disconnect(self) -> None: ... + def on_connect(self, connection: Connection): ... + async def read_response(self, disable_decoding: bool = ...) -> EncodableT | ResponseError | list[EncodableT] | None: ... + +class PythonParser(BaseParser): + encoder: Any + def __init__(self, socket_read_size: int) -> None: ... + def on_connect(self, connection: Connection): ... + def on_disconnect(self) -> None: ... + async def read_response(self, disable_decoding: bool = ...) -> EncodableT | ResponseError | None: ... + +class HiredisParser(BaseParser): + def __init__(self, socket_read_size: int) -> None: ... + def on_connect(self, connection: Connection): ... + def on_disconnect(self) -> None: ... + async def read_from_socket(self) -> Literal[True]: ... + async def read_response(self, disable_decoding: bool = ...) -> EncodableT | list[EncodableT]: ... + +DefaultParser: type[PythonParser | HiredisParser] + +class ConnectCallbackProtocol(Protocol): + def __call__(self, connection: Connection): ... + +class AsyncConnectCallbackProtocol(Protocol): + async def __call__(self, connection: Connection): ... 
+ +ConnectCallbackT: TypeAlias = ConnectCallbackProtocol | AsyncConnectCallbackProtocol + +class Connection: + pid: Any + host: Any + port: Any + db: Any + username: Any + client_name: Any + password: Any + socket_timeout: Any + socket_connect_timeout: Any + socket_keepalive: Any + socket_keepalive_options: Any + socket_type: Any + retry_on_timeout: Any + retry_on_error: list[type[RedisError]] + retry: Retry + health_check_interval: Any + next_health_check: int + ssl_context: Any + encoder: Any + redis_connect_func: ConnectCallbackT | None + def __init__( + self, + *, + host: str = ..., + port: str | int = ..., + db: str | int = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool = ..., + socket_keepalive_options: Mapping[int, int | bytes] | None = ..., + socket_type: int = ..., + retry_on_timeout: bool = ..., + retry_on_error: list[type[RedisError]] | _Sentinel = ..., + encoding: str = ..., + encoding_errors: str = ..., + decode_responses: bool = ..., + parser_class: type[BaseParser] = ..., + socket_read_size: int = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + redis_connect_func: ConnectCallbackT | None = ..., + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = ..., + ) -> None: ... + def repr_pieces(self): ... + def __del__(self) -> None: ... + @property + def is_connected(self): ... + def register_connect_callback(self, callback) -> None: ... + def clear_connect_callbacks(self) -> None: ... + def set_parser(self, parser_class) -> None: ... + async def connect(self) -> None: ... + async def on_connect(self) -> None: ... + async def disconnect(self, nowait: bool = ...) -> None: ... + async def check_health(self) -> None: ... + async def send_packed_command(self, command: bytes | str | Iterable[bytes], check_health: bool = ...): ... + async def send_command(self, *args, **kwargs) -> None: ... + @overload + async def read_response(self, *, timeout: float) -> Incomplete | None: ... + @overload + async def read_response(self, disable_decoding: bool, timeout: float) -> Incomplete | None: ... + @overload + async def read_response(self, disable_decoding: bool = False, timeout: None = None): ... + def pack_command(self, *args: EncodableT) -> list[bytes]: ... + def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> list[bytes]: ... + +class SSLConnection(Connection): + ssl_context: Any + def __init__( + self, + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str = ..., + ssl_ca_certs: str | None = ..., + ssl_ca_data: str | None = ..., + ssl_check_hostname: bool = ..., + **kwargs, + ) -> None: ... + @property + def keyfile(self): ... + @property + def certfile(self): ... + @property + def cert_reqs(self): ... + @property + def ca_certs(self): ... + @property + def ca_data(self): ... + @property + def check_hostname(self): ... + +class RedisSSLContext: + keyfile: Any + certfile: Any + cert_reqs: Any + ca_certs: Any + ca_data: Any + check_hostname: Any + context: Any + def __init__( + self, + keyfile: str | None = ..., + certfile: str | None = ..., + cert_reqs: str | None = ..., + ca_certs: str | None = ..., + ca_data: str | None = ..., + check_hostname: bool = ..., + ) -> None: ... + def get(self) -> ssl.SSLContext: ... 
+ +class UnixDomainSocketConnection(Connection): + pid: Any + path: Any + db: Any + username: Any + client_name: Any + password: Any + socket_timeout: Any + socket_connect_timeout: Any + retry_on_timeout: Any + retry_on_error: list[type[RedisError]] + retry: Any + health_check_interval: Any + next_health_check: int + redis_connect_func: ConnectCallbackT | None + encoder: Any + def __init__( + self, + *, + path: str = ..., + db: str | int = ..., + username: str | None = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + decode_responses: bool = ..., + retry_on_timeout: bool = ..., + retry_on_error: list[type[RedisError]] | _Sentinel = ..., + parser_class: type[BaseParser] = ..., + socket_read_size: int = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + retry: Retry | None = ..., + redis_connect_func: ConnectCallbackT | None = ..., + credential_provider: CredentialProvider | None = ..., + ) -> None: ... + def repr_pieces(self) -> Iterable[tuple[str, str | int]]: ... + +FALSE_STRINGS: Any + +def to_bool(value) -> bool | None: ... + +URL_QUERY_ARGUMENT_PARSERS: Mapping[str, Callable[..., object]] + +class ConnectKwargs(TypedDict): + username: str + password: str + connection_class: type[Connection] + host: str + port: int + db: int + path: str + +def parse_url(url: str) -> ConnectKwargs: ... + +class ConnectionPool: + @classmethod + def from_url(cls, url: str, **kwargs) -> ConnectionPool: ... + connection_class: Any + connection_kwargs: Any + max_connections: Any + encoder_class: Any + def __init__( + self, connection_class: type[Connection] = ..., max_connections: int | None = ..., **connection_kwargs + ) -> None: ... + pid: Any + def reset(self) -> None: ... + async def get_connection(self, command_name, *keys, **options): ... + def get_encoder(self): ... + def make_connection(self): ... + async def release(self, connection: Connection): ... + def owns_connection(self, connection: Connection): ... + async def disconnect(self, inuse_connections: bool = ...): ... + +class BlockingConnectionPool(ConnectionPool): + queue_class: Any + timeout: Any + def __init__( + self, + max_connections: int = ..., + timeout: int | None = ..., + connection_class: type[Connection] = ..., + queue_class: type[asyncio.Queue[Any]] = ..., + **connection_kwargs, + ) -> None: ... + pool: Any + pid: Any + def reset(self) -> None: ... + def make_connection(self): ... + async def get_connection(self, command_name, *keys, **options): ... + async def release(self, connection: Connection): ... + async def disconnect(self, inuse_connections: bool = ...): ... 
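The connection stubs above (Connection, SSLConnection, ConnectionPool, BlockingConnectionPool, parse_url) describe redis-py's asyncio connection layer. A minimal illustrative sketch, not part of the vendored typeshed data, of the kind of user code these signatures type-check; the URL is a placeholder and nothing below opens a socket:

from redis.asyncio.connection import ConnectionPool, parse_url

# Parse a placeholder URL into connection keyword arguments (pure string parsing, no I/O).
kwargs = parse_url("redis://localhost:6379/0")

# Build a pool from the same URL; actual connections are only created lazily,
# when a command is issued through a client that uses this pool.
pool = ConnectionPool.from_url("redis://localhost:6379/0")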
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/lock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/lock.pyi new file mode 100644 index 00000000..aa90042b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/lock.pyi @@ -0,0 +1,51 @@ +import threading +from collections.abc import Awaitable +from types import SimpleNamespace, TracebackType +from typing import Any, ClassVar +from typing_extensions import Self + +from redis.asyncio import Redis +from redis.commands.core import AsyncScript + +class Lock: + lua_release: ClassVar[AsyncScript | None] + lua_extend: ClassVar[AsyncScript | None] + lua_reacquire: ClassVar[AsyncScript | None] + LUA_RELEASE_SCRIPT: ClassVar[str] + LUA_EXTEND_SCRIPT: ClassVar[str] + LUA_REACQUIRE_SCRIPT: ClassVar[str] + redis: Redis[Any] + name: str | bytes | memoryview + timeout: float | None + sleep: float + blocking: bool + blocking_timeout: float | None + thread_local: bool + local: threading.local | SimpleNamespace + def __init__( + self, + redis: Redis[Any], + name: str | bytes | memoryview, + timeout: float | None = ..., + sleep: float = ..., + blocking: bool = ..., + blocking_timeout: float | None = ..., + thread_local: bool = ..., + ) -> None: ... + def register_scripts(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + async def acquire( + self, blocking: bool | None = ..., blocking_timeout: float | None = ..., token: str | bytes | None = ... + ) -> bool: ... + async def do_acquire(self, token: str | bytes) -> bool: ... + async def locked(self) -> bool: ... + async def owned(self) -> bool: ... + def release(self) -> Awaitable[None]: ... + async def do_release(self, expected_token: bytes) -> None: ... + def extend(self, additional_time: float, replace_ttl: bool = ...) -> Awaitable[bool]: ... + async def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ... + def reacquire(self) -> Awaitable[bool]: ... + async def do_reacquire(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/parser.pyi new file mode 100644 index 00000000..6053518e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/parser.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete +from typing import Any + +# TODO: define and use: +# from redis.asyncio.cluster import ClusterNode + +class CommandsParser: + async def initialize(self, node: Incomplete | None = ...) -> None: ... # TODO: ClusterNode + async def get_keys(self, *args: Any) -> tuple[str, ...] | None: ... 
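The Lock stub above mirrors redis-py's asyncio lock, which is normally obtained through Redis.lock() and used as an async context manager. A hedged sketch, assuming redis-py is installed and a Redis server is reachable at the default localhost:6379; the key name is a placeholder:

import asyncio

from redis.asyncio import Redis

async def main() -> None:
    client = Redis()  # no connection is opened until the first command is sent
    # Hold "resource-key" for at most 10 seconds, waiting up to 1 second to acquire it.
    async with client.lock("resource-key", timeout=10.0, blocking_timeout=1.0):
        ...  # critical section guarded by the distributed lock
    await client.close()

asyncio.run(main())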
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/retry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/retry.pyi new file mode 100644 index 00000000..1579606a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/retry.pyi @@ -0,0 +1,12 @@ +from collections.abc import Awaitable, Callable, Iterable +from typing import TypeVar + +from redis.backoff import AbstractBackoff +from redis.exceptions import RedisError + +_T = TypeVar("_T") + +class Retry: + def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[RedisError], ...] = ...) -> None: ... + def update_supported_errors(self, specified_errors: Iterable[type[RedisError]]) -> None: ... + async def call_with_retry(self, do: Callable[[], Awaitable[_T]], fail: Callable[[RedisError], Awaitable[object]]) -> _T: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/sentinel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/sentinel.pyi new file mode 100644 index 00000000..e452986c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/sentinel.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete +from collections.abc import AsyncIterator, Iterable, Mapping, Sequence +from typing import Any, overload + +from redis.asyncio.client import Redis +from redis.asyncio.connection import Connection, ConnectionPool, SSLConnection +from redis.commands import AsyncSentinelCommands +from redis.exceptions import ConnectionError +from redis.typing import EncodableT + +class MasterNotFoundError(ConnectionError): ... +class SlaveNotFoundError(ConnectionError): ... + +class SentinelManagedConnection(Connection): + connection_pool: Any + def __init__(self, **kwargs) -> None: ... + async def connect_to(self, address) -> None: ... + async def connect(self): ... + @overload + async def read_response(self, *, timeout: float) -> Incomplete | None: ... + @overload + async def read_response(self, disable_decoding: bool, timeout: float) -> Incomplete | None: ... + @overload + async def read_response(self, disable_decoding: bool = False, timeout: None = None): ... + +class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ... + +class SentinelConnectionPool(ConnectionPool): + is_master: Any + check_connection: Any + service_name: Any + sentinel_manager: Any + master_address: Any + slave_rr_counter: Any + def __init__(self, service_name, sentinel_manager, **kwargs) -> None: ... + def reset(self) -> None: ... + def owns_connection(self, connection: Connection): ... + async def get_master_address(self): ... + async def rotate_slaves(self) -> AsyncIterator[Any]: ... + +class Sentinel(AsyncSentinelCommands): + sentinel_kwargs: Any + sentinels: Any + min_other_sentinels: Any + connection_kwargs: Any + def __init__( + self, sentinels, min_other_sentinels: int = ..., sentinel_kwargs: Incomplete | None = ..., **connection_kwargs + ) -> None: ... + async def execute_command(self, *args, **kwargs): ... + def check_master_state(self, state: dict[Any, Any], service_name: str) -> bool: ... + async def discover_master(self, service_name: str): ... + def filter_slaves(self, slaves: Iterable[Mapping[Any, Any]]) -> Sequence[tuple[EncodableT, EncodableT]]: ... 
+ async def discover_slaves(self, service_name: str) -> Sequence[tuple[EncodableT, EncodableT]]: ... + def master_for( + self, + service_name: str, + redis_class: type[Redis[Any]] = ..., + connection_pool_class: type[SentinelConnectionPool] = ..., + **kwargs, + ): ... + def slave_for( + self, + service_name: str, + redis_class: type[Redis[Any]] = ..., + connection_pool_class: type[SentinelConnectionPool] = ..., + **kwargs, + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/utils.pyi new file mode 100644 index 00000000..f63aa2da --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/asyncio/utils.pyi @@ -0,0 +1,12 @@ +from typing import Any, Generic + +from redis.asyncio.client import Pipeline, Redis +from redis.client import _StrType + +def from_url(url: str, **kwargs) -> Redis[Any]: ... + +class pipeline(Generic[_StrType]): + p: Pipeline[_StrType] + def __init__(self, redis_obj: Redis[_StrType]) -> None: ... + async def __aenter__(self) -> Pipeline[_StrType]: ... + async def __aexit__(self, exc_type: object, exc_value: object, traceback: object) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/backoff.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/backoff.pyi new file mode 100644 index 00000000..03b559e5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/backoff.pyi @@ -0,0 +1,29 @@ +from abc import ABC, abstractmethod + +class AbstractBackoff(ABC): + def reset(self) -> None: ... + @abstractmethod + def compute(self, failures: int) -> float: ... + +class ConstantBackoff(AbstractBackoff): + def __init__(self, backoff: int) -> None: ... + def compute(self, failures: int) -> float: ... + +class NoBackoff(ConstantBackoff): + def __init__(self) -> None: ... + +class ExponentialBackoff(AbstractBackoff): + def __init__(self, cap: float = ..., base: float = ...) -> None: ... + def compute(self, failures: int) -> float: ... + +class FullJitterBackoff(AbstractBackoff): + def __init__(self, cap: float = ..., base: float = ...) -> None: ... + def compute(self, failures: int) -> float: ... + +class EqualJitterBackoff(AbstractBackoff): + def __init__(self, cap: float = ..., base: float = ...) -> None: ... + def compute(self, failures: int) -> float: ... + +class DecorrelatedJitterBackoff(AbstractBackoff): + def __init__(self, cap: float = ..., base: float = ...) -> None: ... + def compute(self, failures: int) -> float: ... 
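backoff.pyi above stubs the backoff strategies that pair with the Retry helper (stubbed in redis/asyncio/retry.pyi earlier); a Retry instance can then be handed to a client or connection via its retry= parameter, as the Connection and RedisCluster signatures above show. An illustrative sketch only, not part of the vendored stubs; constructing these objects performs no I/O:

from redis.asyncio.retry import Retry
from redis.backoff import ExponentialBackoff

# Retry a failing call up to 3 times, sleeping exponentially longer between
# attempts: roughly base * 2**failures seconds, capped at 1 second.
retry = Retry(ExponentialBackoff(cap=1.0, base=0.01), retries=3)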
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/client.pyi new file mode 100644 index 00000000..c69590db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/client.pyi @@ -0,0 +1,732 @@ +import threading +from _typeshed import Incomplete, SupportsItems +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from datetime import datetime, timedelta +from re import Pattern +from types import TracebackType +from typing import Any, ClassVar, Generic, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +from redis import RedisError + +from .commands import CoreCommands, RedisModuleCommands, SentinelCommands +from .connection import ConnectionPool, _ConnectFunc, _ConnectionPoolOptions +from .credentials import CredentialProvider +from .lock import Lock +from .retry import Retry +from .typing import ChannelT, EncodableT, KeyT, PatternT + +_Value: TypeAlias = bytes | float | int | str +_Key: TypeAlias = str | bytes + +# Lib returns str or bytes depending on value of decode_responses +_StrType = TypeVar("_StrType", bound=str | bytes) + +_VT = TypeVar("_VT") +_T = TypeVar("_T") + +# Keyword arguments that are passed to Redis.parse_response(). +_ParseResponseOptions: TypeAlias = Any +# Keyword arguments that are passed to Redis.execute_command(). +_CommandOptions: TypeAlias = _ConnectionPoolOptions | _ParseResponseOptions + +SYM_EMPTY: bytes +EMPTY_RESPONSE: str +NEVER_DECODE: str + +class CaseInsensitiveDict(dict[_StrType, _VT]): + def __init__(self, data: SupportsItems[_StrType, _VT]) -> None: ... + def update(self, data: SupportsItems[_StrType, _VT]) -> None: ... # type: ignore[override] + @overload + def get(self, k: _StrType, default: None = ...) -> _VT | None: ... + @overload + def get(self, k: _StrType, default: _VT | _T) -> _VT | _T: ... + # Overrides many other methods too, but without changing signature + +def list_or_args(keys, args): ... +def timestamp_to_datetime(response): ... +def string_keys_to_dict(key_string, callback): ... +def parse_debug_object(response): ... +def parse_object(response, infotype): ... +def parse_info(response): ... + +SENTINEL_STATE_TYPES: dict[str, type[int]] + +def parse_sentinel_state(item): ... +def parse_sentinel_master(response): ... +def parse_sentinel_masters(response): ... +def parse_sentinel_slaves_and_sentinels(response): ... +def parse_sentinel_get_master(response): ... +def pairs_to_dict(response, decode_keys: bool = ..., decode_string_values: bool = ...): ... +def pairs_to_dict_typed(response, type_info): ... +def zset_score_pairs(response, **options): ... +def sort_return_tuples(response, **options): ... +def int_or_none(response): ... +def float_or_none(response): ... +def bool_ok(response): ... +def parse_client_list(response, **options): ... +def parse_config_get(response, **options): ... +def parse_scan(response, **options): ... +def parse_hscan(response, **options): ... +def parse_zscan(response, **options): ... +def parse_slowlog_get(response, **options): ... 
+ +_LockType = TypeVar("_LockType") + +class AbstractRedis: + RESPONSE_CALLBACKS: dict[str, Any] + +class Redis(AbstractRedis, RedisModuleCommands, CoreCommands[_StrType], SentinelCommands, Generic[_StrType]): + @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str | None = ..., + port: int | None = ..., + db: int | None = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool | None = ..., + socket_keepalive_options: Mapping[str, int | str] | None = ..., + connection_pool: ConnectionPool | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., + decode_responses: Literal[True], + retry_on_timeout: bool = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str | int | None = ..., + ssl_ca_certs: str | None = ..., + ssl_check_hostname: bool = ..., + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + ) -> Redis[str]: ... + @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str | None = ..., + port: int | None = ..., + db: int | None = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool | None = ..., + socket_keepalive_options: Mapping[str, int | str] | None = ..., + connection_pool: ConnectionPool | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., + decode_responses: Literal[False] = ..., + retry_on_timeout: bool = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str | int | None = ..., + ssl_ca_certs: str | None = ..., + ssl_check_hostname: bool = ..., + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + ) -> Redis[bytes]: ... 
+ connection_pool: Any + response_callbacks: Any + @overload + def __init__( + self: Redis[str], + host: str, + port: int, + db: int, + password: str | None, + socket_timeout: float | None, + socket_connect_timeout: float | None, + socket_keepalive: bool | None, + socket_keepalive_options: Mapping[str, int | str] | None, + connection_pool: ConnectionPool | None, + unix_socket_path: str | None, + encoding: str, + encoding_errors: str, + charset: str | None, + errors: str | None, + decode_responses: Literal[True], + retry_on_timeout: bool = ..., + retry_on_error: list[type[RedisError]] | None = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str | int | None = ..., + ssl_ca_certs: str | None = ..., + ssl_ca_path: Incomplete | None = ..., + ssl_ca_data: Incomplete | None = ..., + ssl_check_hostname: bool = ..., + ssl_password: Incomplete | None = ..., + ssl_validate_ocsp: bool = ..., + ssl_validate_ocsp_stapled: bool = ..., # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = ..., # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = ..., # added in 4.1.1 + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + redis_connect_func: _ConnectFunc | None = ..., + credential_provider: CredentialProvider | None = ..., + ) -> None: ... + @overload + def __init__( + self: Redis[str], + host: str = ..., + port: int = ..., + db: int = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool | None = ..., + socket_keepalive_options: Mapping[str, int | str] | None = ..., + connection_pool: ConnectionPool | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., + *, + decode_responses: Literal[True], + retry_on_timeout: bool = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str | int | None = ..., + ssl_ca_certs: str | None = ..., + ssl_ca_data: Incomplete | None = ..., + ssl_check_hostname: bool = ..., + ssl_password: Incomplete | None = ..., + ssl_validate_ocsp: bool = ..., + ssl_validate_ocsp_stapled: bool = ..., # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = ..., # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = ..., # added in 4.1.1 + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + redis_connect_func: _ConnectFunc | None = ..., + credential_provider: CredentialProvider | None = ..., + ) -> None: ... 
+ @overload + def __init__( + self: Redis[bytes], + host: str = ..., + port: int = ..., + db: int = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool | None = ..., + socket_keepalive_options: Mapping[str, int | str] | None = ..., + connection_pool: ConnectionPool | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., + decode_responses: Literal[False] = ..., + retry_on_timeout: bool = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str | int | None = ..., + ssl_ca_certs: str | None = ..., + ssl_ca_data: Incomplete | None = ..., + ssl_check_hostname: bool = ..., + ssl_password: Incomplete | None = ..., + ssl_validate_ocsp: bool = ..., + ssl_validate_ocsp_stapled: bool = ..., # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = ..., # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = ..., # added in 4.1.1 + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + redis_connect_func: _ConnectFunc | None = ..., + credential_provider: CredentialProvider | None = ..., + ) -> None: ... + def get_encoder(self): ... + def get_connection_kwargs(self): ... + def set_response_callback(self, command, callback): ... + def pipeline(self, transaction: bool = ..., shard_hint: Any = ...) -> Pipeline[_StrType]: ... + def transaction(self, func, *watches, **kwargs): ... + @overload + def lock( + self, + name: _Key, + timeout: float | None = ..., + sleep: float = ..., + blocking: bool = ..., + blocking_timeout: float | None = ..., + lock_class: None = ..., + thread_local: bool = ..., + ) -> Lock: ... + @overload + def lock( + self, + name: _Key, + timeout: float | None, + sleep: float, + blocking: bool, + blocking_timeout: float | None, + lock_class: type[_LockType], + thread_local: bool = ..., + ) -> _LockType: ... + @overload + def lock( + self, + name: _Key, + timeout: float | None = ..., + sleep: float = ..., + blocking: bool = ..., + blocking_timeout: float | None = ..., + *, + lock_class: type[_LockType], + thread_local: bool = ..., + ) -> _LockType: ... + def pubsub(self, *, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ... + def execute_command(self, *args, **options: _CommandOptions): ... + def parse_response(self, connection, command_name, **options: _ParseResponseOptions): ... + def monitor(self) -> Monitor: ... + def __enter__(self) -> Redis[_StrType]: ... + def __exit__(self, exc_type, exc_value, traceback): ... + def __del__(self) -> None: ... + def close(self) -> None: ... + def client(self) -> Redis[_StrType]: ... + +StrictRedis = Redis + +class PubSub: + PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, str]] + UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, str]] + HEALTH_CHECK_MESSAGE: ClassVar[str] + connection_pool: Any + shard_hint: Any + ignore_subscribe_messages: Any + connection: Any + subscribed_event: threading.Event + encoder: Any + health_check_response_b: bytes + health_check_response: list[str] | list[bytes] + def __init__( + self, + connection_pool, + shard_hint: Incomplete | None = ..., + ignore_subscribe_messages: bool = ..., + encoder: Incomplete | None = ..., + ) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self): ... + channels: Any + patterns: Any + def reset(self): ... + def close(self) -> None: ... + def on_connect(self, connection): ... + @property + def subscribed(self): ... + def execute_command(self, *args): ... + def clean_health_check_responses(self) -> None: ... + def parse_response(self, block: bool = ..., timeout: float = ...): ... + def is_health_check_response(self, response) -> bool: ... + def check_health(self) -> None: ... + def psubscribe(self, *args: _Key, **kwargs: Callable[[Any], None]): ... + def punsubscribe(self, *args: _Key) -> None: ... + def subscribe(self, *args: _Key, **kwargs: Callable[[Any], None]) -> None: ... + def unsubscribe(self, *args: _Key) -> None: ... + def listen(self): ... + def get_message(self, ignore_subscribe_messages: bool = ..., timeout: float = ...) -> dict[str, Any] | None: ... + def handle_message(self, response, ignore_subscribe_messages: bool = ...) -> dict[str, Any] | None: ... + def run_in_thread(self, sleep_time: float = ..., daemon: bool = ..., exception_handler: Incomplete | None = ...): ... + def ping(self, message: _Value | None = ...) -> None: ... + +class PubSubWorkerThread(threading.Thread): + daemon: Any + pubsub: Any + sleep_time: Any + exception_handler: Any + def __init__(self, pubsub, sleep_time, daemon: bool = ..., exception_handler: Incomplete | None = ...) -> None: ... + def run(self) -> None: ... + def stop(self) -> None: ... + +class Pipeline(Redis[_StrType], Generic[_StrType]): + UNWATCH_COMMANDS: Any + connection_pool: Any + connection: Any + response_callbacks: Any + transaction: bool + shard_hint: Any + watching: bool + + command_stack: Any + scripts: Any + explicit_transaction: Any + def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ... + def __enter__(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def __exit__(self, exc_type, exc_value, traceback) -> None: ... + def __del__(self) -> None: ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def discard(self) -> None: ... + def reset(self) -> None: ... + def multi(self) -> None: ... + def execute_command(self, *args, **options): ... + def immediate_execute_command(self, *args, **options): ... + def pipeline_execute_command(self, *args, **options): ... + def raise_first_error(self, commands, response): ... + def annotate_exception(self, exception, number, command): ... + def parse_response(self, connection, command_name, **options): ... + def load_scripts(self): ... + def execute(self, raise_on_error: bool = ...) -> list[Any]: ... + def watch(self, *names: _Key) -> bool: ... + def unwatch(self) -> bool: ... + # in the Redis implementation, the following methods are inherited from client. + def set_response_callback(self, command, callback): ... + def pipeline(self, transaction: bool = ..., shard_hint: Any = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_cat(self, category: str | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_deluser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_genpass(self, bits: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_getuser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_list(self) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def acl_load(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_setuser( # type: ignore[override] + self, + username: str, + enabled: bool = ..., + nopass: bool = ..., + passwords: Sequence[str] | None = ..., + hashed_passwords: Sequence[str] | None = ..., + categories: Sequence[str] | None = ..., + commands: Sequence[str] | None = ..., + keys: Sequence[str] | None = ..., + channels: Iterable[ChannelT] | None = ..., + selectors: Iterable[tuple[str, KeyT]] | None = ..., + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> Pipeline[_StrType]: ... + def acl_users(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_whoami(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def bgrewriteaof(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def bgsave(self, schedule: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_id(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_kill(self, address: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_list(self, _type: str | None = ..., client_id: list[str] = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_getname(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_setname(self, name: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def readwrite(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def readonly(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_get(self, pattern: PatternT = ..., *args: PatternT, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_resetstat(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_rewrite(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def dbsize(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def debug_object(self, key) -> Pipeline[_StrType]: ... # type: ignore[override] + def echo(self, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def flushall(self, asynchronous: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def flushdb(self, asynchronous: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def info(self, section: _Key | None = ..., *args: _Key, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ... # type: ignore[override] + def lastsave(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def object(self, infotype, key) -> Pipeline[_StrType]: ... # type: ignore[override] + def ping(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def save(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_get_master_addr_by_name(self, service_name) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_master(self, service_name) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_masters(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_monitor(self, name, ip, port, quorum) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_remove(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_sentinels(self, service_name) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def sentinel_set(self, name, option, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_slaves(self, service_name) -> Pipeline[_StrType]: ... # type: ignore[override] + def slaveof(self, host=..., port=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_get(self, num=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_len(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_reset(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def time(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def append(self, key, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def bitcount(self, key: _Key, start: int | None = ..., end: int | None = ..., mode: str | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def bitop(self, operation, dest, *keys) -> Pipeline[_StrType]: ... # type: ignore[override] + def bitpos(self, key, bit, start=..., end=..., mode: str | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def decr(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def delete(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def __delitem__(self, _Key) -> None: ... + def dump(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def exists(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def __contains__(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def expire(self, name: _Key, time: int | timedelta, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def expireat(self, name, when, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def get(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def __getitem__(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def getbit(self, name: _Key, offset: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def getrange(self, key, start, end) -> Pipeline[_StrType]: ... # type: ignore[override] + def getset(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def incr(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def incrby(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def incrbyfloat(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def keys(self, pattern: _Key = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def mset(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override] + def msetnx(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override] + def move(self, name: _Key, db: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def persist(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def pexpire(self, name: _Key, time: int | timedelta, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def pexpireat(self, name: _Key, when: int | datetime, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def psetex(self, name, time_ms, value) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def pttl(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def randomkey(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def rename(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore[override] + def renamenx(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore[override] + def restore(self, name, ttl, value, replace: bool = ..., absttl: bool = ..., idletime: Incomplete | None = ..., frequency: Incomplete | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def set( # type: ignore[override] + self, + name: _Key, + value: _Value, + ex: None | int | timedelta = ..., + px: None | int | timedelta = ..., + nx: bool = ..., + xx: bool = ..., + keepttl: bool = ..., + get: bool = ..., + exat: Incomplete | None = ..., + pxat: Incomplete | None = ..., + ) -> Pipeline[_StrType]: ... + def __setitem__(self, name, value) -> None: ... + def setbit(self, name: _Key, offset: int, value: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def setnx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def setrange(self, name, offset, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def strlen(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def substr(self, name, start, end=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def ttl(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def type(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def unlink(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def blmove( # type: ignore[override] + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = ..., + dest: Literal["LEFT", "RIGHT"] = ..., + ) -> Pipeline[_StrType]: ... + def blpop(self, keys: _Value | Iterable[_Value], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def brpop(self, keys: _Value | Iterable[_Value], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def brpoplpush(self, src, dst, timeout=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def lindex(self, name: _Key, index: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def linsert( # type: ignore[override] + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> Pipeline[_StrType]: ... + def llen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def lmove( # type: ignore[override] + self, first_list: _Key, second_list: _Key, src: Literal["LEFT", "RIGHT"] = ..., dest: Literal["LEFT", "RIGHT"] = ... + ) -> Pipeline[_StrType]: ... + def lpop(self, name, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def lpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def lpushx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def lrange(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def lrem(self, name: _Key, count: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def lset(self, name: _Key, index: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def ltrim(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def rpop(self, name, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def rpoplpush(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore[override] + def rpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def rpushx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def sort( # type: ignore[override] + self, + name: _Key, + start: int | None = ..., + num: int | None = ..., + by: _Key | None = ..., + get: _Key | Sequence[_Key] | None = ..., + desc: bool = ..., + alpha: bool = ..., + store: _Key | None = ..., + groups: bool = ..., + ) -> Pipeline[_StrType]: ... + def scan(self, cursor: int = ..., match: _Key | None = ..., count: int | None = ..., _type: str | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def scan_iter(self, match: _Key | None = ..., count: int | None = ..., _type: str | None = ...) -> Iterator[Any]: ... # type: ignore[override] + def sscan(self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def sscan_iter(self, name: _Key, match: _Key | None = ..., count: int | None = ...) -> Iterator[Any]: ... + def hscan(self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def hscan_iter(self, name, match: _Key | None = ..., count: int | None = ...) -> Iterator[Any]: ... + def zscan_iter( + self, name: _Key, match: _Key | None = ..., count: int | None = ..., score_cast_func: Callable[[_StrType], Any] = ... + ) -> Iterator[Any]: ... + def sadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def scard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sismember(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def smembers(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def smove(self, src: _Key, dst: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def spop(self, name: _Key, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def srandmember(self, name: _Key, number: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def srem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def xack(self, name, groupname, *ids) -> Pipeline[_StrType]: ... # type: ignore[override] + def xadd(self, name, fields, id=..., maxlen=..., approximate: bool = ..., nomkstream: bool = ..., minid: Incomplete | None = ..., limit: int | None = ...) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def xclaim( + self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=... + ) -> Pipeline[_StrType]: ... # type: ignore[override] + def xdel(self, name, *ids) -> Pipeline[_StrType]: ... # type: ignore[override] + def xgroup_create(self, name, groupname, id=..., mkstream=..., entries_read: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xgroup_delconsumer(self, name, groupname, consumername) -> Pipeline[_StrType]: ... # type: ignore[override] + def xgroup_destroy(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore[override] + def xgroup_setid(self, name, groupname, id, entries_read: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xinfo_consumers(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore[override] + def xinfo_groups(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def xinfo_stream(self, name, full: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def xpending(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore[override] + def xpending_range(self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = ..., idle: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xrange(self, name, min=..., max=..., count=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xread(self, streams, count=..., block=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xreadgroup(self, groupname, consumername, streams, count=..., block=..., noack=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xrevrange(self, name, max=..., min=..., count=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xtrim(self, name, maxlen: int | None = ..., approximate: bool = ..., minid: Incomplete | None = ..., limit: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def zadd( # type: ignore[override] + self, + name: _Key, + mapping: Mapping[_Key, _Value], + nx: bool = ..., + xx: bool = ..., + ch: bool = ..., + incr: bool = ..., + gt: Incomplete | None = ..., + lt: Incomplete | None = ..., + ) -> Pipeline[_StrType]: ... + def zcard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def zcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zincrby(self, name: _Key, amount: float, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zpopmax(self, name: _Key, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def zpopmin(self, name: _Key, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float = ...) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def zrange( # type: ignore[override] + self, + name: _Key, + start: int, + end: int, + desc: bool = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> Pipeline[_StrType]: ... + def zrangebylex(self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def zrangebyscore( # type: ignore[override] + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Pipeline[_StrType]: ... + def zrank(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zrem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zremrangebyrank(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zrevrange( # type: ignore[override] + self, name: _Key, start: int, end: int, withscores: bool = ..., score_cast_func: Callable[[_StrType], Any] = ... + ) -> Pipeline[_StrType]: ... + def zrevrangebyscore( # type: ignore[override] + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Pipeline[_StrType]: ... + def zrevrangebylex( # type: ignore[override] + self, name: _Key, max: _Value, min: _Value, start: int | None = ..., num: int | None = ... + ) -> Pipeline[_StrType]: ... + def zrevrank(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zscore(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def pfadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def pfcount(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def pfmerge(self, dest: _Key, *sources: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hdel(self, name: _Key, *keys: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hexists(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hget(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hgetall(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def hkeys(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + @overload # type: ignore[override] + def hset( + self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = ..., items: Incomplete | None = ... + ) -> Pipeline[_StrType]: ... 
+ @overload # type: ignore[override] + def hset( + self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = ... + ) -> Pipeline[_StrType]: ... + @overload # type: ignore[override] + def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = ...) -> Pipeline[_StrType]: ... + def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override] + def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hvals(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def publish(self, channel: _Key, message: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def eval(self, script, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... # type: ignore[override] + def evalsha(self, sha, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... # type: ignore[override] + def script_exists(self, *args) -> Pipeline[_StrType]: ... # type: ignore[override] + def script_flush(self, sync_type: Incomplete | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def script_kill(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def script_load(self, script) -> Pipeline[_StrType]: ... # type: ignore[override] + def pubsub_channels(self, pattern: _Key = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def pubsub_numsub(self, *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def pubsub_numpat(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def monitor(self) -> Monitor: ... + def cluster(self, cluster_arg: str, *args: Any) -> Pipeline[_StrType]: ... # type: ignore[override] + def client(self) -> Any: ... + +class Monitor: + command_re: Pattern[str] + monitor_re: Pattern[str] + def __init__(self, connection_pool) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: object) -> None: ... + def next_command(self) -> dict[str, Any]: ... + def listen(self) -> Iterable[dict[str, Any]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/cluster.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/cluster.pyi new file mode 100644 index 00000000..fd4e906e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/cluster.pyi @@ -0,0 +1,258 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable, Iterable, Sequence +from threading import Lock +from types import TracebackType +from typing import Any, ClassVar, Generic, NoReturn, Protocol +from typing_extensions import Literal, Self + +from redis.client import CaseInsensitiveDict, PubSub, Redis, _ParseResponseOptions +from redis.commands import CommandsParser, RedisClusterCommands +from redis.commands.core import _StrType +from redis.connection import BaseParser, Connection, ConnectionPool, Encoder, _ConnectionPoolOptions, _Encodable +from redis.exceptions import MovedError, RedisError +from redis.retry import Retry +from redis.typing import EncodableT + +def get_node_name(host: str, port: str | int) -> str: ... +def get_connection(redis_node: Redis[Any], *args, **options: _ConnectionPoolOptions) -> Connection: ... +def parse_scan_result(command: Unused, res, **options): ... 
+def parse_pubsub_numsub(command: Unused, res, **options: Unused): ... +def parse_cluster_slots(resp, **options) -> dict[tuple[int, int], dict[str, Any]]: ... + +PRIMARY: str +REPLICA: str +SLOT_ID: str +REDIS_ALLOWED_KEYS: tuple[str, ...] +KWARGS_DISABLED_KEYS: tuple[str, ...] +PIPELINE_BLOCKED_COMMANDS: tuple[str, ...] + +def cleanup_kwargs(**kwargs: Any) -> dict[str, Any]: ... + +# It uses `DefaultParser` in real life, but it is a dynamic base class. +class ClusterParser(BaseParser): ... + +class AbstractRedisCluster: + RedisClusterRequestTTL: ClassVar[int] + PRIMARIES: ClassVar[str] + REPLICAS: ClassVar[str] + ALL_NODES: ClassVar[str] + RANDOM: ClassVar[str] + DEFAULT_NODE: ClassVar[str] + NODE_FLAGS: ClassVar[set[str]] + COMMAND_FLAGS: ClassVar[dict[str, str]] + CLUSTER_COMMANDS_RESPONSE_CALLBACKS: ClassVar[dict[str, Any]] + RESULT_CALLBACKS: ClassVar[dict[str, Callable[[Incomplete, Incomplete], Incomplete]]] + ERRORS_ALLOW_RETRY: ClassVar[tuple[type[RedisError], ...]] + +class RedisCluster(AbstractRedisCluster, RedisClusterCommands[_StrType], Generic[_StrType]): + user_on_connect_func: Callable[[Connection], object] | None + encoder: Encoder + cluster_error_retry_attempts: int + command_flags: dict[str, str] + node_flags: set[str] + read_from_replicas: bool + reinitialize_counter: int + reinitialize_steps: int + nodes_manager: NodesManager + cluster_response_callbacks: CaseInsensitiveDict[str, Callable[..., Incomplete]] + result_callbacks: CaseInsensitiveDict[str, Callable[[Incomplete, Incomplete], Incomplete]] + commands_parser: CommandsParser + def __init__( # TODO: make @overloads, either `url` or `host:port` can be passed + self, + host: str | None = ..., + port: int | None = ..., + startup_nodes: list[ClusterNode] | None = ..., + cluster_error_retry_attempts: int = ..., + retry: Retry | None = ..., + require_full_coverage: bool = ..., + reinitialize_steps: int = ..., + read_from_replicas: bool = ..., + dynamic_startup_nodes: bool = ..., + url: str | None = ..., + **kwargs, + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + def disconnect_connection_pools(self) -> None: ... + @classmethod + def from_url(cls, url: str, **kwargs) -> Self: ... + def on_connect(self, connection: Connection) -> None: ... + def get_redis_connection(self, node: ClusterNode) -> Redis[Any]: ... + def get_node( + self, host: str | None = ..., port: str | int | None = ..., node_name: str | None = ... + ) -> ClusterNode | None: ... + def get_primaries(self) -> list[ClusterNode]: ... + def get_replicas(self) -> list[ClusterNode]: ... + def get_random_node(self) -> ClusterNode: ... + def get_nodes(self) -> list[ClusterNode]: ... + def get_node_from_key(self, key: _Encodable, replica: bool = ...) -> ClusterNode | None: ... + def get_default_node(self) -> ClusterNode | None: ... + def set_default_node(self, node: ClusterNode | None) -> bool: ... + def monitor(self, target_node: Incomplete | None = ...): ... + def pubsub(self, node: Incomplete | None = ..., host: Incomplete | None = ..., port: Incomplete | None = ..., **kwargs): ... + def pipeline(self, transaction: Incomplete | None = ..., shard_hint: Incomplete | None = ...): ... 
+ def lock( + self, + name: str, + timeout: float | None = ..., + sleep: float = ..., + blocking: bool = ..., + blocking_timeout: float | None = ..., + lock_class: type[Incomplete] | None = ..., + thread_local: bool = ..., + ): ... + def keyslot(self, key: _Encodable) -> int: ... + def determine_slot(self, *args): ... + def get_encoder(self) -> Encoder: ... + def get_connection_kwargs(self) -> dict[str, Any]: ... + def execute_command(self, *args, **kwargs): ... + def close(self) -> None: ... + +class ClusterNode: + host: str + port: int + name: str + server_type: str | None + redis_connection: Redis[Incomplete] | None + def __init__( + self, host: str, port: int, server_type: str | None = ..., redis_connection: Redis[Incomplete] | None = ... + ) -> None: ... + def __eq__(self, obj: object) -> bool: ... + def __del__(self) -> None: ... + +class LoadBalancer: + primary_to_idx: dict[str, int] + start_index: int + def __init__(self, start_index: int = ...) -> None: ... + def get_server_index(self, primary: str, list_size: int) -> int: ... + def reset(self) -> None: ... + +class NodesManager: + nodes_cache: dict[str, ClusterNode] + slots_cache: dict[str, list[ClusterNode]] + startup_nodes: dict[str, ClusterNode] + default_node: ClusterNode | None + from_url: bool + connection_pool_class: type[ConnectionPool] + connection_kwargs: dict[str, Incomplete] # TODO: could be a TypedDict + read_load_balancer: LoadBalancer + def __init__( + self, + startup_nodes: Iterable[ClusterNode], + from_url: bool = False, + require_full_coverage: bool = False, + lock: Lock | None = None, + dynamic_startup_nodes: bool = True, + connection_pool_class: type[ConnectionPool] = ..., + **kwargs, # TODO: same type as connection_kwargs + ) -> None: ... + def get_node( + self, host: str | None = ..., port: int | str | None = ..., node_name: str | None = ... + ) -> ClusterNode | None: ... + def update_moved_exception(self, exception: MovedError) -> None: ... + def get_node_from_slot(self, slot: str, read_from_replicas: bool = ..., server_type: str | None = ...) -> ClusterNode: ... + def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ... + def populate_startup_nodes(self, nodes: Iterable[ClusterNode]) -> None: ... + def check_slots_coverage(self, slots_cache: dict[str, list[ClusterNode]]) -> bool: ... + def create_redis_connections(self, nodes: Iterable[ClusterNode]) -> None: ... + def create_redis_node(self, host: str, port: int | str, **kwargs: Any) -> Redis[Incomplete]: ... + def initialize(self) -> None: ... + def close(self) -> None: ... + def reset(self) -> None: ... + +class ClusterPubSub(PubSub): + node: ClusterNode | None + cluster: RedisCluster[Any] + def __init__( + self, + redis_cluster: RedisCluster[Any], + node: ClusterNode | None = ..., + host: str | None = ..., + port: int | None = ..., + **kwargs, + ) -> None: ... + def set_pubsub_node( + self, cluster: RedisCluster[Any], node: ClusterNode | None = ..., host: str | None = ..., port: int | None = ... + ) -> None: ... + def get_pubsub_node(self) -> ClusterNode | None: ... + def execute_command(self, *args, **kwargs) -> None: ... + def get_redis_connection(self) -> Redis[Any] | None: ... 
+ +class ClusterPipeline(RedisCluster[_StrType], Generic[_StrType]): + command_stack: list[Incomplete] + nodes_manager: Incomplete + refresh_table_asap: bool + result_callbacks: Incomplete + startup_nodes: Incomplete + read_from_replicas: bool + command_flags: Incomplete + cluster_response_callbacks: Incomplete + cluster_error_retry_attempts: int + reinitialize_counter: int + reinitialize_steps: int + encoder: Encoder + commands_parser: Incomplete + def __init__( + self, + nodes_manager, + commands_parser, + result_callbacks: Incomplete | None = ..., + cluster_response_callbacks: Incomplete | None = ..., + startup_nodes: Incomplete | None = ..., + read_from_replicas: bool = ..., + cluster_error_retry_attempts: int = ..., + reinitialize_steps: int = ..., + lock: Lock | None = ..., + **kwargs, + ) -> None: ... + def __len__(self) -> int: ... + def __bool__(self) -> Literal[True]: ... + def execute_command(self, *args, **kwargs): ... + def pipeline_execute_command(self, *args, **options): ... + def raise_first_error(self, stack) -> None: ... + def annotate_exception(self, exception, number, command) -> None: ... + def execute(self, raise_on_error: bool = ...): ... + scripts: set[Any] # is only set in `reset()` + watching: bool # is only set in `reset()` + explicit_transaction: bool # is only set in `reset()` + def reset(self) -> None: ... + def send_cluster_commands(self, stack, raise_on_error: bool = ..., allow_redirections: bool = ...): ... + def eval(self) -> None: ... + def multi(self) -> None: ... + def immediate_execute_command(self, *args, **options) -> None: ... + def load_scripts(self) -> None: ... + def watch(self, *names) -> None: ... + def unwatch(self) -> None: ... + def script_load_for_pipeline(self, *args, **kwargs) -> None: ... + def delete(self, *names): ... + +def block_pipeline_command(name: str) -> Callable[..., NoReturn]: ... + +class PipelineCommand: + args: Sequence[EncodableT] + options: _ParseResponseOptions + position: int | None + result: Any | Exception | None + node: Incomplete | None + asking: bool + def __init__( + self, args: Sequence[EncodableT], options: _ParseResponseOptions | None = ..., position: int | None = ... + ) -> None: ... + +class _ParseResponseCallback(Protocol): + def __call__(self, __connection: Connection, __command: EncodableT, **kwargs: Incomplete) -> Any: ... + +class NodeCommands: + parse_response: _ParseResponseCallback + connection_pool: ConnectionPool + connection: Connection + commands: list[PipelineCommand] + def __init__( + self, parse_response: _ParseResponseCallback, connection_pool: ConnectionPool, connection: Connection + ) -> None: ... + def append(self, c: PipelineCommand) -> None: ... + def write(self) -> None: ... + def read(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/__init__.pyi new file mode 100644 index 00000000..4959ea0f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/__init__.pyi @@ -0,0 +1,17 @@ +from .cluster import RedisClusterCommands as RedisClusterCommands +from .core import AsyncCoreCommands as AsyncCoreCommands, CoreCommands as CoreCommands +from .helpers import list_or_args as list_or_args +from .parser import CommandsParser as CommandsParser +from .redismodules import RedisModuleCommands as RedisModuleCommands +from .sentinel import AsyncSentinelCommands as AsyncSentinelCommands, SentinelCommands as SentinelCommands + +__all__ = [ + "RedisClusterCommands", + "CommandsParser", + "AsyncCoreCommands", + "CoreCommands", + "list_or_args", + "RedisModuleCommands", + "AsyncSentinelCommands", + "SentinelCommands", +] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/bf/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/bf/__init__.pyi new file mode 100644 index 00000000..d5ef70ee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/bf/__init__.pyi @@ -0,0 +1,58 @@ +from typing import Any + +from .commands import * +from .info import BFInfo as BFInfo, CFInfo as CFInfo, CMSInfo as CMSInfo, TDigestInfo as TDigestInfo, TopKInfo as TopKInfo + +class AbstractBloom: + @staticmethod + def append_items(params, items) -> None: ... + @staticmethod + def append_error(params, error) -> None: ... + @staticmethod + def append_capacity(params, capacity) -> None: ... + @staticmethod + def append_expansion(params, expansion) -> None: ... + @staticmethod + def append_no_scale(params, noScale) -> None: ... + @staticmethod + def append_weights(params, weights) -> None: ... + @staticmethod + def append_no_create(params, noCreate) -> None: ... + @staticmethod + def append_items_and_increments(params, items, increments) -> None: ... + @staticmethod + def append_values_and_weights(params, items, weights) -> None: ... + @staticmethod + def append_max_iterations(params, max_iterations) -> None: ... + @staticmethod + def append_bucket_size(params, bucket_size) -> None: ... + +class CMSBloom(CMSCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class TOPKBloom(TOPKCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class CFBloom(CFCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class TDigestBloom(TDigestCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class BFBloom(BFCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/bf/commands.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/bf/commands.pyi new file mode 100644 index 00000000..73b0beb1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/bf/commands.pyi @@ -0,0 +1,112 @@ +from _typeshed import Incomplete + +BF_RESERVE: str +BF_ADD: str +BF_MADD: str +BF_INSERT: str +BF_EXISTS: str +BF_MEXISTS: str +BF_SCANDUMP: str +BF_LOADCHUNK: str +BF_INFO: str +CF_RESERVE: str +CF_ADD: str +CF_ADDNX: str +CF_INSERT: str +CF_INSERTNX: str +CF_EXISTS: str +CF_DEL: str +CF_COUNT: str +CF_SCANDUMP: str +CF_LOADCHUNK: str +CF_INFO: str +CMS_INITBYDIM: str +CMS_INITBYPROB: str +CMS_INCRBY: str +CMS_QUERY: str +CMS_MERGE: str +CMS_INFO: str +TOPK_RESERVE: str +TOPK_ADD: str +TOPK_INCRBY: str +TOPK_QUERY: str +TOPK_COUNT: str +TOPK_LIST: str +TOPK_INFO: str +TDIGEST_CREATE: str +TDIGEST_RESET: str +TDIGEST_ADD: str +TDIGEST_MERGE: str +TDIGEST_CDF: str +TDIGEST_QUANTILE: str +TDIGEST_MIN: str +TDIGEST_MAX: str +TDIGEST_INFO: str + +class BFCommands: + def create(self, key, errorRate, capacity, expansion: Incomplete | None = ..., noScale: Incomplete | None = ...): ... + def add(self, key, item): ... + def madd(self, key, *items): ... + def insert( + self, + key, + items, + capacity: Incomplete | None = ..., + error: Incomplete | None = ..., + noCreate: Incomplete | None = ..., + expansion: Incomplete | None = ..., + noScale: Incomplete | None = ..., + ): ... + def exists(self, key, item): ... + def mexists(self, key, *items): ... + def scandump(self, key, iter): ... + def loadchunk(self, key, iter, data): ... + def info(self, key): ... + +class CFCommands: + def create( + self, + key, + capacity, + expansion: Incomplete | None = ..., + bucket_size: Incomplete | None = ..., + max_iterations: Incomplete | None = ..., + ): ... + def add(self, key, item): ... + def addnx(self, key, item): ... + def insert(self, key, items, capacity: Incomplete | None = ..., nocreate: Incomplete | None = ...): ... + def insertnx(self, key, items, capacity: Incomplete | None = ..., nocreate: Incomplete | None = ...): ... + def exists(self, key, item): ... + def delete(self, key, item): ... + def count(self, key, item): ... + def scandump(self, key, iter): ... + def loadchunk(self, key, iter, data): ... + def info(self, key): ... + +class TOPKCommands: + def reserve(self, key, k, width, depth, decay): ... + def add(self, key, *items): ... + def incrby(self, key, items, increments): ... + def query(self, key, *items): ... + def count(self, key, *items): ... + def list(self, key, withcount: bool = ...): ... + def info(self, key): ... + +class TDigestCommands: + def create(self, key, compression: int = ...): ... + def reset(self, key): ... + def add(self, key, values): ... + def merge(self, destination_key, num_keys, *keys, compression: int | None = ..., override: bool = ...): ... + def min(self, key): ... + def max(self, key): ... + def quantile(self, key, quantile, *quantiles): ... + def cdf(self, key, value, *values): ... + def info(self, key): ... + +class CMSCommands: + def initbydim(self, key, width, depth): ... + def initbyprob(self, key, error, probability): ... + def incrby(self, key, items, increments): ... + def query(self, key, *items): ... + def merge(self, destKey, numKeys, srcKeys, weights=...): ... + def info(self, key): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/bf/info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/bf/info.pyi new file mode 100644 index 00000000..54d1cf04 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/bf/info.pyi @@ -0,0 +1,43 @@ +from typing import Any + +class BFInfo: + capacity: Any + size: Any + filterNum: Any + insertedNum: Any + expansionRate: Any + def __init__(self, args) -> None: ... + +class CFInfo: + size: Any + bucketNum: Any + filterNum: Any + insertedNum: Any + deletedNum: Any + bucketSize: Any + expansionRate: Any + maxIteration: Any + def __init__(self, args) -> None: ... + +class CMSInfo: + width: Any + depth: Any + count: Any + def __init__(self, args) -> None: ... + +class TopKInfo: + k: Any + width: Any + depth: Any + decay: Any + def __init__(self, args) -> None: ... + +class TDigestInfo: + compression: Any + capacity: Any + mergedNodes: Any + unmergedNodes: Any + mergedWeight: Any + unmergedWeight: Any + totalCompressions: Any + def __init__(self, args) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/cluster.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/cluster.pyi new file mode 100644 index 00000000..0a5981b0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/cluster.pyi @@ -0,0 +1,61 @@ +from _typeshed import Incomplete +from typing import Generic + +from .core import ACLCommands, DataAccessCommands, ManagementCommands, PubSubCommands, _StrType + +class ClusterMultiKeyCommands: + def mget_nonatomic(self, keys, *args): ... + def mset_nonatomic(self, mapping): ... + def exists(self, *keys): ... + def delete(self, *keys): ... + def touch(self, *keys): ... + def unlink(self, *keys): ... + +class ClusterManagementCommands(ManagementCommands): + def slaveof(self, *args, **kwargs) -> None: ... + def replicaof(self, *args, **kwargs) -> None: ... + def swapdb(self, *args, **kwargs) -> None: ... + +class ClusterDataAccessCommands(DataAccessCommands[_StrType], Generic[_StrType]): + def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = ..., + len: bool = ..., + idx: bool = ..., + minmatchlen: Incomplete | None = ..., + withmatchlen: bool = ..., + **kwargs, + ): ... + +class RedisClusterCommands( + ClusterMultiKeyCommands, + ClusterManagementCommands, + ACLCommands[_StrType], + PubSubCommands, + ClusterDataAccessCommands[_StrType], + Generic[_StrType], +): + def cluster_addslots(self, target_node, *slots): ... + def cluster_countkeysinslot(self, slot_id): ... + def cluster_count_failure_report(self, node_id): ... + def cluster_delslots(self, *slots): ... + def cluster_failover(self, target_node, option: Incomplete | None = ...): ... + def cluster_info(self, target_nodes: Incomplete | None = ...): ... + def cluster_keyslot(self, key): ... + def cluster_meet(self, host, port, target_nodes: Incomplete | None = ...): ... + def cluster_nodes(self): ... + def cluster_replicate(self, target_nodes, node_id): ... + def cluster_reset(self, soft: bool = ..., target_nodes: Incomplete | None = ...): ... + def cluster_save_config(self, target_nodes: Incomplete | None = ...): ... + def cluster_get_keys_in_slot(self, slot, num_keys): ... 
+ def cluster_set_config_epoch(self, epoch, target_nodes: Incomplete | None = ...): ... + def cluster_setslot(self, target_node, node_id, slot_id, state): ... + def cluster_setslot_stable(self, slot_id): ... + def cluster_replicas(self, node_id, target_nodes: Incomplete | None = ...): ... + def cluster_slots(self, target_nodes: Incomplete | None = ...): ... + read_from_replicas: bool + def readonly(self, target_nodes: Incomplete | None = ...): ... + def readwrite(self, target_nodes: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/core.pyi new file mode 100644 index 00000000..65aca2c5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/core.pyi @@ -0,0 +1,1696 @@ +import builtins +from _typeshed import Incomplete +from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Iterator, Mapping, Sequence +from datetime import datetime, timedelta +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Literal + +from ..asyncio.client import Redis as AsyncRedis +from ..client import _CommandOptions, _Key, _Value +from ..typing import ChannelT, EncodableT, KeyT, PatternT, ScriptTextT, StreamIdT + +_ScoreCastFuncReturn = TypeVar("_ScoreCastFuncReturn") +_StrType = TypeVar("_StrType", bound=str | bytes) + +class ACLCommands(Generic[_StrType]): + def acl_cat(self, category: str | None = ..., **kwargs: _CommandOptions) -> list[str]: ... + def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ... + def acl_genpass(self, bits: int | None = ..., **kwargs: _CommandOptions) -> str: ... + def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ... + def acl_help(self, **kwargs: _CommandOptions): ... + def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ... + def acl_log(self, count: int | None = ..., **kwargs: _CommandOptions): ... + def acl_log_reset(self, **kwargs: _CommandOptions): ... + def acl_load(self, **kwargs: _CommandOptions) -> bool: ... + def acl_save(self, **kwargs: _CommandOptions): ... + def acl_setuser( + self, + username: str, + enabled: bool = ..., + nopass: bool = ..., + passwords: Sequence[str] | None = ..., + hashed_passwords: Sequence[str] | None = ..., + categories: Sequence[str] | None = ..., + commands: Sequence[str] | None = ..., + keys: Sequence[str] | None = ..., + channels: Iterable[ChannelT] | None = ..., + selectors: Iterable[tuple[str, KeyT]] | None = ..., + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> bool: ... + def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ... + def acl_whoami(self, **kwargs: _CommandOptions) -> str: ... + +class AsyncACLCommands(Generic[_StrType]): + async def acl_cat(self, category: str | None = ..., **kwargs: _CommandOptions) -> list[str]: ... + async def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ... + async def acl_genpass(self, bits: int | None = ..., **kwargs: _CommandOptions) -> str: ... + async def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ... + async def acl_help(self, **kwargs: _CommandOptions): ... + async def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ... 
+ async def acl_log(self, count: int | None = ..., **kwargs: _CommandOptions): ... + async def acl_log_reset(self, **kwargs: _CommandOptions): ... + async def acl_load(self, **kwargs: _CommandOptions) -> bool: ... + async def acl_save(self, **kwargs: _CommandOptions): ... + async def acl_setuser( + self, + username: str, + enabled: bool = ..., + nopass: bool = ..., + passwords: Sequence[str] | None = ..., + hashed_passwords: Sequence[str] | None = ..., + categories: Sequence[str] | None = ..., + commands: Sequence[str] | None = ..., + keys: Sequence[str] | None = ..., + channels: Iterable[ChannelT] | None = ..., + selectors: Iterable[tuple[str, KeyT]] | None = ..., + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> bool: ... + async def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ... + async def acl_whoami(self, **kwargs: _CommandOptions) -> str: ... + +class ManagementCommands: + def bgrewriteaof(self, **kwargs: _CommandOptions): ... + def bgsave(self, schedule: bool = ..., **kwargs: _CommandOptions): ... + def role(self): ... + def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ... + def client_kill_filter( + self, + _id: Incomplete | None = ..., + _type: Incomplete | None = ..., + addr: Incomplete | None = ..., + skipme: Incomplete | None = ..., + laddr: Incomplete | None = ..., + user: Incomplete | None = ..., + **kwargs: _CommandOptions, + ): ... + def client_info(self, **kwargs: _CommandOptions): ... + def client_list( + self, _type: str | None = ..., client_id: list[str] = ..., **kwargs: _CommandOptions + ) -> list[dict[str, str]]: ... + def client_getname(self, **kwargs: _CommandOptions) -> str | None: ... + def client_getredir(self, **kwargs: _CommandOptions): ... + def client_reply(self, reply, **kwargs: _CommandOptions): ... + def client_id(self, **kwargs: _CommandOptions) -> int: ... + def client_tracking_on( + self, + clientid: Incomplete | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + ): ... + def client_tracking_off( + self, + clientid: Incomplete | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + ): ... + def client_tracking( + self, + on: bool = ..., + clientid: Incomplete | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + **kwargs: _CommandOptions, + ): ... + def client_trackinginfo(self, **kwargs: _CommandOptions): ... + def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ... + def client_unblock(self, client_id, error: bool = ..., **kwargs: _CommandOptions): ... + def client_pause(self, timeout, all: bool = ..., **kwargs: _CommandOptions): ... + def client_unpause(self, **kwargs: _CommandOptions): ... + def command(self, **kwargs: _CommandOptions): ... + def command_info(self, **kwargs: _CommandOptions): ... + def command_count(self, **kwargs: _CommandOptions): ... + def config_get(self, pattern: PatternT = ..., *args: PatternT, **kwargs: _CommandOptions): ... + def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ... + def config_resetstat(self, **kwargs: _CommandOptions): ... + def config_rewrite(self, **kwargs: _CommandOptions): ... + def dbsize(self, **kwargs: _CommandOptions) -> int: ... + def debug_object(self, key, **kwargs: _CommandOptions): ... 
+ def debug_segfault(self, **kwargs: _CommandOptions): ... + def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ... + def flushall(self, asynchronous: bool = ..., **kwargs: _CommandOptions) -> bool: ... + def flushdb(self, asynchronous: bool = ..., **kwargs: _CommandOptions) -> bool: ... + def sync(self): ... + def psync(self, replicationid, offset): ... + def swapdb(self, first, second, **kwargs: _CommandOptions): ... + def select(self, index, **kwargs: _CommandOptions): ... + def info(self, section: _Key | None = ..., *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ... + def lastsave(self, **kwargs: _CommandOptions): ... + def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ... + def reset(self) -> None: ... + def migrate( + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = ..., + replace: bool = ..., + auth: Incomplete | None = ..., + **kwargs: _CommandOptions, + ): ... + def object(self, infotype, key, **kwargs: _CommandOptions): ... + def memory_doctor(self, **kwargs: _CommandOptions): ... + def memory_help(self, **kwargs: _CommandOptions): ... + def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ... + def memory_malloc_stats(self, **kwargs: _CommandOptions): ... + def memory_usage(self, key, samples: Incomplete | None = ..., **kwargs: _CommandOptions): ... + def memory_purge(self, **kwargs: _CommandOptions): ... + def ping(self, **kwargs: _CommandOptions) -> bool: ... + def quit(self, **kwargs: _CommandOptions): ... + def replicaof(self, *args, **kwargs: _CommandOptions): ... + def save(self, **kwargs: _CommandOptions) -> bool: ... + def shutdown( + self, + save: bool = ..., + nosave: bool = ..., + now: bool = ..., + force: bool = ..., + abort: bool = ..., + **kwargs: _CommandOptions, + ) -> None: ... + def slaveof(self, host: Incomplete | None = ..., port: Incomplete | None = ..., **kwargs: _CommandOptions): ... + def slowlog_get(self, num: Incomplete | None = ..., **kwargs: _CommandOptions): ... + def slowlog_len(self, **kwargs: _CommandOptions): ... + def slowlog_reset(self, **kwargs: _CommandOptions): ... + def time(self, **kwargs: _CommandOptions): ... + def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ... + +class AsyncManagementCommands: + async def bgrewriteaof(self, **kwargs: _CommandOptions): ... + async def bgsave(self, schedule: bool = ..., **kwargs: _CommandOptions): ... + async def role(self): ... + async def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ... + async def client_kill_filter( + self, + _id: Incomplete | None = ..., + _type: Incomplete | None = ..., + addr: Incomplete | None = ..., + skipme: Incomplete | None = ..., + laddr: Incomplete | None = ..., + user: Incomplete | None = ..., + **kwargs: _CommandOptions, + ): ... + async def client_info(self, **kwargs: _CommandOptions): ... + async def client_list( + self, _type: str | None = ..., client_id: list[str] = ..., **kwargs: _CommandOptions + ) -> list[dict[str, str]]: ... + async def client_getname(self, **kwargs: _CommandOptions) -> str | None: ... + async def client_getredir(self, **kwargs: _CommandOptions): ... + async def client_reply(self, reply, **kwargs: _CommandOptions): ... + async def client_id(self, **kwargs: _CommandOptions) -> int: ... + async def client_tracking_on( + self, + clientid: Incomplete | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + ): ... 
+ async def client_tracking_off( + self, + clientid: Incomplete | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + ): ... + async def client_tracking( + self, + on: bool = ..., + clientid: Incomplete | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + **kwargs: _CommandOptions, + ): ... + async def client_trackinginfo(self, **kwargs: _CommandOptions): ... + async def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ... + async def client_unblock(self, client_id, error: bool = ..., **kwargs: _CommandOptions): ... + async def client_pause(self, timeout, all: bool = ..., **kwargs: _CommandOptions): ... + async def client_unpause(self, **kwargs: _CommandOptions): ... + async def command(self, **kwargs: _CommandOptions): ... + async def command_info(self, **kwargs: _CommandOptions): ... + async def command_count(self, **kwargs: _CommandOptions): ... + async def config_get(self, pattern: PatternT = ..., *args: PatternT, **kwargs: _CommandOptions): ... + async def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ... + async def config_resetstat(self, **kwargs: _CommandOptions): ... + async def config_rewrite(self, **kwargs: _CommandOptions): ... + async def dbsize(self, **kwargs: _CommandOptions) -> int: ... + async def debug_object(self, key, **kwargs: _CommandOptions): ... + async def debug_segfault(self, **kwargs: _CommandOptions): ... + async def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ... + async def flushall(self, asynchronous: bool = ..., **kwargs: _CommandOptions) -> bool: ... + async def flushdb(self, asynchronous: bool = ..., **kwargs: _CommandOptions) -> bool: ... + async def sync(self): ... + async def psync(self, replicationid, offset): ... + async def swapdb(self, first, second, **kwargs: _CommandOptions): ... + async def select(self, index, **kwargs: _CommandOptions): ... + async def info(self, section: _Key | None = ..., *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ... + async def lastsave(self, **kwargs: _CommandOptions): ... + async def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ... + async def reset(self) -> None: ... + async def migrate( + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = ..., + replace: bool = ..., + auth: Incomplete | None = ..., + **kwargs: _CommandOptions, + ): ... + async def object(self, infotype, key, **kwargs: _CommandOptions): ... + async def memory_doctor(self, **kwargs: _CommandOptions): ... + async def memory_help(self, **kwargs: _CommandOptions): ... + async def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ... + async def memory_malloc_stats(self, **kwargs: _CommandOptions): ... + async def memory_usage(self, key, samples: Incomplete | None = ..., **kwargs: _CommandOptions): ... + async def memory_purge(self, **kwargs: _CommandOptions): ... + async def ping(self, **kwargs: _CommandOptions) -> bool: ... + async def quit(self, **kwargs: _CommandOptions): ... + async def replicaof(self, *args, **kwargs: _CommandOptions): ... + async def save(self, **kwargs: _CommandOptions) -> bool: ... + async def shutdown( + self, + save: bool = ..., + nosave: bool = ..., + now: bool = ..., + force: bool = ..., + abort: bool = ..., + **kwargs: _CommandOptions, + ) -> None: ... 
+ async def slaveof(self, host: Incomplete | None = ..., port: Incomplete | None = ..., **kwargs: _CommandOptions): ... + async def slowlog_get(self, num: Incomplete | None = ..., **kwargs: _CommandOptions): ... + async def slowlog_len(self, **kwargs: _CommandOptions): ... + async def slowlog_reset(self, **kwargs: _CommandOptions): ... + async def time(self, **kwargs: _CommandOptions): ... + async def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ... + +class BasicKeyCommands(Generic[_StrType]): + def append(self, key, value): ... + def bitcount(self, key: _Key, start: int | None = ..., end: int | None = ..., mode: str | None = ...) -> int: ... + def bitfield(self, key, default_overflow: Incomplete | None = ...): ... + def bitop(self, operation, dest, *keys): ... + def bitpos(self, key: _Key, bit: int, start: int | None = ..., end: int | None = ..., mode: str | None = ...): ... + def copy(self, source, destination, destination_db: Incomplete | None = ..., replace: bool = ...): ... + def decr(self, name, amount: int = ...) -> int: ... + def decrby(self, name, amount: int = ...) -> int: ... + def delete(self, *names: _Key) -> int: ... + def __delitem__(self, name: _Key) -> None: ... + def dump(self, name: _Key) -> _StrType | None: ... + def exists(self, *names: _Key) -> int: ... + __contains__ = exists + def expire( + self, name: _Key, time: int | timedelta, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ... + ) -> bool: ... + def expireat(self, name, when, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ...): ... + def get(self, name: _Key) -> _StrType | None: ... + def getdel(self, name: _Key) -> _StrType | None: ... + def getex( + self, + name, + ex: Incomplete | None = ..., + px: Incomplete | None = ..., + exat: Incomplete | None = ..., + pxat: Incomplete | None = ..., + persist: bool = ..., + ): ... + def __getitem__(self, name: str): ... + def getbit(self, name: _Key, offset: int) -> int: ... + def getrange(self, key, start, end): ... + def getset(self, name, value) -> _StrType | None: ... + def incr(self, name: _Key, amount: int = ...) -> int: ... + def incrby(self, name: _Key, amount: int = ...) -> int: ... + def incrbyfloat(self, name: _Key, amount: float = ...) -> float: ... + def keys(self, pattern: _Key = ..., **kwargs: _CommandOptions) -> list[_StrType]: ... + def lmove( + self, first_list: _Key, second_list: _Key, src: Literal["LEFT", "RIGHT"] = ..., dest: Literal["LEFT", "RIGHT"] = ... + ) -> _Value: ... + def blmove( + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = ..., + dest: Literal["LEFT", "RIGHT"] = ..., + ) -> _Value | None: ... + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... + def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ... + def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ... + def move(self, name: _Key, db: int) -> bool: ... + def persist(self, name: _Key) -> bool: ... + def pexpire( + self, name: _Key, time: int | timedelta, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ... + ) -> Literal[1, 0]: ... + def pexpireat( + self, name: _Key, when: int | datetime, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ... + ) -> Literal[1, 0]: ... + def psetex(self, name, time_ms, value): ... + def pttl(self, name: _Key) -> int: ... + def hrandfield(self, key, count: Incomplete | None = ..., withvalues: bool = ...): ... + def randomkey(self, **kwargs: _CommandOptions): ... 
+ def rename(self, src, dst): ... + def renamenx(self, src, dst): ... + def restore( + self, + name, + ttl, + value, + replace: bool = ..., + absttl: bool = ..., + idletime: Incomplete | None = ..., + frequency: Incomplete | None = ..., + ): ... + def set( + self, + name: _Key, + value: _Value, + ex: None | float | timedelta = ..., + px: None | float | timedelta = ..., + nx: bool = ..., + xx: bool = ..., + keepttl: bool = ..., + get: bool = ..., + exat: Incomplete | None = ..., + pxat: Incomplete | None = ..., + ) -> bool | None: ... + def __setitem__(self, name, value) -> None: ... + def setbit(self, name: _Key, offset: int, value: int) -> int: ... + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ... + def setnx(self, name: _Key, value: _Value) -> bool: ... + def setrange(self, name, offset, value): ... + def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = ..., + len: bool = ..., + idx: bool = ..., + minmatchlen: Incomplete | None = ..., + withmatchlen: bool = ..., + **kwargs: _CommandOptions, + ): ... + def strlen(self, name): ... + def substr(self, name, start, end: int = ...): ... + def touch(self, *args): ... + def ttl(self, name: _Key) -> int: ... + def type(self, name): ... + def watch(self, *names): ... + def unwatch(self): ... + def unlink(self, *names: _Key) -> int: ... + +class AsyncBasicKeyCommands(Generic[_StrType]): + async def append(self, key, value): ... + async def bitcount(self, key: _Key, start: int | None = ..., end: int | None = ..., mode: str | None = ...) -> int: ... + async def bitfield(self, key, default_overflow: Incomplete | None = ...): ... + async def bitop(self, operation, dest, *keys): ... + async def bitpos(self, key: _Key, bit: int, start: int | None = ..., end: int | None = ..., mode: str | None = ...): ... + async def copy(self, source, destination, destination_db: Incomplete | None = ..., replace: bool = ...): ... + async def decr(self, name, amount: int = ...) -> int: ... + async def decrby(self, name, amount: int = ...) -> int: ... + async def delete(self, *names: _Key) -> int: ... + async def dump(self, name: _Key) -> _StrType | None: ... + async def exists(self, *names: _Key) -> int: ... + async def expire( + self, name: _Key, time: int | timedelta, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ... + ) -> bool: ... + async def expireat(self, name, when, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ...): ... + async def get(self, name: _Key) -> _StrType | None: ... + async def getdel(self, name: _Key) -> _StrType | None: ... + async def getex( + self, + name, + ex: Incomplete | None = ..., + px: Incomplete | None = ..., + exat: Incomplete | None = ..., + pxat: Incomplete | None = ..., + persist: bool = ..., + ): ... + async def getbit(self, name: _Key, offset: int) -> int: ... + async def getrange(self, key, start, end): ... + async def getset(self, name, value) -> _StrType | None: ... + async def incr(self, name: _Key, amount: int = ...) -> int: ... + async def incrby(self, name: _Key, amount: int = ...) -> int: ... + async def incrbyfloat(self, name: _Key, amount: float = ...) -> float: ... + async def keys(self, pattern: _Key = ..., **kwargs: _CommandOptions) -> list[_StrType]: ... + async def lmove( + self, first_list: _Key, second_list: _Key, src: Literal["LEFT", "RIGHT"] = ..., dest: Literal["LEFT", "RIGHT"] = ... + ) -> _Value: ... 
+ async def blmove( + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = ..., + dest: Literal["LEFT", "RIGHT"] = ..., + ) -> _Value | None: ... + async def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... + async def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ... + async def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ... + async def move(self, name: _Key, db: int) -> bool: ... + async def persist(self, name: _Key) -> bool: ... + async def pexpire( + self, name: _Key, time: int | timedelta, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ... + ) -> Literal[1, 0]: ... + async def pexpireat( + self, name: _Key, when: int | datetime, nx: bool = ..., xx: bool = ..., gt: bool = ..., lt: bool = ... + ) -> Literal[1, 0]: ... + async def psetex(self, name, time_ms, value): ... + async def pttl(self, name: _Key) -> int: ... + async def hrandfield(self, key, count: Incomplete | None = ..., withvalues: bool = ...): ... + async def randomkey(self, **kwargs: _CommandOptions): ... + async def rename(self, src, dst): ... + async def renamenx(self, src, dst): ... + async def restore( + self, + name, + ttl, + value, + replace: bool = ..., + absttl: bool = ..., + idletime: Incomplete | None = ..., + frequency: Incomplete | None = ..., + ): ... + async def set( + self, + name: _Key, + value: _Value, + ex: None | float | timedelta = ..., + px: None | float | timedelta = ..., + nx: bool = ..., + xx: bool = ..., + keepttl: bool = ..., + get: bool = ..., + exat: Incomplete | None = ..., + pxat: Incomplete | None = ..., + ) -> bool | None: ... + async def setbit(self, name: _Key, offset: int, value: int) -> int: ... + async def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ... + async def setnx(self, name: _Key, value: _Value) -> bool: ... + async def setrange(self, name, offset, value): ... + async def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = ..., + len: bool = ..., + idx: bool = ..., + minmatchlen: Incomplete | None = ..., + withmatchlen: bool = ..., + **kwargs: _CommandOptions, + ): ... + async def strlen(self, name): ... + async def substr(self, name, start, end: int = ...): ... + async def touch(self, *args): ... + async def ttl(self, name: _Key) -> int: ... + async def type(self, name): ... + async def watch(self, *names): ... + async def unwatch(self): ... + async def unlink(self, *names: _Key) -> int: ... + def __getitem__(self, name: str): ... + def __setitem__(self, name, value) -> None: ... + def __delitem__(self, name: _Key) -> None: ... + def __contains__(self, name: _Key) -> None: ... + +class ListCommands(Generic[_StrType]): + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = ...) -> tuple[_StrType, _StrType]: ... + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = ...) -> tuple[_StrType, _StrType]: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + def brpoplpush(self, src, dst, timeout: int | None = ...): ... + def lindex(self, name: _Key, index: int) -> _StrType | None: ... + def linsert( + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> int: ... + def llen(self, name: _Key) -> int: ... 
+ def lpop(self, name, count: int | None = ...): ... + def lpush(self, name: _Value, *values: _Value) -> int: ... + def lpushx(self, name, value): ... + def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ... + def lrem(self, name: _Key, count: int, value: _Value) -> int: ... + def lset(self, name: _Key, index: int, value: _Value) -> bool: ... + def ltrim(self, name: _Key, start: int, end: int) -> bool: ... + def rpop(self, name, count: int | None = ...): ... + def rpoplpush(self, src, dst): ... + def rpush(self, name: _Value, *values: _Value) -> int: ... + def rpushx(self, name, value): ... + def lpos( + self, name, value, rank: Incomplete | None = ..., count: Incomplete | None = ..., maxlen: Incomplete | None = ... + ): ... + @overload + def sort( + self, + name: _Key, + start: int | None = ..., + num: int | None = ..., + by: _Key | None = ..., + get: _Key | Sequence[_Key] | None = ..., + desc: bool = ..., + alpha: bool = ..., + store: None = ..., + groups: bool = ..., + ) -> list[_StrType]: ... + @overload + def sort( + self, + name: _Key, + start: int | None = ..., + num: int | None = ..., + by: _Key | None = ..., + get: _Key | Sequence[_Key] | None = ..., + desc: bool = ..., + alpha: bool = ..., + *, + store: _Key, + groups: bool = ..., + ) -> int: ... + @overload + def sort( + self, + name: _Key, + start: int | None, + num: int | None, + by: _Key | None, + get: _Key | Sequence[_Key] | None, + desc: bool, + alpha: bool, + store: _Key, + groups: bool = ..., + ) -> int: ... + +class AsyncListCommands(Generic[_StrType]): + @overload + async def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = ...) -> tuple[_StrType, _StrType]: ... + @overload + async def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + @overload + async def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = ...) -> tuple[_StrType, _StrType]: ... + @overload + async def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + async def brpoplpush(self, src, dst, timeout: int | None = ...): ... + async def lindex(self, name: _Key, index: int) -> _StrType | None: ... + async def linsert( + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> int: ... + async def llen(self, name: _Key) -> int: ... + async def lpop(self, name, count: int | None = ...): ... + async def lpush(self, name: _Value, *values: _Value) -> int: ... + async def lpushx(self, name, value): ... + async def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ... + async def lrem(self, name: _Key, count: int, value: _Value) -> int: ... + async def lset(self, name: _Key, index: int, value: _Value) -> bool: ... + async def ltrim(self, name: _Key, start: int, end: int) -> bool: ... + async def rpop(self, name, count: int | None = ...): ... + async def rpoplpush(self, src, dst): ... + async def rpush(self, name: _Value, *values: _Value) -> int: ... + async def rpushx(self, name, value): ... + async def lpos( + self, name, value, rank: Incomplete | None = ..., count: Incomplete | None = ..., maxlen: Incomplete | None = ... + ): ... + @overload + async def sort( + self, + name: _Key, + start: int | None = ..., + num: int | None = ..., + by: _Key | None = ..., + get: _Key | Sequence[_Key] | None = ..., + desc: bool = ..., + alpha: bool = ..., + store: None = ..., + groups: bool = ..., + ) -> list[_StrType]: ... 
+ @overload + async def sort( + self, + name: _Key, + start: int | None = ..., + num: int | None = ..., + by: _Key | None = ..., + get: _Key | Sequence[_Key] | None = ..., + desc: bool = ..., + alpha: bool = ..., + *, + store: _Key, + groups: bool = ..., + ) -> int: ... + @overload + async def sort( + self, + name: _Key, + start: int | None, + num: int | None, + by: _Key | None, + get: _Key | Sequence[_Key] | None, + desc: bool, + alpha: bool, + store: _Key, + groups: bool = ..., + ) -> int: ... + +class ScanCommands(Generic[_StrType]): + def scan( + self, + cursor: int = ..., + match: _Key | None = ..., + count: int | None = ..., + _type: str | None = ..., + **kwargs: _CommandOptions, + ) -> tuple[int, list[_StrType]]: ... + def scan_iter( + self, match: _Key | None = ..., count: int | None = ..., _type: str | None = ..., **kwargs: _CommandOptions + ) -> Iterator[_StrType]: ... + def sscan( + self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ... + ) -> tuple[int, list[_StrType]]: ... + def sscan_iter(self, name: _Key, match: _Key | None = ..., count: int | None = ...) -> Iterator[_StrType]: ... + def hscan( + self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ... + ) -> tuple[int, dict[_StrType, _StrType]]: ... + def hscan_iter( + self, name: _Key, match: _Key | None = ..., count: int | None = ... + ) -> Iterator[tuple[_StrType, _StrType]]: ... + @overload + def zscan( + self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ... + ) -> tuple[int, list[tuple[_StrType, float]]]: ... + @overload + def zscan( + self, + name: _Key, + cursor: int = ..., + match: _Key | None = ..., + count: int | None = ..., + *, + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ... + @overload + def zscan( + self, + name: _Key, + cursor: int, + match: _Key | None, + count: int | None, + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ... + @overload + def zscan_iter(self, name: _Key, match: _Key | None = ..., count: int | None = ...) -> Iterator[tuple[_StrType, float]]: ... + @overload + def zscan_iter( + self, + name: _Key, + match: _Key | None = ..., + count: int | None = ..., + *, + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zscan_iter( + self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] + ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ... + +class AsyncScanCommands(Generic[_StrType]): + async def scan( + self, + cursor: int = ..., + match: _Key | None = ..., + count: int | None = ..., + _type: str | None = ..., + **kwargs: _CommandOptions, + ) -> tuple[int, list[_StrType]]: ... + def scan_iter( + self, match: _Key | None = ..., count: int | None = ..., _type: str | None = ..., **kwargs: _CommandOptions + ) -> AsyncIterator[_StrType]: ... + async def sscan( + self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ... + ) -> tuple[int, list[_StrType]]: ... + def sscan_iter(self, name: _Key, match: _Key | None = ..., count: int | None = ...) -> AsyncIterator[_StrType]: ... + async def hscan( + self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ... + ) -> tuple[int, dict[_StrType, _StrType]]: ... 
+ def hscan_iter( + self, name: _Key, match: _Key | None = ..., count: int | None = ... + ) -> AsyncIterator[tuple[_StrType, _StrType]]: ... + @overload + async def zscan( + self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ... + ) -> tuple[int, list[tuple[_StrType, float]]]: ... + @overload + async def zscan( + self, + name: _Key, + cursor: int = ..., + match: _Key | None = ..., + count: int | None = ..., + *, + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ... + @overload + async def zscan( + self, + name: _Key, + cursor: int, + match: _Key | None, + count: int | None, + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ... + @overload + def zscan_iter( + self, name: _Key, match: _Key | None = ..., count: int | None = ... + ) -> AsyncIterator[tuple[_StrType, float]]: ... + @overload + def zscan_iter( + self, + name: _Key, + match: _Key | None = ..., + count: int | None = ..., + *, + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zscan_iter( + self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] + ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ... + +class SetCommands(Generic[_StrType]): + def sadd(self, name: _Key, *values: _Value) -> int: ... + def scard(self, name: _Key) -> int: ... + def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... + def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... + def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... + def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... + def sismember(self, name: _Key, value: _Value) -> bool: ... + def smembers(self, name: _Key) -> builtins.set[_StrType]: ... + def smismember(self, name, values, *args): ... + def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ... + @overload + def spop(self, name: _Key, count: None = ...) -> _Value | None: ... + @overload + def spop(self, name: _Key, count: int) -> list[_Value]: ... + @overload + def srandmember(self, name: _Key, number: None = ...) -> _Value | None: ... + @overload + def srandmember(self, name: _Key, number: int) -> list[_Value]: ... + def srem(self, name: _Key, *values: _Value) -> int: ... + def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... + def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... + +class AsyncSetCommands(Generic[_StrType]): + async def sadd(self, name: _Key, *values: _Value) -> int: ... + async def scard(self, name: _Key) -> int: ... + async def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... + async def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... + async def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... + async def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... + async def sismember(self, name: _Key, value: _Value) -> bool: ... + async def smembers(self, name: _Key) -> builtins.set[_StrType]: ... + async def smismember(self, name, values, *args): ... + async def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ... 
+ @overload + async def spop(self, name: _Key, count: None = ...) -> _Value | None: ... + @overload + async def spop(self, name: _Key, count: int) -> list[_Value]: ... + @overload + async def srandmember(self, name: _Key, number: None = ...) -> _Value | None: ... + @overload + async def srandmember(self, name: _Key, number: int) -> list[_Value]: ... + async def srem(self, name: _Key, *values: _Value) -> int: ... + async def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... + async def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... + +class StreamCommands: + def xack(self, name, groupname, *ids): ... + def xadd( + self, + name, + fields, + id: str = ..., + maxlen=..., + approximate: bool = ..., + nomkstream: bool = ..., + minid: Incomplete | None = ..., + limit: Incomplete | None = ..., + ): ... + def xautoclaim( + self, + name, + groupname, + consumername, + min_idle_time, + start_id: StreamIdT = ..., + count: Incomplete | None = ..., + justid: bool = ..., + ): ... + def xclaim( + self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=... + ): ... + def xdel(self, name, *ids): ... + def xgroup_create(self, name, groupname, id: str = ..., mkstream: bool = ..., entries_read: int | None = ...): ... + def xgroup_delconsumer(self, name, groupname, consumername): ... + def xgroup_destroy(self, name, groupname): ... + def xgroup_createconsumer(self, name, groupname, consumername): ... + def xgroup_setid(self, name, groupname, id, entries_read: int | None = ...): ... + def xinfo_consumers(self, name, groupname): ... + def xinfo_groups(self, name): ... + def xinfo_stream(self, name, full: bool = ...): ... + def xlen(self, name: _Key) -> int: ... + def xpending(self, name, groupname): ... + def xpending_range( + self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = ..., idle: int | None = ... + ): ... + def xrange(self, name, min: str = ..., max: str = ..., count: Incomplete | None = ...): ... + def xread(self, streams, count: Incomplete | None = ..., block: Incomplete | None = ...): ... + def xreadgroup( + self, groupname, consumername, streams, count: Incomplete | None = ..., block: Incomplete | None = ..., noack: bool = ... + ): ... + def xrevrange(self, name, max: str = ..., min: str = ..., count: Incomplete | None = ...): ... + def xtrim( + self, name, maxlen: int | None = ..., approximate: bool = ..., minid: Incomplete | None = ..., limit: int | None = ... + ): ... + +class AsyncStreamCommands: + async def xack(self, name, groupname, *ids): ... + async def xadd( + self, + name, + fields, + id: str = ..., + maxlen=..., + approximate: bool = ..., + nomkstream: bool = ..., + minid: Incomplete | None = ..., + limit: Incomplete | None = ..., + ): ... + async def xautoclaim( + self, + name, + groupname, + consumername, + min_idle_time, + start_id: StreamIdT = ..., + count: Incomplete | None = ..., + justid: bool = ..., + ): ... + async def xclaim( + self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=... + ): ... + async def xdel(self, name, *ids): ... + async def xgroup_create(self, name, groupname, id: str = ..., mkstream: bool = ..., entries_read: int | None = ...): ... + async def xgroup_delconsumer(self, name, groupname, consumername): ... + async def xgroup_destroy(self, name, groupname): ... 
+ async def xgroup_createconsumer(self, name, groupname, consumername): ... + async def xgroup_setid(self, name, groupname, id, entries_read: int | None = ...): ... + async def xinfo_consumers(self, name, groupname): ... + async def xinfo_groups(self, name): ... + async def xinfo_stream(self, name, full: bool = ...): ... + async def xlen(self, name: _Key) -> int: ... + async def xpending(self, name, groupname): ... + async def xpending_range( + self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = ..., idle: int | None = ... + ): ... + async def xrange(self, name, min: str = ..., max: str = ..., count: Incomplete | None = ...): ... + async def xread(self, streams, count: Incomplete | None = ..., block: Incomplete | None = ...): ... + async def xreadgroup( + self, groupname, consumername, streams, count: Incomplete | None = ..., block: Incomplete | None = ..., noack: bool = ... + ): ... + async def xrevrange(self, name, max: str = ..., min: str = ..., count: Incomplete | None = ...): ... + async def xtrim( + self, name, maxlen: int | None = ..., approximate: bool = ..., minid: Incomplete | None = ..., limit: int | None = ... + ): ... + +class SortedSetCommands(Generic[_StrType]): + def zadd( + self, + name: _Key, + mapping: Mapping[_Key, _Value], + nx: bool = ..., + xx: bool = ..., + ch: bool = ..., + incr: bool = ..., + gt: Incomplete | None = ..., + lt: Incomplete | None = ..., + ) -> int: ... + def zcard(self, name: _Key) -> int: ... + def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ... + def zdiff(self, keys, withscores: bool = ...): ... + def zdiffstore(self, dest, keys): ... + def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ... + def zinter(self, keys, aggregate: Incomplete | None = ..., withscores: bool = ...): ... + def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> int: ... + def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ... + def zpopmax(self, name: _Key, count: int | None = ...) -> list[tuple[_StrType, float]]: ... + def zpopmin(self, name: _Key, count: int | None = ...) -> list[tuple[_StrType, float]]: ... + def zrandmember(self, key, count: Incomplete | None = ..., withscores: bool = ...): ... + @overload + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> tuple[_StrType, _StrType, float]: ... + @overload + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ... + @overload + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> tuple[_StrType, _StrType, float]: ... + @overload + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, float]]: ... 
+ @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, float]]: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[_StrType]: ... + @overload + def zrevrange( + self, + name: _Key, + start: int, + end: int, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrevrange(self, name: _Key, start: int, end: int, withscores: Literal[True]) -> list[tuple[_StrType, float]]: ... + @overload + def zrevrange( + self, name: _Key, start: int, end: int, withscores: bool = ..., score_cast_func: Callable[[Any], Any] = ... + ) -> list[_StrType]: ... + def zrangestore( + self, + dest, + name, + start, + end, + byscore: bool = ..., + bylex: bool = ..., + desc: bool = ..., + offset: Incomplete | None = ..., + num: Incomplete | None = ..., + ): ... + def zrangebylex( + self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ... + ) -> list[_StrType]: ... + def zrevrangebylex( + self, name: _Key, max: _Value, min: _Value, start: int | None = ..., num: int | None = ... + ) -> list[_StrType]: ... + @overload + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = ..., + num: int | None = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrangebyscore( + self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ..., *, withscores: Literal[True] + ) -> list[tuple[_StrType, float]]: ... + @overload + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> list[_StrType]: ... + @overload + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = ..., + num: int | None = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrevrangebyscore( + self, name: _Key, max: _Value, min: _Value, start: int | None = ..., num: int | None = ..., *, withscores: Literal[True] + ) -> list[tuple[_StrType, float]]: ... + @overload + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> list[_StrType]: ... + def zrank(self, name: _Key, value: _Value) -> int | None: ... + def zrem(self, name: _Key, *values: _Value) -> int: ... 
+ def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ... + def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ... + def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ... + def zrevrank(self, name: _Key, value: _Value) -> int | None: ... + def zscore(self, name: _Key, value: _Value) -> float | None: ... + def zunion(self, keys, aggregate: Incomplete | None = ..., withscores: bool = ...): ... + def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> int: ... + def zmscore(self, key, members): ... + +class AsyncSortedSetCommands(Generic[_StrType]): + async def zadd( + self, + name: _Key, + mapping: Mapping[_Key, _Value], + nx: bool = ..., + xx: bool = ..., + ch: bool = ..., + incr: bool = ..., + gt: Incomplete | None = ..., + lt: Incomplete | None = ..., + ) -> int: ... + async def zcard(self, name: _Key) -> int: ... + async def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ... + async def zdiff(self, keys, withscores: bool = ...): ... + async def zdiffstore(self, dest, keys): ... + async def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ... + async def zinter(self, keys, aggregate: Incomplete | None = ..., withscores: bool = ...): ... + async def zinterstore( + self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ... + ) -> int: ... + async def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ... + async def zpopmax(self, name: _Key, count: int | None = ...) -> list[tuple[_StrType, float]]: ... + async def zpopmin(self, name: _Key, count: int | None = ...) -> list[tuple[_StrType, float]]: ... + async def zrandmember(self, key, count: Incomplete | None = ..., withscores: bool = ...): ... + @overload + async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> tuple[_StrType, _StrType, float]: ... + @overload + async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ... + @overload + async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> tuple[_StrType, _StrType, float]: ... + @overload + async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ... + @overload + async def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + async def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, float]]: ... + @overload + async def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... 
+ @overload + async def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, float]]: ... + @overload + async def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[_StrType]: ... + @overload + async def zrevrange( + self, + name: _Key, + start: int, + end: int, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + async def zrevrange(self, name: _Key, start: int, end: int, withscores: Literal[True]) -> list[tuple[_StrType, float]]: ... + @overload + async def zrevrange( + self, name: _Key, start: int, end: int, withscores: bool = ..., score_cast_func: Callable[[Any], Any] = ... + ) -> list[_StrType]: ... + async def zrangestore( + self, + dest, + name, + start, + end, + byscore: bool = ..., + bylex: bool = ..., + desc: bool = ..., + offset: Incomplete | None = ..., + num: Incomplete | None = ..., + ): ... + async def zrangebylex( + self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ... + ) -> list[_StrType]: ... + async def zrevrangebylex( + self, name: _Key, max: _Value, min: _Value, start: int | None = ..., num: int | None = ... + ) -> list[_StrType]: ... + @overload + async def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = ..., + num: int | None = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + async def zrangebyscore( + self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ..., *, withscores: Literal[True] + ) -> list[tuple[_StrType, float]]: ... + @overload + async def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> list[_StrType]: ... + @overload + async def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = ..., + num: int | None = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn], + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + async def zrevrangebyscore( + self, name: _Key, max: _Value, min: _Value, start: int | None = ..., num: int | None = ..., *, withscores: Literal[True] + ) -> list[tuple[_StrType, float]]: ... + @overload + async def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> list[_StrType]: ... + async def zrank(self, name: _Key, value: _Value) -> int | None: ... + async def zrem(self, name: _Key, *values: _Value) -> int: ... + async def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ... + async def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ... + async def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ... 
+ async def zrevrank(self, name: _Key, value: _Value) -> int | None: ... + async def zscore(self, name: _Key, value: _Value) -> float | None: ... + async def zunion(self, keys, aggregate: Incomplete | None = ..., withscores: bool = ...): ... + async def zunionstore( + self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ... + ) -> int: ... + async def zmscore(self, key, members): ... + +class HyperlogCommands: + def pfadd(self, name: _Key, *values: _Value) -> int: ... + def pfcount(self, name: _Key) -> int: ... + def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ... + +class AsyncHyperlogCommands: + async def pfadd(self, name: _Key, *values: _Value) -> int: ... + async def pfcount(self, name: _Key) -> int: ... + async def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ... + +class HashCommands(Generic[_StrType]): + def hdel(self, name: _Key, *keys: _Key) -> int: ... + def hexists(self, name: _Key, key: _Key) -> bool: ... + def hget(self, name: _Key, key: _Key) -> _StrType | None: ... + def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ... + def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> int: ... + def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> float: ... + def hkeys(self, name: _Key) -> list[_StrType]: ... + def hlen(self, name: _Key) -> int: ... + @overload + def hset( + self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = ..., items: Incomplete | None = ... + ) -> int: ... + @overload + def hset(self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = ...) -> int: ... + @overload + def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = ...) -> int: ... + def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ... + def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ... + def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... + def hvals(self, name: _Key) -> list[_StrType]: ... + def hstrlen(self, name, key): ... + +class AsyncHashCommands(Generic[_StrType]): + async def hdel(self, name: _Key, *keys: _Key) -> int: ... + async def hexists(self, name: _Key, key: _Key) -> bool: ... + async def hget(self, name: _Key, key: _Key) -> _StrType | None: ... + async def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ... + async def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> int: ... + async def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> float: ... + async def hkeys(self, name: _Key) -> list[_StrType]: ... + async def hlen(self, name: _Key) -> int: ... + @overload + async def hset( + self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = ..., items: Incomplete | None = ... + ) -> int: ... + @overload + async def hset( + self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = ... + ) -> int: ... + @overload + async def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = ...) -> int: ... + async def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ... + async def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ... + async def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... + async def hvals(self, name: _Key) -> list[_StrType]: ... + async def hstrlen(self, name, key): ... 
+ +class AsyncScript: + def __init__(self, registered_client: AsyncRedis[Any], script: ScriptTextT) -> None: ... + async def __call__( + self, keys: Sequence[KeyT] | None = ..., args: Iterable[EncodableT] | None = ..., client: AsyncRedis[Any] | None = ... + ): ... + +class PubSubCommands: + def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ... + def pubsub_channels(self, pattern: _Key = ..., **kwargs: _CommandOptions) -> list[str]: ... + def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ... + def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ... + +class AsyncPubSubCommands: + async def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ... + async def pubsub_channels(self, pattern: _Key = ..., **kwargs: _CommandOptions) -> list[str]: ... + async def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ... + async def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ... + +class ScriptCommands(Generic[_StrType]): + def eval(self, script, numkeys, *keys_and_args): ... + def evalsha(self, sha, numkeys, *keys_and_args): ... + def script_exists(self, *args): ... + def script_debug(self, *args): ... + def script_flush(self, sync_type: Incomplete | None = ...): ... + def script_kill(self): ... + def script_load(self, script): ... + def register_script(self, script: str | _StrType) -> Script: ... + +class AsyncScriptCommands(Generic[_StrType]): + async def eval(self, script, numkeys, *keys_and_args): ... + async def evalsha(self, sha, numkeys, *keys_and_args): ... + async def script_exists(self, *args): ... + async def script_debug(self, *args): ... + async def script_flush(self, sync_type: Incomplete | None = ...): ... + async def script_kill(self): ... + async def script_load(self, script): ... + def register_script(self, script: ScriptTextT) -> AsyncScript: ... # type: ignore[override] + +class GeoCommands: + def geoadd(self, name, values, nx: bool = ..., xx: bool = ..., ch: bool = ...): ... + def geodist(self, name, place1, place2, unit: Incomplete | None = ...): ... + def geohash(self, name, *values): ... + def geopos(self, name, *values): ... + def georadius( + self, + name, + longitude, + latitude, + radius, + unit: Incomplete | None = ..., + withdist: bool = ..., + withcoord: bool = ..., + withhash: bool = ..., + count: Incomplete | None = ..., + sort: Incomplete | None = ..., + store: Incomplete | None = ..., + store_dist: Incomplete | None = ..., + any: bool = ..., + ): ... + def georadiusbymember( + self, + name, + member, + radius, + unit: Incomplete | None = ..., + withdist: bool = ..., + withcoord: bool = ..., + withhash: bool = ..., + count: Incomplete | None = ..., + sort: Incomplete | None = ..., + store: Incomplete | None = ..., + store_dist: Incomplete | None = ..., + any: bool = ..., + ): ... + def geosearch( + self, + name, + member: Incomplete | None = ..., + longitude: Incomplete | None = ..., + latitude: Incomplete | None = ..., + unit: str = ..., + radius: Incomplete | None = ..., + width: Incomplete | None = ..., + height: Incomplete | None = ..., + sort: Incomplete | None = ..., + count: Incomplete | None = ..., + any: bool = ..., + withcoord: bool = ..., + withdist: bool = ..., + withhash: bool = ..., + ): ... 
+ def geosearchstore( + self, + dest, + name, + member: Incomplete | None = ..., + longitude: Incomplete | None = ..., + latitude: Incomplete | None = ..., + unit: str = ..., + radius: Incomplete | None = ..., + width: Incomplete | None = ..., + height: Incomplete | None = ..., + sort: Incomplete | None = ..., + count: Incomplete | None = ..., + any: bool = ..., + storedist: bool = ..., + ): ... + +class AsyncGeoCommands: + async def geoadd(self, name, values, nx: bool = ..., xx: bool = ..., ch: bool = ...): ... + async def geodist(self, name, place1, place2, unit: Incomplete | None = ...): ... + async def geohash(self, name, *values): ... + async def geopos(self, name, *values): ... + async def georadius( + self, + name, + longitude, + latitude, + radius, + unit: Incomplete | None = ..., + withdist: bool = ..., + withcoord: bool = ..., + withhash: bool = ..., + count: Incomplete | None = ..., + sort: Incomplete | None = ..., + store: Incomplete | None = ..., + store_dist: Incomplete | None = ..., + any: bool = ..., + ): ... + async def georadiusbymember( + self, + name, + member, + radius, + unit: Incomplete | None = ..., + withdist: bool = ..., + withcoord: bool = ..., + withhash: bool = ..., + count: Incomplete | None = ..., + sort: Incomplete | None = ..., + store: Incomplete | None = ..., + store_dist: Incomplete | None = ..., + any: bool = ..., + ): ... + async def geosearch( + self, + name, + member: Incomplete | None = ..., + longitude: Incomplete | None = ..., + latitude: Incomplete | None = ..., + unit: str = ..., + radius: Incomplete | None = ..., + width: Incomplete | None = ..., + height: Incomplete | None = ..., + sort: Incomplete | None = ..., + count: Incomplete | None = ..., + any: bool = ..., + withcoord: bool = ..., + withdist: bool = ..., + withhash: bool = ..., + ): ... + async def geosearchstore( + self, + dest, + name, + member: Incomplete | None = ..., + longitude: Incomplete | None = ..., + latitude: Incomplete | None = ..., + unit: str = ..., + radius: Incomplete | None = ..., + width: Incomplete | None = ..., + height: Incomplete | None = ..., + sort: Incomplete | None = ..., + count: Incomplete | None = ..., + any: bool = ..., + storedist: bool = ..., + ): ... + +class ModuleCommands: + def module_load(self, path, *args): ... + def module_unload(self, name): ... + def module_list(self): ... + def command_info(self): ... + def command_count(self): ... + def command_getkeys(self, *args): ... + def command(self): ... + +class Script: + def __init__(self, registered_client, script) -> None: ... + def __call__(self, keys=..., args=..., client: Incomplete | None = ...): ... + +class BitFieldOperation: + def __init__(self, client, key, default_overflow: Incomplete | None = ...): ... + def reset(self) -> None: ... + def overflow(self, overflow): ... + def incrby(self, fmt, offset, increment, overflow: Incomplete | None = ...): ... + def get(self, fmt, offset): ... + def set(self, fmt, offset, value): ... + @property + def command(self): ... + def execute(self): ... + +class AsyncModuleCommands(ModuleCommands): + async def command_info(self) -> None: ... + +class ClusterCommands: + def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ... + def readwrite(self, **kwargs: _CommandOptions) -> bool: ... + def readonly(self, **kwargs: _CommandOptions) -> bool: ... + +class AsyncClusterCommands: + async def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ... + async def readwrite(self, **kwargs: _CommandOptions) -> bool: ... 
+ async def readonly(self, **kwargs: _CommandOptions) -> bool: ... + +class FunctionCommands: + def function_load(self, code: str, replace: bool | None = ...) -> Awaitable[str] | str: ... + def function_delete(self, library: str) -> Awaitable[str] | str: ... + def function_flush(self, mode: str = ...) -> Awaitable[str] | str: ... + def function_list(self, library: str | None = ..., withcode: bool | None = ...) -> Awaitable[list[Any]] | list[Any]: ... + def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + def function_dump(self) -> Awaitable[str] | str: ... + def function_restore(self, payload: str, policy: str | None = ...) -> Awaitable[str] | str: ... + def function_kill(self) -> Awaitable[str] | str: ... + def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ... + +class AsyncFunctionCommands: + async def function_load(self, code: str, replace: bool | None = ...) -> Awaitable[str] | str: ... + async def function_delete(self, library: str) -> Awaitable[str] | str: ... + async def function_flush(self, mode: str = ...) -> Awaitable[str] | str: ... + async def function_list(self, library: str | None = ..., withcode: bool | None = ...) -> Awaitable[list[Any]] | list[Any]: ... + async def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + async def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + async def function_dump(self) -> Awaitable[str] | str: ... + async def function_restore(self, payload: str, policy: str | None = ...) -> Awaitable[str] | str: ... + async def function_kill(self) -> Awaitable[str] | str: ... + async def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ... + +class DataAccessCommands( + BasicKeyCommands[_StrType], + HyperlogCommands, + HashCommands[_StrType], + GeoCommands, + ListCommands[_StrType], + ScanCommands[_StrType], + SetCommands[_StrType], + StreamCommands, + SortedSetCommands[_StrType], + Generic[_StrType], +): ... +class AsyncDataAccessCommands( + AsyncBasicKeyCommands[_StrType], + AsyncHyperlogCommands, + AsyncHashCommands[_StrType], + AsyncGeoCommands, + AsyncListCommands[_StrType], + AsyncScanCommands[_StrType], + AsyncSetCommands[_StrType], + AsyncStreamCommands, + AsyncSortedSetCommands[_StrType], + Generic[_StrType], +): ... +class CoreCommands( + ACLCommands[_StrType], + ClusterCommands, + DataAccessCommands[_StrType], + ManagementCommands, + ModuleCommands, + PubSubCommands, + ScriptCommands[_StrType], + Generic[_StrType], +): ... +class AsyncCoreCommands( + AsyncACLCommands[_StrType], + AsyncClusterCommands, + AsyncDataAccessCommands[_StrType], + AsyncManagementCommands, + AsyncModuleCommands, + AsyncPubSubCommands, + AsyncScriptCommands[_StrType], + AsyncFunctionCommands, + Generic[_StrType], +): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/__init__.pyi new file mode 100644 index 00000000..2586aedb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/__init__.pyi @@ -0,0 +1,26 @@ +from typing import Any + +from .commands import GraphCommands as GraphCommands +from .edge import Edge as Edge +from .node import Node as Node +from .path import Path as Path + +class Graph(GraphCommands): + NAME: Any + client: Any + execute_command: Any + nodes: Any + edges: Any + version: int + def __init__(self, client, name=...) -> None: ... + @property + def name(self): ... + def get_label(self, idx): ... + def get_relation(self, idx): ... + def get_property(self, idx): ... + def add_node(self, node) -> None: ... + def add_edge(self, edge) -> None: ... + def call_procedure(self, procedure, *args, read_only: bool = ..., **kwagrs): ... + def labels(self): ... + def relationship_types(self): ... + def property_keys(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/commands.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/commands.pyi new file mode 100644 index 00000000..b5ccf851 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/commands.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete +from typing import Any + +class GraphCommands: + def commit(self): ... + version: Any + def query( + self, q, params: Incomplete | None = ..., timeout: Incomplete | None = ..., read_only: bool = ..., profile: bool = ... + ): ... + def merge(self, pattern): ... + def delete(self): ... + nodes: Any + edges: Any + def flush(self) -> None: ... + def explain(self, query, params: Incomplete | None = ...): ... + def bulk(self, **kwargs) -> None: ... + def profile(self, query): ... + def slowlog(self): ... + def config(self, name, value: Incomplete | None = ..., set: bool = ...): ... + def list_keys(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/edge.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/edge.pyi new file mode 100644 index 00000000..15975579 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/edge.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +class Edge: + id: Any + relation: Any + properties: Any + src_node: Any + dest_node: Any + def __init__( + self, src_node, relation, dest_node, edge_id: Incomplete | None = ..., properties: Incomplete | None = ... + ) -> None: ... + def to_string(self): ... + def __eq__(self, rhs): ... 
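The Graph wrapper and GraphCommands above are normally reached through the graph() accessor on the client (stubbed further down in redismodules.pyi). A minimal query sketch, assuming the RedisGraph module is loaded; the graph name and Cypher text are illustrative:

    import redis

    r = redis.Redis(decode_responses=True)
    g = r.graph("social")                            # returns the Graph wrapper above

    g.query("CREATE (:Person {name: 'Ada'})")        # write query
    res = g.query("MATCH (p:Person) RETURN p.name",  # read-only query
                  read_only=True)
    g.delete()                                       # drop the whole graph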
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/exceptions.pyi new file mode 100644 index 00000000..6069e055 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/exceptions.pyi @@ -0,0 +1,5 @@ +from typing import Any + +class VersionMismatchException(Exception): + version: Any + def __init__(self, version) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/node.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/node.pyi new file mode 100644 index 00000000..9160daee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/node.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +class Node: + id: Any + alias: Any + label: Any + labels: Any + properties: Any + def __init__( + self, + node_id: Incomplete | None = ..., + alias: Incomplete | None = ..., + label: str | list[str] | None = ..., + properties: Incomplete | None = ..., + ) -> None: ... + def to_string(self): ... + def __eq__(self, rhs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/path.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/path.pyi new file mode 100644 index 00000000..69106f89 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/path.pyi @@ -0,0 +1,18 @@ +from typing import Any + +class Path: + append_type: Any + def __init__(self, nodes, edges) -> None: ... + @classmethod + def new_empty_path(cls): ... + def nodes(self): ... + def edges(self): ... + def get_node(self, index): ... + def get_relationship(self, index): ... + def first_node(self): ... + def last_node(self): ... + def edge_count(self): ... + def nodes_count(self): ... + def add_node(self, node): ... + def add_edge(self, edge): ... + def __eq__(self, other): ... 
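Node and Edge above are plain value objects; Graph.add_node()/add_edge() buffer them until commit() turns them into a single CREATE query. A sketch under the same assumptions as before (RedisGraph loaded, graph and property names illustrative):

    import redis
    from redis.commands.graph.edge import Edge
    from redis.commands.graph.node import Node

    r = redis.Redis(decode_responses=True)
    g = r.graph("social")

    ada = Node(label="Person", properties={"name": "Ada"})
    bob = Node(label="Person", properties={"name": "Bob"})
    g.add_node(ada)
    g.add_node(bob)
    g.add_edge(Edge(ada, "KNOWS", bob, properties={"since": 2020}))
    g.commit()    # flushes the buffered nodes and edges in one query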
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/query_result.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/query_result.pyi new file mode 100644 index 00000000..53cf3ebf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/graph/query_result.pyi @@ -0,0 +1,75 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +LABELS_ADDED: str +NODES_CREATED: str +NODES_DELETED: str +RELATIONSHIPS_DELETED: str +PROPERTIES_SET: str +RELATIONSHIPS_CREATED: str +INDICES_CREATED: str +INDICES_DELETED: str +CACHED_EXECUTION: str +INTERNAL_EXECUTION_TIME: str +STATS: Any + +class ResultSetColumnTypes: + COLUMN_UNKNOWN: ClassVar[Literal[0]] + COLUMN_SCALAR: ClassVar[Literal[1]] + COLUMN_NODE: ClassVar[Literal[2]] + COLUMN_RELATION: ClassVar[Literal[3]] + +class ResultSetScalarTypes: + VALUE_UNKNOWN: ClassVar[Literal[0]] + VALUE_NULL: ClassVar[Literal[1]] + VALUE_STRING: ClassVar[Literal[2]] + VALUE_INTEGER: ClassVar[Literal[3]] + VALUE_BOOLEAN: ClassVar[Literal[4]] + VALUE_DOUBLE: ClassVar[Literal[5]] + VALUE_ARRAY: ClassVar[Literal[6]] + VALUE_EDGE: ClassVar[Literal[7]] + VALUE_NODE: ClassVar[Literal[8]] + VALUE_PATH: ClassVar[Literal[9]] + VALUE_MAP: ClassVar[Literal[10]] + VALUE_POINT: ClassVar[Literal[11]] + +class QueryResult: + graph: Any + header: Any + result_set: Any + def __init__(self, graph, response, profile: bool = ...) -> None: ... + def parse_results(self, raw_result_set) -> None: ... + statistics: Any + def parse_statistics(self, raw_statistics) -> None: ... + def parse_header(self, raw_result_set): ... + def parse_records(self, raw_result_set): ... + def parse_entity_properties(self, props): ... + def parse_string(self, cell): ... + def parse_node(self, cell): ... + def parse_edge(self, cell): ... + def parse_path(self, cell): ... + def parse_map(self, cell): ... + def parse_point(self, cell): ... + def parse_scalar(self, cell): ... + def parse_profile(self, response) -> None: ... + def is_empty(self): ... + @property + def labels_added(self): ... + @property + def nodes_created(self): ... + @property + def nodes_deleted(self): ... + @property + def properties_set(self): ... + @property + def relationships_created(self): ... + @property + def relationships_deleted(self): ... + @property + def indices_created(self): ... + @property + def indices_deleted(self): ... + @property + def cached_execution(self): ... + @property + def run_time_ms(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/helpers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/helpers.pyi new file mode 100644 index 00000000..ec1bb28b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/helpers.pyi @@ -0,0 +1,10 @@ +def list_or_args(keys, args): ... +def nativestr(x): ... +def delist(x): ... +def parse_to_list(response): ... +def parse_list_to_dict(response): ... +def parse_to_dict(response): ... +def random_string(length: int = ...) -> str: ... +def quote_string(v): ... +def decode_dict_keys(obj): ... +def stringify_param_value(value): ... 
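QueryResult above is what GraphCommands.query() returns: parse_results()/parse_statistics() turn the raw reply into result_set rows and the statistics exposed through the properties at the bottom. A small sketch of reading one, with the same illustrative graph as before:

    import redis

    r = redis.Redis(decode_responses=True)
    result = r.graph("social").query("MATCH (p:Person) RETURN p.name")

    print(result.header)            # column metadata from parse_header()
    for row in result.result_set:   # rows from parse_records(): scalars, Node, Edge or Path values
        print(row)
    print(result.nodes_created, result.cached_execution, result.run_time_ms)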
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/__init__.pyi new file mode 100644 index 00000000..ef69aceb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/__init__.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from typing import Any + +from ...client import Pipeline as ClientPipeline +from .commands import JSONCommands + +class JSON(JSONCommands): + MODULE_CALLBACKS: dict[str, Any] + client: Any + execute_command: Any + MODULE_VERSION: Incomplete | None + def __init__(self, client, version: Incomplete | None = ..., decoder=..., encoder=...) -> None: ... + def pipeline(self, transaction: bool = ..., shard_hint: Incomplete | None = ...) -> Pipeline: ... + +class Pipeline(JSONCommands, ClientPipeline[Incomplete]): ... # type: ignore[misc] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/commands.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/commands.pyi new file mode 100644 index 00000000..a4013852 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/commands.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete + +class JSONCommands: + def arrappend(self, name, path=..., *args): ... + def arrindex(self, name, path, scalar, start: int = ..., stop: int = ...): ... + def arrinsert(self, name, path, index, *args): ... + def arrlen(self, name, path=...): ... + def arrpop(self, name, path=..., index: int = ...): ... + def arrtrim(self, name, path, start, stop): ... + def type(self, name, path=...): ... + def resp(self, name, path=...): ... + def objkeys(self, name, path=...): ... + def objlen(self, name, path=...): ... + def numincrby(self, name, path, number): ... + def nummultby(self, name, path, number): ... + def clear(self, name, path=...): ... + def delete(self, key, path=...): ... + forget = delete + def get(self, name, *args, no_escape: bool = ...): ... + def mget(self, keys, path): ... + def set(self, name, path, obj, nx: bool = ..., xx: bool = ..., decode_keys: bool = ...): ... + def set_file(self, name, path, file_name, nx: bool = ..., xx: bool = ..., decode_keys: bool = ...): ... + def set_path(self, json_path, root_folder, nx: bool = ..., xx: bool = ..., decode_keys: bool = ...): ... + def strlen(self, name, path: Incomplete | None = ...): ... + def toggle(self, name, path=...): ... + def strappend(self, name, value, path=...): ... + def debug(self, subcommand, key: Incomplete | None = ..., path=...): ... + def jsonget(self, *args, **kwargs): ... + def jsonmget(self, *args, **kwargs): ... + def jsonset(self, *args, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/decoders.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/decoders.pyi new file mode 100644 index 00000000..ccea2438 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/decoders.pyi @@ -0,0 +1,4 @@ +def bulk_of_jsons(d): ... +def decode_dict_keys(obj): ... +def unstring(obj): ... +def decode_list(b): ... 
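JSONCommands above maps onto the RedisJSON command set, and the JSON wrapper is obtained via the json() accessor stubbed below in redismodules.pyi. A usage sketch, assuming RedisJSON is loaded; the key and document are illustrative:

    import redis

    r = redis.Redis(decode_responses=True)
    j = r.json()

    j.set("user:1", "$", {"name": "Ada", "langs": ["python"]})
    j.arrappend("user:1", "$.langs", "lua")
    print(j.get("user:1", "$.name"))      # JSON.GET user:1 $.name
    print(j.objkeys("user:1"))
    j.delete("user:1")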
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/path.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/path.pyi new file mode 100644 index 00000000..bbc35c4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/json/path.pyi @@ -0,0 +1,5 @@ +class Path: + strPath: str + @staticmethod + def root_path() -> str: ... + def __init__(self, path: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/parser.pyi new file mode 100644 index 00000000..f7649082 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/parser.pyi @@ -0,0 +1,8 @@ +from redis.client import AbstractRedis +from redis.typing import EncodableT + +class CommandsParser: + commands: dict[str, str] + def __init__(self, redis_connection: AbstractRedis) -> None: ... + def initialize(self, r: AbstractRedis) -> None: ... + def get_keys(self, redis_conn: AbstractRedis, *args: EncodableT) -> list[EncodableT] | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/redismodules.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/redismodules.pyi new file mode 100644 index 00000000..6f535b23 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/redismodules.pyi @@ -0,0 +1,14 @@ +from .json import JSON +from .search import Search +from .timeseries import TimeSeries + +class RedisModuleCommands: + def json(self, encoder=..., decoder=...) -> JSON: ... + def ft(self, index_name: str = ...) -> Search: ... + def ts(self) -> TimeSeries: ... + def bf(self): ... + def cf(self): ... + def cms(self): ... + def topk(self): ... + def tdigest(self): ... + def graph(self, index_name: str = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/__init__.pyi new file mode 100644 index 00000000..d62721a0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/__init__.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +from .commands import SearchCommands + +class Search(SearchCommands): + class BatchIndexer: + def __init__(self, client, chunk_size: int = ...) -> None: ... + def add_document( + self, + doc_id, + nosave: bool = ..., + score: float = ..., + payload: Incomplete | None = ..., + replace: bool = ..., + partial: bool = ..., + no_create: bool = ..., + **fields, + ): ... + def add_document_hash(self, doc_id, score: float = ..., replace: bool = ...): ... + def commit(self): ... + + def __init__(self, client, index_name: str = ...) -> None: ... 
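RedisModuleCommands above is the glue that hangs the per-module wrappers off the main client; each accessor returns one of the classes stubbed elsewhere in this diff. A short sketch (the index and graph names are illustrative):

    import redis

    r = redis.Redis(decode_responses=True)

    search = r.ft("idx")     # Search (RediSearch)
    doc = r.json()           # JSON (RedisJSON)
    ts = r.ts()              # TimeSeries (RedisTimeSeries)
    g = r.graph("social")    # Graph (RedisGraph)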
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/aggregation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/aggregation.pyi new file mode 100644 index 00000000..0ccd5052 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/aggregation.pyi @@ -0,0 +1,54 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +FIELDNAME: Any + +class Limit: + offset: Any + count: Any + def __init__(self, offset: int = ..., count: int = ...) -> None: ... + def build_args(self): ... + +class Reducer: + NAME: ClassVar[None] + def __init__(self, *args) -> None: ... + def alias(self, alias): ... + @property + def args(self): ... + +class SortDirection: + DIRSTRING: ClassVar[str | None] + field: Any + def __init__(self, field) -> None: ... + +class Asc(SortDirection): + DIRSTRING: ClassVar[Literal["ASC"]] + +class Desc(SortDirection): + DIRSTRING: ClassVar[Literal["DESC"]] + +class AggregateRequest: + def __init__(self, query: str = ...) -> None: ... + def load(self, *fields): ... + def group_by(self, fields, *reducers): ... + def apply(self, **kwexpr): ... + def limit(self, offset, num): ... + def sort_by(self, *fields, **kwargs): ... + def filter(self, expressions): ... + def with_schema(self): ... + def verbatim(self): ... + def cursor(self, count: int = ..., max_idle: float = ...): ... + def build_args(self): ... + +class Cursor: + cid: Any + max_idle: int + count: int + def __init__(self, cid) -> None: ... + def build_args(self): ... + +class AggregateResult: + rows: Any + cursor: Any + schema: Any + def __init__(self, rows, cursor, schema) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/commands.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/commands.pyi new file mode 100644 index 00000000..29777c63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/commands.pyi @@ -0,0 +1,111 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any +from typing_extensions import Literal, TypeAlias + +from .aggregation import AggregateRequest, AggregateResult, Cursor +from .query import Query +from .result import Result + +_QueryParams: TypeAlias = Mapping[str, str | float] + +NUMERIC: Literal["NUMERIC"] + +CREATE_CMD: Literal["FT.CREATE"] +ALTER_CMD: Literal["FT.ALTER"] +SEARCH_CMD: Literal["FT.SEARCH"] +ADD_CMD: Literal["FT.ADD"] +ADDHASH_CMD: Literal["FT.ADDHASH"] +DROP_CMD: Literal["FT.DROP"] +EXPLAIN_CMD: Literal["FT.EXPLAIN"] +EXPLAINCLI_CMD: Literal["FT.EXPLAINCLI"] +DEL_CMD: Literal["FT.DEL"] +AGGREGATE_CMD: Literal["FT.AGGREGATE"] +PROFILE_CMD: Literal["FT.PROFILE"] +CURSOR_CMD: Literal["FT.CURSOR"] +SPELLCHECK_CMD: Literal["FT.SPELLCHECK"] +DICT_ADD_CMD: Literal["FT.DICTADD"] +DICT_DEL_CMD: Literal["FT.DICTDEL"] +DICT_DUMP_CMD: Literal["FT.DICTDUMP"] +GET_CMD: Literal["FT.GET"] +MGET_CMD: Literal["FT.MGET"] +CONFIG_CMD: Literal["FT.CONFIG"] +TAGVALS_CMD: Literal["FT.TAGVALS"] +ALIAS_ADD_CMD: Literal["FT.ALIASADD"] +ALIAS_UPDATE_CMD: Literal["FT.ALIASUPDATE"] +ALIAS_DEL_CMD: Literal["FT.ALIASDEL"] +INFO_CMD: Literal["FT.INFO"] +SUGADD_COMMAND: Literal["FT.SUGADD"] +SUGDEL_COMMAND: Literal["FT.SUGDEL"] +SUGLEN_COMMAND: Literal["FT.SUGLEN"] 
+SUGGET_COMMAND: Literal["FT.SUGGET"] +SYNUPDATE_CMD: Literal["FT.SYNUPDATE"] +SYNDUMP_CMD: Literal["FT.SYNDUMP"] + +NOOFFSETS: Literal["NOOFFSETS"] +NOFIELDS: Literal["NOFIELDS"] +STOPWORDS: Literal["STOPWORDS"] +WITHSCORES: Literal["WITHSCORES"] +FUZZY: Literal["FUZZY"] +WITHPAYLOADS: Literal["WITHPAYLOADS"] + +class SearchCommands: + def batch_indexer(self, chunk_size: int = ...): ... + def create_index( + self, + fields, + no_term_offsets: bool = ..., + no_field_flags: bool = ..., + stopwords: Incomplete | None = ..., + definition: Incomplete | None = ..., + max_text_fields: bool = ..., # added in 4.1.1 + temporary: Incomplete | None = ..., # added in 4.1.1 + no_highlight: bool = ..., # added in 4.1.1 + no_term_frequencies: bool = ..., # added in 4.1.1 + skip_initial_scan: bool = ..., # added in 4.1.1 + ): ... + def alter_schema_add(self, fields): ... + def dropindex(self, delete_documents: bool = ...): ... + def add_document( + self, + doc_id, + nosave: bool = ..., + score: float = ..., + payload: Incomplete | None = ..., + replace: bool = ..., + partial: bool = ..., + language: Incomplete | None = ..., + no_create: bool = ..., + **fields, + ): ... + def add_document_hash(self, doc_id, score: float = ..., language: Incomplete | None = ..., replace: bool = ...): ... + def delete_document(self, doc_id, conn: Incomplete | None = ..., delete_actual_document: bool = ...): ... + def load_document(self, id): ... + def get(self, *ids): ... + def info(self): ... + def get_params_args(self, query_params: _QueryParams) -> list[Any]: ... + def search(self, query: str | Query, query_params: _QueryParams | None = ...) -> Result: ... + def explain(self, query: str | Query, query_params: _QueryParams | None = ...): ... + def explain_cli(self, query): ... + def aggregate(self, query: AggregateRequest | Cursor, query_params: _QueryParams | None = ...) -> AggregateResult: ... + def profile( + self, query: str | Query | AggregateRequest, limited: bool = ..., query_params: Mapping[str, str | float] | None = ... + ) -> tuple[Incomplete, Incomplete]: ... + def spellcheck( + self, query, distance: Incomplete | None = ..., include: Incomplete | None = ..., exclude: Incomplete | None = ... + ): ... + def dict_add(self, name, *terms): ... + def dict_del(self, name, *terms): ... + def dict_dump(self, name): ... + def config_set(self, option: str, value: str) -> bool: ... + def config_get(self, option: str) -> dict[str, str]: ... + def tagvals(self, tagfield): ... + def aliasadd(self, alias): ... + def aliasupdate(self, alias): ... + def aliasdel(self, alias): ... + def sugadd(self, key, *suggestions, **kwargs): ... + def suglen(self, key): ... + def sugdel(self, key, string): ... + def sugget(self, key, prefix, fuzzy: bool = ..., num: int = ..., with_scores: bool = ..., with_payloads: bool = ...): ... + def synupdate(self, groupid, skipinitial: bool = ..., *terms): ... + def syndump(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/query.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/query.pyi new file mode 100644 index 00000000..e88f2a56 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/query.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete +from typing import Any + +class Query: + def __init__(self, query_string) -> None: ... + def query_string(self): ... + def limit_ids(self, *ids): ... 
+ def return_fields(self, *fields): ... + def return_field(self, field, as_field: Incomplete | None = ...): ... + def summarize( + self, + fields: Incomplete | None = ..., + context_len: Incomplete | None = ..., + num_frags: Incomplete | None = ..., + sep: Incomplete | None = ..., + ): ... + def highlight(self, fields: Incomplete | None = ..., tags: Incomplete | None = ...): ... + def language(self, language): ... + def slop(self, slop): ... + def in_order(self): ... + def scorer(self, scorer): ... + def get_args(self): ... + def paging(self, offset, num): ... + def verbatim(self): ... + def no_content(self): ... + def no_stopwords(self): ... + def with_payloads(self): ... + def with_scores(self): ... + def limit_fields(self, *fields): ... + def add_filter(self, flt): ... + def sort_by(self, field, asc: bool = ...): ... + def expander(self, expander): ... + +class Filter: + args: Any + def __init__(self, keyword, field, *args) -> None: ... + +class NumericFilter(Filter): + INF: str + NEG_INF: str + def __init__(self, field, minval, maxval, minExclusive: bool = ..., maxExclusive: bool = ...) -> None: ... + +class GeoFilter(Filter): + METERS: str + KILOMETERS: str + FEET: str + MILES: str + def __init__(self, field, lon, lat, radius, unit=...) -> None: ... + +class SortbyField: + args: Any + def __init__(self, field, asc: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/result.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/result.pyi new file mode 100644 index 00000000..2908b9a6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/search/result.pyi @@ -0,0 +1,7 @@ +from typing import Any + +class Result: + total: Any + duration: Any + docs: Any + def __init__(self, res, hascontent, duration: int = ..., has_payload: bool = ..., with_scores: bool = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/sentinel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/sentinel.pyi new file mode 100644 index 00000000..b526a45f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/sentinel.pyi @@ -0,0 +1,17 @@ +class SentinelCommands: + def sentinel(self, *args): ... + def sentinel_get_master_addr_by_name(self, service_name): ... + def sentinel_master(self, service_name): ... + def sentinel_masters(self): ... + def sentinel_monitor(self, name, ip, port, quorum): ... + def sentinel_remove(self, name): ... + def sentinel_sentinels(self, service_name): ... + def sentinel_set(self, name, option, value): ... + def sentinel_slaves(self, service_name): ... + def sentinel_reset(self, pattern): ... + def sentinel_failover(self, new_master_name): ... + def sentinel_ckquorum(self, new_master_name): ... + def sentinel_flushconfig(self): ... + +class AsyncSentinelCommands(SentinelCommands): + async def sentinel(self, *args) -> None: ... 
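Query above is a fluent builder whose methods return the query object, so calls chain; SearchCommands.search() then sends it as FT.SEARCH and wraps the reply in the Result stubbed above. A sketch, assuming RediSearch is loaded and an index named "idx" with a NUMERIC "price" field already exists (both names are illustrative):

    import redis
    from redis.commands.search.query import NumericFilter, Query

    r = redis.Redis(decode_responses=True)
    ft = r.ft("idx")

    q = (
        Query("hello world")
        .paging(0, 10)
        .with_scores()
        .return_fields("title", "body")
    )
    q.add_filter(NumericFilter("price", 10, 100))

    res = ft.search(q)
    print(res.total, res.duration)
    for doc in res.docs:
        print(doc)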
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/__init__.pyi new file mode 100644 index 00000000..425d3a4c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/__init__.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +from ...client import Pipeline as ClientPipeline +from .commands import TimeSeriesCommands + +class TimeSeries(TimeSeriesCommands): + MODULE_CALLBACKS: dict[str, Any] + client: Any + execute_command: Any + def __init__(self, client: Incomplete | None = ..., **kwargs) -> None: ... + def pipeline(self, transaction: bool = ..., shard_hint: Incomplete | None = ...) -> Pipeline: ... + +class Pipeline(TimeSeriesCommands, ClientPipeline[Incomplete]): ... # type: ignore[misc] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/commands.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/commands.pyi new file mode 100644 index 00000000..dc15c850 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/commands.pyi @@ -0,0 +1,155 @@ +from typing_extensions import Literal, TypeAlias + +_Key: TypeAlias = bytes | str | memoryview + +ADD_CMD: Literal["TS.ADD"] +ALTER_CMD: Literal["TS.ALTER"] +CREATERULE_CMD: Literal["TS.CREATERULE"] +CREATE_CMD: Literal["TS.CREATE"] +DECRBY_CMD: Literal["TS.DECRBY"] +DELETERULE_CMD: Literal["TS.DELETERULE"] +DEL_CMD: Literal["TS.DEL"] +GET_CMD: Literal["TS.GET"] +INCRBY_CMD: Literal["TS.INCRBY"] +INFO_CMD: Literal["TS.INFO"] +MADD_CMD: Literal["TS.MADD"] +MGET_CMD: Literal["TS.MGET"] +MRANGE_CMD: Literal["TS.MRANGE"] +MREVRANGE_CMD: Literal["TS.MREVRANGE"] +QUERYINDEX_CMD: Literal["TS.QUERYINDEX"] +RANGE_CMD: Literal["TS.RANGE"] +REVRANGE_CMD: Literal["TS.REVRANGE"] + +class TimeSeriesCommands: + def create( + self, + key: _Key, + retention_msecs: int | None = ..., + uncompressed: bool | None = ..., + labels: dict[str, str] | None = ..., + chunk_size: int | None = ..., + duplicate_policy: str | None = ..., + ): ... + def alter( + self, + key: _Key, + retention_msecs: int | None = ..., + labels: dict[str, str] | None = ..., + chunk_size: int | None = ..., + duplicate_policy: str | None = ..., + ): ... + def add( + self, + key: _Key, + timestamp: int | str, + value: float, + retention_msecs: int | None = ..., + uncompressed: bool | None = ..., + labels: dict[str, str] | None = ..., + chunk_size: int | None = ..., + duplicate_policy: str | None = ..., + ): ... + def madd(self, ktv_tuples): ... + def incrby( + self, + key: _Key, + value: float, + timestamp: int | str | None = ..., + retention_msecs: int | None = ..., + uncompressed: bool | None = ..., + labels: dict[str, str] | None = ..., + chunk_size: int | None = ..., + ): ... + def decrby( + self, + key: _Key, + value: float, + timestamp: int | str | None = ..., + retention_msecs: int | None = ..., + uncompressed: bool | None = ..., + labels: dict[str, str] | None = ..., + chunk_size: int | None = ..., + ): ... + def delete(self, key, from_time, to_time): ... + def createrule( + self, source_key: _Key, dest_key: _Key, aggregation_type: str, bucket_size_msec: int, align_timestamp: int | None = ... + ): ... 
+ def deleterule(self, source_key, dest_key): ... + def range( + self, + key: _Key, + from_time: int | str, + to_time: int | str, + count: int | None = ..., + aggregation_type: str | None = ..., + bucket_size_msec: int | None = ..., + filter_by_ts: list[int] | None = ..., + filter_by_min_value: int | None = ..., + filter_by_max_value: int | None = ..., + align: int | str | None = ..., + latest: bool | None = ..., + bucket_timestamp: str | None = ..., + empty: bool | None = ..., + ): ... + def revrange( + self, + key: _Key, + from_time: int | str, + to_time: int | str, + count: int | None = ..., + aggregation_type: str | None = ..., + bucket_size_msec: int | None = ..., + filter_by_ts: list[int] | None = ..., + filter_by_min_value: int | None = ..., + filter_by_max_value: int | None = ..., + align: int | str | None = ..., + latest: bool | None = ..., + bucket_timestamp: str | None = ..., + empty: bool | None = ..., + ): ... + def mrange( + self, + from_time: int | str, + to_time: int | str, + filters: list[str], + count: int | None = ..., + aggregation_type: str | None = ..., + bucket_size_msec: int | None = ..., + with_labels: bool | None = ..., + filter_by_ts: list[int] | None = ..., + filter_by_min_value: int | None = ..., + filter_by_max_value: int | None = ..., + groupby: str | None = ..., + reduce: str | None = ..., + select_labels: list[str] | None = ..., + align: int | str | None = ..., + latest: bool | None = ..., + bucket_timestamp: str | None = ..., + empty: bool | None = ..., + ): ... + def mrevrange( + self, + from_time: int | str, + to_time: int | str, + filters: list[str], + count: int | None = ..., + aggregation_type: str | None = ..., + bucket_size_msec: int | None = ..., + with_labels: bool | None = ..., + filter_by_ts: list[int] | None = ..., + filter_by_min_value: int | None = ..., + filter_by_max_value: int | None = ..., + groupby: str | None = ..., + reduce: str | None = ..., + select_labels: list[str] | None = ..., + align: int | str | None = ..., + latest: bool | None = ..., + bucket_timestamp: str | None = ..., + empty: bool | None = ..., + ): ... + def get(self, key: _Key, latest: bool | None = ...): ... + def mget( + self, filters: list[str], with_labels: bool | None = ..., select_labels: list[str] | None = ..., latest: bool | None = ... + ): ... + def info(self, key): ... + def queryindex(self, filters): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/info.pyi new file mode 100644 index 00000000..8b082c7d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/info.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +class TSInfo: + rules: list[Any] + labels: list[Any] + sourceKey: Incomplete | None + chunk_count: Incomplete | None + memory_usage: Incomplete | None + total_samples: Incomplete | None + retention_msecs: Incomplete | None + last_time_stamp: Incomplete | None + first_time_stamp: Incomplete | None + + max_samples_per_chunk: Incomplete | None + chunk_size: Incomplete | None + duplicate_policy: Incomplete | None + def __init__(self, args) -> None: ... 
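TimeSeriesCommands above mirrors the RedisTimeSeries commands, and TSInfo is the parsed reply of TS.INFO. A short sketch, assuming the module is loaded; the key, labels, and bucket sizes are illustrative:

    import redis

    r = redis.Redis(decode_responses=True)
    ts = r.ts()

    ts.create("sensor:temp", retention_msecs=86_400_000, labels={"room": "lab"})
    ts.add("sensor:temp", "*", 21.5)        # "*" lets the server assign the timestamp
    ts.incrby("sensor:temp", 0.5)
    print(ts.get("sensor:temp"))            # latest (timestamp, value) pair
    print(ts.range("sensor:temp", "-", "+",
                   aggregation_type="avg", bucket_size_msec=60_000))
    print(ts.info("sensor:temp").total_samples)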
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/utils.pyi new file mode 100644 index 00000000..4a0d52c4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/commands/timeseries/utils.pyi @@ -0,0 +1,5 @@ +def list_to_dict(aList): ... +def parse_range(response): ... +def parse_m_range(response): ... +def parse_get(response): ... +def parse_m_get(response): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/connection.pyi new file mode 100644 index 00000000..a409a9d8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/connection.pyi @@ -0,0 +1,232 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable, Iterable, Mapping +from queue import Queue +from socket import socket +from typing import Any, ClassVar +from typing_extensions import Self, TypeAlias + +from .credentials import CredentialProvider +from .retry import Retry + +ssl_available: bool +SYM_STAR: bytes +SYM_DOLLAR: bytes +SYM_CRLF: bytes +SYM_EMPTY: bytes +SERVER_CLOSED_CONNECTION_ERROR: str +NONBLOCKING_EXCEPTIONS: tuple[type[Exception], ...] +NONBLOCKING_EXCEPTION_ERROR_NUMBERS: dict[type[Exception], int] +SENTINEL: object +MODULE_LOAD_ERROR: str +NO_SUCH_MODULE_ERROR: str +MODULE_UNLOAD_NOT_POSSIBLE_ERROR: str +MODULE_EXPORTS_DATA_TYPES_ERROR: str +FALSE_STRINGS: tuple[str, ...] +URL_QUERY_ARGUMENT_PARSERS: dict[str, Callable[[Any], Any]] + +# Options as passed to Pool.get_connection(). +_ConnectionPoolOptions: TypeAlias = Any +_ConnectFunc: TypeAlias = Callable[[Connection], object] + +class BaseParser: + EXCEPTION_CLASSES: ClassVar[dict[str, type[Exception] | dict[str, type[Exception]]]] + def parse_error(self, response: str) -> Exception: ... + +class SocketBuffer: + socket_read_size: int + bytes_written: int + bytes_read: int + socket_timeout: float | None + def __init__(self, socket: socket, socket_read_size: int, socket_timeout: float | None) -> None: ... + def unread_bytes(self) -> int: ... + def can_read(self, timeout: float | None) -> bool: ... + def read(self, length: int) -> bytes: ... + def readline(self) -> bytes: ... + def get_pos(self) -> int: ... + def rewind(self, pos: int) -> None: ... + def purge(self) -> None: ... + def close(self) -> None: ... + +class PythonParser(BaseParser): + encoding: str + socket_read_size: int + encoder: Encoder | None + def __init__(self, socket_read_size: int) -> None: ... + def __del__(self) -> None: ... + def on_connect(self, connection: Connection) -> None: ... + def on_disconnect(self) -> None: ... + def can_read(self, timeout: float | None) -> bool: ... + def read_response(self, disable_decoding: bool = ...) -> Any: ... # `str | bytes` or `list[str | bytes]` + +class HiredisParser(BaseParser): + socket_read_size: int + def __init__(self, socket_read_size: int) -> None: ... + def __del__(self) -> None: ... + def on_connect(self, connection: Connection, **kwargs) -> None: ... + def on_disconnect(self) -> None: ... + def can_read(self, timeout: float | None) -> bool: ... + def read_from_socket(self, timeout: float | None = ..., raise_on_timeout: bool = ...) -> bool: ... 
+ def read_response(self, disable_decoding: bool = ...) -> Any: ... # `str | bytes` or `list[str | bytes]` + +DefaultParser: type[BaseParser] # Hiredis or PythonParser + +_Encodable: TypeAlias = str | bytes | memoryview | bool | float + +class Encoder: + encoding: str + encoding_errors: str + decode_responses: bool + def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ... + def encode(self, value: _Encodable) -> bytes: ... + def decode(self, value: str | bytes | memoryview, force: bool = ...) -> str: ... + +class Connection: + pid: int + host: str + port: int + db: int + username: str | None + password: str | None + client_name: str | None + socket_timeout: float | None + socket_connect_timeout: float | None + socket_keepalive: bool + socket_keepalive_options: Mapping[str, int | str] + socket_type: int + retry_on_timeout: bool + retry_on_error: list[type[Exception]] + retry: Retry + redis_connect_func: _ConnectFunc | None + encoder: Encoder + next_health_check: int + health_check_interval: int + def __init__( + self, + host: str = "localhost", + port: int = 6379, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[str, int | str] | None = None, + socket_type: int = 0, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + redis_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... + def __del__(self) -> None: ... + def register_connect_callback(self, callback: _ConnectFunc) -> None: ... + def clear_connect_callbacks(self) -> None: ... + def set_parser(self, parser_class: type[BaseParser]) -> None: ... + def connect(self) -> None: ... + def on_connect(self) -> None: ... + def disconnect(self, *args: Unused) -> None: ... # 'args' added in redis 4.1.2 + def check_health(self) -> None: ... + def send_packed_command(self, command: str | Iterable[str], check_health: bool = ...) -> None: ... + def send_command(self, *args, **kwargs) -> None: ... + def can_read(self, timeout: float | None = ...) -> bool: ... + def read_response(self, disable_decoding: bool = ...) -> Any: ... # `str | bytes` or `list[str | bytes]` + def pack_command(self, *args) -> list[bytes]: ... + def pack_commands(self, commands: Iterable[Iterable[Incomplete]]) -> list[bytes]: ... + def repr_pieces(self) -> list[tuple[str, str]]: ... 
+ +class SSLConnection(Connection): + keyfile: Any + certfile: Any + cert_reqs: Any + ca_certs: Any + ca_path: Incomplete | None + check_hostname: bool + certificate_password: Incomplete | None + ssl_validate_ocsp: bool + ssl_validate_ocsp_stapled: bool # added in 4.1.1 + ssl_ocsp_context: Incomplete | None # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None # added in 4.1.1 + def __init__( + self, + ssl_keyfile=..., + ssl_certfile=..., + ssl_cert_reqs=..., + ssl_ca_certs=..., + ssl_ca_data: Incomplete | None = ..., + ssl_check_hostname: bool = ..., + ssl_ca_path: Incomplete | None = ..., + ssl_password: Incomplete | None = ..., + ssl_validate_ocsp: bool = ..., + ssl_validate_ocsp_stapled: bool = ..., # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = ..., # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = ..., # added in 4.1.1 + **kwargs, + ) -> None: ... + +class UnixDomainSocketConnection(Connection): + path: str + def __init__( + self, + path: str = "", + db: int = 0, + username: str | None = None, + password: str | None = None, + socket_timeout: float | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + retry: Retry | None = None, + redis_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... + +# TODO: make generic on `connection_class` +class ConnectionPool: + connection_class: type[Connection] + connection_kwargs: dict[str, Any] + max_connections: int + pid: int + @classmethod + def from_url(cls, url: str, *, db: int = ..., decode_components: bool = ..., **kwargs) -> Self: ... + def __init__( + self, connection_class: type[Connection] = ..., max_connections: int | None = ..., **connection_kwargs + ) -> None: ... + def reset(self) -> None: ... + def get_connection(self, command_name: Unused, *keys, **options: _ConnectionPoolOptions) -> Connection: ... + def make_connection(self) -> Connection: ... + def release(self, connection: Connection) -> None: ... + def disconnect(self, inuse_connections: bool = ...) -> None: ... + def get_encoder(self) -> Encoder: ... + def owns_connection(self, connection: Connection) -> bool: ... + +class BlockingConnectionPool(ConnectionPool): + queue_class: type[Queue[Any]] + timeout: float + pool: Queue[Connection | None] # might not be defined + def __init__( + self, + max_connections: int = ..., + timeout: float = ..., + connection_class: type[Connection] = ..., + queue_class: type[Queue[Any]] = ..., + **connection_kwargs, + ) -> None: ... + def disconnect(self) -> None: ... # type: ignore[override] + +def to_bool(value: object) -> bool: ... +def parse_url(url: str) -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/crc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/crc.pyi new file mode 100644 index 00000000..87585a3f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/crc.pyi @@ -0,0 +1,5 @@ +from redis.typing import EncodedT + +REDIS_CLUSTER_HASH_SLOTS: int + +def key_slot(key: EncodedT, bucket: int = ...) -> int: ... 
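ConnectionPool above is what redis.Redis uses under the hood; building one explicitly lets several clients share it, and from_url() accepts the same redis:// URLs that parse_url() handles. A sketch with an illustrative URL and pool size, plus the key_slot() helper from crc.pyi:

    import redis
    from redis.connection import ConnectionPool
    from redis.crc import key_slot

    pool = ConnectionPool.from_url("redis://localhost:6379/0", max_connections=20)
    r = redis.Redis(connection_pool=pool)

    print(r.ping())
    print(pool.get_encoder().encoding)   # the Encoder stubbed above
    print(key_slot(b"user:1"))           # cluster hash slot in [0, REDIS_CLUSTER_HASH_SLOTS)
    pool.disconnect()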
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/credentials.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/credentials.pyi new file mode 100644 index 00000000..aecf728a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/credentials.pyi @@ -0,0 +1,11 @@ +from abc import abstractmethod + +class CredentialProvider: + @abstractmethod + def get_credentials(self) -> tuple[str] | tuple[str, str]: ... + +class UsernamePasswordCredentialProvider(CredentialProvider): + username: str + password: str + def __init__(self, username: str | None = ..., password: str | None = ...) -> None: ... + def get_credentials(self) -> tuple[str] | tuple[str, str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/exceptions.pyi new file mode 100644 index 00000000..1820d3d9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/exceptions.pyi @@ -0,0 +1,42 @@ +class RedisError(Exception): ... +class AuthenticationError(RedisError): ... +class ConnectionError(RedisError): ... +class TimeoutError(RedisError): ... +class AuthorizationError(ConnectionError): ... +class BusyLoadingError(ConnectionError): ... +class InvalidResponse(RedisError): ... +class ResponseError(RedisError): ... +class DataError(RedisError): ... +class PubSubError(RedisError): ... +class WatchError(RedisError): ... +class NoScriptError(ResponseError): ... +class ExecAbortError(ResponseError): ... +class ReadOnlyError(ResponseError): ... +class NoPermissionError(ResponseError): ... +class ModuleError(ResponseError): ... +class LockError(RedisError, ValueError): ... +class LockNotOwnedError(LockError): ... +class ChildDeadlockedError(Exception): ... +class AuthenticationWrongNumberOfArgsError(ResponseError): ... +class RedisClusterException(Exception): ... +class ClusterError(RedisError): ... + +class ClusterDownError(ClusterError, ResponseError): + args: tuple[str] + message: str + def __init__(self, resp: str) -> None: ... + +class AskError(ResponseError): + args: tuple[str] + message: str + slot_id: int + node_addr: tuple[str, int] + host: str + port: int + def __init__(self, resp: str) -> None: ... + +class TryAgainError(ResponseError): ... +class ClusterCrossSlotError(ResponseError): ... +class MovedError(AskError): ... +class MasterDownError(ClusterDownError): ... +class SlotNotCoveredError(RedisClusterException): ... 
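CredentialProvider above is the hook connections use to fetch a (username, password) pair lazily, and exceptions.pyi is the error hierarchy those connections raise. A sketch, assuming a recent redis-py client that forwards credential_provider to its connections; the credentials are illustrative:

    import redis
    from redis.credentials import UsernamePasswordCredentialProvider
    from redis.exceptions import AuthenticationError, ConnectionError

    creds = UsernamePasswordCredentialProvider("app-user", "s3cret")
    r = redis.Redis(credential_provider=creds)

    try:
        r.ping()
    except AuthenticationError:
        print("server rejected the credentials")
    except ConnectionError:
        print("server unreachable")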
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/lock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/lock.pyi new file mode 100644 index 00000000..bb0b7418 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/lock.pyi @@ -0,0 +1,49 @@ +from _typeshed import Incomplete +from types import TracebackType +from typing import Any, ClassVar, Protocol +from typing_extensions import Self + +from redis.client import Redis + +class _Local(Protocol): + token: str | bytes | None + +class Lock: + LUA_EXTEND_SCRIPT: ClassVar[str] + LUA_REACQUIRE_SCRIPT: ClassVar[str] + LUA_RELEASE_SCRIPT: ClassVar[str] + lua_extend: ClassVar[Incomplete | None] + lua_reacquire: ClassVar[Incomplete | None] + lua_release: ClassVar[Incomplete | None] + local: _Local + def __init__( + self, + redis: Redis[Any], + name: str, + timeout: float | None = ..., + sleep: float = ..., + blocking: bool = ..., + blocking_timeout: float | None = ..., + thread_local: bool = ..., + ) -> None: ... + def register_scripts(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + def acquire( + self, + sleep: float | None = ..., + blocking: bool | None = ..., + blocking_timeout: float | None = ..., + token: str | bytes | None = ..., + ) -> bool: ... + def do_acquire(self, token: str | bytes) -> bool: ... + def locked(self) -> bool: ... + def owned(self) -> bool: ... + def release(self) -> None: ... + def do_release(self, expected_token: str | bytes) -> None: ... + def extend(self, additional_time: float, replace_ttl: bool = ...) -> bool: ... + def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ... + def reacquire(self) -> bool: ... + def do_reacquire(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/ocsp.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/ocsp.pyi new file mode 100644 index 00000000..da9473a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/ocsp.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from ssl import SSLObject, SSLSocket +from typing_extensions import Literal + +from cryptography.x509.base import Certificate +from OpenSSL.SSL import Connection + +def ocsp_staple_verifier(con: Connection, ocsp_bytes: bytes, expected: bytes | None = ...) -> Literal[True]: ... + +class OCSPVerifier: + SOCK: SSLObject | SSLSocket + HOST: str + PORT: int + CA_CERTS: str | None + def __init__(self, sock: SSLObject | SSLSocket, host: str, port: int, ca_certs: str | None = ...) -> None: ... + # cryptography.x509.general_name.GeneralName.value is typed as Any + def components_from_socket(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ... + def components_from_direct_connection(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ... + def build_certificate_url(self, server: str, cert: Certificate, issuer_cert: Certificate) -> str: ... + def check_certificate(self, server: str, cert: Certificate, issuer_url: str | bytes) -> Literal[True]: ... + def is_valid(self) -> Literal[True]: ... 
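Lock above is a client-side lock driven by the LUA_*_SCRIPT class attributes and usable as a context manager, so acquire()/release() pair up with a with block. A sketch with an illustrative name and timeouts; redis.Redis also exposes a lock() helper that builds this object:

    import redis
    from redis.lock import Lock

    r = redis.Redis()

    lock = Lock(r, "jobs:nightly", timeout=30, blocking_timeout=5)
    with lock:                    # __enter__ -> acquire(), __exit__ -> release()
        print(lock.locked(), lock.owned())
        lock.extend(15)           # push the remaining TTL out by 15 seconds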
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/retry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/retry.pyi new file mode 100644 index 00000000..ec20224e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/retry.pyi @@ -0,0 +1,11 @@ +from collections.abc import Callable, Iterable +from typing import TypeVar + +from redis.backoff import AbstractBackoff + +_T = TypeVar("_T") + +class Retry: + def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[Exception], ...] = ...) -> None: ... + def update_supported_errors(self, specified_errors: Iterable[type[Exception]]) -> None: ... + def call_with_retry(self, do: Callable[[], _T], fail: Callable[[Exception], object]) -> _T: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/sentinel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/sentinel.pyi new file mode 100644 index 00000000..ea13ae68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/sentinel.pyi @@ -0,0 +1,62 @@ +from collections.abc import Iterable, Iterator +from typing import Any, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +from redis.client import Redis +from redis.commands.sentinel import SentinelCommands +from redis.connection import Connection, ConnectionPool, SSLConnection +from redis.exceptions import ConnectionError + +_RedisT = TypeVar("_RedisT", bound=Redis[Any]) +_AddressAndPort: TypeAlias = tuple[str, int] +_SentinelState: TypeAlias = dict[str, Any] # TODO: this can be a TypedDict + +class MasterNotFoundError(ConnectionError): ... +class SlaveNotFoundError(ConnectionError): ... + +class SentinelManagedConnection(Connection): + connection_pool: SentinelConnectionPool + def __init__(self, **kwargs) -> None: ... + def connect_to(self, address: _AddressAndPort) -> None: ... + def connect(self) -> None: ... + # The result can be either `str | bytes` or `list[str | bytes]` + def read_response(self, disable_decoding: bool = ...) -> Any: ... + +class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ... + +class SentinelConnectionPool(ConnectionPool): + is_master: bool + check_connection: bool + service_name: str + sentinel_manager: Sentinel + def __init__(self, service_name: str, sentinel_manager: Sentinel, **kwargs) -> None: ... + def reset(self) -> None: ... + def owns_connection(self, connection: Connection) -> bool: ... + def get_master_address(self) -> _AddressAndPort: ... + def rotate_slaves(self) -> Iterator[_AddressAndPort]: ... + +class Sentinel(SentinelCommands): + sentinel_kwargs: dict[str, Any] + sentinels: list[Redis[Any]] + min_other_sentinels: int + connection_kwargs: dict[str, Any] + def __init__( + self, + sentinels: Iterable[_AddressAndPort], + min_other_sentinels: int = ..., + sentinel_kwargs: dict[str, Any] | None = ..., + **connection_kwargs, + ) -> None: ... + def check_master_state(self, state: _SentinelState, service_name: str) -> bool: ... + def discover_master(self, service_name: str) -> _AddressAndPort: ... + def filter_slaves(self, slaves: Iterable[_SentinelState]) -> list[_AddressAndPort]: ... + def discover_slaves(self, service_name: str) -> list[_AddressAndPort]: ... + @overload + def master_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Redis[Any]: ... 
+ @overload + def master_for(self, service_name: str, redis_class: type[_RedisT], connection_pool_class=..., **kwargs) -> _RedisT: ... + @overload + def slave_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Redis[Any]: ... + @overload + def slave_for(self, service_name: str, redis_class: type[_RedisT], connection_pool_class=..., **kwargs) -> _RedisT: ... + def execute_command(self, *args, **kwargs) -> Literal[True]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/typing.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/typing.pyi new file mode 100644 index 00000000..f351ed45 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/typing.pyi @@ -0,0 +1,34 @@ +from collections.abc import Iterable +from datetime import datetime, timedelta +from typing import Protocol, TypeVar +from typing_extensions import TypeAlias + +from redis.asyncio.connection import ConnectionPool as AsyncConnectionPool +from redis.connection import ConnectionPool + +# The following type aliases exist at runtime. +EncodedT: TypeAlias = bytes | memoryview +DecodedT: TypeAlias = str | int | float +EncodableT: TypeAlias = EncodedT | DecodedT +AbsExpiryT: TypeAlias = int | datetime +ExpiryT: TypeAlias = float | timedelta +ZScoreBoundT: TypeAlias = float | str +BitfieldOffsetT: TypeAlias = int | str +_StringLikeT: TypeAlias = bytes | str | memoryview # noqa: Y043 +KeyT: TypeAlias = _StringLikeT +PatternT: TypeAlias = _StringLikeT +FieldT: TypeAlias = EncodableT +KeysT: TypeAlias = KeyT | Iterable[KeyT] +ChannelT: TypeAlias = _StringLikeT +GroupT: TypeAlias = _StringLikeT +ConsumerT: TypeAlias = _StringLikeT +StreamIdT: TypeAlias = int | _StringLikeT +ScriptTextT: TypeAlias = _StringLikeT +TimeoutSecT: TypeAlias = int | float | _StringLikeT +AnyKeyT = TypeVar("AnyKeyT", bytes, str, memoryview) # noqa: Y001 +AnyFieldT = TypeVar("AnyFieldT", bytes, str, memoryview) # noqa: Y001 +AnyChannelT = TypeVar("AnyChannelT", bytes, str, memoryview) # noqa: Y001 + +class CommandsProtocol(Protocol): + connection_pool: AsyncConnectionPool | ConnectionPool + def execute_command(self, *args, **options): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/utils.pyi new file mode 100644 index 00000000..7a562c4b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/redis/redis/utils.pyi @@ -0,0 +1,23 @@ +from _typeshed import Unused +from collections.abc import Iterable, Mapping +from contextlib import AbstractContextManager +from typing import Any, TypeVar, overload +from typing_extensions import Literal + +from .client import Pipeline, Redis, _StrType + +_T = TypeVar("_T") + +HIREDIS_AVAILABLE: bool +CRYPTOGRAPHY_AVAILABLE: bool + +@overload +def from_url(url: str, *, db: int = ..., decode_responses: Literal[True], **kwargs: Any) -> Redis[str]: ... +@overload +def from_url(url: str, *, db: int = ..., decode_responses: Literal[False] = ..., **kwargs: Any) -> Redis[bytes]: ... +def pipeline(redis_obj: Redis[_StrType]) -> AbstractContextManager[Pipeline[_StrType]]: ... +def str_if_bytes(value: str | bytes) -> str: ... +def safe_str(value: object) -> str: ... +def dict_merge(*dicts: Mapping[str, _T]) -> dict[str, _T]: ... +def list_keys_to_dict(key_list, callback): ... 
# unused, alias for `dict.fromkeys` +def merge_result(command: Unused, res: Mapping[Any, Iterable[_T]]) -> list[_T]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..1591d737 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/@tests/stubtest_allowlist.txt @@ -0,0 +1,18 @@ +# Not exported in C modules: +regex._regex.Splitter +regex._regex.Scanner + +# Implementation details: +regex._regex.compile +regex._regex.copyright +regex._regex.fold_case +regex._regex.get_all_cases +regex._regex.get_code_size +regex._regex.get_expand_on_folding +regex._regex.get_properties +regex._regex.has_property_value +regex._regex.CODE_SIZE +regex._regex.MAGIC + +# Tests: +regex.test_regex diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/@tests/test_cases/check_finditer.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/@tests/test_cases/check_finditer.py new file mode 100644 index 00000000..0b572973 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/@tests/test_cases/check_finditer.py @@ -0,0 +1,11 @@ +from __future__ import annotations + +from typing import List +from typing_extensions import assert_type + +import regex + +# Regression tests for #9263 +assert_type(list(regex.finditer(r"foo", "foo")), List[regex.Match[str]]) +pat = regex.compile(rb"foo") +assert_type(list(pat.finditer(b"foo")), List[regex.Match[bytes]]) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/METADATA.toml new file mode 100644 index 00000000..f508c79b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/METADATA.toml @@ -0,0 +1 @@ +version = "2022.10.31" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/__init__.pyi new file mode 100644 index 00000000..f310be63 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/__init__.pyi @@ -0,0 +1 @@ +from .regex import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/_regex.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/_regex.pyi new file mode 100644 index 00000000..a8744d0d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/_regex.pyi @@ -0,0 +1,26 @@ +# This is actually a C-extension module. +# Not all types defined in C are exported to Python. +# For example: `Pattern` and `Match` are not exported +# and are redefined in `regex.regex module. + +from typing import Any, AnyStr, Generic +from typing_extensions import Self, final + +from .regex import Match, Pattern + +@final +class Splitter(Generic[AnyStr]): + @property + def pattern(self) -> Pattern[AnyStr]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> AnyStr | Any: ... + def split(self) -> AnyStr | Any: ... 
+ +@final +class Scanner(Generic[AnyStr]): + @property + def pattern(self) -> Pattern[AnyStr]: ... + def __iter__(self) -> Self: ... + def __next__(self) -> Match[AnyStr]: ... + def match(self) -> Match[AnyStr] | None: ... + def search(self) -> Match[AnyStr] | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/_regex_core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/_regex_core.pyi new file mode 100644 index 00000000..cca9a476 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/_regex_core.pyi @@ -0,0 +1,91 @@ +import enum +from collections.abc import Callable +from typing import Any, AnyStr, Generic +from typing_extensions import TypeAlias + +from .regex import Pattern + +class error(Exception): + def __init__(self, message: str, pattern: AnyStr | None = ..., pos: int | None = ...) -> None: ... + +class RegexFlag(enum.IntFlag): + A: int + ASCII: int + B: int + BESTMATCH: int + D: int + DEBUG: int + E: int + ENHANCEMATCH: int + F: int + FULLCASE: int + I: int + IGNORECASE: int + L: int + LOCALE: int + M: int + MULTILINE: int + P: int + POSIX: int + R: int + REVERSE: int + T: int + TEMPLATE: int + S: int + DOTALL: int + U: int + UNICODE: int + V0: int + VERSION0: int + V1: int + VERSION1: int + W: int + WORD: int + X: int + VERBOSE: int + +A: int +ASCII: int +B: int +BESTMATCH: int +D: int +DEBUG: int +E: int +ENHANCEMATCH: int +F: int +FULLCASE: int +I: int +IGNORECASE: int +L: int +LOCALE: int +M: int +MULTILINE: int +P: int +POSIX: int +R: int +REVERSE: int +T: int +TEMPLATE: int +S: int +DOTALL: int +U: int +UNICODE: int +V0: int +VERSION0: int +V1: int +VERSION1: int +W: int +WORD: int +X: int +VERBOSE: int + +DEFAULT_VERSION: int + +_Lexicon: TypeAlias = list[tuple[AnyStr, Callable[[Scanner[AnyStr], AnyStr], Any]]] + +class Scanner(Generic[AnyStr]): + lexicon: _Lexicon[AnyStr] + scanner: Pattern[AnyStr] + + def __init__(self, lexicon: _Lexicon[AnyStr], flags: int = ...) -> None: ... + def scan(self, string: AnyStr) -> tuple[list[Any], AnyStr]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/regex.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/regex.pyi new file mode 100644 index 00000000..6a6d25f4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/regex/regex/regex.pyi @@ -0,0 +1,650 @@ +import sys +from _typeshed import ReadableBuffer +from collections.abc import Callable, Mapping +from typing import Any, AnyStr, Generic, TypeVar, overload +from typing_extensions import Literal, Self, final + +from . import _regex +from ._regex_core import * + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") + +__version__: str + +def compile( + pattern: AnyStr | Pattern[AnyStr], + flags: int = ..., + ignore_unused: bool = ..., + cache_pattern: bool | None = ..., + **kwargs: Any, +) -> Pattern[AnyStr]: ... +@overload +def search( + pattern: str | Pattern[str], + string: str, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> Match[str] | None: ... 
+@overload +def search( + pattern: bytes | Pattern[bytes], + string: ReadableBuffer, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> Match[bytes] | None: ... +@overload +def match( + pattern: str | Pattern[str], + string: str, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> Match[str] | None: ... +@overload +def match( + pattern: bytes | Pattern[bytes], + string: ReadableBuffer, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> Match[bytes] | None: ... +@overload +def fullmatch( + pattern: str | Pattern[str], + string: str, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> Match[str] | None: ... +@overload +def fullmatch( + pattern: bytes | Pattern[bytes], + string: ReadableBuffer, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> Match[bytes] | None: ... +@overload +def split( + pattern: str | Pattern[str], + string: str, + maxsplit: int = ..., + flags: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> list[str | Any]: ... +@overload +def split( + pattern: ReadableBuffer | Pattern[bytes], + string: ReadableBuffer, + maxsplit: int = ..., + flags: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> list[bytes | Any]: ... +@overload +def splititer( + pattern: str | Pattern[str], + string: str, + maxsplit: int = ..., + flags: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> _regex.Splitter[str]: ... +@overload +def splititer( + pattern: ReadableBuffer | Pattern[bytes], + string: ReadableBuffer, + maxsplit: int = ..., + flags: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> _regex.Splitter[bytes]: ... +@overload +def findall( + pattern: str | Pattern[str], + string: str, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> list[Any]: ... +@overload +def findall( + pattern: ReadableBuffer | Pattern[bytes], + string: ReadableBuffer, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> list[Any]: ... 
+@overload +def finditer( + pattern: str | Pattern[str], + string: str, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> _regex.Scanner[str]: ... +@overload +def finditer( + pattern: ReadableBuffer | Pattern[bytes], + string: ReadableBuffer, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> _regex.Scanner[bytes]: ... +@overload +def sub( + pattern: str | Pattern[str], + repl: str | Callable[[Match[str]], str], + string: str, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> str: ... +@overload +def sub( + pattern: ReadableBuffer | Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> bytes: ... +@overload +def subf( + pattern: str | Pattern[str], + format: str | Callable[[Match[str]], str], + string: str, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> str: ... +@overload +def subf( + pattern: ReadableBuffer | Pattern[bytes], + format: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> bytes: ... +@overload +def subn( + pattern: str | Pattern[str], + repl: str | Callable[[Match[str]], str], + string: str, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> tuple[str, int]: ... +@overload +def subn( + pattern: ReadableBuffer | Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> tuple[bytes, int]: ... +@overload +def subfn( + pattern: str | Pattern[str], + format: str | Callable[[Match[str]], str], + string: str, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> tuple[str, int]: ... 
+@overload +def subfn( + pattern: ReadableBuffer | Pattern[bytes], + format: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> tuple[bytes, int]: ... +def purge() -> None: ... +@overload +def cache_all(value: bool = ...) -> None: ... +@overload +def cache_all(value: None) -> bool: ... +def escape(pattern: AnyStr, special_only: bool = ..., literal_spaces: bool = ...) -> AnyStr: ... +def template(pattern: AnyStr | Pattern[AnyStr], flags: int = ...) -> Pattern[AnyStr]: ... + +Regex = compile + +@final +class Pattern(Generic[AnyStr]): + @property + def flags(self) -> int: ... + @property + def groupindex(self) -> Mapping[str, int]: ... + @property + def groups(self) -> int: ... + @property + def pattern(self) -> AnyStr: ... + @property + def named_lists(self) -> Mapping[str, frozenset[AnyStr]]: ... + @overload + def search( + self: Pattern[str], + string: str, + pos: int = ..., + endpos: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Match[str] | None: ... + @overload + def search( + self: Pattern[bytes], + string: ReadableBuffer, + pos: int = ..., + endpos: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Match[bytes] | None: ... + @overload + def match( + self: Pattern[str], + string: str, + pos: int = ..., + endpos: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Match[str] | None: ... + @overload + def match( + self: Pattern[bytes], + string: ReadableBuffer, + pos: int = ..., + endpos: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Match[bytes] | None: ... + @overload + def fullmatch( + self: Pattern[str], + string: str, + pos: int = ..., + endpos: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Match[str] | None: ... + @overload + def fullmatch( + self: Pattern[bytes], + string: ReadableBuffer, + pos: int = ..., + endpos: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Match[bytes] | None: ... + @overload + def split( + self: Pattern[str], string: str, maxsplit: int = ..., concurrent: bool | None = ..., timeout: float | None = ... + ) -> list[str | Any]: ... + @overload + def split( + self: Pattern[bytes], + string: ReadableBuffer, + maxsplit: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> list[bytes | Any]: ... + @overload + def splititer( + self: Pattern[str], string: str, maxsplit: int = ..., concurrent: bool | None = ..., timeout: float | None = ... + ) -> _regex.Splitter[str]: ... + @overload + def splititer( + self: Pattern[bytes], + string: ReadableBuffer, + maxsplit: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> _regex.Splitter[bytes]: ... + @overload + def findall( + self: Pattern[str], + string: str, + pos: int = ..., + endpos: int = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> list[Any]: ... + @overload + def findall( + self: Pattern[bytes], + string: ReadableBuffer, + pos: int = ..., + endpos: int = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> list[Any]: ... 
+ @overload + def finditer( + self: Pattern[str], + string: str, + pos: int = ..., + endpos: int = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> _regex.Scanner[str]: ... + @overload + def finditer( + self: Pattern[bytes], + string: ReadableBuffer, + pos: int = ..., + endpos: int = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> _regex.Scanner[bytes]: ... + @overload + def sub( + self: Pattern[str], + repl: str | Callable[[Match[str]], str], + string: str, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> str: ... + @overload + def sub( + self: Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> bytes: ... + @overload + def subf( + self: Pattern[str], + format: str | Callable[[Match[str]], str], + string: str, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> str: ... + @overload + def subf( + self: Pattern[bytes], + format: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> bytes: ... + @overload + def subn( + self: Pattern[str], + repl: str | Callable[[Match[str]], str], + string: str, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> tuple[str, int]: ... + @overload + def subn( + self: Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> tuple[bytes, int]: ... + @overload + def subfn( + self: Pattern[str], + format: str | Callable[[Match[str]], str], + string: str, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> tuple[str, int]: ... + @overload + def subfn( + self: Pattern[bytes], + format: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> tuple[bytes, int]: ... + @overload + def scanner( + self: Pattern[str], + string: str, + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> _regex.Scanner[str]: ... + @overload + def scanner( + self: Pattern[bytes], + string: bytes, + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> _regex.Scanner[bytes]: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self) -> Self: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... 
+ +@final +class Match(Generic[AnyStr]): + @property + def pos(self) -> int: ... + @property + def endpos(self) -> int: ... + @property + def lastindex(self) -> int | None: ... + @property + def lastgroup(self) -> str | None: ... + @property + def string(self) -> AnyStr: ... + @property + def re(self) -> Pattern[AnyStr]: ... + @property + def partial(self) -> bool: ... + @property + def regs(self) -> tuple[tuple[int, int], ...]: ... + @property + def fuzzy_counts(self) -> tuple[int, int, int]: ... + @property + def fuzzy_changes(self) -> tuple[list[int], list[int], list[int]]: ... + @overload + def group(self, __group: Literal[0] = ...) -> AnyStr: ... + @overload + def group(self, __group: int | str = ...) -> AnyStr | Any: ... + @overload + def group(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[AnyStr | Any, ...]: ... + @overload + def groups(self, default: None = ...) -> tuple[AnyStr | Any, ...]: ... + @overload + def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ... + @overload + def groupdict(self, default: None = ...) -> dict[str, AnyStr | Any]: ... + @overload + def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... + @overload + def span(self, __group: int | str = ...) -> tuple[int, int]: ... + @overload + def span(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[tuple[int, int], ...]: ... + @overload + def spans(self, __group: int | str = ...) -> list[tuple[int, int]]: ... + @overload + def spans(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[list[tuple[int, int]], ...]: ... + @overload + def start(self, __group: int | str = ...) -> int: ... + @overload + def start(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[int, ...]: ... + @overload + def starts(self, __group: int | str = ...) -> list[int]: ... + @overload + def starts(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[list[int], ...]: ... + @overload + def end(self, __group: int | str = ...) -> int: ... + @overload + def end(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[int, ...]: ... + @overload + def ends(self, __group: int | str = ...) -> list[int]: ... + @overload + def ends(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[list[int], ...]: ... + def expand(self, template: AnyStr) -> AnyStr: ... + def expandf(self, format: AnyStr) -> AnyStr: ... + @overload + def captures(self, __group: int | str = ...) -> list[AnyStr]: ... + @overload + def captures(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[list[AnyStr], ...]: ... + def capturesdict(self) -> dict[str, list[AnyStr]]: ... + def detach_string(self) -> None: ... + def allcaptures(self) -> tuple[list[AnyStr]]: ... + def allspans(self) -> tuple[list[tuple[int, int]]]: ... + @overload + def __getitem__(self, __key: Literal[0]) -> AnyStr: ... + @overload + def __getitem__(self, __key: int | str) -> AnyStr | Any: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self) -> Self: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..06ff5606 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/@tests/stubtest_allowlist.txt @@ -0,0 +1,17 @@ +# Re-exports from urllib3 (should be fixed in those stubs, not here) +requests.adapters.HTTPResponse.DECODER_ERROR_CLASSES +requests.adapters.PoolManager.connection_from_context +requests.adapters.PoolManager.connection_from_host +requests.adapters.PoolManager.connection_from_pool_key +requests.adapters.PoolManager.connection_from_url +requests.adapters.PoolManager.proxy_config +requests.adapters.PoolManager.urlopen +requests.adapters.Retry.DEFAULT + +# Loop variables that leak into the global scope +requests.packages.mod +requests.packages.package +requests.packages.target + +# Metaclass differs: +requests.adapters.Retry diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/@tests/test_cases/check_post.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/@tests/test_cases/check_post.py new file mode 100644 index 00000000..d68a484d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/@tests/test_cases/check_post.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +from collections.abc import Iterable + +import requests + +# ================================================================================================= +# Regression test for #7988 (multiple files should be allowed for the "files" argument) +# This snippet comes from the requests documentation +# (https://requests.readthedocs.io/en/latest/user/advanced/#post-multiple-multipart-encoded-files), +# so should pass a type checker without error +# ================================================================================================= + + +url = "https://httpbin.org/post" +multiple_files = [ + ("images", ("foo.png", open("foo.png", "rb"), "image/png")), + ("images", ("bar.png", open("bar.png", "rb"), "image/png")), +] +r = requests.post(url, files=multiple_files) + + +# ================================================================================= +# Tests for various different types being passed into the "data" parameter +# (These all return "Any", so there's not much value in using assert_type here.) +# (Just test that type checkers don't emit an error if it doesn't fail at runtime.) 
+# ================================================================================= + + +# Arbitrary iterable +def gen() -> Iterable[bytes]: +    yield b"foo" +    yield b"bar" + + +requests.post("http://httpbin.org/anything", data=gen()).json()["data"] + +# bytes +requests.post("http://httpbin.org/anything", data=b"foobar").json()["data"] + +# str +requests.post("http://httpbin.org/anything", data="foobar").json()["data"] + +# Files +requests.post("http://httpbin.org/anything", data=open("/tmp/foobar", "rb")).json()["data"] +requests.post("http://httpbin.org/anything", data=open("/tmp/foobar", "r", encoding="UTF-8")).json()["data"] + +# Mappings +requests.post("http://httpbin.org/anything", data={b"foo": b"bar"}).json()["form"] +requests.post("http://httpbin.org/anything", data={"foo": "bar"}).json()["form"] + +# mappings represented by a list/tuple of key-value pairs +requests.post("http://httpbin.org/anything", data=[(b"foo", b"bar")]).json()["form"] +requests.post("http://httpbin.org/anything", data=[("foo", "bar")]).json()["form"] +requests.post("http://httpbin.org/anything", data=((b"foo", b"bar"),)).json()["form"] +requests.post("http://httpbin.org/anything", data=(("foo", "bar"),)).json()["form"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/METADATA.toml new file mode 100644 index 00000000..0bd4bd92 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/METADATA.toml @@ -0,0 +1,5 @@ +version = "2.28.*" +requires = ["types-urllib3<1.27"] # keep in sync with requests's setup.py + +[tool.stubtest] +extras = ["socks"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/__init__.pyi new file mode 100644 index 00000000..b3b37960 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/__init__.pyi @@ -0,0 +1,39 @@ +from .__version__ import ( +    __author__ as __author__, +    __author_email__ as __author_email__, +    __build__ as __build__, +    __cake__ as __cake__, +    __copyright__ as __copyright__, +    __description__ as __description__, +    __license__ as __license__, +    __title__ as __title__, +    __url__ as __url__, +    __version__ as __version__, +) +from .api import ( +    delete as delete, +    get as get, +    head as head, +    options as options, +    patch as patch, +    post as post, +    put as put, +    request as request, +) +from .exceptions import ( +    ConnectionError as ConnectionError, +    ConnectTimeout as ConnectTimeout, +    FileModeWarning as FileModeWarning, +    HTTPError as HTTPError, +    JSONDecodeError as JSONDecodeError, +    ReadTimeout as ReadTimeout, +    RequestException as RequestException, +    Timeout as Timeout, +    TooManyRedirects as TooManyRedirects, +    URLRequired as URLRequired, +) +from .models import PreparedRequest as PreparedRequest, Request as Request, Response as Response +from .sessions import Session as Session, session as session +from .status_codes import codes as codes + +def check_compatibility(urllib3_version: str, chardet_version: str | None, charset_normalizer_version: str | None) -> None: ...
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/__version__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/__version__.pyi new file mode 100644 index 00000000..fe5db3df --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/__version__.pyi @@ -0,0 +1,10 @@ +__title__: str +__description__: str +__url__: str +__version__: str +__build__: int +__author__: str +__author_email__: str +__license__: str +__copyright__: str +__cake__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/adapters.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/adapters.pyi new file mode 100644 index 00000000..dbdd59f7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/adapters.pyi @@ -0,0 +1,79 @@ +from collections.abc import Mapping +from typing import Any + +from urllib3.contrib.socks import SOCKSProxyManager as SOCKSProxyManager +from urllib3.exceptions import ( + ConnectTimeoutError as ConnectTimeoutError, + MaxRetryError as MaxRetryError, + ProtocolError as ProtocolError, + ReadTimeoutError as ReadTimeoutError, + ResponseError as ResponseError, +) +from urllib3.poolmanager import PoolManager as PoolManager, proxy_from_url as proxy_from_url +from urllib3.response import HTTPResponse as HTTPResponse +from urllib3.util.retry import Retry as Retry + +from .cookies import extract_cookies_to_jar as extract_cookies_to_jar +from .exceptions import ( + ConnectionError as ConnectionError, + ConnectTimeout as ConnectTimeout, + ProxyError as ProxyError, + ReadTimeout as ReadTimeout, + RetryError as RetryError, + SSLError as SSLError, +) +from .models import PreparedRequest, Response as Response +from .structures import CaseInsensitiveDict as CaseInsensitiveDict +from .utils import ( + DEFAULT_CA_BUNDLE_PATH as DEFAULT_CA_BUNDLE_PATH, + get_auth_from_url as get_auth_from_url, + get_encoding_from_headers as get_encoding_from_headers, + prepend_scheme_if_needed as prepend_scheme_if_needed, + urldefragauth as urldefragauth, +) + +DEFAULT_POOLBLOCK: bool +DEFAULT_POOLSIZE: int +DEFAULT_RETRIES: int +DEFAULT_POOL_TIMEOUT: float | None + +class BaseAdapter: + def __init__(self) -> None: ... + def send( + self, + request: PreparedRequest, + stream: bool = ..., + timeout: None | float | tuple[float, float] | tuple[float, None] = ..., + verify: bool | str = ..., + cert: None | bytes | str | tuple[bytes | str, bytes | str] = ..., + proxies: Mapping[str, str] | None = ..., + ) -> Response: ... + def close(self) -> None: ... + +class HTTPAdapter(BaseAdapter): + __attrs__: Any + max_retries: Retry + config: Any + proxy_manager: Any + def __init__( + self, pool_connections: int = ..., pool_maxsize: int = ..., max_retries: Retry | int | None = ..., pool_block: bool = ... + ) -> None: ... + poolmanager: Any + def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs): ... + def proxy_manager_for(self, proxy, **proxy_kwargs): ... + def cert_verify(self, conn, url, verify, cert): ... + def build_response(self, req, resp): ... + def get_connection(self, url, proxies=...): ... + def close(self): ... + def request_url(self, request, proxies): ... + def add_headers(self, request, **kwargs): ... + def proxy_headers(self, proxy): ... 
+ def send( + self, + request: PreparedRequest, + stream: bool = ..., + timeout: None | float | tuple[float, float] | tuple[float, None] = ..., + verify: bool | str = ..., + cert: None | bytes | str | tuple[bytes | str, bytes | str] = ..., + proxies: Mapping[str, str] | None = ..., + ) -> Response: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/api.pyi new file mode 100644 index 00000000..c07a4900 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/api.pyi @@ -0,0 +1,154 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from typing_extensions import TypeAlias + +from .models import Response +from .sessions import RequestsCookieJar, _Auth, _Cert, _Data, _Files, _HooksInput, _Params, _TextMapping, _Timeout, _Verify + +_HeadersMapping: TypeAlias = Mapping[str, str | bytes] + +def request( + method: str | bytes, + url: str | bytes, + *, + params: _Params | None = ..., + data: _Data | None = ..., + headers: _HeadersMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., +) -> Response: ... +def get( + url: str | bytes, + params: _Params | None = ..., + *, + data: _Data | None = ..., + headers: _HeadersMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., +) -> Response: ... +def options( + url: str | bytes, + *, + params: _Params | None = ..., + data: _Data | None = ..., + headers: _HeadersMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., +) -> Response: ... +def head( + url: str | bytes, + *, + params: _Params | None = ..., + data: _Data | None = ..., + headers: _HeadersMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., +) -> Response: ... 
+def post( + url: str | bytes, + data: _Data | None = ..., + json: Incomplete | None = ..., + *, + params: _Params | None = ..., + headers: _HeadersMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., +) -> Response: ... +def put( + url: str | bytes, + data: _Data | None = ..., + *, + params: _Params | None = ..., + headers: _HeadersMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., +) -> Response: ... +def patch( + url: str | bytes, + data: _Data | None = ..., + *, + params: _Params | None = ..., + headers: _HeadersMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., +) -> Response: ... +def delete( + url: str | bytes, + *, + params: _Params | None = ..., + data: _Data | None = ..., + headers: _HeadersMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., +) -> Response: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/auth.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/auth.pyi new file mode 100644 index 00000000..21ef4abf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/auth.pyi @@ -0,0 +1,39 @@ +from typing import Any + +from . import cookies, models, utils + +extract_cookies_to_jar = cookies.extract_cookies_to_jar +parse_dict_header = utils.parse_dict_header +to_native_string = utils.to_native_string + +CONTENT_TYPE_FORM_URLENCODED: Any +CONTENT_TYPE_MULTI_PART: Any + +def _basic_auth_str(username: bytes | str, password: bytes | str) -> str: ... + +class AuthBase: + def __call__(self, r: models.PreparedRequest) -> models.PreparedRequest: ... + +class HTTPBasicAuth(AuthBase): + username: bytes | str + password: bytes | str + def __init__(self, username: bytes | str, password: bytes | str) -> None: ... + def __call__(self, r): ... + +class HTTPProxyAuth(HTTPBasicAuth): + def __call__(self, r): ... + +class HTTPDigestAuth(AuthBase): + username: bytes | str + password: bytes | str + last_nonce: Any + nonce_count: Any + chal: Any + pos: Any + num_401_calls: Any + def __init__(self, username: bytes | str, password: bytes | str) -> None: ... + def build_digest_header(self, method, url): ... 
+ def handle_redirect(self, r, **kwargs): ... + def handle_401(self, r, **kwargs): ... + def __call__(self, r): ... + def init_per_thread_state(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/certs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/certs.pyi new file mode 100644 index 00000000..7c5857d6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/certs.pyi @@ -0,0 +1 @@ +# no public data diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/compat.pyi new file mode 100644 index 00000000..bf90bf09 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/compat.pyi @@ -0,0 +1,25 @@ +from builtins import bytes as bytes, str as str +from collections import OrderedDict as OrderedDict +from typing_extensions import Literal, TypeAlias +from urllib.parse import ( + quote as quote, + quote_plus as quote_plus, + unquote as unquote, + unquote_plus as unquote_plus, + urldefrag as urldefrag, + urlencode as urlencode, + urljoin as urljoin, + urlparse as urlparse, + urlsplit as urlsplit, + urlunparse as urlunparse, +) +from urllib.request import getproxies as getproxies, parse_http_list as parse_http_list, proxy_bypass as proxy_bypass + +is_py2: Literal[False] +is_py3: Literal[True] +has_simplejson: bool + +builtin_str: TypeAlias = str # noqa: Y042 +basestring: tuple[type, ...] +numeric_types: tuple[type, ...] +integer_types: tuple[type, ...] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/cookies.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/cookies.pyi new file mode 100644 index 00000000..b7f0812a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/cookies.pyi @@ -0,0 +1,60 @@ +from collections.abc import MutableMapping +from http.cookiejar import CookieJar +from typing import Any + +class MockRequest: + type: Any + def __init__(self, request) -> None: ... + def get_type(self): ... + def get_host(self): ... + def get_origin_req_host(self): ... + def get_full_url(self): ... + def is_unverifiable(self): ... + def has_header(self, name): ... + def get_header(self, name, default=...): ... + def add_header(self, key, val): ... + def add_unredirected_header(self, name, value): ... + def get_new_headers(self): ... + @property + def unverifiable(self): ... + @property + def origin_req_host(self): ... + @property + def host(self): ... + +class MockResponse: + def __init__(self, headers) -> None: ... + def info(self): ... + def getheaders(self, name): ... + +def extract_cookies_to_jar(jar, request, response): ... +def get_cookie_header(jar, request): ... +def remove_cookie_by_name(cookiejar, name, domain=..., path=...): ... + +class CookieConflictError(RuntimeError): ... + +class RequestsCookieJar(CookieJar, MutableMapping[Any, Any]): + def get(self, name, default=..., domain=..., path=...): ... + def set(self, name, value, **kwargs): ... + def iterkeys(self): ... + def keys(self): ... + def itervalues(self): ... + def values(self): ... + def iteritems(self): ... + def items(self): ... + def list_domains(self): ... + def list_paths(self): ... 
+ def multiple_domains(self): ... + def get_dict(self, domain=..., path=...): ... + def __getitem__(self, name): ... + def __setitem__(self, name, value) -> None: ... + def __delitem__(self, name) -> None: ... + def set_cookie(self, cookie, *args, **kwargs): ... + def update(self, other): ... + def copy(self): ... + def get_policy(self): ... + +def create_cookie(name, value, **kwargs): ... +def morsel_to_cookie(morsel): ... +def cookiejar_from_dict(cookie_dict, cookiejar=..., overwrite=...): ... +def merge_cookies(cookiejar, cookies): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/exceptions.pyi new file mode 100644 index 00000000..b642d5ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/exceptions.pyi @@ -0,0 +1,33 @@ +from typing import Any + +from urllib3.exceptions import HTTPError as BaseHTTPError + +class RequestException(OSError): + response: Any + request: Any + def __init__(self, *args, **kwargs) -> None: ... + +class InvalidJSONError(RequestException): ... +class JSONDecodeError(InvalidJSONError): ... +class HTTPError(RequestException): ... +class ConnectionError(RequestException): ... +class ProxyError(ConnectionError): ... +class SSLError(ConnectionError): ... +class Timeout(RequestException): ... +class ConnectTimeout(ConnectionError, Timeout): ... +class ReadTimeout(Timeout): ... +class URLRequired(RequestException): ... +class TooManyRedirects(RequestException): ... +class MissingSchema(RequestException, ValueError): ... +class InvalidSchema(RequestException, ValueError): ... +class InvalidURL(RequestException, ValueError): ... +class InvalidHeader(RequestException, ValueError): ... +class InvalidProxyURL(InvalidURL): ... +class ChunkedEncodingError(RequestException): ... +class ContentDecodingError(RequestException, BaseHTTPError): ... +class StreamConsumedError(RequestException, TypeError): ... +class RetryError(RequestException): ... +class UnrewindableBodyError(RequestException): ... +class RequestsWarning(Warning): ... +class FileModeWarning(RequestsWarning, DeprecationWarning): ... +class RequestsDependencyWarning(RequestsWarning): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/help.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/help.pyi new file mode 100644 index 00000000..e58cb3a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/help.pyi @@ -0,0 +1,34 @@ +from typing_extensions import TypedDict + +class _VersionDict(TypedDict): + version: str + +class _OptionalVersionDict(TypedDict): + version: str | None + +class _PlatformDict(TypedDict): + system: str + release: str + +class _ImplementationDict(_VersionDict): + name: str + +class _PyOpenSSLDict(_OptionalVersionDict): + openssl_version: str + +class _InfoDict(TypedDict): + platform: _PlatformDict + implementation: _ImplementationDict + system_ssl: _VersionDict + using_pyopenssl: bool + using_charset_normalizer: bool + pyOpenSSL: _PyOpenSSLDict + urllib3: _VersionDict + chardet: _OptionalVersionDict + charset_normalizer: _OptionalVersionDict + cryptography: _VersionDict + idna: _VersionDict + requests: _VersionDict + +def info() -> _InfoDict: ... +def main() -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/hooks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/hooks.pyi new file mode 100644 index 00000000..f706016c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/hooks.pyi @@ -0,0 +1,6 @@ +from typing import Any + +HOOKS: Any + +def default_hooks(): ... +def dispatch_hook(key, hooks, hook_data, **kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/models.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/models.pyi new file mode 100644 index 00000000..cfbca908 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/models.pyi @@ -0,0 +1,143 @@ +import datetime +from collections.abc import Callable, Iterator +from json import JSONDecoder +from typing import Any +from typing_extensions import Self + +from urllib3 import exceptions as urllib3_exceptions, fields, filepost, util + +from . import auth, cookies, exceptions, hooks, status_codes, utils +from .cookies import RequestsCookieJar +from .structures import CaseInsensitiveDict as CaseInsensitiveDict + +default_hooks = hooks.default_hooks +HTTPBasicAuth = auth.HTTPBasicAuth +cookiejar_from_dict = cookies.cookiejar_from_dict +get_cookie_header = cookies.get_cookie_header +RequestField = fields.RequestField +encode_multipart_formdata = filepost.encode_multipart_formdata +parse_url = util.parse_url +DecodeError = urllib3_exceptions.DecodeError +ReadTimeoutError = urllib3_exceptions.ReadTimeoutError +ProtocolError = urllib3_exceptions.ProtocolError +LocationParseError = urllib3_exceptions.LocationParseError +HTTPError = exceptions.HTTPError +MissingSchema = exceptions.MissingSchema +InvalidURL = exceptions.InvalidURL +ChunkedEncodingError = exceptions.ChunkedEncodingError +ContentDecodingError = exceptions.ContentDecodingError +ConnectionError = exceptions.ConnectionError +StreamConsumedError = exceptions.StreamConsumedError +guess_filename = utils.guess_filename +get_auth_from_url = utils.get_auth_from_url +requote_uri = utils.requote_uri +stream_decode_response_unicode = utils.stream_decode_response_unicode +to_key_val_list = utils.to_key_val_list +parse_header_links = utils.parse_header_links +iter_slices = utils.iter_slices +guess_json_utf = utils.guess_json_utf +super_len = utils.super_len +to_native_string = utils.to_native_string +codes = status_codes.codes + +REDIRECT_STATI: Any +DEFAULT_REDIRECT_LIMIT: Any +CONTENT_CHUNK_SIZE: Any +ITER_CHUNK_SIZE: Any + +class RequestEncodingMixin: + @property + def path_url(self): ... + +class RequestHooksMixin: + def register_hook(self, event, hook): ... + def deregister_hook(self, event, hook): ... + +class Request(RequestHooksMixin): + hooks: Any + method: Any + url: Any + headers: Any + files: Any + data: Any + json: Any + params: Any + auth: Any + cookies: Any + def __init__( + self, method=..., url=..., headers=..., files=..., data=..., params=..., auth=..., cookies=..., hooks=..., json=... + ) -> None: ... + def prepare(self) -> PreparedRequest: ... + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + method: str | None + url: str | None + headers: CaseInsensitiveDict[str] + body: bytes | str | None + hooks: Any + def __init__(self) -> None: ... 
+ def prepare( + self, method=..., url=..., headers=..., files=..., data=..., params=..., auth=..., cookies=..., hooks=..., json=... + ) -> None: ... + def copy(self) -> PreparedRequest: ... + def prepare_method(self, method) -> None: ... + def prepare_url(self, url, params) -> None: ... + def prepare_headers(self, headers) -> None: ... + def prepare_body(self, data, files, json=...) -> None: ... + def prepare_content_length(self, body) -> None: ... + def prepare_auth(self, auth, url=...) -> None: ... + def prepare_cookies(self, cookies) -> None: ... + def prepare_hooks(self, hooks) -> None: ... + +class Response: + __attrs__: Any + _content: bytes | None # undocumented + status_code: int + headers: CaseInsensitiveDict[str] + raw: Any + url: str + encoding: str | None + history: list[Response] + reason: str + cookies: RequestsCookieJar + elapsed: datetime.timedelta + request: PreparedRequest + def __init__(self) -> None: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def __iter__(self) -> Iterator[bytes]: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: object) -> None: ... + @property + def next(self) -> PreparedRequest | None: ... + @property + def ok(self) -> bool: ... + @property + def is_redirect(self) -> bool: ... + @property + def is_permanent_redirect(self) -> bool: ... + @property + def apparent_encoding(self) -> str: ... + def iter_content(self, chunk_size: int | None = ..., decode_unicode: bool = ...) -> Iterator[Any]: ... + def iter_lines( + self, chunk_size: int | None = ..., decode_unicode: bool = ..., delimiter: str | bytes | None = ... + ) -> Iterator[Any]: ... + @property + def content(self) -> bytes: ... + @property + def text(self) -> str: ... + def json( + self, + *, + cls: type[JSONDecoder] | None = ..., + object_hook: Callable[[dict[Any, Any]], Any] | None = ..., + parse_float: Callable[[str], Any] | None = ..., + parse_int: Callable[[str], Any] | None = ..., + parse_constant: Callable[[str], Any] | None = ..., + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = ..., + **kwds: Any, + ) -> Any: ... + @property + def links(self) -> dict[Any, Any]: ... + def raise_for_status(self) -> None: ... + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/packages.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/packages.pyi new file mode 100644 index 00000000..22281ce4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/packages.pyi @@ -0,0 +1,3 @@ +# requests also imports urllib3, idna, and chardet below +# requests.packages. The stubs don't reflect that and it's recommended to +# import these packages directly if needed. diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/sessions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/sessions.pyi new file mode 100644 index 00000000..b0688d8e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/sessions.pyi @@ -0,0 +1,315 @@ +from _typeshed import Incomplete, SupportsItems, SupportsRead +from collections.abc import Callable, Iterable, Mapping, MutableMapping +from typing import Any +from typing_extensions import Self, TypeAlias, TypedDict + +from urllib3._collections import RecentlyUsedContainer + +from . 
import adapters, auth as _auth, compat, cookies, exceptions, hooks, models, status_codes, utils +from .models import Response +from .structures import CaseInsensitiveDict as CaseInsensitiveDict + +_BaseAdapter: TypeAlias = adapters.BaseAdapter +OrderedDict = compat.OrderedDict +cookiejar_from_dict = cookies.cookiejar_from_dict +extract_cookies_to_jar = cookies.extract_cookies_to_jar +RequestsCookieJar = cookies.RequestsCookieJar +merge_cookies = cookies.merge_cookies +Request = models.Request +PreparedRequest = models.PreparedRequest +DEFAULT_REDIRECT_LIMIT = models.DEFAULT_REDIRECT_LIMIT +default_hooks = hooks.default_hooks +dispatch_hook = hooks.dispatch_hook +to_key_val_list = utils.to_key_val_list +default_headers = utils.default_headers +to_native_string = utils.to_native_string +TooManyRedirects = exceptions.TooManyRedirects +InvalidSchema = exceptions.InvalidSchema +ChunkedEncodingError = exceptions.ChunkedEncodingError +ContentDecodingError = exceptions.ContentDecodingError +HTTPAdapter = adapters.HTTPAdapter +requote_uri = utils.requote_uri +get_environ_proxies = utils.get_environ_proxies +get_netrc_auth = utils.get_netrc_auth +should_bypass_proxies = utils.should_bypass_proxies +get_auth_from_url = utils.get_auth_from_url +codes = status_codes.codes +REDIRECT_STATI = models.REDIRECT_STATI + +def merge_setting(request_setting, session_setting, dict_class=...): ... +def merge_hooks(request_hooks, session_hooks, dict_class=...): ... + +class SessionRedirectMixin: + def resolve_redirects( + self, + resp, + req, + stream: bool = ..., + timeout: Incomplete | None = ..., + verify: bool = ..., + cert: Incomplete | None = ..., + proxies: Incomplete | None = ..., + yield_requests: bool = ..., + **adapter_kwargs, + ): ... + def rebuild_auth(self, prepared_request, response): ... + def rebuild_proxies(self, prepared_request, proxies): ... + def should_strip_auth(self, old_url, new_url): ... + def rebuild_method(self, prepared_request: PreparedRequest, response: Response) -> None: ... + def get_redirect_target(self, resp: Response) -> str | None: ... + +_Data: TypeAlias = ( + # used in requests.models.PreparedRequest.prepare_body + # + # case: is_stream + # see requests.adapters.HTTPAdapter.send + # will be sent directly to http.HTTPConnection.send(...) (through urllib3) + Iterable[bytes] + # case: not is_stream + # will be modified before being sent to urllib3.HTTPConnectionPool.urlopen(body=...) + # see requests.models.RequestEncodingMixin._encode_params + # see requests.models.RequestEncodingMixin._encode_files + # note that keys&values are converted from Any to str by urllib.parse.urlencode + | str + | bytes + | SupportsRead[str | bytes] + | list[tuple[Any, Any]] + | tuple[tuple[Any, Any], ...] 
+ | Mapping[Any, Any] +) +_Auth: TypeAlias = tuple[str, str] | _auth.AuthBase | Callable[[PreparedRequest], PreparedRequest] +_Cert: TypeAlias = str | tuple[str, str] +# Files is passed to requests.utils.to_key_val_list() +_FileName: TypeAlias = str | None +_FileContent: TypeAlias = SupportsRead[str | bytes] | str | bytes +_FileContentType: TypeAlias = str +_FileCustomHeaders: TypeAlias = Mapping[str, str] +_FileSpecTuple2: TypeAlias = tuple[_FileName, _FileContent] +_FileSpecTuple3: TypeAlias = tuple[_FileName, _FileContent, _FileContentType] +_FileSpecTuple4: TypeAlias = tuple[_FileName, _FileContent, _FileContentType, _FileCustomHeaders] +_FileSpec: TypeAlias = _FileContent | _FileSpecTuple2 | _FileSpecTuple3 | _FileSpecTuple4 +_Files: TypeAlias = Mapping[str, _FileSpec] | Iterable[tuple[str, _FileSpec]] +_Hook: TypeAlias = Callable[[Response], Any] +_HooksInput: TypeAlias = Mapping[str, Iterable[_Hook] | _Hook] + +_ParamsMappingKeyType: TypeAlias = str | bytes | int | float +_ParamsMappingValueType: TypeAlias = str | bytes | int | float | Iterable[str | bytes | int | float] | None +_Params: TypeAlias = ( + SupportsItems[_ParamsMappingKeyType, _ParamsMappingValueType] + | tuple[_ParamsMappingKeyType, _ParamsMappingValueType] + | Iterable[tuple[_ParamsMappingKeyType, _ParamsMappingValueType]] + | str + | bytes +) +_TextMapping: TypeAlias = MutableMapping[str, str] +_HeadersUpdateMapping: TypeAlias = Mapping[str, str | bytes | None] +_Timeout: TypeAlias = float | tuple[float, float] | tuple[float, None] +_Verify: TypeAlias = bool | str + +class _Settings(TypedDict): + verify: _Verify | None + proxies: _TextMapping + stream: bool + cert: _Cert | None + +class Session(SessionRedirectMixin): + __attrs__: Any + # See https://github.com/psf/requests/issues/5020#issuecomment-989082461: + # requests sets this as a CaseInsensitiveDict, but users may set it to any MutableMapping + headers: MutableMapping[str, str | bytes] + auth: _Auth | None + proxies: _TextMapping + # Don't complain if: + # - value is assumed to be a list (which it is by default) + # - a _Hook is assigned directly, without wrapping it in a list (also works) + hooks: dict[str, list[_Hook] | Any] + params: _Params + stream: bool + verify: _Verify | None + cert: _Cert | None + max_redirects: int + trust_env: bool + cookies: RequestsCookieJar + adapters: MutableMapping[Any, Any] + redirect_cache: RecentlyUsedContainer[Any, Any] + def __init__(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args) -> None: ... + def prepare_request(self, request: Request) -> PreparedRequest: ... + def request( + self, + method: str | bytes, + url: str | bytes, + params: _Params | None = ..., + data: _Data | None = ..., + headers: _HeadersUpdateMapping | None = ..., + cookies: None | RequestsCookieJar | _TextMapping = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., + ) -> Response: ... 
+ def get( + self, + url: str | bytes, + *, + params: _Params | None = ..., + data: _Data | None = ..., + headers: _HeadersUpdateMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., + ) -> Response: ... + def options( + self, + url: str | bytes, + *, + params: _Params | None = ..., + data: _Data | None = ..., + headers: _HeadersUpdateMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., + ) -> Response: ... + def head( + self, + url: str | bytes, + *, + params: _Params | None = ..., + data: _Data | None = ..., + headers: _HeadersUpdateMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., + ) -> Response: ... + def post( + self, + url: str | bytes, + data: _Data | None = ..., + json: Incomplete | None = ..., + *, + params: _Params | None = ..., + headers: _HeadersUpdateMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + ) -> Response: ... + def put( + self, + url: str | bytes, + data: _Data | None = ..., + *, + params: _Params | None = ..., + headers: _HeadersUpdateMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., + ) -> Response: ... + def patch( + self, + url: str | bytes, + data: _Data | None = ..., + *, + params: _Params | None = ..., + headers: _HeadersUpdateMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., + ) -> Response: ... 
+ def delete( + self, + url: str | bytes, + *, + params: _Params | None = ..., + data: _Data | None = ..., + headers: _HeadersUpdateMapping | None = ..., + cookies: RequestsCookieJar | _TextMapping | None = ..., + files: _Files | None = ..., + auth: _Auth | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + proxies: _TextMapping | None = ..., + hooks: _HooksInput | None = ..., + stream: bool | None = ..., + verify: _Verify | None = ..., + cert: _Cert | None = ..., + json: Incomplete | None = ..., + ) -> Response: ... + def send( + self, + request: PreparedRequest, + *, + stream: bool | None = ..., + verify: _Verify | None = ..., + proxies: _TextMapping | None = ..., + cert: _Cert | None = ..., + timeout: _Timeout | None = ..., + allow_redirects: bool = ..., + **kwargs: Any, + ) -> Response: ... + def merge_environment_settings( + self, + url: str | bytes | None, + proxies: _TextMapping | None, + stream: bool | None, + verify: _Verify | None, + cert: _Cert | None, + ) -> _Settings: ... + def get_adapter(self, url: str) -> _BaseAdapter: ... + def close(self) -> None: ... + def mount(self, prefix: str | bytes, adapter: _BaseAdapter) -> None: ... + +def session() -> Session: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/status_codes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/status_codes.pyi new file mode 100644 index 00000000..4660b476 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/status_codes.pyi @@ -0,0 +1,3 @@ +from typing import Any + +codes: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/structures.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/structures.pyi new file mode 100644 index 00000000..14ef93c4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/structures.pyi @@ -0,0 +1,25 @@ +from collections.abc import Iterable, Iterator, Mapping, MutableMapping +from typing import Any, Generic, TypeVar, overload + +_D = TypeVar("_D") +_VT = TypeVar("_VT") + +class CaseInsensitiveDict(MutableMapping[str, _VT], Generic[_VT]): + def __init__(self, data: Mapping[str, _VT] | Iterable[tuple[str, _VT]] | None = ..., **kwargs: _VT) -> None: ... + def lower_items(self) -> Iterator[tuple[str, _VT]]: ... + def __setitem__(self, key: str, value: _VT) -> None: ... + def __getitem__(self, key: str) -> _VT: ... + def __delitem__(self, key: str) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... + def copy(self) -> CaseInsensitiveDict[_VT]: ... + +class LookupDict(dict[str, _VT]): + name: Any + def __init__(self, name: Any = ...) -> None: ... + def __getitem__(self, key: str) -> _VT | None: ... # type: ignore[override] + def __setattr__(self, __attr: str, __value: _VT) -> None: ... + @overload + def get(self, key: str, default: None = ...) -> _VT | None: ... + @overload + def get(self, key: str, default: _D | _VT) -> _D | _VT: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/utils.pyi new file mode 100644 index 00000000..34cd796c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/requests/requests/utils.pyi @@ -0,0 +1,68 @@ +import sys +from _typeshed import StrOrBytesPath +from collections.abc import Generator, Iterable, Mapping +from contextlib import _GeneratorContextManager +from io import BufferedWriter +from typing import Any, AnyStr +from typing_extensions import TypeAlias + +from . import compat, cookies, exceptions, structures +from .models import PreparedRequest, Request + +_Uri: TypeAlias = str | bytes +OrderedDict = compat.OrderedDict +cookiejar_from_dict = cookies.cookiejar_from_dict +CaseInsensitiveDict = structures.CaseInsensitiveDict +InvalidURL = exceptions.InvalidURL + +NETRC_FILES: tuple[str, str] +DEFAULT_CA_BUNDLE_PATH: Any +DEFAULT_PORTS: dict[str, int] +DEFAULT_ACCEPT_ENCODING: str + +def dict_to_sequence(d): ... +def super_len(o): ... +def get_netrc_auth(url: _Uri, raise_errors: bool = ...) -> tuple[str, str] | None: ... +def guess_filename(obj): ... +def extract_zipped_paths(path): ... +def atomic_open(filename: StrOrBytesPath) -> _GeneratorContextManager[BufferedWriter]: ... +def from_key_val_list(value): ... +def to_key_val_list(value): ... +def parse_list_header(value): ... +def parse_dict_header(value): ... +def unquote_header_value(value, is_filename: bool = ...): ... +def dict_from_cookiejar(cj): ... +def add_dict_to_cookiejar(cj, cookie_dict): ... +def get_encodings_from_content(content): ... +def get_encoding_from_headers(headers): ... +def stream_decode_response_unicode(iterator, r): ... +def iter_slices(string: str, slice_length: int | None) -> Generator[str, None, None]: ... +def get_unicode_from_response(r): ... + +UNRESERVED_SET: frozenset[str] + +def unquote_unreserved(uri: str) -> str: ... +def requote_uri(uri: str) -> str: ... +def address_in_network(ip: str, net: str) -> bool: ... +def dotted_netmask(mask: int) -> str: ... +def is_ipv4_address(string_ip: str) -> bool: ... +def is_valid_cidr(string_network: str) -> bool: ... +def set_environ(env_name: str, value: None) -> _GeneratorContextManager[None]: ... +def should_bypass_proxies(url: _Uri, no_proxy: Iterable[str] | None) -> bool: ... +def get_environ_proxies(url: _Uri, no_proxy: Iterable[str] | None = ...) -> dict[Any, Any]: ... +def select_proxy(url: _Uri, proxies: Mapping[Any, Any] | None): ... +def resolve_proxies(request: Request | PreparedRequest, proxies: Mapping[str, str] | None, trust_env: bool = ...): ... +def default_user_agent(name: str = ...) -> str: ... +def default_headers() -> CaseInsensitiveDict[str]: ... +def parse_header_links(value: str) -> list[dict[str, str]]: ... +def guess_json_utf(data): ... +def prepend_scheme_if_needed(url, new_scheme): ... +def get_auth_from_url(url: _Uri) -> tuple[str, str]: ... +def to_native_string(string, encoding=...): ... +def urldefragauth(url: _Uri): ... +def rewind_body(prepared_request: PreparedRequest) -> None: ... +def check_header_validity(header: tuple[AnyStr, AnyStr]) -> None: ... + +if sys.platform == "win32": + def proxy_bypass_registry(host: str) -> bool: ... + def proxy_bypass(host: str) -> bool: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..0e4e3d2e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/@tests/stubtest_allowlist.txt @@ -0,0 +1,3 @@ +retry.compat +retry.tests +retry.tests.test_retry diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/METADATA.toml new file mode 100644 index 00000000..51e869b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/METADATA.toml @@ -0,0 +1 @@ +version = "0.9.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/retry/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/retry/__init__.pyi new file mode 100644 index 00000000..0cf7651b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/retry/__init__.pyi @@ -0,0 +1 @@ +from .api import retry as retry diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/retry/api.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/retry/api.pyi new file mode 100644 index 00000000..2221d454 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/retry/retry/api.pyi @@ -0,0 +1,30 @@ +from _typeshed import IdentityFunction +from collections.abc import Callable, Sequence +from logging import Logger +from typing import Any, TypeVar + +_R = TypeVar("_R") + +logging_logger: Logger + +def retry_call( + f: Callable[..., _R], + fargs: Sequence[Any] | None = ..., + fkwargs: dict[str, Any] | None = ..., + exceptions: type[Exception] | tuple[type[Exception], ...] = ..., + tries: int = ..., + delay: float = ..., + max_delay: float | None = ..., + backoff: float = ..., + jitter: tuple[float, float] | float = ..., + logger: Logger | None = ..., +) -> _R: ... +def retry( + exceptions: type[Exception] | tuple[type[Exception], ...] = ..., + tries: int = ..., + delay: float = ..., + max_delay: float | None = ..., + backoff: float = ..., + jitter: tuple[float, float] | float = ..., + logger: Logger | None = ..., +) -> IdentityFunction: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..fdcdd0a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/@tests/stubtest_allowlist.txt @@ -0,0 +1,35 @@ +pkg_resources.Distribution.__cmp__ +pkg_resources.Distribution.activate +pkg_resources.Distribution.get_entry_map +pkg_resources.EggMetadata.__init__ +pkg_resources.EggProvider.__init__ +pkg_resources.Environment.best_match +pkg_resources.Environment.obtain +pkg_resources.FileMetadata.__init__ +pkg_resources.IMetadataProvider.get_metadata +pkg_resources.IMetadataProvider.get_metadata_lines +pkg_resources.IMetadataProvider.has_metadata +pkg_resources.IMetadataProvider.metadata_isdir +pkg_resources.IMetadataProvider.metadata_listdir +pkg_resources.IMetadataProvider.run_script +pkg_resources.IResourceManager +pkg_resources.NullProvider.__init__ +pkg_resources.Requirement.__init__ +pkg_resources.WorkingSet.find_plugins +pkg_resources.WorkingSet.resolve +pkg_resources.WorkingSet.subscribe +pkg_resources.ZipProvider.__init__ +pkg_resources.declare_namespace +pkg_resources.fixup_namespace_packages +pkg_resources.get_entry_map +pkg_resources.get_provider +pkg_resources.py31compat +pkg_resources.split_sections +pkg_resources.to_filename + +# Only present if docutils is installed +setuptools._distutils.command.check.SilentReporter + +# Discrepancy in the value of the default +# between setuptools's version of distutils and the stdlib distutils +setuptools._distutils.core.Command.announce diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/METADATA.toml new file mode 100644 index 00000000..f698ef87 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/METADATA.toml @@ -0,0 +1,5 @@ +version = "67.4.*" +requires = ["types-docutils"] + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/pkg_resources/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/pkg_resources/__init__.pyi new file mode 100644 index 00000000..490184c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/pkg_resources/__init__.pyi @@ -0,0 +1,280 @@ +import importlib.abc +import types +import zipimport +from _typeshed import Incomplete +from abc import ABCMeta +from collections.abc import Callable, Generator, Iterable, Sequence +from typing import IO, Any, TypeVar, overload +from typing_extensions import Self, TypeAlias + +_Version: TypeAlias = Incomplete # from packaging.version + +_T = TypeVar("_T") +_NestedStr: TypeAlias = str | Iterable[str | Iterable[Any]] +_InstallerType: TypeAlias = Callable[[Requirement], Distribution | None] +_EPDistType: TypeAlias = Distribution | Requirement | str +_MetadataType: TypeAlias = IResourceProvider | None +_PkgReqType: TypeAlias = str | Requirement +_DistFinderType: TypeAlias = Callable[[_Importer, str, bool], Generator[Distribution, None, None]] +_NSHandlerType: TypeAlias = Callable[[_Importer, str, str, types.ModuleType], str] + +def declare_namespace(name: str) -> None: ... 
+def fixup_namespace_packages(path_item: str) -> None: ... + +class WorkingSet: + entries: list[str] + def __init__(self, entries: Iterable[str] | None = ...) -> None: ... + def require(self, *requirements: _NestedStr) -> Sequence[Distribution]: ... + def run_script(self, requires: str, script_name: str) -> None: ... + def iter_entry_points(self, group: str, name: str | None = ...) -> Generator[EntryPoint, None, None]: ... + def add_entry(self, entry: str) -> None: ... + def __contains__(self, dist: Distribution) -> bool: ... + def __iter__(self) -> Generator[Distribution, None, None]: ... + def find(self, req: Requirement) -> Distribution | None: ... + def resolve( + self, requirements: Iterable[Requirement], env: Environment | None = ..., installer: _InstallerType | None = ... + ) -> list[Distribution]: ... + def add(self, dist: Distribution, entry: str | None = ..., insert: bool = ..., replace: bool = ...) -> None: ... + def subscribe(self, callback: Callable[[Distribution], object]) -> None: ... + def find_plugins( + self, plugin_env: Environment, full_env: Environment | None = ..., fallback: bool = ... + ) -> tuple[list[Distribution], dict[Distribution, Exception]]: ... + +working_set: WorkingSet = ... + +require = working_set.require +run_script = working_set.run_script +iter_entry_points = working_set.iter_entry_points +add_activation_listener = working_set.subscribe + +class Environment: + def __init__(self, search_path: Sequence[str] | None = ..., platform: str | None = ..., python: str | None = ...) -> None: ... + def __getitem__(self, project_name: str) -> list[Distribution]: ... + def __iter__(self) -> Generator[str, None, None]: ... + def add(self, dist: Distribution) -> None: ... + def remove(self, dist: Distribution) -> None: ... + def can_add(self, dist: Distribution) -> bool: ... + def __add__(self, other: Distribution | Environment) -> Environment: ... + def __iadd__(self, other: Distribution | Environment) -> Self: ... + @overload + def best_match(self, req: Requirement, working_set: WorkingSet, *, replace_conflicting: bool = ...) -> Distribution: ... + @overload + def best_match( + self, req: Requirement, working_set: WorkingSet, installer: Callable[[Requirement], _T], replace_conflicting: bool = ... + ) -> _T: ... + @overload + def obtain(self, requirement: Requirement) -> None: ... + @overload + def obtain(self, requirement: Requirement, installer: Callable[[Requirement], _T]) -> _T: ... + def scan(self, search_path: Sequence[str] | None = ...) -> None: ... + +def parse_requirements(strs: str | Iterable[str]) -> Generator[Requirement, None, None]: ... + +class Requirement: + unsafe_name: str + project_name: str + key: str + extras: tuple[str, ...] + specs: list[tuple[str, str]] + url: str | None + # TODO: change this to packaging.markers.Marker | None once we can import + # packaging.markers + marker: Incomplete | None + @staticmethod + def parse(s: str | Iterable[str]) -> Requirement: ... + def __contains__(self, item: Distribution | str | tuple[str, ...]) -> bool: ... + def __eq__(self, other_requirement: object) -> bool: ... + +def load_entry_point(dist: _EPDistType, group: str, name: str) -> Any: ... +def get_entry_info(dist: _EPDistType, group: str, name: str) -> EntryPoint | None: ... +@overload +def get_entry_map(dist: _EPDistType) -> dict[str, dict[str, EntryPoint]]: ... +@overload +def get_entry_map(dist: _EPDistType, group: str) -> dict[str, EntryPoint]: ... + +class EntryPoint: + name: str + module_name: str + attrs: tuple[str, ...] 
+ extras: tuple[str, ...] + dist: Distribution | None + def __init__( + self, + name: str, + module_name: str, + attrs: tuple[str, ...] = ..., + extras: tuple[str, ...] = ..., + dist: Distribution | None = ..., + ) -> None: ... + @classmethod + def parse(cls, src: str, dist: Distribution | None = ...) -> EntryPoint: ... + @classmethod + def parse_group(cls, group: str, lines: str | Sequence[str], dist: Distribution | None = ...) -> dict[str, EntryPoint]: ... + @classmethod + def parse_map( + cls, data: dict[str, str | Sequence[str]] | str | Sequence[str], dist: Distribution | None = ... + ) -> dict[str, EntryPoint]: ... + def load(self, require: bool = ..., env: Environment | None = ..., installer: _InstallerType | None = ...) -> Any: ... + def require(self, env: Environment | None = ..., installer: _InstallerType | None = ...) -> None: ... + def resolve(self) -> Any: ... + +def find_distributions(path_item: str, only: bool = ...) -> Generator[Distribution, None, None]: ... +def get_distribution(dist: Requirement | str | Distribution) -> Distribution: ... + +class Distribution(IResourceProvider, IMetadataProvider): + PKG_INFO: str + location: str + project_name: str + @property + def key(self) -> str: ... + @property + def extras(self) -> list[str]: ... + @property + def version(self) -> str: ... + @property + def parsed_version(self) -> tuple[str, ...]: ... + py_version: str + platform: str | None + precedence: int + def __init__( + self, + location: str | None = ..., + metadata: _MetadataType = ..., + project_name: str | None = ..., + version: str | None = ..., + py_version: str = ..., + platform: str | None = ..., + precedence: int = ..., + ) -> None: ... + @classmethod + def from_location( + cls, location: str, basename: str, metadata: _MetadataType = ..., **kw: str | None | int + ) -> Distribution: ... + @classmethod + def from_filename(cls, filename: str, metadata: _MetadataType = ..., **kw: str | None | int) -> Distribution: ... + def activate(self, path: list[str] | None = ...) -> None: ... + def as_requirement(self) -> Requirement: ... + def requires(self, extras: tuple[str, ...] = ...) -> list[Requirement]: ... + def clone(self, **kw: str | int | None) -> Requirement: ... + def egg_name(self) -> str: ... + def __cmp__(self, other: Any) -> bool: ... + def get_entry_info(self, group: str, name: str) -> EntryPoint | None: ... + @overload + def get_entry_map(self) -> dict[str, dict[str, EntryPoint]]: ... + @overload + def get_entry_map(self, group: str) -> dict[str, EntryPoint]: ... + def load_entry_point(self, group: str, name: str) -> Any: ... + +EGG_DIST: int +BINARY_DIST: int +SOURCE_DIST: int +CHECKOUT_DIST: int +DEVELOP_DIST: int + +def resource_exists(package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... +def resource_stream(package_or_requirement: _PkgReqType, resource_name: str) -> IO[bytes]: ... +def resource_string(package_or_requirement: _PkgReqType, resource_name: str) -> bytes: ... +def resource_isdir(package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... +def resource_listdir(package_or_requirement: _PkgReqType, resource_name: str) -> list[str]: ... +def resource_filename(package_or_requirement: _PkgReqType, resource_name: str) -> str: ... +def set_extraction_path(path: str) -> None: ... +def cleanup_resources(force: bool = ...) -> list[str]: ... + +class IResourceManager: + def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... 
+ def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str) -> IO[bytes]: ... + def resource_string(self, package_or_requirement: _PkgReqType, resource_name: str) -> bytes: ... + def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str) -> bool: ... + def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str) -> list[str]: ... + def resource_filename(self, package_or_requirement: _PkgReqType, resource_name: str) -> str: ... + def set_extraction_path(self, path: str) -> None: ... + def cleanup_resources(self, force: bool = ...) -> list[str]: ... + def get_cache_path(self, archive_name: str, names: Iterable[str] = ...) -> str: ... + def extraction_error(self) -> None: ... + def postprocess(self, tempname: str, filename: str) -> None: ... + +@overload +def get_provider(package_or_requirement: str) -> IResourceProvider: ... +@overload +def get_provider(package_or_requirement: Requirement) -> Distribution: ... + +class IMetadataProvider: + def has_metadata(self, name: str) -> bool: ... + def metadata_isdir(self, name: str) -> bool: ... + def metadata_listdir(self, name: str) -> list[str]: ... + def get_metadata(self, name: str) -> str: ... + def get_metadata_lines(self, name: str) -> Generator[str, None, None]: ... + def run_script(self, script_name: str, namespace: dict[str, Any]) -> None: ... + +class ResolutionError(Exception): ... + +class DistributionNotFound(ResolutionError): + @property + def req(self) -> Requirement: ... + @property + def requirers(self) -> set[str]: ... + @property + def requirers_str(self) -> str: ... + def report(self) -> str: ... + +class VersionConflict(ResolutionError): + @property + def dist(self) -> Any: ... + @property + def req(self) -> Any: ... + def report(self) -> str: ... + def with_context(self, required_by: set[Distribution | str]) -> VersionConflict: ... + +class ContextualVersionConflict(VersionConflict): + @property + def required_by(self) -> set[Distribution | str]: ... + +class UnknownExtra(ResolutionError): ... + +class ExtractionError(Exception): + manager: IResourceManager + cache_path: str + original_error: Exception + +class _Importer(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader, metaclass=ABCMeta): ... + +def register_finder(importer_type: type, distribution_finder: _DistFinderType) -> None: ... +def register_loader_type(loader_type: type, provider_factory: Callable[[types.ModuleType], IResourceProvider]) -> None: ... +def register_namespace_handler(importer_type: type, namespace_handler: _NSHandlerType) -> None: ... + +class IResourceProvider(IMetadataProvider): ... +class NullProvider: ... +class EggProvider(NullProvider): ... +class DefaultProvider(EggProvider): ... + +class PathMetadata(DefaultProvider, IResourceProvider): + def __init__(self, path: str, egg_info: str) -> None: ... + +class ZipProvider(EggProvider): ... + +class EggMetadata(ZipProvider, IResourceProvider): + def __init__(self, zipimporter: zipimport.zipimporter) -> None: ... + +class EmptyProvider(NullProvider): ... + +empty_provider: EmptyProvider + +class FileMetadata(EmptyProvider, IResourceProvider): + def __init__(self, path_to_pkg_info: str) -> None: ... + +parse_version = _Version + +def yield_lines(iterable: _NestedStr) -> Generator[str, None, None]: ... +def split_sections(strs: _NestedStr) -> Generator[tuple[str | None, list[str]], None, None]: ... +def safe_name(name: str) -> str: ... +def safe_version(version: str) -> str: ... +def safe_extra(extra: str) -> str: ... 
+def to_filename(name_or_version: str) -> str: ... +def get_build_platform() -> str: ... +def get_platform() -> str: ... +def get_supported_platform() -> str: ... +def compatible_platforms(provided: str | None, required: str | None) -> bool: ... +def get_default_cache() -> str: ... +def get_importer(path_item: str) -> _Importer: ... +def ensure_directory(path: str) -> None: ... +def normalize_path(filename: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/pkg_resources/py31compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/pkg_resources/py31compat.pyi new file mode 100644 index 00000000..162da65e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/pkg_resources/py31compat.pyi @@ -0,0 +1,5 @@ +import os + +needs_makedirs: bool + +makedirs = os.makedirs diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/__init__.pyi new file mode 100644 index 00000000..7b3d0489 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/__init__.pyi @@ -0,0 +1,77 @@ +from abc import abstractmethod +from collections.abc import Iterable, Mapping, Sequence +from distutils.core import Command as _Command +from typing import Any + +from setuptools._deprecation_warning import SetuptoolsDeprecationWarning as SetuptoolsDeprecationWarning +from setuptools.depends import Require as Require +from setuptools.dist import Distribution as Distribution +from setuptools.extension import Extension as Extension + +__version__: str + +class PackageFinder: + @classmethod + def find(cls, where: str = ..., exclude: Iterable[str] = ..., include: Iterable[str] = ...) -> list[str]: ... + +class PEP420PackageFinder(PackageFinder): ... + +find_packages = PackageFinder.find +find_namespace_packages = PEP420PackageFinder.find + +def setup( + *, + name: str = ..., + version: str = ..., + description: str = ..., + long_description: str = ..., + author: str = ..., + author_email: str = ..., + maintainer: str = ..., + maintainer_email: str = ..., + url: str = ..., + download_url: str = ..., + packages: list[str] = ..., + py_modules: list[str] = ..., + scripts: list[str] = ..., + ext_modules: Sequence[Extension] = ..., + classifiers: list[str] = ..., + distclass: type[Distribution] = ..., + script_name: str = ..., + script_args: list[str] = ..., + options: Mapping[str, Any] = ..., + license: str = ..., + keywords: list[str] | str = ..., + platforms: list[str] | str = ..., + cmdclass: Mapping[str, type[_Command]] = ..., + data_files: list[tuple[str, list[str]]] = ..., + package_dir: Mapping[str, str] = ..., + obsoletes: list[str] = ..., + provides: list[str] = ..., + requires: list[str] = ..., + command_packages: list[str] = ..., + command_options: Mapping[str, Mapping[str, tuple[Any, Any]]] = ..., + package_data: Mapping[str, list[str]] = ..., + include_package_data: bool = ..., + libraries: list[str] = ..., + headers: list[str] = ..., + ext_package: str = ..., + include_dirs: list[str] = ..., + password: str = ..., + fullname: str = ..., + **attrs: Any, +) -> None: ... + +class Command(_Command): + command_consumes_arguments: bool + def __init__(self, dist: Distribution, **kw: Any) -> None: ... 
+ def ensure_string_list(self, option: str | list[str]) -> None: ... + def reinitialize_command(self, command: _Command | str, reinit_subcommands: int = ..., **kw: Any) -> _Command: ... + @abstractmethod + def initialize_options(self) -> None: ... + @abstractmethod + def finalize_options(self) -> None: ... + @abstractmethod + def run(self) -> None: ... + +class sic(str): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_deprecation_warning.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_deprecation_warning.pyi new file mode 100644 index 00000000..43a7220c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_deprecation_warning.pyi @@ -0,0 +1 @@ +class SetuptoolsDeprecationWarning(Warning): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/archive_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/archive_util.pyi new file mode 100644 index 00000000..38458fc0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/archive_util.pyi @@ -0,0 +1,20 @@ +def make_archive( + base_name: str, + format: str, + root_dir: str | None = ..., + base_dir: str | None = ..., + verbose: int = ..., + dry_run: int = ..., + owner: str | None = ..., + group: str | None = ..., +) -> str: ... +def make_tarball( + base_name: str, + base_dir: str, + compress: str | None = ..., + verbose: int = ..., + dry_run: int = ..., + owner: str | None = ..., + group: str | None = ..., +) -> str: ... +def make_zipfile(base_name: str, base_dir: str, verbose: int = ..., dry_run: int = ...) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/bcppcompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/bcppcompiler.pyi new file mode 100644 index 00000000..3e432f94 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/bcppcompiler.pyi @@ -0,0 +1,3 @@ +from distutils.ccompiler import CCompiler + +class BCPPCompiler(CCompiler): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/ccompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/ccompiler.pyi new file mode 100644 index 00000000..96a756bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/ccompiler.pyi @@ -0,0 +1,152 @@ +from collections.abc import Callable +from typing import Any +from typing_extensions import TypeAlias + +_Macro: TypeAlias = tuple[str] | tuple[str, str | None] + +def gen_lib_options( + compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] +) -> list[str]: ... 
+def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: ... +def get_default_compiler(osname: str | None = ..., platform: str | None = ...) -> str: ... +def new_compiler( + plat: str | None = ..., compiler: str | None = ..., verbose: int = ..., dry_run: int = ..., force: int = ... +) -> CCompiler: ... +def show_compilers() -> None: ... + +class CCompiler: + dry_run: bool + force: bool + verbose: bool + output_dir: str | None + macros: list[_Macro] + include_dirs: list[str] + libraries: list[str] + library_dirs: list[str] + runtime_library_dirs: list[str] + objects: list[str] + def __init__(self, verbose: int = ..., dry_run: int = ..., force: int = ...) -> None: ... + def add_include_dir(self, dir: str) -> None: ... + def set_include_dirs(self, dirs: list[str]) -> None: ... + def add_library(self, libname: str) -> None: ... + def set_libraries(self, libnames: list[str]) -> None: ... + def add_library_dir(self, dir: str) -> None: ... + def set_library_dirs(self, dirs: list[str]) -> None: ... + def add_runtime_library_dir(self, dir: str) -> None: ... + def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... + def define_macro(self, name: str, value: str | None = ...) -> None: ... + def undefine_macro(self, name: str) -> None: ... + def add_link_object(self, object: str) -> None: ... + def set_link_objects(self, objects: list[str]) -> None: ... + def detect_language(self, sources: str | list[str]) -> str | None: ... + def find_library_file(self, dirs: list[str], lib: str, debug: bool = ...) -> str | None: ... + def has_function( + self, + funcname: str, + includes: list[str] | None = ..., + include_dirs: list[str] | None = ..., + libraries: list[str] | None = ..., + library_dirs: list[str] | None = ..., + ) -> bool: ... + def library_dir_option(self, dir: str) -> str: ... + def library_option(self, lib: str) -> str: ... + def runtime_library_dir_option(self, dir: str) -> str: ... + def set_executables(self, **args: str) -> None: ... + def compile( + self, + sources: list[str], + output_dir: str | None = ..., + macros: _Macro | None = ..., + include_dirs: list[str] | None = ..., + debug: bool = ..., + extra_preargs: list[str] | None = ..., + extra_postargs: list[str] | None = ..., + depends: list[str] | None = ..., + ) -> list[str]: ... + def create_static_lib( + self, + objects: list[str], + output_libname: str, + output_dir: str | None = ..., + debug: bool = ..., + target_lang: str | None = ..., + ) -> None: ... + def link( + self, + target_desc: str, + objects: list[str], + output_filename: str, + output_dir: str | None = ..., + libraries: list[str] | None = ..., + library_dirs: list[str] | None = ..., + runtime_library_dirs: list[str] | None = ..., + export_symbols: list[str] | None = ..., + debug: bool = ..., + extra_preargs: list[str] | None = ..., + extra_postargs: list[str] | None = ..., + build_temp: str | None = ..., + target_lang: str | None = ..., + ) -> None: ... + def link_executable( + self, + objects: list[str], + output_progname: str, + output_dir: str | None = ..., + libraries: list[str] | None = ..., + library_dirs: list[str] | None = ..., + runtime_library_dirs: list[str] | None = ..., + debug: bool = ..., + extra_preargs: list[str] | None = ..., + extra_postargs: list[str] | None = ..., + target_lang: str | None = ..., + ) -> None: ... 
+ def link_shared_lib( + self, + objects: list[str], + output_libname: str, + output_dir: str | None = ..., + libraries: list[str] | None = ..., + library_dirs: list[str] | None = ..., + runtime_library_dirs: list[str] | None = ..., + export_symbols: list[str] | None = ..., + debug: bool = ..., + extra_preargs: list[str] | None = ..., + extra_postargs: list[str] | None = ..., + build_temp: str | None = ..., + target_lang: str | None = ..., + ) -> None: ... + def link_shared_object( + self, + objects: list[str], + output_filename: str, + output_dir: str | None = ..., + libraries: list[str] | None = ..., + library_dirs: list[str] | None = ..., + runtime_library_dirs: list[str] | None = ..., + export_symbols: list[str] | None = ..., + debug: bool = ..., + extra_preargs: list[str] | None = ..., + extra_postargs: list[str] | None = ..., + build_temp: str | None = ..., + target_lang: str | None = ..., + ) -> None: ... + def preprocess( + self, + source: str, + output_file: str | None = ..., + macros: list[_Macro] | None = ..., + include_dirs: list[str] | None = ..., + extra_preargs: list[str] | None = ..., + extra_postargs: list[str] | None = ..., + ) -> None: ... + def executable_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... + def library_filename(self, libname: str, lib_type: str = ..., strip_dir: int = ..., output_dir: str = ...) -> str: ... + def object_filenames(self, source_filenames: list[str], strip_dir: int = ..., output_dir: str = ...) -> list[str]: ... + def shared_object_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... + def execute(self, func: Callable[..., object], args: tuple[Any, ...], msg: str | None = ..., level: int = ...) -> None: ... + def spawn(self, cmd: list[str]) -> None: ... + def mkpath(self, name: str, mode: int = ...) -> None: ... + def move_file(self, src: str, dst: str) -> str: ... + def announce(self, msg: str, level: int = ...) -> None: ... + def warn(self, msg: str) -> None: ... + def debug_print(self, msg: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/cmd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/cmd.pyi new file mode 100644 index 00000000..e706bdbc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/cmd.pyi @@ -0,0 +1,68 @@ +from abc import abstractmethod +from collections.abc import Callable, Iterable +from distutils.dist import Distribution +from typing import Any + +class Command: + sub_commands: list[tuple[str, Callable[[Command], bool] | None]] + def __init__(self, dist: Distribution) -> None: ... + @abstractmethod + def initialize_options(self) -> None: ... + @abstractmethod + def finalize_options(self) -> None: ... + @abstractmethod + def run(self) -> None: ... + def announce(self, msg: str, level: int = ...) -> None: ... + def debug_print(self, msg: str) -> None: ... + def ensure_string(self, option: str, default: str | None = ...) -> None: ... + def ensure_string_list(self, option: str | list[str]) -> None: ... + def ensure_filename(self, option: str) -> None: ... + def ensure_dirname(self, option: str) -> None: ... + def get_command_name(self) -> str: ... + def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ... + def get_finalized_command(self, command: str, create: int = ...) -> Command: ... 
+ def reinitialize_command(self, command: Command | str, reinit_subcommands: int = ...) -> Command: ... + def run_command(self, command: str) -> None: ... + def get_sub_commands(self) -> list[str]: ... + def warn(self, msg: str) -> None: ... + def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = ..., level: int = ...) -> None: ... + def mkpath(self, name: str, mode: int = ...) -> None: ... + def copy_file( + self, + infile: str, + outfile: str, + preserve_mode: int = ..., + preserve_times: int = ..., + link: str | None = ..., + level: Any = ..., + ) -> tuple[str, bool]: ... # level is not used + def copy_tree( + self, + infile: str, + outfile: str, + preserve_mode: int = ..., + preserve_times: int = ..., + preserve_symlinks: int = ..., + level: Any = ..., + ) -> list[str]: ... # level is not used + def move_file(self, src: str, dst: str, level: Any = ...) -> str: ... # level is not used + def spawn(self, cmd: Iterable[str], search_path: int = ..., level: Any = ...) -> None: ... # level is not used + def make_archive( + self, + base_name: str, + format: str, + root_dir: str | None = ..., + base_dir: str | None = ..., + owner: str | None = ..., + group: str | None = ..., + ) -> str: ... + def make_file( + self, + infiles: str | list[str] | tuple[str, ...], + outfile: str, + func: Callable[..., object], + args: list[Any], + exec_msg: str | None = ..., + skip_msg: str | None = ..., + level: Any = ..., + ) -> None: ... # level is not used diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/bdist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/bdist.pyi new file mode 100644 index 00000000..e1f141d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/bdist.pyi @@ -0,0 +1,25 @@ +from typing import Any + +from ..cmd import Command + +def show_formats() -> None: ... + +class bdist(Command): + description: str + user_options: Any + boolean_options: Any + help_options: Any + no_format_option: Any + default_format: Any + format_commands: Any + format_command: Any + bdist_base: Any + plat_name: Any + formats: Any + dist_dir: Any + skip_build: int + group: Any + owner: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/bdist_dumb.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/bdist_dumb.pyi new file mode 100644 index 00000000..74cca4d1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/bdist_dumb.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from ..cmd import Command + +class bdist_dumb(Command): + description: str + user_options: Any + boolean_options: Any + default_format: Any + bdist_dir: Any + plat_name: Any + format: Any + keep_temp: int + dist_dir: Any + skip_build: Any + relative: int + owner: Any + group: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/bdist_rpm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/bdist_rpm.pyi new file mode 100644 index 00000000..76691310 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/bdist_rpm.pyi @@ -0,0 +1,52 @@ +from typing import Any + +from ..cmd import Command + +class bdist_rpm(Command): + description: str + user_options: Any + boolean_options: Any + negative_opt: Any + bdist_base: Any + rpm_base: Any + dist_dir: Any + python: Any + fix_python: Any + spec_only: Any + binary_only: Any + source_only: Any + use_bzip2: Any + distribution_name: Any + group: Any + release: Any + serial: Any + vendor: Any + packager: Any + doc_files: Any + changelog: Any + icon: Any + prep_script: Any + build_script: Any + install_script: Any + clean_script: Any + verify_script: Any + pre_install: Any + post_install: Any + pre_uninstall: Any + post_uninstall: Any + prep: Any + provides: Any + requires: Any + conflicts: Any + build_requires: Any + obsoletes: Any + keep_temp: int + use_rpm_opt_flags: int + rpm3_mode: int + no_autoreq: int + force_arch: Any + quiet: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def finalize_package_data(self) -> None: ... + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build.pyi new file mode 100644 index 00000000..cf3c8a56 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build.pyi @@ -0,0 +1,31 @@ +from typing import Any + +from ..cmd import Command + +def show_compilers() -> None: ... + +class build(Command): + description: str + user_options: Any + boolean_options: Any + help_options: Any + build_base: str + build_purelib: Any + build_platlib: Any + build_lib: Any + build_temp: Any + build_scripts: Any + compiler: Any + plat_name: Any + debug: Any + force: int + executable: Any + parallel: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def has_pure_modules(self): ... + def has_c_libraries(self): ... + def has_ext_modules(self): ... + def has_scripts(self): ... 
+ sub_commands: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_clib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_clib.pyi new file mode 100644 index 00000000..32ab182b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_clib.pyi @@ -0,0 +1,27 @@ +from typing import Any + +from ..cmd import Command + +def show_compilers() -> None: ... + +class build_clib(Command): + description: str + user_options: Any + boolean_options: Any + help_options: Any + build_clib: Any + build_temp: Any + libraries: Any + include_dirs: Any + define: Any + undef: Any + debug: Any + force: int + compiler: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def check_library_list(self, libraries) -> None: ... + def get_library_names(self): ... + def get_source_files(self): ... + def build_libraries(self, libraries) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_ext.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_ext.pyi new file mode 100644 index 00000000..80cd7893 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_ext.pyi @@ -0,0 +1,50 @@ +from typing import Any + +from ..cmd import Command + +extension_name_re: Any + +def show_compilers() -> None: ... + +class build_ext(Command): + description: str + sep_by: Any + user_options: Any + boolean_options: Any + help_options: Any + extensions: Any + build_lib: Any + plat_name: Any + build_temp: Any + inplace: int + package: Any + include_dirs: Any + define: Any + undef: Any + libraries: Any + library_dirs: Any + rpath: Any + link_objects: Any + debug: Any + force: Any + compiler: Any + swig: Any + swig_cpp: Any + swig_opts: Any + user: Any + parallel: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def check_extensions_list(self, extensions) -> None: ... + def get_source_files(self): ... + def get_outputs(self): ... + def build_extensions(self) -> None: ... + def build_extension(self, ext) -> None: ... + def swig_sources(self, sources, extension): ... + def find_swig(self): ... + def get_ext_fullpath(self, ext_name): ... + def get_ext_fullname(self, ext_name): ... + def get_ext_filename(self, ext_name): ... + def get_export_symbols(self, ext): ... + def get_libraries(self, ext): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_py.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_py.pyi new file mode 100644 index 00000000..f4803e30 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_py.pyi @@ -0,0 +1,38 @@ +from typing import Any + +from ..cmd import Command + +class build_py(Command): + description: str + user_options: Any + boolean_options: Any + negative_opt: Any + build_lib: Any + py_modules: Any + package: Any + package_data: Any + package_dir: Any + compile: int + optimize: int + force: Any + def initialize_options(self) -> None: ... + packages: Any + data_files: Any + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_data_files(self): ... + def find_data_files(self, package, src_dir): ... + def build_package_data(self) -> None: ... + def get_package_dir(self, package): ... + def check_package(self, package, package_dir): ... + def check_module(self, module, module_file): ... + def find_package_modules(self, package, package_dir): ... + def find_modules(self): ... + def find_all_modules(self): ... + def get_source_files(self): ... + def get_module_outfile(self, build_dir, package, module): ... + def get_outputs(self, include_bytecode: int = ...): ... + def build_module(self, module, module_file, package): ... + def build_modules(self) -> None: ... + def build_packages(self) -> None: ... + def byte_compile(self, files) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_scripts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_scripts.pyi new file mode 100644 index 00000000..9ce9d7b8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/build_scripts.pyi @@ -0,0 +1,21 @@ +from re import Pattern +from typing import Any + +from ..cmd import Command + +first_line_re: Pattern[str] + +class build_scripts(Command): + description: str + user_options: Any + boolean_options: Any + build_dir: Any + scripts: Any + force: Any + executable: Any + outfiles: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def get_source_files(self): ... + def run(self) -> None: ... + def copy_scripts(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/check.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/check.pyi new file mode 100644 index 00000000..e788e6e5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/check.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete +from typing import Any + +import docutils.utils + +from ..cmd import Command + +# Only defined if docutils is installed. +class SilentReporter(docutils.utils.Reporter): + messages: Any + def __init__( + self, + source, + report_level, + halt_level, + stream: Incomplete | None = ..., + debug: int = ..., + encoding: str = ..., + error_handler: str = ..., + ) -> None: ... 
+ def system_message(self, level, message, *children, **kwargs): ... + +class check(Command): + description: str + user_options: Any + boolean_options: Any + restructuredtext: int + metadata: int + strict: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def warn(self, msg): ... + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def check_restructuredtext(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/clean.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/clean.pyi new file mode 100644 index 00000000..99560aa8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/clean.pyi @@ -0,0 +1,17 @@ +from typing import Any + +from ..cmd import Command + +class clean(Command): + description: str + user_options: Any + boolean_options: Any + build_base: Any + build_lib: Any + build_temp: Any + build_scripts: Any + bdist_base: Any + all: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/config.pyi new file mode 100644 index 00000000..8dffeefd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/config.pyi @@ -0,0 +1,83 @@ +from _typeshed import Incomplete +from collections.abc import Sequence +from re import Pattern + +from ..ccompiler import CCompiler +from ..cmd import Command + +LANG_EXT: dict[str, str] + +class config(Command): + description: str + # Tuple is full name, short name, description + user_options: Sequence[tuple[str, str | None, str]] + compiler: str | CCompiler + cc: str | None + include_dirs: Sequence[str] | None + libraries: Sequence[str] | None + library_dirs: Sequence[str] | None + noisy: int + dump_source: int + temp_files: Sequence[str] + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def try_cpp( + self, + body: str | None = ..., + headers: Sequence[str] | None = ..., + include_dirs: Sequence[str] | None = ..., + lang: str = ..., + ) -> bool: ... + def search_cpp( + self, + pattern: Pattern[str] | str, + body: str | None = ..., + headers: Sequence[str] | None = ..., + include_dirs: Sequence[str] | None = ..., + lang: str = ..., + ) -> bool: ... + def try_compile( + self, body: str, headers: Sequence[str] | None = ..., include_dirs: Sequence[str] | None = ..., lang: str = ... + ) -> bool: ... + def try_link( + self, + body: str, + headers: Sequence[str] | None = ..., + include_dirs: Sequence[str] | None = ..., + libraries: Sequence[str] | None = ..., + library_dirs: Sequence[str] | None = ..., + lang: str = ..., + ) -> bool: ... + def try_run( + self, + body: str, + headers: Sequence[str] | None = ..., + include_dirs: Sequence[str] | None = ..., + libraries: Sequence[str] | None = ..., + library_dirs: Sequence[str] | None = ..., + lang: str = ..., + ) -> bool: ... 
+ def check_func( + self, + func: str, + headers: Sequence[str] | None = ..., + include_dirs: Sequence[str] | None = ..., + libraries: Sequence[str] | None = ..., + library_dirs: Sequence[str] | None = ..., + decl: int = ..., + call: int = ..., + ) -> bool: ... + def check_lib( + self, + library: str, + library_dirs: Sequence[str] | None = ..., + headers: Sequence[str] | None = ..., + include_dirs: Sequence[str] | None = ..., + other_libraries: list[str] = ..., + ) -> bool: ... + def check_header( + self, header: str, include_dirs: Sequence[str] | None = ..., library_dirs: Sequence[str] | None = ..., lang: str = ... + ) -> bool: ... + +def dump_file(filename: str, head: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install.pyi new file mode 100644 index 00000000..661d256e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install.pyi @@ -0,0 +1,63 @@ +from typing import Any + +from ..cmd import Command + +HAS_USER_SITE: bool +SCHEME_KEYS: tuple[str, ...] +INSTALL_SCHEMES: dict[str, dict[Any, Any]] + +class install(Command): + description: str + user_options: Any + boolean_options: Any + negative_opt: Any + prefix: str | None + exec_prefix: Any + home: str | None + user: bool + install_base: Any + install_platbase: Any + root: str | None + install_purelib: Any + install_platlib: Any + install_headers: Any + install_lib: str | None + install_scripts: Any + install_data: Any + install_userbase: Any + install_usersite: Any + compile: Any + optimize: Any + extra_path: Any + install_path_file: int + force: int + skip_build: int + warn_dir: int + build_base: Any + build_lib: Any + record: Any + def initialize_options(self) -> None: ... + config_vars: Any + install_libbase: Any + def finalize_options(self) -> None: ... + def dump_dirs(self, msg) -> None: ... + def finalize_unix(self) -> None: ... + def finalize_other(self) -> None: ... + def select_scheme(self, name) -> None: ... + def expand_basedirs(self) -> None: ... + def expand_dirs(self) -> None: ... + def convert_paths(self, *names) -> None: ... + path_file: Any + extra_dirs: Any + def handle_extra_path(self) -> None: ... + def change_roots(self, *names) -> None: ... + def create_home_path(self) -> None: ... + def run(self) -> None: ... + def create_path_file(self) -> None: ... + def get_outputs(self): ... + def get_inputs(self): ... + def has_lib(self): ... + def has_headers(self): ... + def has_scripts(self): ... + def has_data(self): ... 
+ sub_commands: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_data.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_data.pyi new file mode 100644 index 00000000..6cc9b528 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_data.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from ..cmd import Command + +class install_data(Command): + description: str + user_options: Any + boolean_options: Any + install_dir: Any + outfiles: Any + root: Any + force: int + data_files: Any + warn_dir: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_inputs(self): ... + def get_outputs(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_egg_info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_egg_info.pyi new file mode 100644 index 00000000..776eafc1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_egg_info.pyi @@ -0,0 +1,18 @@ +from typing import Any, ClassVar + +from ..cmd import Command + +class install_egg_info(Command): + description: ClassVar[str] + user_options: ClassVar[list[tuple[str, str | None, str]]] + install_dir: Any + def initialize_options(self) -> None: ... + target: Any + outputs: Any + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_outputs(self) -> list[str]: ... + +def safe_name(name): ... +def safe_version(version): ... +def to_filename(name): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_headers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_headers.pyi new file mode 100644 index 00000000..795bd1cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_headers.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from ..cmd import Command + +class install_headers(Command): + description: str + user_options: Any + boolean_options: Any + install_dir: Any + force: int + outfiles: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_inputs(self): ... + def get_outputs(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_lib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_lib.pyi new file mode 100644 index 00000000..a6a5e4e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_lib.pyi @@ -0,0 +1,25 @@ +from typing import Any + +from ..cmd import Command + +PYTHON_SOURCE_EXTENSION: str + +class install_lib(Command): + description: str + user_options: Any + boolean_options: Any + negative_opt: Any + install_dir: Any + build_dir: Any + force: int + compile: Any + optimize: Any + skip_build: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def build(self) -> None: ... + def install(self): ... + def byte_compile(self, files) -> None: ... + def get_outputs(self): ... + def get_inputs(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_scripts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_scripts.pyi new file mode 100644 index 00000000..92728a16 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/install_scripts.pyi @@ -0,0 +1,18 @@ +from typing import Any + +from ..cmd import Command + +class install_scripts(Command): + description: str + user_options: Any + boolean_options: Any + install_dir: Any + force: int + build_dir: Any + skip_build: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + outfiles: Any + def run(self) -> None: ... + def get_inputs(self): ... + def get_outputs(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/py37compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/py37compat.pyi new file mode 100644 index 00000000..9d921db3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/py37compat.pyi @@ -0,0 +1,5 @@ +from _typeshed import Incomplete + +def compose(f1, f2): ... + +pythonlib: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/register.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/register.pyi new file mode 100644 index 00000000..f19dfcbf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/register.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete +from typing import Any + +from ..config import PyPIRCCommand + +class register(PyPIRCCommand): + description: str + sub_commands: Any + list_classifiers: int + strict: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def classifiers(self) -> None: ... + def verify_metadata(self) -> None: ... + def send_metadata(self) -> None: ... + def build_post_data(self, action): ... 
+ def post_to_server(self, data, auth: Incomplete | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/sdist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/sdist.pyi new file mode 100644 index 00000000..636c4a35 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/sdist.pyi @@ -0,0 +1,42 @@ +from typing import Any + +from ..cmd import Command + +def show_formats() -> None: ... + +class sdist(Command): + description: str + def checking_metadata(self): ... + user_options: Any + boolean_options: Any + help_options: Any + negative_opt: Any + sub_commands: Any + READMES: Any + template: Any + manifest: Any + use_defaults: int + prune: int + manifest_only: int + force_manifest: int + formats: Any + keep_temp: int + dist_dir: Any + archive_files: Any + metadata_check: int + owner: Any + group: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + filelist: Any + def run(self) -> None: ... + def check_metadata(self) -> None: ... + def get_file_list(self) -> None: ... + def add_defaults(self) -> None: ... + def read_template(self) -> None: ... + def prune_file_list(self) -> None: ... + def write_manifest(self) -> None: ... + def read_manifest(self) -> None: ... + def make_release_tree(self, base_dir, files) -> None: ... + def make_distribution(self) -> None: ... + def get_archive_files(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/upload.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/upload.pyi new file mode 100644 index 00000000..e6b77825 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/command/upload.pyi @@ -0,0 +1,17 @@ +from typing import Any, ClassVar + +from ..config import PyPIRCCommand + +class upload(PyPIRCCommand): + description: ClassVar[str] + username: str + password: str + show_response: int + sign: bool + identity: Any + def initialize_options(self) -> None: ... + repository: Any + realm: Any + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def upload_file(self, command: str, pyversion: str, filename: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/config.pyi new file mode 100644 index 00000000..5814a828 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/config.pyi @@ -0,0 +1,17 @@ +from abc import abstractmethod +from distutils.cmd import Command +from typing import ClassVar + +DEFAULT_PYPIRC: str + +class PyPIRCCommand(Command): + DEFAULT_REPOSITORY: ClassVar[str] + DEFAULT_REALM: ClassVar[str] + repository: None + realm: None + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + @abstractmethod + def run(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/core.pyi new file mode 100644 index 00000000..07334edc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/core.pyi @@ -0,0 +1,49 @@ +from collections.abc import Mapping, Sequence +from distutils.cmd import Command as Command +from distutils.dist import Distribution as Distribution +from distutils.extension import Extension as Extension +from typing import Any + +def setup( + *, + name: str = ..., + version: str = ..., + description: str = ..., + long_description: str = ..., + author: str = ..., + author_email: str = ..., + maintainer: str = ..., + maintainer_email: str = ..., + url: str = ..., + download_url: str = ..., + packages: list[str] = ..., + py_modules: list[str] = ..., + scripts: list[str] = ..., + ext_modules: Sequence[Extension] = ..., + classifiers: list[str] = ..., + distclass: type[Distribution] = ..., + script_name: str = ..., + script_args: list[str] = ..., + options: Mapping[str, Any] = ..., + license: str = ..., + keywords: list[str] | str = ..., + platforms: list[str] | str = ..., + cmdclass: Mapping[str, type[Command]] = ..., + data_files: list[tuple[str, list[str]]] = ..., + package_dir: Mapping[str, str] = ..., + obsoletes: list[str] = ..., + provides: list[str] = ..., + requires: list[str] = ..., + command_packages: list[str] = ..., + command_options: Mapping[str, Mapping[str, tuple[Any, Any]]] = ..., + package_data: Mapping[str, list[str]] = ..., + include_package_data: bool = ..., + libraries: list[str] = ..., + headers: list[str] = ..., + ext_package: str = ..., + include_dirs: list[str] = ..., + password: str = ..., + fullname: str = ..., + **attrs: Any, +) -> None: ... +def run_setup(script_name: str, script_args: list[str] | None = ..., stop_after: str = ...) -> Distribution: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/cygwinccompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/cygwinccompiler.pyi new file mode 100644 index 00000000..1f85b254 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/cygwinccompiler.pyi @@ -0,0 +1,4 @@ +from distutils.unixccompiler import UnixCCompiler + +class CygwinCCompiler(UnixCCompiler): ... +class Mingw32CCompiler(CygwinCCompiler): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/debug.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/debug.pyi new file mode 100644 index 00000000..11f28a8b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/debug.pyi @@ -0,0 +1 @@ +DEBUG: bool | None diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/dep_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/dep_util.pyi new file mode 100644 index 00000000..929d6ffd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/dep_util.pyi @@ -0,0 +1,3 @@ +def newer(source: str, target: str) -> bool: ... +def newer_pairwise(sources: list[str], targets: list[str]) -> list[tuple[str, str]]: ... +def newer_group(sources: list[str], target: str, missing: str = ...) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/dir_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/dir_util.pyi new file mode 100644 index 00000000..ffe5ff1c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/dir_util.pyi @@ -0,0 +1,13 @@ +def mkpath(name: str, mode: int = ..., verbose: int = ..., dry_run: int = ...) -> list[str]: ... +def create_tree(base_dir: str, files: list[str], mode: int = ..., verbose: int = ..., dry_run: int = ...) -> None: ... +def copy_tree( + src: str, + dst: str, + preserve_mode: int = ..., + preserve_times: int = ..., + preserve_symlinks: int = ..., + update: int = ..., + verbose: int = ..., + dry_run: int = ..., +) -> list[str]: ... +def remove_tree(directory: str, verbose: int = ..., dry_run: int = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/dist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/dist.pyi new file mode 100644 index 00000000..fc1bce26 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/dist.pyi @@ -0,0 +1,59 @@ +from _typeshed import FileDescriptorOrPath, SupportsWrite +from collections.abc import Iterable, Mapping +from distutils.cmd import Command +from typing import IO, Any + +class DistributionMetadata: + def __init__(self, path: FileDescriptorOrPath | None = ...) -> None: ... + name: str | None + version: str | None + author: str | None + author_email: str | None + maintainer: str | None + maintainer_email: str | None + url: str | None + license: str | None + description: str | None + long_description: str | None + keywords: str | list[str] | None + platforms: str | list[str] | None + classifiers: str | list[str] | None + download_url: str | None + provides: list[str] | None + requires: list[str] | None + obsoletes: list[str] | None + def read_pkg_file(self, file: IO[str]) -> None: ... + def write_pkg_info(self, base_dir: str) -> None: ... + def write_pkg_file(self, file: SupportsWrite[str]) -> None: ... + def get_name(self) -> str: ... + def get_version(self) -> str: ... 
+ def get_fullname(self) -> str: ... + def get_author(self) -> str: ... + def get_author_email(self) -> str: ... + def get_maintainer(self) -> str: ... + def get_maintainer_email(self) -> str: ... + def get_contact(self) -> str: ... + def get_contact_email(self) -> str: ... + def get_url(self) -> str: ... + def get_license(self) -> str: ... + def get_licence(self) -> str: ... + def get_description(self) -> str: ... + def get_long_description(self) -> str: ... + def get_keywords(self) -> str | list[str]: ... + def get_platforms(self) -> str | list[str]: ... + def get_classifiers(self) -> str | list[str]: ... + def get_download_url(self) -> str: ... + def get_requires(self) -> list[str]: ... + def set_requires(self, value: Iterable[str]) -> None: ... + def get_provides(self) -> list[str]: ... + def set_provides(self, value: Iterable[str]) -> None: ... + def get_obsoletes(self) -> list[str]: ... + def set_obsoletes(self, value: Iterable[str]) -> None: ... + +class Distribution: + cmdclass: dict[str, type[Command]] + metadata: DistributionMetadata + def __init__(self, attrs: Mapping[str, Any] | None = ...) -> None: ... + def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... + def parse_config_files(self, filenames: Iterable[str] | None = ...) -> None: ... + def get_command_obj(self, command: str, create: bool = ...) -> Command | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/errors.pyi new file mode 100644 index 00000000..e483362b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/errors.pyi @@ -0,0 +1,19 @@ +class DistutilsError(Exception): ... +class DistutilsModuleError(DistutilsError): ... +class DistutilsClassError(DistutilsError): ... +class DistutilsGetoptError(DistutilsError): ... +class DistutilsArgError(DistutilsError): ... +class DistutilsFileError(DistutilsError): ... +class DistutilsOptionError(DistutilsError): ... +class DistutilsSetupError(DistutilsError): ... +class DistutilsPlatformError(DistutilsError): ... +class DistutilsExecError(DistutilsError): ... +class DistutilsInternalError(DistutilsError): ... +class DistutilsTemplateError(DistutilsError): ... +class DistutilsByteCompileError(DistutilsError): ... +class CCompilerError(Exception): ... +class PreprocessError(CCompilerError): ... +class CompileError(CCompilerError): ... +class LibError(CCompilerError): ... +class LinkError(CCompilerError): ... +class UnknownFileError(CCompilerError): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/extension.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/extension.pyi new file mode 100644 index 00000000..5639f44a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/extension.pyi @@ -0,0 +1,36 @@ +class Extension: + name: str + sources: list[str] + include_dirs: list[str] + define_macros: list[tuple[str, str | None]] + undef_macros: list[str] + library_dirs: list[str] + libraries: list[str] + runtime_library_dirs: list[str] + extra_objects: list[str] + extra_compile_args: list[str] + extra_link_args: list[str] + export_symbols: list[str] + swig_opts: list[str] + depends: list[str] + language: str | None + optional: bool | None + def __init__( + self, + name: str, + sources: list[str], + include_dirs: list[str] | None = ..., + define_macros: list[tuple[str, str | None]] | None = ..., + undef_macros: list[str] | None = ..., + library_dirs: list[str] | None = ..., + libraries: list[str] | None = ..., + runtime_library_dirs: list[str] | None = ..., + extra_objects: list[str] | None = ..., + extra_compile_args: list[str] | None = ..., + extra_link_args: list[str] | None = ..., + export_symbols: list[str] | None = ..., + swig_opts: list[str] | None = ..., + depends: list[str] | None = ..., + language: str | None = ..., + optional: bool | None = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/fancy_getopt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/fancy_getopt.pyi new file mode 100644 index 00000000..6a7124bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/fancy_getopt.pyi @@ -0,0 +1,24 @@ +from collections.abc import Iterable, Mapping +from typing import Any, overload +from typing_extensions import TypeAlias + +_Option: TypeAlias = tuple[str, str | None, str] +_GR: TypeAlias = tuple[list[str], OptionDummy] + +def fancy_getopt( + options: list[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: list[str] | None +) -> list[str] | _GR: ... +def wrap_text(text: str, width: int) -> list[str]: ... + +class FancyGetopt: + def __init__(self, option_table: list[_Option] | None = ...) -> None: ... + # TODO kinda wrong, `getopt(object=object())` is invalid + @overload + def getopt(self, args: list[str] | None = ...) -> _GR: ... + @overload + def getopt(self, args: list[str] | None, object: Any) -> list[str]: ... + def get_option_order(self) -> list[tuple[str, str]]: ... + def generate_help(self, header: str | None = ...) -> list[str]: ... + +class OptionDummy: + def __init__(self, options: Iterable[str] = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/file_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/file_util.pyi new file mode 100644 index 00000000..b3127841 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/file_util.pyi @@ -0,0 +1,14 @@ +from collections.abc import Sequence + +def copy_file( + src: str, + dst: str, + preserve_mode: bool = ..., + preserve_times: bool = ..., + update: bool = ..., + link: str | None = ..., + verbose: bool = ..., + dry_run: bool = ..., +) -> tuple[str, str]: ... +def move_file(src: str, dst: str, verbose: bool = ..., dry_run: bool = ...) -> str: ... +def write_file(filename: str, contents: Sequence[str]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/filelist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/filelist.pyi new file mode 100644 index 00000000..1cfdcf08 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/filelist.pyi @@ -0,0 +1,51 @@ +from collections.abc import Iterable +from re import Pattern +from typing import overload +from typing_extensions import Literal + +# class is entirely undocumented +class FileList: + allfiles: Iterable[str] | None + files: list[str] + def __init__(self, warn: None = ..., debug_print: None = ...) -> None: ... + def set_allfiles(self, allfiles: Iterable[str]) -> None: ... + def findall(self, dir: str = ...) -> None: ... + def debug_print(self, msg: str) -> None: ... + def append(self, item: str) -> None: ... + def extend(self, items: Iterable[str]) -> None: ... + def sort(self) -> None: ... + def remove_duplicates(self) -> None: ... + def process_template_line(self, line: str) -> None: ... + @overload + def include_pattern( + self, pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... + ) -> bool: ... + @overload + def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> bool: ... + @overload + def include_pattern( + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... + ) -> bool: ... + @overload + def exclude_pattern( + self, pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... + ) -> bool: ... + @overload + def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> bool: ... + @overload + def exclude_pattern( + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... + ) -> bool: ... + +def findall(dir: str = ...) -> list[str]: ... +def glob_to_re(pattern: str) -> str: ... +@overload +def translate_pattern( + pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[False, 0] = ... +) -> Pattern[str]: ... +@overload +def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> Pattern[str]: ... +@overload +def translate_pattern( + pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... +) -> Pattern[str]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/log.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/log.pyi new file mode 100644 index 00000000..7f4020b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/log.pyi @@ -0,0 +1,16 @@ +from typing import Any + +DEBUG: int +INFO: int +WARN: int +ERROR: int +FATAL: int + +def log(level: int, msg: str, *args: Any) -> None: ... +def debug(msg: str, *args: Any) -> None: ... +def info(msg: str, *args: Any) -> None: ... +def warn(msg: str, *args: Any) -> None: ... +def error(msg: str, *args: Any) -> None: ... +def fatal(msg: str, *args: Any) -> None: ... +def set_threshold(level: int) -> int: ... +def set_verbosity(v: int) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/msvccompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/msvccompiler.pyi new file mode 100644 index 00000000..80872a6b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/msvccompiler.pyi @@ -0,0 +1,3 @@ +from distutils.ccompiler import CCompiler + +class MSVCCompiler(CCompiler): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/spawn.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/spawn.pyi new file mode 100644 index 00000000..2dc07dcd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/spawn.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete + +def spawn( + cmd: list[str], search_path: bool = ..., verbose: bool = ..., dry_run: bool = ..., env: Incomplete | None = ... +) -> None: ... +def find_executable(executable: str, path: str | None = ...) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/sysconfig.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/sysconfig.pyi new file mode 100644 index 00000000..bf7db9c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/sysconfig.pyi @@ -0,0 +1,13 @@ +from collections.abc import Mapping +from distutils.ccompiler import CCompiler + +PREFIX: str +EXEC_PREFIX: str + +def get_config_var(name: str) -> int | str | None: ... +def get_config_vars(*args: str) -> Mapping[str, int | str]: ... +def get_config_h_filename() -> str: ... +def get_makefile_filename() -> str: ... +def get_python_inc(plat_specific: bool = ..., prefix: str | None = ...) -> str: ... +def get_python_lib(plat_specific: bool = ..., standard_lib: bool = ..., prefix: str | None = ...) -> str: ... +def customize_compiler(compiler: CCompiler) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/text_file.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/text_file.pyi new file mode 100644 index 00000000..ace642e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/text_file.pyi @@ -0,0 +1,21 @@ +from typing import IO + +class TextFile: + def __init__( + self, + filename: str | None = ..., + file: IO[str] | None = ..., + *, + strip_comments: bool = ..., + lstrip_ws: bool = ..., + rstrip_ws: bool = ..., + skip_blanks: bool = ..., + join_lines: bool = ..., + collapse_join: bool = ..., + ) -> None: ... + def open(self, filename: str) -> None: ... + def close(self) -> None: ... + def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = ...) -> None: ... + def readline(self) -> str | None: ... + def readlines(self) -> list[str]: ... + def unreadline(self, line: str) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/unixccompiler.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/unixccompiler.pyi new file mode 100644 index 00000000..e1d44347 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/unixccompiler.pyi @@ -0,0 +1,3 @@ +from distutils.ccompiler import CCompiler + +class UnixCCompiler(CCompiler): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/util.pyi new file mode 100644 index 00000000..6790712f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/util.pyi @@ -0,0 +1,30 @@ +from collections.abc import Callable, Mapping +from typing import Any +from typing_extensions import Literal + +def get_host_platform() -> str: ... +def get_platform() -> str: ... +def get_macosx_target_ver_from_syscfg(): ... +def get_macosx_target_ver(): ... +def split_version(s: str) -> list[int]: ... +def convert_path(pathname: str) -> str: ... +def change_root(new_root: str, pathname: str) -> str: ... +def check_environ() -> None: ... +def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... +def grok_environment_error(exc: object, prefix: str = ...) -> str: ... +def split_quoted(s: str) -> list[str]: ... +def execute( + func: Callable[..., object], args: tuple[Any, ...], msg: str | None = ..., verbose: bool = ..., dry_run: bool = ... +) -> None: ... +def strtobool(val: str) -> Literal[0, 1]: ... +def byte_compile( + py_files: list[str], + optimize: int = ..., + force: bool = ..., + prefix: str | None = ..., + base_dir: str | None = ..., + verbose: bool = ..., + dry_run: bool = ..., + direct: bool | None = ..., +) -> None: ... +def rfc822_escape(header: str) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/version.pyi new file mode 100644 index 00000000..b6662d04 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/_distutils/version.pyi @@ -0,0 +1,26 @@ +from re import Pattern +from typing_extensions import Self + +class Version: + def __init__(self, vstring: str | None = ...) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Self | str) -> bool: ... + def __le__(self, other: Self | str) -> bool: ... + def __gt__(self, other: Self | str) -> bool: ... + def __ge__(self, other: Self | str) -> bool: ... + +class StrictVersion(Version): + version_re: Pattern[str] + version: tuple[int, int, int] + prerelease: tuple[str, int] | None + def __init__(self, vstring: str | None = ...) -> None: ... + def parse(self, vstring: str) -> Self: ... + def _cmp(self, other: Self | str) -> bool: ... + +class LooseVersion(Version): + component_re: Pattern[str] + vstring: str + version: tuple[str | int, ...] + def __init__(self, vstring: str | None = ...) -> None: ... + def parse(self, vstring: str) -> Self: ... + def _cmp(self, other: Self | str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/archive_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/archive_util.pyi new file mode 100644 index 00000000..86bb21a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/archive_util.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from distutils.errors import DistutilsError +from typing import Any + +class UnrecognizedFormat(DistutilsError): ... + +def default_filter(src, dst): ... +def unpack_archive(filename, extract_dir, progress_filter=..., drivers: Incomplete | None = ...) -> None: ... +def unpack_directory(filename, extract_dir, progress_filter=...) -> None: ... +def unpack_zipfile(filename, extract_dir, progress_filter=...) -> None: ... +def unpack_tarfile(filename, extract_dir, progress_filter=...): ... + +extraction_drivers: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/build_meta.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/build_meta.pyi new file mode 100644 index 00000000..47bfc6e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/build_meta.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from typing import Any + +from setuptools import dist + +class SetupRequirementsError(BaseException): + specifiers: Any + def __init__(self, specifiers) -> None: ... + +class Distribution(dist.Distribution): + def fetch_build_eggs(self, specifiers) -> None: ... + @classmethod + def patch(cls) -> None: ... + +class _BuildMetaBackend: + def run_setup(self, setup_script: str = ...) -> None: ... + def get_requires_for_build_wheel(self, config_settings: Incomplete | None = ...): ... + def get_requires_for_build_sdist(self, config_settings: Incomplete | None = ...): ... + def prepare_metadata_for_build_wheel(self, metadata_directory, config_settings: Incomplete | None = ...): ... 
+ def build_wheel( + self, wheel_directory, config_settings: Incomplete | None = ..., metadata_directory: Incomplete | None = ... + ): ... + def build_sdist(self, sdist_directory, config_settings: Incomplete | None = ...): ... + +class _BuildMetaLegacyBackend(_BuildMetaBackend): + def run_setup(self, setup_script: str = ...) -> None: ... + +get_requires_for_build_wheel: Any +get_requires_for_build_sdist: Any +prepare_metadata_for_build_wheel: Any +build_wheel: Any +build_sdist: Any +__legacy__: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/alias.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/alias.pyi new file mode 100644 index 00000000..ff79d933 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/alias.pyi @@ -0,0 +1,18 @@ +from typing import Any + +from setuptools.command.setopt import option_base + +def shquote(arg): ... + +class alias(option_base): + description: str + command_consumes_arguments: bool + user_options: Any + boolean_options: Any + args: Any + remove: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + +def format_alias(name, aliases): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/bdist_egg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/bdist_egg.pyi new file mode 100644 index 00000000..fca2f757 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/bdist_egg.pyi @@ -0,0 +1,48 @@ +from typing import Any + +from setuptools import Command + +def strip_module(filename): ... +def sorted_walk(dir) -> None: ... +def write_stub(resource, pyfile) -> None: ... + +class bdist_egg(Command): + description: str + user_options: Any + boolean_options: Any + bdist_dir: Any + plat_name: Any + keep_temp: int + dist_dir: Any + skip_build: int + egg_output: Any + exclude_source_files: Any + def initialize_options(self) -> None: ... + egg_info: Any + def finalize_options(self) -> None: ... + def do_install_data(self) -> None: ... + def get_outputs(self): ... + def call_command(self, cmdname, **kw): ... + stubs: Any + def run(self) -> None: ... + def zap_pyfiles(self) -> None: ... + def zip_safe(self): ... + def gen_header(self): ... + def copy_metadata_to(self, target_dir) -> None: ... + def get_ext_outputs(self): ... + +NATIVE_EXTENSIONS: Any + +def walk_egg(egg_dir) -> None: ... +def analyze_egg(egg_dir, stubs): ... +def write_safety_flag(egg_dir, safe) -> None: ... + +safety_flags: Any + +def scan_module(egg_dir, base, name, stubs): ... +def iter_symbols(code) -> None: ... +def can_scan(): ... + +INSTALL_DIRECTORY_ATTRS: Any + +def make_zipfile(zip_filename, base_dir, verbose: int = ..., dry_run: int = ..., compress: bool = ..., mode: str = ...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/bdist_rpm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/bdist_rpm.pyi new file mode 100644 index 00000000..4942eafe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/bdist_rpm.pyi @@ -0,0 +1,4 @@ +import distutils.command.bdist_rpm as orig + +class bdist_rpm(orig.bdist_rpm): + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/build_clib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/build_clib.pyi new file mode 100644 index 00000000..c088f7d7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/build_clib.pyi @@ -0,0 +1,4 @@ +import distutils.command.build_clib as orig + +class build_clib(orig.build_clib): + def build_libraries(self, libraries) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/build_ext.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/build_ext.pyi new file mode 100644 index 00000000..b3475d89 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/build_ext.pyi @@ -0,0 +1,45 @@ +from _typeshed import Incomplete +from distutils.command.build_ext import build_ext as _build_ext +from typing import Any + +have_rtld: bool +use_stubs: bool +libtype: str + +def if_dl(s): ... +def get_abi3_suffix(): ... + +class build_ext(_build_ext): + inplace: Any + def run(self) -> None: ... + def copy_extensions_to_source(self) -> None: ... + def get_ext_filename(self, fullname): ... + shlib_compiler: Any + shlibs: Any + ext_map: Any + def initialize_options(self) -> None: ... + extensions: Any + def finalize_options(self) -> None: ... + def setup_shlib_compiler(self) -> None: ... + def get_export_symbols(self, ext): ... + compiler: Any + def build_extension(self, ext) -> None: ... + def links_to_dynamic(self, ext): ... + def get_outputs(self): ... + def write_stub(self, output_dir, ext, compile: bool = ...) -> None: ... + +def link_shared_object( + self, + objects, + output_libname, + output_dir: Incomplete | None = ..., + libraries: Incomplete | None = ..., + library_dirs: Incomplete | None = ..., + runtime_library_dirs: Incomplete | None = ..., + export_symbols: Incomplete | None = ..., + debug: int = ..., + extra_preargs: Incomplete | None = ..., + extra_postargs: Incomplete | None = ..., + build_temp: Incomplete | None = ..., + target_lang: Incomplete | None = ..., +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/build_py.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/build_py.pyi new file mode 100644 index 00000000..927206eb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/build_py.pyi @@ -0,0 +1,25 @@ +import distutils.command.build_py as orig +from typing import Any + +def make_writable(target) -> None: ... 
+ +class build_py(orig.build_py): + package_data: Any + exclude_package_data: Any + def finalize_options(self) -> None: ... + def run(self) -> None: ... + data_files: Any + def __getattr__(self, attr: str): ... + def build_module(self, module, module_file, package): ... + def find_data_files(self, package, src_dir): ... + def build_package_data(self) -> None: ... + manifest_files: Any + def analyze_manifest(self) -> None: ... + def get_data_files(self) -> None: ... + def check_package(self, package, package_dir): ... + packages_checked: Any + def initialize_options(self) -> None: ... + def get_package_dir(self, package): ... + def exclude_data_files(self, package, src_dir, files): ... + +def assert_relative(path): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/develop.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/develop.pyi new file mode 100644 index 00000000..f7c1058c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/develop.pyi @@ -0,0 +1,31 @@ +from typing import Any + +from setuptools import namespaces +from setuptools.command.easy_install import easy_install + +class develop(namespaces.DevelopInstaller, easy_install): + description: str + user_options: Any + boolean_options: Any + command_consumes_arguments: bool + multi_version: bool + def run(self) -> None: ... # type: ignore[override] + uninstall: Any + egg_path: Any + setup_path: Any + always_copy_from: str + def initialize_options(self) -> None: ... + args: Any + egg_link: Any + egg_base: Any + dist: Any + def finalize_options(self) -> None: ... + def install_for_development(self) -> None: ... + def uninstall_link(self) -> None: ... + def install_egg_scripts(self, dist): ... + def install_wrapper_scripts(self, dist): ... + +class VersionlessRequirement: + def __init__(self, dist) -> None: ... + def __getattr__(self, name: str): ... + def as_requirement(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/dist_info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/dist_info.pyi new file mode 100644 index 00000000..aa9d3f40 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/dist_info.pyi @@ -0,0 +1,10 @@ +from distutils.core import Command +from typing import Any + +class dist_info(Command): + description: str + user_options: Any + egg_base: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/easy_install.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/easy_install.pyi new file mode 100644 index 00000000..d0289f62 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/easy_install.pyi @@ -0,0 +1,150 @@ +from _typeshed import Incomplete +from collections.abc import Iterator +from typing import Any + +from pkg_resources import Environment +from setuptools import Command, SetuptoolsDeprecationWarning + +class easy_install(Command): + description: str + command_consumes_arguments: bool + user_options: Any + boolean_options: Any + negative_opt: Any + create_index: Any + user: int + zip_ok: Any + install_dir: Any + index_url: Any + find_links: Any + build_directory: Any + args: Any + optimize: Any + upgrade: Any + editable: Any + root: Any + version: Any + install_purelib: Any + install_platlib: Any + install_headers: Any + install_lib: Any + install_scripts: Any + install_data: Any + install_base: Any + install_platbase: Any + install_userbase: Any + install_usersite: Any + no_find_links: Any + package_index: Any + pth_file: Any + site_dirs: Any + installed_projects: Any + verbose: Any + def initialize_options(self) -> None: ... + def delete_blockers(self, blockers) -> None: ... + config_vars: Any + script_dir: Any + all_site_dirs: Any + shadow_path: Any + local_index: Any + outputs: Any + def finalize_options(self) -> None: ... + def expand_basedirs(self) -> None: ... + def expand_dirs(self) -> None: ... + def run(self, show_deprecation: bool = ...) -> None: ... + def pseudo_tempname(self): ... + def warn_deprecated_options(self) -> None: ... + def check_site_dir(self) -> None: ... + def cant_write_to_target(self) -> None: ... + def check_pth_processing(self): ... + def install_egg_scripts(self, dist) -> None: ... + def add_output(self, path) -> None: ... + def not_editable(self, spec) -> None: ... + def check_editable(self, spec) -> None: ... + def easy_install(self, spec, deps: bool = ...): ... + def install_item(self, spec, download, tmpdir, deps, install_needed: bool = ...): ... + def select_scheme(self, name) -> None: ... + def process_distribution(self, requirement, dist, deps: bool = ..., *info) -> None: ... + def should_unzip(self, dist): ... + def maybe_move(self, spec, dist_filename, setup_base): ... + def install_wrapper_scripts(self, dist) -> None: ... + def install_script(self, dist, script_name, script_text, dev_path: Incomplete | None = ...) -> None: ... + def write_script(self, script_name, contents, mode: str = ..., blockers=...) -> None: ... + def install_eggs(self, spec, dist_filename, tmpdir): ... + def egg_distribution(self, egg_path): ... + def install_egg(self, egg_path, tmpdir): ... + def install_exe(self, dist_filename, tmpdir): ... + def exe_to_egg(self, dist_filename, egg_tmp): ... + def install_wheel(self, wheel_path, tmpdir): ... + def installation_report(self, req, dist, what: str = ...): ... + def report_editable(self, spec, setup_script): ... + def run_setup(self, setup_script, setup_base, args) -> None: ... + def build_and_install(self, setup_script, setup_base): ... + def update_pth(self, dist) -> None: ... + def unpack_progress(self, src, dst): ... + def unpack_and_compile(self, egg_path, destination): ... + def byte_compile(self, to_compile) -> None: ... + def create_home_path(self) -> None: ... 
+ INSTALL_SCHEMES: Any + DEFAULT_SCHEME: Any + +def extract_wininst_cfg(dist_filename): ... +def get_exe_prefixes(exe_filename): ... + +class PthDistributions(Environment): + dirty: bool + filename: Any + sitedirs: Any + basedir: Any + def __init__(self, filename, sitedirs=...) -> None: ... + def save(self) -> None: ... + def add(self, dist) -> None: ... + def remove(self, dist) -> None: ... + def make_relative(self, path): ... + +class RewritePthDistributions(PthDistributions): + prelude: Any + postlude: Any + +class CommandSpec(list[str]): + options: Any + split_args: Any + @classmethod + def best(cls) -> type[CommandSpec]: ... + @classmethod + def from_param(cls, param) -> CommandSpec: ... + @classmethod + def from_environment(cls) -> CommandSpec: ... + @classmethod + def from_string(cls, string: str) -> CommandSpec: ... + def install_options(self, script_text: str) -> None: ... + def as_header(self) -> str: ... + +class WindowsCommandSpec(CommandSpec): + split_args: Any + +class ScriptWriter: + template: Any + command_spec_class: Any + @classmethod + def get_script_args(cls, dist, executable: Incomplete | None = ..., wininst: bool = ...) -> Iterator[tuple[str, str]]: ... + @classmethod + def get_script_header(cls, script_text, executable: Incomplete | None = ..., wininst: bool = ...) -> str: ... + @classmethod + def get_args(cls, dist, header: Incomplete | None = ...) -> Iterator[tuple[str, str]]: ... + @classmethod + def get_writer(cls, force_windows: bool) -> type[ScriptWriter]: ... + @classmethod + def best(cls) -> type[ScriptWriter]: ... + @classmethod + def get_header(cls, script_text: str = ..., executable: Incomplete | None = ...) -> str: ... + +class WindowsScriptWriter(ScriptWriter): + command_spec_class: Any + @classmethod + def get_writer(cls): ... + @classmethod + def best(cls): ... + +class WindowsExecutableLauncherWriter(WindowsScriptWriter): ... +class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/egg_info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/egg_info.pyi new file mode 100644 index 00000000..4d01d62c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/egg_info.pyi @@ -0,0 +1,85 @@ +from distutils.filelist import FileList as _FileList +from typing import Any + +from setuptools import Command, SetuptoolsDeprecationWarning +from setuptools.command.sdist import sdist + +def translate_pattern(glob): ... + +class InfoCommon: + tag_build: Any + tag_date: Any + @property + def name(self): ... + def tagged_version(self): ... + def tags(self): ... + @property + def vtags(self): ... + +class egg_info(InfoCommon, Command): + description: str + user_options: Any + boolean_options: Any + negative_opt: Any + egg_base: Any + egg_name: Any + egg_info: Any + egg_version: Any + broken_egg_info: bool + def initialize_options(self) -> None: ... + @property + def tag_svn_revision(self) -> None: ... + @tag_svn_revision.setter + def tag_svn_revision(self, value) -> None: ... + def save_version_info(self, filename) -> None: ... + def finalize_options(self) -> None: ... + def write_or_delete_file(self, what, filename, data, force: bool = ...) -> None: ... + def write_file(self, what, filename, data) -> None: ... + def delete_file(self, filename) -> None: ... + def run(self) -> None: ... 
+ filelist: Any + def find_sources(self) -> None: ... + def check_broken_egg_info(self) -> None: ... + +class FileList(_FileList): + def __init__(self, warn=..., debug_print=..., ignore_egg_info_dir: bool = ...) -> None: ... + def process_template_line(self, line) -> None: ... + def include(self, pattern): ... + def exclude(self, pattern): ... + def recursive_include(self, dir, pattern): ... + def recursive_exclude(self, dir, pattern): ... + def graft(self, dir): ... + def prune(self, dir): ... + def global_include(self, pattern): ... + def global_exclude(self, pattern): ... + def append(self, item) -> None: ... + def extend(self, paths) -> None: ... + +class manifest_maker(sdist): + template: str + use_defaults: int + prune: int + manifest_only: int + force_manifest: int + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + filelist: Any + def run(self) -> None: ... + def write_manifest(self) -> None: ... + def warn(self, msg) -> None: ... + def add_defaults(self) -> None: ... + def add_license_files(self) -> None: ... + def prune_file_list(self) -> None: ... + +def write_file(filename, contents) -> None: ... +def write_pkg_info(cmd, basename, filename) -> None: ... +def warn_depends_obsolete(cmd, basename, filename) -> None: ... +def write_requirements(cmd, basename, filename) -> None: ... +def write_setup_requirements(cmd, basename, filename) -> None: ... +def write_toplevel_names(cmd, basename, filename) -> None: ... +def overwrite_arg(cmd, basename, filename) -> None: ... +def write_arg(cmd, basename, filename, force: bool = ...) -> None: ... +def write_entries(cmd, basename, filename) -> None: ... +def get_pkg_info_revision(): ... + +class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install.pyi new file mode 100644 index 00000000..50594f2d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install.pyi @@ -0,0 +1,16 @@ +import distutils.command.install as orig +from typing import Any + +class install(orig.install): + user_options: Any + boolean_options: Any + new_commands: Any + old_and_unmanageable: Any + single_version_externally_managed: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + path_file: Any + extra_dirs: str + def handle_extra_path(self): ... + def run(self): ... + def do_egg_install(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install_egg_info.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install_egg_info.pyi new file mode 100644 index 00000000..2977e32c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install_egg_info.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from setuptools import Command, namespaces + +class install_egg_info(namespaces.Installer, Command): + description: str + user_options: Any + install_dir: Any + def initialize_options(self) -> None: ... + source: Any + target: Any + outputs: Any + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_outputs(self): ... + def copytree(self): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install_lib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install_lib.pyi new file mode 100644 index 00000000..cd76c475 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install_lib.pyi @@ -0,0 +1,9 @@ +import distutils.command.install_lib as orig + +class install_lib(orig.install_lib): + def run(self) -> None: ... + def get_exclusions(self): ... + def copy_tree( + self, infile, outfile, preserve_mode: int = ..., preserve_times: int = ..., preserve_symlinks: int = ..., level: int = ... + ): ... + def get_outputs(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install_scripts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install_scripts.pyi new file mode 100644 index 00000000..354faa7f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/install_scripts.pyi @@ -0,0 +1,9 @@ +import distutils.command.install_scripts as orig +from typing import Any + +class install_scripts(orig.install_scripts): + no_ep: bool + def initialize_options(self) -> None: ... + outfiles: Any + def run(self) -> None: ... + def write_script(self, script_name, contents, mode: str = ..., *ignored) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/py36compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/py36compat.pyi new file mode 100644 index 00000000..eac93703 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/py36compat.pyi @@ -0,0 +1 @@ +class sdist_add_defaults: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/register.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/register.pyi new file mode 100644 index 00000000..02c63f18 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/register.pyi @@ -0,0 +1,4 @@ +import distutils.command.register as orig + +class register(orig.register): + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/rotate.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/rotate.pyi new file mode 100644 index 00000000..5d5de6f1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/rotate.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from setuptools import Command + +class rotate(Command): + description: str + user_options: Any + boolean_options: Any + match: Any + dist_dir: Any + keep: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/saveopts.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/saveopts.pyi new file mode 100644 index 00000000..09d435c6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/saveopts.pyi @@ -0,0 +1,5 @@ +from setuptools.command.setopt import option_base + +class saveopts(option_base): + description: str + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/sdist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/sdist.pyi new file mode 100644 index 00000000..0d150e7e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/sdist.pyi @@ -0,0 +1,19 @@ +import distutils.command.sdist as orig +from typing import Any + +from .py36compat import sdist_add_defaults + +def walk_revctrl(dirname: str = ...) -> None: ... + +class sdist(sdist_add_defaults, orig.sdist): + user_options: Any + negative_opt: Any + README_EXTENSIONS: Any + READMES: Any + filelist: Any + def run(self) -> None: ... + def initialize_options(self) -> None: ... + def make_distribution(self) -> None: ... + def check_readme(self) -> None: ... + def make_release_tree(self, base_dir, files) -> None: ... + def read_manifest(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/setopt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/setopt.pyi new file mode 100644 index 00000000..0461717a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/setopt.pyi @@ -0,0 +1,30 @@ +from abc import abstractmethod +from typing import Any + +from setuptools import Command + +def config_file(kind: str = ...): ... +def edit_config(filename, settings, dry_run: bool = ...) -> None: ... + +class option_base(Command): + user_options: Any + boolean_options: Any + global_config: Any + user_config: Any + filename: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + @abstractmethod + def run(self) -> None: ... + +class setopt(option_base): + description: str + user_options: Any + boolean_options: Any + command: Any + option: Any + set_value: Any + remove: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/test.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/test.pyi new file mode 100644 index 00000000..1b7a32f4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/test.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable +from types import ModuleType +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self +from unittest import TestLoader, TestSuite + +from setuptools import Command + +_T = TypeVar("_T") + +class ScanningLoader(TestLoader): + def __init__(self) -> None: ... + def loadTestsFromModule(self, module: ModuleType, pattern: Incomplete | None = ...) -> list[TestSuite]: ... # type: ignore[override] + +class NonDataProperty(Generic[_T]): + fget: Callable[..., _T] + def __init__(self, fget: Callable[..., _T]) -> None: ... + @overload + def __get__(self, obj: None, objtype: Unused = None) -> Self: ... + @overload + def __get__(self, obj: Any, objtype: Unused = None) -> _T: ... + +class test(Command): + description: str + user_options: Any + test_suite: Any + test_module: Any + test_loader: Any + test_runner: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + @NonDataProperty + def test_args(self) -> list[str]: ... + def with_project_on_sys_path(self, func) -> None: ... + def project_on_sys_path(self, include_dists=...): ... + @staticmethod + def paths_on_pythonpath(paths) -> None: ... + @staticmethod + def install_dists(dist): ... + def run(self) -> None: ... + def run_tests(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/upload.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/upload.pyi new file mode 100644 index 00000000..ea60d6e6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/upload.pyi @@ -0,0 +1,4 @@ +from distutils.command import upload as orig + +class upload(orig.upload): + def run(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/upload_docs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/upload_docs.pyi new file mode 100644 index 00000000..0660bff7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/command/upload_docs.pyi @@ -0,0 +1,18 @@ +from typing import Any, ClassVar + +from .upload import upload + +class upload_docs(upload): + DEFAULT_REPOSITORY: ClassVar[str] + description: ClassVar[str] + user_options: ClassVar[list[tuple[str, str | None, str]]] + boolean_options: ClassVar[list[str]] + def has_sphinx(self): ... + sub_commands: Any + upload_dir: Any + target_dir: Any + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def create_zipfile(self, filename) -> None: ... + def run(self) -> None: ... + def upload_file(self, filename) -> None: ... 
# type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/config.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/config.pyi new file mode 100644 index 00000000..251f2b80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/config.pyi @@ -0,0 +1,2 @@ +def read_configuration(filepath, find_others: bool = ..., ignore_option_errors: bool = ...): ... +def parse_configuration(distribution, command_options, ignore_option_errors: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/dep_util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/dep_util.pyi new file mode 100644 index 00000000..0ac00802 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/dep_util.pyi @@ -0,0 +1 @@ +def newer_pairwise_group(sources_groups, targets): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/depends.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/depends.pyi new file mode 100644 index 00000000..a34fe539 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/depends.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete + +class Require: + def __init__( + self, + name, + requested_version, + module, + homepage: str = ..., + attribute: Incomplete | None = ..., + format: Incomplete | None = ..., + ) -> None: ... + def full_name(self): ... + def version_ok(self, version): ... + def get_version(self, paths: Incomplete | None = ..., default: str = ...): ... + def is_present(self, paths: Incomplete | None = ...): ... + def is_current(self, paths: Incomplete | None = ...): ... + +def get_module_constant(module, symbol, default: int = ..., paths: Incomplete | None = ...): ... +def extract_constant(code, symbol, default: int = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/dist.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/dist.pyi new file mode 100644 index 00000000..b47795f8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/dist.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete +from distutils.core import Distribution as _Distribution + +from setuptools import SetuptoolsDeprecationWarning + +class Distribution(_Distribution): + def patch_missing_pkg_info(self, attrs) -> None: ... + package_data: Incomplete + dist_files: Incomplete + src_root: Incomplete + dependency_links: Incomplete + setup_requires: Incomplete + def __init__(self, attrs: Incomplete | None = ...) -> None: ... + def warn_dash_deprecation(self, opt, section): ... + def make_option_lowercase(self, opt, section): ... + def parse_config_files(self, filenames: Incomplete | None = ..., ignore_option_errors: bool = ...) -> None: ... + def fetch_build_eggs(self, requires): ... + def finalize_options(self): ... + def get_egg_cache_dir(self): ... + def fetch_build_egg(self, req): ... + def get_command_class(self, command): ... + def print_commands(self): ... + def get_command_list(self): ... 
+ def include(self, **attrs) -> None: ... + packages: Incomplete + py_modules: Incomplete + ext_modules: Incomplete + def exclude_package(self, package) -> None: ... + def has_contents_for(self, package): ... + def exclude(self, **attrs) -> None: ... + def get_cmdline_options(self): ... + def iter_distribution_names(self) -> None: ... + def handle_display_options(self, option_order): ... + +class DistDeprecationWarning(SetuptoolsDeprecationWarning): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/errors.pyi new file mode 100644 index 00000000..0791eeee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/errors.pyi @@ -0,0 +1,3 @@ +from distutils.errors import DistutilsError + +class RemovedCommandError(DistutilsError, RuntimeError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/extension.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/extension.pyi new file mode 100644 index 00000000..ac7cf1c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/extension.pyi @@ -0,0 +1,10 @@ +from distutils.core import Extension as _Extension +from typing import Any + +have_pyrex: Any + +class Extension(_Extension): + py_limited_api: Any + def __init__(self, name, sources, *args, **kw) -> None: ... + +class Library(Extension): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/extern/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/extern/__init__.pyi new file mode 100644 index 00000000..864dc18d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/extern/__init__.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete +from typing import Any + +class VendorImporter: + root_name: Any + vendored_names: Any + vendor_pkg: Any + def __init__(self, root_name, vendored_names=..., vendor_pkg: Incomplete | None = ...) -> None: ... + @property + def search_path(self) -> None: ... + def load_module(self, fullname): ... + def create_module(self, spec): ... + def exec_module(self, module) -> None: ... + def find_spec(self, fullname, path: Incomplete | None = ..., target: Incomplete | None = ...): ... + def install(self) -> None: ... + +names: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/glob.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/glob.pyi new file mode 100644 index 00000000..3da334de --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/glob.pyi @@ -0,0 +1,3 @@ +def glob(pathname, recursive: bool = ...): ... +def iglob(pathname, recursive: bool = ...): ... +def escape(pathname): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/installer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/installer.pyi new file mode 100644 index 00000000..0d3eaf68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/installer.pyi @@ -0,0 +1,2 @@ +def fetch_build_egg(dist, req): ... +def strip_marker(req): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/launch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/launch.pyi new file mode 100644 index 00000000..b88194ac --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/launch.pyi @@ -0,0 +1 @@ +def run() -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/monkey.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/monkey.pyi new file mode 100644 index 00000000..dca3245d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/monkey.pyi @@ -0,0 +1,6 @@ +def get_unpatched(item): ... +def get_unpatched_class(cls): ... +def patch_all() -> None: ... +def patch_func(replacement, target_mod, func_name) -> None: ... +def get_unpatched_function(candidate): ... +def patch_for_msvc_specialized_compiler(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/msvc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/msvc.pyi new file mode 100644 index 00000000..952f1189 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/msvc.pyi @@ -0,0 +1,135 @@ +from _typeshed import Incomplete + +PLAT_SPEC_TO_RUNTIME: dict[str, str] + +def msvc14_get_vc_env(plat_spec: str) -> dict[str, Incomplete]: ... + +class PlatformInfo: + current_cpu: Incomplete + arch: Incomplete + def __init__(self, arch) -> None: ... + @property + def target_cpu(self): ... + def target_is_x86(self): ... + def current_is_x86(self): ... + def current_dir(self, hidex86: bool = ..., x64: bool = ...): ... + def target_dir(self, hidex86: bool = ..., x64: bool = ...): ... + def cross_dir(self, forcex86: bool = ...): ... + +class RegistryInfo: + HKEYS: Incomplete + pi: Incomplete + def __init__(self, platform_info) -> None: ... + @property + def visualstudio(self): ... + @property + def sxs(self): ... + @property + def vc(self): ... + @property + def vs(self): ... + @property + def vc_for_python(self): ... + @property + def microsoft_sdk(self): ... + @property + def windows_sdk(self): ... + @property + def netfx_sdk(self): ... + @property + def windows_kits_roots(self): ... + def microsoft(self, key, x86: bool = ...): ... + def lookup(self, key, name): ... + +class SystemInfo: + WinDir: Incomplete + ProgramFiles: Incomplete + ProgramFilesx86: Incomplete + ri: Incomplete + pi: Incomplete + known_vs_paths: Incomplete + vs_ver: Incomplete + def __init__(self, registry_info, vc_ver: Incomplete | None = ...) -> None: ... + def find_reg_vs_vers(self): ... + def find_programdata_vs_vers(self): ... + @property + def VSInstallDir(self): ... + @property + def VCInstallDir(self): ... 
+ @property + def WindowsSdkVersion(self): ... + @property + def WindowsSdkLastVersion(self): ... + @property + def WindowsSdkDir(self): ... + @property + def WindowsSDKExecutablePath(self): ... + @property + def FSharpInstallDir(self): ... + @property + def UniversalCRTSdkDir(self): ... + @property + def UniversalCRTSdkLastVersion(self): ... + @property + def NetFxSdkVersion(self): ... + @property + def NetFxSdkDir(self): ... + @property + def FrameworkDir32(self): ... + @property + def FrameworkDir64(self): ... + @property + def FrameworkVersion32(self): ... + @property + def FrameworkVersion64(self): ... + +class EnvironmentInfo: + pi: Incomplete + ri: Incomplete + si: Incomplete + def __init__(self, arch, vc_ver: Incomplete | None = ..., vc_min_ver: int = ...) -> None: ... + @property + def vs_ver(self): ... + @property + def vc_ver(self): ... + @property + def VSTools(self): ... + @property + def VCIncludes(self): ... + @property + def VCLibraries(self): ... + @property + def VCStoreRefs(self): ... + @property + def VCTools(self): ... + @property + def OSLibraries(self): ... + @property + def OSIncludes(self): ... + @property + def OSLibpath(self): ... + @property + def SdkTools(self): ... + @property + def SdkSetup(self): ... + @property + def FxTools(self): ... + @property + def NetFxSDKLibraries(self): ... + @property + def NetFxSDKIncludes(self): ... + @property + def VsTDb(self): ... + @property + def MSBuild(self): ... + @property + def HTMLHelpWorkshop(self): ... + @property + def UCRTLibraries(self): ... + @property + def UCRTIncludes(self): ... + @property + def FSharp(self): ... + @property + def VCRuntimeRedist(self): ... + def return_env(self, exists: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/namespaces.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/namespaces.pyi new file mode 100644 index 00000000..b85fcec7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/namespaces.pyi @@ -0,0 +1,10 @@ +from typing import Any + +flatten: Any + +class Installer: + nspkg_ext: str + def install_namespaces(self) -> None: ... + def uninstall_namespaces(self) -> None: ... + +class DevelopInstaller(Installer): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/package_index.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/package_index.pyi new file mode 100644 index 00000000..f672a104 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/package_index.pyi @@ -0,0 +1,85 @@ +import configparser +from _typeshed import Incomplete +from typing import Any + +from pkg_resources import Environment + +def parse_bdist_wininst(name): ... +def distros_for_url(url, metadata: Incomplete | None = ...) -> None: ... +def interpret_distro_name( + location, basename, metadata, py_version: Incomplete | None = ..., precedence=..., platform: Incomplete | None = ... +) -> None: ... + +class ContentChecker: + def feed(self, block) -> None: ... + def is_valid(self): ... + def report(self, reporter, template) -> None: ... + +class HashChecker(ContentChecker): + pattern: Any + hash_name: Any + hash: Any + expected: Any + def __init__(self, hash_name, expected) -> None: ... + @classmethod + def from_url(cls, url): ... 
+ def feed(self, block) -> None: ... + def is_valid(self): ... + def report(self, reporter, template): ... + +class PackageIndex(Environment): + index_url: Any + scanned_urls: Any + fetched_urls: Any + package_pages: Any + allows: Any + to_scan: Any + opener: Any + def __init__( + self, index_url: str = ..., hosts=..., ca_bundle: Incomplete | None = ..., verify_ssl: bool = ..., *args, **kw + ) -> None: ... + def process_url(self, url, retrieve: bool = ...) -> None: ... + def process_filename(self, fn, nested: bool = ...) -> None: ... + def url_ok(self, url, fatal: bool = ...): ... + def scan_egg_links(self, search_path) -> None: ... + def scan_egg_link(self, path, entry) -> None: ... + def process_index(self, url, page): ... + def need_version_info(self, url) -> None: ... + def scan_all(self, msg: Incomplete | None = ..., *args) -> None: ... + def find_packages(self, requirement) -> None: ... + def obtain(self, requirement, installer: Incomplete | None = ...): ... + def check_hash(self, checker, filename, tfp) -> None: ... + def add_find_links(self, urls) -> None: ... + def prescan(self) -> None: ... + def not_found_in_index(self, requirement) -> None: ... + def download(self, spec, tmpdir): ... + def fetch_distribution( + self, + requirement, + tmpdir, + force_scan: bool = ..., + source: bool = ..., + develop_ok: bool = ..., + local_index: Incomplete | None = ..., + ): ... + def fetch(self, requirement, tmpdir, force_scan: bool = ..., source: bool = ...): ... + def gen_setup(self, filename, fragment, tmpdir): ... + dl_blocksize: int + def reporthook(self, url, filename, blocknum, blksize, size) -> None: ... + def open_url(self, url, warning: Incomplete | None = ...): ... + def scan_url(self, url) -> None: ... + def debug(self, msg, *args) -> None: ... + def info(self, msg, *args) -> None: ... + def warn(self, msg, *args) -> None: ... + +class Credential: + username: Any + password: Any + def __init__(self, username, password) -> None: ... + def __iter__(self): ... + +class PyPIConfig(configparser.RawConfigParser): + def __init__(self) -> None: ... + @property + def creds_by_repository(self): ... + def find_credential(self, url): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/sandbox.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/sandbox.pyi new file mode 100644 index 00000000..994ff292 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/sandbox.pyi @@ -0,0 +1,27 @@ +from distutils.errors import DistutilsError +from typing import Any + +class UnpickleableException(Exception): + @staticmethod + def dump(type, exc): ... + +class ExceptionSaver: + def __enter__(self): ... + def __exit__(self, type, exc, tb): ... + def resume(self) -> None: ... + +def run_setup(setup_script, args): ... + +class AbstractSandbox: + def __enter__(self) -> None: ... + def __exit__(self, exc_type, exc_value, traceback) -> None: ... + def run(self, func): ... + +class DirectorySandbox(AbstractSandbox): + write_ops: Any + def __init__(self, sandbox, exceptions=...) -> None: ... + def tmpnam(self) -> None: ... + def open(self, file, flags, mode: int = ..., *args, **kw): ... 
+ +class SandboxViolation(DistutilsError): + tmpl: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/unicode_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/unicode_utils.pyi new file mode 100644 index 00000000..6c799c8b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/unicode_utils.pyi @@ -0,0 +1,3 @@ +def decompose(path): ... +def filesys_decode(path): ... +def try_encode(string, enc): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/version.pyi new file mode 100644 index 00000000..bda5b5a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/version.pyi @@ -0,0 +1 @@ +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/wheel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/wheel.pyi new file mode 100644 index 00000000..fe8d564a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/wheel.pyi @@ -0,0 +1,15 @@ +from typing import Any + +WHEEL_NAME: Any +NAMESPACE_PACKAGE_INIT: str + +def unpack(src_dir, dst_dir) -> None: ... + +class Wheel: + filename: Any + def __init__(self, filename) -> None: ... + def tags(self): ... + def is_compatible(self): ... + def egg_name(self): ... + def get_dist_info(self, zf): ... + def install_as_egg(self, destination_eggdir) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/windows_support.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/windows_support.pyi new file mode 100644 index 00000000..ec0c7586 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/setuptools/setuptools/windows_support.pyi @@ -0,0 +1,2 @@ +def windows_only(func): ... +def hide_file(path) -> None: ... 
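The setuptools command stubs above all declare the same three-method protocol (initialize_options / finalize_options / run) on setuptools.Command. As a point of reference, a minimal custom command consistent with that declared interface might look like the sketch below; the command name `hello` and its option are illustrative only, and it assumes setuptools itself is installed.

from setuptools import Command

class hello(Command):
    # illustrative command matching the stubbed Command interface
    description = "print a greeting"
    user_options = [("name=", None, "who to greet")]

    def initialize_options(self) -> None:
        # set a default for every option listed in user_options
        self.name = "world"

    def finalize_options(self) -> None:
        # validate / post-process options after the command line is parsed
        assert isinstance(self.name, str)

    def run(self) -> None:
        print(f"hello, {self.name}")

Registering it via cmdclass={"hello": hello} in setup() would let it run as `python setup.py hello --name=typeshed`.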
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..d3000e8e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/@tests/stubtest_allowlist.txt @@ -0,0 +1,14 @@ +simplejson.JSONDecodeError.__init__ +simplejson.JSONDecoder.__init__ +simplejson.JSONDecoder.decode +simplejson.JSONDecoder.raw_decode +simplejson.JSONEncoder.__init__ +simplejson.decoder.JSONDecoder.__init__ +simplejson.decoder.JSONDecoder.decode +simplejson.decoder.JSONDecoder.raw_decode +simplejson.dump +simplejson.dumps +simplejson.encoder.JSONEncoder.__init__ +simplejson.load +simplejson.loads +simplejson.scanner.JSONDecodeError.__init__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/METADATA.toml new file mode 100644 index 00000000..5a55c100 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/METADATA.toml @@ -0,0 +1,4 @@ +version = "3.18.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/__init__.pyi new file mode 100644 index 00000000..5cfcc6d4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/__init__.pyi @@ -0,0 +1,14 @@ +from typing import IO, Any +from typing_extensions import TypeAlias + +from simplejson.decoder import JSONDecoder as JSONDecoder +from simplejson.encoder import JSONEncoder as JSONEncoder, JSONEncoderForHTML as JSONEncoderForHTML +from simplejson.raw_json import RawJSON as RawJSON +from simplejson.scanner import JSONDecodeError as JSONDecodeError + +_LoadsString: TypeAlias = str | bytes | bytearray + +def dumps(obj: Any, *args: Any, **kwds: Any) -> str: ... +def dump(obj: Any, fp: IO[str], *args: Any, **kwds: Any) -> None: ... +def loads(s: _LoadsString, **kwds: Any) -> Any: ... +def load(fp: IO[str], **kwds: Any) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/decoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/decoder.pyi new file mode 100644 index 00000000..d2b1ac14 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/decoder.pyi @@ -0,0 +1,7 @@ +from re import Match +from typing import Any + +class JSONDecoder: + def __init__(self, **kwargs: Any) -> None: ... + def decode(self, s: str, _w: Match[str], _PY3: bool) -> Any: ... + def raw_decode(self, s: str, idx: int, _w: Match[str], _PY3: bool) -> tuple[Any, int]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/encoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/encoder.pyi new file mode 100644 index 00000000..a3603029 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/encoder.pyi @@ -0,0 +1,10 @@ +from collections.abc import Iterator +from typing import Any, NoReturn + +class JSONEncoder: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def encode(self, o: Any) -> str: ... + def default(self, o: Any) -> NoReturn: ... + def iterencode(self, o: Any, _one_shot: bool = ...) -> Iterator[str]: ... + +class JSONEncoderForHTML(JSONEncoder): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/errors.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/errors.pyi new file mode 100644 index 00000000..10cff3f2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/errors.pyi @@ -0,0 +1,16 @@ +__all__ = ["JSONDecodeError"] + +def linecol(doc: str, pos: int) -> tuple[int, int]: ... +def errmsg(msg: str, doc: str, pos: int, end: int | None = ...) -> str: ... + +class JSONDecodeError(ValueError): + msg: str + doc: str + pos: int + end: int | None + lineno: int + colno: int + endlineno: int | None + endcolno: int | None + def __init__(self, msg: str, doc: str, pos: int, end: int | None = ...) -> None: ... + def __reduce__(self) -> tuple[JSONDecodeError, tuple[str, str, int, int | None]]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/raw_json.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/raw_json.pyi new file mode 100644 index 00000000..bacd7550 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/raw_json.pyi @@ -0,0 +1,3 @@ +class RawJSON: + encoded_json: str + def __init__(self, encoded_json: str) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/scanner.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/scanner.pyi new file mode 100644 index 00000000..42b828a6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/simplejson/simplejson/scanner.pyi @@ -0,0 +1,9 @@ +class JSONDecodeError(ValueError): + msg: str = ... + doc: str = ... + pos: int = ... + end: int | None = ... + lineno: int = ... + colno: int = ... + endlineno: int | None = ... + endcolno: int | None = ... 
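These simplejson stubs only declare the public surface (dumps/dump/loads/load plus the decoder, encoder, and error classes). A minimal usage sketch consistent with those signatures, assuming the simplejson package itself is installed and with illustrative data, is:

import simplejson

payload = {"benchmark": "typeshed_stats", "runs": 3}
encoded: str = simplejson.dumps(payload)      # dumps(obj, ...) -> str
decoded = simplejson.loads(encoded)           # loads(str | bytes | bytearray, ...) -> Any
assert decoded == payload

try:
    simplejson.loads("{not json}")
except simplejson.JSONDecodeError as exc:     # re-exported in __init__.pyi
    print(exc.msg, exc.lineno, exc.colno)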
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/singledispatch/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/singledispatch/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..b47df9e9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/singledispatch/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# Internal utils, we are not interested in them: +singledispatch.helpers diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/singledispatch/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/singledispatch/METADATA.toml new file mode 100644 index 00000000..3188e8fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/singledispatch/METADATA.toml @@ -0,0 +1 @@ +version = "4.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/singledispatch/singledispatch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/singledispatch/singledispatch.pyi new file mode 100644 index 00000000..bd367147 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/singledispatch/singledispatch.pyi @@ -0,0 +1,31 @@ +from collections.abc import Callable, Mapping +from typing import Any, Generic, TypeVar, overload + +_T = TypeVar("_T") +_S = TypeVar("_S") + +class _SingleDispatchCallable(Generic[_T]): + registry: Mapping[Any, Callable[..., _T]] + def dispatch(self, cls: Any) -> Callable[..., _T]: ... + @overload + def register(self, cls: Any) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + @overload + def register(self, cls: Any, func: Callable[..., _T]) -> Callable[..., _T]: ... + def _clear_cache(self) -> None: ... + def __call__(self, *args: Any, **kwargs: Any) -> _T: ... + +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... + +class singledispatchmethod(Generic[_T]): + dispatcher: _SingleDispatchCallable[_T] + func: Callable[..., _T] + def __init__(self, func: Callable[..., _T]) -> None: ... + @property + def __isabstractmethod__(self) -> bool: ... + @overload + def register(self, cls: type[Any], method: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + @overload + def register(self, cls: Callable[..., _T], method: None = ...) -> Callable[..., _T]: ... + @overload + def register(self, cls: type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... + def __get__(self, obj: _S, cls: type[_S] | None = ...) -> Callable[..., _T]: ... 
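The singledispatch stub above types the backport's decorator as returning a _SingleDispatchCallable with register and dispatch methods. A minimal sketch of that API, assuming the third-party singledispatch backport is installed (the function names are illustrative):

from singledispatch import singledispatch

@singledispatch
def describe(value: object) -> str:          # fallback implementation
    return f"object: {value!r}"

@describe.register(int)                      # register(cls) returns a decorator
def _(value: int) -> str:
    return f"int: {value}"

print(describe("x"))   # falls back to the object implementation
print(describe(3))     # dispatches on type(3), i.e. describe.dispatch(int)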
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..68a54f47 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/@tests/stubtest_allowlist.txt @@ -0,0 +1,35 @@ +# Problems inherited from the standard library +six.BytesIO.readlines +six.BytesIO.seek +six.StringIO.seek +six.StringIO.truncate +six.create_bound_method.__closure__ +six.create_bound_method.__defaults__ +six.moves.* + +# Implemented using "operator" functions in the implementation +six.get_function_closure +six.get_function_code +six.get_function_defaults +six.get_function_globals +six.get_method_function +six.get_method_self +six.viewitems +six.viewkeys +six.viewvalues +# Should be `operator.itemgetter[int]`. But a bug in mypy prevents using TypeVar in itemgetter__call__ +six.byte2int + +# Utils +six.Module_six_moves_urllib +six.Module_six_moves_urllib_error +six.Module_six_moves_urllib_parse +six.Module_six_moves_urllib_request +six.Module_six_moves_urllib_response +six.Module_six_moves_urllib_robotparser + +# Belongs to `django.utils.six` +six.iterlists + +# Unclear problems +six.callable diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/METADATA.toml new file mode 100644 index 00000000..7aac8045 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/METADATA.toml @@ -0,0 +1 @@ +version = "1.16.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/__init__.pyi new file mode 100644 index 00000000..c1977c05 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/__init__.pyi @@ -0,0 +1,115 @@ +import builtins +import operator +import types +import unittest +from _typeshed import IdentityFunction, Unused, _KT_contra, _VT_co +from builtins import next as next +from collections.abc import Callable, ItemsView, Iterable, Iterator as _Iterator, KeysView, Mapping, ValuesView +from functools import wraps as wraps +from importlib.util import spec_from_loader as spec_from_loader +from io import BytesIO as BytesIO, StringIO as StringIO +from re import Pattern +from typing import Any, AnyStr, NoReturn, Protocol, TypeVar, overload +from typing_extensions import Literal + +from six import moves as moves + +# TODO: We should switch to the _typeshed version of SupportsGetItem +# once mypy updates its vendored copy of typeshed and makes a new release +class _SupportsGetItem(Protocol[_KT_contra, _VT_co]): + def __contains__(self, __x: Any) -> bool: ... + def __getitem__(self, __key: _KT_contra) -> _VT_co: ... + +_T = TypeVar("_T") +_K = TypeVar("_K") +_V = TypeVar("_V") + +__author__: str +__version__: str + +PY2: Literal[False] +PY3: Literal[True] +PY34: Literal[True] + +string_types: tuple[type[str]] +integer_types: tuple[type[int]] +class_types: tuple[type[type]] +text_type = str +binary_type = bytes + +MAXSIZE: int + +callable = builtins.callable + +def get_unbound_function(unbound: types.FunctionType) -> types.FunctionType: ... 
+ +create_bound_method = types.MethodType + +def create_unbound_method(func: types.FunctionType, cls: type) -> types.FunctionType: ... + +Iterator = object + +def get_method_function(meth: types.MethodType) -> types.FunctionType: ... +def get_method_self(meth: types.MethodType) -> object | None: ... +def get_function_closure(fun: types.FunctionType) -> tuple[types._Cell, ...] | None: ... +def get_function_code(fun: types.FunctionType) -> types.CodeType: ... +def get_function_defaults(fun: types.FunctionType) -> tuple[Any, ...] | None: ... +def get_function_globals(fun: types.FunctionType) -> dict[str, Any]: ... +def iterkeys(d: Mapping[_K, Any]) -> _Iterator[_K]: ... +def itervalues(d: Mapping[Any, _V]) -> _Iterator[_V]: ... +def iteritems(d: Mapping[_K, _V]) -> _Iterator[tuple[_K, _V]]: ... +def viewkeys(d: Mapping[_K, Any]) -> KeysView[_K]: ... +def viewvalues(d: Mapping[Any, _V]) -> ValuesView[_V]: ... +def viewitems(d: Mapping[_K, _V]) -> ItemsView[_K, _V]: ... +def b(s: str) -> bytes: ... +def u(s: str) -> str: ... + +unichr = chr + +def int2byte(i: int) -> bytes: ... + +# Should be `byte2int: operator.itemgetter[int]`. But a bug in mypy prevents using TypeVar in itemgetter.__call__ +def byte2int(obj: _SupportsGetItem[int, _T]) -> _T: ... + +indexbytes = operator.getitem +iterbytes = iter + +def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: str | None = ...) -> None: ... +@overload +def assertRaisesRegex(self: unittest.TestCase, msg: str | None = ...) -> Any: ... +@overload +def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., object], *args: Any, **kwargs: Any) -> Any: ... +def assertRegex(self: unittest.TestCase, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... +def assertNotRegex(self: unittest.TestCase, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... + +exec_ = exec + +def reraise(tp: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None = ...) -> NoReturn: ... +def raise_from(value: BaseException | type[BaseException], from_value: BaseException | None) -> NoReturn: ... + +print_ = print + +def with_metaclass(meta: type, *bases: type) -> type: ... +def add_metaclass(metaclass: type) -> IdentityFunction: ... +def ensure_binary(s: bytes | str, encoding: str = ..., errors: str = ...) -> bytes: ... +def ensure_str(s: bytes | str, encoding: str = ..., errors: str = ...) -> str: ... +def ensure_text(s: bytes | str, encoding: str = ..., errors: str = ...) -> str: ... +def python_2_unicode_compatible(klass: _T) -> _T: ... + +class _LazyDescr: + name: str + def __init__(self, name: str) -> None: ... + def __get__(self, obj: object, tp: Unused) -> Any: ... + +class MovedModule(_LazyDescr): + mod: str + def __init__(self, name: str, old: str, new: str | None = ...) -> None: ... + def __getattr__(self, attr: str) -> Any: ... + +class MovedAttribute(_LazyDescr): + mod: str + attr: str + def __init__(self, name: str, old_mod: str, new_mod: str, old_attr: str | None = ..., new_attr: str | None = ...) -> None: ... + +def add_move(move: MovedModule | MovedAttribute) -> None: ... +def remove_move(name: str) -> None: ... 
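A few of the six helpers typed above, exercised in a minimal sketch; it assumes the six package is installed, and the sample values are illustrative:

import six

data = {"a": 1, "b": 2}
for key, value in six.iteritems(data):        # Mapping[_K, _V] -> Iterator[tuple[_K, _V]]
    print(key, value)

raw: bytes = six.ensure_binary("π", encoding="utf-8")
text: str = six.ensure_text(raw, encoding="utf-8")
assert text == "π" and six.PY3 and isinstance(six.MAXSIZE, int)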
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/BaseHTTPServer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/BaseHTTPServer.pyi new file mode 100644 index 00000000..0e1ad713 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/BaseHTTPServer.pyi @@ -0,0 +1 @@ +from http.server import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/CGIHTTPServer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/CGIHTTPServer.pyi new file mode 100644 index 00000000..0e1ad713 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/CGIHTTPServer.pyi @@ -0,0 +1 @@ +from http.server import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/SimpleHTTPServer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/SimpleHTTPServer.pyi new file mode 100644 index 00000000..0e1ad713 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/SimpleHTTPServer.pyi @@ -0,0 +1 @@ +from http.server import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/__init__.pyi new file mode 100644 index 00000000..ae64ae67 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/__init__.pyi @@ -0,0 +1,65 @@ +# Stubs for six.moves +# +# Note: Commented out items means they weren't implemented at the time. +# Uncomment them when the modules have been added to the typeshed. +import importlib +import shlex +from builtins import filter as filter, input as input, map as map, range as range, zip as zip +from collections import UserDict as UserDict, UserList as UserList, UserString as UserString +from functools import reduce as reduce +from io import StringIO as StringIO +from itertools import filterfalse as filterfalse, zip_longest as zip_longest +from os import getcwd as getcwd, getcwdb as getcwdb +from sys import intern as intern + +# import tkinter.font as tkinter_font +# import tkinter.messagebox as tkinter_messagebox +# import tkinter.simpledialog as tkinter_tksimpledialog +# import tkinter.dnd as tkinter_dnd +# import tkinter.colorchooser as tkinter_colorchooser +# import tkinter.scrolledtext as tkinter_scrolledtext +# import tkinter.simpledialog as tkinter_simpledialog +# import tkinter.tix as tkinter_tix +# import dbm.gnu as dbm_gnu +from . 
import ( + BaseHTTPServer as BaseHTTPServer, + CGIHTTPServer as CGIHTTPServer, + SimpleHTTPServer as SimpleHTTPServer, + _dummy_thread as _dummy_thread, + _thread as _thread, + builtins as builtins, + configparser as configparser, + copyreg as copyreg, + cPickle as cPickle, + email_mime_base as email_mime_base, + email_mime_multipart as email_mime_multipart, + email_mime_nonmultipart as email_mime_nonmultipart, + email_mime_text as email_mime_text, + html_entities as html_entities, + html_parser as html_parser, + http_client as http_client, + http_cookiejar as http_cookiejar, + http_cookies as http_cookies, + queue as queue, + reprlib as reprlib, + socketserver as socketserver, + tkinter as tkinter, + tkinter_commondialog as tkinter_commondialog, + tkinter_constants as tkinter_constants, + tkinter_dialog as tkinter_dialog, + tkinter_filedialog as tkinter_filedialog, + tkinter_tkfiledialog as tkinter_tkfiledialog, + tkinter_ttk as tkinter_ttk, + urllib as urllib, + urllib_error as urllib_error, + urllib_parse as urllib_parse, + urllib_robotparser as urllib_robotparser, +) + +# import xmlrpc.client as xmlrpc_client +# import xmlrpc.server as xmlrpc_server + +xrange = range +reload_module = importlib.reload +cStringIO = StringIO +shlex_quote = shlex.quote diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/_dummy_thread.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/_dummy_thread.pyi new file mode 100644 index 00000000..410232d0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/_dummy_thread.pyi @@ -0,0 +1,6 @@ +import sys + +if sys.version_info >= (3, 9): + from _thread import * +else: + from _dummy_thread import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/_thread.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/_thread.pyi new file mode 100644 index 00000000..25952a61 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/_thread.pyi @@ -0,0 +1 @@ +from _thread import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/builtins.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/builtins.pyi new file mode 100644 index 00000000..9596ba03 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/builtins.pyi @@ -0,0 +1 @@ +from builtins import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/cPickle.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/cPickle.pyi new file mode 100644 index 00000000..2b944b59 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/cPickle.pyi @@ -0,0 +1 @@ +from pickle import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/collections_abc.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/collections_abc.pyi new file mode 100644 index 00000000..dba0f153 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/collections_abc.pyi @@ -0,0 +1 @@ +from collections.abc import * diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/configparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/configparser.pyi new file mode 100644 index 00000000..3367dbd1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/configparser.pyi @@ -0,0 +1,3 @@ +# Error is not included in __all__ so export it explicitly +from configparser import * +from configparser import Error as Error diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/copyreg.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/copyreg.pyi new file mode 100644 index 00000000..1848b74c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/copyreg.pyi @@ -0,0 +1 @@ +from copyreg import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_base.pyi new file mode 100644 index 00000000..4df155c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_base.pyi @@ -0,0 +1 @@ +from email.mime.base import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_multipart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_multipart.pyi new file mode 100644 index 00000000..4f312412 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_multipart.pyi @@ -0,0 +1 @@ +from email.mime.multipart import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_nonmultipart.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_nonmultipart.pyi new file mode 100644 index 00000000..c15c8c04 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_nonmultipart.pyi @@ -0,0 +1 @@ +from email.mime.nonmultipart import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_text.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_text.pyi new file mode 100644 index 00000000..51e14738 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/email_mime_text.pyi @@ -0,0 +1 @@ +from email.mime.text import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/html_entities.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/html_entities.pyi new file mode 100644 index 00000000..c1244ddb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/html_entities.pyi @@ -0,0 +1 @@ +from html.entities import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/html_parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/html_parser.pyi new file mode 100644 index 00000000..6db6dd83 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/html_parser.pyi @@ -0,0 +1 @@ +from html.parser import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/http_client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/http_client.pyi new file mode 100644 index 00000000..ecfce42f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/http_client.pyi @@ -0,0 +1,61 @@ +# Many definitions are not included in http.client.__all__ +from http.client import * +from http.client import ( + ACCEPTED as ACCEPTED, + BAD_GATEWAY as BAD_GATEWAY, + BAD_REQUEST as BAD_REQUEST, + CONFLICT as CONFLICT, + CONTINUE as CONTINUE, + CREATED as CREATED, + EXPECTATION_FAILED as EXPECTATION_FAILED, + FAILED_DEPENDENCY as FAILED_DEPENDENCY, + FORBIDDEN as FORBIDDEN, + FOUND as FOUND, + GATEWAY_TIMEOUT as GATEWAY_TIMEOUT, + GONE as GONE, + HTTP_PORT as HTTP_PORT, + HTTP_VERSION_NOT_SUPPORTED as HTTP_VERSION_NOT_SUPPORTED, + HTTPS_PORT as HTTPS_PORT, + IM_USED as IM_USED, + INSUFFICIENT_STORAGE as INSUFFICIENT_STORAGE, + INTERNAL_SERVER_ERROR as INTERNAL_SERVER_ERROR, + LENGTH_REQUIRED as LENGTH_REQUIRED, + LOCKED as LOCKED, + METHOD_NOT_ALLOWED as METHOD_NOT_ALLOWED, + MOVED_PERMANENTLY as MOVED_PERMANENTLY, + MULTI_STATUS as MULTI_STATUS, + MULTIPLE_CHOICES as MULTIPLE_CHOICES, + NETWORK_AUTHENTICATION_REQUIRED as NETWORK_AUTHENTICATION_REQUIRED, + NO_CONTENT as NO_CONTENT, + NON_AUTHORITATIVE_INFORMATION as NON_AUTHORITATIVE_INFORMATION, + NOT_ACCEPTABLE as NOT_ACCEPTABLE, + NOT_EXTENDED as NOT_EXTENDED, + NOT_FOUND as NOT_FOUND, + NOT_IMPLEMENTED as NOT_IMPLEMENTED, + NOT_MODIFIED as NOT_MODIFIED, + OK as OK, + PARTIAL_CONTENT as PARTIAL_CONTENT, + PAYMENT_REQUIRED as PAYMENT_REQUIRED, + PRECONDITION_FAILED as PRECONDITION_FAILED, + PRECONDITION_REQUIRED as PRECONDITION_REQUIRED, + PROCESSING as PROCESSING, + PROXY_AUTHENTICATION_REQUIRED as PROXY_AUTHENTICATION_REQUIRED, + REQUEST_ENTITY_TOO_LARGE as REQUEST_ENTITY_TOO_LARGE, + REQUEST_HEADER_FIELDS_TOO_LARGE as REQUEST_HEADER_FIELDS_TOO_LARGE, + REQUEST_TIMEOUT as REQUEST_TIMEOUT, + REQUEST_URI_TOO_LONG as REQUEST_URI_TOO_LONG, + REQUESTED_RANGE_NOT_SATISFIABLE as REQUESTED_RANGE_NOT_SATISFIABLE, + RESET_CONTENT as RESET_CONTENT, + SEE_OTHER as SEE_OTHER, + SERVICE_UNAVAILABLE as SERVICE_UNAVAILABLE, + SWITCHING_PROTOCOLS as SWITCHING_PROTOCOLS, + TEMPORARY_REDIRECT as TEMPORARY_REDIRECT, + TOO_MANY_REQUESTS as TOO_MANY_REQUESTS, + UNAUTHORIZED as UNAUTHORIZED, + UNPROCESSABLE_ENTITY as UNPROCESSABLE_ENTITY, + UNSUPPORTED_MEDIA_TYPE as UNSUPPORTED_MEDIA_TYPE, + UPGRADE_REQUIRED as UPGRADE_REQUIRED, + USE_PROXY as USE_PROXY, + HTTPMessage as HTTPMessage, + parse_headers as parse_headers, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/http_cookiejar.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/http_cookiejar.pyi new file mode 100644 index 00000000..88a1aed6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/http_cookiejar.pyi @@ -0,0 +1 @@ +from http.cookiejar import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/http_cookies.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/http_cookies.pyi new file mode 100644 index 
00000000..1e168c8e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/http_cookies.pyi @@ -0,0 +1,3 @@ +# Morsel is not included in __all__ so export it explicitly +from http.cookies import * +from http.cookies import Morsel as Morsel diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/queue.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/queue.pyi new file mode 100644 index 00000000..fe7be53a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/queue.pyi @@ -0,0 +1 @@ +from queue import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/reprlib.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/reprlib.pyi new file mode 100644 index 00000000..c329846f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/reprlib.pyi @@ -0,0 +1 @@ +from reprlib import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/socketserver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/socketserver.pyi new file mode 100644 index 00000000..6101c8bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/socketserver.pyi @@ -0,0 +1 @@ +from socketserver import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter.pyi new file mode 100644 index 00000000..fc4d53a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter.pyi @@ -0,0 +1 @@ +from tkinter import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_commondialog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_commondialog.pyi new file mode 100644 index 00000000..34eb4196 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_commondialog.pyi @@ -0,0 +1 @@ +from tkinter.commondialog import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_constants.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_constants.pyi new file mode 100644 index 00000000..3c04f6d8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_constants.pyi @@ -0,0 +1 @@ +from tkinter.constants import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_dialog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_dialog.pyi new file mode 100644 index 00000000..0da73c27 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_dialog.pyi @@ -0,0 +1 @@ +from tkinter.dialog import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_filedialog.pyi 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_filedialog.pyi new file mode 100644 index 00000000..c4cc7c48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_filedialog.pyi @@ -0,0 +1 @@ +from tkinter.filedialog import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_tkfiledialog.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_tkfiledialog.pyi new file mode 100644 index 00000000..c4cc7c48 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_tkfiledialog.pyi @@ -0,0 +1 @@ +from tkinter.filedialog import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_ttk.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_ttk.pyi new file mode 100644 index 00000000..14576f61 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/tkinter_ttk.pyi @@ -0,0 +1 @@ +from tkinter.ttk import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/__init__.pyi new file mode 100644 index 00000000..fa6dc977 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/__init__.pyi @@ -0,0 +1 @@ +from six.moves.urllib import error as error, parse as parse, request as request, response as response, robotparser as robotparser diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/error.pyi new file mode 100644 index 00000000..4e10fe2f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/error.pyi @@ -0,0 +1 @@ +from urllib.error import ContentTooShortError as ContentTooShortError, HTTPError as HTTPError, URLError as URLError diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/parse.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/parse.pyi new file mode 100644 index 00000000..20adc639 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/parse.pyi @@ -0,0 +1,30 @@ +# Stubs for six.moves.urllib.parse +# +# Note: Commented out items means they weren't implemented at the time. +# Uncomment them when the modules have been added to the typeshed. 
+# from urllib.parse import splitquery as splitquery +# from urllib.parse import splittag as splittag +# from urllib.parse import splituser as splituser +from urllib.parse import ( + ParseResult as ParseResult, + SplitResult as SplitResult, + parse_qs as parse_qs, + parse_qsl as parse_qsl, + quote as quote, + quote_plus as quote_plus, + unquote as unquote, + unquote_plus as unquote_plus, + unquote_to_bytes as unquote_to_bytes, + urldefrag as urldefrag, + urlencode as urlencode, + urljoin as urljoin, + urlparse as urlparse, + urlsplit as urlsplit, + urlunparse as urlunparse, + urlunsplit as urlunsplit, + uses_fragment as uses_fragment, + uses_netloc as uses_netloc, + uses_params as uses_params, + uses_query as uses_query, + uses_relative as uses_relative, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/request.pyi new file mode 100644 index 00000000..9b670b4d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/request.pyi @@ -0,0 +1,41 @@ +# Stubs for six.moves.urllib.request +# +# Note: Commented out items means they weren't implemented at the time. +# Uncomment them when the modules have been added to the typeshed. +# from urllib.request import proxy_bypass as proxy_bypass +from urllib.request import ( + AbstractBasicAuthHandler as AbstractBasicAuthHandler, + AbstractDigestAuthHandler as AbstractDigestAuthHandler, + BaseHandler as BaseHandler, + CacheFTPHandler as CacheFTPHandler, + FancyURLopener as FancyURLopener, + FileHandler as FileHandler, + FTPHandler as FTPHandler, + HTTPBasicAuthHandler as HTTPBasicAuthHandler, + HTTPCookieProcessor as HTTPCookieProcessor, + HTTPDefaultErrorHandler as HTTPDefaultErrorHandler, + HTTPDigestAuthHandler as HTTPDigestAuthHandler, + HTTPErrorProcessor as HTTPErrorProcessor, + HTTPHandler as HTTPHandler, + HTTPPasswordMgr as HTTPPasswordMgr, + HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm, + HTTPRedirectHandler as HTTPRedirectHandler, + HTTPSHandler as HTTPSHandler, + OpenerDirector as OpenerDirector, + ProxyBasicAuthHandler as ProxyBasicAuthHandler, + ProxyDigestAuthHandler as ProxyDigestAuthHandler, + ProxyHandler as ProxyHandler, + Request as Request, + UnknownHandler as UnknownHandler, + URLopener as URLopener, + build_opener as build_opener, + getproxies as getproxies, + install_opener as install_opener, + parse_http_list as parse_http_list, + parse_keqv_list as parse_keqv_list, + pathname2url as pathname2url, + url2pathname as url2pathname, + urlcleanup as urlcleanup, + urlopen as urlopen, + urlretrieve as urlretrieve, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/response.pyi new file mode 100644 index 00000000..9f681ea3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/response.pyi @@ -0,0 +1,8 @@ +# Stubs for six.moves.urllib.response +# +# Note: Commented out items means they weren't implemented at the time. +# Uncomment them when the modules have been added to the typeshed. 
+# from urllib.response import addbase as addbase +# from urllib.response import addclosehook as addclosehook +# from urllib.response import addinfo as addinfo +from urllib.response import addinfourl as addinfourl diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/robotparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/robotparser.pyi new file mode 100644 index 00000000..bccda14b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib/robotparser.pyi @@ -0,0 +1 @@ +from urllib.robotparser import RobotFileParser as RobotFileParser diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_error.pyi new file mode 100644 index 00000000..27200722 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_error.pyi @@ -0,0 +1 @@ +from urllib.error import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_parse.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_parse.pyi new file mode 100644 index 00000000..b557bbbb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_parse.pyi @@ -0,0 +1 @@ +from urllib.parse import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_request.pyi new file mode 100644 index 00000000..dc03dcec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_request.pyi @@ -0,0 +1 @@ +from .urllib.request import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_response.pyi new file mode 100644 index 00000000..bbee5225 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_response.pyi @@ -0,0 +1 @@ +from .urllib.response import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_robotparser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_robotparser.pyi new file mode 100644 index 00000000..bbf5c3ce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/six/six/moves/urllib_robotparser.pyi @@ -0,0 +1 @@ +from urllib.robotparser import * diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..15230d95 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +slumber.API.__init__ +slumber.Resource.as_raw diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/METADATA.toml 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/METADATA.toml new file mode 100644 index 00000000..55019ecc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/METADATA.toml @@ -0,0 +1,2 @@ +version = "0.7.*" +requires = ["types-requests"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/__init__.pyi new file mode 100644 index 00000000..f35d7855 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/__init__.pyi @@ -0,0 +1,39 @@ +from typing import Any +from typing_extensions import Self + +from requests import Response, Session +from requests.sessions import _Auth, _Data, _Files + +from .serialize import Serializer + +__all__ = ["Resource", "API"] + +class ResourceAttributesMixin: + # Exists at runtime: + def __getattr__(self, item: str) -> Any: ... + +class Resource(ResourceAttributesMixin): + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def __call__(self, id: str | None = ..., format: str | None = ..., url_override: str | None = ...) -> Self: ... + def as_raw(self) -> Self: ... + def get(self, **kwargs: Any) -> Response: ... + def options(self, **kwargs: Any) -> Response: ... + def head(self, **kwargs: Any) -> Response: ... + def post(self, data: _Data | None = ..., files: _Files | None = ..., **kwargs: Any) -> Response: ... + def patch(self, data: _Data | None = ..., files: _Files | None = ..., **kwargs: Any) -> Response: ... + def put(self, data: _Data | None = ..., files: _Files | None = ..., **kwargs: Any) -> Response: ... + def delete(self, **kwargs: Any) -> Response: ... + def url(self) -> str: ... + +class API(ResourceAttributesMixin): + resource_class: type[Resource] + def __init__( + self, + base_url: str | None = ..., + auth: _Auth | None = ..., + format: str | None = ..., + append_slash: bool = ..., + session: Session | None = ..., + serializer: Serializer | None = ..., + raw: bool = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/exceptions.pyi new file mode 100644 index 00000000..20cac92d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/exceptions.pyi @@ -0,0 +1,13 @@ +from typing import Any + +class SlumberBaseException(Exception): ... + +class SlumberHttpBaseException(SlumberBaseException): + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + +class HttpClientError(SlumberHttpBaseException): ... +class HttpNotFoundError(HttpClientError): ... +class HttpServerError(SlumberHttpBaseException): ... +class SerializerNoAvailable(SlumberBaseException): ... +class SerializerNotAvailable(SlumberBaseException): ... +class ImproperlyConfigured(SlumberBaseException): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/serialize.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/serialize.pyi new file mode 100644 index 00000000..b9256756 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/serialize.pyi @@ -0,0 +1,28 @@ +from typing import Any +from typing_extensions import TypeAlias + +_Data: TypeAlias = str | bytes | bytearray + +class BaseSerializer: + content_types: list[str] | None + key: str | None + def get_content_type(self) -> str: ... + def loads(self, data: _Data) -> Any: ... + def dumps(self, data: _Data) -> Any: ... + +class JsonSerializer(BaseSerializer): + content_types: list[str] + key: str + +class YamlSerializer(BaseSerializer): + content_types: list[str] + key: str + +class Serializer: + serializers: list[BaseSerializer] + default: str + def __init__(self, default: str | None = ..., serializers: list[BaseSerializer] | None = ...) -> None: ... + def get_serializer(self, name: str | None = ..., content_type: str | None = ...) -> BaseSerializer: ... + def loads(self, data: _Data, format: str | None = ...) -> Any: ... + def dumps(self, data: _Data, format: str | None = ...) -> Any: ... + def get_content_type(self, format: str | None = ...) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/utils.pyi new file mode 100644 index 00000000..3dc0c9ee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/slumber/slumber/utils.pyi @@ -0,0 +1,10 @@ +from collections.abc import ItemsView, Mapping, MutableMapping +from typing import Any, TypeVar + +_KT = TypeVar("_KT") +_VT_co = TypeVar("_VT_co", covariant=True) +_MM = TypeVar("_MM", bound=MutableMapping[Any, Any]) + +def url_join(base: str, *args: str) -> str: ... +def copy_kwargs(dictionary: _MM) -> _MM: ... +def iterator(d: Mapping[_KT, _VT_co]) -> ItemsView[_KT, _VT_co]: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..9a2ba762 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# looks like developer tooling that's not part of the public API +stdlib_list.fetch diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/METADATA.toml new file mode 100644 index 00000000..29511ee7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/METADATA.toml @@ -0,0 +1 @@ +version = "0.8.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/stdlib_list/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/stdlib_list/__init__.pyi new file mode 100644 index 00000000..33cfc9cf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/stdlib_list/__init__.pyi @@ -0,0 +1,9 @@ +from .base import ( + get_canonical_version as get_canonical_version, + in_stdlib as in_stdlib, + long_versions as long_versions, + short_versions as short_versions, + stdlib_list as stdlib_list, +) + +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/stdlib_list/_version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/stdlib_list/_version.pyi new file mode 100644 index 00000000..feb0b165 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/stdlib_list/_version.pyi @@ -0,0 +1,5 @@ +from typing import Any + +version_json: str + +def get_versions() -> dict[str, Any]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/stdlib_list/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/stdlib_list/base.pyi new file mode 100644 index 00000000..83321049 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stdlib-list/stdlib_list/base.pyi @@ -0,0 +1,6 @@ +long_versions: list[str] +short_versions: list[str] + +def get_canonical_version(version: str) -> str: ... +def stdlib_list(version: str | None = ...) -> list[str]: ... +def in_stdlib(module_name: str, version: str | None = ...) -> bool: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..7512d81b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/@tests/stubtest_allowlist.txt @@ -0,0 +1,7 @@ +# The following methods have custom classmethod decorators +stripe\..*\.delete +stripe\..*PaymentIntent\.confirm +stripe\.api_resources\.test_helpers\.test_clock\.TestClock\.advance +stripe\.api_resources\.test_helpers\.TestClock\.advance +stripe\.api_resources\..*\.SearchableAPIResource\.search # Not defined on the actual class in v3, but expected to exist. +stripe\.api_resources\..*\.SearchableAPIResource\.search_auto_paging_iter # Not defined on the actual class in v3, but expected to exist. diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/METADATA.toml new file mode 100644 index 00000000..098add93 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/METADATA.toml @@ -0,0 +1,4 @@ +version = "3.5.*" + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/__init__.pyi new file mode 100644 index 00000000..d05cf52b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/__init__.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete +from typing import Any + +from stripe.api_resources import * +from stripe.oauth import OAuth as OAuth +from stripe.webhook import Webhook as Webhook, WebhookSignature as WebhookSignature + +api_key: str | None +client_id: Any +api_base: str +connect_api_base: str +upload_api_base: str +api_version: Any +verify_ssl_certs: bool +proxy: Any +default_http_client: Any +app_info: Any +enable_telemetry: bool +max_network_retries: int +ca_bundle_path: Any +log: Any + +def set_app_info( + name, partner_id: Incomplete | None = ..., url: Incomplete | None = ..., version: Incomplete | None = ... +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_requestor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_requestor.pyi new file mode 100644 index 00000000..858371a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_requestor.pyi @@ -0,0 +1,33 @@ +from _typeshed import Incomplete +from typing import Any + +from stripe import error as error, http_client as http_client, oauth_error as oauth_error, version as version +from stripe.multipart_data_generator import MultipartDataGenerator as MultipartDataGenerator +from stripe.stripe_response import StripeResponse as StripeResponse, StripeStreamResponse as StripeStreamResponse + +class APIRequestor: + api_base: Any + api_key: Any + api_version: Any + stripe_account: Any + def __init__( + self, + key: Incomplete | None = ..., + client: Incomplete | None = ..., + api_base: Incomplete | None = ..., + api_version: Incomplete | None = ..., + account: Incomplete | None = ..., + ) -> None: ... 
+ @classmethod + def format_app_info(cls, info): ... + def request(self, method, url, params: Incomplete | None = ..., headers: Incomplete | None = ...): ... + def request_stream(self, method, url, params: Incomplete | None = ..., headers: Incomplete | None = ...): ... + def handle_error_response(self, rbody, rcode, resp, rheaders) -> None: ... + def specific_api_error(self, rbody, rcode, resp, rheaders, error_data): ... + def specific_oauth_error(self, rbody, rcode, resp, rheaders, error_code): ... + def request_headers(self, api_key, method): ... + def request_raw( + self, method, url, params: Incomplete | None = ..., supplied_headers: Incomplete | None = ..., is_streaming: bool = ... + ): ... + def interpret_response(self, rbody, rcode, rheaders): ... + def interpret_streaming_response(self, stream, rcode, rheaders): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/__init__.pyi new file mode 100644 index 00000000..f37d1806 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/__init__.pyi @@ -0,0 +1,80 @@ +from stripe.api_resources import ( + billing_portal as billing_portal, + checkout as checkout, + identity as identity, + issuing as issuing, + radar as radar, + reporting as reporting, + sigma as sigma, + terminal as terminal, + test_helpers as test_helpers, +) +from stripe.api_resources.account import Account as Account +from stripe.api_resources.account_link import AccountLink as AccountLink +from stripe.api_resources.alipay_account import AlipayAccount as AlipayAccount +from stripe.api_resources.apple_pay_domain import ApplePayDomain as ApplePayDomain +from stripe.api_resources.application_fee import ApplicationFee as ApplicationFee +from stripe.api_resources.application_fee_refund import ApplicationFeeRefund as ApplicationFeeRefund +from stripe.api_resources.balance import Balance as Balance +from stripe.api_resources.balance_transaction import BalanceTransaction as BalanceTransaction +from stripe.api_resources.bank_account import BankAccount as BankAccount +from stripe.api_resources.bitcoin_receiver import BitcoinReceiver as BitcoinReceiver +from stripe.api_resources.bitcoin_transaction import BitcoinTransaction as BitcoinTransaction +from stripe.api_resources.capability import Capability as Capability +from stripe.api_resources.card import Card as Card +from stripe.api_resources.charge import Charge as Charge +from stripe.api_resources.country_spec import CountrySpec as CountrySpec +from stripe.api_resources.coupon import Coupon as Coupon +from stripe.api_resources.credit_note import CreditNote as CreditNote +from stripe.api_resources.credit_note_line_item import CreditNoteLineItem as CreditNoteLineItem +from stripe.api_resources.customer import Customer as Customer +from stripe.api_resources.customer_balance_transaction import CustomerBalanceTransaction as CustomerBalanceTransaction +from stripe.api_resources.dispute import Dispute as Dispute +from stripe.api_resources.ephemeral_key import EphemeralKey as EphemeralKey +from stripe.api_resources.error_object import ErrorObject as ErrorObject, OAuthErrorObject as OAuthErrorObject +from stripe.api_resources.event import Event as Event +from stripe.api_resources.exchange_rate import ExchangeRate as ExchangeRate +from stripe.api_resources.file import File as File, FileUpload as 
FileUpload +from stripe.api_resources.file_link import FileLink as FileLink +from stripe.api_resources.invoice import Invoice as Invoice +from stripe.api_resources.invoice_item import InvoiceItem as InvoiceItem +from stripe.api_resources.invoice_line_item import InvoiceLineItem as InvoiceLineItem +from stripe.api_resources.issuer_fraud_record import IssuerFraudRecord as IssuerFraudRecord +from stripe.api_resources.line_item import LineItem as LineItem +from stripe.api_resources.list_object import ListObject as ListObject +from stripe.api_resources.login_link import LoginLink as LoginLink +from stripe.api_resources.mandate import Mandate as Mandate +from stripe.api_resources.order import Order as Order +from stripe.api_resources.payment_intent import PaymentIntent as PaymentIntent +from stripe.api_resources.payment_method import PaymentMethod as PaymentMethod +from stripe.api_resources.payout import Payout as Payout +from stripe.api_resources.person import Person as Person +from stripe.api_resources.plan import Plan as Plan +from stripe.api_resources.price import Price as Price +from stripe.api_resources.product import Product as Product +from stripe.api_resources.promotion_code import PromotionCode as PromotionCode +from stripe.api_resources.quote import Quote as Quote +from stripe.api_resources.recipient import Recipient as Recipient +from stripe.api_resources.recipient_transfer import RecipientTransfer as RecipientTransfer +from stripe.api_resources.refund import Refund as Refund +from stripe.api_resources.reversal import Reversal as Reversal +from stripe.api_resources.review import Review as Review +from stripe.api_resources.search_result_object import SearchResultObject as SearchResultObject +from stripe.api_resources.setup_attempt import SetupAttempt as SetupAttempt +from stripe.api_resources.setup_intent import SetupIntent as SetupIntent +from stripe.api_resources.sku import SKU as SKU +from stripe.api_resources.source import Source as Source +from stripe.api_resources.source_transaction import SourceTransaction as SourceTransaction +from stripe.api_resources.subscription import Subscription as Subscription +from stripe.api_resources.subscription_item import SubscriptionItem as SubscriptionItem +from stripe.api_resources.subscription_schedule import SubscriptionSchedule as SubscriptionSchedule +from stripe.api_resources.tax_code import TaxCode as TaxCode +from stripe.api_resources.tax_id import TaxId as TaxId +from stripe.api_resources.tax_rate import TaxRate as TaxRate +from stripe.api_resources.three_d_secure import ThreeDSecure as ThreeDSecure +from stripe.api_resources.token import Token as Token +from stripe.api_resources.topup import Topup as Topup +from stripe.api_resources.transfer import Transfer as Transfer +from stripe.api_resources.usage_record import UsageRecord as UsageRecord +from stripe.api_resources.usage_record_summary import UsageRecordSummary as UsageRecordSummary +from stripe.api_resources.webhook_endpoint import WebhookEndpoint as WebhookEndpoint diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/__init__.pyi new file mode 100644 index 00000000..f73b09fd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/__init__.pyi @@ -0,0 +1,12 @@ +from stripe.api_resources.abstract.api_resource import 
APIResource as APIResource +from stripe.api_resources.abstract.createable_api_resource import CreateableAPIResource as CreateableAPIResource +from stripe.api_resources.abstract.custom_method import custom_method as custom_method +from stripe.api_resources.abstract.deletable_api_resource import DeletableAPIResource as DeletableAPIResource +from stripe.api_resources.abstract.listable_api_resource import ListableAPIResource as ListableAPIResource +from stripe.api_resources.abstract.nested_resource_class_methods import ( + nested_resource_class_methods as nested_resource_class_methods, +) +from stripe.api_resources.abstract.searchable_api_resource import SearchableAPIResource as SearchableAPIResource +from stripe.api_resources.abstract.singleton_api_resource import SingletonAPIResource as SingletonAPIResource +from stripe.api_resources.abstract.updateable_api_resource import UpdateableAPIResource as UpdateableAPIResource +from stripe.api_resources.abstract.verify_mixin import VerifyMixin as VerifyMixin diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/api_resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/api_resource.pyi new file mode 100644 index 00000000..8891f113 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/api_resource.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing_extensions import Self + +from stripe import api_requestor as api_requestor, error as error +from stripe.stripe_object import StripeObject as StripeObject + +class APIResource(StripeObject): + @classmethod + def retrieve(cls, id, api_key: Incomplete | None = ..., **params) -> Self: ... + def refresh(self) -> Self: ... + @classmethod + def class_url(cls) -> str: ... + def instance_url(self) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/createable_api_resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/createable_api_resource.pyi new file mode 100644 index 00000000..1191ea7f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/createable_api_resource.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from typing_extensions import Self + +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract.api_resource import APIResource as APIResource + +class CreateableAPIResource(APIResource): + @classmethod + def create( + cls, + api_key: Incomplete | None = ..., + idempotency_key: str | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ) -> Self: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/custom_method.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/custom_method.pyi new file mode 100644 index 00000000..8ac66a25 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/custom_method.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +def custom_method(name, http_verb, http_path: Incomplete | None = ..., is_streaming: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/deletable_api_resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/deletable_api_resource.pyi new file mode 100644 index 00000000..b82f72b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/deletable_api_resource.pyi @@ -0,0 +1,7 @@ +from typing_extensions import Self + +from stripe.api_resources.abstract.api_resource import APIResource as APIResource + +class DeletableAPIResource(APIResource): + @classmethod + def delete(cls, sid: str = ..., **params) -> Self: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/listable_api_resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/listable_api_resource.pyi new file mode 100644 index 00000000..926e41fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/listable_api_resource.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete +from collections.abc import Iterator +from typing import Any + +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract.api_resource import APIResource as APIResource +from stripe.api_resources.list_object import ListObject + +class ListableAPIResource(APIResource): + @classmethod + def auto_paging_iter(cls, *args, **params) -> Iterator[Any]: ... + @classmethod + def list( + cls, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ) -> ListObject: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/nested_resource_class_methods.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/nested_resource_class_methods.pyi new file mode 100644 index 00000000..080ce175 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/nested_resource_class_methods.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +from stripe import api_requestor as api_requestor + +def nested_resource_class_methods( + resource, path: Incomplete | None = ..., operations: Incomplete | None = ..., resource_plural: Incomplete | None = ... +): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/searchable_api_resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/searchable_api_resource.pyi new file mode 100644 index 00000000..cf683861 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/searchable_api_resource.pyi @@ -0,0 +1,11 @@ +from collections.abc import Iterator +from typing_extensions import Self + +from stripe.api_resources.abstract.api_resource import APIResource as APIResource +from stripe.api_resources.search_result_object import SearchResultObject + +class SearchableAPIResource(APIResource): + @classmethod + def search(cls, *args: str | None, **kwargs) -> SearchResultObject[Self]: ... + @classmethod + def search_auto_paging_iter(cls, *args: str | None, **kwargs) -> Iterator[Self]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/singleton_api_resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/singleton_api_resource.pyi new file mode 100644 index 00000000..602ae356 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/singleton_api_resource.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract.api_resource import APIResource as APIResource + +class SingletonAPIResource(APIResource): + @classmethod + def retrieve(cls, **params): ... + @classmethod + def class_url(cls): ... + def instance_url(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/updateable_api_resource.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/updateable_api_resource.pyi new file mode 100644 index 00000000..ffbcbb14 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/updateable_api_resource.pyi @@ -0,0 +1,8 @@ +from typing_extensions import Self + +from stripe.api_resources.abstract.api_resource import APIResource as APIResource + +class UpdateableAPIResource(APIResource): + @classmethod + def modify(cls, sid: str, **params) -> Self: ... + def save(self, idempotency_key: str | None = ...) -> Self: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/verify_mixin.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/verify_mixin.pyi new file mode 100644 index 00000000..139533f2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/abstract/verify_mixin.pyi @@ -0,0 +1,2 @@ +class VerifyMixin: + def verify(self, idempotency_key: str | None = ..., **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/account.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/account.pyi new file mode 100644 index 00000000..98c78960 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/account.pyi @@ -0,0 +1,73 @@ +from _typeshed import Incomplete + +from stripe import oauth as oauth +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, + nested_resource_class_methods as nested_resource_class_methods, +) + +class Account(CreateableAPIResource, DeletableAPIResource, ListableAPIResource): + OBJECT_NAME: str + def reject(self, idempotency_key: str | None = ..., **params): ... + @classmethod + def retrieve(cls, id: Incomplete | None = ..., api_key: Incomplete | None = ..., **params): ... + @classmethod + def modify(cls, id: Incomplete | None = ..., **params): ... + def instance_url(self): ... + def persons(self, idempotency_key: str | None = ..., **params): ... + def deauthorize(self, **params): ... + def serialize(self, previous): ... + @classmethod + def capabilitys_url(cls, id, nested_id=...): ... + @classmethod + def capabilitys_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def retrieve_capability(cls, id, nested_id, **params): ... + @classmethod + def modify_capability(cls, id, nested_id, **params): ... + @classmethod + def list_capabilities(cls, id, **params): ... + @classmethod + def external_accounts_url(cls, id, nested_id=...): ... + @classmethod + def external_accounts_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def create_external_account(cls, id, **params): ... + @classmethod + def retrieve_external_account(cls, id, nested_id, **params): ... + @classmethod + def modify_external_account(cls, id, nested_id, **params): ... + @classmethod + def delete_external_account(cls, id, nested_id, **params): ... + @classmethod + def list_external_accounts(cls, id, **params): ... + @classmethod + def login_links_url(cls, id, nested_id=...): ... + @classmethod + def login_links_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def create_login_link(cls, id, **params): ... + @classmethod + def persons_url(cls, id, nested_id=...): ... + @classmethod + def persons_request(cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params): ... + @classmethod + def create_person(cls, id, **params): ... + @classmethod + def retrieve_person(cls, id, nested_id, **params): ... + @classmethod + def modify_person(cls, id, nested_id, **params): ... + @classmethod + def delete_person(cls, id, nested_id, **params): ... + @classmethod + def list_persons(cls, id, **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/account_link.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/account_link.pyi new file mode 100644 index 00000000..a50565c5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/account_link.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import CreateableAPIResource as CreateableAPIResource + +class AccountLink(CreateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/alipay_account.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/alipay_account.pyi new file mode 100644 index 00000000..f18475a6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/alipay_account.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete +from typing import NoReturn + +from stripe.api_resources.abstract import ( + DeletableAPIResource as DeletableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) +from stripe.api_resources.customer import Customer as Customer + +class AlipayAccount(DeletableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def instance_url(self): ... + @classmethod + def modify(cls, customer, id, **params): ... + @classmethod + def retrieve( + cls, + id, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ) -> NoReturn: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/apple_pay_domain.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/apple_pay_domain.pyi new file mode 100644 index 00000000..0e933201 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/apple_pay_domain.pyi @@ -0,0 +1,10 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, +) + +class ApplePayDomain(CreateableAPIResource, DeletableAPIResource, ListableAPIResource): + OBJECT_NAME: str + @classmethod + def class_url(cls): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/application_fee.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/application_fee.pyi new file mode 100644 index 00000000..e9d275a8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/application_fee.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ( + ListableAPIResource as ListableAPIResource, + nested_resource_class_methods as nested_resource_class_methods, +) + +class ApplicationFee(ListableAPIResource): + OBJECT_NAME: str + def refund(self, idempotency_key: str | None = ..., **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/application_fee_refund.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/application_fee_refund.pyi new file mode 100644 index 00000000..865bb0fc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/application_fee_refund.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import NoReturn +from typing_extensions import Self + +from stripe.api_resources import ApplicationFee as ApplicationFee +from stripe.api_resources.abstract import UpdateableAPIResource as UpdateableAPIResource + +class ApplicationFeeRefund(UpdateableAPIResource): + OBJECT_NAME: str + @classmethod + def modify(cls, fee, sid: str, **params) -> Self: ... # type: ignore[override] + def instance_url(self) -> str: ... + @classmethod + def retrieve(cls, id, api_key: Incomplete | None = ..., **params) -> NoReturn: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/balance.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/balance.pyi new file mode 100644 index 00000000..03a33a9b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/balance.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import SingletonAPIResource as SingletonAPIResource + +class Balance(SingletonAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/balance_transaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/balance_transaction.pyi new file mode 100644 index 00000000..bdbf7b88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/balance_transaction.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class BalanceTransaction(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/bank_account.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/bank_account.pyi new file mode 100644 index 00000000..5698d914 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/bank_account.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from typing import NoReturn + +from stripe import error as error +from stripe.api_resources.abstract import ( + DeletableAPIResource as DeletableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + VerifyMixin as VerifyMixin, +) +from stripe.api_resources.account import Account as Account +from stripe.api_resources.customer import Customer as Customer + +class BankAccount(DeletableAPIResource, UpdateableAPIResource, VerifyMixin): + OBJECT_NAME: str + def instance_url(self) -> str: ... + @classmethod + def modify(cls, sid, **params) -> NoReturn: ... + @classmethod + def retrieve( + cls, + id, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ) -> NoReturn: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/billing_portal/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/billing_portal/__init__.pyi new file mode 100644 index 00000000..8132f096 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/billing_portal/__init__.pyi @@ -0,0 +1,2 @@ +from stripe.api_resources.billing_portal.configuration import Configuration as Configuration +from stripe.api_resources.billing_portal.session import Session as Session diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/billing_portal/configuration.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/billing_portal/configuration.pyi new file mode 100644 index 00000000..875989ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/billing_portal/configuration.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Configuration(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/billing_portal/session.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/billing_portal/session.pyi new file mode 100644 index 00000000..1bce47c9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/billing_portal/session.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import CreateableAPIResource as CreateableAPIResource + +class Session(CreateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/bitcoin_receiver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/bitcoin_receiver.pyi new file mode 100644 index 00000000..933736a3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/bitcoin_receiver.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource +from stripe.api_resources.customer import Customer as Customer + +class BitcoinReceiver(ListableAPIResource): + OBJECT_NAME: str + def instance_url(self): ... + @classmethod + def class_url(cls): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/bitcoin_transaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/bitcoin_transaction.pyi new file mode 100644 index 00000000..0f354550 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/bitcoin_transaction.pyi @@ -0,0 +1,4 @@ +from stripe.stripe_object import StripeObject as StripeObject + +class BitcoinTransaction(StripeObject): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/capability.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/capability.pyi new file mode 100644 index 00000000..fc0a30b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/capability.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import NoReturn + +from stripe.api_resources.abstract import UpdateableAPIResource as UpdateableAPIResource +from stripe.api_resources.account import Account as Account + +class Capability(UpdateableAPIResource): + OBJECT_NAME: str + def instance_url(self) -> str: ... + @classmethod + def modify(cls, sid, **params) -> NoReturn: ... + @classmethod + def retrieve(cls, id, api_key: Incomplete | None = ..., **params) -> NoReturn: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/card.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/card.pyi new file mode 100644 index 00000000..663cb32b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/card.pyi @@ -0,0 +1,26 @@ +from _typeshed import Incomplete +from typing import NoReturn + +from stripe import error as error +from stripe.api_resources.abstract import ( + DeletableAPIResource as DeletableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) +from stripe.api_resources.account import Account as Account +from stripe.api_resources.customer import Customer as Customer +from stripe.api_resources.recipient import Recipient as Recipient + +class Card(DeletableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def instance_url(self) -> str: ... + @classmethod + def modify(cls, sid, **params) -> NoReturn: ... + @classmethod + def retrieve( + cls, + id, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ) -> NoReturn: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/charge.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/charge.pyi new file mode 100644 index 00000000..a9c62466 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/charge.pyi @@ -0,0 +1,17 @@ +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + SearchableAPIResource as SearchableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Charge(CreateableAPIResource, ListableAPIResource, SearchableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def capture(self, idempotency_key: str | None = ..., **params): ... + def refund(self, idempotency_key: str | None = ..., **params): ... + def update_dispute(self, idempotency_key: str | None = ..., **params): ... + def close_dispute(self, idempotency_key: str | None = ..., **params): ... + def mark_as_fraudulent(self, idempotency_key: str | None = ...): ... + def mark_as_safe(self, idempotency_key: str | None = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/checkout/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/checkout/__init__.pyi new file mode 100644 index 00000000..acbde55c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/checkout/__init__.pyi @@ -0,0 +1 @@ +from stripe.api_resources.checkout.session import Session as Session diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/checkout/session.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/checkout/session.pyi new file mode 100644 index 00000000..9e165da4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/checkout/session.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + nested_resource_class_methods as nested_resource_class_methods, +) + +class Session(CreateableAPIResource, ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/country_spec.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/country_spec.pyi new file mode 100644 index 00000000..45613f08 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/country_spec.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class CountrySpec(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/coupon.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/coupon.pyi new file mode 100644 index 00000000..d00e8f1c --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/coupon.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Coupon(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/credit_note.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/credit_note.pyi new file mode 100644 index 00000000..906b80a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/credit_note.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class CreditNote(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def void_credit_note(self, idempotency_key: str | None = ..., **params): ... + @classmethod + def preview( + cls, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/credit_note_line_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/credit_note_line_item.pyi new file mode 100644 index 00000000..276818b9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/credit_note_line_item.pyi @@ -0,0 +1,4 @@ +from stripe.stripe_object import StripeObject as StripeObject + +class CreditNoteLineItem(StripeObject): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/customer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/customer.pyi new file mode 100644 index 00000000..36e3327d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/customer.pyi @@ -0,0 +1,54 @@ +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + SearchableAPIResource as SearchableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, + nested_resource_class_methods as nested_resource_class_methods, +) + +class Customer(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, SearchableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def delete_discount(self, **params) -> None: ... + @classmethod + def balance_transactions_url(cls, id, nested_id=...): ... 
+ @classmethod + def balance_transactions_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def create_balance_transaction(cls, id, **params): ... + @classmethod + def retrieve_balance_transaction(cls, id, nested_id, **params): ... + @classmethod + def modify_balance_transaction(cls, id, nested_id, **params): ... + @classmethod + def list_balance_transactions(cls, id, **params): ... + @classmethod + def sources_url(cls, id, nested_id=...): ... + @classmethod + def sources_request(cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params): ... + @classmethod + def create_source(cls, id, **params): ... + @classmethod + def retrieve_source(cls, id, nested_id, **params): ... + @classmethod + def modify_source(cls, id, nested_id, **params): ... + @classmethod + def delete_source(cls, id, nested_id, **params): ... + @classmethod + def list_sources(cls, id, **params): ... + @classmethod + def tax_ids_url(cls, id, nested_id=...): ... + @classmethod + def tax_ids_request(cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params): ... + @classmethod + def create_tax_id(cls, id, **params): ... + @classmethod + def retrieve_tax_id(cls, id, nested_id, **params): ... + @classmethod + def delete_tax_id(cls, id, nested_id, **params): ... + @classmethod + def list_tax_ids(cls, id, **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/customer_balance_transaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/customer_balance_transaction.pyi new file mode 100644 index 00000000..6a27ca9c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/customer_balance_transaction.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from typing import NoReturn + +from stripe.api_resources.abstract import APIResource as APIResource +from stripe.api_resources.customer import Customer as Customer + +class CustomerBalanceTransaction(APIResource): + OBJECT_NAME: str + def instance_url(self) -> str: ... + @classmethod + def retrieve(cls, id, api_key: Incomplete | None = ..., **params) -> NoReturn: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/dispute.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/dispute.pyi new file mode 100644 index 00000000..6f79d2ae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/dispute.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Dispute(ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def close(self, idempotency_key: str | None = ..., **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/ephemeral_key.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/ephemeral_key.pyi new file mode 100644 index 00000000..287efb6b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/ephemeral_key.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete + +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract import DeletableAPIResource as DeletableAPIResource + +class EphemeralKey(DeletableAPIResource): + OBJECT_NAME: str + @classmethod + def create( + cls, + api_key: Incomplete | None = ..., + idempotency_key: str | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/error_object.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/error_object.pyi new file mode 100644 index 00000000..ba075a8a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/error_object.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete + +from stripe.stripe_object import StripeObject as StripeObject + +class ErrorObject(StripeObject): + def refresh_from( + self, + values, + api_key: Incomplete | None = ..., + partial: bool = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + last_response: Incomplete | None = ..., + ): ... + +class OAuthErrorObject(StripeObject): + def refresh_from( + self, + values, + api_key: Incomplete | None = ..., + partial: bool = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + last_response: Incomplete | None = ..., + ): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/event.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/event.pyi new file mode 100644 index 00000000..32ec04d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/event.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class Event(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/exchange_rate.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/exchange_rate.pyi new file mode 100644 index 00000000..c6ae5b9e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/exchange_rate.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class ExchangeRate(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/file.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/file.pyi new file mode 100644 index 00000000..9c319856 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/file.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class File(ListableAPIResource): + OBJECT_NAME: str + OBJECT_NAME_ALT: str + @classmethod + def class_url(cls): ... + @classmethod + def create( + cls, + api_key: Incomplete | None = ..., + api_version: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ): ... 
+ +FileUpload = File diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/file_link.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/file_link.pyi new file mode 100644 index 00000000..af55248f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/file_link.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class FileLink(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/identity/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/identity/__init__.pyi new file mode 100644 index 00000000..6285d40f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/identity/__init__.pyi @@ -0,0 +1,2 @@ +from stripe.api_resources.identity.verification_report import VerificationReport as VerificationReport +from stripe.api_resources.identity.verification_session import VerificationSession as VerificationSession diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/identity/verification_report.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/identity/verification_report.pyi new file mode 100644 index 00000000..873d0d57 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/identity/verification_report.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class VerificationReport(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/identity/verification_session.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/identity/verification_session.pyi new file mode 100644 index 00000000..5a9e29c6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/identity/verification_session.pyi @@ -0,0 +1,11 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class VerificationSession(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def cancel(self, idempotency_key: str | None = ..., **params): ... + def redact(self, idempotency_key: str | None = ..., **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/invoice.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/invoice.pyi new file mode 100644 index 00000000..50d8c9f3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/invoice.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete +from typing_extensions import Self + +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + SearchableAPIResource as SearchableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Invoice(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, SearchableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def finalize_invoice(self, idempotency_key: str | None = ..., **params) -> Self: ... + def mark_uncollectible(self, idempotency_key: str | None = ..., **params) -> Self: ... + def pay(self, idempotency_key: str | None = ..., **params) -> Self: ... + def send_invoice(self, idempotency_key: str | None = ..., **params) -> Self: ... + def void_invoice(self, idempotency_key: str | None = ..., **params) -> Self: ... + @classmethod + def upcoming( + cls, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ) -> Invoice: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/invoice_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/invoice_item.pyi new file mode 100644 index 00000000..cd5eee9b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/invoice_item.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class InvoiceItem(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/invoice_line_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/invoice_line_item.pyi new file mode 100644 index 00000000..4d33401e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/invoice_line_item.pyi @@ -0,0 +1,4 @@ +from stripe.stripe_object import StripeObject as StripeObject + +class InvoiceLineItem(StripeObject): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuer_fraud_record.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuer_fraud_record.pyi new file mode 100644 index 00000000..25d5e80f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuer_fraud_record.pyi @@ -0,0 +1,4 @@ +from 
stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class IssuerFraudRecord(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/__init__.pyi new file mode 100644 index 00000000..51cba722 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/__init__.pyi @@ -0,0 +1,6 @@ +from stripe.api_resources.issuing.authorization import Authorization as Authorization +from stripe.api_resources.issuing.card import Card as Card +from stripe.api_resources.issuing.card_details import CardDetails as CardDetails +from stripe.api_resources.issuing.cardholder import Cardholder as Cardholder +from stripe.api_resources.issuing.dispute import Dispute as Dispute +from stripe.api_resources.issuing.transaction import Transaction as Transaction diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/authorization.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/authorization.pyi new file mode 100644 index 00000000..dd758000 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/authorization.pyi @@ -0,0 +1,10 @@ +from stripe.api_resources.abstract import ( + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Authorization(ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def approve(self, idempotency_key: str | None = ..., **params): ... + def decline(self, idempotency_key: str | None = ..., **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/card.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/card.pyi new file mode 100644 index 00000000..0fdab4bb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/card.pyi @@ -0,0 +1,10 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Card(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def details(self, idempotency_key: str | None = ..., **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/card_details.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/card_details.pyi new file mode 100644 index 00000000..9d963f88 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/card_details.pyi @@ -0,0 +1,4 @@ +from stripe.stripe_object import StripeObject as StripeObject + +class CardDetails(StripeObject): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/cardholder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/cardholder.pyi new file mode 100644 index 00000000..c08889ad --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/cardholder.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Cardholder(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/dispute.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/dispute.pyi new file mode 100644 index 00000000..ed3b6741 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/dispute.pyi @@ -0,0 +1,10 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Dispute(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def submit(self, idempotency_key: str | None = ..., **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/transaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/transaction.pyi new file mode 100644 index 00000000..66ea728d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/issuing/transaction.pyi @@ -0,0 +1,7 @@ +from stripe.api_resources.abstract import ( + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Transaction(ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/line_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/line_item.pyi new file mode 100644 index 00000000..77317c4e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/line_item.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import APIResource as APIResource + +class LineItem(APIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/list_object.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/list_object.pyi new file mode 100644 index 00000000..be26574e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/list_object.pyi @@ -0,0 +1,57 @@ +from _typeshed import Incomplete +from collections.abc import Iterator +from typing import Any + +from stripe import api_requestor as api_requestor +from stripe.stripe_object import StripeObject as StripeObject + +class ListObject(StripeObject): + OBJECT_NAME: str + def list( + self, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ) -> ListObject: ... + def create( + self, + api_key: Incomplete | None = ..., + idempotency_key: str | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ): ... + def retrieve( + self, + id, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ): ... + def __getitem__(self, k): ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __reversed__(self): ... + def auto_paging_iter(self) -> Iterator[Any]: ... + @classmethod + def empty_list( + cls, api_key: Incomplete | None = ..., stripe_version: Incomplete | None = ..., stripe_account: Incomplete | None = ... + ) -> ListObject: ... + @property + def is_empty(self) -> bool: ... + def next_page( + self, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ) -> ListObject: ... + def previous_page( + self, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ) -> ListObject: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/login_link.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/login_link.pyi new file mode 100644 index 00000000..9333af64 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/login_link.pyi @@ -0,0 +1,4 @@ +from stripe.stripe_object import StripeObject as StripeObject + +class LoginLink(StripeObject): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/mandate.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/mandate.pyi new file mode 100644 index 00000000..eb16554d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/mandate.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import APIResource as APIResource + +class Mandate(APIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/order.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/order.pyi new file mode 100644 index 00000000..1a9dfac2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/order.pyi @@ -0,0 +1,13 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Order(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def cancel(self, idempotency_key: str | None = ..., **params): ... + def list_line_items(self, idempotency_key: str | None = ..., **params): ... + def reopen(self, idempotency_key: str | None = ..., **params): ... + def submit(self, idempotency_key: str | None = ..., **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/payment_intent.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/payment_intent.pyi new file mode 100644 index 00000000..203ce28f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/payment_intent.pyi @@ -0,0 +1,22 @@ +from typing import overload + +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + SearchableAPIResource as SearchableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class PaymentIntent(CreateableAPIResource, ListableAPIResource, SearchableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def cancel(self, idempotency_key: str | None = ..., **params): ... + def capture(self, idempotency_key: str | None = ..., **params): ... + @overload + @classmethod + def confirm( + cls, intent: str, api_key: str | None = ..., stripe_version: str | None = ..., stripe_account: str | None = ..., **params + ): ... + @overload + @classmethod + def confirm(cls, idempotency_key: str | None = ..., **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/payment_method.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/payment_method.pyi new file mode 100644 index 00000000..47310ad7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/payment_method.pyi @@ -0,0 +1,13 @@ +from typing_extensions import Self + +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class PaymentMethod(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def attach(self, idempotency_key: str | None = ..., **params) -> Self: ... + def detach(self, idempotency_key: str | None = ..., **params) -> Self: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/payout.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/payout.pyi new file mode 100644 index 00000000..95855d02 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/payout.pyi @@ -0,0 +1,11 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Payout(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def cancel(self, idempotency_key: str | None = ..., **params): ... + def reverse(self, idempotency_key: str | None = ..., **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/person.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/person.pyi new file mode 100644 index 00000000..487ae347 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/person.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import NoReturn + +from stripe.api_resources.abstract import UpdateableAPIResource as UpdateableAPIResource +from stripe.api_resources.account import Account as Account + +class Person(UpdateableAPIResource): + OBJECT_NAME: str + def instance_url(self) -> str: ... + @classmethod + def modify(cls, sid, **params) -> NoReturn: ... + @classmethod + def retrieve(cls, id, api_key: Incomplete | None = ..., **params) -> NoReturn: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/plan.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/plan.pyi new file mode 100644 index 00000000..6f133807 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/plan.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Plan(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/price.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/price.pyi new file mode 100644 index 00000000..773254d0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/price.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + SearchableAPIResource as SearchableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Price(CreateableAPIResource, ListableAPIResource, SearchableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/product.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/product.pyi new file mode 100644 index 00000000..10149e00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/product.pyi @@ -0,0 +1,10 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + SearchableAPIResource as SearchableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Product(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, SearchableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/promotion_code.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/promotion_code.pyi new file mode 100644 index 00000000..0bc9cb50 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/promotion_code.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class PromotionCode(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/quote.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/quote.pyi new file mode 100644 index 00000000..fb5a68c5 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/quote.pyi @@ -0,0 +1,24 @@ +from _typeshed import Incomplete + +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Quote(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def accept(self, idempotency_key: str | None = ..., **params): ... + def cancel(self, idempotency_key: str | None = ..., **params): ... + def finalize_quote(self, idempotency_key: str | None = ..., **params): ... + def list_line_items(self, idempotency_key: str | None = ..., **params): ... + def pdf( + self, + api_key: Incomplete | None = ..., + api_version: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/__init__.pyi new file mode 100644 index 00000000..a7fe6cd8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/__init__.pyi @@ -0,0 +1,3 @@ +from stripe.api_resources.radar.early_fraud_warning import EarlyFraudWarning as EarlyFraudWarning +from stripe.api_resources.radar.value_list import ValueList as ValueList +from stripe.api_resources.radar.value_list_item import ValueListItem as ValueListItem diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/early_fraud_warning.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/early_fraud_warning.pyi new file mode 100644 index 00000000..e1a2b49a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/early_fraud_warning.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class EarlyFraudWarning(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/value_list.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/value_list.pyi new file mode 100644 index 00000000..956aba4f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/value_list.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class ValueList(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/value_list_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/value_list_item.pyi new file mode 100644 index 
00000000..5e7deb33 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/radar/value_list_item.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, +) + +class ValueListItem(CreateableAPIResource, DeletableAPIResource, ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/recipient.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/recipient.pyi new file mode 100644 index 00000000..de8758b7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/recipient.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Recipient(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/recipient_transfer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/recipient_transfer.pyi new file mode 100644 index 00000000..e7ce9880 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/recipient_transfer.pyi @@ -0,0 +1,4 @@ +from stripe.stripe_object import StripeObject as StripeObject + +class RecipientTransfer(StripeObject): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/refund.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/refund.pyi new file mode 100644 index 00000000..c401c30a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/refund.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Refund(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reporting/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reporting/__init__.pyi new file mode 100644 index 00000000..df2b4de9 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reporting/__init__.pyi @@ -0,0 +1,2 @@ +from stripe.api_resources.reporting.report_run import ReportRun as ReportRun +from stripe.api_resources.reporting.report_type import ReportType as ReportType diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reporting/report_run.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reporting/report_run.pyi new file mode 100644 index 
00000000..5e125149 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reporting/report_run.pyi @@ -0,0 +1,7 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, +) + +class ReportRun(CreateableAPIResource, ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reporting/report_type.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reporting/report_type.pyi new file mode 100644 index 00000000..aef5fbf8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reporting/report_type.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class ReportType(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reversal.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reversal.pyi new file mode 100644 index 00000000..80d393fe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/reversal.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import NoReturn + +from stripe.api_resources.abstract import UpdateableAPIResource as UpdateableAPIResource +from stripe.api_resources.transfer import Transfer as Transfer + +class Reversal(UpdateableAPIResource): + OBJECT_NAME: str + def instance_url(self) -> str: ... + @classmethod + def modify(cls, sid, **params) -> NoReturn: ... + @classmethod + def retrieve(cls, id, api_key: Incomplete | None = ..., **params) -> NoReturn: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/review.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/review.pyi new file mode 100644 index 00000000..cc04d656 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/review.pyi @@ -0,0 +1,5 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource, custom_method as custom_method + +class Review(ListableAPIResource): + OBJECT_NAME: str + def approve(self, idempotency_key: str | None = ..., **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/search_result_object.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/search_result_object.pyi new file mode 100644 index 00000000..e447b582 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/search_result_object.pyi @@ -0,0 +1,32 @@ +from collections.abc import Iterator +from typing import Any, ClassVar, Generic, TypeVar +from typing_extensions import Literal, Self + +from stripe.stripe_object import StripeObject + +_T = TypeVar("_T") + +class SearchResultObject(StripeObject, Generic[_T]): + OBJECT_NAME: ClassVar[Literal["search_result"]] + url: str + has_more: bool + data: list[_T] + next_page: str + total_count: int + + def search( + self, api_key: str | None = ..., stripe_version: str | None = ..., stripe_account: str | None = ..., **params + ) -> Self: ... + def __getitem__(self, k: str) -> Any: ... + def __iter__(self) -> Iterator[_T]: ... + def __len__(self) -> int: ... + def auto_paging_iter(self) -> Iterator[_T]: ... + @classmethod + def empty_search_result( + cls, api_key: str | None = ..., stripe_version: str | None = ..., stripe_account: str | None = ... + ) -> Self: ... + @property + def is_empty(self) -> bool: ... + def next_search_result_page( + self, api_key: str | None = ..., stripe_version: str | None = ..., stripe_account: str | None = ..., **params + ) -> Self: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/setup_attempt.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/setup_attempt.pyi new file mode 100644 index 00000000..aa89f2ee --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/setup_attempt.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class SetupAttempt(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/setup_intent.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/setup_intent.pyi new file mode 100644 index 00000000..8da11176 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/setup_intent.pyi @@ -0,0 +1,11 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class SetupIntent(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def cancel(self, idempotency_key: str | None = ..., **params): ... + def confirm(self, idempotency_key: str | None = ..., **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/sigma/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/sigma/__init__.pyi new file mode 100644 index 00000000..0b669a58 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/sigma/__init__.pyi @@ -0,0 +1 @@ +from stripe.api_resources.sigma.scheduled_query_run import ScheduledQueryRun as ScheduledQueryRun diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/sigma/scheduled_query_run.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/sigma/scheduled_query_run.pyi new file mode 100644 index 00000000..5445216f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/sigma/scheduled_query_run.pyi @@ -0,0 +1,6 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class ScheduledQueryRun(ListableAPIResource): + OBJECT_NAME: str + @classmethod + def class_url(cls): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/sku.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/sku.pyi new file mode 100644 index 00000000..2b7b548f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/sku.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class SKU(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/source.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/source.pyi new file mode 100644 index 00000000..547e229a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/source.pyi @@ -0,0 +1,13 @@ +from stripe import error as error +from stripe.api_resources import Customer as Customer +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + VerifyMixin as VerifyMixin, + nested_resource_class_methods as nested_resource_class_methods, +) + +class Source(CreateableAPIResource, UpdateableAPIResource, VerifyMixin): + OBJECT_NAME: str + def detach(self, idempotency_key: str | None = ..., **params): ... + def source_transactions(self, **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/source_transaction.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/source_transaction.pyi new file mode 100644 index 00000000..48570cbf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/source_transaction.pyi @@ -0,0 +1,4 @@ +from stripe.stripe_object import StripeObject as StripeObject + +class SourceTransaction(StripeObject): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/subscription.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/subscription.pyi new file mode 100644 index 00000000..511d2aef --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/subscription.pyi @@ -0,0 +1,15 @@ +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + SearchableAPIResource as SearchableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Subscription( + CreateableAPIResource, DeletableAPIResource, ListableAPIResource, SearchableAPIResource, UpdateableAPIResource +): + OBJECT_NAME: str + def delete_discount(self, **params) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/subscription_item.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/subscription_item.pyi new file mode 100644 index 00000000..df891e92 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/subscription_item.pyi @@ -0,0 +1,27 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + nested_resource_class_methods as nested_resource_class_methods, +) + +class SubscriptionItem(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def usage_record_summaries(self, **params): ... + @classmethod + def usage_records_url(cls, id, nested_id=...): ... + @classmethod + def usage_records_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def create_usage_record(cls, id, **params): ... + @classmethod + def usage_record_summarys_url(cls, id, nested_id=...): ... + @classmethod + def usage_record_summarys_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def list_usage_record_summaries(cls, id, **params): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/subscription_schedule.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/subscription_schedule.pyi new file mode 100644 index 00000000..315c15b6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/subscription_schedule.pyi @@ -0,0 +1,11 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class SubscriptionSchedule(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def cancel(self, idempotency_key: str | None = ..., **params): ... + def release(self, idempotency_key: str | None = ..., **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/tax_code.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/tax_code.pyi new file mode 100644 index 00000000..ed37e567 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/tax_code.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import ListableAPIResource as ListableAPIResource + +class TaxCode(ListableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/tax_id.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/tax_id.pyi new file mode 100644 index 00000000..38432e32 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/tax_id.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete +from typing import NoReturn + +from stripe.api_resources.abstract import APIResource as APIResource +from stripe.api_resources.customer import Customer as Customer + +class TaxId(APIResource): + OBJECT_NAME: str + def instance_url(self) -> str: ... + @classmethod + def retrieve(cls, id, api_key: Incomplete | None = ..., **params) -> NoReturn: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/tax_rate.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/tax_rate.pyi new file mode 100644 index 00000000..41cad550 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/tax_rate.pyi @@ -0,0 +1,8 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class TaxRate(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/__init__.pyi new file mode 100644 index 00000000..27b995d4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/__init__.pyi @@ -0,0 +1,3 @@ +from stripe.api_resources.terminal.connection_token import ConnectionToken as ConnectionToken +from stripe.api_resources.terminal.location import Location as Location +from stripe.api_resources.terminal.reader import Reader as Reader diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/connection_token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/connection_token.pyi new file mode 100644 index 00000000..2d0dc48d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/connection_token.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import CreateableAPIResource as CreateableAPIResource + +class ConnectionToken(CreateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/location.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/location.pyi new file mode 100644 index 00000000..1bb0ee00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/location.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Location(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/reader.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/reader.pyi new file mode 100644 index 00000000..d8333b91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/terminal/reader.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + 
ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class Reader(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/test_helpers/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/test_helpers/__init__.pyi new file mode 100644 index 00000000..ec08b0c2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/test_helpers/__init__.pyi @@ -0,0 +1 @@ +from stripe.api_resources.test_helpers.test_clock import TestClock as TestClock diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/test_helpers/test_clock.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/test_helpers/test_clock.pyi new file mode 100644 index 00000000..3caf15ec --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/test_helpers/test_clock.pyi @@ -0,0 +1,10 @@ +from typing import Any +from typing_extensions import Literal, Self + +from stripe.api_resources.abstract import CreateableAPIResource, DeletableAPIResource, ListableAPIResource + +class TestClock(CreateableAPIResource, DeletableAPIResource, ListableAPIResource): + OBJECT_NAME: Literal["test_helpers.test_clock"] + + @classmethod + def advance(cls, idempotency_key: str | None = ..., **params: Any) -> Self: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/three_d_secure.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/three_d_secure.pyi new file mode 100644 index 00000000..4ab7469a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/three_d_secure.pyi @@ -0,0 +1,6 @@ +from stripe.api_resources.abstract import CreateableAPIResource as CreateableAPIResource + +class ThreeDSecure(CreateableAPIResource): + OBJECT_NAME: str + @classmethod + def class_url(cls): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/token.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/token.pyi new file mode 100644 index 00000000..307de97b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/token.pyi @@ -0,0 +1,4 @@ +from stripe.api_resources.abstract import CreateableAPIResource as CreateableAPIResource + +class Token(CreateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/topup.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/topup.pyi new file mode 100644 index 00000000..e6619c8c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/topup.pyi @@ -0,0 +1,10 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, +) + +class Topup(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def cancel(self, idempotency_key: str | None = ..., **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/transfer.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/transfer.pyi new file mode 100644 index 00000000..aa4edd44 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/transfer.pyi @@ -0,0 +1,11 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, + custom_method as custom_method, + nested_resource_class_methods as nested_resource_class_methods, +) + +class Transfer(CreateableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str + def cancel(self, idempotency_key: str | None = ..., **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/usage_record.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/usage_record.pyi new file mode 100644 index 00000000..61bd11ab --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/usage_record.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete + +from stripe import api_requestor as api_requestor +from stripe.api_resources.abstract.api_resource import APIResource as APIResource + +class UsageRecord(APIResource): + OBJECT_NAME: str + @classmethod + def create( + cls, + api_key: Incomplete | None = ..., + idempotency_key: str | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + **params, + ): ... 
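The `UsageRecord.create` stub above only names the common client arguments and funnels everything else through `**params`. A minimal sketch of a metered-billing call, assuming the usual stripe-python keyword arguments (`subscription_item`, `quantity`, `timestamp`, `action`), which travel through `**params` and are not part of the stub itself:

    import time
    import stripe

    stripe.api_key = "sk_test_..."  # placeholder

    # Only api_key, idempotency_key, stripe_version and stripe_account are named
    # in the stub; the billing-specific fields below are plain **params.
    stripe.UsageRecord.create(
        subscription_item="si_123",   # placeholder id
        quantity=100,
        timestamp=int(time.time()),
        action="increment",
    )
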
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/usage_record_summary.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/usage_record_summary.pyi new file mode 100644 index 00000000..1af44a75 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/usage_record_summary.pyi @@ -0,0 +1,4 @@ +from stripe.stripe_object import StripeObject as StripeObject + +class UsageRecordSummary(StripeObject): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/webhook_endpoint.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/webhook_endpoint.pyi new file mode 100644 index 00000000..508f943c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/api_resources/webhook_endpoint.pyi @@ -0,0 +1,9 @@ +from stripe.api_resources.abstract import ( + CreateableAPIResource as CreateableAPIResource, + DeletableAPIResource as DeletableAPIResource, + ListableAPIResource as ListableAPIResource, + UpdateableAPIResource as UpdateableAPIResource, +) + +class WebhookEndpoint(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): + OBJECT_NAME: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/error.pyi new file mode 100644 index 00000000..98b8bc17 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/error.pyi @@ -0,0 +1,76 @@ +from _typeshed import Incomplete +from typing import Any + +class StripeError(Exception): + http_body: Any + http_status: Any + json_body: Any + headers: Any + code: Any + request_id: Any + error: Any + def __init__( + self, + message: Incomplete | None = ..., + http_body: Incomplete | None = ..., + http_status: Incomplete | None = ..., + json_body: Incomplete | None = ..., + headers: Incomplete | None = ..., + code: Incomplete | None = ..., + ) -> None: ... + @property + def user_message(self): ... + def construct_error_object(self): ... + +class APIError(StripeError): ... + +class APIConnectionError(StripeError): + should_retry: Any + def __init__( + self, + message, + http_body: Incomplete | None = ..., + http_status: Incomplete | None = ..., + json_body: Incomplete | None = ..., + headers: Incomplete | None = ..., + code: Incomplete | None = ..., + should_retry: bool = ..., + ) -> None: ... + +class StripeErrorWithParamCode(StripeError): ... + +class CardError(StripeErrorWithParamCode): + param: Any + def __init__( + self, + message, + param, + code, + http_body: Incomplete | None = ..., + http_status: Incomplete | None = ..., + json_body: Incomplete | None = ..., + headers: Incomplete | None = ..., + ) -> None: ... + +class IdempotencyError(StripeError): ... + +class InvalidRequestError(StripeErrorWithParamCode): + param: Any + def __init__( + self, + message, + param, + code: Incomplete | None = ..., + http_body: Incomplete | None = ..., + http_status: Incomplete | None = ..., + json_body: Incomplete | None = ..., + headers: Incomplete | None = ..., + ) -> None: ... + +class AuthenticationError(StripeError): ... +class PermissionError(StripeError): ... 
+class RateLimitError(StripeError): ... + +class SignatureVerificationError(StripeError): + sig_header: Any + def __init__(self, message, sig_header, http_body: Incomplete | None = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/http_client.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/http_client.pyi new file mode 100644 index 00000000..968e29ba --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/http_client.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete +from typing import Any + +from stripe import error as error +from stripe.request_metrics import RequestMetrics as RequestMetrics + +version: Any +major: Any +minor: Any +patch: Any + +def new_default_http_client(*args, **kwargs): ... + +class HTTPClient: + MAX_DELAY: int + INITIAL_DELAY: float + MAX_RETRY_AFTER: int + def __init__(self, verify_ssl_certs: bool = ..., proxy: Incomplete | None = ...) -> None: ... + def request_with_retries(self, method, url, headers, post_data: Incomplete | None = ...): ... + def request_stream_with_retries(self, method, url, headers, post_data: Incomplete | None = ...): ... + def request(self, method, url, headers, post_data: Incomplete | None = ...) -> None: ... + def request_stream(self, method, url, headers, post_data: Incomplete | None = ...) -> None: ... + def close(self) -> None: ... + +class RequestsClient(HTTPClient): + name: str + def __init__(self, timeout: int = ..., session: Incomplete | None = ..., **kwargs) -> None: ... + def request(self, method, url, headers, post_data: Incomplete | None = ...): ... + def request_stream(self, method, url, headers, post_data: Incomplete | None = ...): ... + def close(self) -> None: ... + +class UrlFetchClient(HTTPClient): + name: str + def __init__(self, verify_ssl_certs: bool = ..., proxy: Incomplete | None = ..., deadline: int = ...) -> None: ... + def request(self, method, url, headers, post_data: Incomplete | None = ...): ... + def request_stream(self, method, url, headers, post_data: Incomplete | None = ...): ... + def close(self) -> None: ... + +class PycurlClient(HTTPClient): + name: str + def __init__(self, verify_ssl_certs: bool = ..., proxy: Incomplete | None = ...) -> None: ... + def parse_headers(self, data): ... + def request(self, method, url, headers, post_data: Incomplete | None = ...): ... + def request_stream(self, method, url, headers, post_data: Incomplete | None = ...): ... + def close(self) -> None: ... + +class Urllib2Client(HTTPClient): + name: str + def __init__(self, verify_ssl_certs: bool = ..., proxy: Incomplete | None = ...) -> None: ... + def request(self, method, url, headers, post_data: Incomplete | None = ...): ... + def request_stream(self, method, url, headers, post_data: Incomplete | None = ...): ... + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/multipart_data_generator.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/multipart_data_generator.pyi new file mode 100644 index 00000000..7867aa9f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/multipart_data_generator.pyi @@ -0,0 +1,11 @@ +from typing import Any + +class MultipartDataGenerator: + data: Any + line_break: str + boundary: Any + chunk_size: Any + def __init__(self, chunk_size: int = ...) 
-> None: ... + def add_params(self, params) -> None: ... + def param_header(self): ... + def get_post_data(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/oauth.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/oauth.pyi new file mode 100644 index 00000000..ecec5d53 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/oauth.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete + +from stripe import api_requestor as api_requestor, connect_api_base as connect_api_base, error as error + +class OAuth: + @staticmethod + def authorize_url(express: bool = ..., **params): ... + @staticmethod + def token(api_key: Incomplete | None = ..., **params): ... + @staticmethod + def deauthorize(api_key: Incomplete | None = ..., **params): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/oauth_error.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/oauth_error.pyi new file mode 100644 index 00000000..a639ed02 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/oauth_error.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +from stripe.error import StripeError as StripeError + +class OAuthError(StripeError): + def __init__( + self, + code, + description, + http_body: Incomplete | None = ..., + http_status: Incomplete | None = ..., + json_body: Incomplete | None = ..., + headers: Incomplete | None = ..., + ) -> None: ... + def construct_error_object(self): ... + +class InvalidClientError(OAuthError): ... +class InvalidGrantError(OAuthError): ... +class InvalidRequestError(OAuthError): ... +class InvalidScopeError(OAuthError): ... +class UnsupportedGrantTypeError(OAuthError): ... +class UnsupportedResponseTypeError(OAuthError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/object_classes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/object_classes.pyi new file mode 100644 index 00000000..dca9ea4b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/object_classes.pyi @@ -0,0 +1,5 @@ +from typing import Any + +from stripe import api_resources as api_resources + +OBJECT_CLASSES: Any diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/request_metrics.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/request_metrics.pyi new file mode 100644 index 00000000..73952d61 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/request_metrics.pyi @@ -0,0 +1,7 @@ +from typing import Any + +class RequestMetrics: + request_id: Any + request_duration_ms: Any + def __init__(self, request_id, request_duration_ms) -> None: ... + def payload(self): ... 
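The `stripe.error` hierarchy above exposes typed attributes (`http_status`, `json_body`, `code`, `param`, `user_message`). A minimal sketch of how a caller would branch on it — `stripe.Charge.create` is used only as a representative call and its arguments are placeholders:

    import stripe
    from stripe.error import CardError, RateLimitError, StripeError

    try:
        stripe.Charge.create(amount=1000, currency="usd", source="tok_visa")  # placeholder params
    except CardError as exc:
        # CardError extends StripeErrorWithParamCode and carries param/code.
        print("card declined:", exc.code, exc.param, exc.user_message)
    except RateLimitError:
        print("rate limited, retry later")
    except StripeError as exc:
        print("generic API error:", exc.http_status, exc.json_body)
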
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/stripe_object.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/stripe_object.pyi new file mode 100644 index 00000000..cfb021b2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/stripe_object.pyi @@ -0,0 +1,62 @@ +import json +from _typeshed import Incomplete +from typing import Any +from typing_extensions import Self + +from stripe import api_requestor as api_requestor + +class StripeObject(dict[Any, Any]): + class ReprJSONEncoder(json.JSONEncoder): + def default(self, obj): ... + + def __init__( + self, + id: Incomplete | None = ..., + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + last_response: Incomplete | None = ..., + **params, + ) -> None: ... + @property + def last_response(self): ... + def update(self, update_dict): ... + def __setattr__(self, k: str, v) -> None: ... + def __getattr__(self, k: str): ... + def __delattr__(self, k: str) -> None: ... + def __setitem__(self, k, v) -> None: ... + def __getitem__(self, k): ... + def __delitem__(self, k) -> None: ... + def __reduce__(self): ... + @classmethod + def construct_from( + cls, + values: Any, + key: str | None, + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + last_response: Incomplete | None = ..., + ) -> Self: ... + api_key: Any + stripe_version: Any + stripe_account: Any + def refresh_from( + self, + values: Any, + api_key: Incomplete | None = ..., + partial: bool = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., + last_response: Incomplete | None = ..., + ) -> None: ... + @classmethod + def api_base(cls) -> None: ... + def request(self, method, url, params: Incomplete | None = ..., headers: Incomplete | None = ...): ... + def request_stream(self, method, url, params: Incomplete | None = ..., headers: Incomplete | None = ...): ... + def to_dict(self): ... + def to_dict_recursive(self): ... + @property + def stripe_id(self): ... + def serialize(self, previous): ... + def __copy__(self) -> StripeObject: ... + def __deepcopy__(self, memo: Any) -> StripeObject: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/stripe_response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/stripe_response.pyi new file mode 100644 index 00000000..30585703 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/stripe_response.pyi @@ -0,0 +1,19 @@ +from typing import Any + +class StripeResponseBase: + code: Any + headers: Any + def __init__(self, code, headers) -> None: ... + @property + def idempotency_key(self): ... + @property + def request_id(self): ... + +class StripeResponse(StripeResponseBase): + body: Any + data: Any + def __init__(self, body, code, headers) -> None: ... + +class StripeStreamResponse(StripeResponseBase): + io: Any + def __init__(self, io, code, headers) -> None: ... 
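`StripeObject` above is a `dict` subclass whose `__getattr__`/`__setattr__` mirror item access, and `construct_from` is the classmethod used to build one from a decoded JSON payload. A minimal offline sketch (the payload is made up; `key` is the API key and may be None here):

    from stripe.stripe_object import StripeObject

    payload = {"id": "cus_123", "object": "customer", "email": "jane@example.com"}
    obj = StripeObject.construct_from(payload, key=None)

    # Attribute access and item access hit the same underlying dict.
    assert obj.id == obj["id"] == "cus_123"
    obj.description = "vip"            # __setattr__ routes to __setitem__
    assert obj["description"] == "vip"
    print(obj.to_dict())
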
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/util.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/util.pyi new file mode 100644 index 00000000..af50ed9b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/util.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete +from typing import Any, overload +from typing_extensions import TypeAlias + +from stripe.stripe_object import StripeObject +from stripe.stripe_response import StripeResponse + +def utf8(value): ... +def log_debug(message, **params) -> None: ... +def log_info(message, **params) -> None: ... +def dashboard_link(request_id): ... +def logfmt(props): ... + +class class_method_variant: + class_method_name: Any + def __init__(self, class_method_name) -> None: ... + method: Any + def __call__(self, method): ... + def __get__(self, obj, objtype: Incomplete | None = ...): ... + +@overload +def populate_headers(idempotency_key: None) -> None: ... +@overload +def populate_headers(idempotency_key: str) -> dict[str, str]: ... + +_RespType: TypeAlias = dict[Any, Any] | StripeObject | StripeResponse + +# undocumented +@overload +def convert_to_stripe_object( + resp: list[Any], + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., +) -> list[Any]: ... +@overload +def convert_to_stripe_object( + resp: _RespType, + api_key: Incomplete | None = ..., + stripe_version: Incomplete | None = ..., + stripe_account: Incomplete | None = ..., +) -> StripeObject: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/version.pyi new file mode 100644 index 00000000..3acee936 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/version.pyi @@ -0,0 +1 @@ +VERSION: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/webhook.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/webhook.pyi new file mode 100644 index 00000000..12cfe57e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/stripe/stripe/webhook.pyi @@ -0,0 +1,17 @@ +from typing_extensions import Literal + +from stripe import Event, error as error + +class Webhook: + DEFAULT_TOLERANCE: int + @staticmethod + def construct_event( + payload: bytes | str, sig_header: str, secret: str, tolerance: int = ..., api_key: str | None = ... + ) -> Event: ... + +class WebhookSignature: + EXPECTED_SCHEME: str + @classmethod + def verify_header(cls, payload: bytes | str, header: str, secret: str, tolerance: int | None = ...) -> Literal[True]: ... + @staticmethod + def _compute_signature(payload: str, secret: str) -> str: ... 
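`Webhook.construct_event` above is typed to take the raw payload, the `Stripe-Signature` header value and the endpoint secret, returning an `Event`. A minimal handler sketch; the signing secret is a placeholder:

    import stripe
    from stripe.error import SignatureVerificationError

    ENDPOINT_SECRET = "whsec_..."  # placeholder webhook signing secret

    def handle_webhook(payload: bytes, sig_header: str) -> str:
        try:
            event = stripe.Webhook.construct_event(payload, sig_header, ENDPOINT_SECRET)
        except ValueError:
            return "invalid payload"
        except SignatureVerificationError:
            # Raised when WebhookSignature.verify_header rejects the signature.
            return "invalid signature"
        return event.type  # Event is the typed return per the stub
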
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tabulate/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tabulate/METADATA.toml new file mode 100644 index 00000000..51e869b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tabulate/METADATA.toml @@ -0,0 +1 @@ +version = "0.9.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tabulate/tabulate/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tabulate/tabulate/__init__.pyi new file mode 100644 index 00000000..31d625e7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tabulate/tabulate/__init__.pyi @@ -0,0 +1,65 @@ +from collections.abc import Callable, Container, Iterable, Mapping, Sequence +from typing import Any, NamedTuple +from typing_extensions import Final, Self, TypeAlias + +from .version import __version__ as __version__ + +__all__ = ["tabulate", "tabulate_formats", "simple_separated_format"] + +# These constants are meant to be configurable +# https://github.com/astanin/python-tabulate#text-formatting +PRESERVE_WHITESPACE: bool +MIN_PADDING: int +# https://github.com/astanin/python-tabulate#wide-fullwidth-cjk-symbols +WIDE_CHARS_MODE: bool +SEPARATING_LINE: str + +class Line(NamedTuple): + begin: str + hline: str + sep: str + end: str + +class DataRow(NamedTuple): + begin: str + sep: str + end: str + +_TableFormatLine: TypeAlias = None | Line | Callable[[list[int], list[str]], str] +_TableFormatRow: TypeAlias = None | DataRow | Callable[[list[Any], list[int], list[str]], str] + +class TableFormat(NamedTuple): + lineabove: _TableFormatLine + linebelowheader: _TableFormatLine + linebetweenrows: _TableFormatLine + linebelow: _TableFormatLine + headerrow: _TableFormatRow + datarow: _TableFormatRow + padding: int + with_header_hide: Container[str] | None + +LATEX_ESCAPE_RULES: Final[dict[str, str]] +tabulate_formats: list[str] +multiline_formats: dict[str, str] + +def simple_separated_format(separator: str) -> TableFormat: ... +def tabulate( + tabular_data: Mapping[str, Iterable[Any]] | Iterable[Iterable[Any]], + headers: str | dict[str, str] | Sequence[str] = ..., + tablefmt: str | TableFormat = ..., + floatfmt: str | Iterable[str] = ..., + intfmt: str | Iterable[str] = ..., + numalign: str | None = ..., + stralign: str | None = ..., + missingval: str | Iterable[str] = ..., + showindex: str | bool | Iterable[Any] = ..., + disable_numparse: bool | Iterable[int] = ..., + colalign: Iterable[str | None] | None = ..., + maxcolwidths: int | Iterable[int | None] | None = ..., + rowalign: str | Iterable[str] | None = ..., + maxheadercolwidths: int | Iterable[int] | None = ..., +) -> str: ... + +class JupyterHTMLStr(str): + @property + def str(self) -> Self: ... 
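The tabulate stub above covers the whole public surface of the package, including the module-level knobs flagged as configurable. A minimal usage sketch:

    import tabulate as tabulate_mod
    from tabulate import simple_separated_format, tabulate

    rows = [["spam", 42], ["eggs", 451]]

    # headers as a sequence, tablefmt as one of the named formats
    print(tabulate(rows, headers=["item", "qty"], tablefmt="github"))

    # tablefmt may also be a TableFormat instance, e.g. from simple_separated_format
    print(tabulate(rows, tablefmt=simple_separated_format("\t")))

    # The constants are plain module globals meant to be reassigned.
    tabulate_mod.MIN_PADDING = 0
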
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tabulate/tabulate/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tabulate/tabulate/version.pyi new file mode 100644 index 00000000..e7799771 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tabulate/tabulate/version.pyi @@ -0,0 +1,6 @@ +from typing_extensions import Final + +version: Final[str] +__version__: Final[str] +version_tuple: Final[tuple[int, int, int]] +__version_tuple__: Final[tuple[int, int, int]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..e42fc68f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/@tests/stubtest_allowlist.txt @@ -0,0 +1,22 @@ +# Some methods are dynamically patched onto to instances as they +# may depend on whether code is executed in graph/eager/v1/v2/etc. +# Tensorflow supports multiple modes of execution which changes some +# of the attributes/methods/even class hierachies. +tensorflow.Tensor.__int__ +tensorflow.Tensor.numpy +tensorflow.Tensor.__index__ +# Incomplete +tensorflow.sparse.SparseTensor.__getattr__ +tensorflow.SparseTensor.__getattr__ +tensorflow.TensorShape.__getattr__ +tensorflow.dtypes.DType.__getattr__ +tensorflow.RaggedTensor.__getattr__ +tensorflow.DType.__getattr__ +tensorflow.Graph.__getattr__ +tensorflow.Operation.__getattr__ +tensorflow.Variable.__getattr__ +# Internal undocumented API +tensorflow.RaggedTensor.__init__ +# Has an undocumented extra argument that tf.Variable which acts like subclass +# (by dynamically patching tf.Tensor methods) does not preserve. +tensorflow.Tensor.__getitem__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/METADATA.toml new file mode 100644 index 00000000..00f7387c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/METADATA.toml @@ -0,0 +1,6 @@ +version = "2.11.*" +# requires a version of numpy with a `py.typed` file +requires = ["numpy>=1.20"] + +[tool.stubtest] +ignore_missing_stub = true diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/__init__.pyi new file mode 100644 index 00000000..c6d0ba98 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/__init__.pyi @@ -0,0 +1,256 @@ +from _typeshed import Incomplete, Unused +from abc import ABCMeta +from builtins import bool as _bool +from collections.abc import Callable, Iterable, Iterator, Sequence +from contextlib import contextmanager +from enum import Enum +from typing import Any, NoReturn, overload +from typing_extensions import Self, TypeAlias + +import numpy + +# Explicit import of DType is covered by the wildcard, but +# is necessary to avoid a crash in pytype. +from tensorflow.dtypes import * +from tensorflow.dtypes import DType as DType + +# Most tf.math functions are exported as tf, but sadly not all are. 
+from tensorflow.math import ( + abs as abs, + add as add, + add_n as add_n, + argmax as argmax, + argmin as argmin, + cos as cos, + cosh as cosh, + divide as divide, + equal as equal, + greater as greater, + greater_equal as greater_equal, + less as less, + less_equal as less_equal, + logical_and as logical_and, + logical_not as logical_not, + logical_or as logical_or, + maximum as maximum, + minimum as minimum, + multiply as multiply, + not_equal as not_equal, + pow as pow, + reduce_max as reduce_max, + reduce_mean as reduce_mean, + reduce_min as reduce_min, + reduce_prod as reduce_prod, + reduce_sum as reduce_sum, + sigmoid as sigmoid, + sign as sign, + sin as sin, + sinh as sinh, + sqrt as sqrt, + square as square, + subtract as subtract, + tanh as tanh, +) +from tensorflow.sparse import SparseTensor + +# Tensors ideally should be a generic type, but properly typing data type/shape +# will be a lot of work. Until we have good non-generic tensorflow stubs, +# we will skip making Tensor generic. Also good type hints for shapes will +# run quickly into many places where type system is not strong enough today. +# So shape typing is probably not worth doing anytime soon. +_Slice: TypeAlias = int | slice | None + +_FloatDataSequence: TypeAlias = Sequence[float] | Sequence[_FloatDataSequence] +_StrDataSequence: TypeAlias = Sequence[str] | Sequence[_StrDataSequence] +_ScalarTensorCompatible: TypeAlias = Tensor | str | float | numpy.ndarray[Any, Any] | numpy.number[Any] +_TensorCompatible: TypeAlias = _ScalarTensorCompatible | Sequence[_TensorCompatible] +_ShapeLike: TypeAlias = TensorShape | Iterable[_ScalarTensorCompatible | None] | int | Tensor +_DTypeLike: TypeAlias = DType | str | numpy.dtype[Any] + +class Tensor: + def __init__(self, op: Operation, value_index: int, dtype: DType) -> None: ... + def consumers(self) -> list[Incomplete]: ... + @property + def shape(self) -> TensorShape: ... + def get_shape(self) -> TensorShape: ... + @property + def dtype(self) -> DType: ... + @property + def graph(self) -> Graph: ... + @property + def name(self) -> str: ... + @property + def op(self) -> Operation: ... + def numpy(self) -> numpy.ndarray[Any, Any]: ... + def __int__(self) -> int: ... + def __abs__(self, name: str | None = None) -> Tensor: ... + def __add__(self, other: _TensorCompatible) -> Tensor: ... + def __radd__(self, other: _TensorCompatible) -> Tensor: ... + def __sub__(self, other: _TensorCompatible) -> Tensor: ... + def __rsub__(self, other: _TensorCompatible) -> Tensor: ... + def __mul__(self, other: _TensorCompatible) -> Tensor: ... + def __rmul__(self, other: _TensorCompatible) -> Tensor: ... + def __pow__(self, other: _TensorCompatible) -> Tensor: ... + def __matmul__(self, other: _TensorCompatible) -> Tensor: ... + def __rmatmul__(self, other: _TensorCompatible) -> Tensor: ... + def __floordiv__(self, other: _TensorCompatible) -> Tensor: ... + def __rfloordiv__(self, other: _TensorCompatible) -> Tensor: ... + def __truediv__(self, other: _TensorCompatible) -> Tensor: ... + def __rtruediv__(self, other: _TensorCompatible) -> Tensor: ... + def __neg__(self, name: str | None = None) -> Tensor: ... + def __and__(self, other: _TensorCompatible) -> Tensor: ... + def __rand__(self, other: _TensorCompatible) -> Tensor: ... + def __or__(self, other: _TensorCompatible) -> Tensor: ... + def __ror__(self, other: _TensorCompatible) -> Tensor: ... + def __eq__(self, other: _TensorCompatible) -> Tensor: ... # type: ignore[override] + def __ne__(self, other: _TensorCompatible) -> Tensor: ... 
# type: ignore[override] + def __ge__(self, other: _TensorCompatible, name: str | None = None) -> Tensor: ... + def __gt__(self, other: _TensorCompatible, name: str | None = None) -> Tensor: ... + def __le__(self, other: _TensorCompatible, name: str | None = None) -> Tensor: ... + def __lt__(self, other: _TensorCompatible, name: str | None = None) -> Tensor: ... + def __bool__(self) -> NoReturn: ... + def __getitem__(self, slice_spec: _Slice | tuple[_Slice, ...]) -> Tensor: ... + def __len__(self) -> int: ... + # This only works for rank 0 tensors. + def __index__(self) -> int: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class VariableSynchronization(Enum): + AUTO = 0 + NONE = 1 + ON_WRITE = 2 + ON_READ = 3 + +class VariableAggregation(Enum): + AUTO = 0 + NONE = 1 + ON_WRITE = 2 + ON_READ = 3 + +class _VariableMetaclass(type): ... + +# Variable class in intent/documentation is a Tensor. In implementation there's +# TODO comment to make it Tensor. It is not actually Tensor type wise, but even +# dynamically patches on most methods of tf.Tensor +# https://github.com/tensorflow/tensorflow/blob/9524a636cae9ae3f0554203c1ba7ee29c85fcf12/tensorflow/python/ops/variables.py#L1086. +class Variable(Tensor, metaclass=_VariableMetaclass): + def __init__( + self, + initial_value: Tensor | Callable[[], Tensor] | None = None, + trainable: _bool | None = None, + validate_shape: _bool = True, + # Valid non-None values are deprecated. + caching_device: None = None, + name: str | None = None, + # Real type is VariableDef protobuf type. Can be added after adding script + # to generate tensorflow protobuf stubs with mypy-protobuf. + variable_def: Incomplete | None = None, + dtype: _DTypeLike | None = None, + import_scope: str | None = None, + constraint: Callable[[Tensor], Tensor] | None = None, + synchronization: VariableSynchronization = ..., + aggregation: VariableAggregation = ..., + shape: _ShapeLike | None = None, + experimental_enable_variable_lifting: _bool = True, + ) -> None: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class RaggedTensor(metaclass=ABCMeta): + def bounding_shape( + self, axis: _TensorCompatible | None = None, name: str | None = None, out_type: _DTypeLike | None = None + ) -> Tensor: ... + @classmethod + def from_sparse(cls, st_input: SparseTensor, name: str | None = None, row_splits_dtype: _DTypeLike = ...) -> RaggedTensor: ... + def to_sparse(self, name: str | None = None) -> SparseTensor: ... + def to_tensor( + self, default_value: float | str | None = None, name: str | None = None, shape: _ShapeLike | None = None + ) -> Tensor: ... + def __add__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __radd__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __sub__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __mul__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __rmul__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __floordiv__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __truediv__(self, other: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... + def __getitem__(self, slice_spec: _Slice | tuple[_Slice, ...]) -> RaggedTensor: ... + def __getattr__(self, name: str) -> Incomplete: ... 
+ +class Operation: + def __init__( + self, + node_def: Incomplete, + g: Graph, + # isinstance is used so can not be Sequence/Iterable. + inputs: list[Tensor] | None = None, + output_types: Unused = None, + control_inputs: Iterable[Tensor | Operation] | None = None, + input_types: Iterable[DType] | None = None, + original_op: Operation | None = None, + op_def: Incomplete = None, + ) -> None: ... + @property + def inputs(self) -> list[Tensor]: ... + @property + def outputs(self) -> list[Tensor]: ... + @property + def device(self) -> str: ... + @property + def name(self) -> str: ... + @property + def type(self) -> str: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class TensorShape(metaclass=ABCMeta): + def __init__(self, dims: _ShapeLike) -> None: ... + @property + def rank(self) -> int: ... + def as_list(self) -> list[int | None]: ... + def assert_has_rank(self, rank: int) -> None: ... + def assert_is_compatible_with(self, other: Iterable[int | None]) -> None: ... + def __bool__(self) -> _bool: ... + @overload + def __getitem__(self, key: int) -> int | None: ... + @overload + def __getitem__(self, key: slice) -> TensorShape: ... + def __iter__(self) -> Iterator[int | None]: ... + def __len__(self) -> int: ... + def __add__(self, other: Iterable[int | None]) -> TensorShape: ... + def __radd__(self, other: Iterable[int | None]) -> TensorShape: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class Graph: + def add_to_collection(self, name: str, value: object) -> None: ... + def add_to_collections(self, names: Iterable[str] | str, value: object) -> None: ... + @contextmanager + def as_default(self) -> Iterator[Self]: ... + def finalize(self) -> None: ... + def get_tensor_by_name(self, name: str) -> Tensor: ... + def get_operation_by_name(self, name: str) -> Operation: ... + def get_operations(self) -> list[Operation]: ... + def get_name_scope(self) -> str: ... + def __getattr__(self, name: str) -> Incomplete: ... + +class IndexedSlices(metaclass=ABCMeta): + def __init__(self, values: Tensor, indices: Tensor, dense_shape: None | Tensor = None) -> None: ... + @property + def values(self) -> Tensor: ... + @property + def indices(self) -> Tensor: ... + @property + def dense_shape(self) -> None | Tensor: ... + @property + def shape(self) -> TensorShape: ... + @property + def dtype(self) -> DType: ... + @property + def name(self) -> str: ... + @property + def op(self) -> Operation: ... + @property + def graph(self) -> Graph: ... + @property + def device(self) -> str: ... + def __neg__(self) -> IndexedSlices: ... + def consumers(self) -> list[Operation]: ... + +def __getattr__(name: str) -> Incomplete: ... 
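The comments in `tensorflow/__init__.pyi` above explain that `Tensor` is deliberately not generic and that comparison operators are typed to return `Tensor` rather than `bool` (with `__bool__` typed `NoReturn` to discourage truth-testing). A minimal sketch of code that type-checks against these stubs; `tf.constant` and `tf.cast` are assumed to resolve through the catch-all `__getattr__` rather than the declarations shown here:

    import tensorflow as tf

    def relu_sum(x: tf.Tensor, y: tf.Tensor) -> tf.Tensor:
        total = x + y          # Tensor.__add__ -> Tensor (no dtype/shape tracking)
        mask = total > 0.0     # Tensor.__gt__ -> Tensor, not bool
        # `if mask:` is discouraged: Tensor.__bool__ is typed NoReturn.
        return total * tf.cast(mask, total.dtype)

    v = tf.Variable(tf.constant([1.0, -2.0, 3.0]))   # Variable is typed as a Tensor subclass
    print(relu_sum(v, tf.constant([0.5, 0.5, 0.5])).numpy())
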
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/core/framework/variable_pb2.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/core/framework/variable_pb2.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/dtypes.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/dtypes.pyi new file mode 100644 index 00000000..0ef3c1d2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/dtypes.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete +from abc import ABCMeta +from builtins import bool as _bool +from typing import Any + +import numpy as np +from tensorflow import _DTypeLike + +class _DTypeMeta(ABCMeta): ... + +class DType(metaclass=_DTypeMeta): + @property + def name(self) -> str: ... + @property + def as_numpy_dtype(self) -> type[np.number[Any]]: ... + @property + def is_numpy_compatible(self) -> _bool: ... + @property + def is_bool(self) -> _bool: ... + @property + def is_floating(self) -> _bool: ... + @property + def is_integer(self) -> _bool: ... + @property + def is_quantized(self) -> _bool: ... + @property + def is_unsigned(self) -> _bool: ... + def __getattr__(self, name: str) -> Incomplete: ... + +bool: DType +complex128: DType +complex64: DType +bfloat16: DType +float16: DType +half: DType +float32: DType +float64: DType +double: DType +int8: DType +int16: DType +int32: DType +int64: DType +uint8: DType +uint16: DType +uint32: DType +uint64: DType +qint8: DType +qint16: DType +qint32: DType +quint8: DType +quint16: DType +string: DType + +def as_dtype(type_value: _DTypeLike) -> DType: ... +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/math.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/math.pyi new file mode 100644 index 00000000..66fc425d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/math.pyi @@ -0,0 +1,282 @@ +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import TypeVar, overload +from typing_extensions import TypeAlias + +from tensorflow import IndexedSlices, RaggedTensor, Tensor, _DTypeLike, _ShapeLike, _TensorCompatible +from tensorflow.sparse import SparseTensor + +_TensorCompatibleT = TypeVar("_TensorCompatibleT", bound=_TensorCompatible) +_SparseTensorCompatible: TypeAlias = _TensorCompatible | SparseTensor + +# Most operations support RaggedTensor. Documentation for them is here, +# https://www.tensorflow.org/api_docs/python/tf/ragged. +# Most operations do not support SparseTensor. Operations often don't document +# whether they support SparseTensor and it is best to test them manually. Typically +# if an operation outputs non-zero value for a zero input, it will not support +# SparseTensors. Binary operations with ragged tensors usually only work +# if both operands are ragged. +@overload +def abs(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def abs(x: SparseTensor, name: str | None = None) -> SparseTensor: ... +@overload +def abs(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... 
+@overload +def sin(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def sin(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def cos(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def cos(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def exp(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def exp(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def sinh(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def sinh(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def cosh(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def cosh(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def tanh(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def tanh(x: SparseTensor, name: str | None = None) -> SparseTensor: ... +@overload +def tanh(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def expm1(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def expm1(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def log(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def log(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def log1p(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def log1p(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def negative(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def negative(x: SparseTensor, name: str | None = None) -> SparseTensor: ... +@overload +def negative(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def sigmoid(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def sigmoid(x: SparseTensor, name: str | None = None) -> SparseTensor: ... +@overload +def add(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def add(x: RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def add_n(inputs: Iterable[_TensorCompatible | IndexedSlices], name: str | None = None) -> Tensor: ... +@overload +def add_n(inputs: Iterable[RaggedTensor], name: str | None = None) -> RaggedTensor: ... +@overload +def subtract(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def subtract(x: _TensorCompatible | RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def subtract( + x: _TensorCompatible | RaggedTensor, y: _TensorCompatible | RaggedTensor, name: str | None = None +) -> Tensor | RaggedTensor: ... +@overload +def multiply(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def multiply(x: RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def multiply_no_nan(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def multiply_no_nan(x: RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def divide(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def divide(x: RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... 
+@overload +def divide_no_nan(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def divide_no_nan(x: RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def floormod(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def floormod(x: RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def ceil(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def ceil(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def floor(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def floor(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... + +# Uses isinstance on list/tuple so other Sequence types are not supported. The TypeVar is to +# behave covariantly. +def accumulate_n( + inputs: list[_TensorCompatibleT] | tuple[_TensorCompatibleT, ...], + shape: _ShapeLike | None = None, + tensor_dtype: _DTypeLike | None = None, + name: str | None = None, +) -> Tensor: ... +@overload +def pow(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def pow(x: RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def reciprocal(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def reciprocal(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def is_nan(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def is_nan(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def minimum(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def minimum(x: RaggedTensor, y: _TensorCompatible | RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def minimum(x: _TensorCompatible | RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def maximum(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def maximum(x: RaggedTensor, y: _TensorCompatible | RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def maximum(x: _TensorCompatible | RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def logical_not(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def logical_not(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def logical_and(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def logical_and(x: RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def logical_or(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def logical_or(x: RaggedTensor, y: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def logical_xor(x: _TensorCompatible, y: _TensorCompatible, name: str | None = "LogicalXor") -> Tensor: ... +@overload +def logical_xor(x: RaggedTensor, y: RaggedTensor, name: str | None = "LogicalXor") -> RaggedTensor: ... +@overload +def equal(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def equal(x: RaggedTensor, y: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... +@overload +def not_equal(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... 
+@overload +def not_equal(x: RaggedTensor, y: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... +@overload +def greater(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def greater(x: RaggedTensor, y: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... +@overload +def greater_equal(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def greater_equal(x: RaggedTensor, y: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... +@overload +def less(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def less(x: RaggedTensor, y: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... +@overload +def less_equal(x: _TensorCompatible, y: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def less_equal(x: RaggedTensor, y: RaggedTensor | float, name: str | None = None) -> RaggedTensor: ... +def segment_sum(data: _TensorCompatible, segment_ids: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def sign(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def sign(x: SparseTensor, name: str | None = None) -> SparseTensor: ... +@overload +def sign(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def sqrt(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def sqrt(x: SparseTensor, name: str | None = None) -> SparseTensor: ... +@overload +def sqrt(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def rsqrt(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def rsqrt(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def square(x: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def square(x: SparseTensor, name: str | None = None) -> SparseTensor: ... +@overload +def square(x: RaggedTensor, name: str | None = None) -> RaggedTensor: ... +@overload +def softplus(features: _TensorCompatible, name: str | None = None) -> Tensor: ... +@overload +def softplus(features: RaggedTensor, name: str | None = None) -> RaggedTensor: ... + +# Depending on the method axis is either a rank 0 tensor or a rank 0/1 tensor. +def reduce_mean( + input_tensor: _TensorCompatible | RaggedTensor, + axis: _TensorCompatible | None = None, + keepdims: bool = False, + name: str | None = None, +) -> Tensor: ... +def reduce_sum( + input_tensor: _TensorCompatible | RaggedTensor, + axis: _TensorCompatible | None = None, + keepdims: bool = False, + name: str | None = None, +) -> Tensor: ... +def reduce_max( + input_tensor: _TensorCompatible | RaggedTensor, + axis: _TensorCompatible | None = None, + keepdims: bool = False, + name: str | None = None, +) -> Tensor: ... +def reduce_min( + input_tensor: _TensorCompatible | RaggedTensor, + axis: _TensorCompatible | None = None, + keepdims: bool = False, + name: str | None = None, +) -> Tensor: ... +def reduce_prod( + input_tensor: _TensorCompatible | RaggedTensor, + axis: _TensorCompatible | None = None, + keepdims: bool = False, + name: str | None = None, +) -> Tensor: ... +def reduce_std( + input_tensor: _TensorCompatible | RaggedTensor, + axis: _TensorCompatible | None = None, + keepdims: bool = False, + name: str | None = None, +) -> Tensor: ... +def argmax( + input: _TensorCompatible, axis: _TensorCompatible | None = None, output_type: _DTypeLike = ..., name: str | None = None +) -> Tensor: ... 
+def argmin( + input: _TensorCompatible, axis: _TensorCompatible | None = None, output_type: _DTypeLike = ..., name: str | None = None +) -> Tensor: ... + +# Only for bool tensors. +def reduce_any( + input_tensor: _TensorCompatible | RaggedTensor, + axis: _TensorCompatible | None = None, + keepdims: bool = False, + name: str | None = None, +) -> Tensor: ... +def reduce_all( + input_tensor: _TensorCompatible | RaggedTensor, + axis: _TensorCompatible | None = None, + keepdims: bool = False, + name: str | None = None, +) -> Tensor: ... +def count_nonzero( + input: _SparseTensorCompatible, + axis: _TensorCompatible | None = None, + keepdims: bool | None = None, + dtype: _DTypeLike = ..., + name: str | None = None, +) -> Tensor: ... +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/sparse.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/sparse.pyi new file mode 100644 index 00000000..625919c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tensorflow/tensorflow/sparse.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete +from abc import ABCMeta +from typing_extensions import TypeAlias + +from tensorflow import Tensor, TensorShape, _TensorCompatible +from tensorflow.dtypes import DType + +_SparseTensorCompatible: TypeAlias = _TensorCompatible | SparseTensor + +class SparseTensor(metaclass=ABCMeta): + @property + def indices(self) -> Tensor: ... + @property + def values(self) -> Tensor: ... + @property + def dense_shape(self) -> Tensor: ... + @property + def shape(self) -> TensorShape: ... + @property + def dtype(self) -> DType: ... + name: str + def __init__(self, indices: _TensorCompatible, values: _TensorCompatible, dense_shape: _TensorCompatible) -> None: ... + def get_shape(self) -> TensorShape: ... + # Many arithmetic operations are not directly supported. Some have alternatives like tf.sparse.add instead of +. + def __div__(self, y: _SparseTensorCompatible) -> SparseTensor: ... + def __truediv__(self, y: _SparseTensorCompatible) -> SparseTensor: ... + def __mul__(self, y: _SparseTensorCompatible) -> SparseTensor: ... + def __getattr__(self, name: str) -> Incomplete: ... + +def __getattr__(name: str) -> Incomplete: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/termcolor/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/termcolor/METADATA.toml new file mode 100644 index 00000000..cfdea080 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/termcolor/METADATA.toml @@ -0,0 +1,2 @@ +version = "1.1.*" +obsolete_since = "2.0.0" # Released on 2022-09-11 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/termcolor/termcolor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/termcolor/termcolor.pyi new file mode 100644 index 00000000..71350db5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/termcolor/termcolor.pyi @@ -0,0 +1,14 @@ +from collections.abc import Iterable +from typing import Any + +__ALL__: list[str] +VERSION: tuple[int, ...] +ATTRIBUTES: dict[str, int] +COLORS: dict[str, int] +HIGHLIGHTS: dict[str, int] +RESET: str + +def colored(text: str, color: str | None = ..., on_color: str | None = ..., attrs: Iterable[str] | None = ...) -> str: ... 
+def cprint( + text: str, color: str | None = ..., on_color: str | None = ..., attrs: Iterable[str] | None = ..., **kwargs: Any +) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/METADATA.toml new file mode 100644 index 00000000..5c7ed21e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/METADATA.toml @@ -0,0 +1 @@ +version = "0.10.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/__init__.pyi new file mode 100644 index 00000000..61bf3485 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/__init__.pyi @@ -0,0 +1,18 @@ +from . import decoder as decoder, encoder as encoder +from .decoder import ( + TomlDecodeError as TomlDecodeError, + TomlDecoder as TomlDecoder, + TomlPreserveCommentDecoder as TomlPreserveCommentDecoder, + load as load, + loads as loads, +) +from .encoder import ( + TomlArraySeparatorEncoder as TomlArraySeparatorEncoder, + TomlEncoder as TomlEncoder, + TomlNumpyEncoder as TomlNumpyEncoder, + TomlPathlibEncoder as TomlPathlibEncoder, + TomlPreserveCommentEncoder as TomlPreserveCommentEncoder, + TomlPreserveInlineDictEncoder as TomlPreserveInlineDictEncoder, + dump as dump, + dumps as dumps, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/decoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/decoder.pyi new file mode 100644 index 00000000..a492ac9e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/decoder.pyi @@ -0,0 +1,70 @@ +from _typeshed import SupportsRead +from collections.abc import Callable, MutableMapping +from pathlib import PurePath +from re import Pattern +from typing import Any, Generic, TypeVar, overload +from typing_extensions import TypeAlias + +_MutableMappingT = TypeVar("_MutableMappingT", bound=MutableMapping[str, Any]) +_PathLike: TypeAlias = str | bytes | PurePath + +FNFError = FileNotFoundError +TIME_RE: Pattern[str] + +class TomlDecodeError(ValueError): + msg: str + doc: str + pos: int + lineno: int + colno: int + def __init__(self, msg: str, doc: str, pos: int) -> None: ... + +class CommentValue: + val: Any + comment: str + def __init__(self, val: Any, comment: str, beginline: bool, _dict: type[MutableMapping[str, Any]]) -> None: ... + def __getitem__(self, key: Any) -> Any: ... + def __setitem__(self, key: Any, value: Any) -> None: ... + def dump(self, dump_value_func: Callable[[Any], str]) -> str: ... + +@overload +def load( + f: _PathLike | list[Any] | SupportsRead[str], # list[_PathLike] is invariance + _dict: type[_MutableMappingT], + decoder: TomlDecoder[_MutableMappingT] | None = ..., +) -> _MutableMappingT: ... +@overload +def load( + f: _PathLike | list[Any] | SupportsRead[str], # list[_PathLike] is invariance + _dict: type[dict[str, Any]] = ..., + decoder: TomlDecoder[dict[str, Any]] | None = ..., +) -> dict[str, Any]: ... +@overload +def loads(s: str, _dict: type[_MutableMappingT], decoder: TomlDecoder[_MutableMappingT] | None = ...) -> _MutableMappingT: ... +@overload +def loads(s: str, _dict: type[dict[str, Any]] = ..., decoder: TomlDecoder[dict[str, Any]] | None = ...) -> dict[str, Any]: ... 
+ +class InlineTableDict: ... + +class TomlDecoder(Generic[_MutableMappingT]): + _dict: type[_MutableMappingT] + @overload + def __init__(self, _dict: type[_MutableMappingT]) -> None: ... + @overload + def __init__(self: TomlDecoder[dict[str, Any]], _dict: type[dict[str, Any]] = ...) -> None: ... + def get_empty_table(self) -> _MutableMappingT: ... + def get_empty_inline_table(self) -> InlineTableDict: ... # incomplete python/typing#213 + def load_inline_object( + self, line: str, currentlevel: _MutableMappingT, multikey: bool = ..., multibackslash: bool = ... + ) -> None: ... + def load_line( + self, line: str, currentlevel: _MutableMappingT, multikey: bool | None, multibackslash: bool + ) -> tuple[bool | None, str, bool] | None: ... + def load_value(self, v: str, strictly_valid: bool = ...) -> tuple[Any, str]: ... + def bounded_string(self, s: str) -> bool: ... + def load_array(self, a: str) -> list[Any]: ... + def preserve_comment(self, line_no: int, key: str, comment: str, beginline: bool) -> None: ... + def embed_comments(self, idx: int, currentlevel: _MutableMappingT) -> None: ... + +class TomlPreserveCommentDecoder(TomlDecoder[_MutableMappingT]): + saved_comments: dict[int, tuple[str, str, bool]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/encoder.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/encoder.pyi new file mode 100644 index 00000000..c8be27da --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/encoder.pyi @@ -0,0 +1,45 @@ +from _typeshed import SupportsWrite +from collections.abc import Callable, Iterable, Mapping, MutableMapping +from typing import Any, Generic, TypeVar, overload + +_MappingT = TypeVar("_MappingT", bound=Mapping[str, Any]) + +def dump(o: _MappingT, f: SupportsWrite[str], encoder: TomlEncoder[_MappingT] | None = ...) -> str: ... +def dumps(o: _MappingT, encoder: TomlEncoder[_MappingT] | None = ...) -> str: ... + +class TomlEncoder(Generic[_MappingT]): + _dict: type[_MappingT] + preserve: bool + dump_funcs: MutableMapping[type[Any], Callable[[Any], str]] + @overload + def __init__(self, _dict: type[_MappingT], preserve: bool = ...) -> None: ... + @overload + def __init__(self: TomlEncoder[dict[str, Any]], _dict: type[dict[str, Any]] = ..., preserve: bool = ...) -> None: ... + def get_empty_table(self) -> _MappingT: ... + def dump_list(self, v: Iterable[Any]) -> str: ... + def dump_inline_table(self, section: dict[str, Any] | Any) -> str: ... + def dump_value(self, v: Any) -> str: ... + def dump_sections(self, o: _MappingT, sup: str) -> tuple[str, _MappingT]: ... + +class TomlPreserveInlineDictEncoder(TomlEncoder[_MappingT]): + @overload + def __init__(self, _dict: type[_MappingT]) -> None: ... + @overload + def __init__(self: TomlPreserveInlineDictEncoder[dict[str, Any]], _dict: type[dict[str, Any]] = ...) -> None: ... + +class TomlArraySeparatorEncoder(TomlEncoder[_MappingT]): + separator: str + @overload + def __init__(self, _dict: type[_MappingT], preserve: bool = ..., separator: str = ...) -> None: ... + @overload + def __init__( + self: TomlArraySeparatorEncoder[dict[str, Any]], + _dict: type[dict[str, Any]] = ..., + preserve: bool = ..., + separator: str = ..., + ) -> None: ... + def dump_list(self, v: Iterable[Any]) -> str: ... + +class TomlNumpyEncoder(TomlEncoder[_MappingT]): ... +class TomlPreserveCommentEncoder(TomlEncoder[_MappingT]): ... +class TomlPathlibEncoder(TomlEncoder[_MappingT]): ... 
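For orientation, a minimal sketch of how the load/dump overloads stubbed above are exercised at runtime (it assumes the real toml package is installed; the TOML snippet and variable names are purely illustrative):

    from collections import OrderedDict

    import toml

    doc = 'title = "example"\n\n[owner]\nname = "anon"\n'

    # Default overload: loads() returns a plain dict[str, Any].
    data = toml.loads(doc)

    # Supplying _dict selects the _MutableMappingT overload, so the result
    # is typed as the mapping class that was passed in.
    ordered = toml.loads(doc, _dict=OrderedDict)

    # dumps() round-trips a mapping back to TOML text.
    print(toml.dumps(data))
    print(type(ordered))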
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/ordered.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/ordered.pyi new file mode 100644 index 00000000..1c2f223b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/ordered.pyi @@ -0,0 +1,11 @@ +from collections import OrderedDict +from typing import Any + +from .decoder import TomlDecoder +from .encoder import TomlEncoder + +class TomlOrderedDecoder(TomlDecoder[OrderedDict[str, Any]]): + def __init__(self) -> None: ... + +class TomlOrderedEncoder(TomlEncoder[OrderedDict[str, Any]]): + def __init__(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/tz.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/tz.pyi new file mode 100644 index 00000000..054226bf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toml/toml/tz.pyi @@ -0,0 +1,10 @@ +from datetime import datetime, timedelta, tzinfo +from typing import Any +from typing_extensions import Self + +class TomlTz(tzinfo): + def __init__(self, toml_offset: str) -> None: ... + def __deepcopy__(self, memo: Any) -> Self: ... + def tzname(self, dt: datetime | None) -> str: ... + def utcoffset(self, dt: datetime | None) -> timedelta: ... + def dst(self, dt: datetime | None) -> timedelta: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toposort/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toposort/METADATA.toml new file mode 100644 index 00000000..8b9cadc7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toposort/METADATA.toml @@ -0,0 +1 @@ +version = "1.9" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toposort/toposort.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toposort/toposort.pyi new file mode 100644 index 00000000..76af5313 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/toposort/toposort.pyi @@ -0,0 +1,17 @@ +from _typeshed import SupportsItems +from collections.abc import Iterable, Iterator +from typing import Any, Protocol, TypeVar + +_KT_co = TypeVar("_KT_co", covariant=True) +_VT_co = TypeVar("_VT_co", covariant=True) +_T = TypeVar("_T") + +class _SupportsItemsAndLen(SupportsItems[_KT_co, _VT_co], Protocol[_KT_co, _VT_co]): + def __len__(self) -> int: ... + +class CircularDependencyError(ValueError): + data: dict[Any, set[Any]] + def __init__(self, data: dict[Any, set[Any]]) -> None: ... + +def toposort(data: _SupportsItemsAndLen[_T, Iterable[_T]]) -> Iterator[set[_T]]: ... +def toposort_flatten(data: _SupportsItemsAndLen[_T, Iterable[_T]], sort: bool = ...) -> list[_T]: ... 
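As a point of reference, a small sketch of the toposort API described by these stubs (the dependency mapping is made up, and it assumes the toposort package itself is installed):

    from toposort import toposort, toposort_flatten

    # Map each node to the set of nodes it depends on.
    deps = {2: {11}, 9: {11, 8}, 10: {11, 3}, 11: {7, 5}, 8: {7, 3}}

    # toposort() yields sets whose members only depend on already-yielded nodes.
    for level in toposort(deps):
        print(sorted(level))

    # toposort_flatten() collapses those sets into a single ordered list.
    print(toposort_flatten(deps))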
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..b142ecf1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/@tests/stubtest_allowlist.txt @@ -0,0 +1,4 @@ +# Cannot import in stubtest +tqdm.__main__ +# disco-py fails to install through pip and is an archived project +tqdm.contrib.discord diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/METADATA.toml new file mode 100644 index 00000000..1940d7fa --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/METADATA.toml @@ -0,0 +1,5 @@ +version = "4.64.*" + +[tool.stubtest] +extras = ["slack", "telegram"] +stubtest_requirements = ["dask", "pandas", "rich", "tensorflow"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/__init__.pyi new file mode 100644 index 00000000..a48d6f41 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/__init__.pyi @@ -0,0 +1,41 @@ +from _typeshed import Incomplete + +from ._monitor import TMonitor as TMonitor, TqdmSynchronisationWarning as TqdmSynchronisationWarning +from ._tqdm_pandas import tqdm_pandas as tqdm_pandas +from .cli import main as main +from .gui import tqdm as tqdm_gui, trange as tgrange +from .notebook import tqdm_notebook as tqdm_notebook_cls +from .std import ( + TqdmDeprecationWarning as TqdmDeprecationWarning, + TqdmExperimentalWarning as TqdmExperimentalWarning, + TqdmKeyError as TqdmKeyError, + TqdmMonitorWarning as TqdmMonitorWarning, + TqdmTypeError as TqdmTypeError, + TqdmWarning as TqdmWarning, + tqdm as tqdm, + trange as trange, +) +from .version import __version__ as __version__ + +__all__ = [ + "tqdm", + "tqdm_gui", + "trange", + "tgrange", + "tqdm_pandas", + "tqdm_notebook", + "tnrange", + "main", + "TMonitor", + "TqdmTypeError", + "TqdmKeyError", + "TqdmWarning", + "TqdmDeprecationWarning", + "TqdmExperimentalWarning", + "TqdmMonitorWarning", + "TqdmSynchronisationWarning", + "__version__", +] + +def tqdm_notebook(*args, **kwargs) -> tqdm_notebook_cls[Incomplete]: ... +def tnrange(*args, **kwargs) -> tqdm_notebook_cls[int]: ... 
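For context, a brief sketch of the top-level tqdm API these stubs cover (assuming tqdm is installed; the workload is a placeholder):

    from tqdm import tqdm, trange

    # Wrapping any iterable yields its items while drawing a progress bar.
    total = 0
    for value in tqdm(range(10_000), desc="summing"):
        total += value

    # trange(n) is shorthand for tqdm(range(n)).
    for _ in trange(100, unit="step"):
        pass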
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_dist_ver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_dist_ver.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_main.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_main.pyi new file mode 100644 index 00000000..81525aa3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_main.pyi @@ -0,0 +1,4 @@ +from .cli import * + +# Names in __all__ with no definition: +# main diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_monitor.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_monitor.pyi new file mode 100644 index 00000000..bb8ee472 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_monitor.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from threading import Thread + +__all__ = ["TMonitor", "TqdmSynchronisationWarning"] + +class TqdmSynchronisationWarning(RuntimeWarning): ... + +class TMonitor(Thread): + daemon: bool + woken: int + tqdm_cls: type[Incomplete] + sleep_interval: float + was_killed: Incomplete + def __init__(self, tqdm_cls: type[Incomplete], sleep_interval: float) -> None: ... + def exit(self): ... + def get_instances(self): ... + def run(self) -> None: ... + def report(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm.pyi new file mode 100644 index 00000000..e2796836 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm.pyi @@ -0,0 +1,11 @@ +from .std import * +from .std import TqdmDeprecationWarning as TqdmDeprecationWarning + +# Names in __all__ with no definition: +# TqdmExperimentalWarning +# TqdmKeyError +# TqdmMonitorWarning +# TqdmTypeError +# TqdmWarning +# tqdm +# trange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm_gui.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm_gui.pyi new file mode 100644 index 00000000..4c489bd2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm_gui.pyi @@ -0,0 +1,7 @@ +from .gui import * + +# Names in __all__ with no definition: +# tgrange +# tqdm +# tqdm_gui +# trange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm_notebook.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm_notebook.pyi new file mode 100644 index 00000000..5c9a3612 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm_notebook.pyi @@ -0,0 +1,7 @@ +from .notebook import * + +# Names in __all__ with no definition: +# tnrange +# tqdm +# tqdm_notebook +# trange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm_pandas.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm_pandas.pyi new file mode 100644 index 00000000..9f484444 --- /dev/null +++ 
b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_tqdm_pandas.pyi @@ -0,0 +1,3 @@ +__all__ = ["tqdm_pandas"] + +def tqdm_pandas(tclass, **tqdm_kwargs) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_utils.pyi new file mode 100644 index 00000000..2d60c39a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/_utils.pyi @@ -0,0 +1,10 @@ +from .std import TqdmDeprecationWarning as TqdmDeprecationWarning +from .utils import ( + CUR_OS as CUR_OS, + IS_NIX as IS_NIX, + IS_WIN as IS_WIN, + RE_ANSI as RE_ANSI, + Comparable as Comparable, + FormatReplace as FormatReplace, + SimpleTextIOWrapper as SimpleTextIOWrapper, +) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/asyncio.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/asyncio.pyi new file mode 100644 index 00000000..488fadbf --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/asyncio.pyi @@ -0,0 +1,209 @@ +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Awaitable, Callable, Generator, Iterable, Iterator, Mapping +from typing import Generic, NoReturn, TypeVar, overload +from typing_extensions import Self + +from .std import tqdm as std_tqdm + +__all__ = ["tqdm_asyncio", "tarange", "tqdm", "trange"] + +_T = TypeVar("_T") + +class tqdm_asyncio(Generic[_T], std_tqdm[_T]): + iterable_awaitable: bool + iterable_next: Callable[[], _T | Awaitable[_T]] + iterable_iterator: Iterator[_T] + + def __aiter__(self) -> Self: ... + async def __anext__(self) -> Awaitable[_T]: ... + def send(self, *args, **kwargs): ... + @classmethod + def as_completed( + cls, + fs: Iterable[Awaitable[_T]], + *, + loop: bool | None = ..., + timeout: float | None = ..., + total: int | None = ..., + desc: str | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + ) -> Generator[Incomplete, Incomplete, None]: ... 
+ @classmethod + async def gather( + cls, + *fs: Awaitable[_T], + loop: bool | None = ..., + timeout: float | None = ..., + total: int | None = ..., + iterable: Iterable[_T] = ..., + desc: str | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + ): ... + @overload + def __init__( + self, + iterable: Iterable[_T], + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... + @overload + def __init__( + self: tqdm_asyncio[NoReturn], + iterable: None = ..., + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... 
+ +@overload +def tarange( + start: int, + stop: int, + step: int | None = ..., + *, + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., +) -> tqdm_asyncio[int]: ... +@overload +def tarange( + stop: int, + *, + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., +) -> tqdm_asyncio[int]: ... + +tqdm = tqdm_asyncio +trange = tarange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/auto.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/auto.pyi new file mode 100644 index 00000000..037ba245 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/auto.pyi @@ -0,0 +1,3 @@ +from .asyncio import tqdm as tqdm, trange as trange + +__all__ = ["tqdm", "trange"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/autonotebook.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/autonotebook.pyi new file mode 100644 index 00000000..2e610d53 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/autonotebook.pyi @@ -0,0 +1,3 @@ +from .std import tqdm as tqdm, trange as trange + +__all__ = ["tqdm", "trange"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/cli.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/cli.pyi new file mode 100644 index 00000000..742ea294 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/cli.pyi @@ -0,0 +1,5 @@ +from collections.abc import Sequence + +__all__ = ["main"] + +def main(fp=..., argv: Sequence[str] | None = ...) -> None: ... 
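For reference, a minimal sketch of the tqdm.asyncio interface stubbed above, where gather() mirrors asyncio.gather() with a progress bar (the coroutine is illustrative):

    import asyncio

    from tqdm.asyncio import tqdm_asyncio

    async def work(i: int) -> int:
        # Placeholder coroutine standing in for real async work.
        await asyncio.sleep(0.01)
        return i * i

    async def main() -> None:
        # gather() awaits the coroutines and advances the bar as each finishes.
        results = await tqdm_asyncio.gather(*(work(i) for i in range(50)))
        print(sum(results))

    asyncio.run(main())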
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/__init__.pyi new file mode 100644 index 00000000..fd7f0963 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/__init__.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from collections.abc import Callable, Generator + +from ..utils import ObjectWrapper + +__all__ = ["tenumerate", "tzip", "tmap"] + +class DummyTqdmFile(ObjectWrapper): + def __init__(self, wrapped) -> None: ... + def write(self, x, nolock: bool = ...) -> None: ... + def __del__(self) -> None: ... + +def tenumerate(iterable, start: int = ..., total: Incomplete | None = ..., tqdm_class: type[Incomplete] = ..., **tqdm_kwargs): ... +def tzip(iter1, *iter2plus, **tqdm_kwargs) -> Generator[Incomplete, None, None]: ... +def tmap(function: Callable[..., Incomplete], *sequences, **tqdm_kwargs) -> Generator[Incomplete, None, None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/bells.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/bells.pyi new file mode 100644 index 00000000..97e27c8a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/bells.pyi @@ -0,0 +1,3 @@ +from ..auto import tqdm as tqdm, trange as trange + +__all__ = ["tqdm", "trange"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/concurrent.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/concurrent.pyi new file mode 100644 index 00000000..40e070a5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/concurrent.pyi @@ -0,0 +1,4 @@ +__all__ = ["thread_map", "process_map"] + +def thread_map(fn, *iterables, **tqdm_kwargs): ... +def process_map(fn, *iterables, **tqdm_kwargs): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/discord.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/discord.pyi new file mode 100644 index 00000000..4f2702e3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/discord.pyi @@ -0,0 +1,95 @@ +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Iterable, Mapping +from typing import Generic, NoReturn, TypeVar, overload + +from ..auto import tqdm as tqdm_auto +from .utils_worker import MonoWorker + +__all__ = ["DiscordIO", "tqdm_discord", "tdrange", "tqdm", "trange"] + +class DiscordIO(MonoWorker): + text: Incomplete + message: Incomplete + def __init__(self, token, channel_id) -> None: ... + def write(self, s): ... 
+ +_T = TypeVar("_T") + +class tqdm_discord(Generic[_T], tqdm_auto[_T]): + dio: Incomplete + @overload + def __init__( + self, + iterable: Iterable[_T], + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... + @overload + def __init__( + self: tqdm_discord[NoReturn], + iterable: None = ..., + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... + def display( + self, + msg: str | None = ..., + pos: int | None = ..., + close: bool = ..., + bar_style: Incomplete = ..., + check_delay: bool = ..., + ) -> None: ... + def clear(self, *args, **kwargs) -> None: ... + +def tdrange(*args, **kwargs) -> tqdm_discord[int]: ... + +tqdm = tqdm_discord +trange = tdrange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/itertools.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/itertools.pyi new file mode 100644 index 00000000..991181ff --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/itertools.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete +from collections.abc import Generator, Iterable + +__all__ = ["product"] + +def product(*iterables: Iterable[Incomplete], **tqdm_kwargs) -> Generator[Incomplete, None, None]: ... 
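To ground the tqdm.contrib helpers above, a short sketch of typical use (assuming tqdm is installed; the inputs are arbitrary):

    from tqdm.contrib import tenumerate, tzip
    from tqdm.contrib.itertools import product

    xs = list(range(1_000))
    ys = list(range(1_000))

    # tenumerate() and tzip() mirror enumerate() and zip() with a progress bar.
    for i, x in tenumerate(xs):
        pass
    for x, y in tzip(xs, ys):
        pass

    # product() mirrors itertools.product() while tracking overall progress.
    for x, y in product(range(100), range(100)):
        pass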
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/logging.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/logging.pyi new file mode 100644 index 00000000..0bb9e0e4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/logging.pyi @@ -0,0 +1,21 @@ +import logging +from _typeshed import Incomplete +from collections.abc import Callable, Sequence +from contextlib import _GeneratorContextManager +from typing import Any, TypeVar, overload + +from ..std import tqdm as std_tqdm + +_TqdmT = TypeVar("_TqdmT", bound=std_tqdm[Any]) + +def logging_redirect_tqdm( + loggers: Sequence[logging.Logger] | None = ..., tqdm_class: type[std_tqdm[Any]] = ... +) -> _GeneratorContextManager[None]: ... + +# TODO type *args, **kwargs here more precisely +# The type ignore is because mypy complains that the second overload will never be matched +# (I'm not sure that's true) +@overload +def tqdm_logging_redirect(*args, tqdm_class: Callable[..., _TqdmT], **kwargs) -> _GeneratorContextManager[_TqdmT]: ... +@overload +def tqdm_logging_redirect(*args, **kwargs) -> _GeneratorContextManager[std_tqdm[Incomplete]]: ... # type: ignore[misc] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/slack.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/slack.pyi new file mode 100644 index 00000000..cf69b09e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/slack.pyi @@ -0,0 +1,93 @@ +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Iterable, Mapping +from typing import Generic, NoReturn, TypeVar, overload + +from ..auto import tqdm as tqdm_auto +from .utils_worker import MonoWorker + +__all__ = ["SlackIO", "tqdm_slack", "tsrange", "tqdm", "trange"] + +class SlackIO(MonoWorker): + client: Incomplete + text: Incomplete + message: Incomplete + def __init__(self, token, channel) -> None: ... + def write(self, s): ... + +_T = TypeVar("_T") + +class tqdm_slack(Generic[_T], tqdm_auto[_T]): + sio: Incomplete + @overload + def __init__( + self, + iterable: Iterable[_T], + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + token: str = ..., + channel: int = ..., + **kwargs, + ) -> None: ... 
+ @overload + def __init__( + self: tqdm_slack[NoReturn], + iterable: None = ..., + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + token: str = ..., + channel: int = ..., + **kwargs, + ) -> None: ... + def display(self, *, msg: str | None = ..., pos: int | None = ..., close: bool = ..., bar_style: Incomplete = ..., check_delay: bool = ...) -> None: ... # type: ignore[override] + def clear(self, *args, **kwargs) -> None: ... + +def tsrange(*args, **kwargs) -> tqdm_slack[int]: ... + +tqdm = tqdm_slack +trange = tsrange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/telegram.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/telegram.pyi new file mode 100644 index 00000000..fa77d2db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/telegram.pyi @@ -0,0 +1,99 @@ +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Iterable, Mapping +from typing import Generic, NoReturn, TypeVar, overload + +from ..auto import tqdm as tqdm_auto +from .utils_worker import MonoWorker + +__all__ = ["TelegramIO", "tqdm_telegram", "ttgrange", "tqdm", "trange"] + +class TelegramIO(MonoWorker): + API: str + token: Incomplete + chat_id: Incomplete + session: Incomplete + text: Incomplete + def __init__(self, token, chat_id) -> None: ... + @property + def message_id(self): ... + def write(self, s: str) -> Incomplete | None: ... + def delete(self): ... + +_T = TypeVar("_T") + +class tqdm_telegram(Generic[_T], tqdm_auto[_T]): + tgio: Incomplete + @overload + def __init__( + self, + iterable: Iterable[_T], + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + token: str = ..., + chat_id: str = ..., + **kwargs, + ) -> None: ... 
+ @overload + def __init__( + self: tqdm_telegram[NoReturn], + iterable: None = ..., + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + token: str = ..., + chat_id: str = ..., + **kwargs, + ) -> None: ... + def display(self, *, msg: str | None = ..., pos: int | None = ..., close: bool = ..., bar_style: Incomplete = ..., check_delay: bool = ...) -> None: ... # type: ignore[override] + def clear(self, *args, **kwargs) -> None: ... + def close(self) -> None: ... + +def ttgrange(*args, **kwargs) -> tqdm_telegram[int]: ... + +tqdm = tqdm_telegram +trange = ttgrange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/utils_worker.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/utils_worker.pyi new file mode 100644 index 00000000..bff82621 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/contrib/utils_worker.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete +from collections import deque +from collections.abc import Callable +from concurrent.futures import Future, ThreadPoolExecutor +from typing import TypeVar +from typing_extensions import ParamSpec + +__all__ = ["MonoWorker"] + +_P = ParamSpec("_P") +_R = TypeVar("_R") + +class MonoWorker: + pool: ThreadPoolExecutor + futures: deque[Future[Incomplete]] + def submit(self, func: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> Future[_R]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/dask.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/dask.pyi new file mode 100644 index 00000000..5e55eadb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/dask.pyi @@ -0,0 +1,29 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from typing import ClassVar +from typing_extensions import Self + +__all__ = ["TqdmCallback"] + +# dask.callbacks.Callback +class _Callback: + active: ClassVar[set[tuple[Callable[..., Incomplete] | None, ...]]] + def __init__( + self, + start: Incomplete | None, + start_state: Incomplete | None, + pretask: Incomplete | None, + posttask: Incomplete | None, + finish: Incomplete | None, + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args) -> None: ... + def register(self) -> None: ... + def unregister(self) -> None: ... + +class TqdmCallback(_Callback): + tqdm_class: type[Incomplete] + def __init__( + self, start: Incomplete | None = ..., pretask: Incomplete | None = ..., tqdm_class: type[Incomplete] = ..., **tqdm_kwargs + ) -> None: ... + def display(self) -> None: ... 
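A small sketch of the tqdm.contrib.logging helper stubbed earlier in this group, following its documented pattern (the logger name and messages are illustrative):

    import logging

    from tqdm import trange
    from tqdm.contrib.logging import logging_redirect_tqdm

    LOG = logging.getLogger(__name__)
    logging.basicConfig(level=logging.INFO)

    # While the context manager is active, log records are routed through
    # tqdm.write() so they do not mangle the progress bar.
    with logging_redirect_tqdm():
        for i in trange(9):
            if i == 4:
                LOG.info("halfway there")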
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/gui.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/gui.pyi new file mode 100644 index 00000000..a77cc976 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/gui.pyi @@ -0,0 +1,92 @@ +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Iterable, Mapping +from typing import Generic, NoReturn, TypeVar, overload + +from .std import tqdm as std_tqdm + +__all__ = ["tqdm_gui", "tgrange", "tqdm", "trange"] + +_T = TypeVar("_T") + +class tqdm_gui(Generic[_T], std_tqdm[_T]): + mpl: Incomplete + plt: Incomplete + toolbar: Incomplete + mininterval: Incomplete + xdata: Incomplete + ydata: Incomplete + zdata: Incomplete + hspan: Incomplete + wasion: Incomplete + ax: Incomplete + disable: bool + def close(self) -> None: ... + def clear(self, *_, **__) -> None: ... + def display(self, *_, **__) -> None: ... + @overload + def __init__( + self, + iterable: Iterable[_T], + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... + @overload + def __init__( + self: tqdm_gui[NoReturn], + iterable: None = ..., + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... + +def tgrange(*args, **kwargs) -> tqdm_gui[int]: ... + +tqdm = tqdm_gui +trange = tgrange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/keras.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/keras.pyi new file mode 100644 index 00000000..75e86e92 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/keras.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete + +__all__ = ["TqdmCallback"] + +# keras.callbacks.Callback +class _Callback: + validation_data: Incomplete | None + model: Incomplete | None + params: Incomplete + def __init__(self) -> None: ... 
+ def set_params(self, params) -> None: ... + def set_model(self, model) -> None: ... + def on_batch_begin(self, batch, logs: Incomplete | None = ...) -> None: ... + def on_batch_end(self, batch, logs: Incomplete | None = ...) -> None: ... + def on_epoch_begin(self, epoch, logs: Incomplete | None = ...) -> None: ... + def on_epoch_end(self, epoch, logs: Incomplete | None = ...) -> None: ... + def on_train_batch_begin(self, batch, logs: Incomplete | None = ...) -> None: ... + def on_train_batch_end(self, batch, logs: Incomplete | None = ...) -> None: ... + def on_test_batch_begin(self, batch, logs: Incomplete | None = ...) -> None: ... + def on_test_batch_end(self, batch, logs: Incomplete | None = ...) -> None: ... + def on_predict_batch_begin(self, batch, logs: Incomplete | None = ...) -> None: ... + def on_predict_batch_end(self, batch, logs: Incomplete | None = ...) -> None: ... + def on_train_begin(self, logs: Incomplete | None = ...) -> None: ... + def on_train_end(self, logs: Incomplete | None = ...) -> None: ... + def on_test_begin(self, logs: Incomplete | None = ...) -> None: ... + def on_test_end(self, logs: Incomplete | None = ...) -> None: ... + def on_predict_begin(self, logs: Incomplete | None = ...) -> None: ... + def on_predict_end(self, logs: Incomplete | None = ...) -> None: ... + +class TqdmCallback(_Callback): + @staticmethod + def bar2callback(bar, pop: Incomplete | None = ..., delta=...): ... + tqdm_class: Incomplete + epoch_bar: Incomplete + on_epoch_end: Incomplete + batches: Incomplete + verbose: Incomplete + batch_bar: Incomplete + on_batch_end: Incomplete + def __init__( + self, + epochs: Incomplete | None = ..., + data_size: Incomplete | None = ..., + batch_size: Incomplete | None = ..., + verbose: int = ..., + tqdm_class=..., + **tqdm_kwargs, + ) -> None: ... + def on_train_begin(self, *_, **__) -> None: ... + def on_epoch_begin(self, epoch, *_, **__) -> None: ... + def on_train_end(self, *_, **__) -> None: ... + def display(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/notebook.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/notebook.pyi new file mode 100644 index 00000000..dbd7855b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/notebook.pyi @@ -0,0 +1,101 @@ +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Iterable, Iterator, Mapping +from typing import Generic, NoReturn, TypeVar, overload + +from .std import tqdm as std_tqdm, trange as trange + +__all__ = ["tqdm_notebook", "tnrange", "tqdm", "trange"] + +_T = TypeVar("_T") + +class tqdm_notebook(Generic[_T], std_tqdm[_T]): + @staticmethod + def status_printer( + _: SupportsWrite[str] | None, total: float | None = ..., desc: str | None = ..., ncols: int | None = ... + ): ... + displayed: bool + def display( + self, + msg: str | None = ..., + pos: int | None = ..., + close: bool = ..., + bar_style: str | None = ..., + check_delay: bool = ..., + ) -> None: ... + @property + def colour(self): ... + @colour.setter + def colour(self, bar_color: str) -> None: ... 
+ disp: Incomplete + ncols: Incomplete + container: Incomplete + @overload + def __init__( + self, + iterable: Iterable[_T], + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + display: bool = ..., + **kwargs, + ) -> None: ... + @overload + def __init__( + self: tqdm_notebook[NoReturn], + iterable: None = ..., + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + display: bool = ..., + **kwargs, + ) -> None: ... + def __iter__(self) -> Iterator[_T]: ... + def update(self, n: int = ...): ... # type: ignore[override] + def close(self) -> None: ... + def clear(self, *_, **__) -> None: ... + def reset(self, total: float | None = ...): ... + +tqdm = tqdm_notebook +tnrange = trange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/rich.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/rich.pyi new file mode 100644 index 00000000..2a81cc21 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/rich.pyi @@ -0,0 +1,107 @@ +from _typeshed import Incomplete, SupportsWrite +from abc import ABC, abstractmethod +from collections.abc import Iterable, Mapping +from typing import Generic, NoReturn, TypeVar, overload + +from .std import tqdm as std_tqdm + +__all__ = ["tqdm_rich", "trrange", "tqdm", "trange"] + +# Actually rich.progress.ProgressColumn +class _ProgressColumn(ABC): + max_refresh: float | None + def __init__(self, table_column: Incomplete | None = ...) -> None: ... + def get_table_column(self) -> Incomplete: ... + def __call__(self, task: Incomplete) -> Incomplete: ... + @abstractmethod + def render(self, task: Incomplete) -> Incomplete: ... + +class FractionColumn(_ProgressColumn): + unit_scale: bool + unit_divisor: int + + def __init__(self, unit_scale: bool = ..., unit_divisor: int = ...) -> None: ... + def render(self, task): ... + +class RateColumn(_ProgressColumn): + unit: str + unit_scale: bool + unit_divisor: int + + def __init__(self, unit: str = ..., unit_scale: bool = ..., unit_divisor: int = ...) -> None: ... + def render(self, task): ... 
+ +_T = TypeVar("_T") + +class tqdm_rich(Generic[_T], std_tqdm[_T]): + def close(self) -> None: ... + def clear(self, *_, **__) -> None: ... + def display(self, *_, **__) -> None: ... + def reset(self, total: Incomplete | None = ...) -> None: ... + @overload + def __init__( + self, + iterable: Iterable[_T], + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... + @overload + def __init__( + self: tqdm_rich[NoReturn], + iterable: None = ..., + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... + +def trrange(*args, **kwargs) -> tqdm_rich[int]: ... + +tqdm = tqdm_rich +trange = trrange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/std.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/std.pyi new file mode 100644 index 00000000..a5dc22b3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/std.pyi @@ -0,0 +1,287 @@ +import contextlib +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping +from typing import Any, ClassVar, Generic, NoReturn, TypeVar, overload +from typing_extensions import Literal, Self + +from ._monitor import TMonitor +from .utils import Comparable + +__all__ = [ + "tqdm", + "trange", + "TqdmTypeError", + "TqdmKeyError", + "TqdmWarning", + "TqdmExperimentalWarning", + "TqdmDeprecationWarning", + "TqdmMonitorWarning", +] + +class TqdmTypeError(TypeError): ... +class TqdmKeyError(KeyError): ... + +class TqdmWarning(Warning): + def __init__(self, msg, fp_write: Incomplete | None = ..., *a, **k) -> None: ... + +class TqdmExperimentalWarning(TqdmWarning, FutureWarning): ... +class TqdmDeprecationWarning(TqdmWarning, DeprecationWarning): ... +class TqdmMonitorWarning(TqdmWarning, RuntimeWarning): ... 
+ +_T = TypeVar("_T") + +class tqdm(Generic[_T], Iterable[_T], Comparable): + monitor_interval: ClassVar[int] + monitor: ClassVar[TMonitor | None] + + @staticmethod + def format_sizeof(num: float, suffix: str = ..., divisor: float = ...) -> str: ... + @staticmethod + def format_interval(t: float) -> str: ... + @staticmethod + def format_num(n: float) -> str: ... + @staticmethod + def status_printer(file: SupportsWrite[str]) -> Callable[[str], None]: ... + @staticmethod + def format_meter( + n: float, + total: float, + elapsed: float, + ncols: int | None = ..., + prefix: str | None = ..., + ascii: bool | str | None = ..., + unit: str | None = ..., + unit_scale: bool | float | None = ..., + rate: float | None = ..., + bar_format: str | None = ..., + postfix: str | Mapping[str, object] | None = ..., + unit_divisor: float | None = ..., + initial: float | None = ..., + colour: str | None = ..., + ) -> str: ... + @overload + def __init__( + self, + iterable: Iterable[_T], + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... + @overload + def __init__( + self: tqdm[NoReturn], + iterable: None = ..., + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + **kwargs, + ) -> None: ... + def __new__(cls, *_, **__) -> Self: ... + @classmethod + def write(cls, s: str, file: SupportsWrite[str] | None = ..., end: str = ..., nolock: bool = ...) -> None: ... + @classmethod + def external_write_mode( + cls, file: SupportsWrite[str] | None = ..., nolock: bool = ... + ) -> contextlib._GeneratorContextManager[None]: ... + @classmethod + def set_lock(cls, lock) -> None: ... + @classmethod + def get_lock(cls): ... 
+ @classmethod + def pandas( + cls, + *, + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + ) -> None: ... + + iterable: Incomplete + disable: Incomplete + pos: Incomplete + n: Incomplete + total: Incomplete + leave: Incomplete + desc: Incomplete + fp: Incomplete + ncols: Incomplete + nrows: Incomplete + mininterval: Incomplete + maxinterval: Incomplete + miniters: Incomplete + dynamic_miniters: Incomplete + ascii: Incomplete + unit: Incomplete + unit_scale: Incomplete + unit_divisor: Incomplete + initial: Incomplete + lock_args: Incomplete + delay: Incomplete + gui: Incomplete + dynamic_ncols: Incomplete + smoothing: Incomplete + bar_format: Incomplete + postfix: Incomplete + colour: Incomplete + last_print_n: Incomplete + sp: Incomplete + last_print_t: Incomplete + start_t: Incomplete + + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def __len__(self) -> int: ... + def __reversed__(self) -> Iterator[_T]: ... + def __contains__(self, item: object) -> bool: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: object, exc_value: object, traceback: object) -> None: ... + def __del__(self) -> None: ... + def __hash__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... + def update(self, n: float | None = ...) -> bool | None: ... + def close(self) -> None: ... + def clear(self, nolock: bool = ...) -> None: ... + def refresh( + self, nolock: bool = ..., lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ... + ) -> None: ... + def unpause(self) -> None: ... + def reset(self, total: float | None = ...) -> None: ... + def set_description(self, desc: str | None = ..., refresh: bool | None = ...) -> None: ... + def set_description_str(self, desc: str | None = ..., refresh: bool | None = ...) -> None: ... + def set_postfix(self, ordered_dict: Mapping[str, object] | None = ..., refresh: bool | None = ..., **kwargs) -> None: ... + def set_postfix_str(self, s: str = ..., refresh: bool = ...) -> None: ... + def moveto(self, n) -> None: ... + @property + def format_dict(self) -> MutableMapping[str, Any]: ... + def display(self, msg: str | None = ..., pos: int | None = ...) -> None: ... + @classmethod + def wrapattr( + cls, stream, method: Literal["read", "write"], total: float | None = ..., bytes: bool | None = ..., **tqdm_kwargs + ) -> contextlib._GeneratorContextManager[Incomplete]: ... 
+ +@overload +def trange( + start: int, + stop: int, + step: int | None = ..., + *, + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., +) -> tqdm[int]: ... +@overload +def trange( + stop: int, + *, + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., +) -> tqdm[int]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/tk.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/tk.pyi new file mode 100644 index 00000000..a89732e0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/tk.pyi @@ -0,0 +1,93 @@ +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Iterable, Mapping +from typing import Generic, NoReturn, TypeVar, overload + +from .std import tqdm as std_tqdm + +__all__ = ["tqdm_tk", "ttkrange", "tqdm", "trange"] + +_T = TypeVar("_T") + +class tqdm_tk(Generic[_T], std_tqdm[_T]): + @overload + def __init__( + self, + iterable: Iterable[_T], + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + grab=..., + tk_parent=..., + cancel_callback=..., + **kwargs, + ) -> None: ... 
+ @overload + def __init__( + self: tqdm_tk[NoReturn], + iterable: None = ..., + desc: str | None = ..., + total: float | None = ..., + leave: bool | None = ..., + file: SupportsWrite[str] | None = ..., + ncols: int | None = ..., + mininterval: float = ..., + maxinterval: float = ..., + miniters: float | None = ..., + ascii: bool | str | None = ..., + disable: bool | None = ..., + unit: str = ..., + unit_scale: bool | float = ..., + dynamic_ncols: bool = ..., + smoothing: float = ..., + bar_format: str | None = ..., + initial: float = ..., + position: int | None = ..., + postfix: Mapping[str, object] | str | None = ..., + unit_divisor: float = ..., + write_bytes: bool | None = ..., + lock_args: tuple[bool | None, float | None] | tuple[bool | None] | None = ..., + nrows: int | None = ..., + colour: str | None = ..., + delay: float | None = ..., + gui: bool = ..., + grab=..., + tk_parent=..., + cancel_callback=..., + **kwargs, + ) -> None: ... + disable: bool + def close(self) -> None: ... + def clear(self, *_, **__) -> None: ... + def display(self, *_, **__) -> None: ... + def set_description(self, desc: str | None = ..., refresh: bool | None = ...) -> None: ... + desc: Incomplete + def set_description_str(self, desc: str | None = ..., refresh: bool | None = ...) -> None: ... + def cancel(self) -> None: ... + def reset(self, total: Incomplete | None = ...) -> None: ... + +def ttkrange(*args, **kwargs) -> tqdm_tk[int]: ... + +tqdm = tqdm_tk +trange = ttkrange diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/utils.pyi new file mode 100644 index 00000000..3055987c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/utils.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete +from collections.abc import Callable +from re import Pattern +from typing import Protocol, TypeVar +from typing_extensions import ParamSpec + +CUR_OS: str +IS_WIN: bool +IS_NIX: bool +RE_ANSI: Pattern[str] + +class FormatReplace: + replace: str + format_called: int + def __init__(self, replace: str = ...) -> None: ... + def __format__(self, _) -> str: ... + +class _Has__Comparable(Protocol): + _comparable: Incomplete + +class Comparable: + _comparable: Incomplete + def __lt__(self, other: _Has__Comparable) -> bool: ... + def __le__(self, other: _Has__Comparable) -> bool: ... + def __eq__(self, other: _Has__Comparable) -> bool: ... # type: ignore[override] + def __ne__(self, other: _Has__Comparable) -> bool: ... # type: ignore[override] + def __gt__(self, other: _Has__Comparable) -> bool: ... + def __ge__(self, other: _Has__Comparable) -> bool: ... + +class ObjectWrapper: + def __getattr__(self, name: str): ... + def __setattr__(self, name: str, value) -> None: ... + def wrapper_getattr(self, name): ... + def wrapper_setattr(self, name, value): ... + def __init__(self, wrapped) -> None: ... + +class SimpleTextIOWrapper(ObjectWrapper): + def __init__(self, wrapped, encoding) -> None: ... + def write(self, s: str): ... + def __eq__(self, other: object) -> bool: ... + +_P = ParamSpec("_P") +_R = TypeVar("_R") + +class DisableOnWriteError(ObjectWrapper): + @staticmethod + def disable_on_exception(tqdm_instance, func: Callable[_P, _R]) -> Callable[_P, _R]: ... + def __init__(self, wrapped, tqdm_instance) -> None: ... + def __eq__(self, other: object) -> bool: ... 
+ +class CallbackIOWrapper(ObjectWrapper): + def __init__(self, callback: Callable[[int], object], stream, method: str = ...) -> None: ... + +def disp_len(data: str) -> int: ... +def disp_trim(data: str, length: int) -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/version.pyi new file mode 100644 index 00000000..bda5b5a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tqdm/tqdm/version.pyi @@ -0,0 +1 @@ +__version__: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..13f325bd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/@tests/stubtest_allowlist.txt @@ -0,0 +1,2 @@ +# No idea what this is, seems to be auto-generated +tree_sitter_languages.core.__test__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/@tests/stubtest_allowlist_darwin.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/@tests/stubtest_allowlist_darwin.txt new file mode 100644 index 00000000..caab3d65 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/@tests/stubtest_allowlist_darwin.txt @@ -0,0 +1,2 @@ +# stubtest/stubgen sees languages.so as a module +tree_sitter_languages.languages diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..caab3d65 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1,2 @@ +# stubtest/stubgen sees languages.so as a module +tree_sitter_languages.languages diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/METADATA.toml new file mode 100644 index 00000000..1de68d54 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/METADATA.toml @@ -0,0 +1,2 @@ +version = "1.5.*" +requires = ["types-tree-sitter"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/tree_sitter_languages/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/tree_sitter_languages/__init__.pyi new file mode 100644 index 00000000..59a08387 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/tree_sitter_languages/__init__.pyi @@ -0,0 +1,7 @@ +from .core import get_language as get_language, get_parser as get_parser + +__version__: str +__title__: str +__author__: str +__license__: str +__copyright__: str diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/tree_sitter_languages/core.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/tree_sitter_languages/core.pyi new file mode 100644 index 00000000..b59c21d3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter-languages/tree_sitter_languages/core.pyi @@ -0,0 +1,4 @@ +from tree_sitter import Language, Parser + +def get_language(language: str) -> Language: ... +def get_parser(language: str) -> Parser: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..45a56be7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/@tests/stubtest_allowlist.txt @@ -0,0 +1,39 @@ +# "self" argument is missing when stubtest inspects these methods +tree_sitter.Node.child_by_field_id +tree_sitter.Node.child_by_field_name +tree_sitter.Node.children_by_field_id +tree_sitter.Node.children_by_field_name +tree_sitter.Node.sexp +tree_sitter.Node.walk +tree_sitter.Parser.parse +tree_sitter.Parser.set_language +tree_sitter.Tree.edit +tree_sitter.Tree.get_changed_ranges +tree_sitter.Tree.walk +tree_sitter.TreeCursor.copy +tree_sitter.TreeCursor.current_field_name +tree_sitter.TreeCursor.goto_first_child +tree_sitter.TreeCursor.goto_next_sibling +tree_sitter.TreeCursor.goto_parent +tree_sitter.binding.Node.child_by_field_id +tree_sitter.binding.Node.child_by_field_name +tree_sitter.binding.Node.children_by_field_id +tree_sitter.binding.Node.children_by_field_name +tree_sitter.binding.Node.sexp +tree_sitter.binding.Node.walk +tree_sitter.binding.Parser.parse +tree_sitter.binding.Parser.set_language +tree_sitter.binding.Query.captures +tree_sitter.binding.Query.matches +tree_sitter.binding.Tree.edit +tree_sitter.binding.Tree.get_changed_ranges +tree_sitter.binding.Tree.walk +tree_sitter.binding.TreeCursor.copy +tree_sitter.binding.TreeCursor.current_field_name +tree_sitter.binding.TreeCursor.goto_first_child +tree_sitter.binding.TreeCursor.goto_next_sibling +tree_sitter.binding.TreeCursor.goto_parent + +# Runtime takes *args and **kwargs and ignores them. Passing arguments is most likely a mistake. 
+tree_sitter.Parser.__init__ +tree_sitter.binding.Parser.__init__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/METADATA.toml new file mode 100644 index 00000000..ae1fb69f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/METADATA.toml @@ -0,0 +1 @@ +version = "0.20.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/tree_sitter/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/tree_sitter/__init__.pyi new file mode 100644 index 00000000..d26b1762 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/tree_sitter/__init__.pyi @@ -0,0 +1,17 @@ +import ctypes +from _typeshed import StrPath +from collections.abc import Sequence + +# At runtime, Query and Range are available only in tree_sitter.binding +from tree_sitter.binding import Node as Node, Parser as Parser, Query, Tree as Tree, TreeCursor as TreeCursor + +class Language: + @staticmethod + def build_library(output_path: str, repo_paths: Sequence[StrPath]) -> bool: ... + name: str + lib: ctypes.CDLL + language_id: int + # library_path is passed into ctypes LoadLibrary + def __init__(self, library_path: str, name: str) -> None: ... + def field_id_for_name(self, name: str) -> int | None: ... + def query(self, source: str) -> Query: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/tree_sitter/binding.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/tree_sitter/binding.pyi new file mode 100644 index 00000000..f8d4d32d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tree-sitter/tree_sitter/binding.pyi @@ -0,0 +1,116 @@ +from typing import Any, ClassVar +from typing_extensions import final + +from tree_sitter import Language + +@final +class Node: + @property + def start_byte(self) -> int: ... + @property + def start_point(self) -> tuple[int, int]: ... + @property + def end_byte(self) -> int: ... + @property + def end_point(self) -> tuple[int, int]: ... + @property + def has_changes(self) -> bool: ... + @property + def has_error(self) -> bool: ... + @property + def id(self) -> int: ... + @property + def is_missing(self) -> bool: ... + @property + def is_named(self) -> bool: ... + @property + def child_count(self) -> int: ... + @property + def named_child_count(self) -> bool: ... + @property + def children(self) -> list[Node]: ... + @property + def named_children(self) -> list[Node]: ... + @property + def next_named_sibling(self) -> Node | None: ... + @property + def next_sibling(self) -> Node | None: ... + @property + def parent(self) -> Node | None: ... + @property + def prev_named_sibling(self) -> Node | None: ... + @property + def prev_sibling(self) -> Node | None: ... + @property + def text(self) -> bytes | Any: ... # can be None, but annoying to check + @property + def type(self) -> str: ... + def children_by_field_name(self, name: str) -> list[Node]: ... + def children_by_field_id(self, __id: int) -> list[Node]: ... + def field_name_for_child(self, __child_index: int) -> str: ... + def child_by_field_id(self, __id: int) -> Node | None: ... + def child_by_field_name(self, __name: str) -> Node | None: ... 
+ __hash__: ClassVar[None] # type: ignore[assignment] + def sexp(self) -> str: ... + def walk(self) -> TreeCursor: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + # There are __ge__, __gt__, __le__, __lt__ methods but they always return False + # + # >>> n + # + # >>> n >= "", n <= "", n >= 0, n <= 0, n >= (0,0), n <= (0,0) + # (False, False, False, False, False, False) + +@final +class Parser: + # At runtime, Parser(1, 2, 3) ignores the arguments, but that's most likely buggy code + def __init__(self) -> None: ... + def parse(self, source: bytes, old_tree: Tree | None = ..., keep_text: bool = ...) -> Tree: ... + def set_language(self, __language: Language) -> None: ... + +@final +class Query: + # start_point and end_point arguments don't seem to do anything + # TODO: sync with + # https://github.com/tree-sitter/py-tree-sitter/blob/d3016edac2c33ce647653d896fbfb435ac2a6245/tree_sitter/binding.c#L1304 + def captures(self, node: Node) -> list[tuple[Node, str]]: ... + +@final +class Range: + @property + def start_byte(self) -> int: ... + @property + def end_byte(self) -> int: ... + @property + def start_point(self) -> tuple[int, int]: ... + @property + def end_point(self) -> tuple[int, int]: ... + +@final +class Tree: + @property + def root_node(self) -> Node: ... + @property + def text(self) -> bytes | Any: ... # technically ReadableBuffer | Any + def edit( + self, + start_byte: int, + old_end_byte: int, + new_end_byte: int, + start_point: tuple[int, int], + old_end_point: tuple[int, int], + new_end_point: tuple[int, int], + ) -> None: ... + def get_changed_ranges(self, new_tree: Tree) -> list[Range]: ... + def walk(self) -> TreeCursor: ... + +@final +class TreeCursor: + @property + def node(self) -> Node: ... + def copy(self) -> TreeCursor: ... + def current_field_name(self) -> str | None: ... + def goto_first_child(self) -> bool: ... + def goto_next_sibling(self) -> bool: ... + def goto_parent(self) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/METADATA.toml new file mode 100644 index 00000000..38c94680 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/METADATA.toml @@ -0,0 +1 @@ +version = "3.2.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/__init__.pyi new file mode 100644 index 00000000..df541b9f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/__init__.pyi @@ -0,0 +1,7 @@ +from collections.abc import Sequence + +from ttkthemes.themed_style import ThemedStyle as ThemedStyle +from ttkthemes.themed_tk import ThemedTk as ThemedTk + +# actually a list, but shouldn't be modified +THEMES: Sequence[str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/_imgops.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/_imgops.pyi new file mode 100644 index 00000000..1c0b15f1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/_imgops.pyi @@ -0,0 +1,2 @@ +def shift_hue(image, hue): ... +def make_transparent(image): ... 
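
For orientation only (not part of the vendored data): a minimal sketch of how the `tree_sitter` / `tree_sitter_languages` surface stubbed above is typically exercised. It assumes both packages and their bundled "python" grammar are installed; the sample source string is made up for illustration.

```python
# Sketch, assuming tree_sitter and tree_sitter_languages are installed.
from tree_sitter_languages import get_parser  # declared in core.pyi above

parser = get_parser("python")                        # -> tree_sitter.Parser
tree = parser.parse(b"def f(x):\n    return x\n")    # Parser.parse takes bytes -> Tree

def dump(node, depth=0):
    # Node.type and Node.children are properties typed in binding.pyi above
    print("  " * depth + node.type)
    for child in node.children:
        dump(child, depth + 1)

dump(tree.root_node)

# TreeCursor walks the same tree without materialising child lists
cursor = tree.walk()
assert cursor.goto_first_child()  # returns bool, per the stub
```
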
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/_utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/_utils.pyi new file mode 100644 index 00000000..b60a742f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/_utils.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete + +def temporary_chdir(new_dir) -> None: ... +def get_file_directory(): ... +def get_temp_directory(): ... +def get_themes_directory(theme_name: Incomplete | None = ..., png: bool = ...): ... +def create_directory(directory): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/_widget.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/_widget.pyi new file mode 100644 index 00000000..96900d6c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/_widget.pyi @@ -0,0 +1,26 @@ +import _tkinter +from _typeshed import StrPath +from typing import ClassVar + +class ThemedWidget: + pixmap_themes: ClassVar[list[str]] + PACKAGES: ClassVar[dict[str, str]] + tk: _tkinter.TkappType + png_support: bool + def __init__(self, tk_interpreter, gif_override: bool = ...) -> None: ... + def set_theme(self, theme_name: str) -> None: ... + def get_themes(self) -> list[str]: ... + @property + def themes(self) -> list[str]: ... + @property + def current_theme(self) -> str: ... + def set_theme_advanced( + self, + theme_name: str, + brightness: float = ..., + saturation: float = ..., + hue: float = ..., + preserve_transparency: bool = ..., + output_dir: StrPath | None = ..., + advanced_name: str = ..., + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/themed_style.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/themed_style.pyi new file mode 100644 index 00000000..e70eb8a4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/themed_style.pyi @@ -0,0 +1,12 @@ +import tkinter +from tkinter import ttk + +from ._widget import ThemedWidget + +class ThemedStyle(ttk.Style, ThemedWidget): + def __init__( + self, master: tkinter.Misc | None = ..., *, theme: str | None = ..., gif_override: bool | None = ..., **kwargs + ) -> None: ... + # theme_use() can't return None (differs from ttk.Style) + def theme_use(self, theme_name: str | None = ...) -> str: ... # type: ignore[override] + def theme_names(self) -> list[str]: ... 
# type: ignore[override] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/themed_tk.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/themed_tk.pyi new file mode 100644 index 00000000..b39ea3c6 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ttkthemes/ttkthemes/themed_tk.pyi @@ -0,0 +1,30 @@ +import tkinter +from _typeshed import Incomplete + +from ._widget import ThemedWidget + +class ThemedTk(tkinter.Tk, ThemedWidget): + def __init__( + self, + # non-keyword-only args copied from tkinter.Tk + screenName: str | None = ..., + baseName: str | None = ..., + className: str = ..., + useTk: bool = ..., + sync: bool = ..., + use: str | None = ..., + *, + theme: str | None = ..., + # fonts argument does nothing + toplevel: bool | None = ..., + themebg: bool | None = ..., + background: bool | None = ..., # old alias for themebg + gif_override: bool = ..., + ) -> None: ... + def set_theme(self, theme_name, toplevel: bool | None = ..., themebg: bool | None = ...) -> None: ... + # TODO: currently no good way to say "use the same big list of kwargs as parent class but also add these" + def config(self, kw: Incomplete | None = ..., **kwargs): ... # type: ignore[override] + def cget(self, k): ... + def configure(self, kw: Incomplete | None = ..., **kwargs): ... # type: ignore[override] + def __getitem__(self, k): ... + def __setitem__(self, k, v) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/METADATA.toml new file mode 100644 index 00000000..97ceca8a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/METADATA.toml @@ -0,0 +1 @@ +version = "1.5.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/ast27.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/ast27.pyi new file mode 100644 index 00000000..3a4d1993 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/ast27.pyi @@ -0,0 +1,337 @@ +from _typeshed import ReadableBuffer +from collections.abc import Iterator +from typing import Any +from typing_extensions import TypeAlias + +class NodeVisitor: + def visit(self, node: AST) -> Any: ... + def generic_visit(self, node: AST) -> None: ... + +class NodeTransformer(NodeVisitor): + def generic_visit(self, node: AST) -> None: ... + +def parse(source: str | ReadableBuffer, filename: str | ReadableBuffer = ..., mode: str = ...) -> AST: ... +def copy_location(new_node: AST, old_node: AST) -> AST: ... +def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... +def fix_missing_locations(node: AST) -> AST: ... +def get_docstring(node: AST, clean: bool = ...) -> str | bytes | None: ... +def increment_lineno(node: AST, n: int = ...) -> AST: ... +def iter_child_nodes(node: AST) -> Iterator[AST]: ... +def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... 
+def literal_eval(node_or_string: str | AST) -> Any: ... +def walk(node: AST) -> Iterator[AST]: ... + +PyCF_ONLY_AST: int + +# ast classes + +_Identifier: TypeAlias = str + +class AST: + _attributes: tuple[str, ...] + _fields: tuple[str, ...] + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + +class mod(AST): ... + +class Module(mod): + body: list[stmt] + type_ignores: list[TypeIgnore] + +class Interactive(mod): + body: list[stmt] + +class Expression(mod): + body: expr + +class FunctionType(mod): + argtypes: list[expr] + returns: expr + +class Suite(mod): + body: list[stmt] + +class stmt(AST): + lineno: int + col_offset: int + +class FunctionDef(stmt): + name: _Identifier + args: arguments + body: list[stmt] + decorator_list: list[expr] + type_comment: str | None + +class ClassDef(stmt): + name: _Identifier + bases: list[expr] + body: list[stmt] + decorator_list: list[expr] + +class Return(stmt): + value: expr | None + +class Delete(stmt): + targets: list[expr] + +class Assign(stmt): + targets: list[expr] + value: expr + type_comment: str | None + +class AugAssign(stmt): + target: expr + op: operator + value: expr + +class Print(stmt): + dest: expr | None + values: list[expr] + nl: bool + +class For(stmt): + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + type_comment: str | None + +class While(stmt): + test: expr + body: list[stmt] + orelse: list[stmt] + +class If(stmt): + test: expr + body: list[stmt] + orelse: list[stmt] + +class With(stmt): + context_expr: expr + optional_vars: expr | None + body: list[stmt] + type_comment: str | None + +class Raise(stmt): + type: expr | None + inst: expr | None + tback: expr | None + +class TryExcept(stmt): + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + +class TryFinally(stmt): + body: list[stmt] + finalbody: list[stmt] + +class Assert(stmt): + test: expr + msg: expr | None + +class Import(stmt): + names: list[alias] + +class ImportFrom(stmt): + module: _Identifier | None + names: list[alias] + level: int | None + +class Exec(stmt): + body: expr + globals: expr | None + locals: expr | None + +class Global(stmt): + names: list[_Identifier] + +class Expr(stmt): + value: expr + +class Pass(stmt): ... +class Break(stmt): ... +class Continue(stmt): ... +class slice(AST): ... + +_Slice: TypeAlias = slice # this lets us type the variable named 'slice' below + +class Slice(slice): + lower: expr | None + upper: expr | None + step: expr | None + +class ExtSlice(slice): + dims: list[slice] + +class Index(slice): + value: expr + +class Ellipsis(slice): ... 
+ +class expr(AST): + lineno: int + col_offset: int + +class BoolOp(expr): + op: boolop + values: list[expr] + +class BinOp(expr): + left: expr + op: operator + right: expr + +class UnaryOp(expr): + op: unaryop + operand: expr + +class Lambda(expr): + args: arguments + body: expr + +class IfExp(expr): + test: expr + body: expr + orelse: expr + +class Dict(expr): + keys: list[expr] + values: list[expr] + +class Set(expr): + elts: list[expr] + +class ListComp(expr): + elt: expr + generators: list[comprehension] + +class SetComp(expr): + elt: expr + generators: list[comprehension] + +class DictComp(expr): + key: expr + value: expr + generators: list[comprehension] + +class GeneratorExp(expr): + elt: expr + generators: list[comprehension] + +class Yield(expr): + value: expr | None + +class Compare(expr): + left: expr + ops: list[cmpop] + comparators: list[expr] + +class Call(expr): + func: expr + args: list[expr] + keywords: list[keyword] + starargs: expr | None + kwargs: expr | None + +class Repr(expr): + value: expr + +class Num(expr): + n: int | float | complex + +class Str(expr): + s: str | bytes + kind: str + +class Attribute(expr): + value: expr + attr: _Identifier + ctx: expr_context + +class Subscript(expr): + value: expr + slice: _Slice + ctx: expr_context + +class Name(expr): + id: _Identifier + ctx: expr_context + +class List(expr): + elts: list[expr] + ctx: expr_context + +class Tuple(expr): + elts: list[expr] + ctx: expr_context + +class expr_context(AST): ... +class AugLoad(expr_context): ... +class AugStore(expr_context): ... +class Del(expr_context): ... +class Load(expr_context): ... +class Param(expr_context): ... +class Store(expr_context): ... +class boolop(AST): ... +class And(boolop): ... +class Or(boolop): ... +class operator(AST): ... +class Add(operator): ... +class BitAnd(operator): ... +class BitOr(operator): ... +class BitXor(operator): ... +class Div(operator): ... +class FloorDiv(operator): ... +class LShift(operator): ... +class Mod(operator): ... +class Mult(operator): ... +class Pow(operator): ... +class RShift(operator): ... +class Sub(operator): ... +class unaryop(AST): ... +class Invert(unaryop): ... +class Not(unaryop): ... +class UAdd(unaryop): ... +class USub(unaryop): ... +class cmpop(AST): ... +class Eq(cmpop): ... +class Gt(cmpop): ... +class GtE(cmpop): ... +class In(cmpop): ... +class Is(cmpop): ... +class IsNot(cmpop): ... +class Lt(cmpop): ... +class LtE(cmpop): ... +class NotEq(cmpop): ... +class NotIn(cmpop): ... 
+ +class comprehension(AST): + target: expr + iter: expr + ifs: list[expr] + +class ExceptHandler(AST): + type: expr | None + name: expr | None + body: list[stmt] + lineno: int + col_offset: int + +class arguments(AST): + args: list[expr] + vararg: _Identifier | None + kwarg: _Identifier | None + defaults: list[expr] + type_comments: list[str | None] + +class keyword(AST): + arg: _Identifier + value: expr + +class alias(AST): + name: _Identifier + asname: _Identifier | None + +class TypeIgnore(AST): + lineno: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/ast3.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/ast3.pyi new file mode 100644 index 00000000..e4b5d0e1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/ast3.pyi @@ -0,0 +1,390 @@ +from _typeshed import ReadableBuffer +from collections.abc import Iterator +from typing import Any +from typing_extensions import TypeAlias + +LATEST_MINOR_VERSION: int + +class NodeVisitor: + def visit(self, node: AST) -> Any: ... + def generic_visit(self, node: AST) -> None: ... + +class NodeTransformer(NodeVisitor): + def generic_visit(self, node: AST) -> None: ... + +def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer = ..., mode: str = ..., feature_version: int = ... +) -> AST: ... +def copy_location(new_node: AST, old_node: AST) -> AST: ... +def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) -> str: ... +def fix_missing_locations(node: AST) -> AST: ... +def get_docstring(node: AST, clean: bool = ...) -> str | None: ... +def increment_lineno(node: AST, n: int = ...) -> AST: ... +def iter_child_nodes(node: AST) -> Iterator[AST]: ... +def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... +def literal_eval(node_or_string: str | AST) -> Any: ... +def walk(node: AST) -> Iterator[AST]: ... + +PyCF_ONLY_AST: int + +# ast classes + +_Identifier: TypeAlias = str + +class AST: + _attributes: tuple[str, ...] + _fields: tuple[str, ...] + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + +class mod(AST): ... 
+ +class Module(mod): + body: list[stmt] + type_ignores: list[TypeIgnore] + +class Interactive(mod): + body: list[stmt] + +class Expression(mod): + body: expr + +class FunctionType(mod): + argtypes: list[expr] + returns: expr + +class Suite(mod): + body: list[stmt] + +class stmt(AST): + lineno: int + col_offset: int + +class FunctionDef(stmt): + name: _Identifier + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + type_comment: str | None + +class AsyncFunctionDef(stmt): + name: _Identifier + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + type_comment: str | None + +class ClassDef(stmt): + name: _Identifier + bases: list[expr] + keywords: list[keyword] + body: list[stmt] + decorator_list: list[expr] + +class Return(stmt): + value: expr | None + +class Delete(stmt): + targets: list[expr] + +class Assign(stmt): + targets: list[expr] + value: expr + type_comment: str | None + +class AugAssign(stmt): + target: expr + op: operator + value: expr + +class AnnAssign(stmt): + target: expr + annotation: expr + value: expr | None + simple: int + +class For(stmt): + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + type_comment: str | None + +class AsyncFor(stmt): + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + type_comment: str | None + +class While(stmt): + test: expr + body: list[stmt] + orelse: list[stmt] + +class If(stmt): + test: expr + body: list[stmt] + orelse: list[stmt] + +class With(stmt): + items: list[withitem] + body: list[stmt] + type_comment: str | None + +class AsyncWith(stmt): + items: list[withitem] + body: list[stmt] + type_comment: str | None + +class Raise(stmt): + exc: expr | None + cause: expr | None + +class Try(stmt): + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + finalbody: list[stmt] + +class Assert(stmt): + test: expr + msg: expr | None + +class Import(stmt): + names: list[alias] + +class ImportFrom(stmt): + module: _Identifier | None + names: list[alias] + level: int | None + +class Global(stmt): + names: list[_Identifier] + +class Nonlocal(stmt): + names: list[_Identifier] + +class Expr(stmt): + value: expr + +class Pass(stmt): ... +class Break(stmt): ... +class Continue(stmt): ... +class slice(AST): ... 
+ +_Slice: TypeAlias = slice # this lets us type the variable named 'slice' below + +class Slice(slice): + lower: expr | None + upper: expr | None + step: expr | None + +class ExtSlice(slice): + dims: list[slice] + +class Index(slice): + value: expr + +class expr(AST): + lineno: int + col_offset: int + +class BoolOp(expr): + op: boolop + values: list[expr] + +class BinOp(expr): + left: expr + op: operator + right: expr + +class UnaryOp(expr): + op: unaryop + operand: expr + +class Lambda(expr): + args: arguments + body: expr + +class IfExp(expr): + test: expr + body: expr + orelse: expr + +class Dict(expr): + keys: list[expr] + values: list[expr] + +class Set(expr): + elts: list[expr] + +class ListComp(expr): + elt: expr + generators: list[comprehension] + +class SetComp(expr): + elt: expr + generators: list[comprehension] + +class DictComp(expr): + key: expr + value: expr + generators: list[comprehension] + +class GeneratorExp(expr): + elt: expr + generators: list[comprehension] + +class Await(expr): + value: expr + +class Yield(expr): + value: expr | None + +class YieldFrom(expr): + value: expr + +class Compare(expr): + left: expr + ops: list[cmpop] + comparators: list[expr] + +class Call(expr): + func: expr + args: list[expr] + keywords: list[keyword] + +class Num(expr): + n: int | float | complex + +class Str(expr): + s: str + kind: str + +class FormattedValue(expr): + value: expr + conversion: int | None + format_spec: expr | None + +class JoinedStr(expr): + values: list[expr] + +class Bytes(expr): + s: bytes + +class NameConstant(expr): + value: Any + +class Ellipsis(expr): ... + +class Attribute(expr): + value: expr + attr: _Identifier + ctx: expr_context + +class Subscript(expr): + value: expr + slice: _Slice + ctx: expr_context + +class Starred(expr): + value: expr + ctx: expr_context + +class Name(expr): + id: _Identifier + ctx: expr_context + +class List(expr): + elts: list[expr] + ctx: expr_context + +class Tuple(expr): + elts: list[expr] + ctx: expr_context + +class expr_context(AST): ... +class AugLoad(expr_context): ... +class AugStore(expr_context): ... +class Del(expr_context): ... +class Load(expr_context): ... +class Param(expr_context): ... +class Store(expr_context): ... +class boolop(AST): ... +class And(boolop): ... +class Or(boolop): ... +class operator(AST): ... +class Add(operator): ... +class BitAnd(operator): ... +class BitOr(operator): ... +class BitXor(operator): ... +class Div(operator): ... +class FloorDiv(operator): ... +class LShift(operator): ... +class Mod(operator): ... +class Mult(operator): ... +class MatMult(operator): ... +class Pow(operator): ... +class RShift(operator): ... +class Sub(operator): ... +class unaryop(AST): ... +class Invert(unaryop): ... +class Not(unaryop): ... +class UAdd(unaryop): ... +class USub(unaryop): ... +class cmpop(AST): ... +class Eq(cmpop): ... +class Gt(cmpop): ... +class GtE(cmpop): ... +class In(cmpop): ... +class Is(cmpop): ... +class IsNot(cmpop): ... +class Lt(cmpop): ... +class LtE(cmpop): ... +class NotEq(cmpop): ... +class NotIn(cmpop): ... 
+ +class comprehension(AST): + target: expr + iter: expr + ifs: list[expr] + is_async: int + +class ExceptHandler(AST): + type: expr | None + name: _Identifier | None + body: list[stmt] + lineno: int + col_offset: int + +class arguments(AST): + args: list[arg] + vararg: arg | None + kwonlyargs: list[arg] + kw_defaults: list[expr | None] + kwarg: arg | None + defaults: list[expr] + +class arg(AST): + arg: _Identifier + annotation: expr | None + lineno: int + col_offset: int + type_comment: str | None + +class keyword(AST): + arg: _Identifier | None + value: expr + +class alias(AST): + name: _Identifier + asname: _Identifier | None + +class withitem(AST): + context_expr: expr + optional_vars: expr | None + +class TypeIgnore(AST): + lineno: int diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/conversions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/conversions.pyi new file mode 100644 index 00000000..c7088ecc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/typed-ast/typed_ast/conversions.pyi @@ -0,0 +1,3 @@ +from . import ast3, ast27 + +def py2to3(ast: ast27.AST) -> ast3.AST: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..1fd3f670 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/@tests/stubtest_allowlist.txt @@ -0,0 +1,3 @@ +# Implementation details +tzlocal.unix +tzlocal.win32 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/METADATA.toml new file mode 100644 index 00000000..cfe45edd --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/METADATA.toml @@ -0,0 +1,2 @@ +version = "4.2" +requires = ["types-pytz"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/tzlocal/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/tzlocal/__init__.pyi new file mode 100644 index 00000000..81c2351a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/tzlocal/__init__.pyi @@ -0,0 +1,5 @@ +from pytz import BaseTzInfo + +def reload_localzone() -> None: ... +def get_localzone() -> BaseTzInfo: ... +def get_localzone_name() -> str: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/tzlocal/utils.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/tzlocal/utils.pyi new file mode 100644 index 00000000..aedc34ac --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/tzlocal/utils.pyi @@ -0,0 +1,16 @@ +import datetime +import sys + +import pytz + +if sys.version_info >= (3, 9): + import zoneinfo + + class ZoneInfoNotFoundError(pytz.UnknownTimeZoneError, zoneinfo.ZoneInfoNotFoundError): ... + +else: + class ZoneInfoNotFoundError(pytz.UnknownTimeZoneError): ... + +def get_system_offset() -> int: ... +def get_tz_offset(tz: datetime.tzinfo) -> int: ... +def assert_tz_offset(tz: datetime.tzinfo) -> None: ... 
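
As a reference point for the `typed_ast.ast3` stubs above (not part of the vendored data): a small sketch showing the `parse` / `NodeVisitor` surface. It assumes `typed_ast` is installed; the source string and the visitor class are invented for illustration, and the `visit_Assign` dispatch follows the same convention as the standard-library `ast` module.

```python
# Sketch, assuming typed_ast is installed.
from typed_ast import ast3

tree = ast3.parse("x = 1  # type: int\n", mode="exec")

class AssignCounter(ast3.NodeVisitor):
    def __init__(self) -> None:
        self.count = 0

    def visit_Assign(self, node: ast3.Assign) -> None:
        self.count += 1
        # Assign.type_comment is str | None in the stub above
        print("assignment, type comment:", node.type_comment)
        self.generic_visit(node)

counter = AssignCounter()
counter.visit(tree)
print(counter.count, "assignment(s)")
```
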
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/tzlocal/windows_tz.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/tzlocal/windows_tz.pyi new file mode 100644 index 00000000..12dbac90 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/tzlocal/tzlocal/windows_tz.pyi @@ -0,0 +1,4 @@ +# Auto-generated for tzlocal.win32. But some libraries use it to get the mappings directly +win_tz: dict[str, str] +tz_names: dict[str, str] +tz_win: dict[str, str] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ujson/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ujson/METADATA.toml new file mode 100644 index 00000000..17781efc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ujson/METADATA.toml @@ -0,0 +1 @@ +version = "5.7.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ujson/ujson.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ujson/ujson.pyi new file mode 100644 index 00000000..b2528a9d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/ujson/ujson.pyi @@ -0,0 +1,37 @@ +from typing import IO, Any, AnyStr + +__version__: str + +def encode( + obj: Any, + ensure_ascii: bool = ..., + double_precision: int = ..., + encode_html_chars: bool = ..., + escape_forward_slashes: bool = ..., + sort_keys: bool = ..., + indent: int = ..., +) -> str: ... +def dumps( + obj: Any, + ensure_ascii: bool = ..., + double_precision: int = ..., + encode_html_chars: bool = ..., + escape_forward_slashes: bool = ..., + sort_keys: bool = ..., + indent: int = ..., +) -> str: ... +def dump( + obj: Any, + fp: IO[str], + ensure_ascii: bool = ..., + double_precision: int = ..., + encode_html_chars: bool = ..., + escape_forward_slashes: bool = ..., + sort_keys: bool = ..., + indent: int = ..., +) -> None: ... +def decode(s: AnyStr, precise_float: bool = ...) -> Any: ... +def loads(s: AnyStr, precise_float: bool = ...) -> Any: ... +def load(fp: IO[AnyStr], precise_float: bool = ...) -> Any: ... + +class JSONDecodeError(ValueError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/untangle/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/untangle/METADATA.toml new file mode 100644 index 00000000..249b0a7c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/untangle/METADATA.toml @@ -0,0 +1 @@ +version = "1.2.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/untangle/untangle.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/untangle/untangle.pyi new file mode 100644 index 00000000..f706aee0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/untangle/untangle.pyi @@ -0,0 +1,37 @@ +from collections.abc import Iterator, Mapping +from typing import Any +from typing_extensions import Self +from xml.sax import handler + +def is_string(x: object) -> bool: ... + +class Element: + children: list[Element] + is_root: bool + cdata: str + def __init__(self, name: str | None, attributes: Mapping[str, Any] | None) -> None: ... + def add_child(self, element: Element) -> None: ... + def add_cdata(self, cdata: str) -> None: ... 
+ def get_attribute(self, key: str) -> Any | None: ... + def get_elements(self, name: str | None = ...) -> list[Element]: ... + def __getitem__(self, key: str) -> Any | None: ... + def __getattr__(self, key: str) -> Element: ... + def __hasattribute__(self, name: str) -> bool: ... + def __iter__(self) -> Iterator[Self]: ... + def __bool__(self) -> bool: ... + __nonzero__ = __bool__ + def __eq__(self, val: object) -> bool: ... + def __dir__(self) -> list[str]: ... + def __len__(self) -> int: ... + def __contains__(self, key: str) -> bool: ... + +class Handler(handler.ContentHandler): + root: Element + elements: list[Element] + def __init__(self) -> None: ... + def startElement(self, name: str, attributes: Mapping[str, Any]) -> None: ... + def endElement(self, name: str) -> None: ... + def characters(self, cdata: str) -> None: ... + +def parse(filename: str, **parser_features: bool) -> Element: ... +def is_url(string: str) -> bool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..943c818d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/@tests/stubtest_allowlist.txt @@ -0,0 +1,33 @@ +urllib3.NullHandler +urllib3._collections.HTTPHeaderDict.from_httplib +urllib3._collections.HTTPHeaderDict.getlist +urllib3._collections.RLock +urllib3.connection.HTTPConnection.request +urllib3.connection.HTTPSConnection.__init__ +urllib3.connection.VerifiedHTTPSConnection.__init__ +urllib3.connection.VerifiedHTTPSConnection.set_cert +urllib3.connectionpool.ConnectionError +# TODO: remove ResponseCls ignore when https://github.com/python/mypy/issues/13316 is closed +urllib3.connectionpool.HTTPConnectionPool.ResponseCls +urllib3.connectionpool.HTTPConnectionPool.__init__ +urllib3.connectionpool.HTTPConnectionPool.urlopen +urllib3.connectionpool.HTTPSConnectionPool.__init__ +urllib3.connectionpool.VerifiedHTTPSConnection.__init__ +urllib3.connectionpool.VerifiedHTTPSConnection.set_cert +urllib3.packages.ssl_match_hostname +urllib3.packages.ssl_match_hostname._implementation +urllib3.poolmanager.PoolManager.connection_from_host +urllib3.poolmanager.PoolManager.connection_from_url +urllib3.poolmanager.PoolManager.urlopen +urllib3.poolmanager.ProxyManager.__init__ +urllib3.poolmanager.ProxyManager.connection_from_host +urllib3.poolmanager.ProxyManager.urlopen +urllib3.request.RequestMethods.request_encode_url +urllib3.response.BrotliDecoder +urllib3.util.connection.poll +urllib3.util.connection.select +urllib3.util.ssl_.create_default_context +urllib3.util.ssl_.ssl_wrap_socket + +# Metaclass differs: +urllib3.util.retry.Retry diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/METADATA.toml new file mode 100644 index 00000000..791b8435 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/METADATA.toml @@ -0,0 +1,5 @@ +version = "1.26.*" + +[tool.stubtest] +ignore_missing_stub = true +extras = ["socks"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/__init__.pyi new file mode 100644 index 
00000000..12ca6103 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/__init__.pyi @@ -0,0 +1,28 @@ +import logging +from typing import TextIO + +from . import connectionpool, filepost, poolmanager, response +from .util import request as _request, retry, timeout, url + +__author__: str +__license__: str +__version__: str + +HTTPConnectionPool = connectionpool.HTTPConnectionPool +HTTPSConnectionPool = connectionpool.HTTPSConnectionPool +connection_from_url = connectionpool.connection_from_url +encode_multipart_formdata = filepost.encode_multipart_formdata +PoolManager = poolmanager.PoolManager +ProxyManager = poolmanager.ProxyManager +proxy_from_url = poolmanager.proxy_from_url +HTTPResponse = response.HTTPResponse +make_headers = _request.make_headers +get_host = url.get_host +Timeout = timeout.Timeout +Retry = retry.Retry + +class NullHandler(logging.Handler): + def emit(self, record): ... + +def add_stderr_logger(level: int = ...) -> logging.StreamHandler[TextIO]: ... +def disable_warnings(category: type[Warning] = ...) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/_collections.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/_collections.pyi new file mode 100644 index 00000000..416d9e91 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/_collections.pyi @@ -0,0 +1,52 @@ +from collections.abc import MutableMapping +from typing import Any, NoReturn, TypeVar + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + +class RLock: + def __enter__(self): ... + def __exit__(self, exc_type, exc_value, traceback): ... + +class RecentlyUsedContainer(MutableMapping[_KT, _VT]): + ContainerCls: Any + dispose_func: Any + lock: Any + def __init__(self, maxsize=..., dispose_func=...) -> None: ... + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def __len__(self) -> int: ... + def __iter__(self): ... + def clear(self): ... + def keys(self): ... + +class HTTPHeaderDict(MutableMapping[str, str]): + def __init__(self, headers=..., **kwargs) -> None: ... + def __setitem__(self, key, val) -> None: ... + def __getitem__(self, key): ... + def __delitem__(self, key) -> None: ... + def __contains__(self, key): ... + def __eq__(self, other): ... + def __iter__(self) -> NoReturn: ... + def __len__(self) -> int: ... + def __ne__(self, other): ... + values: Any + get: Any + update: Any + iterkeys: Any + itervalues: Any + def pop(self, key, default=...): ... + def discard(self, key): ... + def add(self, key, val): ... + def extend(self, *args, **kwargs): ... + def getlist(self, key): ... + getheaders: Any + getallmatchingheaders: Any + iget: Any + def copy(self): ... + def iteritems(self): ... + def itermerged(self): ... + def items(self): ... + @classmethod + def from_httplib(cls, message, duplicates=...): ... 
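
For context on the urllib3 1.26-era surface re-exported in `__init__.pyi` and the `HTTPHeaderDict` typed in `_collections.pyi` (not part of the vendored data): a minimal usage sketch. The target URL is a placeholder.

```python
# Sketch, assuming urllib3 1.26.x is installed; the URL is a placeholder.
import urllib3
from urllib3._collections import HTTPHeaderDict

http = urllib3.PoolManager()
resp = http.request("GET", "https://example.org/")  # -> HTTPResponse
print(resp.status, len(resp.data))

# HTTPHeaderDict keeps case-insensitive, multi-valued headers
headers = HTTPHeaderDict()
headers.add("Set-Cookie", "a=1")
headers.add("Set-Cookie", "b=2")
print(headers["set-cookie"])  # multiple values are joined on lookup
```
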
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/connection.pyi new file mode 100644 index 00000000..8baca880 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/connection.pyi @@ -0,0 +1,55 @@ +import ssl +from builtins import ConnectionError as ConnectionError +from http.client import HTTPConnection as _HTTPConnection, HTTPException as HTTPException +from typing import Any + +from . import exceptions, util +from .packages import ssl_match_hostname +from .util import ssl_ + +class DummyConnection: ... + +BaseSSLError = ssl.SSLError + +ConnectTimeoutError = exceptions.ConnectTimeoutError +SystemTimeWarning = exceptions.SystemTimeWarning +match_hostname = ssl_match_hostname.match_hostname +resolve_cert_reqs = ssl_.resolve_cert_reqs +resolve_ssl_version = ssl_.resolve_ssl_version +ssl_wrap_socket = ssl_.ssl_wrap_socket +assert_fingerprint = ssl_.assert_fingerprint +connection = util.connection + +port_by_scheme: Any +RECENT_DATE: Any + +class HTTPConnection(_HTTPConnection): + default_port: Any + default_socket_options: Any + is_verified: Any + source_address: Any + socket_options: Any + def __init__(self, *args, **kw) -> None: ... + def connect(self): ... + +class HTTPSConnection(HTTPConnection): + default_port: Any + key_file: Any + cert_file: Any + def __init__(self, host, port=..., key_file=..., cert_file=..., strict=..., timeout=..., **kw) -> None: ... + sock: Any + def connect(self): ... + +class VerifiedHTTPSConnection(HTTPSConnection): + cert_reqs: Any + ca_certs: Any + ssl_version: Any + assert_fingerprint: Any + key_file: Any + cert_file: Any + assert_hostname: Any + def set_cert(self, key_file=..., cert_file=..., cert_reqs=..., ca_certs=..., assert_hostname=..., assert_fingerprint=...): ... + sock: Any + auto_open: Any + is_verified: Any + def connect(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/connectionpool.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/connectionpool.pyi new file mode 100644 index 00000000..4d27996d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/connectionpool.pyi @@ -0,0 +1,130 @@ +import queue +from collections.abc import Mapping +from logging import Logger +from types import TracebackType +from typing import Any, ClassVar +from typing_extensions import Literal, Self, TypeAlias + +from . 
import connection, exceptions, request, response +from .connection import BaseSSLError as BaseSSLError, ConnectionError as ConnectionError, HTTPException as HTTPException +from .packages import ssl_match_hostname +from .util import Url, connection as _connection, queue as urllib3queue, retry, timeout, url + +ClosedPoolError = exceptions.ClosedPoolError +ProtocolError = exceptions.ProtocolError +EmptyPoolError = exceptions.EmptyPoolError +HostChangedError = exceptions.HostChangedError +LocationValueError = exceptions.LocationValueError +MaxRetryError = exceptions.MaxRetryError +ProxyError = exceptions.ProxyError +ReadTimeoutError = exceptions.ReadTimeoutError +SSLError = exceptions.SSLError +TimeoutError = exceptions.TimeoutError +InsecureRequestWarning = exceptions.InsecureRequestWarning +CertificateError = ssl_match_hostname.CertificateError +port_by_scheme = connection.port_by_scheme +DummyConnection = connection.DummyConnection +HTTPConnection = connection.HTTPConnection +HTTPSConnection = connection.HTTPSConnection +VerifiedHTTPSConnection = connection.VerifiedHTTPSConnection +RequestMethods = request.RequestMethods +HTTPResponse = response.HTTPResponse +is_connection_dropped = _connection.is_connection_dropped +Retry = retry.Retry +Timeout = timeout.Timeout +get_host = url.get_host + +_Timeout: TypeAlias = Timeout | float +_Retries: TypeAlias = Retry | bool | int + +xrange: Any +log: Logger + +class ConnectionPool: + scheme: ClassVar[str | None] + QueueCls: ClassVar[type[queue.Queue[Any]]] + host: str + port: int | None + def __init__(self, host: str, port: int | None = ...) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> Literal[False]: ... + def close(self) -> None: ... + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + scheme: ClassVar[str] + ConnectionCls: ClassVar[type[HTTPConnection | HTTPSConnection]] + ResponseCls: ClassVar[type[HTTPResponse]] + strict: bool + timeout: _Timeout + retries: _Retries | None + pool: urllib3queue.LifoQueue | None + block: bool + proxy: Url | None + proxy_headers: Mapping[str, str] + num_connections: int + num_requests: int + conn_kw: Any + def __init__( + self, + host: str, + port: int | None = ..., + strict: bool = ..., + timeout: _Timeout = ..., + maxsize: int = ..., + block: bool = ..., + headers: Mapping[str, str] | None = ..., + retries: _Retries | None = ..., + _proxy: Url | None = ..., + _proxy_headers: Mapping[str, str] | None = ..., + **conn_kw, + ) -> None: ... + def close(self) -> None: ... + def is_same_host(self, url: str) -> bool: ... + def urlopen( + self, + method, + url, + body=..., + headers=..., + retries=..., + redirect=..., + assert_same_host=..., + timeout=..., + pool_timeout=..., + release_conn=..., + **response_kw, + ): ... 
+ +class HTTPSConnectionPool(HTTPConnectionPool): + key_file: str | None + cert_file: str | None + cert_reqs: int | str | None + ca_certs: str | None + ssl_version: int | str | None + assert_hostname: str | Literal[False] | None + assert_fingerprint: str | None + def __init__( + self, + host: str, + port: int | None = ..., + strict: bool = ..., + timeout: _Timeout = ..., + maxsize: int = ..., + block: bool = ..., + headers: Mapping[str, str] | None = ..., + retries: _Retries | None = ..., + _proxy: Url | None = ..., + _proxy_headers: Mapping[str, str] | None = ..., + key_file: str | None = ..., + cert_file: str | None = ..., + cert_reqs: int | str | None = ..., + ca_certs: str | None = ..., + ssl_version: int | str | None = ..., + assert_hostname: str | Literal[False] | None = ..., + assert_fingerprint: str | None = ..., + **conn_kw, + ) -> None: ... + +def connection_from_url(url: str, **kw) -> HTTPConnectionPool: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/contrib/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/contrib/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/contrib/socks.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/contrib/socks.pyi new file mode 100644 index 00000000..f2137f8c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/contrib/socks.pyi @@ -0,0 +1,45 @@ +from collections.abc import Mapping +from typing import ClassVar +from typing_extensions import TypedDict + +from ..connection import HTTPConnection, HTTPSConnection +from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool +from ..poolmanager import PoolManager + +class _TYPE_SOCKS_OPTIONS(TypedDict): + socks_version: int + proxy_host: str | None + proxy_port: str | None + username: str | None + password: str | None + rdns: bool + +class SOCKSConnection(HTTPConnection): + def __init__(self, _socks_options: _TYPE_SOCKS_OPTIONS, *args, **kwargs) -> None: ... + +class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): ... + +class SOCKSHTTPConnectionPool(HTTPConnectionPool): + ConnectionCls: ClassVar[type[SOCKSConnection]] + +class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): + ConnectionCls: ClassVar[type[SOCKSHTTPSConnection]] + +class _ConnectionPoolClasses(TypedDict): + http: type[SOCKSHTTPConnectionPool] + https: type[SOCKSHTTPSConnectionPool] + +class SOCKSProxyManager(PoolManager): + # has a class-level default, but is overridden on instances, so not a ClassVar + pool_classes_by_scheme: _ConnectionPoolClasses + proxy_url: str + + def __init__( + self, + proxy_url: str, + username: str | None = ..., + password: str | None = ..., + num_pools: int = ..., + headers: Mapping[str, str] | None = ..., + **connection_pool_kw, + ) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/exceptions.pyi new file mode 100644 index 00000000..8c209061 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/exceptions.pyi @@ -0,0 +1,87 @@ +from email.errors import MessageDefect +from http.client import IncompleteRead as httplib_IncompleteRead +from typing import Any + +from urllib3.connectionpool import ConnectionPool, HTTPResponse +from urllib3.util.retry import Retry + +class HTTPError(Exception): ... +class HTTPWarning(Warning): ... + +class PoolError(HTTPError): + pool: ConnectionPool + def __init__(self, pool: ConnectionPool, message: str) -> None: ... + +class RequestError(PoolError): + url: str + def __init__(self, pool: ConnectionPool, url: str, message: str) -> None: ... + +class SSLError(HTTPError): ... + +class ProxyError(HTTPError): + original_error: Exception + def __init__(self, message: str, error: Exception, *args: Any) -> None: ... + +class DecodeError(HTTPError): ... +class ProtocolError(HTTPError): ... + +ConnectionError = ProtocolError + +class MaxRetryError(RequestError): + reason: Exception | None + def __init__(self, pool: ConnectionPool, url: str, reason: Exception | None = ...) -> None: ... + +class HostChangedError(RequestError): + retries: Retry | int + def __init__(self, pool: ConnectionPool, url: str, retries: Retry | int = ...) -> None: ... + +class TimeoutStateError(HTTPError): ... +class TimeoutError(HTTPError): ... +class ReadTimeoutError(TimeoutError, RequestError): ... +class ConnectTimeoutError(TimeoutError): ... +class NewConnectionError(ConnectTimeoutError, HTTPError): ... +class EmptyPoolError(PoolError): ... +class ClosedPoolError(PoolError): ... +class LocationValueError(ValueError, HTTPError): ... + +class LocationParseError(LocationValueError): + location: str + def __init__(self, location: str) -> None: ... + +class URLSchemeUnknown(LocationValueError): + scheme: str + def __init__(self, scheme: str) -> None: ... + +class ResponseError(HTTPError): + GENERIC_ERROR: str + SPECIFIC_ERROR: str + +class SecurityWarning(HTTPWarning): ... +class SubjectAltNameWarning(SecurityWarning): ... +class InsecureRequestWarning(SecurityWarning): ... +class SystemTimeWarning(SecurityWarning): ... +class InsecurePlatformWarning(SecurityWarning): ... +class SNIMissingWarning(HTTPWarning): ... +class DependencyWarning(HTTPWarning): ... +class ResponseNotChunked(ProtocolError, ValueError): ... +class BodyNotHttplibCompatible(HTTPError): ... + +class IncompleteRead(HTTPError, httplib_IncompleteRead): + def __init__(self, partial: bytes, expected: int | None) -> None: ... + +class InvalidChunkLength(HTTPError, httplib_IncompleteRead): + response: HTTPResponse + length: bytes + def __init__(self, response: HTTPResponse, length: bytes) -> None: ... + +class InvalidHeader(HTTPError): ... + +class ProxySchemeUnknown(AssertionError, URLSchemeUnknown): + def __init__(self, scheme: str | None) -> None: ... + +class ProxySchemeUnsupported(ValueError): ... + +class HeaderParsingError(HTTPError): + def __init__(self, defects: list[MessageDefect], unparsed_data: str | bytes | None) -> None: ... + +class UnrewindableBodyError(HTTPError): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/fields.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/fields.pyi new file mode 100644 index 00000000..a7eb0255 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/fields.pyi @@ -0,0 +1,32 @@ +from collections.abc import Callable, Mapping +from typing import Any +from typing_extensions import TypeAlias + +_FieldValue: TypeAlias = str | bytes +_FieldValueTuple: TypeAlias = _FieldValue | tuple[str, _FieldValue] | tuple[str, _FieldValue, str] + +def guess_content_type(filename: str | None, default: str = ...) -> str: ... +def format_header_param_rfc2231(name: str, value: _FieldValue) -> str: ... +def format_header_param_html5(name: str, value: _FieldValue) -> str: ... + +format_header_param = format_header_param_html5 + +class RequestField: + data: Any + headers: Any + def __init__( + self, + name: str, + data: _FieldValue, + filename: str | None = ..., + headers: Mapping[str, str] | None = ..., + header_formatter: Callable[[str, _FieldValue], str] = ..., + ) -> None: ... + @classmethod + def from_tuples( + cls, fieldname: str, value: _FieldValueTuple, header_formatter: Callable[[str, _FieldValue], str] = ... + ) -> RequestField: ... + def render_headers(self) -> str: ... + def make_multipart( + self, content_disposition: str | None = ..., content_type: str | None = ..., content_location: str | None = ... + ) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/filepost.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/filepost.pyi new file mode 100644 index 00000000..49bd7e04 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/filepost.pyi @@ -0,0 +1,12 @@ +from typing import Any + +from . import fields + +RequestField = fields.RequestField + +writer: Any + +def choose_boundary(): ... +def iter_field_objects(fields): ... +def iter_fields(fields): ... +def encode_multipart_formdata(fields, boundary=...): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/packages/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/packages/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/packages/ssl_match_hostname/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/packages/ssl_match_hostname/__init__.pyi new file mode 100644 index 00000000..1915c0e5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/packages/ssl_match_hostname/__init__.pyi @@ -0,0 +1,4 @@ +import ssl + +CertificateError = ssl.CertificateError +match_hostname = ssl.match_hostname diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/packages/ssl_match_hostname/_implementation.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/packages/ssl_match_hostname/_implementation.pyi new file mode 100644 index 00000000..c2199801 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/packages/ssl_match_hostname/_implementation.pyi @@ -0,0 +1,3 @@ +class CertificateError(ValueError): ... + +def match_hostname(cert, hostname): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/poolmanager.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/poolmanager.pyi new file mode 100644 index 00000000..68ad6962 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/poolmanager.pyi @@ -0,0 +1,28 @@ +from typing import Any + +from .request import RequestMethods + +class PoolManager(RequestMethods): + proxy: Any + connection_pool_kw: Any + pools: Any + def __init__(self, num_pools=..., headers=..., **connection_pool_kw) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_val, exc_tb): ... + def clear(self): ... + def connection_from_host(self, host, port=..., scheme=...): ... + def connection_from_url(self, url): ... + # TODO: This was the original signature -- copied another one from base class to fix complaint. + # def urlopen(self, method, url, redirect=True, **kw): ... + def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ... + +class ProxyManager(PoolManager): + proxy: Any + proxy_headers: Any + def __init__(self, proxy_url, num_pools=..., headers=..., proxy_headers=..., **connection_pool_kw) -> None: ... + def connection_from_host(self, host, port=..., scheme=...): ... + # TODO: This was the original signature -- copied another one from base class to fix complaint. + # def urlopen(self, method, url, redirect=True, **kw): ... + def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ... + +def proxy_from_url(url, **kw): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/request.pyi new file mode 100644 index 00000000..b95ab295 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/request.pyi @@ -0,0 +1,11 @@ +from typing import Any + +class RequestMethods: + headers: Any + def __init__(self, headers=...) -> None: ... + def urlopen(self, method, url, body=..., headers=..., encode_multipart=..., multipart_boundary=..., **kw): ... + def request(self, method, url, fields=..., headers=..., **urlopen_kw): ... + def request_encode_url(self, method, url, fields=..., **urlopen_kw): ... + def request_encode_body( + self, method, url, fields=..., headers=..., encode_multipart=..., multipart_boundary=..., **urlopen_kw + ): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/response.pyi new file mode 100644 index 00000000..a4f76d31 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/response.pyi @@ -0,0 +1,98 @@ +import io +from collections.abc import Iterable, Iterator, Mapping +from http.client import HTTPMessage as _HttplibHTTPMessage, HTTPResponse as _HttplibHTTPResponse +from typing import IO, Any +from typing_extensions import Literal, Self, TypeAlias + +from urllib3.connectionpool import HTTPConnection + +from . import HTTPConnectionPool, Retry +from ._collections import HTTPHeaderDict + +_TYPE_BODY: TypeAlias = bytes | IO[Any] | Iterable[bytes] | str + +class DeflateDecoder: + def __getattr__(self, name: str) -> Any: ... + def decompress(self, data: bytes) -> bytes: ... + +class GzipDecoderState: + FIRST_MEMBER: Literal[0] + OTHER_MEMBERS: Literal[1] + SWALLOW_DATA: Literal[2] + +class GzipDecoder: + def __getattr__(self, name: str) -> Any: ... + def decompress(self, data: bytes) -> bytes: ... + +# This class is only available if +# `brotli` is available for import. +class BrotliDecoder: + def flush(self) -> bytes: ... + +class MultiDecoder: + def __init__(self, modes: str) -> None: ... + def flush(self) -> bytes: ... + def decompress(self, data: bytes) -> bytes: ... + +class HTTPResponse(io.IOBase): + CONTENT_DECODERS: list[str] + REDIRECT_STATUSES: list[int] + headers: HTTPHeaderDict + status: int + version: int + reason: str | None + strict: int + decode_content: bool + retries: Retry | None + enforce_content_length: bool + auto_close: bool + msg: _HttplibHTTPMessage | None + chunked: bool + chunk_left: int | None + length_remaining: int | None + def __init__( + self, + body: _TYPE_BODY = ..., + headers: Mapping[str, str] | Mapping[bytes, bytes] | None = ..., + status: int = ..., + version: int = ..., + reason: str | None = ..., + strict: int = ..., + preload_content: bool = ..., + decode_content: bool = ..., + original_response: _HttplibHTTPResponse | None = ..., + pool: HTTPConnectionPool | None = ..., + connection: HTTPConnection | None = ..., + msg: _HttplibHTTPMessage | None = ..., + retries: Retry | None = ..., + enforce_content_length: bool = ..., + request_method: str | None = ..., + request_url: str | None = ..., + auto_close: bool = ..., + ) -> None: ... + def get_redirect_location(self) -> Literal[False] | str | None: ... + def release_conn(self) -> None: ... 
+ def drain_conn(self) -> None: ... + @property + def data(self) -> bytes | Any: ... + @property + def connection(self) -> HTTPConnection | Any: ... + def isclosed(self) -> bool: ... + def tell(self) -> int: ... + def read(self, amt: int | None = ..., decode_content: bool | None = ..., cache_content: bool = ...) -> bytes: ... + def stream(self, amt: int | None = ..., decode_content: bool | None = ...) -> Iterator[bytes]: ... + @classmethod + def from_httplib(cls, r: _HttplibHTTPResponse, **response_kw: Any) -> Self: ... + def getheaders(self) -> HTTPHeaderDict: ... + def getheader(self, name, default=...) -> str | None: ... + def info(self) -> HTTPHeaderDict: ... + def close(self) -> None: ... + @property + def closed(self) -> bool: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def readable(self) -> bool: ... + def readinto(self, b: bytearray) -> int: ... + def supports_chunked_reads(self) -> bool: ... + def read_chunked(self, amt: int | None = ..., decode_content: bool | None = ...) -> Iterator[bytes]: ... + def geturl(self) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/__init__.pyi new file mode 100644 index 00000000..2d0d66dc --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/__init__.pyi @@ -0,0 +1,20 @@ +import ssl + +from . import connection, request, response, retry, ssl_, timeout, url + +is_connection_dropped = connection.is_connection_dropped +make_headers = request.make_headers +is_fp_closed = response.is_fp_closed +SSLContext = ssl.SSLContext +HAS_SNI = ssl_.HAS_SNI +assert_fingerprint = ssl_.assert_fingerprint +resolve_cert_reqs = ssl_.resolve_cert_reqs +resolve_ssl_version = ssl_.resolve_ssl_version +ssl_wrap_socket = ssl_.ssl_wrap_socket +current_time = timeout.current_time +Timeout = timeout.Timeout +Retry = retry.Retry +get_host = url.get_host +parse_url = url.parse_url +split_first = url.split_first +Url = url.Url diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/connection.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/connection.pyi new file mode 100644 index 00000000..db77bd00 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/connection.pyi @@ -0,0 +1,8 @@ +from typing import Any + +poll: Any +select: Any +HAS_IPV6: bool + +def is_connection_dropped(conn): ... +def create_connection(address, timeout=..., source_address=..., socket_options=...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/queue.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/queue.pyi new file mode 100644 index 00000000..bffa6815 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/queue.pyi @@ -0,0 +1,4 @@ +from queue import Queue +from typing import Any + +class LifoQueue(Queue[Any]): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/request.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/request.pyi new file mode 100644 index 00000000..f770cd9f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/request.pyi @@ -0,0 +1,11 @@ +from typing import Any + +# from ..packages import six + +# b = six.b + +ACCEPT_ENCODING: Any + +def make_headers( + keep_alive=..., accept_encoding=..., user_agent=..., basic_auth=..., proxy_basic_auth=..., disable_cache=... +): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/response.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/response.pyi new file mode 100644 index 00000000..30463da4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/response.pyi @@ -0,0 +1 @@ +def is_fp_closed(obj): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/retry.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/retry.pyi new file mode 100644 index 00000000..8fdb9768 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/retry.pyi @@ -0,0 +1,84 @@ +import logging +from collections.abc import Collection +from types import TracebackType +from typing import Any, ClassVar, NamedTuple +from typing_extensions import Literal, Self + +from .. import exceptions +from ..connectionpool import ConnectionPool +from ..response import HTTPResponse + +ConnectTimeoutError = exceptions.ConnectTimeoutError +MaxRetryError = exceptions.MaxRetryError +ProtocolError = exceptions.ProtocolError +ReadTimeoutError = exceptions.ReadTimeoutError +ResponseError = exceptions.ResponseError + +log: logging.Logger + +class RequestHistory(NamedTuple): + method: str | None + url: str | None + error: Exception | None + status: int | None + redirect_location: str | None + +class Retry: + DEFAULT_ALLOWED_METHODS: ClassVar[frozenset[str]] + RETRY_AFTER_STATUS_CODES: ClassVar[frozenset[int]] + DEFAULT_REMOVE_HEADERS_ON_REDIRECT: ClassVar[frozenset[str]] + DEFAULT_BACKOFF_MAX: ClassVar[int] + + total: bool | int | None + connect: int | None + read: int | None + redirect: Literal[True] | int | None + status: int | None + other: int | None + allowed_methods: Collection[str] | Literal[False] | None + status_forcelist: Collection[int] + backoff_factor: float + raise_on_redirect: bool + raise_on_status: bool + history: tuple[RequestHistory, ...] + respect_retry_after_header: bool + remove_headers_on_redirect: frozenset[str] + def __init__( + self, + total: bool | int | None = ..., + connect: int | None = ..., + read: int | None = ..., + redirect: bool | int | None = ..., + status: int | None = ..., + other: int | None = ..., + allowed_methods: Collection[str] | Literal[False] | None = ..., + status_forcelist: Collection[int] | None = ..., + backoff_factor: float = ..., + raise_on_redirect: bool = ..., + raise_on_status: bool = ..., + history: tuple[RequestHistory, ...] | None = ..., + respect_retry_after_header: bool = ..., + remove_headers_on_redirect: Collection[str] = ..., + method_whitelist: Collection[str] | None = ..., + ) -> None: ... + def new(self, **kw: Any) -> Self: ... 
+ @classmethod + def from_int( + cls, retries: Retry | bool | int | None, redirect: bool | int | None = ..., default: Retry | bool | int | None = ... + ) -> Retry: ... + def get_backoff_time(self) -> float: ... + def parse_retry_after(self, retry_after: str) -> float: ... + def get_retry_after(self, response: HTTPResponse) -> float | None: ... + def sleep_for_retry(self, response: HTTPResponse | None = ...) -> bool: ... + def sleep(self, response: HTTPResponse | None = ...) -> None: ... + def is_retry(self, method: str, status_code: int, has_retry_after: bool = ...) -> bool: ... + def is_exhausted(self) -> bool: ... + def increment( + self, + method: str | None = ..., + url: str | None = ..., + response: HTTPResponse | None = ..., + error: Exception | None = ..., + _pool: ConnectionPool | None = ..., + _stacktrace: TracebackType | None = ..., + ) -> Retry: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/ssl_.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/ssl_.pyi new file mode 100644 index 00000000..3a3c5392 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/ssl_.pyi @@ -0,0 +1,31 @@ +import ssl +from typing import Any + +from .. import exceptions + +SSLError = exceptions.SSLError +InsecurePlatformWarning = exceptions.InsecurePlatformWarning +SSLContext = ssl.SSLContext + +HAS_SNI: Any +create_default_context: Any +OP_NO_SSLv2: Any +OP_NO_SSLv3: Any +OP_NO_COMPRESSION: Any +DEFAULT_CIPHERS: str + +def assert_fingerprint(cert, fingerprint): ... +def resolve_cert_reqs(candidate): ... +def resolve_ssl_version(candidate): ... +def create_urllib3_context(ssl_version=..., cert_reqs=..., options=..., ciphers=...): ... +def ssl_wrap_socket( + sock, + keyfile=..., + certfile=..., + cert_reqs=..., + ca_certs=..., + server_hostname=..., + ssl_version=..., + ciphers=..., + ssl_context=..., +): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/timeout.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/timeout.pyi new file mode 100644 index 00000000..25879d68 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/timeout.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from .. import exceptions + +TimeoutStateError = exceptions.TimeoutStateError + +def current_time(): ... + +class Timeout: + DEFAULT_TIMEOUT: Any + total: Any + def __init__(self, total=..., connect=..., read=...) -> None: ... + @classmethod + def from_float(cls, timeout): ... + def clone(self): ... + def start_connect(self): ... + def get_connect_duration(self): ... + @property + def connect_timeout(self): ... + @property + def read_timeout(self): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/url.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/url.pyi new file mode 100644 index 00000000..fe98d2a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/urllib3/urllib3/util/url.pyi @@ -0,0 +1,40 @@ +from typing import NamedTuple + +from .. 
import exceptions + +LocationParseError = exceptions.LocationParseError + +url_attrs: list[str] + +class _UrlBase(NamedTuple): + auth: str | None + fragment: str | None + host: str | None + path: str | None + port: int | None + query: str | None + scheme: str | None + +class Url(_UrlBase): + def __new__( + cls, + scheme: str | None = ..., + auth: str | None = ..., + host: str | None = ..., + port: int | None = ..., + path: str | None = ..., + query: str | None = ..., + fragment: str | None = ..., + ): ... + @property + def hostname(self) -> str | None: ... + @property + def request_uri(self) -> str: ... + @property + def netloc(self) -> str | None: ... + @property + def url(self) -> str: ... + +def split_first(s: str, delims: str) -> tuple[str, str, str | None]: ... +def parse_url(url: str) -> Url: ... +def get_host(url: str) -> tuple[str, str | None, str | None]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..22f32983 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/@tests/stubtest_allowlist.txt @@ -0,0 +1,22 @@ +# implementation has *args and **kwds arguments that can't be used +vobject.base.VBase.__init__ + +# Only available on Windows +# This module is currently broken on Python 3 +# See: https://github.com/eventable/vobject/pull/187 +vobject.win32tz + +# dependencies +vobject.icalendar.Pytz +vobject.icalendar.pytz + +# python2 compat +vobject.base.basestring +vobject.base.str_ +vobject.base.to_unicode +vobject.base.to_basestring +vobject.vcard.basestring + +# implementation details that users shouldn't depend on +vobject.base.formatter +vobject.base.handler diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/METADATA.toml new file mode 100644 index 00000000..51e869b4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/METADATA.toml @@ -0,0 +1 @@ +version = "0.9.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/__init__.pyi new file mode 100644 index 00000000..2312c6db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/__init__.pyi @@ -0,0 +1,4 @@ +from .base import Component + +def iCalendar() -> Component: ... +def vCard() -> Component: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/base.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/base.pyi new file mode 100644 index 00000000..138794c4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/base.pyi @@ -0,0 +1,154 @@ +import logging +from _typeshed import Incomplete, SupportsWrite +from collections.abc import Iterable, Iterator +from typing import Any, TypeVar, overload +from typing_extensions import Literal + +logger: logging.Logger +DEBUG: bool +CR: str +LF: str +CRLF: str +SPACE: str +TAB: str +SPACEORTAB: str + +_V = TypeVar("_V", bound=VBase) +_W = TypeVar("_W", bound=SupportsWrite[bytes]) + +class VBase: + group: Incomplete | None + behavior: Incomplete | None + parentBehavior: Incomplete | None + isNative: bool + def __init__(self, group: Incomplete | None = ...) -> None: ... + def copy(self, copyit: VBase) -> None: ... + def validate(self, *args, **kwds) -> bool: ... + def getChildren(self) -> list[Any]: ... + def clearBehavior(self, cascade: bool = ...) -> None: ... + def autoBehavior(self, cascade: bool = ...) -> None: ... + def setBehavior(self, behavior, cascade: bool = ...) -> None: ... + def transformToNative(self): ... + def transformFromNative(self): ... + def transformChildrenToNative(self) -> None: ... + def transformChildrenFromNative(self, clearBehavior: bool = ...) -> None: ... + @overload + def serialize( + self, buf: None = ..., lineLength: int = ..., validate: bool = ..., behavior: Incomplete | None = ... + ) -> str: ... + @overload + def serialize(self, buf: _W, lineLength: int = ..., validate: bool = ..., behavior: Incomplete | None = ...) -> _W: ... + +def toVName(name, stripNum: int = ..., upper: bool = ...): ... + +class ContentLine(VBase): + name: Any + encoded: Any + params: Any + singletonparams: Any + isNative: Any + lineNumber: Any + value: Any + def __init__( + self, + name, + params, + value, + group: Incomplete | None = ..., + encoded: bool = ..., + isNative: bool = ..., + lineNumber: Incomplete | None = ..., + *args, + **kwds, + ) -> None: ... + @classmethod + def duplicate(cls, copyit): ... + def copy(self, copyit) -> None: ... + def __eq__(self, other): ... + def __getattr__(self, name: str): ... + def __setattr__(self, name: str, value) -> None: ... + def __delattr__(self, name: str) -> None: ... + def valueRepr(self): ... + def __unicode__(self) -> str: ... + def prettyPrint(self, level: int = ..., tabwidth: int = ...) -> None: ... + +class Component(VBase): + contents: dict[str, list[VBase]] + name: Any + useBegin: bool + def __init__(self, name: Incomplete | None = ..., *args, **kwds) -> None: ... + @classmethod + def duplicate(cls, copyit): ... + def copy(self, copyit) -> None: ... + def setProfile(self, name) -> None: ... + def __getattr__(self, name: str): ... + normal_attributes: Any + def __setattr__(self, name: str, value) -> None: ... + def __delattr__(self, name: str) -> None: ... + def getChildValue(self, childName, default: Incomplete | None = ..., childNumber: int = ...): ... + @overload + def add(self, objOrName: _V, group: str | None = ...) -> _V: ... + @overload + def add(self, objOrName: Literal["vevent"], group: str | None = ...) -> Component: ... + @overload + def add( + self, objOrName: Literal["uid", "summary", "description", "dtstart", "dtend"], group: str | None = ... + ) -> ContentLine: ... 
+ @overload + def add(self, objOrName: str, group: str | None = ...) -> Any: ... # returns VBase sub-class + def remove(self, obj) -> None: ... + def getChildren(self) -> list[Any]: ... + def components(self) -> Iterable[Component]: ... + def lines(self): ... + def sortChildKeys(self): ... + def getSortedChildren(self): ... + def setBehaviorFromVersionLine(self, versionLine) -> None: ... + def transformChildrenToNative(self) -> None: ... + def transformChildrenFromNative(self, clearBehavior: bool = ...) -> None: ... + def prettyPrint(self, level: int = ..., tabwidth: int = ...) -> None: ... + +class VObjectError(Exception): + msg: Any + lineNumber: Any + def __init__(self, msg, lineNumber: Incomplete | None = ...) -> None: ... + +class ParseError(VObjectError): ... +class ValidateError(VObjectError): ... +class NativeError(VObjectError): ... + +patterns: Any +param_values_re: Any +params_re: Any +line_re: Any +begin_re: Any + +def parseParams(string): ... +def parseLine(line, lineNumber: Incomplete | None = ...): ... + +wrap_re: Any +logical_lines_re: Any +testLines: str + +def getLogicalLines(fp, allowQP: bool = ...) -> None: ... +def textLineToContentLine(text, n: Incomplete | None = ...): ... +def dquoteEscape(param): ... +def foldOneLine(outbuf, input, lineLength: int = ...) -> None: ... +def defaultSerialize(obj, buf, lineLength): ... + +class Stack: + stack: Any + def __len__(self) -> int: ... + def top(self): ... + def topName(self): ... + def modifyTop(self, item) -> None: ... + def push(self, obj) -> None: ... + def pop(self): ... + +def readComponents( + streamOrString, validate: bool = ..., transform: bool = ..., ignoreUnreadable: bool = ..., allowQP: bool = ... +) -> Iterator[Component]: ... +def readOne(stream, validate: bool = ..., transform: bool = ..., ignoreUnreadable: bool = ..., allowQP: bool = ...): ... +def registerBehavior(behavior, name: Incomplete | None = ..., default: bool = ..., id: Incomplete | None = ...) -> None: ... +def getBehavior(name, id: Incomplete | None = ...): ... +def newFromBehavior(name, id: Incomplete | None = ...): ... +def backslashEscape(s): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/behavior.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/behavior.pyi new file mode 100644 index 00000000..9c6e5a6e --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/behavior.pyi @@ -0,0 +1,32 @@ +from typing import Any + +class Behavior: + name: str + description: str + versionString: str + knownChildren: Any + quotedPrintable: bool + defaultBehavior: Any + hasNative: bool + isComponent: bool + allowGroup: bool + forceUTC: bool + sortFirst: Any + @classmethod + def validate(cls, obj, raiseException: bool = ..., complainUnrecognized: bool = ...): ... + @classmethod + def lineValidate(cls, line, raiseException, complainUnrecognized): ... + @classmethod + def decode(cls, line) -> None: ... + @classmethod + def encode(cls, line) -> None: ... + @classmethod + def transformToNative(cls, obj): ... + @classmethod + def transformFromNative(cls, obj) -> None: ... + @classmethod + def generateImplicitParameters(cls, obj) -> None: ... + @classmethod + def serialize(cls, obj, buf, lineLength, validate: bool = ...): ... + @classmethod + def valueRepr(cls, line): ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/change_tz.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/change_tz.pyi new file mode 100644 index 00000000..ed4de7b2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/change_tz.pyi @@ -0,0 +1,6 @@ +def change_tz(cal, new_timezone, default, utc_only: bool = ..., utc_tz=...) -> None: ... +def main() -> None: ... + +version: str + +def get_options(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/hcalendar.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/hcalendar.pyi new file mode 100644 index 00000000..0c29d239 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/hcalendar.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +from .icalendar import VCalendar2_0 + +class HCalendar(VCalendar2_0): + name: str + @classmethod + def serialize(cls, obj, buf: Incomplete | None = ..., lineLength: Incomplete | None = ..., validate: bool = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/icalendar.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/icalendar.pyi new file mode 100644 index 00000000..ecfe79a1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/icalendar.pyi @@ -0,0 +1,237 @@ +from _typeshed import Incomplete +from datetime import timedelta +from typing import Any + +from .base import Component +from .behavior import Behavior + +DATENAMES: tuple[str, ...] +RULENAMES: tuple[str, ...] +DATESANDRULES: tuple[str, ...] +PRODID: str +WEEKDAYS: tuple[str, ...] +FREQUENCIES: tuple[str, ...] +zeroDelta: timedelta +twoHours: timedelta + +def toUnicode(s: str | bytes) -> str: ... +def registerTzid(tzid, tzinfo) -> None: ... +def getTzid(tzid, smart: bool = ...): ... + +utc: Any # dateutil.tz.tz.tzutc + +class TimezoneComponent(Component): + isNative: bool + behavior: Any + tzinfo: Any + name: str + useBegin: bool + def __init__(self, tzinfo: Incomplete | None = ..., *args, **kwds) -> None: ... + @classmethod + def registerTzinfo(cls, tzinfo): ... + def gettzinfo(self): ... + tzid: Any + daylight: Any + standard: Any + def settzinfo(self, tzinfo, start: int = ..., end: int = ...): ... + normal_attributes: Any + @staticmethod + def pickTzid(tzinfo, allowUTC: bool = ...): ... + def prettyPrint(self, level, tabwidth) -> None: ... # type: ignore[override] + +class RecurringComponent(Component): + isNative: bool + def __init__(self, *args, **kwds) -> None: ... + def getrruleset(self, addRDate: bool = ...): ... + def setrruleset(self, rruleset): ... + rruleset: Any + def __setattr__(self, name, value) -> None: ... + +class TextBehavior(Behavior): + base64string: str + @classmethod + def decode(cls, line) -> None: ... + @classmethod + def encode(cls, line) -> None: ... + +class VCalendarComponentBehavior(Behavior): + defaultBehavior: Any + isComponent: bool + +class RecurringBehavior(VCalendarComponentBehavior): + hasNative: bool + @staticmethod + def transformToNative(obj): ... + @staticmethod + def transformFromNative(obj): ... + @staticmethod + def generateImplicitParameters(obj) -> None: ... 
+ +class DateTimeBehavior(Behavior): + hasNative: bool + @staticmethod + def transformToNative(obj): ... + @classmethod + def transformFromNative(cls, obj): ... + +class UTCDateTimeBehavior(DateTimeBehavior): + forceUTC: bool + +class DateOrDateTimeBehavior(Behavior): + hasNative: bool + @staticmethod + def transformToNative(obj): ... + @staticmethod + def transformFromNative(obj): ... + +class MultiDateBehavior(Behavior): + hasNative: bool + @staticmethod + def transformToNative(obj): ... + @staticmethod + def transformFromNative(obj): ... + +class MultiTextBehavior(Behavior): + listSeparator: str + @classmethod + def decode(cls, line) -> None: ... + @classmethod + def encode(cls, line) -> None: ... + +class SemicolonMultiTextBehavior(MultiTextBehavior): + listSeparator: str + +class VCalendar2_0(VCalendarComponentBehavior): + name: str + description: str + versionString: str + sortFirst: Any + knownChildren: Any + @classmethod + def generateImplicitParameters(cls, obj) -> None: ... + @classmethod + def serialize(cls, obj, buf, lineLength, validate: bool = ...): ... + +class VTimezone(VCalendarComponentBehavior): + name: str + hasNative: bool + description: str + sortFirst: Any + knownChildren: Any + @classmethod + def validate(cls, obj, raiseException, *args): ... + @staticmethod + def transformToNative(obj): ... + @staticmethod + def transformFromNative(obj): ... + +class TZID(Behavior): ... + +class DaylightOrStandard(VCalendarComponentBehavior): + hasNative: bool + knownChildren: Any + +class VEvent(RecurringBehavior): + name: str + sortFirst: Any + description: str + knownChildren: Any + @classmethod + def validate(cls, obj, raiseException, *args): ... + +class VTodo(RecurringBehavior): + name: str + description: str + knownChildren: Any + @classmethod + def validate(cls, obj, raiseException, *args): ... + +class VJournal(RecurringBehavior): + name: str + knownChildren: Any + +class VFreeBusy(VCalendarComponentBehavior): + name: str + description: str + sortFirst: Any + knownChildren: Any + +class VAlarm(VCalendarComponentBehavior): + name: str + description: str + knownChildren: Any + @staticmethod + def generateImplicitParameters(obj) -> None: ... + @classmethod + def validate(cls, obj, raiseException, *args): ... + +class VAvailability(VCalendarComponentBehavior): + name: str + description: str + sortFirst: Any + knownChildren: Any + @classmethod + def validate(cls, obj, raiseException, *args): ... + +class Available(RecurringBehavior): + name: str + sortFirst: Any + description: str + knownChildren: Any + @classmethod + def validate(cls, obj, raiseException, *args): ... + +class Duration(Behavior): + name: str + hasNative: bool + @staticmethod + def transformToNative(obj): ... + @staticmethod + def transformFromNative(obj): ... + +class Trigger(Behavior): + name: str + description: str + hasNative: bool + forceUTC: bool + @staticmethod + def transformToNative(obj): ... + @staticmethod + def transformFromNative(obj): ... + +class PeriodBehavior(Behavior): + hasNative: bool + @staticmethod + def transformToNative(obj): ... + @classmethod + def transformFromNative(cls, obj): ... + +class FreeBusy(PeriodBehavior): + name: str + forceUTC: bool + +class RRule(Behavior): ... + +utcDateTimeList: Any +dateTimeOrDateList: Any +textList: Any + +def numToDigits(num, places): ... +def timedeltaToString(delta): ... +def timeToString(dateOrDateTime): ... +def dateToString(date): ... +def dateTimeToString(dateTime, convertToUTC: bool = ...): ... +def deltaToOffset(delta): ... 
+def periodToString(period, convertToUTC: bool = ...): ... +def isDuration(s): ... +def stringToDate(s): ... +def stringToDateTime(s, tzinfo: Incomplete | None = ...): ... + +escapableCharList: str + +def stringToTextValues(s, listSeparator: str = ..., charList: Incomplete | None = ..., strict: bool = ...): ... +def stringToDurations(s, strict: bool = ...): ... +def parseDtstart(contentline, allowSignatureMismatch: bool = ...): ... +def stringToPeriod(s, tzinfo: Incomplete | None = ...): ... +def getTransition(transitionTo, year, tzinfo): ... +def tzinfo_eq(tzinfo1, tzinfo2, startYear: int = ..., endYear: int = ...): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/ics_diff.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/ics_diff.pyi new file mode 100644 index 00000000..b115a461 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/ics_diff.pyi @@ -0,0 +1,10 @@ +def getSortKey(component): ... +def sortByUID(components): ... +def deleteExtraneous(component, ignore_dtstamp: bool = ...) -> None: ... +def diff(left, right): ... +def prettyDiff(leftObj, rightObj) -> None: ... +def main() -> None: ... + +version: str + +def getOptions(): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/vcard.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/vcard.pyi new file mode 100644 index 00000000..54bdc9be --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/vcard.pyi @@ -0,0 +1,111 @@ +from _typeshed import Incomplete +from typing import Any + +from .behavior import Behavior + +class Name: + family: Any + given: Any + additional: Any + prefix: Any + suffix: Any + def __init__( + self, family: str = ..., given: str = ..., additional: str = ..., prefix: str = ..., suffix: str = ... + ) -> None: ... + @staticmethod + def toString(val): ... + def __eq__(self, other): ... + +class Address: + box: Any + extended: Any + street: Any + city: Any + region: Any + code: Any + country: Any + def __init__( + self, + street: str = ..., + city: str = ..., + region: str = ..., + code: str = ..., + country: str = ..., + box: str = ..., + extended: str = ..., + ) -> None: ... + @staticmethod + def toString(val, join_char: str = ...): ... + lines: Any + one_line: Any + def __eq__(self, other): ... + +class VCardTextBehavior(Behavior): + allowGroup: bool + base64string: str + @classmethod + def decode(cls, line) -> None: ... + @classmethod + def encode(cls, line) -> None: ... + +class VCardBehavior(Behavior): + allowGroup: bool + defaultBehavior: Any + +class VCard3_0(VCardBehavior): + name: str + description: str + versionString: str + isComponent: bool + sortFirst: Any + knownChildren: Any + @classmethod + def generateImplicitParameters(cls, obj) -> None: ... + +class FN(VCardTextBehavior): + name: str + description: str + +class Label(VCardTextBehavior): + name: str + description: str + +wacky_apple_photo_serialize: bool +REALLY_LARGE: float + +class Photo(VCardTextBehavior): + name: str + description: str + @classmethod + def valueRepr(cls, line): ... + @classmethod + def serialize(cls, obj, buf, lineLength, validate) -> None: ... # type: ignore[override] + +def toListOrString(string): ... +def splitFields(string): ... +def toList(stringOrList): ... 
+def serializeFields(obj, order: Incomplete | None = ...): ... + +NAME_ORDER: Any +ADDRESS_ORDER: Any + +class NameBehavior(VCardBehavior): + hasNative: bool + @staticmethod + def transformToNative(obj): ... + @staticmethod + def transformFromNative(obj): ... + +class AddressBehavior(VCardBehavior): + hasNative: bool + @staticmethod + def transformToNative(obj): ... + @staticmethod + def transformFromNative(obj): ... + +class OrgBehavior(VCardBehavior): + hasNative: bool + @staticmethod + def transformToNative(obj): ... + @staticmethod + def transformFromNative(obj): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/win32tz.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/win32tz.pyi new file mode 100644 index 00000000..32936fbe --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/vobject/vobject/win32tz.pyi @@ -0,0 +1,40 @@ +import datetime +import sys +from typing import Any + +if sys.platform == "win32": + handle: Any + tzparent: Any + parentsize: Any + localkey: Any + WEEKS: Any + def list_timezones(): ... + + class win32tz(datetime.tzinfo): + data: Any + def __init__(self, name) -> None: ... + def utcoffset(self, dt): ... + def dst(self, dt): ... + def tzname(self, dt): ... + + def pickNthWeekday(year, month, dayofweek, hour, minute, whichweek): ... + + class win32tz_data: + display: Any + dstname: Any + stdname: Any + stdoffset: Any + dstoffset: Any + stdmonth: Any + stddayofweek: Any + stdweeknumber: Any + stdhour: Any + stdminute: Any + dstmonth: Any + dstdayofweek: Any + dstweeknumber: Any + dsthour: Any + dstminute: Any + def __init__(self, path) -> None: ... + + def valuesToDict(key): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..ed036f64 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/@tests/stubtest_allowlist.txt @@ -0,0 +1,36 @@ +waitress.adjustments.Adjustments.clear_untrusted_proxy_headers +waitress.adjustments.PY2 +waitress.adjustments.string_types +waitress.buffers.FileBasedBuffer.__bool__ +waitress.buffers.OverflowableBuffer.__bool__ +waitress.buffers.ReadOnlyFileBasedBuffer.__next__ +waitress.channel.HTTPChannel.addr +waitress.channel.HTTPChannel.error_task_class +waitress.channel.HTTPChannel.parser_class +waitress.channel.HTTPChannel.request +waitress.channel.HTTPChannel.task_class +waitress.compat.PY2 +waitress.compat.PY3 +waitress.compat.ResourceWarning +waitress.compat.class_types +waitress.compat.exec_ +waitress.compat.integer_types +waitress.compat.qualname +waitress.compat.reraise +waitress.compat.set_nonblocking +waitress.compat.string_types +waitress.compat.text_ +waitress.compat.tobytes +waitress.compat.tostr +waitress.compat.unquote_bytes_to_wsgi +waitress.rfc7230.tobytes +waitress.server.BaseWSGIServer.channel_class +waitress.server.BaseWSGIServer.get_server_name +waitress.server.MultiSocketServer.__init__ +waitress.server.WSGIServer +waitress.task.ErrorTask.content_length +waitress.task.ThreadedTaskDispatcher.start_new_thread +waitress.task.WSGITask.content_length +waitress.rfc7230.BWS +waitress.wasyncore.map +waitress.wasyncore.dispatcher_with_send.handle_write diff --git 
a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/@tests/stubtest_allowlist_darwin.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/@tests/stubtest_allowlist_darwin.txt new file mode 100644 index 00000000..3aad82a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/@tests/stubtest_allowlist_darwin.txt @@ -0,0 +1 @@ +waitress.server.UnixWSGIServer.get_server_name diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/@tests/stubtest_allowlist_linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/@tests/stubtest_allowlist_linux.txt new file mode 100644 index 00000000..3aad82a7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/@tests/stubtest_allowlist_linux.txt @@ -0,0 +1 @@ +waitress.server.UnixWSGIServer.get_server_name diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/METADATA.toml new file mode 100644 index 00000000..1819293d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/METADATA.toml @@ -0,0 +1,7 @@ +version = "2.1.*" +requires = [] + +[tool.stubtest] +ignore_missing_stub = true +# linux and darwin are equivalent +platforms = ["linux", "win32"] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/__init__.pyi new file mode 100644 index 00000000..f3aef6fb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/__init__.pyi @@ -0,0 +1,7 @@ +from typing import Any + +from waitress.server import create_server as create_server + +def serve(app: Any, **kw: Any) -> None: ... +def serve_paste(app: Any, global_conf: Any, **kw: Any) -> int: ... +def profile(cmd: Any, globals: Any, locals: Any, sort_order: tuple[str, ...], callers: bool) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/adjustments.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/adjustments.pyi new file mode 100644 index 00000000..433f7956 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/adjustments.pyi @@ -0,0 +1,62 @@ +from collections.abc import Iterable, Sequence +from socket import socket +from typing import Any + +from .compat import HAS_IPV6 as HAS_IPV6, PY2 as PY2, WIN as WIN, string_types as string_types +from .proxy_headers import PROXY_HEADERS as PROXY_HEADERS + +truthy: frozenset[Any] +KNOWN_PROXY_HEADERS: frozenset[Any] + +def asbool(s: bool | str | int | None) -> bool: ... +def asoctal(s: str) -> int: ... +def aslist_cronly(value: str) -> list[str]: ... +def aslist(value: str) -> list[str]: ... +def asset(value: str | None) -> set[str]: ... +def slash_fixed_str(s: str | None) -> str: ... +def str_iftruthy(s: str | None) -> str | None: ... +def as_socket_list(sockets: Sequence[object]) -> list[socket]: ... + +class _str_marker(str): ... +class _int_marker(int): ... +class _bool_marker: ... + +class Adjustments: + host: _str_marker = ... + port: _int_marker = ... 
+ listen: list[str] = ... + threads: int = ... + trusted_proxy: str | None = ... + trusted_proxy_count: int | None = ... + trusted_proxy_headers: set[str] = ... + log_untrusted_proxy_headers: bool = ... + clear_untrusted_proxy_headers: _bool_marker | bool = ... + url_scheme: str = ... + url_prefix: str = ... + ident: str = ... + backlog: int = ... + recv_bytes: int = ... + send_bytes: int = ... + outbuf_overflow: int = ... + outbuf_high_watermark: int = ... + inbuf_overflow: int = ... + connection_limit: int = ... + cleanup_interval: int = ... + channel_timeout: int = ... + log_socket_errors: bool = ... + max_request_header_size: int = ... + max_request_body_size: int = ... + expose_tracebacks: bool = ... + unix_socket: str | None = ... + unix_socket_perms: int = ... + socket_options: list[tuple[int, int, int]] = ... + asyncore_loop_timeout: int = ... + asyncore_use_poll: bool = ... + ipv4: bool = ... + ipv6: bool = ... + sockets: list[socket] = ... + def __init__(self, **kw: Any) -> None: ... + @classmethod + def parse_args(cls, argv: str) -> tuple[dict[str, Any], Any]: ... + @classmethod + def check_sockets(cls, sockets: Iterable[socket]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/buffers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/buffers.pyi new file mode 100644 index 00000000..0babddae --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/buffers.pyi @@ -0,0 +1,58 @@ +from collections.abc import Callable +from io import BufferedIOBase, BufferedRandom, BytesIO +from typing import Any + +COPY_BYTES: int +STRBUF_LIMIT: int + +class FileBasedBuffer: + remain: int = ... + file: BytesIO = ... + def __init__(self, file: BytesIO, from_buffer: BytesIO | None = ...) -> None: ... + def __len__(self) -> int: ... + def __nonzero__(self) -> bool: ... + __bool__: Callable[[], bool] = ... + def append(self, s: Any) -> None: ... + def get(self, numbytes: int = ..., skip: bool = ...) -> bytes: ... + def skip(self, numbytes: int, allow_prune: int = ...) -> None: ... + def newfile(self) -> Any: ... + def prune(self) -> None: ... + def getfile(self) -> Any: ... + def close(self) -> None: ... + +class TempfileBasedBuffer(FileBasedBuffer): + def __init__(self, from_buffer: BytesIO | None = ...) -> None: ... + def newfile(self) -> BufferedRandom: ... + +class BytesIOBasedBuffer(FileBasedBuffer): + file: BytesIO = ... + def __init__(self, from_buffer: BytesIO | None = ...) -> None: ... + def newfile(self) -> BytesIO: ... + +class ReadOnlyFileBasedBuffer(FileBasedBuffer): + file: BytesIO = ... + block_size: int = ... + def __init__(self, file: BytesIO, block_size: int = ...) -> None: ... + remain: int = ... + def prepare(self, size: int | None = ...) -> int: ... + def get(self, numbytes: int = ..., skip: bool = ...) -> bytes: ... + def __iter__(self) -> ReadOnlyFileBasedBuffer: ... + def next(self) -> bytes | None: ... + __next__: Callable[[], bytes | None] = ... + def append(self, s: Any) -> None: ... + +class OverflowableBuffer: + overflowed: bool = ... + buf: BufferedIOBase | None = ... + strbuf: bytes = ... + overflow: int = ... + def __init__(self, overflow: int) -> None: ... + def __len__(self) -> int: ... + def __nonzero__(self) -> bool: ... + __bool__: Callable[[], bool] = ... + def append(self, s: bytes) -> None: ... + def get(self, numbytes: int = ..., skip: bool = ...) -> bytes: ... 
+ def skip(self, numbytes: int, allow_prune: bool = ...) -> None: ... + def prune(self) -> None: ... + def getfile(self) -> BytesIO: ... + def close(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/channel.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/channel.pyi new file mode 100644 index 00000000..d14dff86 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/channel.pyi @@ -0,0 +1,49 @@ +from collections.abc import Mapping, Sequence +from socket import socket +from threading import Condition, Lock + +from waitress.adjustments import Adjustments +from waitress.buffers import OverflowableBuffer +from waitress.parser import HTTPRequestParser +from waitress.server import BaseWSGIServer +from waitress.task import ErrorTask, WSGITask + +from . import wasyncore as wasyncore + +class ClientDisconnected(Exception): ... + +class HTTPChannel(wasyncore.dispatcher): + task_class: WSGITask = ... + error_task_class: ErrorTask = ... + parser_class: HTTPRequestParser = ... + request: HTTPRequestParser = ... + last_activity: float = ... + will_close: bool = ... + close_when_flushed: bool = ... + requests: Sequence[HTTPRequestParser] = ... + sent_continue: bool = ... + total_outbufs_len: int = ... + current_outbuf_count: int = ... + server: BaseWSGIServer = ... + adj: Adjustments = ... + outbufs: Sequence[OverflowableBuffer] = ... + creation_time: float = ... + sendbuf_len: int = ... + task_lock: Lock = ... + outbuf_lock: Condition = ... + addr: tuple[str, int] = ... + def __init__( + self, server: BaseWSGIServer, sock: socket, addr: str, adj: Adjustments, map: Mapping[int, socket] | None = ... + ) -> None: ... + def writable(self) -> bool: ... + def handle_write(self) -> None: ... + def readable(self) -> bool: ... + def handle_read(self) -> None: ... + def received(self, data: bytes) -> bool: ... + connected: bool = ... + def handle_close(self) -> None: ... + def add_channel(self, map: Mapping[int, socket] | None = ...) -> None: ... + def del_channel(self, map: Mapping[int, socket] | None = ...) -> None: ... + def write_soon(self, data: bytes) -> int: ... + def service(self) -> None: ... + def cancel(self) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/compat.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/compat.pyi new file mode 100644 index 00000000..e0bb2a30 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/compat.pyi @@ -0,0 +1,30 @@ +from io import TextIOWrapper +from typing import Any +from typing_extensions import Literal + +PY2: Literal[False] +PY3: Literal[True] +WIN: bool +string_types: tuple[str] +integer_types: tuple[int] +class_types: tuple[type] + +def unquote_bytes_to_wsgi(bytestring: bytes) -> str: ... +def text_(s: str, encoding: str = ..., errors: str = ...) -> str: ... +def tostr(s: str) -> str: ... +def tobytes(s: str) -> bytes: ... + +exec_: Any + +def reraise(tp: Any, value: BaseException, tb: str | None = ...) -> None: ... + +MAXINT: int +HAS_IPV6: bool +IPPROTO_IPV6: int +IPV6_V6ONLY: int + +def set_nonblocking(fd: TextIOWrapper) -> None: ... + +ResourceWarning: Warning + +def qualname(cls: Any) -> str: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/parser.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/parser.pyi new file mode 100644 index 00000000..a3867aa4 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/parser.pyi @@ -0,0 +1,43 @@ +from collections.abc import Mapping, Sequence +from io import BytesIO +from re import Pattern +from typing import Any + +from waitress.adjustments import Adjustments +from waitress.receiver import ChunkedReceiver, FixedStreamReceiver +from waitress.utilities import Error + +class ParsingError(Exception): ... +class TransferEncodingNotImplemented(Exception): ... + +class HTTPRequestParser: + completed: bool = ... + empty: bool = ... + expect_continue: bool = ... + headers_finished: bool = ... + header_plus: bytes = ... + chunked: bool = ... + content_length: int = ... + header_bytes_received: int = ... + body_bytes_received: int = ... + body_rcv: ChunkedReceiver | FixedStreamReceiver | None = ... + version: str = ... + error: Error | None = ... + connection_close: bool = ... + headers: Mapping[str, str] = ... + adj: Adjustments = ... + def __init__(self, adj: Adjustments) -> None: ... + def received(self, data: bytes) -> int: ... + first_line: str = ... + command: bytes = ... + url_scheme: str = ... + def parse_header(self, header_plus: bytes) -> None: ... + def get_body_stream(self) -> BytesIO: ... + def close(self) -> None: ... + +def split_uri(uri: bytes) -> tuple[str, str, bytes, str, str]: ... +def get_header_lines(header: bytes) -> Sequence[bytes]: ... + +first_line_re: Pattern[Any] + +def crack_first_line(line: str) -> tuple[bytes, bytes, bytes]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/proxy_headers.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/proxy_headers.pyi new file mode 100644 index 00000000..d5b77453 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/proxy_headers.pyi @@ -0,0 +1,36 @@ +from collections.abc import Callable, Mapping, Sequence +from logging import Logger +from typing import Any, NamedTuple + +from .utilities import BadRequest as BadRequest + +PROXY_HEADERS: frozenset[Any] + +class Forwarded(NamedTuple): + by: Any + for_: Any + host: Any + proto: Any + +class MalformedProxyHeader(Exception): + header: str = ... + reason: str = ... + value: str = ... + def __init__(self, header: str, reason: str, value: str) -> None: ... + +def proxy_headers_middleware( + app: Any, + trusted_proxy: str | None = ..., + trusted_proxy_count: int = ..., + trusted_proxy_headers: set[str] | None = ..., + clear_untrusted: bool = ..., + log_untrusted: bool = ..., + logger: Logger = ..., +) -> Callable[..., Any]: ... +def parse_proxy_headers( + environ: Mapping[str, str], trusted_proxy_count: int, trusted_proxy_headers: set[str], logger: Logger = ... +) -> set[str]: ... +def strip_brackets(addr: str) -> str: ... +def clear_untrusted_headers( + environ: Mapping[str, str], untrusted_headers: Sequence[str], log_warning: bool = ..., logger: Logger = ... +) -> None: ... 
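The proxy_headers stub above declares waitress's public middleware entry point. As a quick illustration of how those declared signatures compose, here is a minimal usage sketch (assuming the real waitress package is installed; the WSGI app and header names below are placeholders, not taken from the stubs):

    # Wrap a WSGI app so forwarded headers from one trusted proxy are honoured.
    from waitress.proxy_headers import proxy_headers_middleware

    def app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [environ.get("wsgi.url_scheme", "http").encode()]

    wrapped = proxy_headers_middleware(
        app,
        trusted_proxy="127.0.0.1",          # only trust headers from this peer
        trusted_proxy_count=1,
        trusted_proxy_headers={"x-forwarded-for", "x-forwarded-proto"},
    )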
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/receiver.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/receiver.pyi new file mode 100644 index 00000000..d05ea56b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/receiver.pyi @@ -0,0 +1,31 @@ +from io import BytesIO + +from waitress.buffers import OverflowableBuffer +from waitress.utilities import BadRequest + +class FixedStreamReceiver: + completed: bool = ... + error: None = ... + remain: int = ... + buf: OverflowableBuffer = ... + def __init__(self, cl: int, buf: OverflowableBuffer) -> None: ... + def __len__(self) -> int: ... + def received(self, data: bytes) -> int: ... + def getfile(self) -> BytesIO: ... + def getbuf(self) -> OverflowableBuffer: ... + +class ChunkedReceiver: + chunk_remainder: int = ... + validate_chunk_end: bool = ... + control_line: bytes = ... + chunk_end: bytes = ... + all_chunks_received: bool = ... + trailer: bytes = ... + completed: bool = ... + error: BadRequest | None = ... + buf: OverflowableBuffer = ... + def __init__(self, buf: OverflowableBuffer) -> None: ... + def __len__(self) -> int: ... + def received(self, s: bytes) -> int: ... + def getfile(self) -> BytesIO: ... + def getbuf(self) -> OverflowableBuffer: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/rfc7230.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/rfc7230.pyi new file mode 100644 index 00000000..27593779 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/rfc7230.pyi @@ -0,0 +1,13 @@ +from .compat import tobytes as tobytes + +WS: str +OWS: str +RWS: str +BWS = str +TCHAR: str +OBS_TEXT: str +TOKEN: str +VCHAR: str +FIELD_VCHAR: str +FIELD_CONTENT: str +FIELD_VALUE: str diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/runner.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/runner.pyi new file mode 100644 index 00000000..aeced93f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/runner.pyi @@ -0,0 +1,13 @@ +from collections.abc import Callable, Sequence +from io import TextIOWrapper +from re import Pattern +from typing import Any + +HELP: str +RUNNER_PATTERN: Pattern[Any] + +def match(obj_name: str) -> tuple[str, str]: ... +def resolve(module_name: str, object_name: str) -> Any: ... +def show_help(stream: TextIOWrapper, name: str, error: str | None = ...) -> None: ... +def show_exception(stream: TextIOWrapper) -> None: ... +def run(argv: Sequence[str] = ..., _serve: Callable[..., object] = ...) -> None: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/server.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/server.pyi new file mode 100644 index 00000000..ada4b1b2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/server.pyi @@ -0,0 +1,106 @@ +import sys +from _typeshed import Incomplete +from collections.abc import Sequence +from socket import socket +from typing import Any + +from waitress import wasyncore +from waitress.adjustments import Adjustments +from waitress.channel import HTTPChannel +from waitress.task import Task, ThreadedTaskDispatcher + +def create_server( + application: Any, + map: Incomplete | None = ..., + _start: bool = ..., + _sock: socket | None = ..., + _dispatcher: ThreadedTaskDispatcher | None = ..., + **kw: Any, +) -> MultiSocketServer | BaseWSGIServer: ... + +class MultiSocketServer: + asyncore: Any = ... + adj: Adjustments = ... + map: Any = ... + effective_listen: Sequence[tuple[str, int]] = ... + task_dispatcher: ThreadedTaskDispatcher = ... + def __init__( + self, + map: Incomplete | None = ..., + adj: Adjustments | None = ..., + effective_listen: Sequence[tuple[str, int]] | None = ..., + dispatcher: ThreadedTaskDispatcher | None = ..., + ) -> None: ... + def print_listen(self, format_str: str) -> None: ... + def run(self) -> None: ... + def close(self) -> None: ... + +class BaseWSGIServer(wasyncore.dispatcher): + channel_class: HTTPChannel = ... + next_channel_cleanup: int = ... + socketmod: socket = ... + asyncore: Any = ... + sockinfo: tuple[int, int, int, tuple[str, int]] = ... + family: int = ... + socktype: int = ... + application: Any = ... + adj: Adjustments = ... + trigger: int = ... + task_dispatcher: ThreadedTaskDispatcher = ... + server_name: str = ... + active_channels: HTTPChannel = ... + def __init__( + self, + application: Any, + map: Incomplete | None = ..., + _start: bool = ..., + _sock: Incomplete | None = ..., + dispatcher: ThreadedTaskDispatcher | None = ..., + adj: Adjustments | None = ..., + sockinfo: Incomplete | None = ..., + bind_socket: bool = ..., + **kw: Any, + ) -> None: ... + def bind_server_socket(self) -> None: ... + def get_server_name(self, ip: str) -> str: ... + def getsockname(self) -> Any: ... + accepting: bool = ... + def accept_connections(self) -> None: ... + def add_task(self, task: Task) -> None: ... + def readable(self) -> bool: ... + def writable(self) -> bool: ... + def handle_read(self) -> None: ... + def handle_connect(self) -> None: ... + def handle_accept(self) -> None: ... + def run(self) -> None: ... + def pull_trigger(self) -> None: ... + def set_socket_options(self, conn: Any) -> None: ... + def fix_addr(self, addr: Any) -> Any: ... + def maintenance(self, now: int) -> None: ... + def print_listen(self, format_str: str) -> None: ... + def close(self) -> None: ... + +class TcpWSGIServer(BaseWSGIServer): + def bind_server_socket(self) -> None: ... + def getsockname(self) -> tuple[str, tuple[str, int]]: ... + def set_socket_options(self, conn: socket) -> None: ... + +if sys.platform != "win32": + class UnixWSGIServer(BaseWSGIServer): + def __init__( + self, + application: Any, + map: Incomplete | None = ..., + _start: bool = ..., + _sock: Incomplete | None = ..., + dispatcher: Incomplete | None = ..., + adj: Adjustments | None = ..., + sockinfo: Incomplete | None = ..., + **kw: Any, + ) -> None: ... + def bind_server_socket(self) -> None: ... 
+ def getsockname(self) -> tuple[str, tuple[str, int]]: ... + def fix_addr(self, addr: Any) -> tuple[str, None]: ... + def get_server_name(self, ip: Any) -> str: ... + +WSGIServer: TcpWSGIServer diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/task.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/task.pyi new file mode 100644 index 00000000..1f33f189 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/task.pyi @@ -0,0 +1,71 @@ +from _typeshed import Incomplete +from collections import deque +from collections.abc import Mapping, Sequence +from logging import Logger +from threading import Condition, Lock +from typing import Any + +from .channel import HTTPChannel +from .utilities import Error + +rename_headers: Mapping[str, str] +hop_by_hop: frozenset[Any] + +class ThreadedTaskDispatcher: + stop_count: int = ... + active_count: int = ... + logger: Logger = ... + queue_logger: Logger = ... + threads: set[Any] = ... + queue: deque[Task] = ... + lock: Lock = ... + queue_cv: Condition = ... + thread_exit_cv: Condition = ... + def start_new_thread(self, target: Any, args: Any) -> None: ... + def handler_thread(self, thread_no: int) -> None: ... + def set_thread_count(self, count: int) -> None: ... + def add_task(self, task: Task) -> None: ... + def shutdown(self, cancel_pending: bool = ..., timeout: int = ...) -> bool: ... + +class Task: + close_on_finish: bool = ... + status: str = ... + wrote_header: bool = ... + start_time: int = ... + content_length: int | None = ... + content_bytes_written: int = ... + logged_write_excess: bool = ... + logged_write_no_body: bool = ... + complete: bool = ... + chunked_response: bool = ... + logger: Logger = ... + channel: HTTPChannel = ... + request: Error = ... + response_headers: Sequence[tuple[str, str]] = ... + version: str = ... + def __init__(self, channel: HTTPChannel, request: Error) -> None: ... + def service(self) -> None: ... + @property + def has_body(self) -> bool: ... + def build_response_header(self) -> bytes: ... + def remove_content_length_header(self) -> None: ... + def start(self) -> None: ... + def finish(self) -> None: ... + def write(self, data: bytes) -> None: ... + +class ErrorTask(Task): + complete: bool = ... + status: str = ... + close_on_finish: bool = ... + content_length: int = ... + def execute(self) -> None: ... + +class WSGITask(Task): + environ: Incomplete | None = ... + response_headers: Sequence[tuple[str, str]] = ... + complete: bool = ... + status: str = ... + content_length: int = ... + close_on_finish: bool = ... + def execute(self) -> None: ... + def get_environment(self) -> Any: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/trigger.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/trigger.pyi new file mode 100644 index 00000000..45d1a40d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/trigger.pyi @@ -0,0 +1,30 @@ +import sys +from collections.abc import Callable, Mapping +from socket import socket +from threading import Lock +from typing_extensions import Literal + +from waitress import wasyncore as wasyncore + +class _triggerbase: + kind: str | None = ... + lock: Lock = ... + thunks: Callable[[None], None] = ... + def readable(self) -> Literal[True]: ... 
+ def writable(self) -> Literal[False]: ... + def handle_connect(self) -> None: ... + def handle_close(self) -> None: ... + def close(self) -> None: ... + def pull_trigger(self, thunk: Callable[[None], object] | None = ...) -> None: ... + def handle_read(self) -> None: ... + +if sys.platform != "win32": + class trigger(_triggerbase, wasyncore.file_dispatcher): + kind: str = ... + def __init__(self, map: Mapping[str, _triggerbase]) -> None: ... + +else: + class trigger(_triggerbase, wasyncore.dispatcher): + kind: str = ... + trigger: socket = ... + def __init__(self, map: Mapping[str, _triggerbase]) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/utilities.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/utilities.pyi new file mode 100644 index 00000000..cf3d44ca --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/utilities.pyi @@ -0,0 +1,68 @@ +from _typeshed.wsgi import StartResponse +from collections.abc import Iterator, Mapping, Sequence +from logging import Logger +from re import Match, Pattern +from typing import Any + +logger: Logger +queue_logger: Logger + +def find_double_newline(s: bytes) -> int: ... +def concat(*args: Any) -> str: ... +def join(seq: Any, field: str = ...) -> str: ... +def group(s: Any) -> str: ... + +short_days: Sequence[str] +long_days: Sequence[str] +short_day_reg: str +long_day_reg: str +daymap: Mapping[str, int] +hms_reg: str +months: Sequence[str] +monmap: Mapping[str, int] +months_reg: str +rfc822_date: str +rfc822_reg: Pattern[Any] + +def unpack_rfc822(m: Match[Any]) -> tuple[int, int, int, int, int, int, int, int, int]: ... + +rfc850_date: str +rfc850_reg: Pattern[Any] + +def unpack_rfc850(m: Match[Any]) -> tuple[int, int, int, int, int, int, int, int, int]: ... + +weekdayname: Sequence[str] +monthname: Sequence[str] + +def build_http_date(when: int) -> str: ... +def parse_http_date(d: str) -> int: ... +def undquote(value: str) -> str: ... +def cleanup_unix_socket(path: str) -> None: ... + +class Error: + code: int = ... + reason: str = ... + body: str = ... + def __init__(self, body: str) -> None: ... + def to_response(self) -> tuple[str, Sequence[tuple[str, str]], str]: ... + def wsgi_response(self, environ: Any, start_response: StartResponse) -> Iterator[str]: ... + +class BadRequest(Error): + code: int = ... + reason: str = ... + +class RequestHeaderFieldsTooLarge(BadRequest): + code: int = ... + reason: str = ... + +class RequestEntityTooLarge(BadRequest): + code: int = ... + reason: str = ... + +class InternalServerError(Error): + code: int = ... + reason: str = ... + +class ServerNotImplemented(Error): + code: int = ... + reason: str = ... 
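The server, task and trigger stubs above describe waitress's serving loop. A minimal driving sketch against those declared signatures (assuming the real waitress package; the app and listen address are placeholders):

    from waitress.server import create_server

    def app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"hello"]

    # Per the stub, create_server() returns a MultiSocketServer or BaseWSGIServer;
    # run() blocks and hands requests to the ThreadedTaskDispatcher.
    server = create_server(app, listen="127.0.0.1:8080")
    try:
        server.run()
    finally:
        server.close()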
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/wasyncore.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/wasyncore.pyi new file mode 100644 index 00000000..0125447f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/waitress/waitress/wasyncore.pyi @@ -0,0 +1,98 @@ +import sys +from collections.abc import Callable, Mapping +from io import BytesIO +from logging import Logger +from socket import socket +from typing import Any +from typing_extensions import TypeAlias + +from waitress import compat as compat, utilities as utilities + +_Socket: TypeAlias = socket + +socket_map: Mapping[int, socket] +map: Mapping[int, socket] + +class ExitNow(Exception): ... + +def read(obj: dispatcher) -> None: ... +def write(obj: dispatcher) -> None: ... +def readwrite(obj: dispatcher, flags: int) -> None: ... +def poll(timeout: float = ..., map: Mapping[int, socket] | None = ...) -> None: ... +def poll2(timeout: float = ..., map: Mapping[int, socket] | None = ...) -> None: ... + +poll3 = poll2 + +def loop(timeout: float = ..., use_poll: bool = ..., map: Mapping[int, socket] | None = ..., count: int | None = ...) -> None: ... +def compact_traceback() -> tuple[tuple[str, str, str], BaseException, BaseException, str]: ... + +class dispatcher: + debug: bool = ... + connected: bool = ... + accepting: bool = ... + connecting: bool = ... + closing: bool = ... + addr: tuple[str, int] | None = ... + ignore_log_types: frozenset[Any] + logger: Logger = ... + compact_traceback: Callable[[], tuple[tuple[str, str, str], BaseException, BaseException, str]] = ... + socket: _Socket | None = ... + def __init__(self, sock: _Socket | None = ..., map: Mapping[int, _Socket] | None = ...) -> None: ... + def add_channel(self, map: Mapping[int, _Socket] | None = ...) -> None: ... + def del_channel(self, map: Mapping[int, _Socket] | None = ...) -> None: ... + family_and_type: tuple[int, int] = ... + def create_socket(self, family: int = ..., type: int = ...) -> None: ... + def set_socket(self, sock: _Socket, map: Mapping[int, _Socket] | None = ...) -> None: ... + def set_reuse_addr(self) -> None: ... + def readable(self) -> bool: ... + def writable(self) -> bool: ... + def listen(self, num: int) -> None: ... + def bind(self, addr: tuple[str, int]) -> None: ... + def connect(self, address: tuple[str, int]) -> None: ... + def accept(self) -> tuple[_Socket, tuple[str, int]] | None: ... + def send(self, data: bytes, do_close: bool = ...) -> int: ... + def recv(self, buffer_size: int) -> bytes: ... + def close(self) -> None: ... + def log(self, message: str) -> None: ... + def log_info(self, message: str, type: str = ...) -> None: ... + def handle_read_event(self) -> None: ... + def handle_connect_event(self) -> None: ... + def handle_write_event(self) -> None: ... + def handle_expt_event(self) -> None: ... + def handle_error(self) -> None: ... + def handle_expt(self) -> None: ... + def handle_read(self) -> None: ... + def handle_write(self) -> None: ... + def handle_connect(self) -> None: ... + def handle_accept(self) -> None: ... + def handle_accepted(self, sock: _Socket, addr: Any) -> None: ... + def handle_close(self) -> None: ... + +class dispatcher_with_send(dispatcher): + out_buffer: bytes = ... + def __init__(self, sock: socket | None = ..., map: Mapping[int, socket] | None = ...) -> None: ... + def initiate_send(self) -> None: ... + handle_write: Callable[[], None] = ... 
+ def writable(self) -> bool: ... + def send(self, data: bytes) -> None: ... # type: ignore[override] + +def close_all(map: Mapping[int, socket] | None = ..., ignore_all: bool = ...) -> None: ... + +if sys.platform != "win32": + class file_wrapper: + fd: BytesIO = ... + def __init__(self, fd: BytesIO) -> None: ... + def __del__(self) -> None: ... + def recv(self, *args: Any) -> bytes: ... + def send(self, *args: Any) -> bytes: ... + def getsockopt(self, level: int, optname: int, buflen: bool | None = ...) -> int: ... + read: Callable[..., bytes] = ... + write: Callable[..., bytes] = ... + def close(self) -> None: ... + def fileno(self) -> BytesIO: ... + + class file_dispatcher(dispatcher): + connected: bool = ... + def __init__(self, fd: BytesIO, map: Mapping[int, _Socket] | None = ...) -> None: ... + socket: _Socket = ... + def set_file(self, fd: BytesIO) -> None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/METADATA.toml new file mode 100644 index 00000000..f3e83f9c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/METADATA.toml @@ -0,0 +1 @@ +version = "1.0.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/__init__.pyi new file mode 100644 index 00000000..600e4526 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/__init__.pyi @@ -0,0 +1,2 @@ +from .apply import apply_diff as apply_diff +from .patch import parse_patch as parse_patch diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/apply.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/apply.pyi new file mode 100644 index 00000000..c927a4ce --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/apply.pyi @@ -0,0 +1,8 @@ +from collections.abc import Iterable + +from . import patch as patch +from .exceptions import HunkApplyException as HunkApplyException, SubprocessException as SubprocessException +from .snippets import remove as remove, which as which + +def apply_patch(diffs: patch.diffobj | Iterable[patch.diffobj]) -> None: ... +def apply_diff(diff: patch.diffobj, text: str | Iterable[str], reverse: bool = ..., use_patch: bool = ...) -> list[str]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/exceptions.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/exceptions.pyi new file mode 100644 index 00000000..918275ea --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/exceptions.pyi @@ -0,0 +1,14 @@ +class WhatThePatchException(Exception): ... + +class HunkException(WhatThePatchException): + hunk: int | None + def __init__(self, msg: str, hunk: int | None = ...) -> None: ... + +class ApplyException(WhatThePatchException): ... + +class SubprocessException(ApplyException): + code: int + def __init__(self, msg: str, code: int) -> None: ... 
+ +class HunkApplyException(HunkException, ApplyException, ValueError): ... +class ParseException(HunkException, ValueError): ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/patch.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/patch.pyi new file mode 100644 index 00000000..48864677 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/patch.pyi @@ -0,0 +1,90 @@ +from collections.abc import Iterable, Iterator +from re import Pattern +from typing import NamedTuple + +from . import exceptions as exceptions +from .snippets import findall_regex as findall_regex, split_by_regex as split_by_regex + +class header(NamedTuple): + index_path: str | None + old_path: str + old_version: str | None + new_path: str + new_version: str | None + +class diffobj(NamedTuple): + header: header | None + changes: list[Change] | None + text: str + +class Change(NamedTuple): + old: int | None + new: int | None + line: int | None + hunk: int + +file_timestamp_str: str + +diffcmd_header: Pattern[str] +unified_header_index: Pattern[str] +unified_header_old_line: Pattern[str] +unified_header_new_line: Pattern[str] +unified_hunk_start: Pattern[str] +unified_change: Pattern[str] + +context_header_old_line: Pattern[str] +context_header_new_line: Pattern[str] +context_hunk_start: Pattern[str] +context_hunk_old: Pattern[str] +context_hunk_new: Pattern[str] +context_change: Pattern[str] + +ed_hunk_start: Pattern[str] +ed_hunk_end: Pattern[str] +rcs_ed_hunk_start: Pattern[str] + +default_hunk_start: Pattern[str] +default_hunk_mid: Pattern[str] +default_change: Pattern[str] + +git_diffcmd_header: Pattern[str] +git_header_index: Pattern[str] +git_header_old_line: Pattern[str] +git_header_new_line: Pattern[str] +git_header_file_mode: Pattern[str] +git_header_binary_file: Pattern[str] +git_binary_patch_start: Pattern[str] +git_binary_literal_start: Pattern[str] +git_binary_delta_start: Pattern[str] +base85string: Pattern[str] + +bzr_header_index: Pattern[str] +bzr_header_old_line: Pattern[str] +bzr_header_new_line: Pattern[str] + +svn_header_index: Pattern[str] +svn_header_timestamp_version: Pattern[str] +svn_header_timestamp: Pattern[str] +cvs_header_index: Pattern[str] +cvs_header_rcs: Pattern[str] +cvs_header_timestamp: Pattern[str] +cvs_header_timestamp_colon: Pattern[str] +old_cvs_diffcmd_header: Pattern[str] + +def parse_patch(text: str | Iterable[str]) -> Iterator[diffobj]: ... +def parse_header(text: str | Iterable[str]) -> header | None: ... +def parse_scm_header(text: str | Iterable[str]) -> header | None: ... +def parse_diff_header(text: str | Iterable[str]) -> header | None: ... +def parse_diff(text: str | Iterable[str]) -> list[Change] | None: ... +def parse_git_header(text: str | Iterable[str]) -> header | None: ... +def parse_svn_header(text: str | Iterable[str]) -> header | None: ... +def parse_cvs_header(text: str | Iterable[str]) -> header | None: ... +def parse_diffcmd_header(text: str | Iterable[str]) -> header | None: ... +def parse_unified_header(text: str | Iterable[str]) -> header | None: ... +def parse_context_header(text: str | Iterable[str]) -> header | None: ... +def parse_default_diff(text: str | Iterable[str]) -> list[Change] | None: ... +def parse_unified_diff(text: str | Iterable[str]) -> list[Change] | None: ... +def parse_context_diff(text: str | Iterable[str]) -> list[Change] | None: ... 
+def parse_ed_diff(text: str | Iterable[str]) -> list[Change] | None: ... +def parse_rcs_ed_diff(text: str | Iterable[str]) -> list[Change] | None: ... +def parse_git_binary_diff(text: str | Iterable[str]) -> list[Change]: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/snippets.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/snippets.pyi new file mode 100644 index 00000000..edb748f5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/whatthepatch/whatthepatch/snippets.pyi @@ -0,0 +1,7 @@ +from collections.abc import Sequence +from re import Pattern + +def remove(path: str) -> None: ... +def findall_regex(items: Sequence[str], regex: Pattern[str]) -> list[int]: ... +def split_by_regex(items: Sequence[str], regex: Pattern[str]) -> list[Sequence[str]]: ... +def which(program: str) -> str | None: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xmltodict/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xmltodict/METADATA.toml new file mode 100644 index 00000000..9f7bb49c --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xmltodict/METADATA.toml @@ -0,0 +1 @@ +version = "0.13.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xmltodict/xmltodict.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xmltodict/xmltodict.pyi new file mode 100644 index 00000000..c370329f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xmltodict/xmltodict.pyi @@ -0,0 +1,38 @@ +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite +from collections import OrderedDict +from collections.abc import Mapping +from types import GeneratorType +from typing import Any, overload + +__license__: str + +class ParsingInterrupted(Exception): ... + +def parse( + xml_input: str | ReadableBuffer | SupportsRead[bytes] | GeneratorType[ReadableBuffer, Any, Any], + encoding: str | None = ..., + expat: Any = ..., + process_namespaces: bool = ..., + namespace_separator: str = ..., + disable_entities: bool = ..., + process_comments: bool = ..., + **kwargs: Any, +) -> OrderedDict[str, Any]: ... +@overload +def unparse( + input_dict: Mapping[str, Any], + output: SupportsWrite[bytes] | SupportsWrite[str], + encoding: str = ..., + full_document: bool = ..., + short_empty_elements: bool = ..., + **kwargs: Any, +) -> None: ... +@overload +def unparse( + input_dict: Mapping[str, Any], + output: None = ..., + encoding: str = ..., + full_document: bool = ..., + short_empty_elements: bool = ..., + **kwargs: Any, +) -> str: ... 
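The xmltodict stub above exposes only parse() and the two unparse() overloads. A small round-trip sketch against those signatures (assuming the real xmltodict package; the document below is invented for illustration):

    import xmltodict

    doc = xmltodict.parse('<note id="1"><to>Ada</to></note>')
    assert doc["note"]["@id"] == "1"      # attributes are keyed with an '@' prefix
    assert doc["note"]["to"] == "Ada"     # simple elements collapse to their text

    # With no output stream supplied, the second overload applies and a str is returned.
    xml = xmltodict.unparse(doc)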
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xxhash/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xxhash/METADATA.toml new file mode 100644 index 00000000..82b4d333 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xxhash/METADATA.toml @@ -0,0 +1,2 @@ +version = "3.0.*" +obsolete_since = "3.1.0" # Released on 2022-10-19 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xxhash/xxhash/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xxhash/xxhash/__init__.pyi new file mode 100644 index 00000000..a98e66ca --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xxhash/xxhash/__init__.pyi @@ -0,0 +1,49 @@ +from _typeshed import ReadableBuffer +from hashlib import _Hash +from typing_extensions import SupportsIndex, final + +VERSION: str +XXHASH_VERSION: str +VERSION_TUPLE: tuple[int, ...] + +algorithms_available: set[str] + +class _IntDigestHash(_Hash): + @property + def seed(self) -> int: ... + @property + def digestsize(self) -> int: ... + def __init__(self, input: ReadableBuffer | str = ..., seed: SupportsIndex = ...) -> None: ... + def intdigest(self) -> int: ... + def reset(self) -> None: ... + +# python-xxhash v2.0.0 does not support the string or usedforsecurity kwargs +@final +class xxh32(_IntDigestHash): ... + +@final +class xxh64(_IntDigestHash): ... + +@final +class xxh3_64(_IntDigestHash): ... + +@final +class xxh3_128(_IntDigestHash): ... + +def xxh32_digest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> bytes: ... +def xxh32_intdigest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> int: ... +def xxh32_hexdigest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> str: ... +def xxh64_digest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> bytes: ... +def xxh64_intdigest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> int: ... +def xxh64_hexdigest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> str: ... +def xxh3_64_digest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> bytes: ... +def xxh3_64_intdigest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> int: ... +def xxh3_64_hexdigest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> str: ... +def xxh3_128_digest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> bytes: ... +def xxh3_128_intdigest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> int: ... +def xxh3_128_hexdigest(input: ReadableBuffer | str, seed: SupportsIndex = ...) -> str: ... 
+ +xxh128 = xxh3_128 +xxh128_digest = xxh3_128_digest +xxh128_intdigest = xxh3_128_intdigest +xxh128_hexdigest = xxh3_128_hexdigest diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xxhash/xxhash/version.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xxhash/xxhash/version.pyi new file mode 100644 index 00000000..b18bf8c3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/xxhash/xxhash/version.pyi @@ -0,0 +1,4 @@ +from _typeshed import Incomplete + +VERSION: str +VERSION_TUPLE: Incomplete diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zstd/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zstd/METADATA.toml new file mode 100644 index 00000000..97ceca8a --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zstd/METADATA.toml @@ -0,0 +1 @@ +version = "1.5.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zstd/zstd.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zstd/zstd.pyi new file mode 100644 index 00000000..3384e905 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zstd/zstd.pyi @@ -0,0 +1,15 @@ +from _typeshed import ReadableBuffer + +class Error(Exception): ... + +def ZSTD_compress(__data: ReadableBuffer, __level: int = ..., __threads: int = ...) -> bytes: ... +def ZSTD_external() -> int: ... +def ZSTD_uncompress(__data: ReadableBuffer) -> bytes: ... +def ZSTD_version() -> str: ... +def ZSTD_version_number() -> int: ... +def compress(__data: ReadableBuffer, __level: int = ..., __threads: int = ...) -> bytes: ... +def decompress(__data: ReadableBuffer) -> bytes: ... +def dumps(__data: ReadableBuffer, __level: int = ..., __threads: int = ...) -> bytes: ... +def loads(__data: ReadableBuffer) -> bytes: ... +def uncompress(__data: ReadableBuffer) -> bytes: ... +def version() -> str: ... 
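Together, the xxhash and zstd stubs above describe small, flat module APIs. A combined smoke-test sketch against the declared signatures (assuming the real xxhash and zstd packages; the payload is arbitrary):

    import xxhash
    import zstd

    payload = b"typeshed benchmark payload"

    # One-shot and incremental xxh64 agree (both default to seed 0).
    assert xxhash.xxh64_hexdigest(payload) == xxhash.xxh64(payload).hexdigest()

    # Round-trip through the zstd bindings at compression level 3.
    blob = zstd.compress(payload, 3)
    assert zstd.decompress(blob) == payload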
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/@tests/stubtest_allowlist.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/@tests/stubtest_allowlist.txt new file mode 100644 index 00000000..5842f595 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/@tests/stubtest_allowlist.txt @@ -0,0 +1 @@ +zxcvbn.__main__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/METADATA.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/METADATA.toml new file mode 100644 index 00000000..2e800d80 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/METADATA.toml @@ -0,0 +1 @@ +version = "4.4.*" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/__init__.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/__init__.pyi new file mode 100644 index 00000000..f363552b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/__init__.pyi @@ -0,0 +1,18 @@ +import datetime +from collections.abc import Iterable +from decimal import Decimal +from typing_extensions import TypedDict + +from .feedback import _Feedback +from .matching import _Match +from .time_estimates import _TimeEstimate + +class _Result(_TimeEstimate, TypedDict): + password: str + guesses: Decimal + guesses_log10: float + sequence: list[_Match] + calc_time: datetime.timedelta + feedback: _Feedback + +def zxcvbn(password: str, user_inputs: Iterable[object] | None = ...) -> _Result: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/adjacency_graphs.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/adjacency_graphs.pyi new file mode 100644 index 00000000..8b1e8e82 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/adjacency_graphs.pyi @@ -0,0 +1,5 @@ +from typing_extensions import TypeAlias + +_Graph: TypeAlias = dict[str, list[str | None]] + +ADJACENCY_GRAPHS: dict[str, _Graph] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/feedback.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/feedback.pyi new file mode 100644 index 00000000..31b8e63b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/feedback.pyi @@ -0,0 +1,12 @@ +from collections.abc import Sequence +from typing_extensions import Literal, TypedDict + +from .matching import _Match + +class _Feedback(TypedDict): + warning: str + suggestions: list[str] + +def get_feedback(score: Literal[0, 1, 2, 3, 4], sequence: Sequence[_Match]) -> _Feedback: ... +def get_match_feedback(match: _Match, is_sole_match: bool) -> _Feedback: ... +def get_dictionary_match_feedback(match: _Match, is_sole_match: bool) -> _Feedback: ... 
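The zxcvbn __init__ stub above types the top-level zxcvbn() call and its _Result shape (the score and crack-time keys come from the _TimeEstimate mixin stubbed in time_estimates.pyi further down). A minimal sketch against those declarations (assuming the real zxcvbn package; the password and user inputs are placeholders):

    from zxcvbn import zxcvbn

    result = zxcvbn("correcthorsebatterystaple", user_inputs=["alice", "example.com"])
    print(result["guesses_log10"])         # float, per _Result
    print(result["feedback"]["warning"])   # _Feedback fields from feedback.pyi
    print(result["feedback"]["suggestions"])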
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/frequency_lists.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/frequency_lists.pyi new file mode 100644 index 00000000..08445004 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/frequency_lists.pyi @@ -0,0 +1 @@ +FREQUENCY_LISTS: dict[str, list[str]] diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/matching.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/matching.pyi new file mode 100644 index 00000000..8d787134 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/matching.pyi @@ -0,0 +1,95 @@ +from collections.abc import Iterable, Mapping +from decimal import Decimal +from re import Pattern +from typing import Any +from typing_extensions import Literal, NotRequired, TypedDict + +from .adjacency_graphs import _Graph + +class _Match(TypedDict): + pattern: Literal["dictionary", "spatial", "repeat", "sequence", "regex", "date"] + token: str + i: int + j: int + guesses: NotRequired[int] # all patterns except 'date' + guesses_log10: NotRequired[float] # all patterns except 'date' + + # pattern == 'date' + separator: NotRequired[str] + year: NotRequired[int] + month: NotRequired[int] + day: NotRequired[int] + + # pattern == 'dictionary' + matched_word: NotRequired[str] + dictionary_name: NotRequired[str] + l33t: NotRequired[bool] + reversed: NotRequired[bool] + rank: NotRequired[int] + base_guesses: NotRequired[int | Decimal] # Decimal for 'repeat', see below + uppercase_variations: NotRequired[int] + l33t_variations: NotRequired[int] + + # pattern == 'spatial' + turns: NotRequired[int] + + # pattern == 'repeat' + base_token: NotRequired[str] + # base_guesses: NotRequired[Decimal] + base_matches: NotRequired[list[Any]] # Any = _Match, https://github.com/python/mypy/issues/731 + repeat_count: NotRequired[float] + + # pattern == 'regex' + regex_name: NotRequired[str] + +def build_ranked_dict(ordered_list: Iterable[str]) -> dict[str, int]: ... + +RANKED_DICTIONARIES: dict[str, dict[str, int]] + +def add_frequency_lists(frequency_lists_: Mapping[str, Iterable[str]]) -> None: ... + +GRAPHS: dict[str, dict[str, list[str | None]]] +L33T_TABLE: dict[str, list[str]] +REGEXEN: dict[str, Pattern[str]] +DATE_MAX_YEAR: int +DATE_MIN_YEAR: int +DATE_SPLITS: dict[int, list[list[int]]] + +def omnimatch(password: str, _ranked_dictionaries: dict[str, dict[str, int]] = ...) -> list[_Match]: ... +def dictionary_match(password: str, _ranked_dictionaries: dict[str, dict[str, int]] = ...) -> list[_Match]: ... +def reverse_dictionary_match(password: str, _ranked_dictionaries: dict[str, dict[str, int]] = ...) -> list[_Match]: ... +def relevant_l33t_subtable(password: str, table: Mapping[str, Iterable[str]]) -> dict[str, list[str]]: ... +def enumerate_l33t_subs(table: Mapping[str, Iterable[str]]) -> list[dict[str, str]]: ... +def translate(string: str, chr_map: Mapping[str, str]) -> str: ... +def l33t_match( + password: str, _ranked_dictionaries: dict[str, dict[str, int]] = ..., _l33t_table: dict[str, list[str]] = ... +) -> list[_Match]: ... +def repeat_match(password: str, _ranked_dictionaries: dict[str, dict[str, int]] = ...) -> list[_Match]: ... 
+def spatial_match( + password: str, _graphs: dict[str, _Graph] = ..., _ranked_dictionaries: dict[str, dict[str, int]] = ... +) -> list[_Match]: ... + +SHIFTED_RX: Pattern[str] + +def spatial_match_helper(password: str, graph: _Graph, graph_name: str) -> list[_Match]: ... + +MAX_DELTA: int + +def sequence_match(password: str, _ranked_dictionaries: dict[str, dict[str, int]] = ...) -> list[_Match]: ... +def regex_match( + password: str, _regexen: dict[str, Pattern[str]] = ..., _ranked_dictionaries: dict[str, dict[str, int]] = ... +) -> list[_Match]: ... +def date_match(password: str, _ranked_dictionaries: dict[str, dict[str, int]] = ...) -> list[_Match]: ... + +class _DM(TypedDict): + month: int + day: int + +class _DMY(TypedDict): + year: int + month: int + day: int + +def map_ints_to_dmy(ints: tuple[int, int, int]) -> _DMY | None: ... +def map_ints_to_dm(ints: tuple[int, int]) -> _DM | None: ... +def two_to_four_digit_year(year: int) -> int: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/scoring.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/scoring.pyi new file mode 100644 index 00000000..1d7d7be0 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/scoring.pyi @@ -0,0 +1,47 @@ +from collections.abc import Iterable +from decimal import Decimal +from re import Pattern +from typing_extensions import TypedDict + +from .adjacency_graphs import _Graph +from .matching import _Match + +def calc_average_degree(graph: _Graph) -> float: ... + +BRUTEFORCE_CARDINALITY: int +MIN_GUESSES_BEFORE_GROWING_SEQUENCE: int +MIN_SUBMATCH_GUESSES_SINGLE_CHAR: int +MIN_SUBMATCH_GUESSES_MULTI_CHAR: int +MIN_YEAR_SPACE: int +REFERENCE_YEAR: int + +class _GuessesResult(TypedDict): + password: str + guesses: int + guesses_log10: float + sequence: list[_Match] + +def nCk(n: int, k: int) -> float: ... +def most_guessable_match_sequence(password: str, matches: Iterable[_Match], _exclude_additive: bool = ...) -> _GuessesResult: ... +def estimate_guesses(match: _Match, password: str) -> Decimal: ... +def bruteforce_guesses(match: _Match) -> int: ... +def dictionary_guesses(match: _Match) -> int: ... +def repeat_guesses(match: _Match) -> Decimal: ... +def sequence_guesses(match: _Match) -> int: ... +def regex_guesses(match: _Match) -> int | None: ... +def date_guesses(match: _Match) -> int: ... + +KEYBOARD_AVERAGE_DEGREE: float +KEYPAD_AVERAGE_DEGREE: float +KEYBOARD_STARTING_POSITIONS: int +KEYPAD_STARTING_POSITIONS: int + +def spatial_guesses(match: _Match) -> int: ... + +START_UPPER: Pattern[str] +END_UPPER: Pattern[str] +ALL_UPPER: Pattern[str] +ALL_LOWER: Pattern[str] + +def uppercase_variations(match: _Match) -> int: ... +def l33t_variations(match: _Match) -> int: ... 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/time_estimates.pyi b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/time_estimates.pyi new file mode 100644 index 00000000..deafec46 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/stubs/zxcvbn/zxcvbn/time_estimates.pyi @@ -0,0 +1,24 @@ +from decimal import Decimal +from typing_extensions import Literal, TypedDict + +class _TimeEstimate(TypedDict): + crack_times_seconds: _CrackTimeSeconds + crack_times_display: _CrackTimesDisplay + score: Literal[0, 1, 2, 3, 4] + +class _CrackTimeSeconds(TypedDict): + online_throttling_100_per_hour: Decimal + online_no_throttling_10_per_second: Decimal + offline_slow_hashing_1e4_per_second: Decimal + offline_fast_hashing_1e10_per_second: Decimal + +class _CrackTimesDisplay(TypedDict): + online_throttling_100_per_hour: str + online_no_throttling_10_per_second: str + offline_slow_hashing_1e4_per_second: str + offline_fast_hashing_1e10_per_second: str + +def estimate_attack_times(guesses: Decimal | float) -> _TimeEstimate: ... +def guesses_to_score(guesses: Decimal) -> Literal[0, 1, 2, 3, 4]: ... +def display_time(seconds: float) -> str: ... +def float_to_decimal(f: float) -> Decimal: ... diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py310.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py310.txt new file mode 100644 index 00000000..23992673 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py310.txt @@ -0,0 +1,15 @@ +_?curses.color_pair + +# Github Actions on macOS with Python 3.10.5 claims these are missing, but they do exist locally +(locale.bind_textdomain_codeset)? +(locale.bindtextdomain)? +(locale.dcgettext)? +(locale.dgettext)? +(locale.gettext)? +(locale.textdomain)? + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +distutils.command.bdist_msi # Only available on Windows and Python 3.10 and below diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py311.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py311.txt new file mode 100644 index 00000000..388f1d9b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py311.txt @@ -0,0 +1,13 @@ +_?curses.color_pair +xxlimited.Xxo.x_exports + +# Exists at runtime, missing from stub +socket.TCP_CONNECTION_INFO + +(dbm.gnu)? +(locale.bind_textdomain_codeset)? +(locale.bindtextdomain)? +(locale.dcgettext) +(locale.dgettext)? +(locale.gettext)? +(locale.textdomain)? 
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py37.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py37.txt new file mode 100644 index 00000000..e300b9f1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py37.txt @@ -0,0 +1,9 @@ +ctypes.wintypes +pwd.getpwnam + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +distutils.command.bdist_msi # Only available on Windows and Python 3.10 and below +ftplib.FTP.trust_server_pasv_ipv4_address # Dangerous to use, intentionally undocumented, intentionally missing from typeshed. #6154 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py38.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py38.txt new file mode 100644 index 00000000..eefb793f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py38.txt @@ -0,0 +1,5 @@ +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +distutils.command.bdist_msi # Only available on Windows and Python 3.10 and below diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py39.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py39.txt new file mode 100644 index 00000000..eefb793f --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin-py39.txt @@ -0,0 +1,5 @@ +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +distutils.command.bdist_msi # Only available on Windows and Python 3.10 and below diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin.txt new file mode 100644 index 00000000..61b54564 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/darwin.txt @@ -0,0 +1,80 @@ +_?curses.A_ITALIC + +_posixsubprocess.cloexec_pipe +os.EX_NOTFOUND +os.SF_MNOWAIT +os.SF_NODISKIO +os.SF_SYNC +(os|posix).sched_param # system dependent. Unclear if macos has it. +posix.EX_NOTFOUND +readline.append_history_file # not defined in macos +select.KQ_FILTER_NETDEV # system dependent +select.kqueue.__init__ # default C signature is wrong +select.POLLMSG # system dependent + +# Sometimes these seem to exist on darwin, sometimes not +(_socket.MSG_NOSIGNAL)? +(socket.MsgFlag.MSG_NOSIGNAL)? +(socket.MSG_NOSIGNAL)? +(os.preadv)? +(os.pwritev)? +(posix.preadv)? +(posix.pwritev)? + +# Platform differences that cannot be captured by the type system +(posix.O_[A-Z_]+)? +(posix.ST_[A-Z]+)? +(termios.[A-Z0-9_]+)? 
+ +# Exists at runtime, but missing from stubs +distutils.msvccompiler.MSVCCompiler.get_msvc_paths +distutils.msvccompiler.MSVCCompiler.set_path_env_var +distutils.msvccompiler.MacroExpander +mimetypes.MimeTypes.read_windows_registry +selectors.DefaultSelector.fileno +socket.PF_SYSTEM +socket.SYSPROTO_CONTROL + +_ctypes.dlclose +_ctypes.dlopen +_ctypes.dlsym + +posix.NGROUPS_MAX +select.POLLRDHUP +webbrowser.MacOSX.__init__ + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +# Modules that do not exist on macos systems +_msi +_winapi +asyncio.windows_events +asyncio.windows_utils +msilib(.[a-z]+)? +msvcrt +winreg +winsound +ossaudiodev +spwd + +# multiprocessing.popen_spawn_win32 exists on Darwin but fail to import +multiprocessing.popen_spawn_win32 + +# Platform differences that cannot be captured by the type system +os.SCHED_[A-Z_]+ +posix.SCHED_[A-Z_]+ + +# Some of these exist on non-windows, but they are useless and this is not intended +stat.FILE_ATTRIBUTE_[A-Z_]+ + +# Methods that come from __getattr__() at runtime +tkinter.Tk.createfilehandler +tkinter.Tk.deletefilehandler + +_?curses.ACS_.* # ACS codes are initialized only after initscr call +curses.COLORS # Initialized after start_color +curses.COLOR_PAIRS # Initialized after start_color +curses.COLS # Initialized only after initscr call +curses.LINES # Initialized only after initscr call diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py310.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py310.txt new file mode 100644 index 00000000..541a04ed --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py310.txt @@ -0,0 +1,22 @@ +_?curses.color_pair +(os|posix).EFD_CLOEXEC +(os|posix).EFD_NONBLOCK +(os|posix).EFD_SEMAPHORE +(os|posix).SPLICE_F_MORE +(os|posix).SPLICE_F_MOVE +(os|posix).SPLICE_F_NONBLOCK +(os|posix).setresgid +(os|posix).setresuid +(os|posix).sendfile +(os|posix).eventfd +(os|posix).eventfd_read +(os|posix).eventfd_write +(os|posix).splice +signal.sigtimedwait +signal.sigwaitinfo + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +distutils.command.bdist_msi # Only available on Windows and Python 3.10 and below diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py311.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py311.txt new file mode 100644 index 00000000..fbc4c211 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py311.txt @@ -0,0 +1,20 @@ +_?curses.color_pair +mmap.MAP_STACK +(os|posix).EFD_CLOEXEC +(os|posix).EFD_NONBLOCK +(os|posix).EFD_SEMAPHORE +(os|posix).SPLICE_F_MORE +(os|posix).SPLICE_F_MOVE +(os|posix).SPLICE_F_NONBLOCK +(os|posix).setresgid +(os|posix).setresuid +(os|posix).sendfile +(os|posix).eventfd +(os|posix).eventfd_read +(os|posix).eventfd_write +(os|posix).splice +signal.SIGSTKFLT +signal.Signals.SIGSTKFLT +signal.sigtimedwait +signal.sigwaitinfo +xxlimited.Xxo.x_exports diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py37.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py37.txt new file mode 100644 index 00000000..e300b9f1 --- 
/dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py37.txt @@ -0,0 +1,9 @@ +ctypes.wintypes +pwd.getpwnam + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +distutils.command.bdist_msi # Only available on Windows and Python 3.10 and below +ftplib.FTP.trust_server_pasv_ipv4_address # Dangerous to use, intentionally undocumented, intentionally missing from typeshed. #6154 diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py38.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py38.txt new file mode 100644 index 00000000..e13cc7b1 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py38.txt @@ -0,0 +1,7 @@ +select.epoll.register + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +distutils.command.bdist_msi # Only available on Windows and Python 3.10 and below diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py39.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py39.txt new file mode 100644 index 00000000..65371563 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux-py39.txt @@ -0,0 +1,8 @@ +(os|posix).sendfile +select.epoll.register + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +distutils.command.bdist_msi # Only available on Windows and Python 3.10 and below diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux.txt new file mode 100644 index 00000000..a10c7fc8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/linux.txt @@ -0,0 +1,68 @@ +_socket.* +_posixsubprocess.cloexec_pipe +os.EX_NOTFOUND +os.SF_MNOWAIT +os.SF_NODISKIO +os.SF_SYNC +os.plock +posix.EX_NOTFOUND +posix.NGROUPS_MAX +select.EPOLL_RDHUP +selectors.KqueueSelector +signal.SIGEMT +signal.SIGINFO +socket.[A-Z0-9_]+ +errno.[A-Z0-9]+ + +# Exists at runtime, but missing from stubs +distutils.msvccompiler.MSVCCompiler.get_msvc_paths +distutils.msvccompiler.MSVCCompiler.set_path_env_var +distutils.msvccompiler.MacroExpander +mimetypes.MimeTypes.read_windows_registry +selectors.DefaultSelector.fileno +spwd.struct_spwd.sp_nam +spwd.struct_spwd.sp_pwd + +# Platform differences that cannot be captured by the type system +(posix.O_[A-Z_]+)? +(posix.ST_[A-Z]+)? +(termios.[A-Z0-9_]+)? + +_ctypes.dlclose +_ctypes.dlopen +_ctypes.dlsym + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +# Modules that do not exist on Linux systems +_msi +_winapi +asyncio.windows_events +asyncio.windows_utils +msilib(.[a-z]+)? 
+msvcrt +winreg +winsound + +# multiprocessing.popen_spawn_win32 exists on Linux but fail to import +multiprocessing.popen_spawn_win32 + +# Platform differences that cannot be captured by the type system +fcntl.I_[A-Z0-9_]+ +os.SCHED_[A-Z_]+ +posix.SCHED_[A-Z_]+ + +# Some of these exist on non-windows, but they are useless and this is not intended +stat.FILE_ATTRIBUTE_[A-Z_]+ + +# Methods that come from __getattr__() at runtime +tkinter.Tk.createfilehandler +tkinter.Tk.deletefilehandler + +_?curses.ACS_.* # ACS codes are initialized only after initscr call +curses.COLORS # Initialized after start_color +curses.COLOR_PAIRS # Initialized after start_color +curses.COLS # Initialized only after initscr call +curses.LINES # Initialized only after initscr call diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py310.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py310.txt new file mode 100644 index 00000000..f71c76af --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py310.txt @@ -0,0 +1,170 @@ +_collections_abc.AsyncGenerator.ag_await +_collections_abc.AsyncGenerator.ag_code +_collections_abc.AsyncGenerator.ag_frame +_collections_abc.AsyncGenerator.ag_running +_collections_abc.AsyncGenerator.athrow # async at runtime, deliberately not in the stub, see #7491. Pos-only differences also. +_collections_abc.ItemsView.__reversed__ +_collections_abc.KeysView.__reversed__ +_collections_abc.ValuesView.__reversed__ +_weakref.ProxyType.__reversed__ # Doesn't really exist +asyncio.BaseEventLoop.subprocess_exec # BaseEventLoop adds several parameters and stubtest fails on the difference if we add them +asyncio.base_events.BaseEventLoop.subprocess_exec # BaseEventLoop adds several parameters and stubtest fails on the difference if we add them +asyncio.Future.__init__ # Usually initialized from c object +asyncio.futures.Future.__init__ # Usually initialized from c object +builtins.float.__setformat__ # Internal method for CPython test suite +builtins.property.__set_name__ # Doesn't actually exist +contextlib.AbstractAsyncContextManager.__class_getitem__ +contextlib.AbstractContextManager.__class_getitem__ +fractions.Fraction.__new__ # overload is too complicated for stubtest to resolve +functools.cached_property.__set__ # Stub is a while lie; see comments in the stub +gettext.install +gettext.translation +hmac.new # Stub is a white lie; see comments in the stub +importlib.metadata._meta.SimplePath.__truediv__ # See comments in the stub +ipaddress.IPv4Interface.hostmask +ipaddress.IPv6Interface.hostmask +ipaddress._BaseNetwork.broadcast_address +ipaddress._BaseNetwork.hostmask +multiprocessing.spawn._main +pickle.Pickler.reducer_override # implemented in C pickler +# platform.uname_result's processor field is now dynamically made to exist +platform.uname_result.__match_args__ +platform.uname_result.__new__ +platform.uname_result.processor +sys.UnraisableHookArgs # Not exported from sys +tkinter.Tk.split +types.GenericAlias.__getattr__ +types.GenericAlias.__mro_entries__ +types.GenericAlias.__call__ # Would be complicated to fix properly, Any could silence problems. 
#6392 +typing._SpecialForm.__mro_entries__ +typing._TypedDict.__delitem__ +typing._TypedDict.__ior__ +typing._TypedDict.__or__ +typing._TypedDict.copy +typing._TypedDict.items +typing._TypedDict.keys +typing._TypedDict.pop +typing._TypedDict.setdefault +typing._TypedDict.update +typing._TypedDict.values +weakref.ProxyType.__reversed__ # Doesn't really exist +weakref.WeakValueDictionary.update + +# Runtime has *args, **kwargs, but will error if any are supplied +unittest.TestCase.__init_subclass__ +unittest.case.TestCase.__init_subclass__ + +# SpooledTemporaryFile implements IO except these methods before Python 3.11 +# See also https://github.com/python/typeshed/pull/2452#issuecomment-420657918 +tempfile.SpooledTemporaryFile.__next__ +tempfile.SpooledTemporaryFile.readable +tempfile.SpooledTemporaryFile.seekable +tempfile.SpooledTemporaryFile.writable + +# The "loop" argument exists at runtime, +# but raises TypeError if you try to provide any value for it +asyncio.BoundedSemaphore.__init__ +asyncio.Condition.__init__ +asyncio.Event.__init__ +asyncio.Lock.__init__ +asyncio.Queue.__init__ +asyncio.Semaphore.__init__ +asyncio.locks.BoundedSemaphore.__init__ +asyncio.locks.Semaphore.__init__ +asyncio.locks.Condition.__init__ +asyncio.locks.Event.__init__ +asyncio.locks.Lock.__init__ +asyncio.locks.Semaphore.__init__ +asyncio.queues.Queue.__init__ + +# Exists at runtime, but missing from stubs +_collections_abc.AsyncIterable.__class_getitem__ +_collections_abc.Awaitable.__class_getitem__ +_collections_abc.Container.__class_getitem__ +_collections_abc.Iterable.__class_getitem__ +_collections_abc.MappingView.__class_getitem__ +_csv.Reader +_csv.Writer +bdb.Breakpoint.clearBreakpoints +inspect.Signature.from_builtin # Removed in 3.11, can add if someone needs this +inspect.Signature.from_function # Removed in 3.11, can add if someone needs this +multiprocessing.managers.SharedMemoryServer.create +multiprocessing.managers.SharedMemoryServer.list_segments +multiprocessing.managers.SharedMemoryServer.public +multiprocessing.managers.SharedMemoryServer.release_segment +multiprocessing.managers.SharedMemoryServer.shutdown +multiprocessing.managers.SharedMemoryServer.track_segment + +# ========== +# Related to positional-only arguments +# ========== + +# These are not positional-only at runtime, but we treat them +# as positional-only to match dict. +_collections_abc.MutableMapping.pop +_collections_abc.MutableMapping.setdefault + +# typing.IO uses positional-or-keyword arguments, but in the stubs we prefer +# to mark these as positional-only for compatibility with existing sub-classes. +typing.BinaryIO.write +typing.IO.read +typing.IO.readline +typing.IO.readlines +typing.IO.seek +typing.IO.truncate +typing.IO.write +typing.IO.writelines + +# positional-only complaints caused by differences between typing aliases and the "real" classes in the stdlib +_collections_abc.Coroutine.send +_collections_abc.Coroutine.throw +_collections_abc.Generator.send +_collections_abc.Generator.throw + +# typing.SupportsRound.__round__ # pos-or-kw at runtime, but we pretend it's pos-only in the stub so that e.g. float.__round__ satisfies the interface +types.DynamicClassAttribute..* # In the stub we pretend it's an alias for property, but it has positional-only differences + +# These three have a pos-or-keyword first parameter at runtime, but deliberately have a pos-only first parameter in the stub. 
#6812 +posixpath.join +ntpath.join +os.path.join + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +# Side effects from module initialization +_compat_pickle.excname +email.contentmanager.maintype +email.contentmanager.subtype +inspect.k +inspect.mod_dict +inspect.v +json.encoder.i +lib2to3.pgen2.grammar.line +lib2to3.pgen2.grammar.name +lib2to3.pgen2.grammar.op +pydoc.Helper.symbol # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 +pydoc.Helper.symbols_ # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 +pydoc.Helper.topic # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 + +# C signature is broader than what is actually accepted +ast.Bytes.__new__ +ast.Ellipsis.__new__ +ast.ExtSlice.__new__ +ast.Index.__new__ +ast.NameConstant.__new__ +ast.Num.__new__ +ast.Str.__new__ +queue.SimpleQueue.__init__ +xml.etree.ElementTree.XMLParser.__init__ +xml.etree.cElementTree.XMLParser.__init__ + +ftplib.FTP.trust_server_pasv_ipv4_address # Dangerous to use, intentionally undocumented, intentionally missing from typeshed. #6154 +os.PathLike.__class_getitem__ # PathLike is a protocol; we don't expect all PathLike classes to implement class_getitem +types.CodeType.replace # stubtest thinks default values are None but None doesn't work at runtime +_ast.ImportFrom.level # None on the class, but never None on instances +ast.ImportFrom.level # None on the class, but never None on instances + +# White lies around defaults +dataclasses.KW_ONLY diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py311.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py311.txt new file mode 100644 index 00000000..d751add7 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py311.txt @@ -0,0 +1,141 @@ +_collections_abc.AsyncGenerator.ag_await +_collections_abc.AsyncGenerator.ag_code +_collections_abc.AsyncGenerator.ag_frame +_collections_abc.AsyncGenerator.ag_running +_collections_abc.AsyncIterable.__class_getitem__ +_collections_abc.Awaitable.__class_getitem__ +_collections_abc.Container.__class_getitem__ +_collections_abc.ItemsView.__reversed__ +_collections_abc.Iterable.__class_getitem__ +_collections_abc.KeysView.__reversed__ +_collections_abc.MappingView.__class_getitem__ +_collections_abc.ValuesView.__reversed__ +_csv.Reader +_csv.Writer +argparse._MutuallyExclusiveGroup.add_mutually_exclusive_group +configparser.LegacyInterpolation.__init__ +enum.Enum.__init__ +# TODO: The stub for enum.auto is nothing like the implementation +enum.auto.__init__ +enum.auto.value +fractions.Fraction.__new__ # overload is too complicated for stubtest to resolve +ftplib.FTP.trust_server_pasv_ipv4_address +functools.cached_property.__set__ # Stub is a white lie; see comments in the stub +ipaddress.IPv4Interface.hostmask +ipaddress.IPv6Interface.hostmask +ipaddress._BaseNetwork.broadcast_address +ipaddress._BaseNetwork.hostmask +multiprocessing.managers.SharedMemoryServer.create +multiprocessing.managers.SharedMemoryServer.list_segments +multiprocessing.managers.SharedMemoryServer.public +multiprocessing.managers.SharedMemoryServer.release_segment +multiprocessing.managers.SharedMemoryServer.shutdown +multiprocessing.managers.SharedMemoryServer.track_segment +multiprocessing.spawn._main +#
platform.uname_result's processor field is now dynamically made to exist +platform.uname_result.__match_args__ +platform.uname_result.__new__ +platform.uname_result.processor +queue.SimpleQueue.__init__ +sys.UnraisableHookArgs # Not exported from sys +tkinter._VersionInfoType.__doc__ +typing.NewType.__call__ +typing.NewType.__mro_entries__ +weakref.WeakValueDictionary.update + +# Runtime has *args, **kwargs, but will error if any are supplied +unittest.TestCase.__init_subclass__ +unittest.case.TestCase.__init_subclass__ + +# ========== +# Related to positional-only arguments +# ========== + +# These are not positional-only at runtime, but we treat them +# as positional-only to match dict. +_collections_abc.MutableMapping.pop +_collections_abc.MutableMapping.setdefault + +# typing.IO uses positional-or-keyword arguments, but in the stubs we prefer +# to mark these as positional-only for compatibility with existing sub-classes. +typing.BinaryIO.write +typing.IO.read +typing.IO.readline +typing.IO.readlines +typing.IO.seek +typing.IO.truncate +typing.IO.write +typing.IO.writelines + +# positional-only complaints caused by differences between typing aliases and the "real" classes in the stdlib +_collections_abc.Coroutine.send +_collections_abc.Coroutine.throw +_collections_abc.Generator.send +_collections_abc.Generator.throw + +# typing.SupportsRound.__round__ # pos-or-kw at runtime, but we pretend it's pos-only in the stub so that e.g. float.__round__ satisfies the interface +types.DynamicClassAttribute..* # In the stub we pretend it's an alias for property, but it has positional-only differences + +# These three have a pos-or-keyword first parameter at runtime, but deliberately have a pos-only first parameter in the stub. #6812 +posixpath.join +ntpath.join +os.path.join + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +_collections_abc.AsyncGenerator.athrow # async at runtime, deliberately not in the stub, see #7491. Pos-only differences also. +_weakref.ProxyType.__reversed__ # Doesn't really exist +builtins.property.__set_name__ # Doesn't actually exist +hmac.new # Stub is a white lie; see comments in the stub +http.HTTPMethod.description # mutable instance attribute at runtime but we pretend it's a property +pickle.Pickler.reducer_override # implemented in C pickler +types.GenericAlias.__call__ # Would be complicated to fix properly, Any could silence problems. 
#6392 +types.GenericAlias.__getattr__ +types.GenericAlias.__mro_entries__ +weakref.ProxyType.__reversed__ # Doesn't really exist +inspect._ParameterKind.description # Still exists, but stubtest can't see it +asyncio.BaseEventLoop.subprocess_exec # BaseEventLoop adds several parameters and stubtest fails on the difference if we add them +asyncio.base_events.BaseEventLoop.subprocess_exec # BaseEventLoop adds several parameters and stubtest fails on the difference if we add them + +# C signature is broader than what is actually accepted +ast.Bytes.__new__ +ast.Ellipsis.__new__ +ast.ExtSlice.__new__ +ast.Index.__new__ +ast.NameConstant.__new__ +ast.Num.__new__ +ast.Str.__new__ +asyncio.futures.Future.__init__ +asyncio.Future.__init__ +contextvars.Context.__init__ +queue.SimpleQueue.__init__ +xml.etree.ElementTree.XMLParser.__init__ +xml.etree.cElementTree.XMLParser.__init__ + +os.PathLike.__class_getitem__ # PathLike is a protocol; we don't expect all PathLike classes to implement class_getitem +types.CodeType.replace # stubtest thinks default values are None but None doesn't work at runtime +_ast.ImportFrom.level # None on the class, but never None on instances +ast.ImportFrom.level # None on the class, but never None on instances + +# Treated as an alias of a typing class in the stubs, +# they are generic to type checkers anyway. +contextlib.AbstractAsyncContextManager.__class_getitem__ +contextlib.AbstractContextManager.__class_getitem__ + +# Super-special typing primitives +typing._SpecialForm.__mro_entries__ +typing._TypedDict.__delitem__ +typing._TypedDict.__ior__ +typing._TypedDict.__or__ +typing._TypedDict.copy +typing._TypedDict.items +typing._TypedDict.keys +typing._TypedDict.pop +typing._TypedDict.setdefault +typing._TypedDict.update +typing._TypedDict.values + +# White lies around defaults +dataclasses.KW_ONLY diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py37.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py37.txt new file mode 100644 index 00000000..f83cd84b --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py37.txt @@ -0,0 +1,153 @@ +_collections_abc.AsyncGenerator.ag_await +_collections_abc.AsyncGenerator.ag_code +_collections_abc.AsyncGenerator.ag_frame +_collections_abc.AsyncGenerator.ag_running +_dummy_threading +asyncio.AbstractEventLoop.run_in_executor # allowed to return a Future, changed in 3.8 +asyncio.events.AbstractEventLoop.run_in_executor # allowed to return a Future, changed in 3.8 +asyncio.Future.__init__ # Usually initialized from c object +asyncio.futures.Future.__init__ # Usually initialized from c object +asyncio.locks._ContextManagerMixin.__enter__ # Always raises; deliberately omitted from the stub +asyncio.locks._ContextManagerMixin.__exit__ # Always raises; deliberately omitted from the stub +asyncio.transports.WriteTransport.get_write_buffer_limits # Documented. Exists in subclasses, but not in WriteTransport itself +asyncio.WriteTransport.get_write_buffer_limits # Documented. Exists in subclasses, but not in WriteTransport itself +builtins.float.__set_format__ # Internal method for CPython test suite +builtins.str.maketrans +builtins.input # Incorrect default value in text signature, fixed in 3.10 +cmath.log +collections.AsyncGenerator.asend # async at runtime, deliberately not in the stub, see #7491. Pos-only differences also.
+collections.AsyncGenerator.__anext__ # async at runtime, deliberately not in the stub, see #7491 +collections.AsyncGenerator.aclose # async at runtime, deliberately not in the stub, see #7491 +collections.AsyncIterator.__anext__ # async at runtime, deliberately not in the stub, see #7491 +collections.AsyncGenerator.ag_await +collections.AsyncGenerator.ag_code +collections.AsyncGenerator.ag_frame +collections.AsyncGenerator.ag_running +collections.Callable +collections.Mapping.__reversed__ # Set to None at runtime for a better error message +contextvars.ContextVar.get +distutils.command.bdist_wininst # see #6523 +dummy_threading.Condition.acquire +dummy_threading.Condition.release +dummy_threading.Event.isSet +dummy_threading.local.__new__ +fractions.Fraction.__new__ # overload is too complicated for stubtest to resolve +inspect.Signature.from_builtin # Removed in 3.11, can add if someone needs this +inspect.Signature.from_function # Removed in 3.11, can add if someone needs this +ipaddress._BaseNetwork.__init__ +json.loads +(os|posix).utime +random.Random.randrange # missing undocumented arg _int +sched.Event.__doc__ # __slots__ is overridden +typing.NamedTuple._asdict +typing.NamedTuple._make +typing.NamedTuple._replace +typing._SpecialForm.__new__ +typing.runtime_checkable +uuid.UUID.int +uuid.UUID.is_safe +xml.etree.ElementTree.TreeBuilder.start # Discrepancy between Python and C modules, fixed in bpo-39495 +xml.etree.cElementTree.TreeBuilder.start # bpo-39495 + +collections.Coroutine.cr_await +collections.Coroutine.cr_code +collections.Coroutine.cr_frame +collections.Coroutine.cr_running +collections.Generator.gi_code +collections.Generator.gi_frame +collections.Generator.gi_running +collections.Generator.gi_yieldfrom +collections.Mapping.get # Adding None to the Union messed up mypy +collections.Sequence.index # Supporting None in end is not mandatory + +# SpooledTemporaryFile implements IO except these methods before Python 3.11 +# See also https://github.com/python/typeshed/pull/2452#issuecomment-420657918 +tempfile.SpooledTemporaryFile.__next__ +tempfile.SpooledTemporaryFile.readable +tempfile.SpooledTemporaryFile.seekable +tempfile.SpooledTemporaryFile.writable + +# Default values given in the stub are a white lie, see #9637 +tkinter.Tcl +tkinter.Tk.__init__ + +# Exists at runtime, but missing from stubs +contextvars.ContextVar.__class_getitem__ +datetime.datetime_CAPI +dummy_threading.Lock +dummy_threading.RLock +html.parser.HTMLParser.unescape +platform.popen +plistlib.Data.asBase64 +plistlib.Data.fromBase64 +ssl.OP_ENABLE_MIDDLEBOX_COMPAT +ssl.Options.OP_ENABLE_MIDDLEBOX_COMPAT +ssl.SSLObject.verify_client_post_handshake +ssl.SSLSocket.verify_client_post_handshake +tempfile.SpooledTemporaryFile.softspace +tkinter.Tk.split +tkinter.commondialog.[A-Z_]+ +tkinter.commondialog.TclVersion +tkinter.commondialog.TkVersion +tkinter.commondialog.wantobjects +tkinter.dialog.[A-Z_]+ +tkinter.dialog.TclVersion +tkinter.dialog.TkVersion +tkinter.dialog.wantobjects +tkinter.dnd.Icon +tkinter.dnd.Tester +tkinter.dnd.test +tkinter.filedialog.[A-Z_]+ +tkinter.filedialog.TclVersion +tkinter.filedialog.TkVersion +tkinter.filedialog.wantobjects +tkinter.simpledialog.wantobjects +tkinter.tix.wantobjects + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +# Side effects from module initialization +_compat_pickle.excname +email.contentmanager.maintype +email.contentmanager.subtype +inspect.k +inspect.mod_dict +inspect.v +json.encoder.i +lib2to3.pgen2.grammar.line 
+lib2to3.pgen2.grammar.name +lib2to3.pgen2.grammar.op +pydoc.Helper.symbol # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 +pydoc.Helper.symbols_ # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 +pydoc.Helper.topic # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 + +# Adding these reflected dunders to `typing.AbstractSet` causes a large number of false-positives. See #7414. +collections.Set.__rand__ +collections.Set.__ror__ +collections.Set.__rsub__ +collections.Set.__rxor__ + +builtins.memoryview.__iter__ # C type that implements __getitem__ +builtins.memoryview.cast # inspect.signature is incorrect about shape being kw-only + +# C signature is broader than what is actually accepted +queue.SimpleQueue.__init__ + +pyexpat.XMLParserType.ExternalEntityParserCreate # C signature is wrong - function gets only positional args +xml.parsers.expat.XMLParserType.ExternalEntityParserCreate # C signature is wrong - function gets only positional args +pyexpat.XMLParserType.intern # does exist but stubtest can't see it (https://github.com/python/cpython/blob/3.7/Modules/pyexpat.c#L1322) +xml.parsers.expat.XMLParserType.intern # does exist but stubtest can't see it (https://github.com/python/cpython/blob/3.7/Modules/pyexpat.c#L1322) + +# Runtime signature is incorrect (https://github.com/python/cpython/issues/93021) +builtins.classmethod.__get__ +builtins.property.__get__ +builtins.staticmethod.__get__ +types.FunctionType.__get__ +types.LambdaType.__get__ +types.ClassMethodDescriptorType.__get__ +types.GetSetDescriptorType.__get__ +types.MemberDescriptorType.__get__ +types.MethodDescriptorType.__get__ +types.WrapperDescriptorType.__get__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py38.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py38.txt new file mode 100644 index 00000000..9b50e8d2 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py38.txt @@ -0,0 +1,171 @@ +_collections_abc.AsyncGenerator.ag_await +_collections_abc.AsyncGenerator.ag_code +_collections_abc.AsyncGenerator.ag_frame +_collections_abc.AsyncGenerator.ag_running +_collections_abc.ItemsView.__reversed__ +_collections_abc.KeysView.__reversed__ +_collections_abc.ValuesView.__reversed__ +_dummy_threading +ast.Bytes.__new__ +ast.Ellipsis.__new__ +ast.NameConstant.__new__ +ast.Num.__new__ +ast.Str.__new__ +asyncio.BaseEventLoop.subprocess_exec # BaseEventLoop adds several parameters and stubtest fails on the difference if we add them +asyncio.base_events.BaseEventLoop.subprocess_exec # BaseEventLoop adds several parameters and stubtest fails on the difference if we add them +asyncio.Future.__init__ # Usually initialized from c object +asyncio.futures.Future.__init__ # Usually initialized from c object +asyncio.locks._ContextManagerMixin.__enter__ # Always raises; deliberately omitted from the stub +asyncio.locks._ContextManagerMixin.__exit__ # Always raises; deliberately omitted from the stub +asyncio.transports.WriteTransport.get_write_buffer_limits # Documented. Exists in subclasses, but not in WriteTransport itself +asyncio.WriteTransport.get_write_buffer_limits # Documented. 
Exists in subclasses, but not in WriteTransport itself +builtins.float.__set_format__ # Internal method for CPython test suite +builtins.input # Incorrect default value in text signature, fixed in 3.10 +collections.AsyncGenerator.asend # async at runtime, deliberately not in the stub, see #7491. Pos-only differences also. +collections.AsyncGenerator.__anext__ # async at runtime, deliberately not in the stub, see #7491 +collections.AsyncGenerator.aclose # async at runtime, deliberately not in the stub, see #7491 +collections.AsyncIterator.__anext__ # async at runtime, deliberately not in the stub, see #7491 +collections.AsyncGenerator.ag_await +collections.AsyncGenerator.ag_code +collections.AsyncGenerator.ag_frame +collections.AsyncGenerator.ag_running +collections.Callable +collections.ItemsView.__reversed__ +collections.KeysView.__reversed__ +collections.ValuesView.__reversed__ +collections.Mapping.__reversed__ # Set to None at runtime for a better error message +distutils.command.bdist_wininst # see #6523 +dummy_threading.Condition.acquire +dummy_threading.Condition.release +dummy_threading.Event.isSet +dummy_threading.Thread.native_id +dummy_threading.local.__new__ +fractions.Fraction.__new__ # overload is too complicated for stubtest to resolve +ftplib.FTP.trust_server_pasv_ipv4_address # Dangerous to use, intentionally undocumented, intentionally missing from typeshed. #6154 +functools.cached_property.__set__ # Stub is a white lie; see comments in the stub +gettext.install # codeset default value is ['unspecified'] so can't be specified +gettext.translation # codeset default value is ['unspecified'] so can't be specified +hmac.new # Stub is a white lie; see comments in the stub +inspect.Signature.from_builtin # Removed in 3.11, can add if someone needs this +inspect.Signature.from_function # Removed in 3.11, can add if someone needs this +ipaddress.IPv4Interface.hostmask +ipaddress.IPv6Interface.hostmask +ipaddress._BaseNetwork.broadcast_address +ipaddress._BaseNetwork.hostmask +multiprocessing.spawn._main +pickle.Pickler.reducer_override # implemented in C pickler +random.Random.randrange # missing undocumented arg _int +sched.Event.__doc__ # __slots__ is overridden +sys.UnraisableHookArgs # Not exported from sys +typing.NamedTuple.__new__ +typing.NamedTuple._asdict +typing.NamedTuple._make +typing.NamedTuple._replace +typing._SpecialForm.__new__ +weakref.WeakValueDictionary.update +xml.etree.ElementTree.TreeBuilder.start # Discrepancy between Python and C modules, fixed in bpo-39495 +xml.etree.ElementTree.XMLParser.__init__ # Defined in C so has general signature +xml.etree.cElementTree.TreeBuilder.start # bpo-39495 +xml.etree.cElementTree.XMLParser.__init__ # Defined in C so has general signature + +collections.Coroutine.cr_await +collections.Coroutine.cr_code +collections.Coroutine.cr_frame +collections.Coroutine.cr_running +collections.Generator.gi_code +collections.Generator.gi_frame +collections.Generator.gi_running +collections.Generator.gi_yieldfrom +collections.Mapping.get # Adding None to the Union messed up mypy +collections.Sequence.index # Supporting None in end is not mandatory + +# SpooledTemporaryFile implements IO except these methods before Python 3.11 +# See also https://github.com/python/typeshed/pull/2452#issuecomment-420657918 +tempfile.SpooledTemporaryFile.__next__ +tempfile.SpooledTemporaryFile.readable +tempfile.SpooledTemporaryFile.seekable +tempfile.SpooledTemporaryFile.writable + +# Default values given in the stub are a white lie, see #9637 +tkinter.Tcl
+tkinter.Tk.__init__ + +# Exists at runtime, but missing from stubs +contextvars.ContextVar.__class_getitem__ +datetime.datetime_CAPI +dummy_threading.ExceptHookArgs +dummy_threading.Lock +dummy_threading.RLock +html.parser.HTMLParser.unescape +multiprocessing.managers.SharedMemoryServer.create +multiprocessing.managers.SharedMemoryServer.list_segments +multiprocessing.managers.SharedMemoryServer.public +multiprocessing.managers.SharedMemoryServer.release_segment +multiprocessing.managers.SharedMemoryServer.shutdown +multiprocessing.managers.SharedMemoryServer.track_segment +plistlib.Data.asBase64 +plistlib.Data.fromBase64 +tempfile.SpooledTemporaryFile.softspace +tkinter.Tk.split +tkinter.commondialog.[A-Z_]+ +tkinter.commondialog.TclVersion +tkinter.commondialog.TkVersion +tkinter.commondialog.wantobjects +tkinter.dialog.[A-Z_]+ +tkinter.dialog.TclVersion +tkinter.dialog.TkVersion +tkinter.dialog.wantobjects +tkinter.dnd.Icon +tkinter.dnd.Tester +tkinter.dnd.test +tkinter.filedialog.[A-Z_]+ +tkinter.filedialog.TclVersion +tkinter.filedialog.TkVersion +tkinter.filedialog.wantobjects +tkinter.simpledialog.wantobjects +tkinter.tix.wantobjects + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +# Side effects from module initialization +_compat_pickle.excname +email.contentmanager.maintype +email.contentmanager.subtype +inspect.k +inspect.mod_dict +inspect.v +json.encoder.i +lib2to3.pgen2.grammar.line +lib2to3.pgen2.grammar.name +lib2to3.pgen2.grammar.op +pydoc.Helper.symbol # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 +pydoc.Helper.symbols_ # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 +pydoc.Helper.topic # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 + +# Adding these reflected dunders to `typing.AbstractSet` causes a large number of false-positives. See #7414. 
+collections.Set.__rand__ +collections.Set.__ror__ +collections.Set.__rsub__ +collections.Set.__rxor__ + +builtins.memoryview.__iter__ # C type that implements __getitem__ +builtins.memoryview.cast # inspect.signature is incorrect about shape being kw-only + +# C signature is broader than what is actually accepted +queue.SimpleQueue.__init__ + +types.CodeType.replace # stubtest thinks default values are None but None doesn't work at runtime + +# Runtime signature is incorrect (https://github.com/python/cpython/issues/93021) +builtins.classmethod.__get__ +builtins.property.__get__ +builtins.staticmethod.__get__ +types.FunctionType.__get__ +types.LambdaType.__get__ +types.ClassMethodDescriptorType.__get__ +types.GetSetDescriptorType.__get__ +types.MemberDescriptorType.__get__ +types.MethodDescriptorType.__get__ +types.WrapperDescriptorType.__get__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py39.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py39.txt new file mode 100644 index 00000000..e87d03c8 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py39.txt @@ -0,0 +1,166 @@ +_collections_abc.AsyncGenerator.ag_await +_collections_abc.AsyncGenerator.ag_code +_collections_abc.AsyncGenerator.ag_frame +_collections_abc.AsyncGenerator.ag_running +_collections_abc.ItemsView.__reversed__ +_collections_abc.KeysView.__reversed__ +_collections_abc.ValuesView.__reversed__ +_weakref.ProxyType.__reversed__ # Doesn't really exist +ast.Bytes.__new__ +ast.Ellipsis.__new__ +ast.ExtSlice.__new__ +ast.Index.__new__ +ast.NameConstant.__new__ +ast.Num.__new__ +ast.Str.__new__ +asyncio.BaseEventLoop.subprocess_exec # BaseEventLoop adds several parameters and stubtest fails on the difference if we add them +asyncio.base_events.BaseEventLoop.subprocess_exec # BaseEventLoop adds several parameters and stubtest fails on the difference if we add them +asyncio.Future.__init__ # Usually initialized from c object +asyncio.futures.Future.__init__ # Usually initialized from c object +builtins.float.__setformat__ # Internal method for CPython test suite +builtins.input # Incorrect default value in text signature, fixed in 3.10 +collections.AsyncGenerator.asend # async at runtime, deliberately not in the stub, see #7491. Pos-only differences also. 
+collections.AsyncGenerator.__anext__ # async at runtime, deliberately not in the stub, see #7491 +collections.AsyncGenerator.aclose # async at runtime, deliberately not in the stub, see #7491 +collections.AsyncIterator.__anext__ # async at runtime, deliberately not in the stub, see #7491 +collections.AsyncGenerator.ag_await +collections.AsyncGenerator.ag_code +collections.AsyncGenerator.ag_frame +collections.AsyncGenerator.ag_running +collections.Callable +collections.Mapping.__reversed__ # Set to None at runtime for a better error message +collections.ItemsView.__reversed__ +collections.KeysView.__reversed__ +collections.ValuesView.__reversed__ +contextlib.AbstractAsyncContextManager.__class_getitem__ +contextlib.AbstractContextManager.__class_getitem__ +distutils.command.bdist_wininst # see #6523 +fractions.Fraction.__new__ # overload is too complicated for stubtest to resolve +functools.cached_property.__set__ # Stub is a white lie; see comments in the stub +gettext.install +gettext.translation +hmac.new # Stub is a white lie; see comments in the stub +inspect.Signature.from_builtin # Removed in 3.11, can add if someone needs this +inspect.Signature.from_function # Removed in 3.11, can add if someone needs this +ipaddress.IPv4Interface.hostmask +ipaddress.IPv6Interface.hostmask +ipaddress._BaseNetwork.broadcast_address +ipaddress._BaseNetwork.hostmask +multiprocessing.spawn._main +pickle.Pickler.reducer_override # implemented in C pickler +# platform.uname_result's processor field is now dynamically made to exist +platform.uname_result.__new__ +platform.uname_result._fields +platform.uname_result.processor +sched.Event.__doc__ # __slots__ is overridden +sys.UnraisableHookArgs # Not exported from sys +tkinter.Tk.split +types.CodeType.replace # stubtest thinks default values are None but None doesn't work at runtime +types.GenericAlias.__getattr__ +types.GenericAlias.__call__ # Would be complicated to fix properly, Any could silence problems.
#6392 +typing._TypedDict.__delitem__ +typing._TypedDict.__ior__ +typing._TypedDict.__or__ +typing._TypedDict.copy +typing._TypedDict.items +typing._TypedDict.keys +typing._TypedDict.pop +typing._TypedDict.setdefault +typing._TypedDict.update +typing._TypedDict.values +weakref.ProxyType.__reversed__ # Doesn't really exist +weakref.WeakValueDictionary.update + +collections.Coroutine.cr_await +collections.Coroutine.cr_code +collections.Coroutine.cr_frame +collections.Coroutine.cr_running +collections.Generator.gi_code +collections.Generator.gi_frame +collections.Generator.gi_running +collections.Generator.gi_yieldfrom +collections.Mapping.get # Adding None to the Union messed up mypy +collections.Sequence.index # Supporting None in end is not mandatory + +# Exists at runtime, but missing from stubs +_collections_abc.AsyncIterable.__class_getitem__ +_collections_abc.Awaitable.__class_getitem__ +_collections_abc.Container.__class_getitem__ +_collections_abc.Iterable.__class_getitem__ +_collections_abc.MappingView.__class_getitem__ +collections.AsyncIterable.__class_getitem__ +collections.Awaitable.__class_getitem__ +collections.Container.__class_getitem__ +collections.Iterable.__class_getitem__ +collections.MappingView.__class_getitem__ +hmac.HMAC.digest_cons +hmac.HMAC.inner +hmac.HMAC.outer +multiprocessing.managers.SharedMemoryServer.create +multiprocessing.managers.SharedMemoryServer.list_segments +multiprocessing.managers.SharedMemoryServer.public +multiprocessing.managers.SharedMemoryServer.release_segment +multiprocessing.managers.SharedMemoryServer.shutdown +multiprocessing.managers.SharedMemoryServer.track_segment +types.GenericAlias.__mro_entries__ +typing._SpecialForm.__mro_entries__ + +# SpooledTemporaryFile implements IO except these methods before Python 3.11 +# See also https://github.com/python/typeshed/pull/2452#issuecomment-420657918 +tempfile.SpooledTemporaryFile.__next__ +tempfile.SpooledTemporaryFile.readable +tempfile.SpooledTemporaryFile.seekable +tempfile.SpooledTemporaryFile.writable + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +# Side effects from module initialization +_compat_pickle.excname +email.contentmanager.maintype +email.contentmanager.subtype +inspect.k +inspect.mod_dict +inspect.v +json.encoder.i +lib2to3.pgen2.grammar.line +lib2to3.pgen2.grammar.name +lib2to3.pgen2.grammar.op +pydoc.Helper.symbol # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 +pydoc.Helper.symbols_ # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 +pydoc.Helper.topic # Loop variable in class https://github.com/python/typeshed/issues/6401#issuecomment-981178522 + +# Adding these reflected dunders to `typing.AbstractSet` causes a large number of false-positives. See #7414. 
+collections.Set.__rand__ +collections.Set.__ror__ +collections.Set.__rsub__ +collections.Set.__rxor__ + +builtins.memoryview.__iter__ # C type that implements __getitem__ +builtins.memoryview.cast # inspect.signature is incorrect about shape being kw-only + +# C signature is broader than what is actually accepted +queue.SimpleQueue.__init__ +xml.etree.ElementTree.XMLParser.__init__ +xml.etree.cElementTree.XMLParser.__init__ + +ast.FormattedValue.conversion # None on the class, but never None on instances +_ast.FormattedValue.conversion # None on the class, but never None on instances +_ast.ImportFrom.level # None on the class, but never None on instances +ast.ImportFrom.level # None on the class, but never None on instances + +ftplib.FTP.trust_server_pasv_ipv4_address # Dangerous to use, intentionally undocumented, intentionally missing from typeshed. #6154 +os.PathLike.__class_getitem__ # PathLike is a protocol; we don't expect all PathLike classes to implement class_getitem + +# Runtime signature is incorrect (https://github.com/python/cpython/issues/93021) +builtins.classmethod.__get__ +builtins.property.__get__ +builtins.staticmethod.__get__ +types.FunctionType.__get__ +types.LambdaType.__get__ +types.ClassMethodDescriptorType.__get__ +types.GetSetDescriptorType.__get__ +types.MemberDescriptorType.__get__ +types.MethodDescriptorType.__get__ +types.WrapperDescriptorType.__get__ diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py3_common.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py3_common.txt new file mode 100644 index 00000000..402258be --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/py3_common.txt @@ -0,0 +1,707 @@ +# ========== +# Allowlist entries that should be fixed +# ========== + +# Please keep sorted alphabetically + +# Coroutine and Generator properties are added programmatically +_collections_abc.Coroutine.cr_await +_collections_abc.Coroutine.cr_code +_collections_abc.Coroutine.cr_frame +_collections_abc.Coroutine.cr_running +_collections_abc.Generator.gi_code +_collections_abc.Generator.gi_frame +_collections_abc.Generator.gi_running +_collections_abc.Generator.gi_yieldfrom +_collections_abc.Mapping.__reversed__ # set to None at runtime for a better error message +_collections_abc.Mapping.get # Adding None to the Union messed up mypy +_collections_abc.Sequence.index # Supporting None in end is not mandatory + +# Adding these reflected dunders to `typing.AbstractSet` causes a large number of false-positives. See #7414. +_collections_abc.Set.__rand__ +_collections_abc.Set.__ror__ +_collections_abc.Set.__rsub__ +_collections_abc.Set.__rxor__ + +_csv.Dialect.__init__ # C __init__ signature is inaccurate +_threading_local.local.__new__ +_weakref.ref.* # Alias for _weakref.ReferenceType, problems should be fixed there +_weakref.CallableProxyType.__getattr__ # Should have all attributes of proxy +_weakref.ProxyType.__getattr__ # Should have all attributes of proxy +_weakref.ReferenceType.__call__ # C function default annotation is wrong +_weakref.ReferenceType.__init__ # Runtime defines __new__ but stubtest thinks __init__ is also defined. 
+argparse.ArgumentParser.__init__ # stubtest doesn't recognise the runtime default (a class) as being compatible with a callback protocol (the stub annotation) +argparse.Namespace.__getattr__ # The whole point of this class is its attributes are dynamic +asynchat.async_chat.encoding # Removal planned for 3.12, can add if someone needs this +asynchat.async_chat.use_encoding # Removal planned for 3.12, can add if someone needs this +asynchat.find_prefix_at_end # Removal planned for 3.12, can add if someone needs this +# Condition functions are exported in __init__ +asyncio.Condition.acquire +asyncio.Condition.locked +asyncio.Condition.release +# Condition functions are exported in __init__ +asyncio.locks.Condition.acquire +asyncio.locks.Condition.locked +asyncio.locks.Condition.release +asyncio.proactor_events.BaseProactorEventLoop.sock_recv # nbytes parameter has different name 'n' in implementation +asyncio.selector_events.BaseSelectorEventLoop.sock_recv # nbytes parameter has different name 'n' in implementation +builtins.OSError.characters_written # GetSetDescriptor that always raises AttributeError +builtins.dict.get +builtins.ellipsis # type is not exposed anywhere +builtins.function +builtins.memoryview.__contains__ # C type that implements __getitem__ +builtins.object.__init__ # default C signature is incorrect +builtins.type.__dict__ # read-only but not actually a property; stubtest thinks it's a mutable attribute. +bz2.BZ2Decompressor.__init__ # function does not accept parameters but C signature is set +# The following CodecInfo properties are added in __new__ +codecs.CodecInfo.decode +codecs.CodecInfo.encode +codecs.CodecInfo.incrementaldecoder +codecs.CodecInfo.incrementalencoder +codecs.CodecInfo.streamreader +codecs.CodecInfo.streamwriter +collections.ChainMap.get # Adding None to the underlying Mapping Union messed up mypy +collections.ChainMap.fromkeys # Runtime has *args which can really only be one argument +collections.UserList.sort # Runtime has *args but will error if any are supplied +configparser.SectionProxy.__getattr__ # SectionProxy can have arbitrary attributes when custom converters are used +# SectionProxy get functions are set in __init__ +configparser.SectionProxy.getboolean +configparser.SectionProxy.getfloat +configparser.SectionProxy.getint +copy.PyStringMap # defined only in Jython +# The Dialect properties are initialized as None in Dialect but their values are enforced in _Dialect +csv.Dialect.delimiter +csv.Dialect.doublequote +csv.Dialect.lineterminator +csv.Dialect.quoting +csv.Dialect.skipinitialspace +csv.DictReader.__init__ # runtime sig has *args but will error if more than 5 positional args are supplied +csv.DictWriter.__init__ # runtime sig has *args but will error if more than 5 positional args are supplied +ctypes.Array._type_ # _type_ and _length_ are abstract, https://github.com/python/typeshed/pull/6361 +ctypes.Array._length_ +ctypes.CDLL._FuncPtr # None at class level but initialized in __init__ to this value +ctypes.memmove # CFunctionType +ctypes.memset # CFunctionType +ctypes.string_at # docstring argument name is wrong +ctypes.wstring_at # docstring argument name is wrong +distutils.command.bdist_packager # It exists in docs as package name but not in code except as a mention in a comment. 
+distutils.version.Version._cmp # class should have declared this +distutils.version.Version.parse # class should have declared this +enum.Enum._generate_next_value_ +hashlib.sha3_\d+ # Can be a class or a built-in function, can't be subclassed at runtime +hashlib.shake_\d+ # Can be a class or a built-in function, can't be subclassed at runtime +http.HTTPStatus.description # set in __new__ +http.HTTPStatus.phrase # set in __new__ +http.client.HTTPConnection.response_class # the actual type at runtime is abc.ABCMeta +importlib.abc.FileLoader.get_filename # Wrapped with _check_name decorator which changes runtime signature +importlib.abc.FileLoader.load_module # Wrapped with _check_name decorator which changes runtime signature +importlib.abc.Loader.exec_module # See Lib/importlib/_abc.py. Might be defined for backwards compatibility +importlib.abc.MetaPathFinder.find_spec # Not defined on the actual class, but expected to exist. +importlib.abc.PathEntryFinder.find_spec # Not defined on the actual class, but expected to exist. +importlib.machinery.ExtensionFileLoader.get_filename # Wrapped with _check_name decorator which changes runtime signature +inspect.Parameter.empty # set as private marker _empty +inspect.Signature.empty # set as private marker _empty +io.BufferedRandom.truncate +io.BufferedReader.seek +io.BufferedReader.truncate +io.BufferedWriter.seek +io.BufferedWriter.truncate +io.BytesIO.readlines +io.BytesIO.seek # Parameter name for a positional-only param differs from its name in the inherited method +io.FileIO.seek +io.StringIO.seek +io.StringIO.truncate +io.TextIOWrapper.truncate +ipaddress._BaseAddress.is_global +ipaddress._BaseAddress.is_link_local +ipaddress._BaseAddress.is_loopback +ipaddress._BaseAddress.is_multicast +ipaddress._BaseAddress.is_private +ipaddress._BaseAddress.is_reserved +ipaddress._BaseAddress.is_unspecified +ipaddress._BaseAddress.max_prefixlen +ipaddress._BaseAddress.packed +ipaddress._BaseNetwork.max_prefixlen +lib2to3.pygram.pattern_symbols +lib2to3.pygram.python_symbols +lib2to3.pytree.Base.__new__ +lib2to3.pytree.Base.children +lib2to3.pytree.Base.type +lib2to3.pytree.BasePattern.__new__ +lib2to3.pytree.BasePattern.type +lib2to3.pytree.NegatedPattern.match +lib2to3.pytree.NegatedPattern.match_seq +# LC_MESSAGES is sometimes present in __all__, sometimes not, +# so stubtest will sometimes complain about exported names being different at runtime to the exported names in the stub +(locale.__all__)? 
+multiprocessing.JoinableQueue +multiprocessing.Queue +multiprocessing.SimpleQueue +multiprocessing.managers.BaseManager.shutdown +multiprocessing.managers.SyncManager.Event +multiprocessing.managers.SyncManager.Lock +multiprocessing.managers.SyncManager.Namespace +multiprocessing.managers.SyncManager.RLock +multiprocessing.queues.JoinableQueue.__init__ +multiprocessing.queues.Queue.__init__ +multiprocessing.queues.Queue.put_nowait +multiprocessing.queues.SimpleQueue.__init__ +multiprocessing.queues.SimpleQueue.put +multiprocessing.synchronize.Barrier.__init__ +multiprocessing.synchronize.Condition.acquire +multiprocessing.synchronize.Condition.release +multiprocessing.synchronize.Event.__init__ +multiprocessing.synchronize.SemLock.__init__ +multiprocessing.synchronize.SemLock.acquire +multiprocessing.synchronize.SemLock.release +numbers.Number.__hash__ # typeshed marks this as abstract but code just sets this as None +optparse.Values.__getattr__ # Some attributes are set in __init__ using setattr +pickle.Pickler.persistent_id # C pickler persistent_id is an attribute +pickle.Unpickler.persistent_load # C unpickler persistent_load is an attribute +pickle._Unpickler\..* # Best effort typing for undocumented internals +pickle._Pickler\..* # Best effort typing for undocumented internals +poplib.POP3_SSL.stls # bad declaration of inherited function. See poplib.pyi +pyexpat.expat_CAPI +select.poll # Depends on configuration +selectors.DevpollSelector # Depends on configuration +shutil.rmtree # stubtest doesn't like that we have this as an instance of a callback protocol instead of a function +socketserver.BaseServer.fileno # implemented in derived classes +socketserver.BaseServer.get_request # implemented in derived classes +socketserver.BaseServer.server_bind # implemented in derived classes +ssl.Purpose.__new__ # the multiple inheritance confuses mypy +(sys.get_int_max_str_digits)? # Added in a patch release, backported to all security branches, but has yet to find its way to all GitHub Actions images +sys.implementation # Actually SimpleNamespace but then you wouldn't have convenient attributes +(sys.set_int_max_str_digits)? # Added in a patch release, backported to all security branches, but has yet to find its way to all GitHub Actions images +sys.thread_info +tarfile.TarFile.errors # errors is initialized for some reason as None even though it really only accepts str +threading.Condition.acquire # Condition functions are exported in __init__ +threading.Condition.release # Condition functions are exported in __init__ +tkinter.simpledialog.[A-Z_]+ +tkinter.simpledialog.TclVersion +tkinter.simpledialog.TkVersion +tkinter.tix.[A-Z_]+ +tkinter.tix.TclVersion +tkinter.tix.TkVersion +multiprocessing.dummy.Condition.acquire +multiprocessing.dummy.Condition.release +threading.Lock # A factory function that returns 'most efficient lock'. Marking it as a function will make it harder for users to mark the Lock type. 
+threading.RLock # Similar to above +multiprocessing.dummy.Lock # Similar to above +multiprocessing.dummy.RLock # Similar to above +# alias for a class defined elsewhere, mypy infers the variable has type `(*args) -> ForkingPickler` but stubtest infers the runtime type as +multiprocessing.reduction.AbstractReducer.ForkingPickler +tkinter.Misc.grid_propagate # The noarg placeholder is a set value list +tkinter.Misc.pack_propagate # The noarg placeholder is a set value list +tkinter.Tk.eval # from __getattr__ +tkinter.Tk.report_callback_exception # A bit of a lie, since it's actually a method, but typing it as an attribute allows it to be assigned to +tkinter.Wm.wm_iconphoto # Default value of argument can't be used without runtime error +tkinter.font.Font.__getitem__ # Argument name differs (doesn't matter for __dunder__ methods) +traceback.TracebackException.from_exception # explicitly expanding arguments going into TracebackException __init__ +types.MethodType.__closure__ # read-only but not actually a property; stubtest thinks it doesn't exist. +types.MethodType.__defaults__ # read-only but not actually a property; stubtest thinks it doesn't exist. +types.ModuleType.__dict__ # read-only but not actually a property; stubtest thinks it's a mutable attribute. +types.ModuleType.__getattr__ # this doesn't exist at runtime +types.SimpleNamespace.__init__ # class doesn't accept positional arguments but has default C signature +typing.IO.__next__ # Added because IO streams are iterable. See https://github.com/python/typeshed/commit/97bc450acd60c1bcdafef3ce8fbe3b95a9c0cac3 +typing.type_check_only # typing decorator that is not available at runtime +unittest.mock.patch # It's a complicated overload and I haven't been able to figure out why stubtest doesn't like it +urllib.request.HTTPPasswordMgrWithPriorAuth.__init__ # Args are passed as is to super, so super args are specified +weakref.CallableProxyType.__getattr__ # Should have all attributes of proxy +weakref.ProxyType.__getattr__ # Should have all attributes of proxy +weakref.ReferenceType.* # Alias for _weakref.ReferenceType, problems should be fixed there +weakref.WeakKeyDictionary.get +weakref.WeakKeyDictionary.update +weakref.WeakValueDictionary.get +weakref.WeakValueDictionary.setdefault # has a default value for the "default" argument, but always errors out if no value is supplied for the parameter by the user +weakref.ref.* # Alias for _weakref.ReferenceType, problems should be fixed there +webbrowser.UnixBrowser.remote_action # always overridden in inheriting class +webbrowser.UnixBrowser.remote_action_newtab # always overridden in inheriting class +webbrowser.UnixBrowser.remote_action_newwin # always overridden in inheriting class +xml.parsers.expat.expat_CAPI + +# ========== +# Exists at runtime, but missing from stubs +# ========== +_json.encode_basestring +_socket.CAPI +_thread.LockType.acquire_lock +_thread.LockType.locked_lock +_thread.LockType.release_lock +_thread.RLock +_thread.allocate +_thread.exit_thread +_thread.start_new +_threading_local._localimpl.localargs +_threading_local._localimpl.locallock +builtins.SyntaxError.print_file_and_line +bz2.BZ2File.peek +codecs.StreamReader.charbuffertype +codecs.StreamReader.seek +codecs.StreamWriter.seek +configparser.ParsingError.filename +configparser.RawConfigParser.converters +ctypes.ARRAY +ctypes.SetPointerType +ctypes.c_voidp +ctypes.util.test +importlib.abc.Finder.find_module +lib2to3.pgen2.grammar.Grammar.loads +logging.config.BaseConfigurator +logging.config.ConvertingDict 
+logging.config.ConvertingList +logging.config.ConvertingMixin +logging.config.ConvertingTuple +logging.config.DictConfigurator +logging.config.dictConfigClass +mimetypes.MimeTypes.add_type +modulefinder.test +multiprocessing.managers.Server.accepter +multiprocessing.managers.Server.create +multiprocessing.managers.Server.debug_info +multiprocessing.managers.Server.decref +multiprocessing.managers.Server.dummy +multiprocessing.managers.Server.fallback_getvalue +multiprocessing.managers.Server.fallback_mapping +multiprocessing.managers.Server.fallback_repr +multiprocessing.managers.Server.fallback_str +multiprocessing.managers.Server.get_methods +multiprocessing.managers.Server.handle_request +multiprocessing.managers.Server.incref +multiprocessing.managers.Server.number_of_objects +multiprocessing.managers.Server.public +multiprocessing.managers.Server.serve_client +multiprocessing.managers.Server.shutdown +multiprocessing.managers.SyncManager.Barrier +multiprocessing.managers.SyncManager.JoinableQueue +multiprocessing.managers.SyncManager.Pool +multiprocessing.pool.Pool.Process +multiprocessing.pool.ThreadPool.Process +multiprocessing.synchronize.Semaphore.get_value +socket.CAPI +tkinter.Misc.config +tkinter.font.Font.counter +tkinter.tix.CObjView +tkinter.tix.DialogShell +tkinter.tix.ExFileSelectDialog +tkinter.tix.FileSelectDialog +tkinter.tix.FileTypeList +tkinter.tix.Grid +tkinter.tix.NoteBookFrame +tkinter.tix.OptionName +tkinter.tix.ResizeHandle +tkinter.tix.ScrolledGrid +tkinter.tix.ScrolledHList +tkinter.tix.ScrolledListBox +tkinter.tix.ScrolledTList +tkinter.tix.ScrolledText +tkinter.tix.ScrolledWindow +tkinter.tix.Shell +turtle.TNavigator.speed + +# Undocumented implementation details of a deprecated class +importlib.machinery.WindowsRegistryFinder.DEBUG_BUILD +importlib.machinery.WindowsRegistryFinder.REGISTRY_KEY +importlib.machinery.WindowsRegistryFinder.REGISTRY_KEY_DEBUG + +# Undocumented implementation details +profile.Profile.dispatch +profile.Profile.fake_code +profile.Profile.fake_frame +profile.Profile.trace_dispatch +profile.Profile.trace_dispatch_c_call +profile.Profile.trace_dispatch_call +profile.Profile.trace_dispatch_exception +profile.Profile.trace_dispatch_i +profile.Profile.trace_dispatch_l +profile.Profile.trace_dispatch_mac +profile.Profile.trace_dispatch_return +cgi.FieldStorage.bufsize +cgi.FieldStorage.read_binary +cgi.FieldStorage.read_lines +cgi.FieldStorage.read_lines_to_eof +cgi.FieldStorage.read_lines_to_outerboundary +cgi.FieldStorage.read_multi +cgi.FieldStorage.read_single +cgi.FieldStorage.read_urlencoded +cgi.FieldStorage.skip_lines +email.contentmanager.get_and_fixup_unknown_message_content +email.contentmanager.get_message_content +email.contentmanager.get_non_text_content +email.contentmanager.get_text_content +email.contentmanager.set_bytes_content +email.contentmanager.set_message_content +email.contentmanager.set_text_content +pipes.Template.makepipeline +pipes.Template.open_r +pipes.Template.open_w +sunau.Au_read.initfp +sunau.Au_write.initfp +turtle.ScrolledCanvas.adjustScrolls +turtle.ScrolledCanvas.onResize +wave.Wave_read.initfp +wave.Wave_write.initfp + +_ctypes.Array +_ctypes.CFuncPtr +_ctypes.POINTER +_ctypes.PyObj_FromPtr +_ctypes.Py_DECREF +_ctypes.Py_INCREF +_ctypes.Structure +_ctypes.Union +_ctypes.addressof +_ctypes.alignment +_ctypes.buffer_info +_ctypes.byref +_ctypes.call_cdeclfunction +_ctypes.call_function +_ctypes.get_errno +_ctypes.pointer +_ctypes.resize +_ctypes.set_errno +_ctypes.sizeof + +# ========== +# Allowlist 
entries that cannot or should not be fixed +# ========== + +ctypes.Array.raw # exists but stubtest can't see it; only available if _CT == c_char + +_collections_abc.AsyncGenerator.asend # async at runtime, deliberately not in the stub, see #7491. Pos-only differences also. +_collections_abc.AsyncGenerator.__anext__ # async at runtime, deliberately not in the stub, see #7491 +_collections_abc.AsyncGenerator.aclose # async at runtime, deliberately not in the stub, see #7491 +_collections_abc.AsyncIterator.__anext__ # async at runtime, deliberately not in the stub, see #7491 +_pydecimal.* # See comments in file +_weakref.ProxyType.__bytes__ # Doesn't really exist +ast.NodeVisitor.visit_\w+ # Methods are discovered dynamically, see #3796 + +# Weird special builtins that are typed as functions, but aren't functions +builtins.copyright +builtins.credits +builtins.exit +builtins.help +builtins.license +builtins.quit + +builtins.float.__getformat__ # Internal method for CPython test suite + +# These super() dunders don't seem to be particularly useful, +# and having them pop up on autocomplete suggestions would be annoying +builtins.super.__self__ +builtins.super.__self_class__ +builtins.super.__thisclass__ + +# These enums derive from (int, IntEnum) or (str, Enum). +pstats.SortKey.__new__ +tkinter.EventType.__new__ + +# These multiprocessing proxy methods have *args, **kwargs signatures at runtime, +# But have more precise (accurate) signatures in the stub +multiprocessing.managers.BaseListProxy.__imul__ +multiprocessing.managers.BaseListProxy.__len__ +multiprocessing.managers.BaseListProxy.__reversed__ +multiprocessing.managers.BaseListProxy.reverse +multiprocessing.managers.BaseListProxy.sort +multiprocessing.managers.DictProxy.__iter__ +multiprocessing.managers.DictProxy.__len__ +multiprocessing.managers.DictProxy.clear +multiprocessing.managers.DictProxy.copy +multiprocessing.managers.DictProxy.items +multiprocessing.managers.DictProxy.keys +multiprocessing.managers.DictProxy.popitem +multiprocessing.managers.DictProxy.values + +# Items that depend on the existence and flags of SSL +imaplib.IMAP4_SSL.ssl +ssl.PROTOCOL_SSLv2 +ssl.PROTOCOL_SSLv3 +ssl.RAND_egd + +collections.abc.* # Types are re-exported from _collections_abc, so errors should be fixed there +distutils.command.check.SilentReporter # only defined if docutils in installed +hmac.HMAC.blocksize # use block_size instead +pickle.Pickler.memo # undocumented implementation detail, has different type in C/Python implementations +pickle.Unpickler.memo # undocumented implementation detail, has different type in C/Python implementations +re.Pattern.scanner # Undocumented and not useful. #6405 +tempfile._TemporaryFileWrapper.[\w_]+ # Dynamically specified by __getattr__, and thus don't exist on the class + +# Various classes in typing aren't types at runtime. In addition, mypy thinks some special forms are tautologically defined. 
typing.[A-Z]\w+ +typing_extensions\..* + +# These are abstract properties at runtime, +# but marking them as such in the stub breaks half the typed-Python ecosystem (see #8726) +typing.IO.closed +typing.IO.mode +typing.IO.name +typing.TextIO.buffer +typing.TextIO.encoding +typing.TextIO.errors +typing.TextIO.line_buffering +typing.TextIO.newlines + +# Typing-related weirdness +_collections_abc.Callable +_typeshed.* # Utility types for typeshed, doesn't exist at runtime +typing._SpecialForm.__call__ +typing._SpecialForm.__init__ + +# Builtins that type checkers pretend exist +builtins.reveal_locals +builtins.reveal_type + +# White lies around defaults +dataclasses.field + +# We can't distinguish not having a default value from having a default value of inspect.Parameter.empty +inspect.Parameter.__init__ +inspect.Signature.__init__ + +# C signature is broader than what is actually accepted +contextvars.Context.__init__ + +multiprocessing.(dummy|managers).Namespace.__[gs]etattr__ # Any field can be set on Namespace + +# sys attributes that are not always defined +sys.gettotalrefcount # Available on python debug builds +sys.last_traceback +sys.last_type +sys.last_value +sys.ps1 +sys.ps2 +sys.tracebacklimit + +# See comments in file. List out methods that are delegated by __getattr__ at runtime. +# Used to make the relevant class satisfy BinaryIO interface. +codecs.StreamReaderWriter.\w+ +codecs.StreamRecoder.\w+ +urllib.response.addbase.\w+ + +weakref.ProxyType.__bytes__ # Doesn't actually exist + +# Dynamically created, has unnecessary *args +turtle.ScrolledCanvas.find_all +turtle.ScrolledCanvas.select_clear +turtle.ScrolledCanvas.select_item + +# Unnecessary re-export +xml.dom.minidom.StringTypes + +# __all__-related weirdness (see #6523) +distutils.command.build +distutils.command.build_py +distutils.command.build_ext +distutils.command.build_clib +distutils.command.build_scripts +distutils.command.clean +distutils.command.install +distutils.command.install_lib +distutils.command.install_headers +distutils.command.install_scripts +distutils.command.install_data +distutils.command.sdist +distutils.command.register +distutils.command.bdist +distutils.command.bdist_dumb +distutils.command.bdist_rpm +distutils.command.check +distutils.command.upload +email.base64mime +email.charset +email.encoders +email.errors +email.feedparser +email.generator +email.header +email.iterators +email.message +email.mime +email.parser +email.quoprimime +email.utils +xml.dom +xml.etree +xml.sax + +# Platform differences that cannot be captured by the type system +os.O_[A-Z_]+ +socket.AF_DECnet + +# Loadable SQLite extensions are disabled on GitHub runners +(sqlite3(.dbapi2)?.Connection.enable_load_extension)? +(sqlite3(.dbapi2)?.Connection.load_extension)? + +# Missing aliases to existing methods that not many people seem to use. +# Complicated multiple inheritance, confuses type checkers.
+tkinter.Grid.bbox +tkinter.Grid.columnconfigure +tkinter.Grid.config +tkinter.Grid.configure +tkinter.Grid.forget +tkinter.Grid.grid_bbox +tkinter.Grid.grid_columnconfigure +tkinter.Grid.grid_location +tkinter.Grid.grid_propagate +tkinter.Grid.grid_rowconfigure +tkinter.Grid.grid_size +tkinter.Grid.grid_slaves +tkinter.Grid.info +tkinter.Grid.propagate +tkinter.Grid.rowconfigure +tkinter.Grid.slaves +tkinter.Pack.config +tkinter.Pack.configure +tkinter.Pack.info +tkinter.Pack.pack_propagate +tkinter.Pack.pack_slaves +tkinter.Pack.slaves +tkinter.Place.config +tkinter.Place.configure +tkinter.Place.forget +tkinter.Place.place_slaves +tkinter.Place.slaves + +# Methods that come from __getattr__() at runtime +tkinter.Tk.adderrorinfo +tkinter.Tk.call +tkinter.Tk.createcommand +tkinter.Tk.createtimerhandler +tkinter.Tk.dooneevent +tkinter.Tk.evalfile +tkinter.Tk.exprboolean +tkinter.Tk.exprdouble +tkinter.Tk.exprlong +tkinter.Tk.exprstring +tkinter.Tk.globalgetvar +tkinter.Tk.globalsetvar +tkinter.Tk.globalunsetvar +tkinter.Tk.interpaddr +tkinter.Tk.record +tkinter.Tk.splitlist +tkinter.Tk.unsetvar +tkinter.Tk.wantobjects +tkinter.Tk.willdispatch + +# Undocumented and have a comment in the source code saying "State variables (don't mess with these)" +wsgiref.handlers.BaseHandler.bytes_sent +wsgiref.handlers.BaseHandler.headers +wsgiref.handlers.BaseHandler.headers_sent +wsgiref.handlers.BaseHandler.result +wsgiref.handlers.BaseHandler.status + +# Iterable classes that don't define __iter__ at runtime (usually iterable via __getitem__) +# These would ideally be special-cased by type checkers; see https://github.com/python/mypy/issues/2220 +ctypes.Array.__iter__ +mmap.mmap.__iter__ +mmap.mmap.__contains__ +xml.etree.ElementTree.Element.__iter__ +xml.etree.cElementTree.Element.__iter__ +typing.IO.__iter__ # See https://github.com/python/typeshed/commit/97bc450acd60c1bcdafef3ce8fbe3b95a9c0cac3 + +# ========== +# Missing from deprecated modules +# Any of these can be added if someone needs them +# ========== + +# Removal planned for 3.12 +asyncore.dispatcher.addr +asyncore.dispatcher.handle_accepted +# Missing from distutils module (deprecated, to be removed in 3.12) +distutils.archive_util.ARCHIVE_FORMATS +distutils.archive_util.check_archive_formats +distutils.bcppcompiler.BCPPCompiler.compiler_type +distutils.bcppcompiler.BCPPCompiler.exe_extension +distutils.bcppcompiler.BCPPCompiler.executables +distutils.bcppcompiler.BCPPCompiler.obj_extension +distutils.bcppcompiler.BCPPCompiler.shared_lib_extension +distutils.bcppcompiler.BCPPCompiler.shared_lib_format +distutils.bcppcompiler.BCPPCompiler.src_extensions +distutils.bcppcompiler.BCPPCompiler.static_lib_extension +distutils.bcppcompiler.BCPPCompiler.static_lib_format +distutils.ccompiler.CCompiler.EXECUTABLE +distutils.ccompiler.CCompiler.SHARED_LIBRARY +distutils.ccompiler.CCompiler.SHARED_OBJECT +distutils.ccompiler.CCompiler.compiler_type +distutils.ccompiler.CCompiler.exe_extension +distutils.ccompiler.CCompiler.language_map +distutils.ccompiler.CCompiler.language_order +distutils.ccompiler.CCompiler.obj_extension +distutils.ccompiler.CCompiler.set_executable +distutils.ccompiler.CCompiler.shared_lib_extension +distutils.ccompiler.CCompiler.shared_lib_format +distutils.ccompiler.CCompiler.src_extensions +distutils.ccompiler.CCompiler.static_lib_extension +distutils.ccompiler.CCompiler.static_lib_format +distutils.ccompiler.compiler_class +distutils.command.bdist +distutils.command.install.* 
+distutils.cygwinccompiler.CygwinCCompiler.compiler_type +distutils.cygwinccompiler.CygwinCCompiler.exe_extension +distutils.cygwinccompiler.CygwinCCompiler.obj_extension +distutils.cygwinccompiler.CygwinCCompiler.shared_lib_extension +distutils.cygwinccompiler.CygwinCCompiler.shared_lib_format +distutils.cygwinccompiler.CygwinCCompiler.static_lib_extension +distutils.cygwinccompiler.CygwinCCompiler.static_lib_format +distutils.cygwinccompiler.Mingw32CCompiler.compiler_type +distutils.dir_util.ensure_relative +distutils.dist.DistributionMetadata.set_classifiers +distutils.dist.DistributionMetadata.set_keywords +distutils.dist.DistributionMetadata.set_platforms +distutils.dist.fix_help_options +distutils.extension.read_setup_file +distutils.fancy_getopt.FancyGetopt.add_option +distutils.fancy_getopt.FancyGetopt.get_attr_name +distutils.fancy_getopt.FancyGetopt.has_option +distutils.fancy_getopt.FancyGetopt.print_help +distutils.fancy_getopt.FancyGetopt.set_aliases +distutils.fancy_getopt.FancyGetopt.set_negative_aliases +distutils.fancy_getopt.FancyGetopt.set_option_table +distutils.msvccompiler.MSVCCompiler.compiler_type +distutils.msvccompiler.MSVCCompiler.exe_extension +distutils.msvccompiler.MSVCCompiler.executables +distutils.msvccompiler.MSVCCompiler.find_exe +distutils.msvccompiler.MSVCCompiler.initialize +distutils.msvccompiler.MSVCCompiler.obj_extension +distutils.msvccompiler.MSVCCompiler.res_extension +distutils.msvccompiler.MSVCCompiler.shared_lib_extension +distutils.msvccompiler.MSVCCompiler.shared_lib_format +distutils.msvccompiler.MSVCCompiler.src_extensions +distutils.msvccompiler.MSVCCompiler.static_lib_extension +distutils.msvccompiler.MSVCCompiler.static_lib_format +distutils.msvccompiler.convert_mbcs +distutils.msvccompiler.get_build_architecture +distutils.msvccompiler.get_build_version +distutils.msvccompiler.normalize_and_reduce_paths +distutils.msvccompiler.read_keys +distutils.msvccompiler.read_values +distutils.sysconfig.build_flags +distutils.sysconfig.parse_config_h +distutils.sysconfig.parse_makefile +distutils.text_file.TextFile.default_options +distutils.text_file.TextFile.error +distutils.text_file.TextFile.gen_error +distutils.unixccompiler.UnixCCompiler.compiler_type +distutils.unixccompiler.UnixCCompiler.dylib_lib_extension +distutils.unixccompiler.UnixCCompiler.dylib_lib_format +distutils.unixccompiler.UnixCCompiler.executables +distutils.unixccompiler.UnixCCompiler.obj_extension +distutils.unixccompiler.UnixCCompiler.shared_lib_extension +distutils.unixccompiler.UnixCCompiler.shared_lib_format +distutils.unixccompiler.UnixCCompiler.src_extensions +distutils.unixccompiler.UnixCCompiler.static_lib_extension +distutils.unixccompiler.UnixCCompiler.static_lib_format +distutils.unixccompiler.UnixCCompiler.xcode_stub_lib_extension +distutils.unixccompiler.UnixCCompiler.xcode_stub_lib_format +distutils.util.grok_environment_error + +# Missing from pkgutil.ImpImporter/ImpLoader (both deprecated since 3.3, can add if someone needs it) +pkgutil.ImpImporter.find_module +pkgutil.ImpImporter.iter_modules +pkgutil.ImpLoader.code +pkgutil.ImpLoader.get_code +pkgutil.ImpLoader.get_data +pkgutil.ImpLoader.get_filename +pkgutil.ImpLoader.get_source +pkgutil.ImpLoader.is_package +pkgutil.ImpLoader.load_module +pkgutil.ImpLoader.source diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py310.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py310.txt new 
file mode 100644 index 00000000..264c4896 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py310.txt @@ -0,0 +1,11 @@ +# Exists at runtime, but missing from stubs +_winapi.CreateFileMapping +_winapi.MapViewOfFile +_winapi.OpenFileMapping +_winapi.VirtualQuerySize +asyncio.IocpProactor.recvfrom +asyncio.IocpProactor.sendto +asyncio.windows_events.IocpProactor.recvfrom +asyncio.windows_events.IocpProactor.sendto +msvcrt.GetErrorMode +subprocess.STARTUPINFO.copy diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py311.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py311.txt new file mode 100644 index 00000000..bfda0e03 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py311.txt @@ -0,0 +1,14 @@ + +_winapi.CreateFileMapping +_winapi.MapViewOfFile +_winapi.OpenFileMapping +_winapi.VirtualQuerySize +asyncio.IocpProactor.recvfrom +asyncio.IocpProactor.recvfrom_into +asyncio.IocpProactor.sendto +asyncio.windows_events.IocpProactor.recvfrom +asyncio.windows_events.IocpProactor.recvfrom_into +asyncio.windows_events.IocpProactor.sendto +msvcrt.GetErrorMode +os.EX_OK +subprocess.STARTUPINFO.copy diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py37.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py37.txt new file mode 100644 index 00000000..f82edeeb --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py37.txt @@ -0,0 +1,20 @@ + # The following methods were changed in point releases from Python 3.6 to 3.9 + # as part of a security fix. These excludes can be removed when the GitHub + # action workflow uses Python versions that include the fix (adding a + # separator argument). 
+cgi.FieldStorage.__init__ +cgi.parse +cgi.parse_multipart +urllib.parse.parse_qs +urllib.parse.parse_qsl + +os.startfile + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +# pathlib methods that exist on Windows, but always raise NotImplementedError, +# so are omitted from the stub +pathlib.WindowsPath.group +pathlib.WindowsPath.owner diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py38.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py38.txt new file mode 100644 index 00000000..91ec5926 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py38.txt @@ -0,0 +1,19 @@ +# Exists at runtime, but missing from stubs +_winapi.CreateFileMapping +_winapi.MapViewOfFile +_winapi.OpenFileMapping +_winapi.VirtualQuerySize +asyncio.IocpProactor.recvfrom +asyncio.IocpProactor.sendto +asyncio.windows_events.IocpProactor.recvfrom +asyncio.windows_events.IocpProactor.sendto +subprocess.STARTUPINFO.copy + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +# pathlib methods that exist on Windows, but always raise NotImplementedError, +# so are omitted from the stub +pathlib.WindowsPath.group +pathlib.WindowsPath.owner diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py39.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py39.txt new file mode 100644 index 00000000..c5756562 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32-py39.txt @@ -0,0 +1,10 @@ +# Exists at runtime, but missing from stubs +_winapi.CreateFileMapping +_winapi.MapViewOfFile +_winapi.OpenFileMapping +_winapi.VirtualQuerySize +asyncio.IocpProactor.recvfrom +asyncio.IocpProactor.sendto +asyncio.windows_events.IocpProactor.recvfrom +asyncio.windows_events.IocpProactor.sendto +subprocess.STARTUPINFO.copy diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32.txt new file mode 100644 index 00000000..4d5ef5db --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/data/typeshed/tests/stubtest_allowlists/win32.txt @@ -0,0 +1,79 @@ +ctypes.GetLastError # Is actually a pointer +distutils.msvccompiler.HKEYS +locale.[A-Z0-9_]+ # Constants that should be moved to _locale and re-exported conditionally +locale.nl_langinfo # Function that should be moved to _locale and re-exported conditionally +mmap.PAGESIZE +# alias for a class defined elsewhere, +# mypy infers the variable has type `(*args) -> DupHandle` but stubtest infers the runtime type as +multiprocessing.reduction.AbstractReducer.DupHandle +msilib.text.dirname +msvcrt.CRT_ASSEMBLY_VERSION +selectors.KqueueSelector +signal.SIGEMT +signal.SIGINFO +winsound.SND_APPLICATION + +# Exists at runtime, but missing from stubs +_msi.MSIError +distutils.msvccompiler.MSVCCompiler.manifest_get_embed_info +distutils.msvccompiler.MSVCCompiler.manifest_setup_ldargs +distutils.msvccompiler.OldMSVCCompiler +msvcrt.SetErrorMode +ssl.SSLSocket.recvmsg +ssl.SSLSocket.recvmsg_into +ssl.SSLSocket.sendmsg +winreg.HKEYType.handle +_ctypes.FormatError +_ctypes.FreeLibrary +_ctypes.LoadLibrary 
+_ctypes.get_last_error +_ctypes.set_last_error + + +# ========== +# Allowlist entries that cannot or should not be fixed +# ========== + +# Modules that do not exist on Windows systems +_curses +_posixsubprocess +asyncio.unix_events +crypt +dbm.gnu +dbm.ndbm +fcntl +grp +nis +ossaudiodev +posix +pwd +readline +resource +spwd +syslog +termios +xxlimited + +# multiprocessing.popen_fork, popen_forkserver, and popen_spawn_posix exist on Windows but fail to import +multiprocessing.popen_fork +multiprocessing.popen_forkserver +multiprocessing.popen_spawn_posix + +# Modules that rely on _curses +curses +curses.ascii +curses.has_key +curses.panel +curses.textpad + +# Modules that rely on termios +pty +tty + +# pathlib functions that rely on modules that don't exist on Windows +pathlib.Path.owner +pathlib.Path.group +# pathlib methods that exist on Windows, but always raise NotImplementedError, +# so are omitted from the stub +pathlib.Path.is_mount +pathlib.WindowsPath.is_mount diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/pyproject.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/pyproject.toml new file mode 100644 index 00000000..62abe252 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/pyproject.toml @@ -0,0 +1,12 @@ +[project] +name = "performance_bm_typeshed_stats" +requires-python = ">=3.11" +dependencies = [ + "pyperf", +] +urls.repository = "https://github.com/python/pyperformance" +dynamic = ["version"] + +[tool.pyperformance] +name = "typeshed_stats" +tags = "apps" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/requirements.txt b/pyperformance/data-files/benchmarks/bm_typeshed_stats/requirements.txt new file mode 100644 index 00000000..d35c7f74 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/requirements.txt @@ -0,0 +1 @@ +./pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats \ No newline at end of file diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/run_benchmark.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/run_benchmark.py new file mode 100644 index 00000000..97760bd5 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/run_benchmark.py @@ -0,0 +1,21 @@ +""" +Gather stats on typeshed using the typeshed-stats package +""" + +import sys +from pathlib import Path + +import pyperf + + +TYPESHED_STATS_DIR = str(Path(__file__).parent / "typeshed_stats") +TYPESHED_DIR = str(Path(__file__).parent / "data" / "typeshed") + + +if __name__ == "__main__": + runner = pyperf.Runner() + runner.metadata["description"] = __doc__ + args = runner.parse_args() + sys.path.append(TYPESHED_STATS_DIR) + command = [sys.executable, "-m", "typeshed_stats", "--typeshed-dir", TYPESHED_DIR] + runner.bench_command("typeshed_stats", command) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/LICENSE b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/LICENSE new file mode 100644 index 00000000..96f1555d --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2018 The Python Packaging Authority + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, 
and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/README.md b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/README.md new file mode 100644 index 00000000..40903265 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/README.md @@ -0,0 +1,62 @@ +
+ +# typeshed-stats + + + +
+ +--- + +## A CLI tool and library to gather stats on [typeshed](https://github.com/python/typeshed) + +
+ +[![website](https://img.shields.io/website?down_color=red&down_message=Offline&style=for-the-badge&up_color=green&up_message=Running&url=https%3A%2F%2Falexwaygood.github.io%2Ftypeshed-stats%2F)](https://alexwaygood.github.io/typeshed-stats/)[![build status](https://img.shields.io/github/actions/workflow/status/AlexWaygood/typeshed-stats/test.yml?branch=main&label=Tests&style=for-the-badge)](https://github.com/AlexWaygood/typeshed-stats/actions/workflows/test.yml)[![Coveralls](https://img.shields.io/coverallsCoverage/github/AlexWaygood/typeshed-stats?style=for-the-badge)](https://coveralls.io/github/AlexWaygood/typeshed-stats) +
+[![Checked with mypy](https://img.shields.io/badge/mypy-checked-blue?style=for-the-badge)](http://mypy-lang.org/)[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg?style=for-the-badge)](https://github.com/psf/black)[![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=for-the-badge&labelColor=ef8336)](https://pycqa.github.io/isort/)[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white&style=for-the-badge)](https://pre-commit.ci) +
+[![PyPI](https://img.shields.io/pypi/v/typeshed-stats?style=for-the-badge)](https://pypi.org/project/typeshed-stats/)![PyPI - Python Version](https://img.shields.io/pypi/pyversions/typeshed-stats?style=for-the-badge)![PyPI - Wheel](https://img.shields.io/pypi/wheel/typeshed-stats?style=for-the-badge)[![license](https://img.shields.io/github/license/AlexWaygood/typeshed-stats?style=for-the-badge)](https://opensource.org/licenses/MIT) + +--- + +
+
+
+## What's this project for?
+
+This project is for easy gathering of statistics relating to [typeshed](https://github.com/python/typeshed)'s stubs. As well as being a CLI tool and library, it also powers [a website](https://alexwaygood.github.io/typeshed-stats/) where stats about typeshed's stubs are uploaded twice a day.
+
+This project was created by Alex Waygood. It is not necessarily endorsed by any of the other typeshed maintainers.
+
+Some examples of things you can do from the command line:
+
+- Create a `.csv` file with stats on all typeshed stubs: `typeshed-stats --typeshed-dir <path-to-typeshed> --to-file stats.csv` (the required output format is inferred from the `.csv` file extension).
+- Pretty-print stats on typeshed stubs for emoji and redis to the terminal, in JSON format: `typeshed-stats --typeshed-dir <path-to-typeshed> --to-json emoji redis`
+- Generate a Markdown file detailing stats on typeshed's stubs for protobuf and the stdlib: `typeshed-stats --typeshed-dir <path-to-typeshed> --to-file stats.md stdlib protobuf`
+
+Example usage of the Python-level API (a further example appears at the end of this README):
+
+```python
+from typeshed_stats.gather import tmpdir_typeshed, gather_stats_on_multiple_packages
+
+with tmpdir_typeshed() as typeshed:
+    stats_on_all_packages = gather_stats_on_multiple_packages(typeshed_dir=typeshed)
+```
+
+## How can I use this?
+
+1. Run `pip install typeshed-stats[rich]` to install the package
+1. Run `typeshed-stats --help` for information about various options
+
+## Are there any examples of things this script can produce, other than [the website](https://alexwaygood.github.io/typeshed-stats/)?
+
+I'm glad you asked! They're in the `examples/` folder in this repo.
+(These examples are generated using the `regenerate.py` script in the `scripts/` directory.)
+
+## How do I run tests/linters?
+
+1. Clone the repo and `cd` into it
+1. Create and activate a virtual environment
+1. Run `pip install -e .[dev]`
+1. Either run the linters/tests individually (see the `.github/workflows` directory for details about what's run in CI) or use the `scripts/runtests.py` convenience script to run them all in succession.
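+
+## Can I gather stats on just one package or file?
+
+Yes. `typeshed_stats.gather` also exposes per-package and per-file helpers,
+`gather_stats_on_package()` and `gather_stats_on_file()`. A rough sketch of how they
+might be used (the attribute names below follow the `PackageInfo` and `FileInfo`
+dataclasses defined in `gather.py`):
+
+```python
+from typeshed_stats.gather import (
+    gather_stats_on_file,
+    gather_stats_on_package,
+    tmpdir_typeshed,
+)
+
+with tmpdir_typeshed() as typeshed:
+    # Stats for the stdlib stubs as a whole
+    stdlib_info = gather_stats_on_package("stdlib", typeshed_dir=typeshed)
+    # Stats for a single stub file (paths may be relative to the typeshed clone)
+    functools_info = gather_stats_on_file("stdlib/functools.pyi", typeshed_dir=typeshed)
+
+print(stdlib_info.number_of_lines, stdlib_info.stubtest_settings.strictness)
+print(functools_info.parent_package, functools_info.pyright_setting)
+```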
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/pyproject.toml b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/pyproject.toml new file mode 100644 index 00000000..216aacc3 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/pyproject.toml @@ -0,0 +1,40 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "typeshed_stats" +dynamic = ["version"] +authors = [ + { name="Alex Waygood", email="alex.waygood@gmail.com" }, +] +description = "Library and command-line tool to gather stats on typeshed packages" +license = { text = "MIT" } +readme = "README.md" +requires-python = ">=3.11" +keywords = [ + "typeshed", + "typing", + "stubs", +] +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.11", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Topic :: Software Development", + "Typing :: Typed", +] + +[project.urls] +"Homepage" = "https://github.com/AlexWaygood/typeshed-stats" +"Bug Tracker" = "https://github.com/AlexWaygood/typeshed-stats/issues" + +[project.scripts] +typeshed-stats = "typeshed_stats._cli:main" + +[tool.hatch.version] +path = "src/typeshed_stats/__init__.py" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/__init__.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/__init__.py new file mode 100644 index 00000000..7ff51c12 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/__init__.py @@ -0,0 +1,18 @@ +"""Library and command-line tool for getting stats on various typeshed packages.""" + +__all__ = [ + "__title__", + "__summary__", + "__about__", + "__author__", + "__email__", + "__license__", + "__version__", +] + +__title__ = "typeshed-stats" +__summary__ = __about__ = __doc__ +__author__ = "Alex Waygood" +__email__ = "alex.waygood@gmail.com" +__license__ = "MIT" +__version__ = "23.2.1" diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/__main__.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/__main__.py new file mode 100644 index 00000000..5621e954 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/__main__.py @@ -0,0 +1,13 @@ +"""Library and command-line tool for getting stats on various typeshed packages.""" + +import sys + +if sys.version_info < (3, 11): + raise ImportError("Python 3.11+ is required!") # pragma: no cover + +__all__: list[str] = [] + +if __name__ == "__main__": + from ._cli import main + + main() diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/_cli.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/_cli.py new file mode 100644 index 00000000..e6b71176 --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/_cli.py @@ -0,0 +1,155 @@ +"""Command-line interface.""" + +from __future__ import annotations + +import argparse +import logging +import pprint +import sys +from collections.abc import Sequence +from contextlib import ExitStack +from dataclasses import dataclass +from pathlib import 
Path +from typing import Literal, TypeAlias, get_args + +from typeshed_stats.gather import gather_stats_on_multiple_packages, tmpdir_typeshed + +__all__ = ["main"] + + +_LoggingLevels: TypeAlias = Literal[ + "NOTSET", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" +] + + +def _get_argument_parser() -> argparse.ArgumentParser: + """Parse arguments and do basic argument validation. + + *Don't* do any querying of whether paths actually exist, etc. + Leave that to _validate_options(). + """ + parser = argparse.ArgumentParser( + prog="typeshed-stats", description="Tool to gather stats on typeshed" + ) + parser.add_argument( + "packages", + type=str, + nargs="*", + action="extend", + help=( + "Packages to gather stats on" + " (defaults to all third-party packages, plus the stdlib)" + ), + ) + parser.add_argument( + "--log", + choices=get_args(_LoggingLevels), + default="INFO", + help="Specify the level of logging (defaults to logging.INFO)", + dest="logging_level", + ) + parser.add_argument( + "--pretty", + action="store_true", + help="Whether to pprint results or not (defaults to False)", + ) + + typeshed_options = parser.add_argument_group(title="Typeshed options") + typeshed_options_group = typeshed_options.add_mutually_exclusive_group( + required=True + ) + typeshed_options_group.add_argument( + "-t", + "--typeshed-dir", + type=Path, + help="Path to a local clone of typeshed, to be used as the basis for analysis", + ) + typeshed_options_group.add_argument( + "-d", + "--download-typeshed", + action="store_true", + help=( + "Download a fresh copy of typeshed into a temporary directory," + " and use that as the basis for analysis" + ), + ) + + return parser + + +@dataclass(init=False) +class _CmdArgs: + logging_level: _LoggingLevels + packages: list[str] + typeshed_dir: Path | None + download_typeshed: bool + overwrite: bool + writefile: Path | None + pretty: bool + + +def _validate_packages( + package_names: list[str], typeshed_dir: Path, *, parser: argparse.ArgumentParser +) -> None: + stubs_dir = typeshed_dir / "stubs" + for package_name in package_names: + if package_name != "stdlib": + package_dir = stubs_dir / package_name + if not (package_dir.exists() and package_dir.is_dir()): + parser.error(f"{package_name!r} does not have stubs in typeshed!") + + +def _validate_typeshed_dir( + typeshed_dir: Path, *, parser: argparse.ArgumentParser +) -> None: + for folder in typeshed_dir, (typeshed_dir / "stdlib"), (typeshed_dir / "stubs"): + if not (folder.exists() and folder.is_dir()): + parser.error(f'"{typeshed_dir}" is not a valid typeshed directory') + + +def _setup_logger(str_level: _LoggingLevels) -> logging.Logger: + assert str_level in get_args(_LoggingLevels) + logger = logging.getLogger("typeshed_stats") + level = getattr(logging, str_level) + assert isinstance(level, int) + logger.setLevel(level) + handler = logging.StreamHandler() + handler.setLevel(level) + logger.addHandler(handler) + return logger + + +def _run(argv: Sequence[str] | None = None) -> None: + parser = _get_argument_parser() + args: _CmdArgs = parser.parse_args(argv, namespace=_CmdArgs()) + logger = _setup_logger(args.logging_level) + + with ExitStack() as stack: + if args.download_typeshed: + logger.info("Cloning typeshed into a temporary directory...") + typeshed_dir = stack.enter_context(tmpdir_typeshed()) + else: + assert args.typeshed_dir is not None + typeshed_dir = args.typeshed_dir + _validate_typeshed_dir(typeshed_dir, parser=parser) + + packages: list[str] | None = args.packages or None + if packages: + 
_validate_packages(packages, typeshed_dir, parser=parser) + + logger.info("Gathering stats...") + stats = gather_stats_on_multiple_packages(packages, typeshed_dir=typeshed_dir) + + pprint.pprint({info_bundle.package_name: info_bundle for info_bundle in stats}) + + +def main(argv: Sequence[str] | None = None) -> None: + """CLI entry point.""" + try: + _run(argv) + except KeyboardInterrupt: + sys.stderr.write("Interrupted!") + code = 2 + else: + code = 0 + raise SystemExit(code) diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/gather.py b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/gather.py new file mode 100644 index 00000000..8a4bdfba --- /dev/null +++ b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/gather.py @@ -0,0 +1,1043 @@ +"""Tools for gathering stats about typeshed packages.""" + +import ast +import json +import os +import re +import tomllib +from collections import Counter +from collections.abc import Collection, Container, Iterable, Iterator, Mapping, Sequence +from contextlib import contextmanager +from dataclasses import dataclass +from enum import Enum +from functools import lru_cache, partial +from itertools import chain +from operator import attrgetter +from pathlib import Path +from typing import Any, Literal, NewType, Self, TypeAlias, TypeVar, final + +__all__ = [ + "AnnotationStats", + "FileInfo", + "PackageInfo", + "PackageName", + "PyrightSetting", + "StubtestSettings", + "StubtestStrictness", + "UploadStatus", + "gather_annotation_stats_on_file", + "gather_annotation_stats_on_package", + "gather_stats_on_file", + "gather_stats_on_multiple_packages", + "gather_stats_on_package", + "get_number_of_lines_of_file", + "get_package_extra_description", + "get_package_size", + "get_pyright_setting_for_package", + "get_pyright_setting_for_path", + "get_stub_distribution_name", + "get_stubtest_allowlist_length", + "get_stubtest_platforms", + "get_stubtest_settings", + "get_stubtest_strictness", + "get_upload_status", + "tmpdir_typeshed", +] + +PackageName: TypeAlias = str +_AbsolutePath = NewType("_AbsolutePath", Path) +_PathRelativeToTypeshed: TypeAlias = Path +_NiceReprEnumSelf = TypeVar("_NiceReprEnumSelf", bound="_NiceReprEnum") + + +class _NiceReprEnum(Enum): + """Base class for several public-API enums in this package.""" + + def __new__(cls, doc: str) -> Self: + assert isinstance(doc, str) + member = object.__new__(cls) + member._value_ = member.__doc__ = doc + return member + + def __repr__(self) -> str: + return f"{self.__class__.__name__}.{self.name}" + + @property + def formatted_name(self) -> str: + return " ".join(self.name.split("_")).lower() + + +@dataclass(slots=True) +class _SingleAnnotationAnalysis: + Any_in_annotation: bool = False + Incomplete_in_annotation: bool = False + + +class _SingleAnnotationAnalyzer(ast.NodeVisitor): + def __init__(self) -> None: + self.analysis = _SingleAnnotationAnalysis() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(analysis={self.analysis})" + + def visit_Name(self, node: ast.Name) -> None: + match node.id: + case "Any": + self.analysis.Any_in_annotation = True + case "Incomplete": + self.analysis.Incomplete_in_annotation = True + case _: + pass + self.generic_visit(node) + + def visit_Attribute(self, node: ast.Attribute) -> None: + value = node.value + if isinstance(value, ast.Name): + match f"{value.id}.{node.attr}": + case "typing.Any" | "typing_extensions.Any": + 
self.analysis.Any_in_annotation = True + case "_typeshed.Incomplete": + self.analysis.Incomplete_in_annotation = True + case _: + pass + self.generic_visit(node) + + +def _analyse_annotation(annotation: ast.AST) -> _SingleAnnotationAnalysis: + analyser = _SingleAnnotationAnalyzer() + analyser.visit(annotation) + return analyser.analysis + + +@final +@dataclass(slots=True) +class AnnotationStats: + """Stats on the annotations for a source file or a directory of source files.""" + + annotated_parameters: int = 0 + unannotated_parameters: int = 0 + annotated_returns: int = 0 + unannotated_returns: int = 0 + explicit_Incomplete_parameters: int = 0 + explicit_Incomplete_returns: int = 0 + explicit_Any_parameters: int = 0 + explicit_Any_returns: int = 0 + annotated_variables: int = 0 + explicit_Any_variables: int = 0 + explicit_Incomplete_variables: int = 0 + classdefs: int = 0 + classdefs_with_Any: int = 0 + classdefs_with_Incomplete: int = 0 + + +def _node_matches_name(node: ast.expr, name: str, from_: Container[str]) -> bool: + """Return True if `node` represents `name` from one of the modules in `from_`. + + ```pycon + >>> _is_TypeAlias = partial( + ... _node_matches_name, name="TypeAlias", from_={"typing", "typing_extensions"} + ... ) + >>> get_annotation_node = lambda source: ast.parse(source).body[0].annotation + >>> _is_TypeAlias(get_annotation_node("foo: TypeAlias = int")) + True + >>> _is_TypeAlias(get_annotation_node("foo: typing.TypeAlias = int")) + True + >>> _is_TypeAlias(get_annotation_node("foo: typing_extensions.TypeAlias = int")) + True + >>> _is_TypeAlias(get_annotation_node("foo: int")) + False + >>> _is_TypeAlias(get_annotation_node("foo: Final = 5")) + False + + ``` + """ + match node: + case ast.Name(id): + return id == name + case ast.Attribute(ast.Name(module), id): + return id == name and module in from_ + case _: + return False + + +_is_staticmethod = partial(_node_matches_name, name="staticmethod", from_={"builtins"}) +_is_TypeAlias = partial( + _node_matches_name, name="TypeAlias", from_={"typing", "typing_extensions"} +) + + +class _AnnotationStatsCollector(ast.NodeVisitor): + """AST Visitor for collecting stats on a single stub file.""" + + def __init__(self) -> None: + self.stats = AnnotationStats() + self._class_nesting = 0 + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(stats={self.stats})" + + @property + def in_class(self) -> bool: + """Return `True` if we're currently visiting a class definition.""" + return bool(self._class_nesting) + + def visit_ClassDef(self, node: ast.ClassDef) -> None: + self.stats.classdefs += 1 + base_analyses = [_analyse_annotation(base) for base in node.bases] + self.stats.classdefs_with_Any += any( + analysis.Any_in_annotation for analysis in base_analyses + ) + self.stats.classdefs_with_Incomplete += any( + analysis.Incomplete_in_annotation for analysis in base_analyses + ) + + self._class_nesting += 1 + self.generic_visit(node) + self._class_nesting -= 1 + + def visit_AnnAssign(self, node: ast.AnnAssign) -> None: + self.generic_visit(node) + annotation = node.annotation + if _is_TypeAlias(annotation): + return + self.stats.annotated_variables += 1 + analysis = _analyse_annotation(annotation) + self.stats.explicit_Any_variables += analysis.Any_in_annotation + self.stats.explicit_Incomplete_variables += analysis.Incomplete_in_annotation + + def _visit_arg(self, node: ast.arg) -> None: + annotation = node.annotation + if annotation is None: + self.stats.unannotated_parameters += 1 + else: + 
self.stats.annotated_parameters += 1 + analysis = _analyse_annotation(annotation) + self.stats.explicit_Any_parameters += analysis.Any_in_annotation + self.stats.explicit_Incomplete_parameters += ( + analysis.Incomplete_in_annotation + ) + + def _visit_function(self, node: ast.FunctionDef | ast.AsyncFunctionDef) -> None: + self.generic_visit(node) + returns = node.returns + if returns is None: + self.stats.unannotated_returns += 1 + else: + self.stats.annotated_returns += 1 + analysis = _analyse_annotation(returns) + self.stats.explicit_Any_returns += analysis.Any_in_annotation + self.stats.explicit_Incomplete_returns += analysis.Incomplete_in_annotation + + args = node.args + + for i, arg_node in enumerate(chain(args.posonlyargs, args.args)): + if ( + i == 0 + and self.in_class + and not any( + _is_staticmethod(decorator) for decorator in node.decorator_list + ) + ): + # We don't want self/cls/metacls/mcls arguments to count towards the statistics + # Whatever they're called, they can easily be inferred + continue + self._visit_arg(arg_node) + + for arg_node in args.kwonlyargs: + self._visit_arg(arg_node) + + for arg_node in filter(None, (args.vararg, args.kwarg)): + self._visit_arg(arg_node) + + def visit_FunctionDef(self, node: ast.FunctionDef) -> None: + self._visit_function(node) + + def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None: + self._visit_function(node) + + +def gather_annotation_stats_on_file(path: Path | str) -> AnnotationStats: + """Gather annotation stats on a single typeshed stub file. + + Parameters: + path: The location of the file to be analysed. + + Returns: + An [`AnnotationStats`](./#AnnotationStats) object + containing data about the annotations in the file. + + Examples: + >>> from typeshed_stats.gather import tmpdir_typeshed, gather_annotation_stats_on_file + >>> with tmpdir_typeshed() as typeshed: + ... stats_on_functools = gather_annotation_stats_on_file(typeshed / "stdlib" / "functools.pyi") + ... + >>> type(stats_on_functools) + + >>> stats_on_functools.unannotated_parameters + 0 + """ + visitor = _AnnotationStatsCollector() + with open(path, encoding="utf-8") as file: + visitor.visit(ast.parse(file.read())) + return visitor.stats + + +@lru_cache +def _get_package_directory(package_name: PackageName, typeshed_dir: Path | str) -> Path: + if package_name == "stdlib": + return Path(typeshed_dir, "stdlib") + return Path(typeshed_dir, "stubs", package_name) + + +def gather_annotation_stats_on_package( + package_name: PackageName, *, typeshed_dir: Path | str +) -> AnnotationStats: + """Aggregate annotation stats on a typeshed stubs package. + + Parameters: + package_name: The name of the stubs package to analyze. + typeshed_dir: A path pointing to the location of a typeshed directory + in which to find the stubs package source. + + Returns: + An [`AnnotationStats`](./#AnnotationStats) object + containing data about the annotations in the package. + + Examples: + >>> from typeshed_stats.gather import tmpdir_typeshed, gather_annotation_stats_on_package + >>> with tmpdir_typeshed() as typeshed: + ... mypy_extensions_stats = gather_annotation_stats_on_package("mypy-extensions", typeshed_dir=typeshed) + ... 
+ >>> type(mypy_extensions_stats) + + >>> mypy_extensions_stats.unannotated_parameters + 0 + """ + combined: Counter[str] = Counter() + annot_stats_fields = AnnotationStats.__annotations__ + for path in _get_package_directory(package_name, typeshed_dir).rglob("*.pyi"): + file_stats = gather_annotation_stats_on_file(path) + for field in annot_stats_fields: + combined[field] += getattr(file_stats, field) + return AnnotationStats(**combined) + + +@lru_cache +def _get_package_metadata( + package_name: PackageName, typeshed_dir: Path | str +) -> Mapping[str, Any]: + package_directory = _get_package_directory(package_name, typeshed_dir) + with open(package_directory / "METADATA.toml", "rb") as f: + return tomllib.load(f) + + +def get_package_extra_description( + package_name: PackageName, *, typeshed_dir: Path | str +) -> str | None: + """Get the "extra description" of the package given in the `METADATA.toml` file. + + Each typeshed package comes with a `METADATA.toml` file, + containing various useful pieces of information about the package. + + Parameters: + package_name: The name of the package to find the extra description for. + typeshed_dir: A path pointing to a typeshed directory, + from which to retrieve the description. + + Returns: + The "extra description" of the package given in the `METADATA.toml` file, + if one is given, else [None][]. + + Examples: + >>> from typeshed_stats.gather import tmpdir_typeshed, get_package_extra_description + >>> with tmpdir_typeshed() as typeshed: + ... stdlib_description = get_package_extra_description("stdlib", typeshed_dir=typeshed) + ... protobuf_description = get_package_extra_description("protobuf", typeshed_dir=typeshed) + >>> stdlib_description is None + True + >>> isinstance(protobuf_description, str) + True + """ + if package_name == "stdlib": + return None + return _get_package_metadata(package_name, typeshed_dir).get("extra_description") + + +class StubtestStrictness(_NiceReprEnum): + """Enumeration of the various possible settings typeshed uses for [stubtest][] in CI.""" + + SKIPPED = "Stubtest is skipped in typeshed's CI for this package." + MISSING_STUBS_IGNORED = ( + "The `--ignore-missing-stub` stubtest setting is used in typeshed's CI." + ) + ERROR_ON_MISSING_STUB = ( + "Objects missing from the stub cause stubtest to emit an error " + "in typeshed's CI." + ) + + +@lru_cache +def _get_stubtest_config( + package_name: PackageName, typeshed_dir: Path | str +) -> Mapping[str, object]: + metadata = _get_package_metadata(package_name, typeshed_dir) + config = metadata.get("tool", {}).get("stubtest", {}) + assert isinstance(config, dict) + return config + + +def get_stubtest_strictness( + package_name: PackageName, *, typeshed_dir: Path | str +) -> StubtestStrictness: + """Get the setting typeshed uses in CI when [stubtest][] is run on a certain package. + + Parameters: + package_name: The name of the package to find the stubtest setting for. + typeshed_dir: A path pointing to a typeshed directory, + from which to retrieve the stubtest setting. + + Returns: + A member of the [`StubtestStrictness`](./#StubtestStrictness) + enumeration (see the docs on `StubtestStrictness` for details). + + Examples: + >>> from typeshed_stats.gather import tmpdir_typeshed, get_stubtest_strictness + >>> with tmpdir_typeshed() as typeshed: + ... stdlib_setting = get_stubtest_strictness("stdlib", typeshed_dir=typeshed) + ... 
gdb_setting = get_stubtest_strictness("gdb", typeshed_dir=typeshed) + >>> stdlib_setting + StubtestStrictness.ERROR_ON_MISSING_STUB + >>> help(_) + Help on StubtestStrictness in module typeshed_stats.gather: + + StubtestStrictness.ERROR_ON_MISSING_STUB + Objects missing from the stub cause stubtest to emit an error in typeshed's CI. + + >>> gdb_setting + StubtestStrictness.SKIPPED + """ + if package_name == "stdlib": + return StubtestStrictness.ERROR_ON_MISSING_STUB + match _get_stubtest_config(package_name, typeshed_dir): + case {"skip": True}: + return StubtestStrictness.SKIPPED + case {"ignore_missing_stub": True}: + return StubtestStrictness.MISSING_STUBS_IGNORED + case _: + return StubtestStrictness.ERROR_ON_MISSING_STUB + + +def get_stubtest_platforms( + package_name: PackageName, *, typeshed_dir: Path | str +) -> list[str]: + """Get the list of platforms on which [stubtest][] is run in typeshed's CI. + + Parameters: + package_name: The name of the package to find the stubtest setting for. + typeshed_dir: A path pointing to a typeshed directory, + from which to retrieve the stubtest configuration. + + Returns: + A list of strings describing platforms stubtest is run on. + The names correspond to the platform names + given by [sys.platform][] at runtime. + + Examples: + >>> from typeshed_stats.gather import tmpdir_typeshed, get_stubtest_platforms + >>> with tmpdir_typeshed() as typeshed: + ... pywin_platforms = get_stubtest_platforms("pywin32", typeshed_dir=typeshed) + >>> pywin_platforms + ['win32'] + """ + if package_name == "stdlib": + return ["darwin", "linux", "win32"] + match _get_stubtest_config(package_name, typeshed_dir): + case {"skip": True}: + return [] + case {"platforms": list() as platforms}: + return sorted(platforms) + case _: + return ["linux"] + + +def _num_allowlist_entries_in_file(path: Path) -> int: + with path.open(encoding="utf-8") as file: + return sum( + 1 for line in file if line.strip() and not line.strip().startswith("#") + ) + + +def get_stubtest_allowlist_length( + package_name: PackageName, *, typeshed_dir: Path | str +) -> int: + """Get the number of "allowlist entries" typeshed uses in CI when [stubtest][] is run on a certain package. + + An allowlist entry indicates a place in the stub where stubtest emits an error, + but typeshed has chosen to silence the error rather than "fix it". + Not all allowlist entries are bad: + sometimes there are good reasons to ignore an error emitted by stubtest. + + Parameters: + package_name: The name of the package + to find the number of allowlist entries for. + typeshed_dir: A path pointing to a typeshed directory, + from which to retrieve the number of stubtest allowlist entries. + + Returns: + The number of allowlist entries for that package. + + Examples: + >>> from typeshed_stats.gather import tmpdir_typeshed, get_stubtest_allowlist_length + >>> with tmpdir_typeshed() as typeshed: + ... num_stdlib_allows = get_stubtest_allowlist_length("stdlib", typeshed_dir=typeshed) + ... 
num_requests_allows = get_stubtest_allowlist_length("requests", typeshed_dir=typeshed) + >>> type(num_stdlib_allows) + + >>> num_stdlib_allows > 0 and num_requests_allows > 0 + True + """ + if package_name == "stdlib": + allowlist_dir = Path(typeshed_dir, "tests", "stubtest_allowlists") + return sum( + _num_allowlist_entries_in_file(file) for file in allowlist_dir.glob("*.txt") + ) + allowlist_dir = Path(typeshed_dir, "stubs", package_name, "@tests") + if not allowlist_dir.exists(): + return 0 + return sum( + _num_allowlist_entries_in_file(file) + for file in allowlist_dir.glob("stubtest_allowlist*.txt") + ) + + +@final +@dataclass(slots=True) +class StubtestSettings: + """Information on the settings under which [stubtest][] is run on a certain package.""" + + strictness: StubtestStrictness + platforms: list[str] + allowlist_length: int + + +def get_stubtest_settings( + package_name: PackageName, *, typeshed_dir: Path | str +) -> StubtestSettings: + """Get the [stubtest][] settings for a certain stubs package in typeshed. + + Parameters: + package_name: The name of the package to find the stubtest settings for. + typeshed_dir: A path pointing to a typeshed directory, + from which to retrieve the stubtest settings. + + Returns: + An instance of the [`StubtestSettings`](./#StubtestSettings) class. + """ + return StubtestSettings( + strictness=get_stubtest_strictness(package_name, typeshed_dir=typeshed_dir), + platforms=get_stubtest_platforms(package_name, typeshed_dir=typeshed_dir), + allowlist_length=get_stubtest_allowlist_length( + package_name, typeshed_dir=typeshed_dir + ), + ) + + +class UploadStatus(_NiceReprEnum): + """Whether or not a stubs package is currently uploaded to PyPI.""" + + UPLOADED = "These stubs are currently uploaded to PyPI." + NOT_CURRENTLY_UPLOADED = "These stubs are not currently uploaded to PyPI." + + +def get_upload_status( + package_name: PackageName, *, typeshed_dir: Path | str +) -> UploadStatus: + """Determine whether a certain package is currently uploaded to PyPI. + + Parameters: + package_name: The name of the package to find the upload status for. + typeshed_dir: A path pointing to a typeshed directory, + from which to retrieve the stubtest setting. + + Returns: + A member of the [`UploadStatus`](./#UploadStatus) enumeration + (see the docs on `UploadStatus` for details). + + Examples: + >>> from typeshed_stats.gather import tmpdir_typeshed, get_upload_status + >>> with tmpdir_typeshed() as typeshed: + ... stdlib_setting = get_upload_status("stdlib", typeshed_dir=typeshed) + ... requests_setting = get_upload_status("requests", typeshed_dir=typeshed) + >>> stdlib_setting + UploadStatus.NOT_CURRENTLY_UPLOADED + >>> help(_) + Help on UploadStatus in module typeshed_stats.gather: + + UploadStatus.NOT_CURRENTLY_UPLOADED + These stubs are not currently uploaded to PyPI. + + >>> requests_setting + UploadStatus.UPLOADED + """ + if package_name == "stdlib": + return UploadStatus.NOT_CURRENTLY_UPLOADED + match _get_package_metadata(package_name, typeshed_dir): + case {"upload": False}: + return UploadStatus.NOT_CURRENTLY_UPLOADED + case _: + return UploadStatus.UPLOADED + + +def get_stub_distribution_name( + package_name: PackageName, *, typeshed_dir: Path | str +) -> str: + """Get the name this stubs package is uploaded to PyPI under. + + For the vast majority of packages in typeshed, this is `types-{runtime-name}`, + but there may be a small number of packages + that are uploaded under nonstandard names to PyPI. 
+ + Parameters: + package_name: The (runtime) name of the package + to find the stub distribution name for. + typeshed_dir: A path pointing to a typeshed directory, + from which to retrieve the information. + + Returns: + The name under which the stubs package is uploaded to PyPI. + + Examples: + >>> from typeshed_stats.gather import tmpdir_typeshed, get_stub_distribution_name + >>> with tmpdir_typeshed() as typeshed: + ... requests_stub_dist_name = get_stub_distribution_name("requests", typeshed_dir=typeshed) + ... pika_stub_dist_name = get_stub_distribution_name("pika", typeshed_dir=typeshed) + >>> requests_stub_dist_name + 'types-requests' + >>> pika_stub_dist_name + 'types-pika-ts' + """ + if package_name == "stdlib": + return "-" + match _get_package_metadata(package_name, typeshed_dir): + case {"stub_distribution": str() as stub_distribution}: + return stub_distribution + case _: + return f"types-{package_name}" + + +def get_number_of_lines_of_file(file_path: Path | str) -> int: + """Get the total number of lines of code for a single stub file in typeshed. + + Parameters: + file_path: A path to the file to analyse. + + Returns: + The number of lines of code the stubs file contains, + excluding empty lines. + """ + with open(file_path, encoding="utf-8") as file: + return sum(1 for line in file if line.strip()) + + +def get_package_size(package_name: PackageName, *, typeshed_dir: Path | str) -> int: + """Get the total number of lines of code for a stubs package in typeshed. + + Parameters: + package_name: The name of the stubs package to find the line number for. + typeshed_dir: A path pointing to a typeshed directory + in which to find the stubs package. + + Returns: + The number of lines of code the stubs package contains, + excluding empty lines. + + Examples: + >>> from typeshed_stats.gather import tmpdir_typeshed, get_package_size + >>> with tmpdir_typeshed() as typeshed: + ... mypy_extensions_size = get_package_size("mypy-extensions", typeshed_dir=typeshed) + ... + >>> type(mypy_extensions_size) is int and mypy_extensions_size > 0 + True + """ + return sum( + get_number_of_lines_of_file(file) + for file in _get_package_directory(package_name, typeshed_dir).rglob("*.pyi") + ) + + +@lru_cache +def _get_pyright_excludelist( + *, + typeshed_dir: Path | str, + config_filename: Literal["pyrightconfig.json", "pyrightconfig.stricter.json"], +) -> frozenset[Path]: + # Read the config file; + # do some pre-processing so that it can be passed to json.loads() + config_path = Path(typeshed_dir, config_filename) + with config_path.open(encoding="utf-8") as file: + # strip comments from the file + lines = [line for line in file if not line.strip().startswith("//")] + # strip trailing commas from the file + valid_json = re.sub(r",(\s*?[\}\]])", r"\1", "\n".join(lines)) + pyright_config = json.loads(valid_json) + assert isinstance(pyright_config, dict) + excludelist = pyright_config.get("exclude", []) + return frozenset(Path(typeshed_dir, item) for item in excludelist) + + +class PyrightSetting(_NiceReprEnum): + """The various possible [pyright][] settings typeshed uses in CI.""" + + ENTIRELY_EXCLUDED = ( + "All files in this stubs package " + "are excluded from the pyright check in typeshed's CI." + ) + SOME_FILES_EXCLUDED = ( + "Some files in this stubs package " + "are excluded from the pyright check in typeshed's CI." + ) + NOT_STRICT = ( + "This package is tested with pyright in typeshed's CI, " + "but all files in this stubs package " + "are excluded from the stricter pyright settings." 
+    )
+    STRICT_ON_SOME_FILES = (
+        "Some files in this stubs package "
+        "are tested with the stricter pyright settings in typeshed's CI; "
+        "some are excluded from the stricter settings."
+    )
+    STRICT = (
+        "All files in this stubs package are tested with the stricter pyright settings "
+        "in typeshed's CI."
+    )
+
+
+def _path_or_path_ancestor_is_listed(path: Path, path_list: Collection[Path]) -> bool:
+    return path in path_list or any(
+        listed_path in path.parents for listed_path in path_list
+    )
+
+
+def _child_of_path_is_listed(path: Path, path_list: Collection[Path]) -> bool:
+    return any(path in listed_path.parents for listed_path in path_list)
+
+
+def get_pyright_setting_for_path(
+    file_path: Path | str, *, typeshed_dir: Path | str
+) -> PyrightSetting:
+    """Get the settings typeshed uses in CI when [pyright][] is run on a certain path.
+
+    Parameters:
+        file_path: The path to query.
+        typeshed_dir: A path pointing to a typeshed directory,
+            from which to retrieve the pyright setting.
+
+    Returns:
+        A member of the [`PyrightSetting`](./#PyrightSetting) enumeration
+        (see the docs on `PyrightSetting` for details).
+    """
+    entirely_excluded_paths = _get_pyright_excludelist(
+        typeshed_dir=typeshed_dir, config_filename="pyrightconfig.json"
+    )
+    paths_excluded_from_stricter_check = _get_pyright_excludelist(
+        typeshed_dir=typeshed_dir, config_filename="pyrightconfig.stricter.json"
+    )
+    file_path = file_path if isinstance(file_path, Path) else Path(file_path)
+
+    if _path_or_path_ancestor_is_listed(file_path, entirely_excluded_paths):
+        return PyrightSetting.ENTIRELY_EXCLUDED
+    if _child_of_path_is_listed(file_path, entirely_excluded_paths):
+        return PyrightSetting.SOME_FILES_EXCLUDED
+    if _path_or_path_ancestor_is_listed(file_path, paths_excluded_from_stricter_check):
+        return PyrightSetting.NOT_STRICT
+    if _child_of_path_is_listed(file_path, paths_excluded_from_stricter_check):
+        return PyrightSetting.STRICT_ON_SOME_FILES
+    return PyrightSetting.STRICT
+
+
+def get_pyright_setting_for_package(
+    package_name: PackageName, *, typeshed_dir: Path | str
+) -> PyrightSetting:
+    """Get the settings typeshed uses in CI when [pyright][] is run on a certain package.
+
+    Parameters:
+        package_name: The name of the package to find the pyright setting for.
+        typeshed_dir: A path pointing to a typeshed directory,
+            from which to retrieve the pyright setting.
+
+    Returns:
+        A member of the [`PyrightSetting`](./#PyrightSetting) enumeration
+        (see the docs on `PyrightSetting` for details).
+
+    Examples:
+        >>> from typeshed_stats.gather import tmpdir_typeshed, get_pyright_setting_for_package
+        >>> with tmpdir_typeshed() as typeshed:
+        ...     stdlib_setting = get_pyright_setting_for_package("stdlib", typeshed_dir=typeshed)
+        ...
+        >>> stdlib_setting
+        PyrightSetting.STRICT_ON_SOME_FILES
+        >>> help(_)
+        Help on PyrightSetting in module typeshed_stats.gather:
+
+        PyrightSetting.STRICT_ON_SOME_FILES
+            Some files in this stubs package are tested with the stricter pyright settings in typeshed's CI; some are excluded from the stricter settings.
+
+    """
+    return get_pyright_setting_for_path(
+        file_path=_get_package_directory(package_name, typeshed_dir),
+        typeshed_dir=typeshed_dir,
+    )
+
+
+@final
+@dataclass(slots=True)
+class PackageInfo:
+    """Statistics about a single stubs package in typeshed."""
+
+    package_name: PackageName
+    stub_distribution_name: str
+    extra_description: str | None
+    number_of_lines: int
+    upload_status: UploadStatus
+    stubtest_settings: StubtestSettings
+    pyright_setting: PyrightSetting
+    annotation_stats: AnnotationStats
+
+
+def gather_stats_on_package(
+    package_name: PackageName, *, typeshed_dir: Path | str
+) -> PackageInfo:
+    """Gather miscellaneous statistics about a single stubs package in typeshed.
+
+    Parameters:
+        package_name: The name of the package to gather statistics on.
+        typeshed_dir: A path pointing to a typeshed directory,
+            in which the source code for the stubs package can be found.
+
+    Returns:
+        An instance of the [`PackageInfo`](./#PackageInfo) class.
+
+    Examples:
+        >>> from typeshed_stats.gather import tmpdir_typeshed, gather_stats_on_package
+        >>> with tmpdir_typeshed() as typeshed:
+        ...     stdlib_info = gather_stats_on_package("stdlib", typeshed_dir=typeshed)
+        ...
+        >>> stdlib_info.package_name
+        'stdlib'
+        >>> stdlib_info.stubtest_settings.strictness
+        StubtestStrictness.ERROR_ON_MISSING_STUB
+        >>> type(stdlib_info.number_of_lines) is int and stdlib_info.number_of_lines > 0
+        True
+    """
+    return PackageInfo(
+        package_name=package_name,
+        stub_distribution_name=get_stub_distribution_name(
+            package_name, typeshed_dir=typeshed_dir
+        ),
+        extra_description=get_package_extra_description(
+            package_name, typeshed_dir=typeshed_dir
+        ),
+        number_of_lines=get_package_size(package_name, typeshed_dir=typeshed_dir),
+        upload_status=get_upload_status(package_name, typeshed_dir=typeshed_dir),
+        stubtest_settings=get_stubtest_settings(
+            package_name, typeshed_dir=typeshed_dir
+        ),
+        pyright_setting=get_pyright_setting_for_package(
+            package_name, typeshed_dir=typeshed_dir
+        ),
+        annotation_stats=gather_annotation_stats_on_package(
+            package_name, typeshed_dir=typeshed_dir
+        ),
+    )
+
+
+@final
+@dataclass(slots=True)
+class FileInfo:
+    """Statistics about a single `.pyi` file in typeshed."""
+
+    file_path: _PathRelativeToTypeshed
+    parent_package: PackageName
+    number_of_lines: int
+    pyright_setting: PyrightSetting
+    annotation_stats: AnnotationStats
+
+
+@lru_cache
+def _normalize_typeshed_dir(typeshed_dir: Path | str) -> _AbsolutePath:
+    if isinstance(typeshed_dir, str):
+        typeshed_dir = Path(typeshed_dir)
+    elif not isinstance(typeshed_dir, Path):
+        raise TypeError(
+            "Expected str or Path argument for typeshed_dir, got"
+            f" {typeshed_dir.__class__.__name__!r}"
+        )
+    if not typeshed_dir.exists():
+        raise ValueError(f"{typeshed_dir} does not exist!")
+    if not typeshed_dir.is_dir():
+        raise ValueError(f"{typeshed_dir} is not a directory!")
+    return _AbsolutePath(typeshed_dir.absolute())
+
+
+@lru_cache
+def _normalize_file_path(
+    file_path: Path | str, typeshed_dir: _AbsolutePath
+) -> _AbsolutePath:
+    orig_file_path = file_path
+    if isinstance(file_path, str):
+        file_path = Path(file_path)
+    elif not isinstance(file_path, Path):
+        raise TypeError(
+            "Expected str or Path argument for file_path, got"
+            f" {file_path.__class__.__name__!r}"
+        )
+    if typeshed_dir in file_path.absolute().parents:
+        file_path = _AbsolutePath(file_path.absolute())
+    else:
+        file_path = _AbsolutePath(typeshed_dir / file_path)
+    if not file_path.exists():
+        raise ValueError(
+            f"'{orig_file_path}' does not exist as an absolute path or as a path"
+            " relative to typeshed"
+        )
+    if not file_path.is_file():
+        raise ValueError(f"'{orig_file_path}' exists, but does not point to a file")
+    file_path_suffix = file_path.suffix
+    if file_path_suffix != ".pyi":
+        raise ValueError(
+            f"Expected a path pointing to a .pyi file, got a {file_path_suffix!r} file"
+        )
+    return file_path
+
+
+def _get_parent_package(
+    file_path: _AbsolutePath, typeshed_dir: _AbsolutePath
+) -> PackageName:
+    if (typeshed_dir / "stdlib") in file_path.parents:
+        return "stdlib"
+    parent_path = next(  # pragma: no branch
+        path for path in (typeshed_dir / "stubs").iterdir() if path in file_path.parents
+    )
+    return parent_path.parts[-1]
+
+
+def gather_stats_on_file(
+    file_path: Path | str, *, typeshed_dir: Path | str
+) -> FileInfo:
+    """Gather stats on a single `.pyi` file in typeshed.
+
+    Parameters:
+        file_path: A path pointing to the file on which to gather stats.
+            This can be an absolute path,
+            or a path relative to the `typeshed_dir` argument.
+        typeshed_dir: A path pointing to the overall typeshed directory.
+            This can be an absolute or relative path.
+
+    Returns:
+        An instance of the [`FileInfo`](./#FileInfo) class.
+
+    Examples:
+        >>> from typeshed_stats.gather import tmpdir_typeshed, gather_stats_on_file
+        >>> with tmpdir_typeshed() as typeshed:
+        ...     # Paths can be relative to typeshed_dir
+        ...     functools_info = gather_stats_on_file(
+        ...         "stdlib/functools.pyi", typeshed_dir=typeshed
+        ...     )
+        ...     # Absolute paths are also fine
+        ...     stubs_dir = typeshed / "stubs"
+        ...     requests_api_info = gather_stats_on_file(
+        ...         stubs_dir / "requests/requests/api.pyi", typeshed_dir=typeshed
+        ...     )
+        ...     # Gather per-file stats on a directory
+        ...     markdown_per_file_stats = [
+        ...         gather_stats_on_file(module, typeshed_dir=typeshed)
+        ...         for module in (stubs_dir / "Markdown").rglob("*.pyi")
+        ...     ]
+        >>> type(functools_info)
+        <class 'typeshed_stats.gather.FileInfo'>
+        >>> functools_info.parent_package
+        'stdlib'
+        >>> functools_info.file_path.as_posix()
+        'stdlib/functools.pyi'
+        >>> requests_api_info.parent_package
+        'requests'
+        >>> requests_api_info.file_path.as_posix()
+        'stubs/requests/requests/api.pyi'
+    """
+    typeshed_dir = _normalize_typeshed_dir(typeshed_dir)
+    file_path = _normalize_file_path(file_path, typeshed_dir)
+    return FileInfo(
+        file_path=Path(file_path).relative_to(typeshed_dir),
+        parent_package=_get_parent_package(file_path, typeshed_dir),
+        number_of_lines=get_number_of_lines_of_file(file_path),
+        pyright_setting=get_pyright_setting_for_path(
+            file_path, typeshed_dir=typeshed_dir
+        ),
+        annotation_stats=gather_annotation_stats_on_file(file_path),
+    )
+
+
+_get_package_name = attrgetter("package_name")
+
+
+def gather_stats_on_multiple_packages(
+    packages: Iterable[str] | None = None, *, typeshed_dir: Path | str
+) -> Sequence[PackageInfo]:
+    """Gather statistics on multiple packages.
+
+    Parameters:
+        packages: An iterable of package names to be analysed, or [None][].
+            If `None`, defaults to all third-party stubs, plus the stubs for the stdlib.
+        typeshed_dir: The path to a local clone of typeshed.
+
+    Returns:
+        A sequence of [`PackageInfo`](./#PackageInfo) objects.
+        Each `PackageInfo` object contains information representing an analysis
+        of a certain stubs package in typeshed.
+
+    Examples:
+        >>> from typeshed_stats.gather import PackageInfo, tmpdir_typeshed, gather_stats_on_multiple_packages
+        >>> with tmpdir_typeshed() as typeshed:
+        ...     infos = gather_stats_on_multiple_packages(
+        ...         ["stdlib", "aiofiles", "boto"], typeshed_dir=typeshed
+        ...     )
+        ...
+        >>> [info.package_name for info in infos]
+        ['aiofiles', 'boto', 'stdlib']
+        >>> all(type(info) is PackageInfo for info in infos)
+        True
+    """
+    if packages is None:
+        packages = os.listdir(Path(typeshed_dir, "stubs")) + ["stdlib"]
+    results = [
+        gather_stats_on_package(package, typeshed_dir=typeshed_dir)
+        for package in packages
+    ]
+    return sorted(results, key=_get_package_name)
+
+
+@contextmanager
+def tmpdir_typeshed() -> Iterator[Path]:
+    """Clone typeshed into a tempdir, then yield a [`Path`][pathlib.Path] pointing to it.
+
+    A context manager.
+
+    Yields:
+        A [`Path`][pathlib.Path] pointing to a tempdir with a clone of typeshed inside.
+    """
+    import subprocess
+    from tempfile import TemporaryDirectory
+
+    args = [
+        "git",
+        "clone",
+        "https://github.com/python/typeshed",
+        "--depth",
+        "1",
+        "--quiet",
+    ]
+
+    with TemporaryDirectory() as td:
+        args.append(td)
+        subprocess.run(args, check=True)
+        yield Path(td)
diff --git a/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/py.typed b/pyperformance/data-files/benchmarks/bm_typeshed_stats/typeshed-stats/src/typeshed_stats/py.typed
new file mode 100644
index 00000000..e69de29b
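A minimal usage sketch for the vendored typeshed_stats.gather API shown above, run against a pre-existing local typeshed checkout rather than tmpdir_typeshed() (which clones typeshed over the network). The LOCAL_TYPESHED path and the package list are illustrative assumptions and are not taken from the benchmark's own runner.

    from pathlib import Path

    from typeshed_stats.gather import gather_stats_on_multiple_packages

    # Assumption for this sketch: a typeshed clone sitting next to the script.
    LOCAL_TYPESHED = Path("typeshed")


    def main() -> None:
        # Mirrors the doctest in gather_stats_on_multiple_packages, but points
        # typeshed_dir at the local checkout instead of a temporary clone.
        infos = gather_stats_on_multiple_packages(
            ["stdlib", "aiofiles", "boto"], typeshed_dir=LOCAL_TYPESHED
        )
        for info in infos:
            # Each PackageInfo carries the per-package stats gathered above.
            print(info.package_name, info.number_of_lines, info.pyright_setting.name)


    if __name__ == "__main__":
        main()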